hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e8541d0be059291ec2bfeb77903f2ebc10bebbaa | 136 | ex | Elixir | lib/absinthe_websocket/subscriber.ex | karlosmid/absinthe_websocket | b08e0cf07c1dcdbb98e14c3905d8ee7b777aee66 | [
"MIT"
] | 8 | 2018-07-13T20:54:53.000Z | 2020-03-07T08:27:11.000Z | lib/absinthe_websocket/subscriber.ex | karlosmid/absinthe_websocket | b08e0cf07c1dcdbb98e14c3905d8ee7b777aee66 | [
"MIT"
] | 10 | 2018-07-13T15:29:19.000Z | 2019-12-04T18:58:15.000Z | lib/absinthe_websocket/subscriber.ex | karlosmid/absinthe_websocket | b08e0cf07c1dcdbb98e14c3905d8ee7b777aee66 | [
"MIT"
] | 3 | 2018-11-28T19:53:45.000Z | 2021-12-17T15:47:10.000Z | defmodule AbsintheWebSocket.Subscriber do
@callback subscribe() :: no_return()
# @callback receive(atom(), any) :: no_return()
end
| 22.666667 | 49 | 0.720588 |
e85458071c3ae2ab22908a81721c93f6ccb7bbca | 2,664 | ex | Elixir | lib/fusion/udp_tunnel.ex | elpddev/fusion | 7106ba4922786e1d79310074e46e6996d0c2f4aa | [
"MIT"
] | 2 | 2020-02-02T20:22:28.000Z | 2020-12-10T18:25:03.000Z | lib/fusion/udp_tunnel.ex | elpddev/fusion | 7106ba4922786e1d79310074e46e6996d0c2f4aa | [
"MIT"
] | null | null | null | lib/fusion/udp_tunnel.ex | elpddev/fusion | 7106ba4922786e1d79310074e46e6996d0c2f4aa | [
"MIT"
] | null | null | null | defmodule Fusion.UdpTunnel do
@moduledoc """
A GenServer for handling of opening udp tunnel network and observing their state.
"""
alias Fusion.UdpTunnel
alias Fusion.Utilities.Ssh
alias Fusion.Utilities.Exec
alias Fusion.Net.Spot
alias Fusion.Net
alias Fusion.PortRelay
defstruct status: :off,
ssh_tunnel_pid: nil,
auth: nil,
remote: nil,
direction: nil,
from_port: nil,
to_spot: nil,
mediator_tcp_from_port: nil,
mediator_tcp_to_port: nil
use GenServer
require Logger
@doc """
Starts a `UdpTunnel` GenServer without opening the tunnel yet.

Returns the `GenServer.start_link/3` result.
"""
def start_link(auth, remote, direction, from_port, to_spot) do
  args = [auth, remote, direction, from_port, to_spot]
  GenServer.start_link(__MODULE__, args, [])
end

@doc """
Starts a `UdpTunnel` GenServer and immediately opens the tunnel.

Crashes (match error) if either the start or the tunnel setup fails;
otherwise returns the original `{:ok, pid}` tuple.
"""
def start_link_now(auth, remote, direction, from_port, to_spot) do
  args = [auth, remote, direction, from_port, to_spot]
  {:ok, pid} = result = GenServer.start_link(__MODULE__, args, [])
  # Open the tunnel synchronously before handing the pid back to the caller.
  :ok = start_tunnel(pid)
  result
end

@doc """
Asks a running tunnel server to open its tunnel (synchronous call).
"""
def start_tunnel(server) do
  GenServer.call(server, {:start_tunnel})
end
## Server Callbacks
# GenServer callback: seed the %UdpTunnel{} state from the argument list
# passed by start_link/5. All runtime fields (pids, mediator ports) keep
# their struct defaults until the tunnel is started.
def init([auth, remote, direction, from_port, to_spot]) do
  state = %UdpTunnel{
    auth: auth,
    remote: remote,
    direction: direction,
    from_port: from_port,
    to_spot: to_spot
  }

  {:ok, state}
end
# Opens the tunnel. Only matches when the tunnel is still :off, so a second
# :start_tunnel call on a running tunnel crashes with a function-clause error.
#
# Wiring (as visible below): an SSH TCP port-forward is created between two
# mediator TCP ports (here the same generated port on both ends), and a pair
# of PortRelay processes bridge UDP traffic into/out of that TCP tunnel,
# with the relay placement depending on the tunnel direction.
def handle_call({:start_tunnel}, _from, %UdpTunnel{status: :off} = state) do
# Pick a free local port for the TCP leg of the tunnel; the "to" port is
# deliberately the same value here.
mediator_tcp_from_port = Net.gen_port()
mediator_tcp_to_port = mediator_tcp_from_port
# Launch the ssh port-forward command and monitor its stdout/stderr; the
# resulting messages arrive in handle_info/2 below.
{:ok, tunnel_pid, _os_pid} = Ssh.cmd_port_tunnel(
state.auth, state.remote, mediator_tcp_from_port,
%Spot{host: "localhost", port: mediator_tcp_from_port},
state.direction)
|> Exec.capture_std_mon
case state.direction do
:reverse ->
# Remote side: UDP from_port -> local TCP mediator; local side: TCP
# mediator -> destination UDP port.
{:ok, _} = PortRelay.start_link_now(
state.auth, state.remote, state.from_port, :udp, mediator_tcp_from_port, :tcp)
{:ok, _} = PortRelay.start_link_now(mediator_tcp_to_port, :tcp, state.to_spot.port, :udp)
:forward ->
# Mirror image of the :reverse case: the UDP->TCP relay runs locally and
# the TCP->UDP relay runs on the remote host.
{:ok, _} = PortRelay.start_link_now(state.from_port, :udp, mediator_tcp_from_port, :tcp)
{:ok, _} = PortRelay.start_link_now(
state.auth, state.remote, mediator_tcp_to_port, :tcp, state.to_spot.port, :udp)
end
# Record the ssh process and mediator ports; :after_connect_trial marks that
# setup commands were issued (success is only confirmed via later output).
{:reply, :ok, %{ state |
ssh_tunnel_pid: tunnel_pid,
mediator_tcp_from_port: mediator_tcp_from_port,
mediator_tcp_to_port: mediator_tcp_to_port,
status: :after_connect_trial
}}
end
# Stdout from the monitored ssh process is currently only logged/inspected.
def handle_info({:stdout, _proc_id, msg}, state) do
  Logger.debug "stdout"
  IO.inspect msg
  # todo
  {:noreply, state}
end

# Any stderr output from the ssh process is treated as fatal: stop the
# server with reason :stderr. The original wrapped this return in a
# single-branch `cond do true -> ... end`, which was dead scaffolding and
# has been removed; behavior is unchanged.
def handle_info({:stderr, _proc_id, _msg}, state) do
  {:stop, :stderr, state}
end
end
| 27.75 | 97 | 0.673423 |
e85461fafc6c347389634d13a33b570d3faf9fa1 | 3,672 | ex | Elixir | implementations/elixir/ockam/ockam/lib/ockam/stream/index/worker.ex | L3pereira/ockam | f53336ec673eb137876551c240e31fae741ef0f9 | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam/lib/ockam/stream/index/worker.ex | L3pereira/ockam | f53336ec673eb137876551c240e31fae741ef0f9 | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam/lib/ockam/stream/index/worker.ex | L3pereira/ockam | f53336ec673eb137876551c240e31fae741ef0f9 | [
"Apache-2.0"
] | null | null | null | defmodule Ockam.Stream.Index.Worker do
@moduledoc false
use Ockam.Protocol.Mapping
use Ockam.Worker
require Logger
@default_mod Ockam.Stream.Index.Storage.Internal
@protocol Ockam.Protocol.Stream.Index
@partitioned_protocol Ockam.Protocol.Stream.Partitioned.Index
@protocol_mapping Ockam.Protocol.Mapping.server(Ockam.Protocol.Stream.Index)
@impl true
# Ockam.Protocol.Mapping callback: expose the server-side mapping for the
# stream index protocol (built at compile time into @protocol_mapping above).
def protocol_mapping() do
@protocol_mapping
end
@impl true
# Worker callback: initialize the configured storage backend and stash the
# {module, backend_state} pair under :storage in the worker state.
def setup(options, state) do
  # Backend module defaults to the internal storage implementation.
  mod = Keyword.get(options, :storage_mod, @default_mod)
  mod_options = Keyword.get(options, :storage_options, [])

  case mod.init(mod_options) do
    {:ok, mod_state} ->
      {:ok, Map.put(state, :storage, {mod, mod_state})}

    {:error, reason} ->
      Logger.error("Stream index setup error: #{inspect(reason)}")
      {:error, reason}
  end
end
@impl true
# Dispatch decoded index-protocol payloads: :save persists an index, :get
# replies with the stored index over the message's return route. Both the
# plain and the partitioned index protocol are accepted.
def handle_message(%{payload: payload} = message, state) do
case decode_payload(payload) do
{:ok, protocol, {:save, data}}
when protocol == @protocol or protocol == @partitioned_protocol ->
handle_save(protocol, data, state)
{:ok, protocol, {:get, data}}
when protocol == @protocol or protocol == @partitioned_protocol ->
handle_get(protocol, data, Ockam.Message.return_route(message), state)
{:error, other} ->
# NOTE(review): a successful decode under any *other* protocol would match
# no clause and raise CaseClauseError — confirm decode_payload/1 can only
# yield these two protocols or an error for this worker's mapping.
Logger.error("Unexpected message #{inspect(other)}")
{:ok, state}
end
end
# Persist the client's index for a stream/partition. Raises (MatchError) if
# the payload map is missing :client_id, :stream_name, or :index; :partition
# defaults to 0 for the non-partitioned protocol.
def handle_save(_protocol, data, state) do
%{client_id: client_id, stream_name: stream_name, index: index} = data
partition = Map.get(data, :partition, 0)
Logger.debug("Save index #{inspect({client_id, stream_name, partition, index})}")
case save_index(client_id, stream_name, partition, index, state) do
{:ok, state} ->
{:ok, state}
{{:error, error}, _state} ->
# Storage failure: log and surface the error; the pre-failure state is
# deliberately discarded here.
Logger.error("Unable to save index: #{inspect(data)}. Reason: #{inspect(error)}")
{:error, error}
end
end
# Look up the stored index for a client/stream/partition and, on success,
# send it back along `return_route`. :partition defaults to 0 for the
# non-partitioned protocol.
def handle_get(protocol, data, return_route, state) do
  %{client_id: client_id, stream_name: stream_name} = data
  partition = Map.get(data, :partition, 0)
  Logger.debug("get index #{inspect({client_id, stream_name})}")

  case get_index(client_id, stream_name, partition, state) do
    {{:ok, index}, new_state} ->
      reply_index(protocol, client_id, stream_name, partition, index, return_route, new_state)
      {:ok, new_state}

    {{:error, reason}, _state} ->
      Logger.error("Unable to get index for: #{inspect(data)}. Reason: #{inspect(reason)}")
      {:error, reason}
  end
end
@doc """
Persists `index` for `{client_id, stream_name, partition}` through the
configured storage backend. Returns `{result, state}`.
"""
def save_index(client_id, stream_name, partition, index, state) do
  with_storage(state, fn mod, mod_state ->
    mod.save_index(client_id, stream_name, partition, index, mod_state)
  end)
end

@doc """
Reads the stored index for `{client_id, stream_name, partition}` through the
configured storage backend. Returns `{result, state}`.
"""
def get_index(client_id, stream_name, partition, state) do
  with_storage(state, fn mod, mod_state ->
    mod.get_index(client_id, stream_name, partition, mod_state)
  end)
end

@doc """
Runs `fun` with the storage module and its state, then writes the possibly
updated backend state back into the worker state. Returns `{result, state}`.
"""
def with_storage(state, fun) do
  {mod, mod_state} = Map.get(state, :storage)
  {result, updated_mod_state} = fun.(mod, mod_state)
  {result, %{state | storage: {mod, updated_mod_state}}}
end
# Route an index reply back to the requester: onward along the caller's
# return route, with this worker's address as the new return route. The
# payload is encoded under whichever protocol the request used.
def reply_index(protocol, client_id, stream_name, partition, index, return_route, state) do
Ockam.Router.route(%{
onward_route: return_route,
return_route: [state.address],
payload:
encode_payload(protocol, %{
client_id: client_id,
stream_name: stream_name,
partition: partition,
index: index
})
})
end
end
| 31.930435 | 93 | 0.670479 |
e854768e971cd8ebef781aff276f0182fbcc8d6a | 2,103 | ex | Elixir | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v2/model/folder_operation.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v2/model/folder_operation.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v2/model/folder_operation.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudResourceManager.V2.Model.FolderOperation do
@moduledoc """
Metadata describing a long running folder operation
## Attributes
* `destinationParent` (*type:* `String.t`, *default:* `nil`) - The resource name of the folder or organization we are either creating
the folder under or moving the folder to.
* `displayName` (*type:* `String.t`, *default:* `nil`) - The display name of the folder.
* `operationType` (*type:* `String.t`, *default:* `nil`) - The type of this operation.
* `sourceParent` (*type:* `String.t`, *default:* `nil`) - The resource name of the folder's parent.
Only applicable when the operation_type is MOVE.
"""
# Generated model (see the header of this file): ModelBase supplies the
# struct definition plus decode/encode support for the `field` declarations.
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:destinationParent => String.t(),
:displayName => String.t(),
:operationType => String.t(),
:sourceParent => String.t()
}
# One `field` per JSON attribute; names intentionally match the API's
# camelCase wire format.
field(:destinationParent)
field(:displayName)
field(:operationType)
field(:sourceParent)
end
defimpl Poison.Decoder, for: GoogleApi.CloudResourceManager.V2.Model.FolderOperation do
  # Delegate JSON decoding to the generated model helper.
  def decode(value, options),
    do: GoogleApi.CloudResourceManager.V2.Model.FolderOperation.decode(value, options)
end

defimpl Poison.Encoder, for: GoogleApi.CloudResourceManager.V2.Model.FolderOperation do
  # Encoding is handled generically by the shared Gax model base.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 36.258621 | 137 | 0.718497 |
e8549723f62b0bd89310274b23f1844d5aaab252 | 138 | exs | Elixir | implementations/elixir/ockam/ockam_transport_udp/test/test_helper.exs | digikata/ockam | 015a834ce0d34921b634439d965f726d877fd076 | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam_transport_udp/test/test_helper.exs | digikata/ockam | 015a834ce0d34921b634439d965f726d877fd076 | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam_transport_udp/test/test_helper.exs | digikata/ockam | 015a834ce0d34921b634439d965f726d877fd076 | [
"Apache-2.0"
] | null | null | null | Application.ensure_all_started(:ockam)
# Boot the UDP transport application (and its dependencies) before the suite runs.
Application.ensure_all_started(:ockam_transport_udp)
# capture_log: keep log output out of the test report; trace: run tests
# synchronously with per-test reporting.
ExUnit.start(capture_log: true, trace: true)
| 27.6 | 52 | 0.847826 |
e854af544d6ec1c3b81dc645253f954a5f37bfc4 | 1,267 | exs | Elixir | mix.exs | DataKrewTech/tirexs | 8238da373f4547d27eea57a10826114e947aa66b | [
"Apache-2.0"
] | null | null | null | mix.exs | DataKrewTech/tirexs | 8238da373f4547d27eea57a10826114e947aa66b | [
"Apache-2.0"
] | null | null | null | mix.exs | DataKrewTech/tirexs | 8238da373f4547d27eea57a10826114e947aa66b | [
"Apache-2.0"
] | 1 | 2021-05-11T22:35:50.000Z | 2021-05-11T22:35:50.000Z | defmodule Tirexs.Mixfile do
use Mix.Project
# Mix project configuration: package identity, docs, and dependency wiring.
def project do
[
app: :tirexs,
name: "Tirexs",
version: "0.8.16",
source_url: github(),
homepage_url: github(),
elixir: ">= 1.3.0",
description: description(),
package: package(),
deps: deps(),
docs: [extras: ["README.md", "CONTRIBUTING.md"]]
]
end
# OTP application config: runtime apps plus the default endpoint env below.
def application do
[ applications: [:exjsx, :inets, :logger], env: env() ]
end
# Default Elasticsearch endpoint, overridable via the :tirexs app env.
defp env do
[ uri: %URI{ authority: "127.0.0.1:9200", scheme: "http", host: "127.0.0.1", port: 9200 } ]
end
# NOTE(review): :exjsx is pinned to a git fork rather than a Hex release —
# confirm this is intentional before publishing the package.
defp deps do
[ {:exjsx, git: "https://github.com/DataKrewTech/exjsx.git"}, {:ex_doc, "~> 0.12", only: :dev}, {:earmark, "~> 1.0", only: :dev} ]
end
# Short description shown on Hex.
defp description do
"""
An Elixir flavored DSL for building JSON based queries to Elasticsearch engine
"""
end
# Hex package metadata.
defp package do
[
maintainers: ["Aleksey Zatvobor"],
licenses: ["Apache 2.0"],
links: %{"GitHub" => github(), "Contributors" => contributors(), "Issues" => issues()}
]
end
# Canonical project URLs, shared between project/0 and package/0.
defp github, do: "https://github.com/Zatvobor/tirexs"
defp contributors, do: "https://github.com/Zatvobor/tirexs/graphs/contributors"
defp issues, do: "https://github.com/Zatvobor/tirexs/issues"
end
| 25.857143 | 134 | 0.597474 |
e854b467b917555770759f81096952b21544db92 | 1,576 | ex | Elixir | lib/realworld_phoenix_web/views/error_helpers.ex | tamanugi/realworld-phoenix | 1c0f90234926550a9124863b3946934b8f2e19a4 | [
"MIT"
] | 9 | 2021-08-23T23:21:31.000Z | 2022-03-27T23:19:55.000Z | lib/realworld_phoenix_web/views/error_helpers.ex | tamanugi/realworld-phoenix | 1c0f90234926550a9124863b3946934b8f2e19a4 | [
"MIT"
] | null | null | null | lib/realworld_phoenix_web/views/error_helpers.ex | tamanugi/realworld-phoenix | 1c0f90234926550a9124863b3946934b8f2e19a4 | [
"MIT"
] | 1 | 2021-09-06T07:26:37.000Z | 2021-09-06T07:26:37.000Z | defmodule RealworldPhoenixWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates one `<span class="invalid-feedback">` tag per error on `field`,
each carrying a `phx-feedback-for` attribute so LiveView can scope feedback
to the input. Returns a (possibly empty) list of safe HTML.
"""
def error_tag(form, field) do
  form.errors
  |> Keyword.get_values(field)
  |> Enum.map(fn error ->
    content_tag(:span, translate_error(error),
      class: "invalid-feedback",
      phx_feedback_for: input_name(form, field)
    )
  end)
end
@doc """
Translates an Ecto-style `{message, opts}` error tuple using gettext,
applying plural rules when `opts` carries a `:count`.
"""
def translate_error({msg, opts}) do
# When using gettext, we typically pass the strings we want
# to translate as a static argument:
#
# # Translate "is invalid" in the "errors" domain
# dgettext("errors", "is invalid")
#
# # Translate the number of files with plural rules
# dngettext("errors", "1 file", "%{count} files", count)
#
# Because the error messages we show in our forms and APIs
# are defined inside Ecto, we need to translate them dynamically.
# This requires us to call the Gettext module passing our gettext
# backend as first argument.
#
# Note we use the "errors" domain, which means translations
# should be written to the errors.po file. The :count option is
# set by Ecto and indicates we should also apply plural rules.
if count = opts[:count] do
Gettext.dngettext(RealworldPhoenixWeb.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(RealworldPhoenixWeb.Gettext, "errors", msg, opts)
end
end
end
| 32.833333 | 85 | 0.67132 |
e854bbe43024d6f817335cecb020f759c2527a7b | 358 | exs | Elixir | priv/repo/seeds.exs | jespr/ex_blog_example | 0a014d4b88b224210a7e15d7c60521aff05059c9 | [
"Unlicense"
] | null | null | null | priv/repo/seeds.exs | jespr/ex_blog_example | 0a014d4b88b224210a7e15d7c60521aff05059c9 | [
"Unlicense"
] | null | null | null | priv/repo/seeds.exs | jespr/ex_blog_example | 0a014d4b88b224210a7e15d7c60521aff05059c9 | [
"Unlicense"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# BlogExample.Repo.insert!(%BlogExample.SomeModel{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.833333 | 61 | 0.712291 |
e854c46c1ea629f7fda5b9191296da3353d70ec0 | 60 | ex | Elixir | apps/voyager/lib/voyager_web/views/faq_view.ex | msk-access/seqosystem | 4d99c50a0b0bc74c7f9f899be4eda8eddf6e5a39 | [
"Apache-2.0"
] | 3 | 2020-11-24T07:45:26.000Z | 2021-07-29T13:37:02.000Z | apps/voyager/lib/voyager_web/views/faq_view.ex | mskcc/seqosystem | 4d99c50a0b0bc74c7f9f899be4eda8eddf6e5a39 | [
"Apache-2.0"
] | 52 | 2020-10-21T19:47:59.000Z | 2021-09-09T18:42:33.000Z | apps/voyager/lib/voyager_web/views/faq_view.ex | msk-access/seqosystem | 4d99c50a0b0bc74c7f9f899be4eda8eddf6e5a39 | [
"Apache-2.0"
] | 1 | 2020-12-15T03:33:31.000Z | 2020-12-15T03:33:31.000Z | defmodule VoyagerWeb.FaqView do
use VoyagerWeb, :view
end
| 15 | 31 | 0.8 |
e854efb7994d7c0f9cb8127e3463e27c82a30a0c | 182 | exs | Elixir | priv/repo/migrations/20190208150347_add_repositories_organization_id_index.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 691 | 2017-03-08T09:15:45.000Z | 2022-03-23T22:04:47.000Z | priv/repo/migrations/20190208150347_add_repositories_organization_id_index.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 491 | 2017-03-07T12:58:42.000Z | 2022-03-29T23:32:54.000Z | priv/repo/migrations/20190208150347_add_repositories_organization_id_index.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 200 | 2017-03-12T23:03:39.000Z | 2022-03-05T17:55:52.000Z | defmodule Hexpm.RepoBase.Migrations.AddRepositoriesOrganizationIdIndex do
use Ecto.Migration
# Enforce at the database level that an organization owns at most one
# repository row. Reversible: Ecto derives the drop for rollback.
def change do
create(unique_index(:repositories, [:organization_id]))
end
end
| 22.75 | 73 | 0.802198 |
e854f17c51eaba7bb2b05b4cbebbba2b3da8617f | 425 | ex | Elixir | elixir/concurrency-model/state-actor/counter3.ex | lijiansong/lang | e255709da2b12e09dea45f86d54f77a19b96f13b | [
"WTFPL"
] | 1 | 2020-01-09T03:22:09.000Z | 2020-01-09T03:22:09.000Z | elixir/concurrency-model/state-actor/counter3.ex | lijiansong/lang | e255709da2b12e09dea45f86d54f77a19b96f13b | [
"WTFPL"
] | null | null | null | elixir/concurrency-model/state-actor/counter3.ex | lijiansong/lang | e255709da2b12e09dea45f86d54f77a19b96f13b | [
"WTFPL"
] | null | null | null | defmodule Counter do
# Message loop holding the current count: replies to {:next, sender, ref}
# with {:ok, ref, count}, then recurses with the incremented value.
def loop(n) do
  receive do
    {:next, sender, ref} ->
      send(sender, {:ok, ref, n})
      loop(n + 1)
  end
end

# Spawns the counter loop seeded with `count` and registers it under the
# :counter name so next/0 can reach it without a pid. Returns the pid.
def start(count) do
  counter = spawn(__MODULE__, :loop, [count])
  Process.register(counter, :counter)
  counter
end

# Fetches the next value from the registered counter. A fresh ref is pinned
# (^ref) in the receive pattern so only the reply to *this* request matches.
def next do
  ref = make_ref()
  send(:counter, {:next, self(), ref})

  receive do
    {:ok, ^ref, value} -> value
  end
end
end
| 18.478261 | 43 | 0.56 |
e855196328a9a90b4df893e92e0b1b0dd876072a | 625 | ex | Elixir | lib/exscript/transformers/comprehensions.ex | craigspaeth/exscript | c3a24790dff0eac592060ba3ae349be7c567e0a7 | [
"MIT"
] | 1 | 2017-12-15T23:55:05.000Z | 2017-12-15T23:55:05.000Z | lib/exscript/transformers/comprehensions.ex | craigspaeth/exscript | c3a24790dff0eac592060ba3ae349be7c567e0a7 | [
"MIT"
] | null | null | null | lib/exscript/transformers/comprehensions.ex | craigspaeth/exscript | c3a24790dff0eac592060ba3ae349be7c567e0a7 | [
"MIT"
] | null | null | null | defmodule ExScript.Transformers.Comprehensions do
@moduledoc """
Transforms module (and module-related) Elixir ASTs to ESTree JSON
"""
alias ExScript.Compile, as: Compile
alias ExScript.Common, as: Common
# Compiles an Elixir `for` comprehension AST into an ESTree CallExpression:
# `for x <- enum, do: block` becomes `enum.map(x => block)` in JS.
# NOTE(review): the match below only admits a single generator with a `do`
# block — comprehensions with filters, multiple generators, or `into:` would
# raise a MatchError here; confirm that is the intended scope.
def transform_comprehension({_, _, for_ast}) do
# left  = the generator pattern (becomes the arrow-fn parameter)
# enum  = the enumerable expression (becomes the `.map` receiver)
# block = the comprehension body (becomes the arrow-fn body)
[{_, _, [left, enum]}, [do: block]] = for_ast
%{
type: "CallExpression",
callee: %{
type: "MemberExpression",
object: Compile.transform!(enum),
property: %{
type: "Identifier",
name: "map"
}
},
arguments: [Common.function_expression(:arrow, [left], block)]
}
end
end
| 24.038462 | 68 | 0.6048 |
e85538bf4dfc372ecbdd10a7533e36513dd33e47 | 586 | exs | Elixir | test/speech_markdown/extended_cases_test.exs | pylon/speechmarkdown-ex | 6305d16f82955e4e74b54bf8c7724bc2e6668659 | [
"Apache-2.0"
] | 4 | 2020-05-21T06:30:47.000Z | 2021-04-19T18:12:25.000Z | test/speech_markdown/extended_cases_test.exs | pylon/speechmarkdown-ex | 6305d16f82955e4e74b54bf8c7724bc2e6668659 | [
"Apache-2.0"
] | 3 | 2020-01-31T21:12:29.000Z | 2020-06-26T08:48:33.000Z | test/speech_markdown/extended_cases_test.exs | pylon/speechmarkdown-ex | 6305d16f82955e4e74b54bf8c7724bc2e6668659 | [
"Apache-2.0"
] | 2 | 2020-02-02T11:11:47.000Z | 2021-09-21T15:21:42.000Z | defmodule SpeechMarkdowntendedCasesTests do
use ExUnit.Case, async: true
import SpeechMarkdown, only: [to_ssml: 2, to_plaintext: 1]
import SpeechMarkdown.TestSupport.FixtureHelper
# out-of-spec SMD cases: {speech-markdown input, expected SSML, expected plaintext}
@cases [
# SSML mark
{"$[a]", "<speak><mark name=\"a\" /></speak>", ""}
]
# One test per case, generated at compile time; `unquote` injects the
# literal tuple elements into each test body.
for {smd, ssml, txt} <- @cases do
test "SMD #{smd}" do
assert {:ok, result} = to_ssml(unquote(smd), variant: :generic)
assert_xml(unquote(ssml) == result)
assert {:ok, result} = to_plaintext(unquote(smd))
assert unquote(txt) == result
end
end
end
| 24.416667 | 69 | 0.636519 |
e8553b1f1215254c62655a76cf43e99a08e4d243 | 2,974 | exs | Elixir | test/mix/tasks/phx.gen.channel_test.exs | blunckr/fenix | aeccae9658ed3d85d8af8f28ce2584d407b43d6b | [
"MIT"
] | null | null | null | test/mix/tasks/phx.gen.channel_test.exs | blunckr/fenix | aeccae9658ed3d85d8af8f28ce2584d407b43d6b | [
"MIT"
] | null | null | null | test/mix/tasks/phx.gen.channel_test.exs | blunckr/fenix | aeccae9658ed3d85d8af8f28ce2584d407b43d6b | [
"MIT"
] | null | null | null | Code.require_file "../../../installer/test/mix_helper.exs", __DIR__
defmodule Phoenix.Web.DupChannel do
end
defmodule Mix.Tasks.Phx.Gen.ChannelTest do
use ExUnit.Case
import MixHelper
alias Mix.Tasks.Phx.Gen
setup do
# Reset Mix's invoked-task tracking so each test can re-run the generator.
Mix.Task.clear()
:ok
end
# Happy path: generating a channel produces both the channel module and its
# test file with the expected boilerplate.
test "generates channel" do
in_tmp_project "generates channel", fn ->
Gen.Channel.run ["room"]
assert_file "lib/phoenix/web/channels/room_channel.ex", fn file ->
assert file =~ ~S|defmodule Phoenix.Web.RoomChannel do|
assert file =~ ~S|use Phoenix.Web, :channel|
assert file =~ ~S|def join("room:lobby", payload, socket) do|
assert file =~ ~S|def handle_in("ping", payload, socket) do|
assert file =~ ~S|{:reply, {:ok, payload}, socket}|
assert file =~ ~S|def handle_in("shout", payload, socket) do|
assert file =~ ~S|broadcast socket, "shout", payload|
assert file =~ ~S|{:noreply, socket}|
end
assert_file "test/web/channels/room_channel_test.exs", fn file ->
assert file =~ ~S|defmodule Phoenix.Web.RoomChannelTest|
assert file =~ ~S|use Phoenix.Web.ChannelCase|
assert file =~ ~S|alias Phoenix.Web.RoomChannel|
assert file =~ ~S|subscribe_and_join(RoomChannel|
assert file =~ ~S|test "ping replies with status ok"|
assert file =~ ~S|ref = push socket, "ping", %{"hello" => "there"}|
assert file =~ ~S|assert_reply ref, :ok, %{"hello" => "there"}|
assert file =~ ~S|test "shout broadcasts to room:lobby"|
assert file =~ ~S|push socket, "shout", %{"hello" => "all"}|
assert file =~ ~S|assert_broadcast "shout", %{"hello" => "all"}|
assert file =~ ~S|test "broadcasts are pushed to the client"|
assert file =~ ~S|broadcast_from! socket, "broadcast", %{"some" => "data"}|
assert file =~ ~S|assert_push "broadcast", %{"some" => "data"}|
end
end
end
# A dotted name like Admin.Room must land in a nested directory/namespace.
test "generates nested channel" do
in_tmp_project "generates nested channel", fn ->
Gen.Channel.run ["Admin.Room"]
assert_file "lib/phoenix/web/channels/admin/room_channel.ex", fn file ->
assert file =~ ~S|defmodule Phoenix.Web.Admin.RoomChannel do|
assert file =~ ~S|use Phoenix.Web, :channel|
end
assert_file "test/web/channels/admin/room_channel_test.exs", fn file ->
assert file =~ ~S|defmodule Phoenix.Web.Admin.RoomChannelTest|
assert file =~ ~S|use Phoenix.Web.ChannelCase|
assert file =~ ~S|alias Phoenix.Web.Admin.RoomChannel|
end
end
end
# Argument validation: the task takes exactly one channel name.
test "passing no args raises error" do
assert_raise Mix.Error, fn ->
Gen.Channel.run []
end
end
test "passing extra args raises error" do
assert_raise Mix.Error, fn ->
Gen.Channel.run ["Admin.Room", "new_message"]
end
end
# Collides with the Phoenix.Web.DupChannel module defined at the top of
# this file.
test "name is already defined" do
assert_raise Mix.Error, ~r/DupChannel is already taken/, fn ->
Gen.Channel.run ["Dup"]
end
end
end
| 33.41573 | 83 | 0.629455 |
e8555892b6b60ea5b748ff10d18a3e6a1fc69c80 | 445 | ex | Elixir | lib/hl7/2.3/segments/loc.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.3/segments/loc.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.3/segments/loc.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | defmodule HL7.V2_3.Segments.LOC do
@moduledoc false
require Logger
alias HL7.V2_3.{DataTypes}
# HL7 v2.3 LOC (location identification) segment. Each entry maps a segment
# field, in wire order, to its composite datatype module; `nil` marks fields
# parsed as plain values without a composite type.
use HL7.Segment,
fields: [
segment: nil,
primary_key_value: DataTypes.Pl,
location_description: nil,
location_type: nil,
organization_name: DataTypes.Xon,
location_address: DataTypes.Xad,
location_phone: DataTypes.Xtn,
license_number: DataTypes.Ce,
location_equipment: nil
]
end
| 22.25 | 39 | 0.685393 |
e85566093dd21171053aca1be01158793d69ac67 | 504 | exs | Elixir | lib/keywords_maps.exs | wasnotrice/elixir_intro | 9f4085478400d43cc5c432f6b0684af6f6f47861 | [
"Apache-2.0"
] | null | null | null | lib/keywords_maps.exs | wasnotrice/elixir_intro | 9f4085478400d43cc5c432f6b0684af6f6f47861 | [
"Apache-2.0"
] | null | null | null | lib/keywords_maps.exs | wasnotrice/elixir_intro | 9f4085478400d43cc5c432f6b0684af6f6f47861 | [
"Apache-2.0"
] | null | null | null | [a: "a", b: "b", c: "c", a: "A"]
# Tuple-list form of the keyword list above — they are the same data structure.
[{:a, "a"}, {:b, "b"}, {:c, "c"}, {:a, "A"}]
# Map with atom keys; unlike keyword lists, duplicate keys collapse (last wins).
%{a: "a", b: "b", c: "c", a: "A"}
# Map with string keys (arrow syntax required for non-atom keys).
%{"a" => "a", "b" => "b", "c" => "c", "a" => "A"}
# Map keys can be any term: integers, lists, atoms, strings.
%{1 => "a", [] => "b", :three => "c", "four" => "A"}
# Reading values
# NOTE(review): `map` is never bound in this snippet — these lines assume a
# `map` variable from an interactive session and would fail if run as-is.
map.a
map[:a]
Map.get(map, :a)
Map.fetch(map, :a)
Map.fetch!(map, :a)
# "Modifying" a map. Only through functions
Map.put(map, :a, "aaa")
Map.put_new(map, :a, "AAA")
Map.put_new(map, :i, "iii")
Map.update(map, :j, "jjj", fn old -> old <> "JJJ" end)
Map.delete(map, :j)
| 24 | 54 | 0.444444 |
e855ce7d213ea66f56256598fcff95fdf5bc3d60 | 1,482 | exs | Elixir | test/rendering/tag_test.exs | thepeoplesbourgeois/slime | 55c4d738632ed6540a0a03e0a4022b02e398c2d9 | [
"MIT"
] | 312 | 2015-12-28T05:25:47.000Z | 2022-02-16T05:18:16.000Z | test/rendering/tag_test.exs | thepeoplesbourgeois/slime | 55c4d738632ed6540a0a03e0a4022b02e398c2d9 | [
"MIT"
] | 105 | 2015-12-27T21:20:58.000Z | 2022-02-24T05:23:38.000Z | test/rendering/tag_test.exs | thepeoplesbourgeois/slime | 55c4d738632ed6540a0a03e0a4022b02e398c2d9 | [
"MIT"
] | 45 | 2016-01-12T04:02:01.000Z | 2021-09-14T16:19:18.000Z | defmodule TagTest do
use ExUnit.Case, async: true
import Slime, only: [render: 1, render: 2]
# Custom-element style tag names (with dashes) render as-is.
test "dashed-strings can be used as tags" do
assert render(~s(my-component text)) == ~s(<my-component>text</my-component>)
end
test "render nested tags" do
slime = """
#id.class
p Hello World
"""
assert render(slime) == ~s(<div class="class" id="id"><p>Hello World</p></div>)
end
# Same output whether the text is inline or a `|` text node child.
test "render nested tags with text node" do
slime = """
#id.class
p
| Hello World
"""
assert render(slime) == ~s(<div class="class" id="id"><p>Hello World</p></div>)
end
test "render closed tag (ending with /)" do
assert render(~s(img src="image.png"/)) == ~s(<img src="image.png"/>)
end
# Inline children may be literal text or an `=` Elixir expression bound
# via render/2 assigns.
test "render attributes and inline children" do
assert render(~s(div id="id" text content)) == ~s(<div id="id">text content</div>)
assert render(~s(div id="id" = elixir_func), elixir_func: "text") == ~s(<div id="id">text</div>)
end
test "parses inline children with interpolation" do
assert render("div text \#{content}", content: "test") == ~s(<div>text test</div>)
end
# HTML void elements must render without a closing tag; one generated test
# per element name.
void_elements = ~w(
area base br col embed hr img input keygen link menuitem
meta param source track wbr
)
for tag <- void_elements do
test "void element #{tag} requires no closing tag" do
html = render(~s(#{unquote(tag)} data-foo="bar"))
assert html == ~s(<#{unquote(tag)} data-foo="bar">)
end
end
end
| 27.444444 | 100 | 0.616734 |
e855dcbbad49570c1120f31f0caaa85894ea53f1 | 81,604 | ex | Elixir | clients/real_time_bidding/lib/google_api/real_time_bidding/v1/api/bidders.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/real_time_bidding/lib/google_api/real_time_bidding/v1/api/bidders.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/real_time_bidding/lib/google_api/real_time_bidding/v1/api/bidders.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.RealTimeBidding.V1.Api.Bidders do
@moduledoc """
API calls for all endpoints tagged `Bidders`.
"""
alias GoogleApi.RealTimeBidding.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Gets a bidder account by its name.
## Parameters
* `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. Name of the bidder to get. Format: `bidders/{bidderAccountId}`
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.RealTimeBidding.V1.Model.Bidder{}}` on success
* `{:error, info}` on failure
"""
@spec realtimebidding_bidders_get(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.RealTimeBidding.V1.Model.Bidder.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
# Generated endpoint wrapper (see file header): builds and executes a GET
# against `/v1/{name}`, decoding the JSON response into a Bidder model.
def realtimebidding_bidders_get(connection, name, optional_params \\ [], opts \\ []) do
# Whitelist of supported optional parameters; all are sent as query params.
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
# `name` is a full resource path (bidders/{id}); reserved chars are escaped.
|> Request.url("/v1/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.Bidder{}])
end
@doc """
Lists all the bidder accounts that belong to the caller.
## Parameters
* `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:pageSize` (*type:* `integer()`) - The maximum number of bidders to return. If unspecified, at most 100 bidders will be returned. The maximum value is 500; values above 500 will be coerced to 500.
* `:pageToken` (*type:* `String.t`) - A token identifying a page of results the server should return. This value is received from a previous `ListBidders` call in ListBiddersResponse.nextPageToken.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.RealTimeBidding.V1.Model.ListBiddersResponse{}}` on success
* `{:error, info}` on failure
"""
@spec realtimebidding_bidders_list(Tesla.Env.client(), keyword(), keyword()) ::
{:ok, GoogleApi.RealTimeBidding.V1.Model.ListBiddersResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def realtimebidding_bidders_list(connection, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/bidders", %{})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.ListBiddersResponse{}]
)
end
@doc """
Lists creatives.
## Parameters
* `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. Name of the parent buyer that owns the creatives. The pattern for this resource is either `buyers/{buyerAccountId}` or `bidders/{bidderAccountId}`. For `buyers/{buyerAccountId}`, the `buyerAccountId` can be one of the following: 1. The ID of the buyer that is accessing their own creatives. 2. The ID of the child seat buyer under a bidder account. So for listing creatives pertaining to the child seat buyer (`456`) under bidder account (`123`), you would use the pattern: `buyers/456`. 3. The ID of the bidder itself. So for listing creatives pertaining to bidder (`123`), you would use `buyers/123`. If you want to access all creatives pertaining to both the bidder and all of its child seat accounts, you would use `bidders/{bidderAccountId}`, e.g., for all creatives pertaining to bidder (`123`), use `bidders/123`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - Query string to filter creatives. If no filter is specified, all active creatives will be returned. Example: 'accountId=12345 AND (dealsStatus:DISAPPROVED AND disapprovalReason:UNACCEPTABLE_CONTENT) OR declaredAttributes:IS_COOKIE_TARGETED'
* `:pageSize` (*type:* `integer()`) - Requested page size. The server may return fewer creatives than requested (due to timeout constraint) even if more are available via another call. If unspecified, server will pick an appropriate default. Acceptable values are 1 to 1000, inclusive.
* `:pageToken` (*type:* `String.t`) - A token identifying a page of results the server should return. Typically, this is the value of ListCreativesResponse.nextPageToken returned from the previous call to the 'ListCreatives' method. Page tokens for continued pages are valid for up to five hours, counting from the call to 'ListCreatives' for the first page.
* `:view` (*type:* `String.t`) - Controls the amount of information included in the response. By default only creativeServingDecision is included. To retrieve the entire creative resource (including the declared fields and the creative content) specify the view as "FULL".
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.RealTimeBidding.V1.Model.ListCreativesResponse{}}` on success
* `{:error, info}` on failure
"""
@spec realtimebidding_bidders_creatives_list(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.RealTimeBidding.V1.Model.ListCreativesResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def realtimebidding_bidders_creatives_list(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:filter => :query,
:pageSize => :query,
:pageToken => :query,
:view => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/{+parent}/creatives", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.ListCreativesResponse{}]
)
end
@doc """
Watches all creatives pertaining to a bidder. It is sufficient to invoke this endpoint once per bidder. A Pub/Sub topic will be created and notifications will be pushed to the topic when any of the bidder's creatives change status. All of the bidder's service accounts will have access to read from the topic. Subsequent invocations of this method will return the existing Pub/Sub configuration.
## Parameters
* `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. To watch all creatives pertaining to the bidder and all its child seat accounts, the bidder must follow the pattern `bidders/{bidderAccountId}`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.RealTimeBidding.V1.Model.WatchCreativesRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.RealTimeBidding.V1.Model.WatchCreativesResponse{}}` on success
* `{:error, info}` on failure
"""
@spec realtimebidding_bidders_creatives_watch(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.RealTimeBidding.V1.Model.WatchCreativesResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def realtimebidding_bidders_creatives_watch(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/{+parent}/creatives:watch", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.WatchCreativesResponse{}]
)
end
@doc """
Gets a bidder endpoint by its name.
## Parameters
* `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. Name of the bidder endpoint to get. Format: `bidders/{bidderAccountId}/endpoints/{endpointId}`
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.RealTimeBidding.V1.Model.Endpoint{}}` on success
* `{:error, info}` on failure
"""
@spec realtimebidding_bidders_endpoints_get(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.RealTimeBidding.V1.Model.Endpoint.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def realtimebidding_bidders_endpoints_get(connection, name, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.Endpoint{}])
end
@doc """
Lists all the bidder's endpoints.
## Parameters
* `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. Name of the bidder whose endpoints will be listed. Format: `bidders/{bidderAccountId}`
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:pageSize` (*type:* `integer()`) - The maximum number of endpoints to return. If unspecified, at most 100 endpoints will be returned. The maximum value is 500; values above 500 will be coerced to 500.
* `:pageToken` (*type:* `String.t`) - A token identifying a page of results the server should return. This value is received from a previous `ListEndpoints` call in ListEndpointsResponse.nextPageToken.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.RealTimeBidding.V1.Model.ListEndpointsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec realtimebidding_bidders_endpoints_list(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.RealTimeBidding.V1.Model.ListEndpointsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def realtimebidding_bidders_endpoints_list(
connection,
parent,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/{+parent}/endpoints", %{
"parent" => URI.encode(parent, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.ListEndpointsResponse{}]
)
end
@doc """
Updates a bidder's endpoint.
## Parameters
* `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Output only. Name of the endpoint resource that must follow the pattern `bidders/{bidderAccountId}/endpoints/{endpointId}`, where {bidderAccountId} is the account ID of the bidder who operates this endpoint, and {endpointId} is a unique ID assigned by the server.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:updateMask` (*type:* `String.t`) - Field mask to use for partial in-place updates.
* `:body` (*type:* `GoogleApi.RealTimeBidding.V1.Model.Endpoint.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.RealTimeBidding.V1.Model.Endpoint{}}` on success
* `{:error, info}` on failure
"""
@spec realtimebidding_bidders_endpoints_patch(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.RealTimeBidding.V1.Model.Endpoint.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def realtimebidding_bidders_endpoints_patch(connection, name, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/v1/{+name}", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.Endpoint{}])
end
@doc """
Activates a pretargeting configuration.
## Parameters
* `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - Required. The name of the pretargeting configuration. Format: bidders/{bidderAccountId}/pretargetingConfig/{configId}
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.RealTimeBidding.V1.Model.ActivatePretargetingConfigRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}}` on success
* `{:error, info}` on failure
"""
@spec realtimebidding_bidders_pretargeting_configs_activate(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def realtimebidding_bidders_pretargeting_configs_activate(
connection,
name,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/{+name}:activate", %{
"name" => URI.encode(name, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}])
end
@doc """
Adds targeted apps to the pretargeting configuration.
## Parameters
* `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
* `pretargeting_config` (*type:* `String.t`) - Required. The name of the pretargeting configuration. Format: bidders/{bidderAccountId}/pretargetingConfig/{configId}
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.RealTimeBidding.V1.Model.AddTargetedAppsRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}}` on success
* `{:error, info}` on failure
"""
@spec realtimebidding_bidders_pretargeting_configs_add_targeted_apps(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def realtimebidding_bidders_pretargeting_configs_add_targeted_apps(
connection,
pretargeting_config,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/{+pretargetingConfig}:addTargetedApps", %{
"pretargetingConfig" => URI.encode(pretargeting_config, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}])
end
@doc """
Adds targeted publishers to the pretargeting config.
## Parameters
* `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
* `pretargeting_config` (*type:* `String.t`) - Required. The name of the pretargeting configuration. Format: bidders/{bidderAccountId}/pretargetingConfig/{configId}
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.RealTimeBidding.V1.Model.AddTargetedPublishersRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}}` on success
* `{:error, info}` on failure
"""
@spec realtimebidding_bidders_pretargeting_configs_add_targeted_publishers(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def realtimebidding_bidders_pretargeting_configs_add_targeted_publishers(
connection,
pretargeting_config,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/{+pretargetingConfig}:addTargetedPublishers", %{
"pretargetingConfig" => URI.encode(pretargeting_config, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}])
end
@doc """
Adds targeted sites to the pretargeting configuration.
## Parameters
* `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
* `pretargeting_config` (*type:* `String.t`) - Required. The name of the pretargeting configuration. Format: bidders/{bidderAccountId}/pretargetingConfig/{configId}
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.RealTimeBidding.V1.Model.AddTargetedSitesRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}}` on success
* `{:error, info}` on failure
"""
@spec realtimebidding_bidders_pretargeting_configs_add_targeted_sites(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def realtimebidding_bidders_pretargeting_configs_add_targeted_sites(
connection,
pretargeting_config,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/{+pretargetingConfig}:addTargetedSites", %{
"pretargetingConfig" => URI.encode(pretargeting_config, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}])
end
@doc """
Creates a pretargeting configuration. A pretargeting configuration's state (PretargetingConfig.state) is active upon creation, and it will start to affect traffic shortly after. A bidder may create a maximum of 10 pretargeting configurations. Attempts to exceed this maximum results in a 400 bad request error.

## Parameters

  * `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
  * `parent` (*type:* `String.t`) - Required. Name of the bidder to create the pretargeting configuration for. Format: bidders/{bidderAccountId}
  * `optional_params` (*type:* `keyword()`) - Optional parameters:
    * `:"$.xgafv"`, `:alt`, `:callback`, `:fields`, `:prettyPrint`, `:quotaUser`, `:uploadType`, `:upload_protocol` - standard Google API query parameters
    * `:access_token`, `:oauth_token`, `:key` - authentication credentials
    * `:body` (*type:* `GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig.t`) - the configuration to create
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}}` on success
  * `{:error, info}` on failure
"""
@spec realtimebidding_bidders_pretargeting_configs_create(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def realtimebidding_bidders_pretargeting_configs_create(
      connection,
      parent,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the request.
  param_locations = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :body => :body
  }

  path_params = %{"parent" => URI.encode(parent, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/{+parent}/pretargetingConfigs", path_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)

  Response.decode(
    response,
    opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}]
  )
end
@doc """
Deletes a pretargeting configuration.

## Parameters

  * `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
  * `name` (*type:* `String.t`) - Required. The name of the pretargeting configuration to delete. Format: bidders/{bidderAccountId}/pretargetingConfig/{configId}
  * `optional_params` (*type:* `keyword()`) - Optional parameters:
    * `:"$.xgafv"`, `:alt`, `:callback`, `:fields`, `:prettyPrint`, `:quotaUser`, `:uploadType`, `:upload_protocol` - standard Google API query parameters
    * `:access_token`, `:oauth_token`, `:key` - authentication credentials
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.RealTimeBidding.V1.Model.Empty{}}` on success
  * `{:error, info}` on failure
"""
@spec realtimebidding_bidders_pretargeting_configs_delete(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.RealTimeBidding.V1.Model.Empty.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def realtimebidding_bidders_pretargeting_configs_delete(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # All supported optional parameters travel as query string entries.
  param_locations = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  path_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:delete)
    |> Request.url("/v1/{+name}", path_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)

  Response.decode(response, opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.Empty{}])
end
@doc """
Gets a pretargeting configuration.

## Parameters

  * `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
  * `name` (*type:* `String.t`) - Required. Name of the pretargeting configuration to get. Format: bidders/{bidderAccountId}/pretargetingConfig/{configId}
  * `optional_params` (*type:* `keyword()`) - Optional parameters:
    * `:"$.xgafv"`, `:alt`, `:callback`, `:fields`, `:prettyPrint`, `:quotaUser`, `:uploadType`, `:upload_protocol` - standard Google API query parameters
    * `:access_token`, `:oauth_token`, `:key` - authentication credentials
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}}` on success
  * `{:error, info}` on failure
"""
@spec realtimebidding_bidders_pretargeting_configs_get(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def realtimebidding_bidders_pretargeting_configs_get(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # All supported optional parameters travel as query string entries.
  param_locations = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  path_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/{+name}", path_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)

  Response.decode(
    response,
    opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}]
  )
end
@doc """
Lists all pretargeting configurations for a single bidder.

## Parameters

  * `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
  * `parent` (*type:* `String.t`) - Required. Name of the bidder whose pretargeting configurations will be listed. Format: bidders/{bidderAccountId}
  * `optional_params` (*type:* `keyword()`) - Optional parameters:
    * `:"$.xgafv"`, `:alt`, `:callback`, `:fields`, `:prettyPrint`, `:quotaUser`, `:uploadType`, `:upload_protocol` - standard Google API query parameters
    * `:access_token`, `:oauth_token`, `:key` - authentication credentials
    * `:pageSize` (*type:* `integer()`) - Maximum number of configurations to return; defaults to 10, capped at 100.
    * `:pageToken` (*type:* `String.t`) - Page token from a previous `ListPretargetingConfigs` call (ListPretargetingConfigsResponse.nextPageToken).
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.RealTimeBidding.V1.Model.ListPretargetingConfigsResponse{}}` on success
  * `{:error, info}` on failure
"""
@spec realtimebidding_bidders_pretargeting_configs_list(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.RealTimeBidding.V1.Model.ListPretargetingConfigsResponse.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def realtimebidding_bidders_pretargeting_configs_list(
      connection,
      parent,
      optional_params \\ [],
      opts \\ []
    ) do
  # All supported optional parameters, including pagination, are query entries.
  param_locations = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :pageSize => :query,
    :pageToken => :query
  }

  path_params = %{"parent" => URI.encode(parent, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/{+parent}/pretargetingConfigs", path_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)

  Response.decode(
    response,
    opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.ListPretargetingConfigsResponse{}]
  )
end
@doc """
Updates a pretargeting configuration.

## Parameters

  * `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
  * `name` (*type:* `String.t`) - Output only. Name of the pretargeting configuration that must follow the pattern `bidders/{bidder_account_id}/pretargetingConfigs/{config_id}`
  * `optional_params` (*type:* `keyword()`) - Optional parameters:
    * `:"$.xgafv"`, `:alt`, `:callback`, `:fields`, `:prettyPrint`, `:quotaUser`, `:uploadType`, `:upload_protocol` - standard Google API query parameters
    * `:access_token`, `:oauth_token`, `:key` - authentication credentials
    * `:updateMask` (*type:* `String.t`) - Field mask to use for partial in-place updates.
    * `:body` (*type:* `GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig.t`) - the updated configuration
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}}` on success
  * `{:error, info}` on failure
"""
@spec realtimebidding_bidders_pretargeting_configs_patch(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def realtimebidding_bidders_pretargeting_configs_patch(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the request.
  param_locations = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :updateMask => :query,
    :body => :body
  }

  path_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:patch)
    |> Request.url("/v1/{+name}", path_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)

  Response.decode(
    response,
    opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}]
  )
end
@doc """
Removes targeted apps from the pretargeting configuration.

## Parameters

  * `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
  * `pretargeting_config` (*type:* `String.t`) - Required. The name of the pretargeting configuration. Format: bidders/{bidderAccountId}/pretargetingConfig/{configId}
  * `optional_params` (*type:* `keyword()`) - Optional parameters:
    * `:"$.xgafv"`, `:alt`, `:callback`, `:fields`, `:prettyPrint`, `:quotaUser`, `:uploadType`, `:upload_protocol` - standard Google API query parameters
    * `:access_token`, `:oauth_token`, `:key` - authentication credentials
    * `:body` (*type:* `GoogleApi.RealTimeBidding.V1.Model.RemoveTargetedAppsRequest.t`) - the apps to remove
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}}` on success
  * `{:error, info}` on failure
"""
@spec realtimebidding_bidders_pretargeting_configs_remove_targeted_apps(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def realtimebidding_bidders_pretargeting_configs_remove_targeted_apps(
      connection,
      pretargeting_config,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the request.
  param_locations = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :body => :body
  }

  path_params = %{
    "pretargetingConfig" => URI.encode(pretargeting_config, &URI.char_unreserved?/1)
  }

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/{+pretargetingConfig}:removeTargetedApps", path_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)

  Response.decode(
    response,
    opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}]
  )
end
@doc """
Removes targeted publishers from the pretargeting config.

## Parameters

  * `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
  * `pretargeting_config` (*type:* `String.t`) - Required. The name of the pretargeting configuration. Format: bidders/{bidderAccountId}/pretargetingConfig/{configId}
  * `optional_params` (*type:* `keyword()`) - Optional parameters:
    * `:"$.xgafv"`, `:alt`, `:callback`, `:fields`, `:prettyPrint`, `:quotaUser`, `:uploadType`, `:upload_protocol` - standard Google API query parameters
    * `:access_token`, `:oauth_token`, `:key` - authentication credentials
    * `:body` (*type:* `GoogleApi.RealTimeBidding.V1.Model.RemoveTargetedPublishersRequest.t`) - the publishers to remove
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}}` on success
  * `{:error, info}` on failure
"""
@spec realtimebidding_bidders_pretargeting_configs_remove_targeted_publishers(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def realtimebidding_bidders_pretargeting_configs_remove_targeted_publishers(
      connection,
      pretargeting_config,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the request.
  param_locations = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :body => :body
  }

  path_params = %{
    "pretargetingConfig" => URI.encode(pretargeting_config, &URI.char_unreserved?/1)
  }

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/{+pretargetingConfig}:removeTargetedPublishers", path_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)

  Response.decode(
    response,
    opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}]
  )
end
@doc """
Removes targeted sites from the pretargeting configuration.

## Parameters

  * `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
  * `pretargeting_config` (*type:* `String.t`) - Required. The name of the pretargeting configuration. Format: bidders/{bidderAccountId}/pretargetingConfig/{configId}
  * `optional_params` (*type:* `keyword()`) - Optional parameters:
    * `:"$.xgafv"`, `:alt`, `:callback`, `:fields`, `:prettyPrint`, `:quotaUser`, `:uploadType`, `:upload_protocol` - standard Google API query parameters
    * `:access_token`, `:oauth_token`, `:key` - authentication credentials
    * `:body` (*type:* `GoogleApi.RealTimeBidding.V1.Model.RemoveTargetedSitesRequest.t`) - the sites to remove
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}}` on success
  * `{:error, info}` on failure
"""
@spec realtimebidding_bidders_pretargeting_configs_remove_targeted_sites(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def realtimebidding_bidders_pretargeting_configs_remove_targeted_sites(
      connection,
      pretargeting_config,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the request.
  param_locations = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :body => :body
  }

  path_params = %{
    "pretargetingConfig" => URI.encode(pretargeting_config, &URI.char_unreserved?/1)
  }

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/{+pretargetingConfig}:removeTargetedSites", path_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)

  Response.decode(
    response,
    opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}]
  )
end
@doc """
Suspends a pretargeting configuration.

## Parameters

  * `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
  * `name` (*type:* `String.t`) - Required. The name of the pretargeting configuration. Format: bidders/{bidderAccountId}/pretargetingConfig/{configId}
  * `optional_params` (*type:* `keyword()`) - Optional parameters:
    * `:"$.xgafv"`, `:alt`, `:callback`, `:fields`, `:prettyPrint`, `:quotaUser`, `:uploadType`, `:upload_protocol` - standard Google API query parameters
    * `:access_token`, `:oauth_token`, `:key` - authentication credentials
    * `:body` (*type:* `GoogleApi.RealTimeBidding.V1.Model.SuspendPretargetingConfigRequest.t`) - the suspend request
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}}` on success
  * `{:error, info}` on failure
"""
@spec realtimebidding_bidders_pretargeting_configs_suspend(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def realtimebidding_bidders_pretargeting_configs_suspend(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the request.
  param_locations = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :body => :body
  }

  path_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/{+name}:suspend", path_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)

  Response.decode(
    response,
    opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.PretargetingConfig{}]
  )
end
@doc """
Batch approves multiple publisher connections.

## Parameters

  * `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
  * `parent` (*type:* `String.t`) - Required. The bidder for whom publisher connections will be approved. Format: `bidders/{bidder}` where `{bidder}` is the account ID of the bidder.
  * `optional_params` (*type:* `keyword()`) - Optional parameters:
    * `:"$.xgafv"`, `:alt`, `:callback`, `:fields`, `:prettyPrint`, `:quotaUser`, `:uploadType`, `:upload_protocol` - standard Google API query parameters
    * `:access_token`, `:oauth_token`, `:key` - authentication credentials
    * `:body` (*type:* `GoogleApi.RealTimeBidding.V1.Model.BatchApprovePublisherConnectionsRequest.t`) - the connections to approve
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.RealTimeBidding.V1.Model.BatchApprovePublisherConnectionsResponse{}}` on success
  * `{:error, info}` on failure
"""
@spec realtimebidding_bidders_publisher_connections_batch_approve(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.RealTimeBidding.V1.Model.BatchApprovePublisherConnectionsResponse.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def realtimebidding_bidders_publisher_connections_batch_approve(
      connection,
      parent,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the request.
  param_locations = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :body => :body
  }

  path_params = %{"parent" => URI.encode(parent, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/{+parent}/publisherConnections:batchApprove", path_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)

  Response.decode(
    response,
    opts ++
      [struct: %GoogleApi.RealTimeBidding.V1.Model.BatchApprovePublisherConnectionsResponse{}]
  )
end
@doc """
Batch rejects multiple publisher connections.

## Parameters

  * `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
  * `parent` (*type:* `String.t`) - Required. The bidder for whom publisher connections will be rejected. Format: `bidders/{bidder}` where `{bidder}` is the account ID of the bidder.
  * `optional_params` (*type:* `keyword()`) - Optional parameters:
    * `:"$.xgafv"`, `:alt`, `:callback`, `:fields`, `:prettyPrint`, `:quotaUser`, `:uploadType`, `:upload_protocol` - standard Google API query parameters
    * `:access_token`, `:oauth_token`, `:key` - authentication credentials
    * `:body` (*type:* `GoogleApi.RealTimeBidding.V1.Model.BatchRejectPublisherConnectionsRequest.t`) - the connections to reject
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.RealTimeBidding.V1.Model.BatchRejectPublisherConnectionsResponse{}}` on success
  * `{:error, info}` on failure
"""
@spec realtimebidding_bidders_publisher_connections_batch_reject(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.RealTimeBidding.V1.Model.BatchRejectPublisherConnectionsResponse.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def realtimebidding_bidders_publisher_connections_batch_reject(
      connection,
      parent,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each supported optional parameter belongs in the request.
  param_locations = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :body => :body
  }

  path_params = %{"parent" => URI.encode(parent, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/{+parent}/publisherConnections:batchReject", path_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)

  Response.decode(
    response,
    opts ++
      [struct: %GoogleApi.RealTimeBidding.V1.Model.BatchRejectPublisherConnectionsResponse{}]
  )
end
@doc """
Gets a publisher connection.

## Parameters

  * `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
  * `name` (*type:* `String.t`) - Required. Name of the publisher whose connection information is to be retrieved. In the pattern `bidders/{bidder}/publisherConnections/{publisher}` where `{bidder}` is the account ID of the bidder, and `{publisher}` is the ads.txt/app-ads.txt publisher ID. See publisherConnection.name.
  * `optional_params` (*type:* `keyword()`) - Optional parameters:
    * `:"$.xgafv"`, `:alt`, `:callback`, `:fields`, `:prettyPrint`, `:quotaUser`, `:uploadType`, `:upload_protocol` - standard Google API query parameters
    * `:access_token`, `:oauth_token`, `:key` - authentication credentials
  * `opts` (*type:* `keyword()`) - Call options

## Returns

  * `{:ok, %GoogleApi.RealTimeBidding.V1.Model.PublisherConnection{}}` on success
  * `{:error, info}` on failure
"""
@spec realtimebidding_bidders_publisher_connections_get(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.RealTimeBidding.V1.Model.PublisherConnection.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def realtimebidding_bidders_publisher_connections_get(
      connection,
      name,
      optional_params \\ [],
      opts \\ []
    ) do
  # All supported optional parameters travel as query string entries.
  param_locations = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  path_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/{+name}", path_params)
    |> Request.add_optional_params(param_locations, optional_params)
    |> Request.library_version(@library_version)

  response = Connection.execute(connection, request)

  Response.decode(
    response,
    opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.PublisherConnection{}]
  )
end
@doc """
Lists publisher connections for a given bidder.
## Parameters
* `connection` (*type:* `GoogleApi.RealTimeBidding.V1.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - Required. Name of the bidder for which publishers have initiated connections. The pattern for this resource is `bidders/{bidder}` where `{bidder}` represents the account ID of the bidder.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - Query string to filter publisher connections. Connections can be filtered by `displayName`, `publisherPlatform`, and `biddingState`. If no filter is specified, all publisher connections will be returned. Example: 'displayName="Great Publisher*" AND publisherPlatform=ADMOB AND biddingState != PENDING' See https://google.aip.dev/160 for more information about filtering syntax.
* `:orderBy` (*type:* `String.t`) - Order specification by which results should be sorted. If no sort order is specified, the results will be returned in an arbitrary order. Currently results can be sorted by `createTime`. Example: 'createTime DESC'.
* `:pageSize` (*type:* `integer()`) - Requested page size. The server may return fewer results than requested (due to timeout constraint) even if more are available via another call. If unspecified, the server will pick an appropriate default. Acceptable values are 1 to 5000, inclusive.
* `:pageToken` (*type:* `String.t`) - A token identifying a page of results the server should return. Typically, this is the value of ListPublisherConnectionsResponse.nextPageToken returned from the previous call to the 'ListPublisherConnections' method.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.RealTimeBidding.V1.Model.ListPublisherConnectionsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec realtimebidding_bidders_publisher_connections_list(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.RealTimeBidding.V1.Model.ListPublisherConnectionsResponse.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def realtimebidding_bidders_publisher_connections_list(
      connection,
      parent,
      optional_params \\ [],
      opts \\ []
    ) do
  # Every supported optional parameter is serialized as a query-string value.
  query_param_config = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :filter => :query,
    :orderBy => :query,
    :pageSize => :query,
    :pageToken => :query
  }

  # Percent-encode the bidder resource name before splicing it into the path.
  encoded_parent = URI.encode(parent, &URI.char_unreserved?/1)

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/{+parent}/publisherConnections", %{"parent" => encoded_parent})
    |> Request.add_optional_params(query_param_config, optional_params)
    |> Request.library_version(@library_version)

  decode_opts =
    opts ++ [struct: %GoogleApi.RealTimeBidding.V1.Model.ListPublisherConnectionsResponse{}]

  connection
  |> Connection.execute(request)
  |> Response.decode(decode_opts)
end
end
| 46.498006 | 868 | 0.623254 |
e855ed49a3ff08e88893e8d6e5874ff6fad2c271 | 209 | exs | Elixir | hello-word.exs | MaraniMatias/elixir-hola-mundo | 325a6ba623378521ec6f79bd4627a0eb7c6cd1fa | [
"MIT"
] | 1 | 2016-12-25T09:53:53.000Z | 2016-12-25T09:53:53.000Z | hello-word.exs | MaraniMatias/elixir-hola-mundo | 325a6ba623378521ec6f79bd4627a0eb7c6cd1fa | [
"MIT"
] | null | null | null | hello-word.exs | MaraniMatias/elixir-hola-mundo | 325a6ba623378521ec6f79bd4627a0eb7c6cd1fa | [
"MIT"
] | null | null | null | IO.puts "Hello world from Elixir...\n"
# Greeter: prints a hello message for one name.
greet = fn s -> IO.puts "Hello, #{s}!" end

# Match on the empty list directly instead of computing length/1,
# which walks the whole list just to test emptiness.
if System.argv() == [] do
  greet.("Try: \n elixir hello-word.exs Elixir")
else
  Enum.each(System.argv(), greet)
end
| 20.9 | 50 | 0.641148 |
e8561cd1dcc90165f086bfdba15ba64d85289f1b | 1,282 | ex | Elixir | year_2020/lib/day_12/ship.ex | bschmeck/advent_of_code | cbec98019c6c00444e0f4c7e15e01b1ed9ae6145 | [
"MIT"
] | null | null | null | year_2020/lib/day_12/ship.ex | bschmeck/advent_of_code | cbec98019c6c00444e0f4c7e15e01b1ed9ae6145 | [
"MIT"
] | null | null | null | year_2020/lib/day_12/ship.ex | bschmeck/advent_of_code | cbec98019c6c00444e0f4c7e15e01b1ed9ae6145 | [
"MIT"
] | null | null | null | defmodule Day12.Ship do
defstruct x: 0, y: 0, dir: 90
def follow_instructions(instrs), do: follow_instructions(instrs, %__MODULE__{})
def follow_instructions([], ship), do: ship
def follow_instructions([instr | rest], ship),
do: follow_instructions(rest, follow_instruction(instr, ship))
def follow_instruction({"N", v}, ship), do: %__MODULE__{ship | y: ship.y + v}
def follow_instruction({"S", v}, ship), do: %__MODULE__{ship | y: ship.y - v}
def follow_instruction({"E", v}, ship), do: %__MODULE__{ship | x: ship.x + v}
def follow_instruction({"W", v}, ship), do: %__MODULE__{ship | x: ship.x - v}
def follow_instruction({"L", v}, ship), do: rotate(ship, 360 - v)
def follow_instruction({"R", v}, ship), do: rotate(ship, v)
def follow_instruction({"F", v}, %__MODULE__{dir: 0} = ship),
do: follow_instruction({"N", v}, ship)
def follow_instruction({"F", v}, %__MODULE__{dir: 90} = ship),
do: follow_instruction({"E", v}, ship)
def follow_instruction({"F", v}, %__MODULE__{dir: 180} = ship),
do: follow_instruction({"S", v}, ship)
def follow_instruction({"F", v}, %__MODULE__{dir: 270} = ship),
do: follow_instruction({"W", v}, ship)
def rotate(ship, degrees), do: %__MODULE__{ship | dir: rem(ship.dir + degrees, 360)}
end
| 41.354839 | 86 | 0.655226 |
e85625b7a6a232e43e31b1240edaa01c35271869 | 89 | ex | Elixir | lib/ambrosia_web/views/pow_reset_password/reset_password_view.ex | emeric-martineau/ambrosia | 74c55d35cf66537d7c8a33ef6057e89d44abd347 | [
"MIT"
] | 2 | 2020-05-25T05:28:31.000Z | 2020-05-25T08:10:43.000Z | lib/ambrosia_web/views/pow_reset_password/reset_password_view.ex | emeric-martineau/ambrosia | 74c55d35cf66537d7c8a33ef6057e89d44abd347 | [
"MIT"
] | 9 | 2020-05-25T16:39:15.000Z | 2020-11-11T16:51:37.000Z | lib/ambrosia_web/views/pow_reset_password/reset_password_view.ex | emeric-martineau/ambrosia | 74c55d35cf66537d7c8a33ef6057e89d44abd347 | [
"MIT"
] | null | null | null | defmodule AmbrosiaWeb.PowResetPassword.ResetPasswordView do
  # View backing the Pow reset-password flow; all behavior is injected
  # by `use AmbrosiaWeb, :view`.
  use AmbrosiaWeb, :view
end
| 22.25 | 59 | 0.853933 |
e856452bc6ef26f6de7549be31ba0e59f36e3c08 | 425 | exs | Elixir | test_fixtures/proj/good/serum.exs | skunkwerks/serum | 45039417a101bf89f3a86a4aa3d42ce41df2d676 | [
"MIT"
] | 302 | 2016-08-01T05:16:06.000Z | 2022-03-26T07:20:46.000Z | test_fixtures/proj/good/serum.exs | skunkwerks/serum | 45039417a101bf89f3a86a4aa3d42ce41df2d676 | [
"MIT"
] | 206 | 2016-08-04T15:18:35.000Z | 2022-03-01T03:07:57.000Z | test_fixtures/proj/good/serum.exs | skunkwerks/serum | 45039417a101bf89f3a86a4aa3d42ce41df2d676 | [
"MIT"
] | 35 | 2016-10-29T13:59:34.000Z | 2022-03-01T01:50:31.000Z | %{
site_name: "Test Webite",
site_description: "This is the test website.",
server_root: "https://www.example.com",
base_url: "/test-site/",
author: "John Doe",
author_email: "john.doe@example.com",
date_format: "{YYYY}-{0M}-{0D}",
list_title_all: "All Posts",
list_title_tag: "Posts Tagged ~s",
pagination: false,
posts_per_page: 5,
preview_length: 200,
plugins: [Serum.Plugins.SitemapGenerator]
}
| 26.5625 | 48 | 0.684706 |
e8564c883199acf81ea47ec9930905ac61f42aec | 2,128 | exs | Elixir | farmbot_core/test/asset_workers/fbos_config_worker_test.exs | elbow-jason/farmbot_os | f5dfc8f58a309285ca3d441b1b7272f15315b2a9 | [
"MIT"
] | 1 | 2019-08-06T11:51:48.000Z | 2019-08-06T11:51:48.000Z | farmbot_core/test/asset_workers/fbos_config_worker_test.exs | SeppPenner/farmbot_os | 39ba5c5880f8aef71792e2c009514bed1177089c | [
"MIT"
] | null | null | null | farmbot_core/test/asset_workers/fbos_config_worker_test.exs | SeppPenner/farmbot_os | 39ba5c5880f8aef71792e2c009514bed1177089c | [
"MIT"
] | null | null | null | defmodule FarmbotCore.FbosConfigWorkerTest do
use ExUnit.Case, async: true
alias FarmbotCore.{Asset.FbosConfig, AssetWorker, BotState}
import Farmbot.TestSupport.AssetFixtures
  # End-to-end check: starting an FbosConfig asset worker pushes every
  # config value from the asset into the BotState configuration tree.
  test "adds configs to bot state and config_storage" do
    # Build a persisted FbosConfig fixture with known values to compare against.
    %FbosConfig{} =
      conf =
      fbos_config(%{
        monitor: true,
        arduino_debug_messages: true,
        auto_sync: false,
        beta_opt_in: true,
        disable_factory_reset: false,
        firmware_hardware: nil,
        firmware_input_log: false,
        firmware_output_log: false,
        network_not_found_timer: nil,
        os_auto_update: false,
        sequence_body_log: true,
        sequence_complete_log: true,
        sequence_init_log: true
      })

    {:ok, pid} = AssetWorker.start_link(conf)
    # Force the worker to act now instead of waiting for its internal timer.
    send(pid, :timeout)
    # Subscribe to BotState broadcasts and fold in changes until they settle.
    state = BotState.subscribe()
    state_conf = receive_changes(state).configuration

    assert state_conf.arduino_debug_messages == conf.arduino_debug_messages
    assert state_conf.auto_sync == conf.auto_sync
    assert state_conf.beta_opt_in == conf.beta_opt_in
    assert state_conf.disable_factory_reset == conf.disable_factory_reset
    # The TTYDetector actually will set this value
    # only if the state actually changes properly.
    # assert state_conf.firmware_hardware == conf.firmware_hardware
    assert state_conf.firmware_input_log == conf.firmware_input_log
    assert state_conf.firmware_output_log == conf.firmware_output_log
    assert state_conf.network_not_found_timer == conf.network_not_found_timer
    assert state_conf.os_auto_update == conf.os_auto_update
    assert state_conf.sequence_body_log == conf.sequence_body_log
    assert state_conf.sequence_complete_log == conf.sequence_complete_log
    assert state_conf.sequence_init_log == conf.sequence_init_log
    # TODO(Connor) assert config_storage
  end
  # Drains BotState broadcast messages: each configuration changeset is
  # applied and the loop recurses with the updated state; unrelated
  # messages recurse with the old state. Returns the last state seen once
  # nothing arrives for 100ms.
  def receive_changes(state) do
    receive do
      {BotState, %{changes: %{configuration: _}} = change} ->
        Ecto.Changeset.apply_changes(change)
        |> receive_changes()

      _ ->
        receive_changes(state)
    after
      100 ->
        state
    end
  end
end
| 32.242424 | 77 | 0.716635 |
e8564e4ab75517101f118cc55a49e5d35029c902 | 642 | ex | Elixir | lib/ex_oauth2_provider/behaviors/skip_authorization.ex | heroinbob/ex_oauth2_provider | 80c21a53bba0955ab3b66f1bd32cc81db0f04f49 | [
"MIT"
] | null | null | null | lib/ex_oauth2_provider/behaviors/skip_authorization.ex | heroinbob/ex_oauth2_provider | 80c21a53bba0955ab3b66f1bd32cc81db0f04f49 | [
"MIT"
] | null | null | null | lib/ex_oauth2_provider/behaviors/skip_authorization.ex | heroinbob/ex_oauth2_provider | 80c21a53bba0955ab3b66f1bd32cc81db0f04f49 | [
"MIT"
] | null | null | null | defmodule ExOauth2Provider.Behaviors.SkipAuthorization do
@moduledoc """
Define the rules used to determine if authorization can be skipped. If your
app has unique criteria then implement it.
For example:
defmodule MyModule do
@behaviour ExOauth2Provider.Behaviors.SkipAuthorization
def skip_authorization(user, application) do
user.super_cool? || application.trusted?
end
end
"""
alias ExOauth2Provider.Applications.Application
alias ExOauth2Provider.Schema
@callback skip_authorization?(
user :: Schema.t(),
application :: Application.t()
) :: boolean()
end
| 26.75 | 77 | 0.71028 |
e8565b45116a893e5660fdcb86ffd86ab15e70a9 | 880 | ex | Elixir | clients/storage/lib/google_api/storage/v1/metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/storage/lib/google_api/storage/v1/metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/storage/lib/google_api/storage/v1/metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Storage.V1 do
  @moduledoc """
  API client metadata for GoogleApi.Storage.V1.
  """

  # Revision stamp of the discovery document this client was generated from.
  @discovery_revision "20200927"

  @doc "Returns the discovery-document revision this client was generated from."
  def discovery_revision, do: @discovery_revision
end
| 32.592593 | 74 | 0.757955 |
e8567af241e30dc74375622e0fbd0b6f0f3589a6 | 1,239 | exs | Elixir | config/prod.secret.exs | enlego/hasher | 43b528059119971ffbca6538ffb6713fb78e077f | [
"MIT"
] | null | null | null | config/prod.secret.exs | enlego/hasher | 43b528059119971ffbca6538ffb6713fb78e077f | [
"MIT"
] | null | null | null | config/prod.secret.exs | enlego/hasher | 43b528059119971ffbca6538ffb6713fb78e077f | [
"MIT"
] | null | null | null | # In this file, we load production configuration and secrets
# from environment variables. You can also hardcode secrets,
# although such is generally not recommended and you have to
# remember to add this file to your .gitignore.
use Mix.Config

# Fail fast at boot when the database URL is not provided via the environment.
database_url =
  System.get_env("DATABASE_URL") ||
    raise """
    environment variable DATABASE_URL is missing.
    For example: ecto://USER:PASS@HOST/DATABASE
    """

config :hasher, Hasher.Repo,
  # ssl: true,
  url: database_url,
  pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")

# Fail fast at boot when the signing secret is not provided via the environment.
secret_key_base =
  System.get_env("SECRET_KEY_BASE") ||
    raise """
    environment variable SECRET_KEY_BASE is missing.
    You can generate one by calling: mix phx.gen.secret
    """

config :hasher, HasherWeb.Endpoint,
  http: [
    # PORT defaults to 4000 when unset.
    port: String.to_integer(System.get_env("PORT") || "4000"),
    transport_options: [socket_opts: [:inet6]]
  ],
  secret_key_base: secret_key_base
# ## Using releases (Elixir v1.9+)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start each relevant endpoint:
#
# config :hasher, HasherWeb.Endpoint, server: true
#
# Then you can assemble a release by calling `mix release`.
# See `mix help release` for more information.
| 29.5 | 67 | 0.717514 |
e85690b3df8c7cb2101ab5874cf7e51aaada04d5 | 5,694 | ex | Elixir | lib/changelog_web/views/episode_view.ex | wojtekmach/changelog.com | d4a8a7703c5f07a3da63bffd770f4642488cf8fd | [
"MIT"
] | null | null | null | lib/changelog_web/views/episode_view.ex | wojtekmach/changelog.com | d4a8a7703c5f07a3da63bffd770f4642488cf8fd | [
"MIT"
] | null | null | null | lib/changelog_web/views/episode_view.ex | wojtekmach/changelog.com | d4a8a7703c5f07a3da63bffd770f4642488cf8fd | [
"MIT"
] | null | null | null | defmodule ChangelogWeb.EpisodeView do
use ChangelogWeb, :public_view
import ChangelogWeb.Meta.{Title, Description}
alias Changelog.{Episode, Files, Github, ListKit}
alias ChangelogWeb.{Endpoint, LayoutView, NewsItemView, PersonView,
PodcastView, SponsorView, TimeView}
  # Admin-only edit link for an episode, labeled with its reach count in
  # parentheses; renders nothing for non-admin users.
  def admin_edit_link(conn, %{admin: true}, episode) do
    path = admin_podcast_episode_path(conn, :edit, episode.podcast.slug, episode.slug, next: current_path(conn))

    content_tag(:span) do
      [
        link("(#{comma_separated(episode.reach_count)})", to: path, data: [turbolinks: false])
      ]
    end
  end

  def admin_edit_link(_, _, _), do: nil
  # Filename of the episode's original audio upload, with an mp3 extension.
  def audio_filename(episode) do
    Files.Audio.filename(:original, {episode.audio_file.file_name, episode}) <> ".mp3"
  end

  # Priv-relative local path of the audio file on disk.
  def audio_local_path(episode) do
    {episode.audio_file.file_name, episode}
    |> Files.Audio.url(:original)
    |> String.replace_leading("/priv", "priv") # remove Arc's "/" when storage is priv
  end

  # URL path of the episode's audio; falls back to "/california.mp3" when
  # no audio file has been uploaded yet.
  def audio_path(episode) do
    if episode.audio_file do
      episode
      |> audio_local_path
      |> String.replace_leading("priv", "") # ensure relative reference is removed
    else
      "/california.mp3"
    end
  end

  # Absolute URL of the episode audio on the endpoint's static host.
  def audio_url(episode) do
    static_url(Endpoint, audio_path(episode))
  end
  # Episode highlight as raw (trusted) HTML, with widowed words and smart
  # quotes applied for nicer typography.
  def classy_highlight(episode) do
    episode.highlight
    |> no_widowed_words
    |> with_smart_quotes
    |> raw
  end

  def embed_code(episode), do: embed_code(episode, episode.podcast)
  # HTML snippet third parties can paste to embed the episode player:
  # an <audio> tag, a fallback link, and the embed bootstrap script.
  def embed_code(episode, podcast) do
    ~s{<audio data-theme="night" data-src="#{url(episode, :embed)}" src="#{audio_url(episode)}" preload="none" class="changelog-episode" controls></audio>} <>
    ~s{<p><a href="#{url(episode, :show)}">#{podcast.name} #{numbered_title(episode)}</a> – Listen on <a href="#{root_url(Endpoint, :index)}">Changelog.com</a></p>} <>
    ~s{<script async src="//cdn.changelog.com/embed.js"></script>}
  end

  # Iframe variant of the embed, with the theme passed as a query param.
  def embed_iframe(episode, theme) do
    ~s{<iframe src="#{url(episode, :embed)}?theme=#{theme}" width="100%" height=220 scrolling=no frameborder=no></iframe>}
  end
  # The subtitle when it reads as guest-focused ("with ..."/"featuring ...");
  # empty string otherwise.
  def guest_focused_subtitle(episode) do
    if is_subtitle_guest_focused(episode), do: episode.subtitle, else: ""
  end
def guid(episode) do
episode.guid || "changelog.com/#{episode.podcast_id}/#{episode.id}"
end
def is_subtitle_guest_focused(%{subtitle: nil}), do: false
def is_subtitle_guest_focused(%{guests: nil}), do: false
def is_subtitle_guest_focused(%{guests: []}), do: false
def is_subtitle_guest_focused(%{subtitle: subtitle}) do
String.starts_with?(subtitle, "with ") ||
String.starts_with?(subtitle, "featuring ")
end
def megabytes(episode) do
round((episode.bytes || 0) / 1000 / 1000)
end
def number(episode) do
case Float.parse(episode.slug) do
{_, _} -> episode.slug
:error -> nil
end
end
def number_with_pound(episode) do
if episode_number = number(episode) do
"##{episode_number}"
end
end
def numbered_title(episode, prefix \\ "") do
episode_number = number(episode)
if is_nil(episode_number) do
episode.title
else
"#{prefix}#{episode_number}: #{episode.title}"
end
end
  # People participating in the episode (delegated to the schema module).
  def participants(episode) do
    Episode.participants(episode)
  end

  # Parenthesized podcast label, e.g. "(Podcast #123)".
  def podcast_aside(episode) do
    "(#{podcast_name_and_number(episode)})"
  end

  # "Podcast #123"; the number part is omitted for non-numeric slugs.
  def podcast_name_and_number(episode) do
    [episode.podcast.name, number_with_pound(episode)] |> ListKit.compact_join(" ")
  end
def sponsorships_with_dark_logo(episode) do
Enum.reject(episode.episode_sponsors, fn(s) -> is_nil(s.sponsor.dark_logo) end)
end
  # GitHub URL of the episode's show-notes source file.
  def show_notes_source_url(episode) do
    Github.Source.new("show-notes", episode).html_url
  end

  # "Title (Podcast #123)".
  def title_with_podcast_aside(episode) do
    [
      episode.title,
      podcast_aside(episode)
    ] |> ListKit.compact_join(" ")
  end

  # Full display title: title, guest-focused subtitle (when present) and
  # the podcast aside. Trailers use the bare title.
  def title_with_guest_focused_subtitle_and_podcast_aside(episode = %{type: :trailer}), do: episode.title
  def title_with_guest_focused_subtitle_and_podcast_aside(episode) do
    [
      episode.title,
      guest_focused_subtitle(episode),
      "(#{podcast_name_and_number((episode))})"
    ] |> ListKit.compact_join(" ")
  end

  # GitHub URL of the episode's transcript source file.
  def transcript_source_url(episode) do
    Github.Source.new("transcripts", episode).html_url
  end
def render("play.json", %{podcast: podcast, episode: episode, prev: prev, next: next}) do
info = %{
podcast: podcast.name,
title: episode.title,
number: number(episode),
duration: episode.duration,
art_url: PodcastView.cover_url(podcast, :small),
audio_url: audio_url(episode),
share_url: url(episode, :show)
}
info = if prev do
Map.put(info, :prev, %{
number: prev.slug,
title: prev.title,
location: episode_path(Endpoint, :play, podcast.slug, prev.slug),
audio_url: audio_url(prev)
})
else
info
end
info = if next do
Map.put(info, :next, %{
number: next.slug,
title: next.title,
location: episode_path(Endpoint, :play, podcast.slug, next.slug),
audio_url: audio_url(next)
})
else
info
end
info
end
  # Share metadata for the episode: its canonical URL plus per-network
  # share links and the embeddable player snippet.
  def render("share.json", %{podcast: _podcast, episode: episode}) do
    url = url(episode, :show)

    %{url: url,
      twitter: tweet_url(episode.title, url),
      hackernews: hackernews_url(episode.title, url),
      reddit: reddit_url(episode.title, url),
      facebook: facebook_url(url),
      embed: embed_code(episode)}
  end
  # Canonical URL for the episode under its podcast's slug; preloads the
  # podcast association first so the slug is available.
  def url(episode, action) do
    episode = Episode.preload_podcast(episode)
    episode_url(Endpoint, action, episode.podcast.slug, episode.slug)
  end
end
| 28.903553 | 167 | 0.671584 |
e856a6ed009dbeabb448a72db3f24ebd6e80df82 | 895 | ex | Elixir | lib/news/util/temp_file.ex | randomlabs/news | 6aa200858bac69613af1de91420c6425f4517853 | [
"MIT"
] | 3 | 2015-08-12T20:45:57.000Z | 2015-10-26T09:20:10.000Z | lib/news/util/temp_file.ex | randomlabs/news | 6aa200858bac69613af1de91420c6425f4517853 | [
"MIT"
] | 15 | 2015-08-12T16:20:42.000Z | 2015-10-12T16:12:15.000Z | lib/news/util/temp_file.ex | randomlabs/news | 6aa200858bac69613af1de91420c6425f4517853 | [
"MIT"
] | 1 | 2015-08-20T17:43:10.000Z | 2015-08-20T17:43:10.000Z | defmodule News.Util.TempFile do
def write(name, data) do
file = "/tmp/news.temp." <> name
:ok = File.write!(file, data)
parent = self
monitor_file(file)
end
def attach(file) do
monitor_file(file)
end
  # Sends :release to this process's monitor (whose catch-all clause then
  # deletes every tracked temp file and exits), and forgets the monitor pid.
  # No-op when no monitor has been started.
  def release do
    pid = Process.get(:temp_file_pid)
    if pid do
      send(pid, :release)
      Process.delete(:temp_file_pid)
    end
  end
  # Cleanup-process loop: accumulates file paths from {:monitor, file}
  # messages; any other message (:release, or the :DOWN from monitoring the
  # owner process) deletes every accumulated file and exits.
  def monitor_loop(files) do
    receive do
      {:monitor, file} -> monitor_loop([file|files])
      _ ->
        Enum.each(files, fn(file) -> File.rm(file) end)
        exit(:normal)
    end
  end
defp monitor_file(file) do
pid = Process.get(:temp_file_pid)
if pid do
send(pid, {:monitor, file})
else
parent = self
pid = spawn(fn() ->
_ref = Process.monitor(parent)
monitor_loop([file])
end)
Process.put(:temp_file_pid, pid)
end
file
end
end
| 19.042553 | 55 | 0.589944 |
e856c88076905c5f50b90660566f715140c51b68 | 68,087 | ex | Elixir | lib/ecto/query.ex | codinghombre/ecto | 16d2de8e19aa76b91156684d8d0477e5c8df3b87 | [
"Apache-2.0"
] | null | null | null | lib/ecto/query.ex | codinghombre/ecto | 16d2de8e19aa76b91156684d8d0477e5c8df3b87 | [
"Apache-2.0"
] | null | null | null | lib/ecto/query.ex | codinghombre/ecto | 16d2de8e19aa76b91156684d8d0477e5c8df3b87 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.SubQuery do
@moduledoc """
A struct representing subqueries.
See `Ecto.Query.subquery/2` for more information.
"""
defstruct [:query, :params, :select, :cache]
end
defmodule Ecto.Query do
@moduledoc ~S"""
Provides the Query DSL.
Queries are used to retrieve and manipulate data from a repository
(see `Ecto.Repo`). Ecto queries come in two flavors: keyword-based
and macro-based. Most examples will use the keyword-based syntax,
the macro one will be explored in later sections.
Let's see a sample query:
# Imports only from/2 of Ecto.Query
import Ecto.Query, only: [from: 2]
# Create a query
query = from u in "users",
where: u.age > 18,
select: u.name
# Send the query to the repository
Repo.all(query)
In the example above, we are directly querying the "users" table
from the database.
## Query expressions
Ecto allows a limited set of expressions inside queries. In the
query below, for example, we use `u.age` to access a field, the
`>` comparison operator and the literal `0`:
query = from u in "users", where: u.age > 0, select: u.name
You can find the full list of operations in `Ecto.Query.API`.
Besides the operations listed there, the following literals are
supported in queries:
* Integers: `1`, `2`, `3`
* Floats: `1.0`, `2.0`, `3.0`
* Booleans: `true`, `false`
* Binaries: `<<1, 2, 3>>`
* Strings: `"foo bar"`, `~s(this is a string)`
* Arrays: `[1, 2, 3]`, `~w(interpolate words)`
All other types and dynamic values must be passed as a parameter using
interpolation as explained below.
## Interpolation and casting
External values and Elixir expressions can be injected into a query
expression with `^`:
def with_minimum(age, height_ft) do
from u in "users",
where: u.age > ^age and u.height > ^(height_ft * 3.28),
select: u.name
end
with_minimum(18, 5.0)
When interpolating values, you may want to explicitly tell Ecto
what is the expected type of the value being interpolated:
age = "18"
Repo.all(from u in "users",
where: u.age > type(^age, :integer),
select: u.name)
In the example above, Ecto will cast the age to type integer. When
a value cannot be cast, `Ecto.Query.CastError` is raised.
To avoid the repetition of always specifying the types, you may define
an `Ecto.Schema`. In such cases, Ecto will analyze your queries and
automatically cast the interpolated "age" when compared to the `u.age`
field, as long as the age field is defined with type `:integer` in
your schema:
age = "18"
Repo.all(from u in User, where: u.age > ^age, select: u.name)
Another advantage of using schemas is that we no longer need to specify
the select option in queries, as by default Ecto will retrieve all
fields specified in the schema:
age = "18"
Repo.all(from u in User, where: u.age > ^age)
For this reason, we will use schemas on the remaining examples but
remember Ecto does not require them in order to write queries.
## `nil` comparison
`nil` comparison in filters, such as where and having, is forbidden
and it will raise an error:
# Raises if age is nil
from u in User, where: u.age == ^age
This is done as a security measure to avoid attacks that attempt
to traverse entries with nil columns. To check that value is `nil`,
use `is_nil/1` instead:
from u in User, where: is_nil(u.age)
## Composition
Ecto queries are composable. For example, the query above can
actually be defined in two parts:
# Create a query
query = from u in User, where: u.age > 18
# Extend the query
query = from u in query, select: u.name
Composing queries uses the same syntax as creating a query.
The difference is that, instead of passing a schema like `User`
on the right side of `in`, we passed the query itself.
Any value can be used on the right-side of `in` as long as it implements
the `Ecto.Queryable` protocol. For now, we know the protocol is
implemented for both atoms (like `User`) and strings (like "users").
In any case, regardless if a schema has been given or not, Ecto
queries are always composable thanks to its binding system.
### Positional bindings
On the left side of `in` we specify the query bindings. This is
done inside from and join clauses. In the query below `u` is a
binding and `u.age` is a field access using this binding.
query = from u in User, where: u.age > 18
Bindings are not exposed from the query. When composing queries, you
must specify bindings again for each refinement query. For example,
to further narrow down the above query, we again need to tell Ecto what
bindings to expect:
query = from u in query, select: u.city
Bindings in Ecto are positional, and the names do not have to be
consistent between input and refinement queries. For example, the
query above could also be written as:
query = from q in query, select: q.city
It would make no difference to Ecto. This is important because
it allows developers to compose queries without caring about
the bindings used in the initial query.
When using joins, the bindings should be matched in the order they
are specified:
# Create a query
query = from p in Post,
join: c in Comment, on: c.post_id == p.id
# Extend the query
query = from [p, c] in query,
select: {p.title, c.body}
You are not required to specify all bindings when composing.
For example, if we would like to order the results above by
post insertion date, we could further extend it as:
query = from q in query, order_by: q.inserted_at
The example above will work if the input query has 1 or 10
bindings. As long as the number of bindings is less than the
number of from + joins, Ecto will match only what you have
specified. The first binding always matches the source given
in `from`.
Similarly, if you are interested only in the last binding
(or the last bindings) in a query, you can use `...` to
specify "all bindings before" and match on the last one.
For instance, imagine you wrote:
posts_with_comments =
from p in query, join: c in Comment, on: c.post_id == p.id
And now we want to make sure to return both the post title
and the comment body. Although we may not know how many
bindings there are in the query, we are sure posts is the
first binding and comments are the last one, so we can write:
from [p, ..., c] in posts_with_comments, select: {p.title, c.body}
In other words, `...` will include all the binding between the
first and the last, which may be no binding at all, one or many.
### Named bindings
Another option for flexibly building queries with joins are
named bindings. Coming back to the previous example, provided
we bind a join to a concrete name:
posts_with_comments =
from p in query,
join: c in Comment, as: :comment, on: c.post_id == p.id
We can refer to it by that name using a following form of
bindings list:
from [p, comment: c] in posts_with_comments, select: {p.title, c.body}
This approach lets us not worry about keeping track of the position
of the bindings when composing the query.
What's more, a name can be assigned to the first binding as well:
from p in Post, as: :post
Only atoms are accepted for binding names. Named binding references
are expected to be placed in the tail position of the bindings list.
### Bindingless operations
Although bindings are extremely useful when working with joins,
they are not necessary when the query has only the `from` clause.
For such cases, Ecto supports a way for building queries
without specifying the binding:
from Post,
where: [category: "fresh and new"],
order_by: [desc: :published_at],
select: [:id, :title, :body]
The query above will select all posts with category "fresh and new",
order by the most recently published, and return Post structs with
only the id, title and body fields set. It is equivalent to:
from p in Post,
where: p.category == "fresh and new",
order_by: [desc: p.published_at],
select: struct(p, [:id, :title, :body])
One advantage of bindingless queries is that they are data-driven
and therefore useful for dynamically building queries. For example,
the query above could also be written as:
where = [category: "fresh and new"]
order_by = [desc: :published_at]
select = [:id, :title, :body]
from Post, where: ^where, order_by: ^order_by, select: ^select
This feature is very useful when queries need to be built based
on some user input, like web search forms, CLIs and so on.
## Fragments
If you need an escape hatch, Ecto provides fragments
(see `Ecto.Query.API.fragment/1`) to inject SQL (and non-SQL)
fragments into queries.
For example, to get all posts while running the "lower(?)"
function in the database where `p.title` is interpolated
in place of `?`, one can write:
from p in Post,
where: is_nil(p.published_at) and
fragment("lower(?)", p.title) == ^title
Also, most adapters provide direct APIs for queries, like
`Ecto.Adapters.SQL.query/4`, allowing developers to
completely bypass Ecto queries.
## Macro API
In all examples so far we have used the **keywords query syntax** to
create a query:
import Ecto.Query
from u in "users", where: u.age > 18, select: u.name
Due to the prevalence of the pipe operator in Elixir, Ecto also supports
a pipe-based syntax:
"users"
|> where([u], u.age > 18)
|> select([u], u.name)
The keyword-based and pipe-based examples are equivalent. The downside
of using macros is that the binding must be specified for every operation.
However, since keyword-based and pipe-based examples are equivalent, the
bindingless syntax also works for macros:
"users"
|> where([u], u.age > 18)
|> select([:name])
This allows developers to write queries using bindings only in more
complex query expressions.
This module documents each of those macros, providing examples in
both the keywords query and pipe expression formats.
## Query Prefix
It is possible to set a prefix for the queries. For Postgres users,
this will specify the schema where the table is located, while for
MySQL users this will specify the database where the table is
located. When no prefix is set, Postgres queries are assumed to be
in the public schema, while MySQL queries are assumed to be in the
database set in the config for the repo.
The query prefix may be set either for the whole query or on each
individual `from` and `join` expression. If a `prefix` is not given
to a `from` or a `join`, the prefix of the schema given to the `from`
or `join` is used. The query prefix is used only if none of the above
are declared.
Let's see some examples. To set the query prefix globally, the simplest
mechanism is to pass an option to the repository operation:
results = Repo.all(query, prefix: "accounts")
You may also set the prefix for the whole query by setting the prefix field:
results =
query # May be User or an Ecto.Query itself
|> Ecto.Queryable.to_query
|> Map.put(:prefix, "accounts")
|> Repo.all()
Setting the prefix in the query changes the default prefix of all `from`
and `join` expressions. You can override the query prefix by either setting
the `@schema_prefix` in your schema definitions or by passing the prefix
option:
from u in User,
prefix: "accounts",
join: p in assoc(u, :posts),
prefix: "public"
Overall, here is the prefix lookup precedence:
1. The `:prefix` option given to `from`/`join` has the highest precedence
2. Then it falls back to the `@schema_prefix` attribute declared in the schema
given to `from`/`join`
3. Then it falls back to the query prefix
The prefixes set in the query will be preserved when loading data.
"""
# The query struct itself. Each field accumulates the expressions for one
# query clause (wheres, joins, order_bys, ...); `nil` or `[]`/`%{}` means
# the clause has not been set. The fields are populated by the macros in
# this module and reset individually via `exclude/2`.
defstruct [prefix: nil, sources: nil, from: nil, joins: [], aliases: %{}, wheres: [], select: nil,
order_bys: [], limit: nil, offset: nil, group_bys: [], combinations: [], updates: [],
havings: [], preloads: [], assocs: [], distinct: nil, lock: nil, windows: [],
with_ctes: nil]
defmodule FromExpr do
@moduledoc false
# Internal representation of a query's `from` clause:
# * :source - the queryable being selected from
# * :as     - optional named binding for the source
# * :prefix - optional per-source prefix (see "Query Prefix" above)
# * :hints  - database-specific hints (see `from`/`join` docs)
defstruct [:source, :as, :prefix, hints: []]
end
defmodule DynamicExpr do
@moduledoc false
# Internal representation of a `dynamic/2` expression, built by
# Builder.Dynamic. `:file`/`:line` record the definition site so
# errors can point at the original `dynamic` call.
defstruct [:fun, :binding, :file, :line]
end
defmodule QueryExpr do
@moduledoc false
# Generic container for a single query expression (order_by, limit, etc.):
# the quoted expression, its source location and any interpolated params.
defstruct [:expr, :file, :line, params: []]
end
defmodule BooleanExpr do
@moduledoc false
# A where/having expression. `:op` is how it combines with the previous
# expressions: `:and` for `where`/`having`, `:or` for the `or_*` variants.
defstruct [:op, :expr, :file, :line, params: []]
end
defmodule SelectExpr do
@moduledoc false
# The query's select clause. `:take` maps binding positions to the
# restricted field lists used by `struct/2`/`map/2`-style selects.
defstruct [:expr, :file, :line, :fields, params: [], take: %{}]
end
defmodule JoinExpr do
@moduledoc false
# A single join: `:qual` is the join qualifier (:inner, :left, ...),
# `:on` the join condition, `:assoc` set when joining on an association,
# `:as` the optional named binding, `:ix` the binding index, and
# `:prefix`/`:hints` the per-join options.
defstruct [:qual, :source, :on, :file, :line, :assoc, :as, :ix, :prefix, params: [], hints: []]
end
defmodule WithExpr do
@moduledoc false
# The query's WITH (CTE) clause: `:recursive` is toggled by
# `recursive_ctes/2` and `:queries` holds the `{name, query}` pairs
# added by `with_cte/3`.
defstruct [recursive: false, queries: []]
end
defmodule Tagged do
@moduledoc false
# Wraps a value together with its type information:
# * value is the tagged value
# * tag is the directly tagged value, like Ecto.UUID
# * type is the underlying tag type, like :string
defstruct [:value, :tag, :type]
end
@type t :: %__MODULE__{}
@opaque dynamic :: %DynamicExpr{}
alias Ecto.Query.Builder
@doc """
Builds a dynamic query expression.
Dynamic query expressions allows developers to build queries
expression bit by bit so they are later interpolated in a query.
## Examples
For example, imagine you have a set of conditions you want to
build your query on:
conditions = false
conditions =
if params["is_public"] do
dynamic([p], p.is_public or ^conditions)
else
conditions
end
conditions =
if params["allow_reviewers"] do
dynamic([p, a], a.reviewer == true or ^conditions)
else
conditions
end
from query, where: ^conditions
In the example above, we were able to build the query expressions
bit by bit, using different bindings, and later interpolate it all
at once inside the query.
A dynamic expression can always be interpolated inside another dynamic
expression and into the constructs described below.
## `where`, `having` and a `join`'s `on`
The `dynamic` macro can be interpolated at the root of a `where`,
`having` or a `join`'s `on`.
For example, assuming the `conditions` variable defined in the
previous section, the following is forbidden because it is not
at the root of a `where`:
from q in query, where: q.some_condition and ^conditions
Fortunately that's easily solvable by simply rewriting it to:
conditions = dynamic([q], q.some_condition and ^conditions)
from query, where: ^conditions
## `order_by`
Dynamics can be interpolated inside keyword lists at the root of
`order_by`. For example, you can write:
order_by = [
asc: :some_field,
desc: dynamic([p], fragment("?>>?", p.another_field, "json_key"))
]
from query, order_by: ^order_by
Dynamics are also supported in `order_by/2` clauses inside `windows/2`.
As with `where` and friends, it is not possible to pass dynamics
outside of a root. For example, this won't work:
from query, order_by: [asc: ^dynamic(...)]
But this will:
from query, order_by: ^[asc: dynamic(...)]
## `group_by`
Dynamics can be interpolated inside keyword lists at the root of
`group_by`. For example, you can write:
group_by = [
:some_field,
dynamic([p], fragment("?>>?", p.another_field, "json_key"))
]
from query, group_by: ^group_by
Dynamics are also supported in `partition_by/2` clauses inside `windows/2`.
As with `where` and friends, it is not possible to pass dynamics
outside of a root. For example, this won't work:
from query, group_by: [:some_field, ^dynamic(...)]
But this will:
from query, group_by: ^[:some_field, dynamic(...)]
## Updates
Dynamic is also supported inside updates, for example:
updates = [
set: [average: dynamic([p], p.sum / p.count)]
]
from query, update: ^updates
"""
# Delegates to the dynamic builder, which captures the binding and the
# expression so they can be interpolated into a query later.
defmacro dynamic(binding \\ [], expr) do
Builder.Dynamic.build(binding, expr, __CALLER__)
end
@doc """
Defines windows which can be used with `Ecto.Query.WindowAPI`.
Receives a keyword list where keys are names of the windows
and values are a keyword list with window expressions.
## Examples
# Compare each employee's salary with the average salary in his or her department
from e in Employee,
select: {e.depname, e.empno, e.salary, over(avg(e.salary), :department)},
windows: [department: [partition_by: e.depname]]
In the example above, we get the average salary per department.
`:department` is the window name, partitioned by `e.depname`
and `avg/1` is the window function. For more information
on windows functions, see `Ecto.Query.WindowAPI`.
## Window expressions
The following keys are allowed when specifying a window.
### :partition_by
A list of fields to partition the window by, for example:
windows: [department: [partition_by: e.depname]]
A list of atoms can also be interpolated for dynamic partitioning:
fields = [:depname, :year]
windows: [dynamic_window: [partition_by: ^fields]]
### :order_by
A list of fields to order the window by, for example:
windows: [ordered_names: [order_by: e.name]]
It works exactly as the keyword query version of `order_by/3`.
### :frame
A fragment which defines the frame for window functions.
## Examples
# compare each employee's salary for each month with his average salary for previous 3 months
from p in Payroll,
select: {p.empno, p.date, p.salary, over(avg(p.salary), :prev_months)},
windows: [prev_months: [partition_by: p.empno, order_by: p.date, frame: fragment("ROWS 3 PRECEDING EXCLUDE CURRENT ROW")]]
"""
# Delegates to the windows builder, which compiles the keyword list of
# window definitions into the query's `windows` field.
defmacro windows(query, binding \\ [], expr) do
Builder.Windows.build(query, binding, expr, __CALLER__)
end
@doc """
Converts a query into a subquery.
If a subquery is given, returns the subquery itself.
If any other value is given, it is converted to a query via
`Ecto.Queryable` and wrapped in the `Ecto.SubQuery` struct.
Subqueries are currently only supported in the `from`
and `join` fields.
## Examples
# Get the average salary of the top 10 highest salaries
query = from Employee, order_by: [desc: :salary], limit: 10
from e in subquery(query), select: avg(e.salary)
A prefix can be specified for a subquery, similar to standard repo operations:
query = from Employee, order_by: [desc: :salary], limit: 10
from e in subquery(query, prefix: "my_prefix"), select: avg(e.salary)
Although subqueries are not allowed in WHERE expressions,
most subqueries in WHERE expression can be rewritten as JOINs.
Imagine you want to write this query:
UPDATE posts
SET sync_started_at = $1
WHERE id IN (
SELECT id FROM posts
WHERE synced = false AND (sync_started_at IS NULL OR sync_started_at < $1)
LIMIT $2
)
If you attempt to write it as `where: p.id in ^subquery(foo)`,
Ecto won't accept such query. However, the subquery above can be
written as a JOIN, which is supported by Ecto. The final Ecto
query will look like this:
subset_query = from(p in Post,
where: p.synced == false and
(is_nil(p.sync_started_at) or p.sync_started_at < ^min_sync_started_at),
limit: ^batch_size
)
Repo.update_all(
from(p in Post, join: s in subquery(subset_query), on: s.id == p.id),
set: [sync_started_at: NaiveDateTime.utc_now()]
)
"""
def subquery(query, opts \\ []) do
  subquery = wrap_in_subquery(query)

  case Keyword.fetch(opts, :prefix) do
    {:ok, prefix} when is_binary(prefix) or is_nil(prefix) ->
      put_in(subquery.query.prefix, prefix)

    {:ok, prefix} ->
      # Previously an invalid prefix crashed with an opaque CaseClauseError;
      # raise a descriptive error instead. `:prefix` is the only supported option.
      raise ArgumentError,
            "the `:prefix` option to `subquery` must be a string or nil, got: #{inspect(prefix)}"

    :error ->
      subquery
  end
end

# Normalizes any queryable into an `Ecto.SubQuery` struct; values that are
# already subqueries are returned as-is.
defp wrap_in_subquery(%Ecto.SubQuery{} = subquery), do: subquery
defp wrap_in_subquery(%Ecto.Query{} = query), do: %Ecto.SubQuery{query: query}
defp wrap_in_subquery(queryable), do: %Ecto.SubQuery{query: Ecto.Queryable.to_query(queryable)}
@joins [:join, :inner_join, :cross_join, :left_join, :right_join, :full_join,
:inner_lateral_join, :left_lateral_join]
@doc """
Resets a previously set field on a query.
It can reset many fields except the query source (`from`). When excluding
a `:join`, it will remove *all* types of joins. If you prefer to remove a
single type of join, please see paragraph below.
## Examples
Ecto.Query.exclude(query, :join)
Ecto.Query.exclude(query, :where)
Ecto.Query.exclude(query, :order_by)
Ecto.Query.exclude(query, :group_by)
Ecto.Query.exclude(query, :having)
Ecto.Query.exclude(query, :distinct)
Ecto.Query.exclude(query, :select)
Ecto.Query.exclude(query, :combinations)
Ecto.Query.exclude(query, :with_ctes)
Ecto.Query.exclude(query, :limit)
Ecto.Query.exclude(query, :offset)
Ecto.Query.exclude(query, :lock)
Ecto.Query.exclude(query, :preload)
You can also remove specific joins as well such as `left_join` and
`inner_join`:
Ecto.Query.exclude(query, :inner_join)
Ecto.Query.exclude(query, :cross_join)
Ecto.Query.exclude(query, :left_join)
Ecto.Query.exclude(query, :right_join)
Ecto.Query.exclude(query, :full_join)
Ecto.Query.exclude(query, :inner_lateral_join)
Ecto.Query.exclude(query, :left_lateral_join)
However, keep in mind that if a join is removed and its bindings
were referenced elsewhere, the bindings won't be removed, leading
to a query that won't compile.
"""
# Normalizes the input to an %Ecto.Query{} before resetting the given field.
def exclude(%Ecto.Query{} = query, field), do: do_exclude(query, field)
def exclude(query, field), do: do_exclude(Ecto.Queryable.to_query(query), field)
# Removing all joins also drops their named bindings, keeping only the
# alias of the `from` source (if any).
defp do_exclude(%Ecto.Query{} = query, :join) do
%{query | joins: [], aliases: Map.take(query.aliases, [query.from.as])}
end
# Removing a single join type keeps the other joins and drops only the
# named bindings of the excluded ones.
defp do_exclude(%Ecto.Query{} = query, join_keyword) when join_keyword in @joins do
qual = join_qual(join_keyword)
{excluded, remaining} = Enum.split_with(query.joins, &(&1.qual == qual))
aliases = Map.drop(query.aliases, Enum.map(excluded, & &1.as))
%{query | joins: remaining, aliases: aliases}
end
# Each remaining clause resets one query field back to its default.
# An unknown field raises FunctionClauseError (no clause for `:from`,
# which cannot be excluded).
defp do_exclude(%Ecto.Query{} = query, :where), do: %{query | wheres: []}
defp do_exclude(%Ecto.Query{} = query, :order_by), do: %{query | order_bys: []}
defp do_exclude(%Ecto.Query{} = query, :group_by), do: %{query | group_bys: []}
defp do_exclude(%Ecto.Query{} = query, :combinations), do: %{query | combinations: []}
defp do_exclude(%Ecto.Query{} = query, :with_ctes), do: %{query | with_ctes: nil}
defp do_exclude(%Ecto.Query{} = query, :having), do: %{query | havings: []}
defp do_exclude(%Ecto.Query{} = query, :distinct), do: %{query | distinct: nil}
defp do_exclude(%Ecto.Query{} = query, :select), do: %{query | select: nil}
defp do_exclude(%Ecto.Query{} = query, :limit), do: %{query | limit: nil}
defp do_exclude(%Ecto.Query{} = query, :offset), do: %{query | offset: nil}
defp do_exclude(%Ecto.Query{} = query, :lock), do: %{query | lock: nil}
defp do_exclude(%Ecto.Query{} = query, :preload), do: %{query | preloads: [], assocs: []}
@doc """
Creates a query.
It can either be a keyword query or a query expression.
If it is a keyword query the first argument must be
either an `in` expression, or a value that implements
the `Ecto.Queryable` protocol. If the query needs a
reference to the data source in any other part of the
expression, then an `in` must be used to create a reference
variable. The second argument should be a keyword query
where the keys are expression types and the values are
expressions.
If it is a query expression the first argument must be
a value that implements the `Ecto.Queryable` protocol
and the second argument the expression.
## Keywords example
from(c in City, select: c)
## Expressions example
City |> select([c], c)
## Examples
def paginate(query, page, size) do
from query,
limit: ^size,
offset: ^((page-1) * size)
end
The example above does not use `in` because `limit` and `offset`
do not require a reference to the data source. However, extending
the query with a where expression would require the use of `in`:
def published(query) do
from p in query, where: not(is_nil(p.published_at))
end
Notice we have created a `p` variable to reference the query's
original data source. This assumes that the original query
only had one source. When the given query has more than one source,
positional or named bindings may be used to access the additional sources.
def published_multi(query) do
from [p,o] in query,
where: not(is_nil(p.published_at)) and not(is_nil(o.published_at))
end
Note the variables `p` and `o` can be named whatever you like
as they have no importance in the query sent to the database.
"""
defmacro from(expr, kw \\ []) do
  # The keywords decide which query macros get expanded at compile time,
  # so the list must be a literal — it cannot be interpolated.
  # (`if not` instead of the soft-deprecated `unless`.)
  if not Keyword.keyword?(kw) do
    raise ArgumentError, "second argument to `from` must be a compile time keyword list"
  end

  # Pull the `as`/`prefix`/`hints` options belonging to `from` out of the
  # keyword list, build the initial query AST, then fold the remaining
  # keywords into successive macro calls.
  {kw, as, prefix, hints} = collect_as_and_prefix_and_hints(kw, nil, nil, nil)
  {quoted, binds, count_bind} = Builder.From.build(expr, __CALLER__, as, prefix, hints)
  from(kw, __CALLER__, count_bind, quoted, to_query_binds(binds))
end
@from_join_opts [:as, :prefix, :hints]
@no_binds [:lock, :union, :union_all, :except, :except_all, :intersect, :intersect_all]
@binds [:where, :or_where, :select, :distinct, :order_by, :group_by, :windows] ++
[:having, :or_having, :limit, :offset, :preload, :update, :select_merge, :with_ctes]
# Recursively folds the remaining keywords of a `from` into nested calls
# of the corresponding Ecto.Query macros, threading the quoted query,
# the binding list and the binding count through the recursion.
defp from([{type, expr}|t], env, count_bind, quoted, binds) when type in @binds do
# If all bindings are integer indexes keep AST Macro expandable to %Query{},
# otherwise ensure that quoted code is evaluated before macro call
quoted =
if Enum.all?(binds, fn {_, value} -> is_integer(value) end) do
quote do
Ecto.Query.unquote(type)(unquote(quoted), unquote(binds), unquote(expr))
end
else
quote do
query = unquote(quoted)
Ecto.Query.unquote(type)(query, unquote(binds), unquote(expr))
end
end
from(t, env, count_bind, quoted, binds)
end
# Keywords that take no binding list (lock, union, etc.) expand to the
# two-argument form of the macro.
defp from([{type, expr}|t], env, count_bind, quoted, binds) when type in @no_binds do
quoted =
quote do
Ecto.Query.unquote(type)(unquote(quoted), unquote(expr))
end
from(t, env, count_bind, quoted, binds)
end
# A join consumes the `on`/`as`/`prefix`/`hints` keywords that immediately
# follow it, and may introduce a new binding (updating count_bind).
defp from([{join, expr}|t], env, count_bind, quoted, binds) when join in @joins do
qual = join_qual(join)
{t, on, as, prefix, hints} = collect_on(t, nil, nil, nil, nil)
{quoted, binds, count_bind} =
Builder.Join.build(quoted, qual, binds, expr, count_bind, on, as, prefix, hints, env)
from(t, env, count_bind, quoted, to_query_binds(binds))
end
# `on` and the from/join options are only valid right after a join (or
# `from`); reaching them here means they were misplaced.
defp from([{:on, _value}|_], _env, _count_bind, _quoted, _binds) do
Builder.error! "`on` keyword must immediately follow a join"
end
defp from([{key, _value}|_], _env, _count_bind, _quoted, _binds) when key in @from_join_opts do
Builder.error! "`#{key}` keyword must immediately follow a from/join"
end
defp from([{key, _value}|_], _env, _count_bind, _quoted, _binds) do
Builder.error! "unsupported #{inspect key} in keyword query expression"
end
# All keywords consumed: return the fully built quoted query.
defp from([], _env, _count_bind, quoted, _binds) do
quoted
end
# Converts `{name, index}` binding pairs from the builder into the quoted
# `{var_ast, index}` form expected by the query macros.
defp to_query_binds(binds) do
  Enum.map(binds, fn {name, index} -> {{name, [], nil}, index} end)
end
# Maps a keyword-syntax join name to its join qualifier atom.
# Both `:join` and `:inner_join` mean an inner join; any other key
# raises FunctionClauseError, as the callers only pass keys in @joins.
defp join_qual(:cross_join), do: :cross
defp join_qual(:left_join), do: :left
defp join_qual(:right_join), do: :right
defp join_qual(:full_join), do: :full
defp join_qual(:left_lateral_join), do: :left_lateral
defp join_qual(:inner_lateral_join), do: :inner_lateral
defp join_qual(kind) when kind in [:join, :inner_join], do: :inner
# Consumes the `on`, `as`, `prefix` and `hints` keywords that immediately
# follow a join, returning the unconsumed tail plus the collected values.
# `as`/`prefix`/`hints` are delegated to collect_as_and_prefix_and_hints.
defp collect_on([{key, _} | _] = t, on, as, prefix, hints) when key in @from_join_opts do
{t, as, prefix, hints} = collect_as_and_prefix_and_hints(t, as, prefix, hints)
collect_on(t, on, as, prefix, hints)
end
# First `on` is taken as-is; subsequent ones are combined with `and`.
defp collect_on([{:on, on} | t], nil, as, prefix, hints),
do: collect_on(t, on, as, prefix, hints)
defp collect_on([{:on, expr} | t], on, as, prefix, hints),
do: collect_on(t, {:and, [], [on, expr]}, as, prefix, hints)
# Any other keyword stops the collection.
defp collect_on(t, on, as, prefix, hints),
do: {t, on, as, prefix, hints}
# Consumes leading `as`/`prefix`/`hints` keywords from a from/join keyword
# list. Each option may appear at most once: the accumulator being non-nil
# on a second occurrence triggers the duplicate-option error clause.
defp collect_as_and_prefix_and_hints([{:as, as} | t], nil, prefix, hints),
do: collect_as_and_prefix_and_hints(t, as, prefix, hints)
defp collect_as_and_prefix_and_hints([{:as, _} | _], _, _, _),
do: Builder.error! "`as` keyword was given more than once to the same from/join"
defp collect_as_and_prefix_and_hints([{:prefix, prefix} | t], as, nil, hints),
do: collect_as_and_prefix_and_hints(t, as, prefix, hints)
defp collect_as_and_prefix_and_hints([{:prefix, _} | _], _, _, _),
do: Builder.error! "`prefix` keyword was given more than once to the same from/join"
defp collect_as_and_prefix_and_hints([{:hints, hints} | t], as, prefix, nil),
do: collect_as_and_prefix_and_hints(t, as, prefix, hints)
defp collect_as_and_prefix_and_hints([{:hints, _} | _], _, _, _),
do: Builder.error! "`hints` keyword was given more than once to the same from/join"
# Any other keyword stops the collection.
defp collect_as_and_prefix_and_hints(t, as, prefix, hints),
do: {t, as, prefix, hints}
@doc """
A join query expression.
Receives a source that is to be joined to the query and a condition for
the join. The join condition can be any expression that evaluates
to a boolean value. The join is by default an inner join, the qualifier
can be changed by giving the atoms: `:inner`, `:left`, `:right`, `:cross`,
`:full`, `:inner_lateral` or `:left_lateral`. For a keyword query the `:join`
keyword can be changed to: `:inner_join`, `:left_join`, `:right_join`,
`:cross_join`, `:full_join`, `:inner_lateral_join` or `:left_lateral_join`.
Currently it is possible to join on:
* an `Ecto.Schema`, such as `p in Post`
* an interpolated Ecto query with zero or more where clauses,
such as `c in ^(from "posts", where: [public: true])`
* an association, such as `c in assoc(post, :comments)`
* a subquery, such as `c in subquery(another_query)`
* a query fragment, such as `c in fragment("SOME COMPLEX QUERY")`,
see "Joining with fragments" below.
## Options
Each join accepts the following options:
* `:on` - a query expression or keyword list to filter the join
* `:as` - a named binding for the join
* `:prefix` - the prefix to be used for the join when issuing a database query
* `:hints` - a string or a list of strings to be used as database hints
In the keyword query syntax, those options must be given immediately
after the join. In the expression syntax, the options are given as
the fifth argument.
## Keywords examples
from c in Comment,
join: p in Post,
on: p.id == c.post_id,
select: {p.title, c.text}
from p in Post,
left_join: c in assoc(p, :comments),
select: {p, c}
Keywords can also be given or interpolated as part of `on`:
from c in Comment,
join: p in Post,
on: [id: c.post_id],
select: {p.title, c.text}
Any key in `on` will apply to the currently joined expression.
It is also possible to interpolate an Ecto query on the right side
of `in`. For example, the query above can also be written as:
posts = Post
from c in Comment,
join: p in ^posts,
on: [id: c.post_id],
select: {p.title, c.text}
The above is specially useful to dynamically join on existing
queries, for example, to dynamically choose a source, or by
choosing between public posts or posts that have been recently
published:
posts =
if params["drafts"] do
from p in Post, where: [drafts: true]
else
from p in Post, where: [public: true]
end
from c in Comment,
join: p in ^posts, on: [id: c.post_id],
select: {p.title, c.text}
Only simple queries with `where` expressions can be interpolated
in join.
## Expressions examples
Comment
|> join(:inner, [c], p in Post, on: c.post_id == p.id)
|> select([c, p], {p.title, c.text})
Post
|> join(:left, [p], c in assoc(p, :comments))
|> select([p, c], {p, c})
Post
|> join(:left, [p], c in Comment, on: c.post_id == p.id and c.is_visible == true)
|> select([p, c], {p, c})
## Joining with fragments
When you need to join on a complex query, Ecto supports fragments in joins:
Comment
|> join(:inner, [c], p in fragment("SOME COMPLEX QUERY", c.id, ^some_param))
Although using fragments in joins is discouraged in favor of Ecto
Query syntax, they are necessary when writing lateral joins as
lateral joins require a subquery that refer to previous bindings:
Game
|> join(:inner_lateral, [g], gs in fragment("SELECT * FROM games_sold AS gs WHERE gs.game_id = ? ORDER BY gs.sold_on LIMIT 2", g.id))
|> select([g, gs], {g.name, gs.sold_on})
Note that the `join` does not automatically wrap the fragment in
parentheses, since some expressions require parens and others
require no parens. Therefore, in cases such as common table
expressions, you will have to explicitly wrap the fragment content
in parens.
## Hints
`from` and `join` also support index hints, as found in databases such as
[MySQL](https://dev.mysql.com/doc/refman/8.0/en/index-hints.html) and
[MSSQL](https://docs.microsoft.com/en-us/sql/t-sql/queries/hints-transact-sql-table?view=sql-server-2017).
For example, a developer using MySQL may write:
from p in Post,
join: c in Comment,
hints: ["USE INDEX FOO", "USE INDEX BAR"],
where: p.id == c.post_id,
select: c
Keep in mind you want to use hints rarely, so don't forget to read the database
disclaimers about such functionality.
"""
@join_opts [:on | @from_join_opts]
defmacro join(query, qual, binding \\ [], expr, opts \\ [])
when is_list(binding) and is_list(opts) do
# Consume the supported options; `t` holds whatever was not recognized.
{t, on, as, prefix, hints} = collect_on(opts, nil, nil, nil, nil)
# `with` is used here purely for its pattern match: a non-empty tail
# means an invalid option and raises; an empty list falls through.
with [{key, _} | _] <- t do
raise ArgumentError, "invalid option `#{key}` passed to Ecto.Query.join/5, " <>
"valid options are: #{inspect(@join_opts)}"
end
# Builder.Join.build returns {quoted, binds, count}; only the quoted
# query expression is the macro's result.
query
|> Builder.Join.build(qual, binding, expr, nil, on, as, prefix, hints, __CALLER__)
|> elem(0)
end
@doc """
A common table expression (CTE) also known as WITH expression.
`name` must be a compile-time literal string that is being used as the table name to join
the CTE in the main query or in the recursive CTE.
## Options
* `:as` – the CTE query itself or a fragment
## Recursive CTEs
Use `recursive_ctes/2` to enable recursive mode for CTEs.
In the CTE query itself use the same table name to leverage recursion that has been passed
to `name` argument. Make sure to write a stop condition to avoid infinite recursion loop.
Generally speaking, you should only use CTEs for writing recursive queries. Non-recursive
CTEs can often be written as joins or subqueries, which provide better performance.
## Expression examples
Products and their category names for breadcrumbs:
category_tree_initial_query =
Category
|> where([c], is_nil(c.parent_id))
category_tree_recursion_query =
Category
|> join(:inner, [c], ct in "category_tree", on: c.parent_id == ct.id)
category_tree_query =
category_tree_initial_query
|> union_all(^category_tree_recursion_query)
Product
|> recursive_ctes(true)
|> with_cte("category_tree", as: ^category_tree_query)
|> join(:left, [p], c in "category_tree", on: c.id == p.category_id)
|> group_by([p], p.id)
|> select([p, c], %{p | category_names: fragment("ARRAY_AGG(?)", c.name)})
It's possible to cast CTE result rows as Ecto structs:
{"category_tree", Category}
|> recursive_ctes(true)
|> with_cte("category_tree", as: ^category_tree_query)
|> join(:left, [c], p in assoc(c, :products))
|> group_by([c], c.id)
|> select([c, p], %{c | products_count: count(p.id)})
It's also possible to pass a raw SQL fragment:
@raw_sql_category_tree \"""
SELECT * FROM categories WHERE c.parent_id IS NULL
UNION ALL
SELECT * FROM categories AS c, category_tree AS ct WHERE ct.id = c.parent_id
\"""
Product
|> recursive_ctes(true)
|> with_cte("category_tree", as: fragment(@raw_sql_category_tree))
|> join(:inner, [p], c in "category_tree", on: c.id == p.category_id)
Keyword syntax is not supported for this feature.
"""
# Delegates to the CTE builder. The `as:` option is required by the
# pattern match; keyword syntax is intentionally not supported.
defmacro with_cte(query, name, as: with_query) do
Builder.CTE.build(query, name, with_query, __CALLER__)
end
@doc """
Enables or disables recursive mode for CTEs.
According to the SQL standard it affects all CTEs in the query, not individual ones.
See `with_cte/3` on example of how to build a query with a recursive CTE.
"""
# Sets the recursive flag on the query's WITH expression, creating an
# empty %WithExpr{} first if no CTE has been attached yet.
def recursive_ctes(%__MODULE__{with_ctes: existing} = query, value) when is_boolean(value) do
  %{query | with_ctes: %{(existing || %WithExpr{}) | recursive: value}}
end

# Any other queryable is first converted into an %Ecto.Query{}.
def recursive_ctes(queryable, value) do
  queryable
  |> Ecto.Queryable.to_query()
  |> recursive_ctes(value)
end
@doc """
A select query expression.
Selects which fields will be selected from the schema and any transformations
that should be performed on the fields. Any expression that is accepted in a
query can be a select field.
Select also allows each expression to be wrapped in lists, tuples or maps as
shown in the examples below. A full schema can also be selected.
There can only be one select expression in a query, if the select expression
is omitted, the query will by default select the full schema. If select is
given more than once, an error is raised. Use `exclude/2` if you would like
to remove a previous select for overriding or see `select_merge/3` for a
limited version of `select` that is composable and can be called multiple
times.
`select` also accepts a list of atoms where each atom refers to a field in
the source to be selected.
## Keywords examples
from(c in City, select: c) # returns the schema as a struct
from(c in City, select: {c.name, c.population})
from(c in City, select: [c.name, c.county])
from(c in City, select: %{n: c.name, answer: 42})
from(c in City, select: %{c | alternative_name: c.name})
from(c in City, select: %Data{name: c.name})
It is also possible to select a struct and limit the returned
fields at the same time:
from(City, select: [:name])
The syntax above is equivalent to:
from(city in City, select: struct(city, [:name]))
You can also write:
from(city in City, select: map(city, [:name]))
If you want a map with only the selected fields to be returned.
For more information, read the docs for `Ecto.Query.API.struct/2`
and `Ecto.Query.API.map/2`.
## Expressions examples
City |> select([c], c)
City |> select([c], {c.name, c.country})
City |> select([c], %{"name" => c.name})
City |> select([:name])
City |> select([c], struct(c, [:name]))
City |> select([c], map(c, [:name]))
"""
# Delegates to the select builder; `:select` marks this as the single,
# non-mergeable select (contrast with `select_merge/3`).
defmacro select(query, binding \\ [], expr) do
Builder.Select.build(:select, query, binding, expr, __CALLER__)
end
@doc """
Mergeable select query expression.
This macro is similar to `select/3` except it may be specified
multiple times as long as every entry is a map. This is useful
for merging and composing selects. For example:
query = from p in Post, select: %{}
query =
if include_title? do
from p in query, select_merge: %{title: p.title}
else
query
end
query =
if include_visits? do
from p in query, select_merge: %{visits: p.visits}
else
query
end
In the example above, the query is built little by little by merging
into a final map. If both conditions above are true, the final query
would be equivalent to:
from p in Post, select: %{title: p.title, visits: p.visits}
If `:select_merge` is called and there is no value selected previously,
it will default to the source, `p` in the example above.
The argument given to `:select_merge` must always be a map. The value
being merged on must be a struct or a map. If it is a struct, the fields
merged later on must be part of the struct, otherwise an error is raised.
`select_merge` cannot be used to set fields in associations, as
associations are always loaded later, overriding any previous value.
"""
# Delegates to the select builder; `:merge` makes the expression merge
# into any previously selected value instead of replacing it.
defmacro select_merge(query, binding \\ [], expr) do
Builder.Select.build(:merge, query, binding, expr, __CALLER__)
end
@doc """
A distinct query expression.
When true, only keeps distinct values from the resulting
select expression.
If supported by your database, you can also pass query expressions
to distinct and it will generate a query with DISTINCT ON. In such
cases, `distinct` accepts exactly the same expressions as `order_by`
and any `distinct` expression will be automatically prepended to the
`order_by` expressions in case there is any `order_by` expression.
## Keywords examples
# Returns the list of different categories in the Post schema
from(p in Post, distinct: true, select: p.category)
# If your database supports DISTINCT ON(),
# you can pass expressions to distinct too
from(p in Post,
distinct: p.category,
order_by: [p.date])
# The DISTINCT ON() also supports ordering similar to ORDER BY.
from(p in Post,
distinct: [desc: p.category],
order_by: [p.date])
# Using atoms
from(p in Post, distinct: :category, order_by: :date)
## Expressions example
Post
|> distinct(true)
|> order_by([p], [p.category, p.author])
"""
# Delegates to the distinct builder, which accepts both the boolean form
# and the DISTINCT ON expression form.
defmacro distinct(query, binding \\ [], expr) do
Builder.Distinct.build(query, binding, expr, __CALLER__)
end
@doc """
An AND where query expression.
`where` expressions are used to filter the result set. If there is more
than one where expression, they are combined with an `and` operator. All
where expressions have to evaluate to a boolean value.
`where` also accepts a keyword list where the field given as key is going to
be compared with the given value. The fields will always refer to the source
given in `from`.
## Keywords example
from(c in City, where: c.country == "Sweden")
from(c in City, where: [country: "Sweden"])
It is also possible to interpolate the whole keyword list, allowing you to
dynamically filter the source:
filters = [country: "Sweden"]
from(c in City, where: ^filters)
## Expressions example
City |> where([c], c.country == "Sweden")
City |> where(country: "Sweden")
"""
# Delegates to the filter builder; `:and` combines this expression with
# any previous where expressions.
defmacro where(query, binding \\ [], expr) do
Builder.Filter.build(:where, :and, query, binding, expr, __CALLER__)
end
@doc """
An OR where query expression.
Behaves exactly the same as `where` except it combines with any previous
expression by using an `OR`. All expressions have to evaluate to a boolean
value.
`or_where` also accepts a keyword list where each key is a field to be
compared with the given value. Each key-value pair will be combined
using `AND`, exactly as in `where`.
## Keywords example
from(c in City, where: [country: "Sweden"], or_where: [country: "Brazil"])
If interpolating keyword lists, the keyword list entries are combined
using ANDs and joined to any existing expression with an OR:
filters = [country: "USA", name: "New York"]
from(c in City, where: [country: "Sweden"], or_where: ^filters)
is equivalent to:
from c in City, where: (c.country == "Sweden") or
(c.country == "USA" and c.name == "New York")
The behaviour above is by design to keep the changes between `where`
and `or_where` minimal. Plus, if you have a keyword list and you
would like each pair to be combined using `or`, it can be easily done
with `Enum.reduce/3`:
filters = [country: "USA", is_tax_exempt: true]
Enum.reduce(filters, City, fn {key, value}, query ->
from q in query, or_where: field(q, ^key) == ^value
end)
which will be equivalent to:
from c in City, or_where: (c.country == "USA"), or_where: c.is_tax_exempt == true
## Expressions example
City |> where([c], c.country == "Sweden") |> or_where([c], c.country == "Brazil")
"""
  defmacro or_where(query, binding \\ [], expr) do
    # Same builder as `where/3`, but the new clause is combined via `:or`.
    Builder.Filter.build(:where, :or, query, binding, expr, __CALLER__)
  end
@doc """
An order by query expression.
Orders the fields based on one or more fields. It accepts a single field
or a list of fields. The default direction is ascending (`:asc`) and can be
customized in a keyword list as one of the following:
* `:asc`
* `:asc_nulls_last`
* `:asc_nulls_first`
* `:desc`
* `:desc_nulls_last`
* `:desc_nulls_first`
The `*_nulls_first` and `*_nulls_last` variants are not supported by all
databases. While all databases default to ascending order, the choice of
"nulls first" or "nulls last" is specific to each database implementation.
`order_by` may be invoked or listed in a query many times. New expressions
are always appended to the previous ones.
`order_by` also accepts a list of atoms where each atom refers to a field in
source or a keyword list where the direction is given as key and the field
to order as value.
## Keywords examples
from(c in City, order_by: c.name, order_by: c.population)
from(c in City, order_by: [c.name, c.population])
from(c in City, order_by: [asc: c.name, desc: c.population])
from(c in City, order_by: [:name, :population])
from(c in City, order_by: [asc: :name, desc_nulls_first: :population])
A keyword list can also be interpolated:
values = [asc: :name, desc_nulls_first: :population]
from(c in City, order_by: ^values)
A fragment can also be used:
from c in City, order_by: [
# a deterministic shuffled order
fragment("? % ? DESC", c.id, ^modulus),
desc: c.id,
]
## Expressions example
City |> order_by([c], asc: c.name, desc: c.population)
City |> order_by(asc: :name) # Sorts by the cities name
"""
  defmacro order_by(query, binding \\ [], expr) do
    # Delegates to the order-by builder; new expressions append to prior ones.
    Builder.OrderBy.build(query, binding, expr, __CALLER__)
  end
@doc """
A union query expression.
Combines result sets of multiple queries. The `select` of each query
must be exactly the same, with the same types in the same order.
Union expression returns only unique rows as if each query returned
distinct results. This may cause performance penalty. If you need
just to combine multiple result sets without removing duplicate rows
consider using `union_all/2`.
Note that the operations `order_by`, `limit` and `offset` of the
current `query` apply to the result of the union.
## Keywords example
supplier_query = from s in Supplier, select: s.city
from c in Customer, select: c.city, union: ^supplier_query
## Expressions example
supplier_query = Supplier |> select([s], s.city)
Customer |> select([c], c.city) |> union(^supplier_query)
"""
  defmacro union(query, other_query) do
    # Combination builder tagged `:union` (distinct rows).
    Builder.Combination.build(:union, query, other_query, __CALLER__)
  end
@doc """
A union all query expression.
Combines result sets of multiple queries. The `select` of each query
must be exactly the same, with the same types in the same order.
Note that the operations `order_by`, `limit` and `offset` of the
current `query` apply to the result of the union.
## Keywords example
supplier_query = from s in Supplier, select: s.city
from c in Customer, select: c.city, union_all: ^supplier_query
## Expressions example
supplier_query = Supplier |> select([s], s.city)
Customer |> select([c], c.city) |> union_all(^supplier_query)
"""
  defmacro union_all(query, other_query) do
    # Combination builder tagged `:union_all` (keeps duplicate rows).
    Builder.Combination.build(:union_all, query, other_query, __CALLER__)
  end
@doc """
An except (set difference) query expression.
Takes the difference of the result sets of multiple queries. The
`select` of each query must be exactly the same, with the same
types in the same order.
Except expression returns only unique rows as if each query returned
distinct results. This may cause performance penalty. If you need
just to take the difference of multiple result sets without
removing duplicate rows consider using `except_all/2`.
Note that the operations `order_by`, `limit` and `offset` of the
current `query` apply to the result of the set difference.
## Keywords example
supplier_query = from s in Supplier, select: s.city
from c in Customer, select: c.city, except: ^supplier_query
## Expressions example
supplier_query = Supplier |> select([s], s.city)
Customer |> select([c], c.city) |> except(^supplier_query)
"""
  defmacro except(query, other_query) do
    # Combination builder tagged `:except` (set difference, distinct rows).
    Builder.Combination.build(:except, query, other_query, __CALLER__)
  end
@doc """
An except (set difference) query expression.
Takes the difference of the result sets of multiple queries. The
`select` of each query must be exactly the same, with the same
types in the same order.
Note that the operations `order_by`, `limit` and `offset` of the
current `query` apply to the result of the set difference.
## Keywords example
supplier_query = from s in Supplier, select: s.city
from c in Customer, select: c.city, except_all: ^supplier_query
## Expressions example
supplier_query = Supplier |> select([s], s.city)
Customer |> select([c], c.city) |> except_all(^supplier_query)
"""
  defmacro except_all(query, other_query) do
    # Combination builder tagged `:except_all` (set difference, keeps duplicates).
    Builder.Combination.build(:except_all, query, other_query, __CALLER__)
  end
@doc """
An intersect query expression.
Takes the overlap of the result sets of multiple queries. The
`select` of each query must be exactly the same, with the same
types in the same order.
Intersect expression returns only unique rows as if each query returned
distinct results. This may cause performance penalty. If you need
just to take the intersection of multiple result sets without
removing duplicate rows consider using `intersect_all/2`.
Note that the operations `order_by`, `limit` and `offset` of the
current `query` apply to the result of the intersection.
## Keywords example
supplier_query = from s in Supplier, select: s.city
from c in Customer, select: c.city, intersect: ^supplier_query
## Expressions example
supplier_query = Supplier |> select([s], s.city)
Customer |> select([c], c.city) |> intersect(^supplier_query)
"""
  defmacro intersect(query, other_query) do
    # Combination builder tagged `:intersect` (set intersection, distinct rows).
    Builder.Combination.build(:intersect, query, other_query, __CALLER__)
  end
@doc """
An intersect query expression.
Takes the overlap of the result sets of multiple queries. The
`select` of each query must be exactly the same, with the same
types in the same order.
Note that the operations `order_by`, `limit` and `offset` of the
current `query` apply to the result of the intersection.
## Keywords example
supplier_query = from s in Supplier, select: s.city
from c in Customer, select: c.city, intersect_all: ^supplier_query
## Expressions example
supplier_query = Supplier |> select([s], s.city)
Customer |> select([c], c.city) |> intersect_all(^supplier_query)
"""
  defmacro intersect_all(query, other_query) do
    # Combination builder tagged `:intersect_all` (intersection, keeps duplicates).
    Builder.Combination.build(:intersect_all, query, other_query, __CALLER__)
  end
@doc """
A limit query expression.
Limits the number of rows returned from the result. Can be any expression but
has to evaluate to an integer value and it can't include any field.
If `limit` is given twice, it overrides the previous value.
## Keywords example
from(u in User, where: u.id == ^current_user, limit: 1)
## Expressions example
User |> where([u], u.id == ^current_user) |> limit(1)
"""
  defmacro limit(query, binding \\ [], expr) do
    # Shared limit/offset builder, tagged `:limit`; a later limit overrides this one.
    Builder.LimitOffset.build(:limit, query, binding, expr, __CALLER__)
  end
@doc """
An offset query expression.
Offsets the number of rows selected from the result. Can be any expression
but it must evaluate to an integer value and it can't include any field.
If `offset` is given twice, it overrides the previous value.
## Keywords example
# Get all posts on page 4
from(p in Post, limit: 10, offset: 30)
## Expressions example
Post |> limit(10) |> offset(30)
"""
  defmacro offset(query, binding \\ [], expr) do
    # Shared limit/offset builder, tagged `:offset`; a later offset overrides this one.
    Builder.LimitOffset.build(:offset, query, binding, expr, __CALLER__)
  end
@doc ~S"""
A lock query expression.
Provides support for row-level pessimistic locking using
`SELECT ... FOR UPDATE` or other, database-specific, locking clauses.
`expr` can be any expression but has to evaluate to a boolean value or to a
string and it can't include any fields.
If `lock` is used more than once, the last one used takes precedence.
Ecto also supports [optimistic
locking](http://en.wikipedia.org/wiki/Optimistic_concurrency_control) but not
through queries. For more information on optimistic locking, have a look at
the `Ecto.Changeset.optimistic_lock/3` function
## Keywords example
from(u in User, where: u.id == ^current_user, lock: "FOR SHARE NOWAIT")
## Expressions example
User |> where(u.id == ^current_user) |> lock("FOR SHARE NOWAIT")
"""
  defmacro lock(query, expr) do
    # Delegates to the lock builder (e.g. "FOR UPDATE"-style clauses);
    # the last lock expression used takes precedence.
    Builder.Lock.build(query, expr, __CALLER__)
  end
@doc ~S"""
An update query expression.
Updates are used to update the filtered entries. In order for
updates to be applied, `c:Ecto.Repo.update_all/3` must be invoked.
## Keywords example
from(u in User, update: [set: [name: "new name"]])
## Expressions example
User |> update([u], set: [name: "new name"])
User |> update(set: [name: "new name"])
## Interpolation
new_name = "new name"
from(u in User, update: [set: [name: ^new_name]])
new_name = "new name"
from(u in User, update: [set: [name: fragment("upper(?)", ^new_name)]])
## Operators
The update expression in Ecto supports the following operators:
* `set` - sets the given field in the table to the given value
from(u in User, update: [set: [name: "new name"]])
* `inc` - increments (or decrements if the value is negative) the given field in the table by the given value
from(u in User, update: [inc: [accesses: 1]])
* `push` - pushes (appends) the given value to the end of the array field
from(u in User, update: [push: [tags: "cool"]])
* `pull` - pulls (removes) the given value from the array field
from(u in User, update: [pull: [tags: "not cool"]])
"""
  defmacro update(query, binding \\ [], expr) do
    # Delegates to the update builder (set/inc/push/pull operators).
    Builder.Update.build(query, binding, expr, __CALLER__)
  end
@doc """
A group by query expression.
Groups together rows from the schema that have the same values in the given
fields. Using `group_by` "groups" the query giving it different semantics
in the `select` expression. If a query is grouped, only fields that were
referenced in the `group_by` can be used in the `select` or if the field
is given as an argument to an aggregate function.
`group_by` also accepts a list of atoms where each atom refers to
a field in source. For more complicated queries you can access fields
directly instead of atoms.
## Keywords examples
# Returns the number of posts in each category
from(p in Post,
group_by: p.category,
select: {p.category, count(p.id)})
# Using atoms
from(p in Post, group_by: :category, select: {p.category, count(p.id)})
# Using direct fields access
from(p in Post,
join: c in assoc(p, :category),
group_by: [p.id, c.name])
## Expressions example
Post |> group_by([p], p.category) |> select([p], count(p.id))
"""
  defmacro group_by(query, binding \\ [], expr) do
    # Delegates to the group-by builder at macro-expansion time.
    Builder.GroupBy.build(query, binding, expr, __CALLER__)
  end
@doc """
An AND having query expression.
Like `where`, `having` filters rows from the schema, but after the grouping is
performed giving it the same semantics as `select` for a grouped query
(see `group_by/3`). `having` groups the query even if the query has no
`group_by` expression.
## Keywords example
# Returns the number of posts in each category where the
# average number of comments is above ten
from(p in Post,
group_by: p.category,
having: avg(p.num_comments) > 10,
select: {p.category, count(p.id)})
## Expressions example
Post
|> group_by([p], p.category)
|> having([p], avg(p.num_comments) > 10)
|> select([p], count(p.id))
"""
  defmacro having(query, binding \\ [], expr) do
    # Filter builder tagged `:having`, combined with previous clauses via `:and`.
    Builder.Filter.build(:having, :and, query, binding, expr, __CALLER__)
  end
@doc """
An OR having query expression.
Like `having` but combines with the previous expression by using
`OR`. `or_having` behaves for `having` the same way `or_where`
behaves for `where`.
## Keywords example
# Augment a previous group_by with a having condition.
from(p in query, or_having: avg(p.num_comments) > 10)
## Expressions example
# Augment a previous group_by with a having condition.
Post |> or_having([p], avg(p.num_comments) > 10)
"""
  defmacro or_having(query, binding \\ [], expr) do
    # Filter builder tagged `:having`, combined with previous clauses via `:or`.
    Builder.Filter.build(:having, :or, query, binding, expr, __CALLER__)
  end
@doc """
Preloads the associations into the result set.
Imagine you have an schema `Post` with a `has_many :comments`
association and you execute the following query:
Repo.all from p in Post, preload: [:comments]
The example above will fetch all posts from the database and then do
a separate query returning all comments associated with the given posts.
The comments are then processed and associated to each returned `post`
under the `comments` field.
Often times, you may want posts and comments to be selected and
filtered in the same query. For such cases, you can explicitly tell
an existing join to be preloaded into the result set:
Repo.all from p in Post,
join: c in assoc(p, :comments),
where: c.published_at > p.updated_at,
preload: [comments: c]
In the example above, instead of issuing a separate query to fetch
comments, Ecto will fetch posts and comments in a single query and
then do a separate pass associating each comment to its parent post.
Therefore, instead of returning `number_of_posts * number_of_comments`
results, like a `join` would, it returns only posts with the `comments`
fields properly filled in.
Nested associations can also be preloaded in both formats:
Repo.all from p in Post,
preload: [comments: :likes]
Repo.all from p in Post,
join: c in assoc(p, :comments),
join: l in assoc(c, :likes),
where: l.inserted_at > c.updated_at,
preload: [comments: {c, likes: l}]
## Preload queries
Preload also allows queries to be given, allowing you to filter or
customize how the preloads are fetched:
comments_query = from c in Comment, order_by: c.published_at
Repo.all from p in Post, preload: [comments: ^comments_query]
The example above will issue two queries, one for loading posts and
then another for loading the comments associated with the posts.
Comments will be ordered by `published_at`.
Note: keep in mind operations like limit and offset in the preload
query will affect the whole result set and not each association. For
example, the query below:
comments_query = from c in Comment, order_by: c.popularity, limit: 5
Repo.all from p in Post, preload: [comments: ^comments_query]
won't bring the top of comments per post. Rather, it will only bring
the 5 top comments across all posts.
## Preload functions
Preload also allows functions to be given. In such cases, the function
receives the IDs of the parent association and it must return the associated
data. Ecto then will map this data and sort it by the relationship key:
comment_preloader = fn post_ids -> fetch_comments_by_post_ids(post_ids) end
Repo.all from p in Post, preload: [comments: ^comment_preloader]
This is useful when the whole dataset was already loaded or must be
explicitly fetched from elsewhere. The IDs received by the preloading
function and the result returned depends on the association type:
* For `has_many` and `belongs_to` - the function receives the IDs of
the parent association and it must return a list of maps or structs
with the associated entries. The associated map/struct must contain
the "foreign_key" field. For example, if a post has many comments,
when preloading the comments with a custom function, the function
will receive a list of "post_ids" as argument and it must return
maps or structs representing the comments. The maps/structs must
include the `:post_id` field
* For `has_many :through` - it behaves similarly to a regular `has_many`
but note that the IDs received are the ones from the closest
parent and not the furthest one. Imagine for example a post has
many comments and each comment has an author. Therefore, a post
may have many comments_authors, written as
`has_many :comments_authors, through: [:comments, :author]`. When
preloading authors with a custom function via `:comments_authors`,
the function will receive the IDs of the comments and not of the
posts. That's because through associations are still loaded step
by step
* For `many_to_many` - the function receives the IDs of the parent
association and it must return a tuple with the parent id as first
element and the association map or struct as second. For example,
if a post has many tags, when preloading the tags with a custom
function, the function will receive a list of "post_ids" as argument
and it must return a tuple in the format of `{post_id, tag}`
## Keywords example
# Returns all posts, their associated comments, and the associated
# likes for those comments.
from(p in Post,
preload: [:comments, comments: :likes],
select: p)
## Expressions examples
Post |> preload(:comments) |> select([p], p)
Post
|> join(:left, [p], c in assoc(p, :comments))
|> preload([p, c], [:user, comments: c])
|> select([p], p)
"""
  defmacro preload(query, bindings \\ [], expr) do
    # Delegates to the preload builder; accepts atoms, nested keywords,
    # interpolated queries and preloader functions (see @doc above).
    Builder.Preload.build(query, bindings, expr, __CALLER__)
  end
@doc """
Restricts the query to return the first result ordered by primary key.
The query will be automatically ordered by the primary key
unless `order_by` is given or `order_by` is set in the query.
Limit is always set to 1.
## Examples
Post |> first |> Repo.one
query |> first(:inserted_at) |> Repo.one
"""
def first(queryable, order_by \\ nil)
def first(%Ecto.Query{} = query, nil) do
query = %{query | limit: limit()}
case query do
%{order_bys: []} ->
%{query | order_bys: [order_by_pk(query, :asc)]}
%{} ->
query
end
end
def first(queryable, nil), do: first(Ecto.Queryable.to_query(queryable), nil)
def first(queryable, key), do: first(order_by(queryable, ^key), nil)
@doc """
Restricts the query to return the last result ordered by primary key.
The query ordering will be automatically reversed, with ASC
columns becoming DESC columns (and vice-versa) and limit is set
to 1. If there is no ordering, the query will be automatically
ordered decreasingly by primary key.
## Examples
Post |> last |> Repo.one
query |> last(:inserted_at) |> Repo.one
"""
def last(queryable, order_by \\ nil)
def last(queryable, nil), do: %{reverse_order(queryable) | limit: limit()}
def last(queryable, key), do: last(order_by(queryable, ^key), nil)
  # Builds the `LIMIT 1` query expression shared by `first/2` and `last/2`.
  defp limit do
    %QueryExpr{expr: 1, params: [], file: __ENV__.file, line: __ENV__.line}
  end
  # Builds the quoted AST for accessing `field` on the binding at position
  # `ix`, i.e. the internal representation of `&ix.field` in query syntax.
  defp field(ix, field) when is_integer(ix) and is_atom(field) do
    {{:., [], [{:&, [], [ix]}, field]}, [], []}
  end
defp order_by_pk(query, dir) do
schema = assert_schema!(query)
pks = schema.__schema__(:primary_key)
expr = for pk <- pks, do: {dir, field(0, pk)}
%QueryExpr{expr: expr, file: __ENV__.file, line: __ENV__.line}
end
  # Extracts the schema module from the query's from expression; raises
  # Ecto.QueryError when the query is schemaless (no schema in its source).
  defp assert_schema!(%{from: %Ecto.Query.FromExpr{source: {_source, schema}}}) when schema != nil, do: schema

  defp assert_schema!(query) do
    raise Ecto.QueryError, query: query, message: "expected a from expression with a schema"
  end
@doc """
Returns `true` if query has binding with a given name, otherwise `false`.
For more information on named bindings see "Named bindings" in this module doc.
"""
def has_named_binding?(%Ecto.Query{aliases: aliases}, key) do
Map.has_key?(aliases, key)
end
def has_named_binding?(queryable, _key)
when is_atom(queryable) or is_binary(queryable) or is_tuple(queryable) do
false
end
def has_named_binding?(queryable, key) do
has_named_binding?(Ecto.Queryable.to_query(queryable), key)
end
@doc """
Reverses the ordering of the query.
ASC columns become DESC columns (and vice-versa). If the query
has no order_bys, it orders by the inverse of the primary key.
## Examples
query |> reverse_order |> Repo.one
Post |> order_by(asc: :id) |> reverse_order() == Post |> order_by(desc: :id)
"""
def reverse_order(%Ecto.Query{} = query) do
update_in(query.order_bys, fn
[] -> [order_by_pk(query, :desc)]
order_bys -> Enum.map(order_bys, &reverse_order_by/1)
end)
end
def reverse_order(queryable) do
reverse_order(Ecto.Queryable.to_query(queryable))
end
defp reverse_order_by(%{expr: expr} = order_by) do
%{
order_by
| expr:
Enum.map(expr, fn
{:desc, ast} -> {:asc, ast}
{:desc_nulls_last, ast} -> {:asc_nulls_first, ast}
{:desc_nulls_first, ast} -> {:asc_nulls_last, ast}
{:asc, ast} -> {:desc, ast}
{:asc_nulls_last, ast} -> {:desc_nulls_first, ast}
{:asc_nulls_first, ast} -> {:desc_nulls_last, ast}
end)
}
end
end
| 34.094642 | 139 | 0.675606 |
e856ec376ffbaccdecef025f51e091df320f6546 | 5,761 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/report.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/report.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/report.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V33.Model.Report do
  @moduledoc """
  Represents a Report resource.

  ## Attributes

  * `accountId` (*type:* `String.t`, *default:* `nil`) - The account ID to which this report belongs.
  * `criteria` (*type:* `GoogleApi.DFAReporting.V33.Model.ReportCriteria.t`, *default:* `nil`) - The report criteria for a report of type "STANDARD".
  * `crossDimensionReachCriteria` (*type:* `GoogleApi.DFAReporting.V33.Model.ReportCrossDimensionReachCriteria.t`, *default:* `nil`) - The report criteria for a report of type "CROSS_DIMENSION_REACH".
  * `delivery` (*type:* `GoogleApi.DFAReporting.V33.Model.ReportDelivery.t`, *default:* `nil`) - The report's email delivery settings.
  * `etag` (*type:* `String.t`, *default:* `nil`) - The eTag of this response for caching purposes.
  * `fileName` (*type:* `String.t`, *default:* `nil`) - The filename used when generating report files for this report.
  * `floodlightCriteria` (*type:* `GoogleApi.DFAReporting.V33.Model.ReportFloodlightCriteria.t`, *default:* `nil`) - The report criteria for a report of type "FLOODLIGHT".
  * `format` (*type:* `String.t`, *default:* `nil`) - The output format of the report. If not specified, default format is "CSV". Note that the actual format in the completed report file might differ if for instance the report's size exceeds the format's capabilities. "CSV" will then be the fallback format.
  * `id` (*type:* `String.t`, *default:* `nil`) - The unique ID identifying this report resource.
  * `kind` (*type:* `String.t`, *default:* `dfareporting#report`) - The kind of resource this is, in this case dfareporting#report.
  * `lastModifiedTime` (*type:* `String.t`, *default:* `nil`) - The timestamp (in milliseconds since epoch) of when this report was last modified.
  * `name` (*type:* `String.t`, *default:* `nil`) - The name of the report.
  * `ownerProfileId` (*type:* `String.t`, *default:* `nil`) - The user profile id of the owner of this report.
  * `pathToConversionCriteria` (*type:* `GoogleApi.DFAReporting.V33.Model.ReportPathToConversionCriteria.t`, *default:* `nil`) - The report criteria for a report of type "PATH_TO_CONVERSION".
  * `reachCriteria` (*type:* `GoogleApi.DFAReporting.V33.Model.ReportReachCriteria.t`, *default:* `nil`) - The report criteria for a report of type "REACH".
  * `schedule` (*type:* `GoogleApi.DFAReporting.V33.Model.ReportSchedule.t`, *default:* `nil`) - The report's schedule. Can only be set if the report's 'dateRange' is a relative date range and the relative date range is not "TODAY".
  * `subAccountId` (*type:* `String.t`, *default:* `nil`) - The subaccount ID to which this report belongs if applicable.
  * `type` (*type:* `String.t`, *default:* `nil`) - The type of the report.
  """

  use GoogleApi.Gax.ModelBase

  # Generated typespec mirroring the API resource's JSON fields one-to-one.
  @type t :: %__MODULE__{
          :accountId => String.t(),
          :criteria => GoogleApi.DFAReporting.V33.Model.ReportCriteria.t(),
          :crossDimensionReachCriteria =>
            GoogleApi.DFAReporting.V33.Model.ReportCrossDimensionReachCriteria.t(),
          :delivery => GoogleApi.DFAReporting.V33.Model.ReportDelivery.t(),
          :etag => String.t(),
          :fileName => String.t(),
          :floodlightCriteria => GoogleApi.DFAReporting.V33.Model.ReportFloodlightCriteria.t(),
          :format => String.t(),
          :id => String.t(),
          :kind => String.t(),
          :lastModifiedTime => String.t(),
          :name => String.t(),
          :ownerProfileId => String.t(),
          :pathToConversionCriteria =>
            GoogleApi.DFAReporting.V33.Model.ReportPathToConversionCriteria.t(),
          :reachCriteria => GoogleApi.DFAReporting.V33.Model.ReportReachCriteria.t(),
          :schedule => GoogleApi.DFAReporting.V33.Model.ReportSchedule.t(),
          :subAccountId => String.t(),
          :type => String.t()
        }

  # `field/1,2` is provided by GoogleApi.Gax.ModelBase and registers each
  # JSON field (with its nested decoder module where applicable).
  field(:accountId)
  field(:criteria, as: GoogleApi.DFAReporting.V33.Model.ReportCriteria)

  field(:crossDimensionReachCriteria,
    as: GoogleApi.DFAReporting.V33.Model.ReportCrossDimensionReachCriteria
  )

  field(:delivery, as: GoogleApi.DFAReporting.V33.Model.ReportDelivery)
  field(:etag)
  field(:fileName)
  field(:floodlightCriteria, as: GoogleApi.DFAReporting.V33.Model.ReportFloodlightCriteria)
  field(:format)
  field(:id)
  field(:kind)
  field(:lastModifiedTime)
  field(:name)
  field(:ownerProfileId)

  field(:pathToConversionCriteria,
    as: GoogleApi.DFAReporting.V33.Model.ReportPathToConversionCriteria
  )

  field(:reachCriteria, as: GoogleApi.DFAReporting.V33.Model.ReportReachCriteria)
  field(:schedule, as: GoogleApi.DFAReporting.V33.Model.ReportSchedule)
  field(:subAccountId)
  field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V33.Model.Report do
  # Delegates JSON decoding to the model's generated decode/2.
  def decode(value, options) do
    GoogleApi.DFAReporting.V33.Model.Report.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V33.Model.Report do
  # Delegates JSON encoding to the shared Gax model-base encoder.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 53.342593 | 310 | 0.704218 |
e856f79bffc045fc5a7bfc7bf36208f4b0fa8614 | 280 | ex | Elixir | lib/option_calc/Models/Quote.ex | davidgrupp/Option-Calc | 0b0e4607d827412e207d5fc5102aee001e93a821 | [
"Apache-2.0"
] | null | null | null | lib/option_calc/Models/Quote.ex | davidgrupp/Option-Calc | 0b0e4607d827412e207d5fc5102aee001e93a821 | [
"Apache-2.0"
] | null | null | null | lib/option_calc/Models/Quote.ex | davidgrupp/Option-Calc | 0b0e4607d827412e207d5fc5102aee001e93a821 | [
"Apache-2.0"
] | null | null | null | defmodule OptionCalc.Quote do
defstruct [
:dayslow,
:dayshigh,
:symbol,
:name,
:percentchange,
:change,
:volume,
:shortratio,
:open,
:last,
:ask,
:bid,
]
end | 17.5 | 30 | 0.4 |
e8571075f061d93007f1a7ce070ab3cfb4f95f38 | 1,099 | ex | Elixir | lib/cxs_starter_web/live/page_live.ex | antp/cxs_starter | 349cbd61e561f276b552f95e88e24e01397b8519 | [
"MIT"
] | null | null | null | lib/cxs_starter_web/live/page_live.ex | antp/cxs_starter | 349cbd61e561f276b552f95e88e24e01397b8519 | [
"MIT"
] | null | null | null | lib/cxs_starter_web/live/page_live.ex | antp/cxs_starter | 349cbd61e561f276b552f95e88e24e01397b8519 | [
"MIT"
] | null | null | null | defmodule CxsStarterWeb.PageLive do
use CxsStarterWeb, :live_view
@impl true
def mount(_params, _session, socket) do
{:ok, assign(socket, query: "", results: %{})}
end
@impl true
def handle_event("suggest", %{"q" => query}, socket) do
{:noreply, assign(socket, results: search(query), query: query)}
end
@impl true
def handle_event("search", %{"q" => query}, socket) do
case search(query) do
%{^query => vsn} ->
{:noreply, redirect(socket, external: "https://hexdocs.pm/#{query}/#{vsn}")}
_ ->
{:noreply,
socket
|> put_flash(:error, "No dependencies found matching \"#{query}\"")
|> assign(results: %{}, query: query)}
end
end
defp search(query) do
if not CxsStarterWeb.Endpoint.config(:code_reloader) do
raise "action disabled when not in development"
end
for {app, desc, vsn} <- Application.started_applications(),
app = to_string(app),
String.starts_with?(app, query) and not List.starts_with?(desc, ~c"ERTS"),
into: %{},
do: {app, vsn}
end
end
| 27.475 | 84 | 0.605096 |
e8571a9dc18a712f36f935fd8b3e1ce3ee745120 | 954 | exs | Elixir | test/roger/job/worker_timeout_test.exs | jnylen/roger | 074338eceae4783221088e8b235a635452708ef1 | [
"MIT"
] | null | null | null | test/roger/job/worker_timeout_test.exs | jnylen/roger | 074338eceae4783221088e8b235a635452708ef1 | [
"MIT"
] | null | null | null | test/roger/job/worker_timeout_test.exs | jnylen/roger | 074338eceae4783221088e8b235a635452708ef1 | [
"MIT"
] | null | null | null | defmodule Roger.Job.WorkerTimeoutTest do
use ExUnit.Case
use Roger.AppCase
alias Roger.Job
defmodule OnErrorCallback do
use Roger.Partition.Worker.Callback
def on_error(_partition, _job, {error, _reason}, _, _) do
send(Roger.Job.WorkerTimeoutTest, {:job_error, error})
end
end
defmodule MyLongJob do
use Roger.Job
def perform([time]) do
:timer.sleep(time)
send(Roger.Job.WorkerTimeoutTest, :job_done)
end
def max_execution_time(), do: 1
end
test "job should not finish when over timeout" do
Application.put_env(:roger, :callbacks, OnErrorCallback)
{:ok, job} = Job.create(MyLongJob, [1100])
:ok = Job.enqueue(job, @app)
refute_receive :job_done
assert_receive {:job_error, :timeout}, 1500
end
test "jobs still complete inside timeout" do
{:ok, job} = Job.create(MyLongJob, [50])
:ok = Job.enqueue(job, @app)
assert_receive :job_done
end
end
| 21.2 | 61 | 0.680294 |
e8575534610fedd1a2d8fab36baa3704d68f394e | 542 | ex | Elixir | examples/ex2/consumer.ex | Carburetor/esp_ex | 0a0ab02c71945e521b213befc0421a4642c9e07b | [
"MIT"
] | null | null | null | examples/ex2/consumer.ex | Carburetor/esp_ex | 0a0ab02c71945e521b213befc0421a4642c9e07b | [
"MIT"
] | 1 | 2018-05-11T04:27:54.000Z | 2018-05-11T04:27:54.000Z | examples/ex2/consumer.ex | Carburetor/esp_ex | 0a0ab02c71945e521b213befc0421a4642c9e07b | [
"MIT"
] | 1 | 2018-10-30T06:08:02.000Z | 2018-10-30T06:08:02.000Z | defmodule Ex2.Consumer do
use GenServer
def start_link(_) do
EspEx.Consumer.start_link(__MODULE__, identifier: __MODULE__)
EspEx.Consumer.listen(__MODULE__)
end
def handle_cast(:event_received, raw_event) do
event = Ex2.Task.Events.transform(raw_event.type, raw_event)
handle(event)
end
def handle(%Ex2.Task.Events.Created{} = created) do
# handle the event
# write some other event
EspEx.EventBus.write(%Ex2.Task.Events.Started{})
end
def handle(_) do
IO.puts("Event unhandled")
end
end
| 22.583333 | 65 | 0.714022 |
e85761dc9e8fd11aa3eaf58cd9e514fa32c0addf | 1,906 | exs | Elixir | config/dev.exs | DPechetti/nlw-heat-elixir | c3fe71244a4c1d49bb28668f9334c5b4c803aa54 | [
"MIT"
] | 2 | 2021-10-30T11:53:58.000Z | 2022-02-06T02:17:06.000Z | config/dev.exs | DPechetti/nlw-heat-elixir | c3fe71244a4c1d49bb28668f9334c5b4c803aa54 | [
"MIT"
] | 1 | 2021-10-30T14:24:28.000Z | 2021-10-30T14:24:28.000Z | config/dev.exs | DPechetti/nlw-heat-elixir | c3fe71244a4c1d49bb28668f9334c5b4c803aa54 | [
"MIT"
] | null | null | null | import Config
# Configure your database
config :heat_tags, HeatTags.Repo,
username: "postgres",
password: "postgres",
database: "heat_tags_dev",
hostname: "localhost",
show_sensitive_data_on_connection_error: true,
pool_size: 10
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with esbuild to bundle .js and .css sources.
config :heat_tags, HeatTagsWeb.Endpoint,
# Binding to loopback ipv4 address prevents access from other machines.
# Change to `ip: {0, 0, 0, 0}` to allow access from other machines.
http: [ip: {127, 0, 0, 1}, port: 4000],
check_origin: false,
code_reloader: true,
debug_errors: true,
secret_key_base: "6/HFmNV9Q0mXk8vmggQYRIYJgGLT3G9COC2wS4Uzoy3OJ20wV86+KpwefXjkOOU5",
watchers: []
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 31.245902 | 86 | 0.730325 |
e8579601c274c4e44df47031aa61693f2b92e4ad | 1,007 | ex | Elixir | test/support/data_case.ex | andsleonardo/magnemite | 2a06c1520defeb193d718313ad3fc6a50349bc8d | [
"MIT"
] | null | null | null | test/support/data_case.ex | andsleonardo/magnemite | 2a06c1520defeb193d718313ad3fc6a50349bc8d | [
"MIT"
] | null | null | null | test/support/data_case.ex | andsleonardo/magnemite | 2a06c1520defeb193d718313ad3fc6a50349bc8d | [
"MIT"
] | null | null | null | defmodule Magnemite.DataCase do
  @moduledoc """
  This module defines the setup for tests requiring
  access to the application's data layer.
  You may define functions here to be used as helpers in
  your tests.
  Finally, if the test case interacts with the database,
  we enable the SQL sandbox, so changes done to the database
  are reverted at the end of every test. If you are using
  PostgreSQL, you can even run database tests asynchronously
  by setting `use Magnemite.DataCase, async: true`, although
  this option is not recommended for other databases.
  """
  use ExUnit.CaseTemplate
  # Injected into every test module that does `use Magnemite.DataCase`.
  using do
    quote do
      alias Magnemite.Repo
      import Ecto
      import Ecto.Changeset
      import Ecto.Query
      import Magnemite.DataCase
      import Magnemite.Factory
    end
  end
  # Checks out a sandboxed DB connection for each test; non-async tests use
  # :shared mode so processes spawned by the test see the same connection.
  setup tags do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Magnemite.Repo)
    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(Magnemite.Repo, {:shared, self()})
    end
    :ok
  end
end
| 23.97619 | 71 | 0.713009 |
e857f1351dfcddfd02cb5abab6510eca4140e09c | 1,474 | ex | Elixir | lib/namedb_web/views/error_helpers.ex | empex2019liveview/namedb | fd01151dd293f9740204435fed524bc364f81069 | [
"MIT"
] | null | null | null | lib/namedb_web/views/error_helpers.ex | empex2019liveview/namedb | fd01151dd293f9740204435fed524bc364f81069 | [
"MIT"
] | null | null | null | lib/namedb_web/views/error_helpers.ex | empex2019liveview/namedb | fd01151dd293f9740204435fed524bc364f81069 | [
"MIT"
] | null | null | null | defmodule NamedbWeb.ErrorHelpers do
  @moduledoc """
  Conveniences for translating and building error messages.
  """
  use Phoenix.HTML
  @doc """
  Generates tag for inlined form input errors.
  """
  # Emits one <span class="help-block"> per error present on the given field.
  def error_tag(form, field) do
    Enum.map(Keyword.get_values(form.errors, field), fn error ->
      content_tag(:span, translate_error(error), class: "help-block")
    end)
  end
  @doc """
  Translates an error message using gettext.
  """
  def translate_error({msg, opts}) do
    # When using gettext, we typically pass the strings we want
    # to translate as a static argument:
    #
    #     # Translate "is invalid" in the "errors" domain
    #     dgettext("errors", "is invalid")
    #
    #     # Translate the number of files with plural rules
    #     dngettext("errors", "1 file", "%{count} files", count)
    #
    # Because the error messages we show in our forms and APIs
    # are defined inside Ecto, we need to translate them dynamically.
    # This requires us to call the Gettext module passing our gettext
    # backend as first argument.
    #
    # Note we use the "errors" domain, which means translations
    # should be written to the errors.po file. The :count option is
    # set by Ecto and indicates we should also apply plural rules.
    if count = opts[:count] do
      Gettext.dngettext(NamedbWeb.Gettext, "errors", msg, msg, count, opts)
    else
      Gettext.dgettext(NamedbWeb.Gettext, "errors", msg, opts)
    end
  end
end
| 32.755556 | 75 | 0.669607 |
e857ff852d4a89784ff151f93ed495ef49ccd754 | 2,342 | exs | Elixir | test/controllers/tag_controller_test.exs | cdegroot/clueless | c7f575159a42f55eb843df0e37802e778826c845 | [
"MIT"
] | null | null | null | test/controllers/tag_controller_test.exs | cdegroot/clueless | c7f575159a42f55eb843df0e37802e778826c845 | [
"MIT"
] | 4 | 2015-12-18T20:35:49.000Z | 2015-12-18T20:36:43.000Z | test/controllers/tag_controller_test.exs | cdegroot/clueless | c7f575159a42f55eb843df0e37802e778826c845 | [
"MIT"
] | null | null | null | defmodule Clueless.TagControllerTest do
  use Clueless.ConnCase
  alias Clueless.Tag
  # Attributes for create/update tests; a Tag apparently only needs a name.
  @valid_attrs %{name: "some content"}
  @invalid_attrs %{}
  # Every controller action below is exercised through an authenticated
  # connection (helper presumably provided by Clueless.ConnCase — confirm).
  setup do
    conn = authenticated_conn()
    {:ok, conn: conn}
  end
  test "lists all entries on index", %{conn: conn} do
    conn = get conn, tag_path(conn, :index)
    assert html_response(conn, 200) =~ "Listing tags"
  end
  test "renders form for new resources", %{conn: conn} do
    conn = get conn, tag_path(conn, :new)
    assert html_response(conn, 200) =~ "New tag"
  end
  test "creates resource and redirects when data is valid", %{conn: conn} do
    conn = post conn, tag_path(conn, :create), tag: @valid_attrs
    assert redirected_to(conn) == tag_path(conn, :index)
    assert Repo.get_by(Tag, @valid_attrs)
  end
  test "does not create resource and renders errors when data is invalid", %{conn: conn} do
    conn = post conn, tag_path(conn, :create), tag: @invalid_attrs
    assert html_response(conn, 200) =~ "New tag"
  end
  test "shows chosen resource", %{conn: conn} do
    tag = Repo.insert! %Tag{}
    conn = get conn, tag_path(conn, :show, tag)
    assert html_response(conn, 200) =~ "Show tag"
  end
  test "renders page not found when id is nonexistent", %{conn: conn} do
    assert_raise Ecto.NoResultsError, fn ->
      get conn, tag_path(conn, :show, -1)
    end
  end
  test "renders form for editing chosen resource", %{conn: conn} do
    tag = Repo.insert! %Tag{}
    conn = get conn, tag_path(conn, :edit, tag)
    assert html_response(conn, 200) =~ "Edit tag"
  end
  test "updates chosen resource and redirects when data is valid", %{conn: conn} do
    tag = Repo.insert! %Tag{}
    conn = put conn, tag_path(conn, :update, tag), tag: @valid_attrs
    assert redirected_to(conn) == tag_path(conn, :show, tag)
    assert Repo.get_by(Tag, @valid_attrs)
  end
  test "does not update chosen resource and renders errors when data is invalid", %{conn: conn} do
    tag = Repo.insert! %Tag{}
    conn = put conn, tag_path(conn, :update, tag), tag: @invalid_attrs
    assert html_response(conn, 200) =~ "Edit tag"
  end
  test "deletes chosen resource", %{conn: conn} do
    tag = Repo.insert! %Tag{}
    conn = delete conn, tag_path(conn, :delete, tag)
    assert redirected_to(conn) == tag_path(conn, :index)
    refute Repo.get(Tag, tag.id)
  end
end
| 32.527778 | 98 | 0.668232 |
e8582abc289aebf396d28fec7f294d96ad6a330e | 3,181 | ex | Elixir | clients/firestore/lib/google_api/firestore/v1beta1/model/google_firestore_admin_v1beta1_import_documents_metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/firestore/lib/google_api/firestore/v1beta1/model/google_firestore_admin_v1beta1_import_documents_metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/firestore/lib/google_api/firestore/v1beta1/model/google_firestore_admin_v1beta1_import_documents_metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Firestore.V1beta1.Model.GoogleFirestoreAdminV1beta1ImportDocumentsMetadata do
  @moduledoc """
  Metadata for ImportDocuments operations.
  ## Attributes
  *   `collectionIds` (*type:* `list(String.t)`, *default:* `nil`) - Which collection ids are being imported.
  *   `endTime` (*type:* `DateTime.t`, *default:* `nil`) - The time the operation ended, either successfully or otherwise. Unset if the operation is still active.
  *   `inputUriPrefix` (*type:* `String.t`, *default:* `nil`) - The location of the documents being imported.
  *   `operationState` (*type:* `String.t`, *default:* `nil`) - The state of the import operation.
  *   `progressBytes` (*type:* `GoogleApi.Firestore.V1beta1.Model.GoogleFirestoreAdminV1beta1Progress.t`, *default:* `nil`) - An estimate of the number of bytes processed.
  *   `progressDocuments` (*type:* `GoogleApi.Firestore.V1beta1.Model.GoogleFirestoreAdminV1beta1Progress.t`, *default:* `nil`) - An estimate of the number of documents processed.
  *   `startTime` (*type:* `DateTime.t`, *default:* `nil`) - The time that work began on the operation.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :collectionIds => list(String.t()),
          :endTime => DateTime.t(),
          :inputUriPrefix => String.t(),
          :operationState => String.t(),
          :progressBytes =>
            GoogleApi.Firestore.V1beta1.Model.GoogleFirestoreAdminV1beta1Progress.t(),
          :progressDocuments =>
            GoogleApi.Firestore.V1beta1.Model.GoogleFirestoreAdminV1beta1Progress.t(),
          :startTime => DateTime.t()
        }
  # Field declarations mirror the attributes documented above. This file is
  # auto-generated — regenerate rather than editing field definitions by hand.
  field(:collectionIds, type: :list)
  field(:endTime, as: DateTime)
  field(:inputUriPrefix)
  field(:operationState)
  field(:progressBytes, as: GoogleApi.Firestore.V1beta1.Model.GoogleFirestoreAdminV1beta1Progress)
  field(:progressDocuments,
    as: GoogleApi.Firestore.V1beta1.Model.GoogleFirestoreAdminV1beta1Progress
  )
  field(:startTime, as: DateTime)
end
defimpl Poison.Decoder,
  for: GoogleApi.Firestore.V1beta1.Model.GoogleFirestoreAdminV1beta1ImportDocumentsMetadata do
  # Delegate JSON decoding to the model module's generated decode/2.
  alias GoogleApi.Firestore.V1beta1.Model.GoogleFirestoreAdminV1beta1ImportDocumentsMetadata

  def decode(value, options) do
    GoogleFirestoreAdminV1beta1ImportDocumentsMetadata.decode(value, options)
  end
end
defimpl Poison.Encoder,
  for: GoogleApi.Firestore.V1beta1.Model.GoogleFirestoreAdminV1beta1ImportDocumentsMetadata do
  # Encoding is handled generically by the Gax model base.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 41.855263 | 179 | 0.728387 |
e8582b02b4020d5e1f9bdb1128b625d4389bc16b | 280 | exs | Elixir | chapter-11/strings-and-binaries-2.exs | andreyuhai/elixir-exercises | 23f90ba8e76597cf8feb45a11718dcb0c42b649d | [
"MIT"
] | null | null | null | chapter-11/strings-and-binaries-2.exs | andreyuhai/elixir-exercises | 23f90ba8e76597cf8feb45a11718dcb0c42b649d | [
"MIT"
] | null | null | null | chapter-11/strings-and-binaries-2.exs | andreyuhai/elixir-exercises | 23f90ba8e76597cf8feb45a11718dcb0c42b649d | [
"MIT"
] | null | null | null | defmodule StringsAndBinaries do
  # Exercise:StringsAndBinaries-2
  # Write an anagram?(word1, word2) that returns true if
  # its parameters are anagrams
  # NOTE(review): `--` removes one matching occurrence per element, so this
  # only checks that every char of word1 appears in word2. It is one-sided:
  # anagram?('ale', 'alert') would also return true. A complete check needs
  # the reverse subtraction (or a length comparison) as well.
  def anagram?(word1, word2), do: word1 -- word2 == []
end
IO.puts StringsAndBinaries.anagram?('alert', 'alter')
# => true
| 25.454545 | 56 | 0.717857 |
e85858b3ff8daae5cb2fbb925f67394a1b2032ee | 1,225 | ex | Elixir | clients/analytics_reporting/lib/google_api/analytics_reporting/v4/connection.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/analytics_reporting/lib/google_api/analytics_reporting/v4/connection.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/analytics_reporting/lib/google_api/analytics_reporting/v4/connection.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AnalyticsReporting.V4.Connection do
  @moduledoc """
  Handle Tesla connections for GoogleApi.AnalyticsReporting.V4.
  """
  @type t :: Tesla.Env.client()
  # Generated connection configuration: the OAuth scopes requested for this
  # API plus the base URL against which all requests are resolved.
  use GoogleApi.Gax.Connection,
    scopes: [
      # View and manage your Google Analytics data
      "https://www.googleapis.com/auth/analytics",
      # View your Google Analytics data
      "https://www.googleapis.com/auth/analytics.readonly"
    ],
    otp_app: :google_api_analytics_reporting,
    base_url: "https://analyticsreporting.googleapis.com/"
end
| 34.027778 | 74 | 0.740408 |
e85893d3a67eab33a0a6746b4f920ce39d121c1c | 277 | exs | Elixir | test/lib/Utilities/oauth_test.exs | davidgrupp/Option-Calc | 0b0e4607d827412e207d5fc5102aee001e93a821 | [
"Apache-2.0"
] | null | null | null | test/lib/Utilities/oauth_test.exs | davidgrupp/Option-Calc | 0b0e4607d827412e207d5fc5102aee001e93a821 | [
"Apache-2.0"
] | null | null | null | test/lib/Utilities/oauth_test.exs | davidgrupp/Option-Calc | 0b0e4607d827412e207d5fc5102aee001e93a821 | [
"Apache-2.0"
] | null | null | null | defmodule OAuthTests do
  use ExUnit.Case
  #doctest OptionCalc.OAuth
  test "OAuth sign" do
    # Assert via pattern match: sign/3 must return exactly one Authorization
    # header whose value starts with the OAuth signature prefix.
    assert [{"Authorization", "OAuth oauth_signature=" <> _}] = OptionCalc.OAuth.sign(:get, "https://example.com", :tk_oauth)
    #assert 1==1
  end
end
| 23.083333 | 125 | 0.68231 |
e858984bc30e46d9bc4b6c45f2926df9ce5f8487 | 845 | exs | Elixir | apps/state_mediator/test/state_mediator/mediator_log_test.exs | mbta/automatic-fiesta | de9eb43e4fc7c47aa069f5ded8497ddd39ff74c5 | [
"MIT"
] | null | null | null | apps/state_mediator/test/state_mediator/mediator_log_test.exs | mbta/automatic-fiesta | de9eb43e4fc7c47aa069f5ded8497ddd39ff74c5 | [
"MIT"
] | null | null | null | apps/state_mediator/test/state_mediator/mediator_log_test.exs | mbta/automatic-fiesta | de9eb43e4fc7c47aa069f5ded8497ddd39ff74c5 | [
"MIT"
] | null | null | null | defmodule StateMediator.MediatorLogTest do
  use ExUnit.Case
  import ExUnit.CaptureLog, only: [capture_log: 1]
  alias StateMediator.Mediator
  test "logs a fetch timeout as a warning" do
    assert capture_log(fn ->
             Mediator.handle_response(
               {:error,
                %HTTPoison.Error{
                  id: nil,
                  reason: :timeout
                }},
               %{module: nil, retries: 0}
             )
           end) =~ "[warning]"
  end
  # NOTE(review): capture_log/1 returns a string, and any string is truthy in
  # Elixir, so this assertion can never fail. To match the test's name it
  # should probably be `=~ "[error]"` like the test above.
  test "logs an unknown error as an error" do
    assert capture_log(fn ->
             Mediator.handle_response(
               {:error,
                %HTTPoison.Error{
                  id: nil,
                  reason: :heat_death_of_universe
                }},
               %{module: nil, retries: 0}
             )
           end)
  end
end
| 26.40625 | 50 | 0.486391 |
e858df72f6494dbb06ad6ec5015935c0319280cb | 213 | ex | Elixir | apps/wechat_mp/lib/wechat_mp/api/model/access_token.ex | secretworry/exwechat | 2d3a8bf03135eebd58452122c2f7b3718b5f5b3d | [
"Apache-2.0"
] | null | null | null | apps/wechat_mp/lib/wechat_mp/api/model/access_token.ex | secretworry/exwechat | 2d3a8bf03135eebd58452122c2f7b3718b5f5b3d | [
"Apache-2.0"
] | null | null | null | apps/wechat_mp/lib/wechat_mp/api/model/access_token.ex | secretworry/exwechat | 2d3a8bf03135eebd58452122c2f7b3718b5f5b3d | [
"Apache-2.0"
] | null | null | null | defmodule WechatMP.Api.Model.AccessToken do
  use WechatBase.Api.Model.JsonResponse
  # JSON response model for the WeChat MP OAuth access-token endpoint; one
  # field per key in the token payload.
  model do
    field :access_token
    field :expires_in
    field :refresh_token
    field :openid
    field :scope
  end
end
| 16.384615 | 43 | 0.7277 |
e858e22f8b3de2964e597531be0e2e90dc58bebd | 2,011 | ex | Elixir | lib/mix/lib/mix/tasks/compile.protocols.ex | patrickgombert/elixir | dc4b7c89ea30a9734d9f1cbf3bb1612fada498d4 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/compile.protocols.ex | patrickgombert/elixir | dc4b7c89ea30a9734d9f1cbf3bb1612fada498d4 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/compile.protocols.ex | patrickgombert/elixir | dc4b7c89ea30a9734d9f1cbf3bb1612fada498d4 | [
"Apache-2.0"
] | 1 | 2020-12-07T08:04:16.000Z | 2020-12-07T08:04:16.000Z | defmodule Mix.Tasks.Compile.Protocols do
  use Mix.Task
  @recursive true
  @shortdoc "Consolidates all protocols in all paths"
  @moduledoc ~S"""
  Consolidates all protocols in all paths.
  This module consolidates all protocols in the code path
  and output the new binary files to the given directory
  (defaults to "consolidated").
  A new directory will be created with the consolidated
  protocol versions in the build directory for the given
  environment. Simply add it to your loadpath to make use
  of it according to your MIX_ENV:
      $ elixir -pa _build/MIX_ENV/consolidated -S mix run
  You can verify a protocol is consolidated by checking
  its attributes:
      $ iex -pa _build/MIX_ENV/consolidated -S mix run
      iex> Protocol.consolidated?(Enumerable)
      true
  """
  # Entry point: compile first, then consolidate every protocol found on the
  # (non-OTP) code path into the output dir (-o/--output overrides default).
  def run(args) do
    Mix.Task.run "compile", args
    {opts, _, _} = OptionParser.parse(args, switches: [output: :string], aliases: [o: :output])
    paths = filter_otp(:code.get_path, :code.lib_dir)
    paths
    |> Protocol.extract_protocols
    |> consolidate(paths, opts[:output] || Path.join(Mix.Project.build_path, "consolidated"))
    :ok
  end
  # Drops code-path entries that live under the OTP installation; OTP's own
  # modules are never consolidated.
  defp filter_otp(paths, otp) do
    Enum.filter(paths, &(not :lists.prefix(&1, otp)))
  end
  # Writes one consolidated .beam per protocol into `output`, reporting
  # progress through the Mix shell.
  defp consolidate(protocols, paths, output) do
    File.mkdir_p!(output)
    _ = for protocol <- protocols do
      impls = Protocol.extract_impls(protocol, paths)
      maybe_reload(protocol)
      {:ok, binary} = Protocol.consolidate(protocol, impls)
      File.write!(Path.join(output, "#{protocol}.beam"), binary)
      Mix.shell.info "Consolidated #{inspect protocol}"
    end
    relative = Path.relative_to_cwd(output)
    Mix.shell.info "Consolidated protocols written to #{relative}"
  end
  # Purges a module that was loaded from somewhere other than a .beam file
  # (e.g. compiled in-memory), so the consolidated version can be loaded.
  defp maybe_reload(module) do
    case :code.which(module) do
      :non_existing ->
        module
      file ->
        unless Path.extname(file) == ".beam" do
          :code.purge(module)
          :code.delete(module)
        end
    end
  end
end
| 27.175676 | 95 | 0.679761 |
e858eba0d3d6ceef40c16efbc2cd4918b5a53f5c | 2,026 | exs | Elixir | test/swoosh/attachment_test.exs | henb/swoosh | 54534c411d1b9be46d94c2f75e102098a3ad28ea | [
"MIT"
] | null | null | null | test/swoosh/attachment_test.exs | henb/swoosh | 54534c411d1b9be46d94c2f75e102098a3ad28ea | [
"MIT"
] | null | null | null | test/swoosh/attachment_test.exs | henb/swoosh | 54534c411d1b9be46d94c2f75e102098a3ad28ea | [
"MIT"
] | null | null | null | defmodule Swoosh.AttachmentTest do
  use ExUnit.Case
  alias Swoosh.Attachment
  # Covers Attachment.new/1,2 construction from a bare path, from a
  # %Plug.Upload{}, and with filename/content_type/type overrides.
  test "create an attachment" do
    attachment = Attachment.new("/data/file")
    assert attachment.content_type == "application/octet-stream"
    assert attachment.filename == "file"
    assert attachment.path == "/data/file"
  end
  test "create an attachment with an unknown content type" do
    attachment = Attachment.new("/data/unknown-file")
    assert attachment.content_type == "application/octet-stream"
  end
  test "create an attachment with a specified file name" do
    attachment = Attachment.new("/data/file", filename: "my-test-name.doc")
    assert attachment.filename == "my-test-name.doc"
  end
  test "create an attachment with a specified content type" do
    attachment = Attachment.new("/data/file", content_type: "application/msword")
    assert attachment.content_type == "application/msword"
  end
  test "create an attachment from a Plug Upload struct" do
    path = "/data/uuid-random"
    upload = %Plug.Upload{filename: "imaginary.zip",
                          content_type: "application/zip",
                          path: path}
    attachment = Attachment.new(upload)
    assert attachment.content_type == "application/zip"
    assert attachment.filename == "imaginary.zip"
    assert attachment.path == path
  end
  test "create an attachment from a Plug Upload struct with overrides" do
    path = "/data/uuid-random"
    upload = %Plug.Upload{filename: "imaginary.zip",
                          content_type: "application/zip",
                          path: path}
    # Explicit options take precedence over the upload struct's own fields.
    attachment = Attachment.new(upload, filename: "real.zip", content_type: "application/other")
    assert attachment.content_type == "application/other"
    assert attachment.filename == "real.zip"
    assert attachment.path == path
  end
  test "create an attachment that should be sent as an inline-attachment" do
    attachment = Attachment.new("/data/file.png", type: :inline)
    assert attachment.type == :inline
  end
end
| 36.836364 | 96 | 0.684107 |
e8590b95f16e5b8efa1ecaf672ba415efe8bf0b6 | 114 | ex | Elixir | lib/suomidev_web/views/auth_view.ex | yliaho/suomi.dev | 4e1946adbeb67cebdc1b7e2009ddecf66497a9c7 | [
"MIT"
] | 19 | 2020-08-24T02:50:26.000Z | 2021-05-31T20:54:30.000Z | lib/suomidev_web/views/auth_view.ex | yliaho/suomi.dev | 4e1946adbeb67cebdc1b7e2009ddecf66497a9c7 | [
"MIT"
] | 2 | 2020-08-24T06:59:46.000Z | 2020-08-26T09:51:40.000Z | lib/suomidev_web/views/auth_view.ex | yliaho/suomi.dev | 4e1946adbeb67cebdc1b7e2009ddecf66497a9c7 | [
"MIT"
] | 6 | 2020-08-26T02:52:48.000Z | 2022-03-08T12:55:51.000Z | defmodule SuomidevWeb.AuthView do
  use SuomidevWeb, :view
  # Auth templates set no page title; returning nil presumably lets the
  # layout fall back to its default title — confirm against the layout.
  def title(_template, _assigns) do
    nil
  end
end
| 14.25 | 35 | 0.736842 |
e85910f4ce1264eb4bef0891632650934d7a7b8d | 675 | exs | Elixir | dei_app/config/test.exs | mbta/DEI-dashboard | 7446c1d5497b388f337df554ef1109b8b9b0f606 | [
"MIT"
] | 1 | 2020-09-18T19:41:07.000Z | 2020-09-18T19:41:07.000Z | dei_app/config/test.exs | mbta/DEI-dashboard | 7446c1d5497b388f337df554ef1109b8b9b0f606 | [
"MIT"
] | 4 | 2020-07-23T02:35:33.000Z | 2020-09-21T14:55:32.000Z | dei_app/config/test.exs | mbta/DEI-dashboard | 7446c1d5497b388f337df554ef1109b8b9b0f606 | [
"MIT"
] | null | null | null | use Mix.Config
# NOTE(review): `use Mix.Config` is deprecated; newer Elixir prefers
# `import Config` at the top of config files.
# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
config :dei_app, DeiApp.Repo,
  username: "dei_user",
  password: "password",
  database: "dei_dashboard_test",
  hostname: "127.0.0.1",
  show_sensitive_data_on_connection_error: true,
  # SQL sandbox pool so each test gets an isolated, rolled-back connection.
  pool: Ecto.Adapters.SQL.Sandbox
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :dei_app, DeiAppWeb.Endpoint,
  http: [port: 4002],
  server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 28.125 | 58 | 0.752593 |
e8592b6e04d5dba01cafdcaa65d0ac58a1a46d7b | 3,127 | exs | Elixir | host_core/test/host_core/wasmcloud/native_test.exs | sleipnir/wasmcloud-otp | aef10f2f07257e65c527be6030006aaed2b85ff9 | [
"Apache-2.0"
] | null | null | null | host_core/test/host_core/wasmcloud/native_test.exs | sleipnir/wasmcloud-otp | aef10f2f07257e65c527be6030006aaed2b85ff9 | [
"Apache-2.0"
] | null | null | null | host_core/test/host_core/wasmcloud/native_test.exs | sleipnir/wasmcloud-otp | aef10f2f07257e65c527be6030006aaed2b85ff9 | [
"Apache-2.0"
] | null | null | null | defmodule HostCore.WasmCloud.NativeTest do
  # Expected claims for the published httpserver provider used as a fixture
  # throughout these tests (key, link name, contract id, OCI ref, issuer).
  @httpserver_key "VAG3QITQQ2ODAOWB5TTQSDJ53XK3SHBEIFNK4AYJ5RKAX2UNSCAPHA5M"
  @httpserver_link "default"
  @httpserver_contract "wasmcloud:httpserver"
  @httpserver_oci "wasmcloud.azurecr.io/httpserver:0.13.1"
  @official_issuer "ACOJJN6WUP4ODD75XEBKKTCCUJJCY5ZKQ56XVKYK4BEJWGVAOOQHZMCW"
  @httpserver_vendor "wasmCloud"
  # async: false — these tests hit the network (OCI registry) and the NIF.
  use ExUnit.Case, async: false
  test "retrieves provider archive from OCI image" do
    {:ok, bytes} = HostCore.WasmCloud.Native.get_oci_bytes(@httpserver_oci, false, [])
    bytes = bytes |> IO.iodata_to_binary()
    {:ok, par} = HostCore.WasmCloud.Native.ProviderArchive.from_bytes(bytes)
    assert par.claims.public_key == @httpserver_key
    assert par.claims.issuer == @official_issuer
    assert par.claims.version == "0.13.0"
    # The extracted native binary differs per OS, so the expected size does too.
    target_bytes =
      case :os.type() do
        {:unix, :darwin} ->
          7_827_360
        {:unix, _linux} ->
          7_916_857
        {:win32, :nt} ->
          7_875_072
      end
    assert byte_size(par.target_bytes |> IO.iodata_to_binary()) == target_bytes
    assert par.contract_id == @httpserver_contract
    assert par.vendor == @httpserver_vendor
  end
  test "generates seed keys" do
    # Server keys are "N..." public / "SN..." seed in nkeys encoding.
    {pub, seed} = HostCore.WasmCloud.Native.generate_key(:server)
    assert String.starts_with?(pub, "N")
    assert String.starts_with?(seed, "SN")
  end
  test "produces and validates invocation bytes" do
    {pub, seed} = HostCore.WasmCloud.Native.generate_key(:cluster)
    # Minimal msgpack-encoded HTTP request payload for the invocation body.
    req =
      %{
        body: "hello",
        header: %{},
        path: "/",
        method: "GET"
      }
      |> Msgpax.pack!()
      |> IO.iodata_to_binary()
    inv =
      HostCore.WasmCloud.Native.generate_invocation_bytes(
        seed,
        "system",
        :provider,
        @httpserver_key,
        @httpserver_contract,
        @httpserver_link,
        "HandleRequest",
        req
      )
    # Signed with `seed`, so validation against its public key must succeed.
    res = HostCore.WasmCloud.Native.validate_antiforgery(inv |> IO.iodata_to_binary(), [pub])
    assert res == {}
    decinv = inv |> Msgpax.unpack!()
    # Rust struct is converted to map of strings
    assert decinv["host_id"] == pub
    assert decinv["origin"]["public_key"] == "system"
    assert decinv["target"]["public_key"] == @httpserver_key
  end
  test "validate antiforgery rejects bad issuer" do
    {_pub, seed} = HostCore.WasmCloud.Native.generate_key(:cluster)
    req =
      %{
        body: "hello",
        header: %{},
        path: "/",
        method: "GET"
      }
      |> Msgpax.pack!()
      |> IO.iodata_to_binary()
    inv =
      HostCore.WasmCloud.Native.generate_invocation_bytes(
        seed,
        "system",
        :provider,
        @httpserver_key,
        @httpserver_contract,
        @httpserver_link,
        "HandleRequest",
        req
      )
    # Validation list contains only a key that did not sign the invocation,
    # so anti-forgery must reject it with the issuer error below.
    res =
      HostCore.WasmCloud.Native.validate_antiforgery(inv |> IO.iodata_to_binary(), [
        "CSUPERBADKEYIAMAMALICIOUSACTOR"
      ])
    assert res ==
             {:error,
              "Validation of invocation/AF token failed: Issuer of this invocation is not among the list of valid issuers"}
  end
end
| 26.956897 | 123 | 0.631916 |
e8592c95ca6c32fa939b1741dc5fafe3b4abdb08 | 3,903 | ex | Elixir | clients/monitoring/lib/google_api/monitoring/v3/model/http_check.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/monitoring/lib/google_api/monitoring/v3/model/http_check.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/monitoring/lib/google_api/monitoring/v3/model/http_check.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Monitoring.V3.Model.HttpCheck do
@moduledoc """
Information involved in an HTTP/HTTPS Uptime check request.
## Attributes
* `authInfo` (*type:* `GoogleApi.Monitoring.V3.Model.BasicAuthentication.t`, *default:* `nil`) - The authentication information. Optional when creating an HTTP check; defaults to empty.
* `headers` (*type:* `map()`, *default:* `nil`) - The list of headers to send as part of the Uptime check request. If two headers have the same key and different values, they should be entered as a single header, with the value being a comma-separated list of all the desired values as described at https://www.w3.org/Protocols/rfc2616/rfc2616.txt (page 31). Entering two separate headers with the same key in a Create call will cause the first to be overwritten by the second. The maximum number of headers allowed is 100.
* `maskHeaders` (*type:* `boolean()`, *default:* `nil`) - Boolean specifiying whether to encrypt the header information. Encryption should be specified for any headers related to authentication that you do not wish to be seen when retrieving the configuration. The server will be responsible for encrypting the headers. On Get/List calls, if mask_headers is set to true then the headers will be obscured with ******.
* `path` (*type:* `String.t`, *default:* `nil`) - Optional (defaults to "/"). The path to the page against which to run the check. Will be combined with the host (specified within the monitored_resource) and port to construct the full URL. If the provided path does not begin with "/", a "/" will be prepended automatically.
* `port` (*type:* `integer()`, *default:* `nil`) - Optional (defaults to 80 when use_ssl is false, and 443 when use_ssl is true). The TCP port on the HTTP server against which to run the check. Will be combined with host (specified within the monitored_resource) and path to construct the full URL.
* `useSsl` (*type:* `boolean()`, *default:* `nil`) - If true, use HTTPS instead of HTTP to run the check.
* `validateSsl` (*type:* `boolean()`, *default:* `nil`) - Boolean specifying whether to include SSL certificate validation as a part of the Uptime check. Only applies to checks where monitored_resource is set to uptime_url. If use_ssl is false, setting validate_ssl to true has no effect.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:authInfo => GoogleApi.Monitoring.V3.Model.BasicAuthentication.t(),
:headers => map(),
:maskHeaders => boolean(),
:path => String.t(),
:port => integer(),
:useSsl => boolean(),
:validateSsl => boolean()
}
field(:authInfo, as: GoogleApi.Monitoring.V3.Model.BasicAuthentication)
field(:headers, type: :map)
field(:maskHeaders)
field(:path)
field(:port)
field(:useSsl)
field(:validateSsl)
end
defimpl Poison.Decoder, for: GoogleApi.Monitoring.V3.Model.HttpCheck do
def decode(value, options) do
GoogleApi.Monitoring.V3.Model.HttpCheck.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Monitoring.V3.Model.HttpCheck do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 60.046154 | 527 | 0.726621 |
e859388566d9e9c70fda595cc3573de4f23f8066 | 21,720 | exs | Elixir | test/finch_test.exs | c4710n/finch | f07ef3063b2778776ea847bab45fc7b566d02668 | [
"MIT"
] | 71 | 2021-10-16T09:55:02.000Z | 2022-03-26T08:42:59.000Z | test/finch_test.exs | c4710n/finch | f07ef3063b2778776ea847bab45fc7b566d02668 | [
"MIT"
] | 21 | 2021-10-16T07:58:51.000Z | 2022-03-28T12:27:40.000Z | test/finch_test.exs | c4710n/finch | f07ef3063b2778776ea847bab45fc7b566d02668 | [
"MIT"
] | 20 | 2021-10-16T09:55:06.000Z | 2022-03-27T13:21:27.000Z | defmodule FinchTest do
use FinchCase, async: true
doctest Finch
import ExUnit.CaptureIO
alias Finch.Response
alias Finch.MockSocketServer
describe "start_link/1" do
test "raises if :name is not provided" do
assert_raise(ArgumentError, ~r/must supply a name/, fn -> Finch.start_link([]) end)
end
test "max_idle_time is deprecated", %{finch_name: finch_name} do
msg =
capture_io(:stderr, fn ->
start_supervised!({Finch, name: finch_name, pools: %{default: [max_idle_time: 1_000]}})
end)
assert String.contains?(
msg,
":max_idle_time is deprecated. Use :conn_max_idle_time instead."
)
end
end
describe "pool configuration" do
test "unconfigured", %{bypass: bypass, finch_name: finch_name} do
start_supervised!({Finch, name: finch_name})
expect_any(bypass)
{:ok, %Response{}} = Finch.build(:get, endpoint(bypass)) |> Finch.request(finch_name)
assert [_pool] = get_pools(finch_name, shp(bypass))
{:ok, %Response{}} = Finch.build(:get, endpoint(bypass)) |> Finch.request(finch_name)
end
test "default can be configured", %{bypass: bypass, finch_name: finch_name} do
{:ok, _} =
Finch.start_link(
name: finch_name,
pools: %{default: [count: 5, size: 5]}
)
expect_any(bypass)
{:ok, %Response{}} = Finch.build("GET", endpoint(bypass)) |> Finch.request(finch_name)
pools = get_pools(finch_name, shp(bypass))
assert length(pools) == 5
end
test "TLS options will be dropped from default if it connects to http",
%{bypass: bypass, finch_name: finch_name} do
{:ok, _} =
Finch.start_link(
name: finch_name,
pools: %{
default: [
count: 5,
size: 5,
conn_opts: [transport_opts: [verify: :verify_none]]
]
}
)
expect_any(bypass)
# you will get badarg error if the verify option is applied to the connection.
assert {:ok, %Response{}} =
Finch.build("GET", endpoint(bypass)) |> Finch.request(finch_name)
end
test "raises when invalid configuration is provided", %{finch_name: finch_name} do
assert_raise(
NimbleOptions.ValidationError,
~r/expected :count to be a positive integer/,
fn ->
Finch.start_link(name: finch_name, pools: %{default: [count: :dog]})
end
)
assert_raise(ArgumentError, ~r/invalid destination/, fn ->
Finch.start_link(name: finch_name, pools: %{invalid: [count: 5, size: 5]})
end)
end
test "pools are started based on only the {scheme, host, port} of the URLs",
%{bypass: bypass, finch_name: finch_name} do
other_bypass = Bypass.open()
default_bypass = Bypass.open()
unix_socket = {:local, "/my/unix/socket"}
start_supervised!(
{Finch,
name: finch_name,
pools: %{
endpoint(bypass, "/some-path") => [count: 5, size: 5],
endpoint(other_bypass, "/some-other-path") => [count: 10, size: 10],
{:http, unix_socket} => [count: 5, size: 5],
{:https, unix_socket} => [count: 10, size: 10]
}}
)
assert get_pools(finch_name, shp(bypass)) |> length() == 5
assert get_pools(finch_name, shp(other_bypass)) |> length() == 10
assert get_pools(finch_name, shp({:http, unix_socket})) |> length() == 5
assert get_pools(finch_name, shp({:https, unix_socket})) |> length() == 10
# no pool has been started for this unconfigured shp
assert get_pools(finch_name, shp(default_bypass)) |> length() == 0
end
test "pools with an invalid URL cannot be started", %{finch_name: finch_name} do
assert_raise(ArgumentError, ~r/scheme is required for url: example.com/, fn ->
Finch.start_link(
name: finch_name,
pools: %{
"example.com" => [count: 5, size: 5]
}
)
end)
assert_raise(ArgumentError, ~r/scheme is required for url: example/, fn ->
Finch.start_link(
name: finch_name,
pools: %{
"example" => [count: 5, size: 5]
}
)
end)
assert_raise(ArgumentError, ~r/scheme is required for url: :443/, fn ->
Finch.start_link(
name: finch_name,
pools: %{
":443" => [count: 5, size: 5]
}
)
end)
end
test "impossible to accidentally start multiple pools when they are dynamically started",
%{
bypass: bypass,
finch_name: finch_name
} do
start_supervised!(
{Finch,
name: finch_name,
pools: %{
default: [count: 5, size: 5]
}}
)
expect_any(bypass)
Task.async_stream(
1..50,
fn _ -> Finch.build(:get, endpoint(bypass)) |> Finch.request(finch_name) end,
max_concurrency: 50
)
|> Stream.run()
assert get_pools(finch_name, shp(bypass)) |> length() == 5
end
end
describe "build/5" do
test "raises if unsupported atom request method provided", %{bypass: bypass} do
assert_raise ArgumentError, ~r/got unsupported atom method :gimme/, fn ->
Finch.build(:gimme, endpoint(bypass))
end
end
test "raises when requesting a URL with an invalid scheme" do
assert_raise ArgumentError, ~r"invalid scheme \"ftp\" for url: ftp://example.com", fn ->
Finch.build(:get, "ftp://example.com")
end
end
end
describe "request/3" do
test "successful get request, with query string", %{bypass: bypass, finch_name: finch_name} do
start_supervised!({Finch, name: finch_name})
query_string = "query=value"
Bypass.expect_once(bypass, "GET", "/", fn conn ->
assert conn.query_string == query_string
Plug.Conn.send_resp(conn, 200, "OK")
end)
assert {:ok, %{status: 200}} =
Finch.build(:get, endpoint(bypass, "?" <> query_string))
|> Finch.request(finch_name)
end
test "successful post request, with body and query string", %{
bypass: bypass,
finch_name: finch_name
} do
start_supervised!({Finch, name: finch_name})
req_body = "{\"response\":\"please\"}"
response_body = "{\"right\":\"here\"}"
header_key = "content-type"
header_val = "application/json"
query_string = "query=value"
Bypass.expect_once(bypass, "POST", "/", fn conn ->
assert conn.query_string == query_string
assert {:ok, ^req_body, conn} = Plug.Conn.read_body(conn)
conn
|> Plug.Conn.put_resp_header(header_key, header_val)
|> Plug.Conn.send_resp(200, response_body)
end)
assert {:ok, %Response{status: 200, headers: headers, body: ^response_body}} =
Finch.build(
:post,
endpoint(bypass, "?" <> query_string),
[{header_key, header_val}],
req_body
)
|> Finch.request(finch_name)
assert {"content-type", "application/json"} in headers
end
test "successful post HTTP/1 streaming request, with streaming body and query string",
%{
bypass: bypass,
finch_name: finch_name
} do
start_supervised!({Finch, name: finch_name})
req_stream = Stream.map(1..10_000, fn _ -> "please" end)
req_body = req_stream |> Enum.join("")
response_body = "{\"right\":\"here\"}"
header_key = "content-type"
header_val = "application/json"
query_string = "query=value"
Bypass.expect_once(bypass, "POST", "/", fn conn ->
assert conn.query_string == query_string
assert {:ok, ^req_body, conn} = Plug.Conn.read_body(conn)
conn
|> Plug.Conn.put_resp_header(header_key, header_val)
|> Plug.Conn.send_resp(200, response_body)
end)
assert {:ok, %Response{status: 200, headers: headers, body: ^response_body}} =
Finch.build(
:post,
endpoint(bypass, "?" <> query_string),
[{header_key, header_val}],
{:stream, req_stream}
)
|> Finch.request(finch_name)
assert {"content-type", "application/json"} in headers
end
test "successful post HTTP/2 streaming request, with streaming body and query string",
%{
bypass: bypass,
finch_name: finch_name
} do
start_supervised!(
{Finch,
name: finch_name,
pools: %{
default: [
protocol: :http2,
count: 1,
conn_opts: [
transport_opts: [
verify: :verify_none
]
]
]
}}
)
data = :crypto.strong_rand_bytes(1_000)
# 1MB of data
req_stream = Stream.repeatedly(fn -> data end) |> Stream.take(1_000)
req_body = req_stream |> Enum.join("")
response_body = data
header_key = "content-type"
header_val = "application/octet-stream"
query_string = "query=value"
Bypass.expect_once(bypass, "POST", "/", fn conn ->
assert conn.query_string == query_string
assert {:ok, ^req_body, conn} = Plug.Conn.read_body(conn)
conn
|> Plug.Conn.put_resp_header(header_key, header_val)
|> Plug.Conn.send_resp(200, response_body)
end)
assert {:ok, %Response{status: 200, headers: headers, body: ^response_body}} =
Finch.build(
:post,
endpoint(bypass, "?" <> query_string),
[{header_key, header_val}],
{:stream, req_stream}
)
|> Finch.request(finch_name)
assert {header_key, header_val} in headers
end
test "successful post HTTP/2 with a large binary body",
%{
bypass: bypass,
finch_name: finch_name
} do
start_supervised!(
{Finch,
name: finch_name,
pools: %{
default: [
protocol: :http2,
count: 1,
conn_opts: [
transport_opts: [
verify: :verify_none
]
]
]
}}
)
data = :crypto.strong_rand_bytes(1_000)
# 2MB of data
req_body = :binary.copy(data, 2_000)
response_body = data
header_key = "content-type"
header_val = "application/octet-stream"
query_string = "query=value"
Bypass.expect_once(bypass, "POST", "/", fn conn ->
assert conn.query_string == query_string
assert {:ok, ^req_body, conn} = Plug.Conn.read_body(conn)
conn
|> Plug.Conn.put_resp_header(header_key, header_val)
|> Plug.Conn.send_resp(200, response_body)
end)
assert {:ok, %Response{status: 200, headers: headers, body: ^response_body}} =
Finch.build(
:post,
endpoint(bypass, "?" <> query_string),
[{header_key, header_val}],
req_body
)
|> Finch.request(finch_name)
assert {header_key, header_val} in headers
end
test "successful post HTTP/2 with a large iolist body",
%{
bypass: bypass,
finch_name: finch_name
} do
start_supervised!(
{Finch,
name: finch_name,
pools: %{
default: [
protocol: :http2,
count: 1,
conn_opts: [
transport_opts: [
verify: :verify_none
]
]
]
}}
)
# 2MB of data
req_body = List.duplicate(125, 2_000_000)
req_body_binary = IO.iodata_to_binary(req_body)
response_body = req_body_binary
header_key = "content-type"
header_val = "application/octet-stream"
query_string = "query=value"
Bypass.expect_once(bypass, "POST", "/", fn conn ->
assert conn.query_string == query_string
assert {:ok, ^req_body_binary, conn} = Plug.Conn.read_body(conn)
conn
|> Plug.Conn.put_resp_header(header_key, header_val)
|> Plug.Conn.send_resp(200, response_body)
end)
assert {:ok, %Response{status: 200, headers: headers, body: ^response_body}} =
Finch.build(
:post,
endpoint(bypass, "?" <> query_string),
[{header_key, header_val}],
req_body
)
|> Finch.request(finch_name)
assert {header_key, header_val} in headers
end
test "successful get request, with query string, when given a %URI{}",
%{bypass: bypass, finch_name: finch_name} do
start_supervised!({Finch, name: finch_name})
query_string = "query=value"
uri = URI.parse(endpoint(bypass, "?" <> query_string))
Bypass.expect_once(bypass, "GET", "/", fn conn ->
assert conn.query_string == query_string
Plug.Conn.send_resp(conn, 200, "OK")
end)
assert {:ok, %{status: 200}} = Finch.build(:get, uri) |> Finch.request(finch_name)
end
test "successful get request to a unix socket", %{finch_name: finch_name} do
{:ok, {:local, socket_path}} = MockSocketServer.start()
start_supervised!({Finch, name: finch_name})
assert {:ok, %Response{status: 200}} =
Finch.build(:get, "http://localhost/", [], nil, unix_socket: socket_path)
|> Finch.request(finch_name)
end
@tag :capture_log
test "successful get request to a unix socket with tls", %{finch_name: finch_name} do
{:ok, socket_address = {:local, socket_path}} = MockSocketServer.start(ssl?: true)
start_supervised!(
{Finch,
name: finch_name,
pools: %{
{:https, socket_address} => [conn_opts: [transport_opts: [verify: :verify_none]]]
}}
)
assert {:ok, %Response{status: 200}} =
Finch.build(:get, "https://localhost/", [], nil, unix_socket: socket_path)
|> Finch.request(finch_name)
end
test "properly handles connection: close", %{bypass: bypass, finch_name: finch_name} do
start_supervised!({Finch, name: finch_name})
Bypass.expect(bypass, fn conn ->
conn
|> Plug.Conn.put_resp_header("connection", "close")
|> Plug.Conn.send_resp(200, "OK")
end)
request =
Finch.build(
:get,
endpoint(bypass),
[{"connection", "keep-alive"}]
)
for _ <- 1..10 do
assert {:ok, %Response{status: 200, body: "OK"}} = Finch.request(request, finch_name)
end
end
test "returns error when request times out", %{bypass: bypass, finch_name: finch_name} do
start_supervised!({Finch, name: finch_name})
timeout = 100
Bypass.expect(bypass, fn conn ->
Process.sleep(timeout + 50)
Plug.Conn.send_resp(conn, 200, "delayed")
end)
assert {:error, %{reason: :timeout}} =
Finch.build(:get, endpoint(bypass))
|> Finch.request(finch_name, receive_timeout: timeout)
assert {:ok, %Response{}} =
Finch.build(:get, endpoint(bypass))
|> Finch.request(finch_name, receive_timeout: timeout * 2)
end
test "returns error when requesting bad address", %{finch_name: finch_name} do
start_supervised!({Finch, name: finch_name})
assert {:error, %{reason: :nxdomain}} =
Finch.build(:get, "http://idontexist.wat") |> Finch.request(finch_name)
end
test "worker exits when pool times out", %{bypass: bypass, finch_name: finch_name} do
start_supervised!({Finch, name: finch_name})
expect_any(bypass)
{:ok, %Response{}} = Finch.build(:get, endpoint(bypass)) |> Finch.request(finch_name)
:sys.suspend(finch_name)
assert {:timeout, _} =
catch_exit(
Finch.build(:get, endpoint(bypass))
|> Finch.request(finch_name, pool_timeout: 0)
)
:sys.resume(finch_name)
assert {:ok, %Response{}} =
Finch.build(:get, endpoint(bypass)) |> Finch.request(finch_name, pool_timeout: 1)
end
end
describe "connection options" do
test "are passed through to the conn", %{bypass: bypass} do
expect_any(bypass)
start_supervised!({Finch, name: H1Finch, pools: %{default: [protocol: :http1]}})
assert {:ok, _} = Finch.build(:get, endpoint(bypass)) |> Finch.request(H1Finch)
stop_supervised(Finch)
end
test "caller is unable to override mode", %{bypass: bypass, finch_name: finch_name} do
start_supervised!(
{Finch, name: finch_name, pools: %{default: [conn_opts: [mode: :active]]}}
)
expect_any(bypass)
assert {:ok, _} = Finch.build(:get, endpoint(bypass)) |> Finch.request(finch_name)
end
end
describe "stream/5" do
test "successful get request, with query string", %{bypass: bypass, finch_name: finch_name} do
start_supervised!({Finch, name: finch_name})
query_string = "query=value"
Bypass.expect_once(bypass, "GET", "/", fn conn ->
assert conn.query_string == query_string
Plug.Conn.send_resp(conn, 200, "OK")
end)
acc = {nil, [], ""}
fun = fn
{:status, value}, {_, headers, body} -> {value, headers, body}
{:headers, value}, {status, headers, body} -> {status, headers ++ value, body}
{:data, value}, {status, headers, body} -> {status, headers, body <> value}
end
assert {:ok, {200, [_ | _], "OK"}} =
Finch.build(:get, endpoint(bypass, "?" <> query_string))
|> Finch.stream(finch_name, acc, fun)
end
test "HTTP/1 with atom accumulator, illustrating that the type/shape of the accumulator is not important",
%{bypass: bypass, finch_name: finch_name} do
start_supervised!({Finch, name: finch_name})
expect_any(bypass)
fun = fn _msg, :ok -> :ok end
assert {:ok, :ok} =
Finch.build(:get, endpoint(bypass))
|> Finch.stream(finch_name, :ok, fun)
end
test "HTTP/2 with atom accumulator, illustrating that the type/shape of the accumulator is not important",
%{bypass: bypass, finch_name: finch_name} do
start_supervised!(
{Finch,
name: finch_name,
pools: %{
default: [
protocol: :http2,
count: 1,
conn_opts: [
transport_opts: [
verify: :verify_none
]
]
]
}}
)
expect_any(bypass)
fun = fn _msg, :ok -> :ok end
assert {:ok, :ok} =
Finch.build(:get, endpoint(bypass))
|> Finch.stream(finch_name, :ok, fun)
end
test "successful post request, with query string and string request body",
%{bypass: bypass, finch_name: finch_name} do
start_supervised!({Finch, name: finch_name})
query_string = "query=value"
req_headers = [{"content-type", "application/json"}]
req_body = "{hello:\"world\"}"
resp_body = "{hi:\"there\"}"
Bypass.expect_once(bypass, "POST", "/", fn conn ->
assert conn.query_string == query_string
Plug.Conn.send_resp(conn, 200, resp_body)
end)
acc = {nil, [], ""}
fun = fn
{:status, value}, {_, headers, body} -> {value, headers, body}
{:headers, value}, {status, headers, body} -> {status, headers ++ value, body}
{:data, value}, {status, headers, body} -> {status, headers, body <> value}
end
assert {:ok, {200, [_ | _], ^resp_body}} =
Finch.build(:post, endpoint(bypass, "?" <> query_string), req_headers, req_body)
|> Finch.stream(finch_name, acc, fun)
end
test "successful post request, with query string and streaming request body",
%{
bypass: bypass,
finch_name: finch_name
} do
start_supervised!({Finch, name: finch_name})
query_string = "query=value"
req_headers = [{"content-type", "application/json"}]
req_stream = Stream.map(1..10_000, fn _ -> "please" end)
resp_body = "{hi:\"there\"}"
Bypass.expect_once(bypass, "POST", "/", fn conn ->
assert conn.query_string == query_string
Plug.Conn.send_resp(conn, 200, resp_body)
end)
acc = {nil, [], ""}
fun = fn
{:status, value}, {_, headers, body} -> {value, headers, body}
{:headers, value}, {status, headers, body} -> {status, headers ++ value, body}
{:data, value}, {status, headers, body} -> {status, headers, body <> value}
end
assert {:ok, {200, [_ | _], ^resp_body}} =
Finch.build(
:post,
endpoint(bypass, "?" <> query_string),
req_headers,
{:stream, req_stream}
)
|> Finch.stream(finch_name, acc, fun)
end
end
defp get_pools(name, shp) do
Registry.lookup(name, shp)
end
defp shp(%{port: port}), do: {:http, "localhost", port}
defp shp({scheme, {:local, unix_socket}}), do: {scheme, {:local, unix_socket}, 0}
defp expect_any(bypass) do
Bypass.expect(bypass, fn conn -> Plug.Conn.send_resp(conn, 200, "OK") end)
end
end
| 31.894273 | 110 | 0.564227 |
e8593caec562e4a56ced9cb2ea854ec46e59320b | 161 | exs | Elixir | lib/modules_and_functions_1/triple.exs | mikan/elixir-practice | 624525605eb2324e0c55a4ddcb68388c0d2ecefc | [
"Apache-2.0"
] | null | null | null | lib/modules_and_functions_1/triple.exs | mikan/elixir-practice | 624525605eb2324e0c55a4ddcb68388c0d2ecefc | [
"Apache-2.0"
] | 1 | 2020-01-28T00:19:53.000Z | 2020-01-28T00:19:53.000Z | lib/modules_and_functions_1/triple.exs | mikan/elixir-practice | 624525605eb2324e0c55a4ddcb68388c0d2ecefc | [
"Apache-2.0"
] | null | null | null | defmodule Times do
def double(n), do: n * 2
def triple(n), do: n * 3
end
# Smoke-check Times.triple/1 by printing three sample multiples: 0, 3, 6.
Enum.each([0, 1, 2], fn n -> IO.puts(Times.triple(n)) end)
| 20.125 | 27 | 0.645963 |
e859468a48a9c7c6fbe74ef0bc078ed9a1b6faba | 320 | ex | Elixir | examples/simple_app/lib/simple_app/struct_example.ex | Fl4m3Ph03n1x/gradient | 60d7d3fe2ebdf68747325c1e852959f8b92fdcee | [
"Apache-2.0"
] | 75 | 2021-11-17T11:55:13.000Z | 2022-03-28T04:35:04.000Z | examples/simple_app/lib/simple_app/struct_example.ex | Fl4m3Ph03n1x/gradient | 60d7d3fe2ebdf68747325c1e852959f8b92fdcee | [
"Apache-2.0"
] | 48 | 2021-11-15T13:56:14.000Z | 2022-03-31T15:55:47.000Z | examples/simple_app/lib/simple_app/struct_example.ex | Fl4m3Ph03n1x/gradient | 60d7d3fe2ebdf68747325c1e852959f8b92fdcee | [
"Apache-2.0"
] | 6 | 2021-12-22T20:41:27.000Z | 2022-03-09T09:07:38.000Z | defmodule SimpleApp.StructExample do
defmodule SomeStruct do
defstruct [name: "John Smith", age: 25]
end
@type t :: %SomeStruct{}
@spec age(t) :: non_neg_integer
def age(%SomeStruct{age: age}), do: age
@spec mistake :: :ok
def mistake do
age(%{first: "John", last: "Smith", age: 32})
end
end
| 18.823529 | 49 | 0.64375 |
e859510237c9a57862ca47375b08bc7253e919cd | 1,021 | ex | Elixir | test/support/conn_case.ex | JosKar/iris_server | 8faa370f506a003ae3c06b23d6c827bd746368f3 | [
"MIT"
] | 17 | 2017-07-10T09:01:40.000Z | 2022-03-08T23:47:27.000Z | test/support/conn_case.ex | 7-iris/iris_server | 8faa370f506a003ae3c06b23d6c827bd746368f3 | [
"MIT"
] | 7 | 2017-07-09T16:22:13.000Z | 2018-06-09T21:34:03.000Z | test/support/conn_case.ex | JosKar/iris_server | 8faa370f506a003ae3c06b23d6c827bd746368f3 | [
"MIT"
] | null | null | null | defmodule IrisWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common datastructures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
import IrisWeb.Router.Helpers
# The default endpoint for testing
@endpoint IrisWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Iris.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Iris.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 27.594595 | 66 | 0.716944 |
e85957818606c24d0311828d07f8afb670d64a7e | 6,413 | ex | Elixir | lib/elixir/lib/macro/env.ex | RyanBard/elixir | 3e0f3b47cf26aa121470141b9a1aa55a366c066e | [
"Apache-2.0"
] | 2 | 2018-11-15T06:38:14.000Z | 2018-11-17T18:03:14.000Z | lib/elixir/lib/macro/env.ex | RyanBard/elixir | 3e0f3b47cf26aa121470141b9a1aa55a366c066e | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/macro/env.ex | RyanBard/elixir | 3e0f3b47cf26aa121470141b9a1aa55a366c066e | [
"Apache-2.0"
] | null | null | null | defmodule Macro.Env do
@moduledoc """
A struct that holds compile time environment information.
The current environment can be accessed at any time as
`__ENV__/0`. Inside macros, the caller environment can be
accessed as `__CALLER__/0`.
An instance of `Macro.Env` must not be modified by hand. If you need to
create a custom environment to pass to `Code.eval_quoted/3`, use the
following trick:
def make_custom_env do
import SomeModule, only: [some_function: 2]
alias A.B.C
__ENV__
end
You may then call `make_custom_env()` to get a struct with the desired
imports and aliases included.
It contains the following fields:
* `module` - the current module name
* `file` - the current file name as a binary
* `line` - the current line as an integer
* `function` - a tuple as `{atom, integer}`, where the first
element is the function name and the second its arity; returns
`nil` if not inside a function
* `context` - the context of the environment; it can be `nil`
(default context), `:guard` (inside a guard) or `:match` (inside a match)
* `aliases` - a list of two-element tuples, where the first
element is the aliased name and the second one the actual name
* `requires` - the list of required modules
* `functions` - a list of functions imported from each module
* `macros` - a list of macros imported from each module
* `macro_aliases` - a list of aliases defined inside the current macro
* `context_modules` - a list of modules defined in the current context
* `lexical_tracker` - PID of the lexical tracker which is responsible for
keeping user info
The following fields pertain to variable handling and must not be accessed or
relied on. To get a list of all variables, see `vars/1`:
* `current_vars`
* `unused_vars`
* `prematch_vars`
* `contextual_vars`
The following fields are deprecated and must not be accessed or relied on:
* `vars` - a list keeping all defined variables as `{var, context}`
"""
@type name_arity :: {atom, arity}
@type file :: binary
@type line :: non_neg_integer
@type aliases :: [{module, module}]
@type macro_aliases :: [{module, {term, module}}]
@type context :: :match | :guard | nil
@type requires :: [module]
@type functions :: [{module, [name_arity]}]
@type macros :: [{module, [name_arity]}]
@type context_modules :: [module]
@type lexical_tracker :: pid | nil
@type variable :: {atom, atom | term}
@typep vars :: [variable]
@typep var_type :: :term
@typep var_version :: non_neg_integer
@typep unused_vars :: %{{variable, var_version} => non_neg_integer | false}
@typep current_vars :: %{variable => {var_version, var_type}}
@typep prematch_vars :: current_vars | :warn | :raise | :pin | :apply
@typep contextual_vars :: [atom]
@type t :: %{
__struct__: __MODULE__,
module: atom,
file: file,
line: line,
function: name_arity | nil,
context: context,
requires: requires,
aliases: aliases,
functions: functions,
macros: macros,
macro_aliases: aliases,
context_modules: context_modules,
vars: vars,
unused_vars: unused_vars,
current_vars: current_vars,
prematch_vars: prematch_vars,
lexical_tracker: lexical_tracker,
contextual_vars: contextual_vars
}
# TODO: Remove :vars field on v2.0
def __struct__ do
%{
__struct__: __MODULE__,
module: nil,
file: "nofile",
line: 0,
function: nil,
context: nil,
requires: [],
aliases: [],
functions: [],
macros: [],
macro_aliases: [],
context_modules: [],
vars: [],
unused_vars: %{},
current_vars: %{},
prematch_vars: :warn,
lexical_tracker: nil,
contextual_vars: []
}
end
def __struct__(kv) do
Enum.reduce(kv, __struct__(), fn {k, v}, acc -> :maps.update(k, v, acc) end)
end
@doc """
Returns a list of variables in the current environment.
Each variable is identified by a tuple of two elements,
where the first element is the variable name as an atom
and the second element is its context, which may be an
atom or an integer.
"""
@doc since: "1.7.0"
@spec vars(t) :: [variable]
def vars(env)
def vars(%{__struct__: Macro.Env, current_vars: current_vars}) do
Map.keys(current_vars)
end
@doc """
Checks if a variable belongs to the environment.
"""
@doc since: "1.7.0"
@spec has_var?(t, variable) :: boolean()
def has_var?(env, var)
def has_var?(%{__struct__: Macro.Env, current_vars: current_vars}, var) do
Map.has_key?(current_vars, var)
end
@doc """
Returns a keyword list containing the file and line
information as keys.
"""
@spec location(t) :: keyword
def location(env)
def location(%{__struct__: Macro.Env, file: file, line: line}) do
[file: file, line: line]
end
@doc """
Returns a `Macro.Env` in the match context.
"""
@spec to_match(t) :: t
def to_match(%{__struct__: Macro.Env, context: :match} = env) do
env
end
def to_match(%{__struct__: Macro.Env, current_vars: vars} = env) do
%{env | context: :match, prematch_vars: vars}
end
@doc """
Returns whether the compilation environment is currently
inside a guard.
"""
@spec in_guard?(t) :: boolean
def in_guard?(env)
def in_guard?(%{__struct__: Macro.Env, context: context}), do: context == :guard
@doc """
Returns whether the compilation environment is currently
inside a match clause.
"""
@spec in_match?(t) :: boolean
def in_match?(env)
def in_match?(%{__struct__: Macro.Env, context: context}), do: context == :match
@doc """
Returns the environment stacktrace.
"""
@spec stacktrace(t) :: list
def stacktrace(%{__struct__: Macro.Env} = env) do
cond do
is_nil(env.module) ->
[{:elixir_compiler, :__FILE__, 1, relative_location(env)}]
is_nil(env.function) ->
[{env.module, :__MODULE__, 0, relative_location(env)}]
true ->
{name, arity} = env.function
[{env.module, name, arity, relative_location(env)}]
end
end
defp relative_location(env) do
[file: String.to_charlist(Path.relative_to_cwd(env.file)), line: env.line]
end
end
| 29.96729 | 82 | 0.645252 |
e85971c4b582c1db3c1ab00a14d9050cac53bd77 | 378 | exs | Elixir | priv/repo/migrations/20200707155254_fill_document_tokens_posts_column.exs | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 81 | 2017-11-20T01:20:22.000Z | 2022-03-05T12:04:25.000Z | priv/repo/migrations/20200707155254_fill_document_tokens_posts_column.exs | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 359 | 2017-10-15T14:40:53.000Z | 2022-01-25T13:34:20.000Z | priv/repo/migrations/20200707155254_fill_document_tokens_posts_column.exs | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 16 | 2017-11-19T13:57:40.000Z | 2022-02-07T08:13:02.000Z | defmodule Sanbase.Repo.Migrations.FillDocumentTokensPostsColumn do
use Ecto.Migration
def up do
setup()
Sanbase.Insight.Search.update_all_document_tokens()
end
def down do
:ok
end
defp setup() do
Application.ensure_all_started(:tzdata)
Application.ensure_all_started(:prometheus_ecto)
Sanbase.Prometheus.EctoInstrumenter.setup()
end
end
| 19.894737 | 66 | 0.756614 |
e85971ede6d4634a8cd54d436f91dd1d581a1782 | 562 | ex | Elixir | lib/mail_slurp_api/model/sort.ex | sumup-bank/mailslurp-client-elixir | 87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8 | [
"MIT"
] | 1 | 2021-06-17T18:07:49.000Z | 2021-06-17T18:07:49.000Z | lib/mail_slurp_api/model/sort.ex | sumup-bank/mailslurp-client-elixir | 87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8 | [
"MIT"
] | null | null | null | lib/mail_slurp_api/model/sort.ex | sumup-bank/mailslurp-client-elixir | 87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8 | [
"MIT"
] | 1 | 2021-03-16T18:55:56.000Z | 2021-03-16T18:55:56.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule MailSlurpAPI.Model.Sort do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:"empty",
:"sorted",
:"unsorted"
]
@type t :: %__MODULE__{
:"empty" => boolean() | nil,
:"sorted" => boolean() | nil,
:"unsorted" => boolean() | nil
}
end
defimpl Poison.Decoder, for: MailSlurpAPI.Model.Sort do
def decode(value, _options) do
value
end
end
| 18.733333 | 91 | 0.635231 |
e85974b5e7fe895a67232d9b8b5a6643950fc7f8 | 2,300 | ex | Elixir | clients/app_engine/lib/google_api/app_engine/v1/model/identity_aware_proxy.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/app_engine/lib/google_api/app_engine/v1/model/identity_aware_proxy.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/app_engine/lib/google_api/app_engine/v1/model/identity_aware_proxy.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AppEngine.V1.Model.IdentityAwareProxy do
  @moduledoc """
  Identity-Aware Proxy

  ## Attributes

  *   `enabled` (*type:* `boolean()`, *default:* `nil`) - Whether the serving infrastructure will authenticate and authorize all incoming requests.If true, the oauth2_client_id and oauth2_client_secret fields must be non-empty.
  *   `oauth2ClientId` (*type:* `String.t`, *default:* `nil`) - OAuth2 client ID to use for the authentication flow.
  *   `oauth2ClientSecret` (*type:* `String.t`, *default:* `nil`) - OAuth2 client secret to use for the authentication flow.For security reasons, this value cannot be retrieved via the API. Instead, the SHA-256 hash of the value is returned in the oauth2_client_secret_sha256 field.@InputOnly
  *   `oauth2ClientSecretSha256` (*type:* `String.t`, *default:* `nil`) - Hex-encoded SHA-256 hash of the client secret.@OutputOnly
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :enabled => boolean(),
          :oauth2ClientId => String.t(),
          :oauth2ClientSecret => String.t(),
          :oauth2ClientSecretSha256 => String.t()
        }

  # field/1 comes from GoogleApi.Gax.ModelBase (see `use` above); the field
  # names mirror the keys of t/0.
  field(:enabled)
  field(:oauth2ClientId)
  field(:oauth2ClientSecret)
  field(:oauth2ClientSecretSha256)
end
defimpl Poison.Decoder, for: GoogleApi.AppEngine.V1.Model.IdentityAwareProxy do
  # Field-level decoding lives in the generated model module; delegate to it.
  def decode(value, options),
    do: GoogleApi.AppEngine.V1.Model.IdentityAwareProxy.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.AppEngine.V1.Model.IdentityAwareProxy do
  # Encoding is generic across generated models; hand off to the shared base.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 41.071429 | 292 | 0.735217 |
e85979a2aed27d12f6445f8bf029927f7e6a7968 | 1,081 | exs | Elixir | config/config.exs | adanselm/exrecaptcha | 5e4ab06f791c4e54ccb510631abb4779ed34723f | [
"WTFPL"
] | 10 | 2015-10-02T12:04:05.000Z | 2020-01-09T02:50:22.000Z | config/config.exs | adanselm/exrecaptcha | 5e4ab06f791c4e54ccb510631abb4779ed34723f | [
"WTFPL"
] | 2 | 2015-08-24T09:06:29.000Z | 2018-04-06T08:35:51.000Z | config/config.exs | adanselm/exrecaptcha | 5e4ab06f791c4e54ccb510631abb4779ed34723f | [
"WTFPL"
] | 1 | 2015-08-21T10:51:55.000Z | 2015-08-21T10:51:55.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for third-
# party users, it should be done in your mix.exs file.
# Sample configuration:
config :exrecaptcha,
api_config: %{ verify_url: "http://www.google.com/recaptcha/api/verify",
public_key: "YOUR_PUBLIC_KEY",
private_key: "YOUR_PRIVATE_KEY" }
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 43.24 | 74 | 0.749306 |
e8597feb72590b5107dbcc92c26a484a8b181f1f | 1,107 | exs | Elixir | config/config.exs | staspoons/secretplace | 1270d2d40606cdde9f1196c8f4a87acb8e94efe8 | [
"MIT"
] | 1 | 2019-04-12T08:50:05.000Z | 2019-04-12T08:50:05.000Z | config/config.exs | staspoons/secretplace | 1270d2d40606cdde9f1196c8f4a87acb8e94efe8 | [
"MIT"
] | null | null | null | config/config.exs | staspoons/secretplace | 1270d2d40606cdde9f1196c8f4a87acb8e94efe8 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config

# No application configuration is set yet; this file only documents how to
# add it when needed.
#
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.

# You can configure for your application as:
#
#     config :kv, key: :value
#
# And access this configuration in your application as:
#
#     Application.get_env(:kv, :key)
#
# Or configure a 3rd-party app:
#
#     config :logger, level: :info
#

# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
#     import_config "#{Mix.env}.exs"
| 35.709677 | 73 | 0.748871 |
e85985941107e1a57b8ab38f5048059bf0059a95 | 4,055 | exs | Elixir | test/sitemapper_test.exs | kotsius/sitemapper | 67807a9c4d609a86feb7fd890f5ff48c7bb08223 | [
"MIT"
] | null | null | null | test/sitemapper_test.exs | kotsius/sitemapper | 67807a9c4d609a86feb7fd890f5ff48c7bb08223 | [
"MIT"
] | null | null | null | test/sitemapper_test.exs | kotsius/sitemapper | 67807a9c4d609a86feb7fd890f5ff48c7bb08223 | [
"MIT"
defmodule SitemapperTest do
  use ExUnit.Case
  doctest Sitemapper

  alias Sitemapper.URL

  # These tests exercise Sitemapper.generate/2 with URL counts around the
  # 50,000-URLs-per-sitemap boundary: 50,000 URLs produce one sitemap file
  # plus the index, 50,001 spill into a second sitemap file.
  # The exact iodata byte lengths asserted below pin the gzip output size
  # for the given inputs.

  test "generate with 0 URLs" do
    opts = [
      sitemap_url: "http://example.org/foo"
    ]

    # An empty stream yields no sitemap files at all (not even an index).
    elements =
      Stream.concat([])
      |> Sitemapper.generate(opts)

    assert Enum.count(elements) == 0
  end

  test "generate with complex URLs" do
    opts = [
      sitemap_url: "http://example.org/foo"
    ]

    # URLs carrying every optional field: priority, lastmod and changefreq.
    elements =
      Stream.concat([1..100])
      |> Stream.map(fn i ->
        %URL{
          loc: "http://example.com/#{i}",
          priority: 0.5,
          lastmod: ~U[2020-01-01 00:00:00Z],
          changefreq: :hourly
        }
      end)
      |> Sitemapper.generate(opts)

    # One sitemap plus the index; each element is a {filename, iodata} tuple.
    assert Enum.count(elements) == 2
    assert Enum.at(elements, 0) |> elem(0) == "sitemap-00001.xml.gz"
    assert Enum.at(elements, 0) |> elem(1) |> IO.iodata_length() == 561
    assert Enum.at(elements, 1) |> elem(0) == "sitemap.xml.gz"
    assert Enum.at(elements, 1) |> elem(1) |> IO.iodata_length() == 229
  end

  test "generate with 50,000 URLs" do
    opts = [
      sitemap_url: "http://example.org/foo"
    ]

    # Exactly at the limit: still a single sitemap file plus the index.
    elements =
      Stream.concat([1..50_000])
      |> Stream.map(fn i ->
        %URL{loc: "http://example.com/#{i}"}
      end)
      |> Sitemapper.generate(opts)

    assert Enum.count(elements) == 2
    assert Enum.at(elements, 0) |> elem(0) == "sitemap-00001.xml.gz"
    assert Enum.at(elements, 0) |> elem(1) |> IO.iodata_length() == 128_046
    assert Enum.at(elements, 1) |> elem(0) == "sitemap.xml.gz"
    assert Enum.at(elements, 1) |> elem(1) |> IO.iodata_length() == 229
  end

  test "generate with 50,001 URLs" do
    opts = [
      sitemap_url: "http://example.org/foo"
    ]

    # One past the limit: a second sitemap file appears before the index.
    elements =
      Stream.concat([1..50_001])
      |> Stream.map(fn i ->
        %URL{loc: "http://example.com/#{i}"}
      end)
      |> Sitemapper.generate(opts)

    assert Enum.count(elements) == 3
    assert Enum.at(elements, 0) |> elem(0) == "sitemap-00001.xml.gz"
    assert Enum.at(elements, 1) |> elem(0) == "sitemap-00002.xml.gz"
    assert Enum.at(elements, 2) |> elem(0) == "sitemap.xml.gz"
  end

  test "generate with gzip disabled" do
    opts = [
      sitemap_url: "http://example.org/foo",
      gzip: false,
      index_lastmod: ~D[2020-01-01]
    ]

    elements =
      Stream.concat([1..100])
      |> Stream.map(fn i ->
        %URL{loc: "http://example.com/#{i}", lastmod: ~D[2020-01-01], priority: 0.5, changefreq: :hourly}
      end)
      |> Sitemapper.generate(opts)

    # With gzip off the plain-XML output must match the checked-in fixtures
    # byte for byte.
    sitemap_00001_contents = File.read!(Path.join(__DIR__, "fixtures/sitemap-00001.xml"))
    sitemap_index_contents = File.read!(Path.join(__DIR__, "fixtures/sitemap.xml"))

    assert Enum.count(elements) == 2
    assert Enum.at(elements, 0) |> elem(0) == "sitemap-00001.xml"
    assert Enum.at(elements, 0) |> elem(1) |> IO.chardata_to_string() == sitemap_00001_contents
    assert Enum.at(elements, 1) |> elem(0) == "sitemap.xml"
    assert Enum.at(elements, 1) |> elem(1) |> IO.chardata_to_string() == sitemap_index_contents
  end

  test "generate with an alternative name" do
    opts = [
      sitemap_url: "http://example.org/foo",
      name: "alt"
    ]

    # The :name option is embedded into both the sitemap and index filenames.
    elements =
      Stream.concat([1..50_000])
      |> Stream.map(fn i ->
        %URL{loc: "http://example.com/#{i}"}
      end)
      |> Sitemapper.generate(opts)

    assert Enum.count(elements) == 2
    assert Enum.at(elements, 0) |> elem(0) == "sitemap-alt-00001.xml.gz"
    assert Enum.at(elements, 1) |> elem(0) == "sitemap-alt.xml.gz"
  end

  test "generate and persist" do
    store_path = File.cwd!() |> Path.join("test/store")
    File.mkdir_p!(store_path)

    opts = [
      sitemap_url: "http://example.org/foo",
      store: Sitemapper.FileStore,
      store_config: [
        path: store_path
      ]
    ]

    # persist/2 writes each file through the configured store and passes the
    # elements through, so the count is unchanged.
    elements =
      Stream.concat([1..50_002])
      |> Stream.map(fn i ->
        %URL{loc: "http://example.com/#{i}"}
      end)
      |> Sitemapper.generate(opts)
      |> Sitemapper.persist(opts)

    assert Enum.count(elements) == 3
  end
end
| 27.773973 | 105 | 0.587423 |
e8598c950e86596b8c8230cfcc67d378e49f9ca4 | 804 | ex | Elixir | guardian_authentication/test/support/channel_case.ex | hbobenicio/phoenix-examples | dd2adc8887a341b50d192bc31c61ee528e896672 | [
"MIT"
] | null | null | null | guardian_authentication/test/support/channel_case.ex | hbobenicio/phoenix-examples | dd2adc8887a341b50d192bc31c61ee528e896672 | [
"MIT"
] | null | null | null | guardian_authentication/test/support/channel_case.ex | hbobenicio/phoenix-examples | dd2adc8887a341b50d192bc31c61ee528e896672 | [
"MIT"
defmodule GuardianAuthenticationWeb.ChannelCase do
  @moduledoc """
  This module defines the test case to be used by
  channel tests.

  Such tests rely on `Phoenix.ChannelTest` and also
  import other functionality to make it easier
  to build common datastructures and query the data layer.

  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """

  use ExUnit.CaseTemplate

  # Injected into every test module that does `use GuardianAuthenticationWeb.ChannelCase`.
  using do
    quote do
      # Import conveniences for testing with channels
      use Phoenix.ChannelTest

      # The default endpoint for testing
      @endpoint GuardianAuthenticationWeb.Endpoint
    end
  end

  # No per-test setup work is needed; each test receives an unmodified context.
  setup _tags do
    :ok
  end
end
| 23.647059 | 58 | 0.736318 |
e8599cbf0e5e1051d91a3c0914c55b1b0518cb2a | 3,516 | ex | Elixir | lib/helios_aggregate/pipeline.ex | exponentially/helios_aggregate | d667fe913cd5ed380f874e2f207fdbc98f4c9b71 | [
"Apache-2.0"
] | 2 | 2018-09-03T11:02:12.000Z | 2019-12-12T13:08:51.000Z | lib/helios_aggregate/pipeline.ex | exponentially/helios_aggregate | d667fe913cd5ed380f874e2f207fdbc98f4c9b71 | [
"Apache-2.0"
] | 1 | 2018-09-04T05:14:55.000Z | 2018-09-05T06:37:07.000Z | lib/helios_aggregate/pipeline.ex | exponentially/helios_aggregate | d667fe913cd5ed380f874e2f207fdbc98f4c9b71 | [
"Apache-2.0"
defmodule Helios.Aggregate.Pipeline do
  @moduledoc false

  alias Helios.Aggregate.Pipeline.Context

  # Builds a Plug-style pipeline for aggregate command handlers:
  # `__using__/1` installs the plumbing in the aggregate module,
  # `plug/1,2` accumulate plug specs, and `__before_compile__/1`
  # compiles them into a private `helios_aggregate_pipeline/2` function.

  @doc false
  defmacro __using__(opts) do
    quote bind_quoted: [opts: opts] do
      @behaviour Helios.Aggregate.Pipeline.Plug

      import Helios.Aggregate.Pipeline

      alias Helios.Aggregate.Pipeline.Context

      # Plugs are collected in registration order via the `plug` macro below.
      Module.register_attribute(__MODULE__, :plugs, accumulate: true)
      @before_compile Helios.Aggregate.Pipeline
      @helios_log_level Keyword.get(opts, :log, :debug)

      @doc false
      def init(opts), do: opts

      # Guard clause: handler must be an atom naming a function in the
      # aggregate module (it is applied by name in `handle/2` below).
      @doc false
      def call(ctx, handler) when not is_atom(handler),
        do: raise(RuntimeError, "handler name should be an atom, got #{inspect(handler)}.")

      def call(ctx, handler) do
        # Record the aggregate module and the handler name in the context's
        # private map before running the compiled pipeline.
        ctx =
          update_in(
            ctx.private,
            &(&1
              |> Map.put(:helios_aggregate, __MODULE__)
              |> Map.put(:helios_aggregate_command_handler, handler))
          )

        helios_aggregate_pipeline(ctx, handler)
      end

      # Terminal plug: dispatches to the named handler function with the
      # context and its params.
      @doc false
      def handle(%Context{private: %{helios_aggregate_command_handler: handler}} = ctx, _ops) do
        apply(__MODULE__, handler, [ctx, ctx.params])
      end

      defoverridable init: 1, call: 2, handle: 2
    end
  end

  @doc false
  defmacro __before_compile__(env) do
    # `handle` is appended as the innermost plug so the command handler runs
    # after all registered plugs.
    handler = {:handle, [], true}
    plugs = [handler | Module.get_attribute(env.module, :plugs)]

    {ctx, body} =
      Helios.Aggregate.Pipeline.Builder.compile(
        env,
        plugs,
        log_on_halt: :debug,
        init_mode: Helios.Aggregate.plug_init_mode()
      )

    quote do
      defoverridable handle: 2

      # Wraps the (overridable) handle/2 so any :error thrown while running
      # the pipeline is routed through __catch__/5 for friendlier reporting.
      def handle(var!(ctx_before), opts) do
        try do
          # var!(ctx_after) = super(var!(ctx_before), opts)
          super(var!(ctx_before), opts)
        catch
          :error, reason ->
            # NOTE(review): System.stacktrace/0 is deprecated since Elixir 1.7;
            # __STACKTRACE__ would be the modern replacement here.
            Helios.Aggregate.Pipeline.__catch__(
              var!(ctx_before),
              reason,
              __MODULE__,
              var!(ctx_before).private.helios_aggregate_command_handler,
              System.stacktrace()
            )
        end
      end

      # The compiled pipeline body; the throwaway matches keep the compiler
      # from warning when `body` does not use every hygienic variable.
      defp helios_aggregate_pipeline(unquote(ctx), var!(handler)) do
        var!(ctx) = unquote(ctx)
        var!(aggregate) = __MODULE__
        _ = var!(ctx)
        _ = var!(aggregate)
        _ = var!(handler)
        unquote(body)
      end
    end
  end

  # A :function_clause raised directly by the handler itself (the stack's top
  # frame is {aggregate, handler, [ctx | _]}) becomes a dedicated
  # CommandHandlerClauseError carrying the offending params.
  @doc false
  def __catch__(
        %Context{},
        :function_clause,
        aggregate,
        handler,
        [{aggregate, handler, [%Context{} = ctx | _], _loc} | _] = stack
      ) do
    args = [aggregate: aggregate, handler: handler, params: ctx.params]
    reraise Helios.Aggregate.CommandHandlerClauseError, args, stack
  end

  # Any other error is re-raised wrapped with the context attached.
  def __catch__(%Context{} = ctx, reason, _aggregate, _handler, stack) do
    Helios.Aggregate.WrapperError.reraise(ctx, :error, reason, stack)
  end

  @doc """
  Stores a plug to be executed as part of the aggregate pipeline.
  """
  defmacro plug(plug)

  defmacro plug({:when, _, [plug, guards]}), do: plug(plug, [], guards)
  defmacro plug(plug), do: plug(plug, [], true)

  @doc """
  Stores a plug with the given options to be executed as part of
  the aggregate pipeline.
  """
  defmacro plug(plug, opts)

  defmacro plug(plug, {:when, _, [opts, guards]}), do: plug(plug, opts, guards)
  defmacro plug(plug, opts), do: plug(plug, opts, true)

  # All plug/1,2 forms funnel here; guards are escaped so the builder can
  # re-inject them at compile time.
  defp plug(plug, opts, guards) do
    quote do
      @plugs {unquote(plug), unquote(opts), unquote(Macro.escape(guards))}
    end
  end
end
| 26.839695 | 96 | 0.613481 |
e859bd012d6d9d3e025aa6f4bc7ff213690adf25 | 1,811 | ex | Elixir | lib/assent/strategies/google.ex | lmeier/assent | 3f5a5d340eb1833cd9d6ada9d7e3056b7e3d4f41 | [
"MIT"
] | 160 | 2019-09-25T01:07:16.000Z | 2022-03-08T00:45:16.000Z | lib/assent/strategies/google.ex | lmeier/assent | 3f5a5d340eb1833cd9d6ada9d7e3056b7e3d4f41 | [
"MIT"
] | 47 | 2019-09-25T00:38:44.000Z | 2022-03-21T17:35:43.000Z | lib/assent/strategies/google.ex | lmeier/assent | 3f5a5d340eb1833cd9d6ada9d7e3056b7e3d4f41 | [
"MIT"
defmodule Assent.Strategy.Google do
  @moduledoc """
  Google OAuth 2.0 strategy.

  In the normalized user response a `google_hd` ("Hosted Domain") field is
  included in user parameters and can be used to limit access to users
  belonging to a particular hosted domain.

  ## Usage

      config = [
        client_id: "REPLACE_WITH_CLIENT_ID",
        client_secret: "REPLACE_WITH_CLIENT_SECRET"
      ]

  To get the refresh token, it's necessary to pass `access_type: "offline"` in
  the authorization request:

      config = [
        client_id: "REPLACE_WITH_CLIENT_ID",
        client_secret: "REPLACE_WITH_CLIENT_SECRET",
        authorization_params: [
          access_type: "offline",
          scope: "https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile"
        ]
      ]

  See `Assent.Strategy.OAuth2` for more.
  """
  use Assent.Strategy.OAuth2.Base

  @default_scope "https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile"

  # Claims copied verbatim from Google's userinfo payload into the
  # normalized user map (missing keys become nil, as with direct access).
  @standard_claims ~w(sub name given_name family_name picture email email_verified locale)

  @impl true
  def default_config(_config) do
    [
      site: "https://www.googleapis.com",
      authorize_url: "https://accounts.google.com/o/oauth2/v2/auth",
      token_url: "/oauth2/v4/token",
      user_url: "/oauth2/v3/userinfo",
      authorization_params: [scope: @default_scope],
      auth_method: :client_secret_post
    ]
  end

  @impl true
  def normalize(_config, user) do
    claims = Map.new(@standard_claims, fn claim -> {claim, user[claim]} end)

    # "hd" (hosted domain) travels in the additional-params map as "google_hd".
    {:ok, claims, %{"google_hd" => user["hd"]}}
  end
end
| 29.688525 | 135 | 0.617891 |
e859bebbfa25adbdf7020b1617e2ebdd35ca4fe6 | 1,919 | ex | Elixir | clients/document_ai/lib/google_api/document_ai/v1/model/google_cloud_documentai_v1_set_default_processor_version_metadata.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/document_ai/lib/google_api/document_ai/v1/model/google_cloud_documentai_v1_set_default_processor_version_metadata.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/document_ai/lib/google_api/document_ai/v1/model/google_cloud_documentai_v1_set_default_processor_version_metadata.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# Auto-generated model module — regenerated by the elixir code generator;
# do not hand-edit the field definitions.
defmodule GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1SetDefaultProcessorVersionMetadata do
  @moduledoc """
  The long running operation metadata for set default processor version method.

  ## Attributes

  *   `commonMetadata` (*type:* `GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1CommonOperationMetadata.t`, *default:* `nil`) - The basic metadata of the long running operation.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :commonMetadata =>
            GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1CommonOperationMetadata.t() | nil
        }

  field(:commonMetadata,
    as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1CommonOperationMetadata
  )
end
# Delegates Poison decoding to the model's own `decode/2` (generated by
# `GoogleApi.Gax.ModelBase`).
defimpl Poison.Decoder,
  for: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1SetDefaultProcessorVersionMetadata do
  def decode(value, options) do
    GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1SetDefaultProcessorVersionMetadata.decode(
      value,
      options
    )
  end
end
# Encoding is shared across all Gax models via `GoogleApi.Gax.ModelBase.encode/2`.
defimpl Poison.Encoder,
  for: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1SetDefaultProcessorVersionMetadata do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 34.890909 | 183 | 0.771756 |
e859d310907e531d8ef00994a89ddbcb7a86e784 | 2,021 | exs | Elixir | programming_elixir/chapter11/exercises.exs | hectoregm/elixir | c4dc9cd327c6d935de93337e5c52d58b82c4d339 | [
"MIT"
] | null | null | null | programming_elixir/chapter11/exercises.exs | hectoregm/elixir | c4dc9cd327c6d935de93337e5c52d58b82c4d339 | [
"MIT"
] | null | null | null | programming_elixir/chapter11/exercises.exs | hectoregm/elixir | c4dc9cd327c6d935de93337e5c52d58b82c4d339 | [
"MIT"
defmodule Exercise do
  @moduledoc """
  Assorted charlist/binary exercises (Programming Elixir, chapter 11).
  """

  @doc "Returns true if every character code is printable ASCII (space through tilde)."
  def printable?(chars), do: Enum.all?(chars, fn ch -> ch in ?\s..?~ end)

  @doc "Returns true if the two charlists contain exactly the same characters."
  def anagram?(word1, word2), do: Enum.sort(word1) == Enum.sort(word2)

  @doc """
  Evaluates a simple arithmetic charlist such as `~c"12 + 5"`.

  Supports `+`, `-`, `*` and integer division `/`; spaces around the
  operands are ignored.
  """
  def calculate(str) do
    # Everything up to the first non-digit/non-space is the left operand;
    # the head of the remainder is the operator charcode.
    {num_a, [op | num_b]} = Enum.split_while(str, fn ch -> ch in '0123456789 ' end)
    eval([op], parse_number(num_a), parse_number(num_b))
  end

  # Parses a charlist of digits (skipping spaces) into an integer.
  defp parse_number(expression), do: _parse_number({0, expression})

  defp _parse_number({value, [space | rest]}) when space in ' ' do
    _parse_number({value, rest})
  end

  defp _parse_number({value, [digit | rest]}) when digit in ?0..?9 do
    _parse_number({value * 10 + digit - ?0, rest})
  end

  defp _parse_number({result, []}) do
    result
  end

  defp eval('+', a, b), do: a + b
  defp eval('-', a, b), do: a - b
  defp eval('*', a, b), do: a * b
  defp eval('/', a, b), do: div(a, b)

  @doc """
  Prints each string in `lst` centered relative to the longest string.
  Side-effecting: writes to stdout via `IO.puts/1`.
  """
  def center(lst = [_str | _rest]) do
    longest = Enum.max_by(lst, fn str -> String.length(str) end)
    _center(lst, String.length(longest))
  end

  defp _center([], _line_length), do: nil

  defp _center([str | rest], line_length) do
    str_length = String.length(str)
    padding = line_length - str_length
    # Left padding takes the smaller half when the total padding is odd.
    lpadding = div(padding, 2)

    # Fix: String.rjust/2 and String.ljust/2 were removed from Elixir's
    # stdlib; pad_leading/pad_trailing (available since 1.3) are the
    # behavior-identical replacements.
    str
    |> String.pad_leading(str_length + lpadding)
    |> String.pad_trailing(str_length + padding)
    |> IO.puts()

    _center(rest, line_length)
  end

  @doc """
  Capitalizes the first letter of every `". "`-separated sentence in `str`,
  including a trailing sentence that does not end in `". "`.
  """
  def capitalize_sentences(str), do: _capitalize_sentences(str, "", "")

  # ". " (codepoints 46, 32) terminates the current sentence.
  defp _capitalize_sentences(<<46::utf8, 32::utf8, tail::binary>>, sentence, result) do
    _capitalize_sentences(tail, "", result <> String.capitalize(sentence <> ". "))
  end

  defp _capitalize_sentences(<<head::utf8, tail::binary>>, sentence, result) do
    _capitalize_sentences(tail, sentence <> <<head::utf8>>, result)
  end

  # Bug fix: the original terminal clause only matched an EMPTY leftover
  # sentence, so any input not ending in ". " raised FunctionClauseError.
  # Capitalizing the leftover sentence is a no-op when it is empty.
  defp _capitalize_sentences("", sentence, result) do
    result <> String.capitalize(sentence)
  end
end
| 29.720588 | 95 | 0.629886 |
e859e77fc7d3b7a40d72be934d419f399af16d53 | 1,171 | ex | Elixir | apps/wuw_web/lib/wuw_web/web/channels/user_socket.ex | aforward/wuw | 8f41157aeb9fce738a402e757bf78a322a890b7a | [
"MIT"
] | null | null | null | apps/wuw_web/lib/wuw_web/web/channels/user_socket.ex | aforward/wuw | 8f41157aeb9fce738a402e757bf78a322a890b7a | [
"MIT"
] | null | null | null | apps/wuw_web/lib/wuw_web/web/channels/user_socket.ex | aforward/wuw | 8f41157aeb9fce738a402e757bf78a322a890b7a | [
"MIT"
defmodule WuwWeb.Web.UserSocket do
  use Phoenix.Socket

  ## Channels
  #
  # No channels are mounted yet. Add them like:
  #
  #     channel "room:*", WuwWeb.RoomChannel

  ## Transports
  transport :websocket, Phoenix.Transports.WebSocket
  # transport :longpoll, Phoenix.Transports.LongPoll

  # Socket params come from the client and can be used to verify and
  # authenticate a user (see `Phoenix.Token`). After verification, default
  # assigns can be placed in the socket for all channels:
  #
  #     {:ok, assign(socket, :user_id, verified_user_id)}
  #
  # Returning `:error` denies the connection. Currently every connection is
  # accepted anonymously.
  def connect(_params, socket), do: {:ok, socket}

  # Socket ids are topics identifying all sockets of a given user, e.g.
  #
  #     def id(socket), do: "users_socket:#{socket.assigns.user_id}"
  #
  # which would allow broadcasting a "disconnect" to terminate all of that
  # user's sockets and channels:
  #
  #     WuwWeb.Web.Endpoint.broadcast("users_socket:#{user.id}", "disconnect", %{})
  #
  # Returning `nil` keeps this socket anonymous.
  def id(_socket), do: nil
end
| 30.815789 | 83 | 0.70111 |
e859fd4c814e3288f16f0f441d3abba18e70e8e8 | 413 | exs | Elixir | nomads/src/test/nomads_web/views/error_view_test.exs | HarpBytes/nomads | 10c79953e19262d9d095a190eecb09a5e57e6bf2 | [
"MIT"
] | null | null | null | nomads/src/test/nomads_web/views/error_view_test.exs | HarpBytes/nomads | 10c79953e19262d9d095a190eecb09a5e57e6bf2 | [
"MIT"
] | null | null | null | nomads/src/test/nomads_web/views/error_view_test.exs | HarpBytes/nomads | 10c79953e19262d9d095a190eecb09a5e57e6bf2 | [
"MIT"
defmodule NomadsWeb.ErrorViewTest do
  use NomadsWeb.ConnCase, async: true

  # Bring render/3 and render_to_string/3 into scope for rendering the
  # error templates directly.
  import Phoenix.View

  # Each error template renders its plain-text status message; generate one
  # test per template at compile time.
  for {template, message} <- [
        {"404.html", "Not Found"},
        {"500.html", "Internal Server Error"}
      ] do
    test "renders #{template}" do
      assert render_to_string(NomadsWeb.ErrorView, unquote(template), []) == unquote(message)
    end
  end
end
| 27.533333 | 91 | 0.731235 |
e85a0dc13db46af8d188104e1cd127f10fc77337 | 693 | ex | Elixir | apps/artemis/lib/artemis/contexts/tag/create_tag.ex | artemis-platform/artemis_dashboard | 5ab3f5ac4c5255478bbebf76f0e43b44992e3cab | [
"MIT"
] | 9 | 2019-08-19T19:56:34.000Z | 2022-03-22T17:56:38.000Z | apps/artemis/lib/artemis/contexts/tag/create_tag.ex | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | 7 | 2019-07-12T21:41:01.000Z | 2020-08-17T21:29:22.000Z | apps/artemis/lib/artemis/contexts/tag/create_tag.ex | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
defmodule Artemis.CreateTag do
  use Artemis.Context
  use Assoc.Updater, repo: Artemis.Repo

  alias Artemis.GenerateTagParams
  alias Artemis.Repo
  alias Artemis.Tag

  # Bang variant: unwraps a successful result or raises a context error.
  def call!(params, user) do
    params
    |> call(user)
    |> case do
      {:ok, result} -> result
      {:error, _} -> raise(Artemis.Context.Error, "Error creating tag")
    end
  end

  # Creates the tag inside a transaction, updates its associations from the
  # given params, then broadcasts a "tag:created" event for the user.
  def call(params, user) do
    with_transaction(fn ->
      inserted = insert_record(params)

      inserted
      |> update_associations(params)
      |> Event.broadcast("tag:created", params, user)
    end)
  end

  # Runs the raw params through GenerateTagParams before inserting the record.
  defp insert_record(params) do
    tag_params = GenerateTagParams.call(params)

    %Tag{}
    |> Tag.changeset(tag_params)
    |> Repo.insert()
  end
end
| 21 | 71 | 0.656566 |
e85a326e28ea0338041be50c677a0582176a6a98 | 4,380 | exs | Elixir | test/ex_doc/formatter/epub/templates_test.exs | zoldar/ex_doc | f2ca5d1f4f85650b55460bc08e4d17a00f9f0d75 | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | test/ex_doc/formatter/epub/templates_test.exs | zoldar/ex_doc | f2ca5d1f4f85650b55460bc08e4d17a00f9f0d75 | [
"Apache-2.0",
"CC-BY-4.0"
] | 2 | 2020-12-04T22:03:31.000Z | 2022-03-02T10:00:23.000Z | test/ex_doc/formatter/epub/templates_test.exs | bonomali/ex_doc | d22cdad512fbfd8bfc44b417c680b1105c8aa19a | [
"Apache-2.0",
"CC-BY-4.0"
defmodule ExDoc.Formatter.EPUB.TemplatesTest do
  use ExUnit.Case, async: true

  alias ExDoc.Formatter.HTML
  alias ExDoc.Formatter.EPUB.Templates

  defp source_url do
    "https://github.com/elixir-lang/elixir"
  end

  defp homepage_url do
    "http://elixir-lang.org"
  end

  # Builds a default ExDoc.Config for the tests; `config` entries override
  # the defaults via struct/2.
  defp doc_config(config \\ []) do
    default = %ExDoc.Config{
      project: "Elixir",
      version: "1.0.1",
      source_root: File.cwd!(),
      source_url_pattern: "#{source_url()}/blob/master/%{path}#L%{line}",
      homepage_url: homepage_url(),
      source_url: source_url(),
      output: "test/tmp/epub_templates"
    }

    struct(default, config)
  end

  # Retrieves docs for the given modules, autolinks them for .xhtml output
  # and renders the module page for the first module.
  defp get_module_page(names, config \\ []) do
    config = doc_config(config)
    mods = ExDoc.Retriever.docs_from_modules(names, config)
    mods = HTML.Autolink.all(mods, HTML.Autolink.compile(mods, ".xhtml", config))
    Templates.module_page(config, hd(mods))
  end

  # Copy the EPUB formatter assets into the output directory once.
  setup_all do
    File.mkdir_p!("test/tmp/epub_templates/OEBPS")
    File.cp_r!("formatters/epub", "test/tmp/epub_templates/OEBPS")
    :ok
  end

  describe "module_page" do
    test "generates only the module name when there's no more info" do
      module_node = %ExDoc.ModuleNode{
        module: XPTOModule,
        doc: nil,
        id: "XPTOModule",
        title: "XPTOModule"
      }

      content = Templates.module_page(doc_config(), module_node)

      assert content =~ ~r{<title>XPTOModule [^<]*</title>}
      assert content =~ ~r{<h1 id="content">\s*XPTOModule\s*}
    end

    test "outputs the functions and docstrings" do
      content = get_module_page([CompiledWithDocs])

      assert content =~ ~r{<title>CompiledWithDocs [^<]*</title>}
      assert content =~ ~r{<h1 id="content">\s*CompiledWithDocs\s*}

      # The moduledoc code example is syntax-highlighted into spans.
      assert content =~
               ~r{moduledoc.*Example.*<span class="nc">CompiledWithDocs</span><span class="o">\.</span><span class="n">example</span>.*}ms

      assert content =~ ~r{example/2.*Some example}ms
      assert content =~ ~r{example_without_docs/0.*<section class="docstring">.*</section>}ms
      assert content =~ ~r{example_1/0.*<span class="note">\(macro\)</span>}ms

      assert content =~ ~s{<section class="detail" id="example_1/0">}
      assert content =~ ~s{example(foo, bar \\\\ Baz)}
    end

    test "outputs function groups" do
      content =
        get_module_page([CompiledWithDocs],
          groups_for_functions: [
            "Example functions": &(&1[:purpose] == :example),
            Legacy: &is_binary(&1[:deprecated])
          ]
        )

      assert content =~ ~r{id="example-functions".*href="#example-functions".*Example functions}ms
      assert content =~ ~r{id="legacy".*href="#legacy".*Legacy}ms
      # example/2 belongs only to its custom group, not to Legacy nor to the
      # default "functions" group.
      assert content =~ ~r{id="example-functions".*id="example/2"}ms
      refute content =~ ~r{id="legacy".*id="example/2"}ms
      refute content =~ ~r{id="functions".*id="example/2"}ms
      assert content =~ ~r{id="functions".*id="example_1/0"}ms
    end

    test "outputs summaries" do
      content = get_module_page([CompiledWithDocs])
      assert content =~ ~r{<div class="summary-signature">\s*<a href="#example_1/0">}
    end

    test "contains links to summary sections when those exist" do
      content = get_module_page([CompiledWithDocs, CompiledWithDocs.Nested])
      refute content =~ ~r{types_details}
    end

    ## BEHAVIOURS

    test "outputs behavior and callbacks" do
      content = get_module_page([CustomBehaviourOne])

      assert content =~
               ~r{<h1 id="content">\s*CustomBehaviourOne\s*<small>behaviour</small>\s*</h1>}m

      assert content =~ ~r{Callbacks}
      assert content =~ ~r{<section class="detail" id="c:hello/1">}

      content = get_module_page([CustomBehaviourTwo])

      assert content =~
               ~r{<h1 id="content">\s*CustomBehaviourTwo\s*<small>behaviour</small>\s*</h1>}m

      assert content =~ ~r{Callbacks}
      assert content =~ ~r{<section class="detail" id="c:bye/1">}
    end

    ## PROTOCOLS

    test "outputs the protocol type" do
      content = get_module_page([CustomProtocol])
      assert content =~ ~r{<h1 id="content">\s*CustomProtocol\s*<small>protocol</small>\s*}m
    end

    ## TASKS

    test "outputs the task type" do
      content = get_module_page([Mix.Tasks.TaskWithDocs])
      assert content =~ ~r{<h1 id="content">\s*mix task_with_docs\s*}m
    end
  end
end
| 32.205882 | 138 | 0.63653 |
e85a66334e9a8e19759df0badaf387c5a8bdc10d | 1,585 | ex | Elixir | web/web.ex | Croissong/nussnougat | dd669d8614029400a25cef67971ef8720f5580dd | [
"MIT"
] | null | null | null | web/web.ex | Croissong/nussnougat | dd669d8614029400a25cef67971ef8720f5580dd | [
"MIT"
] | null | null | null | web/web.ex | Croissong/nussnougat | dd669d8614029400a25cef67971ef8720f5580dd | [
"MIT"
defmodule Nussnougat.Web do
  @moduledoc """
  A module that keeps using definitions for controllers,
  views and so on.

  This can be used in your application as:

      use Nussnougat.Web, :controller
      use Nussnougat.Web, :view

  The definitions below will be executed for every view,
  controller, etc, so keep them short and clean, focused
  on imports, uses and aliases.

  Do NOT define functions inside the quoted expressions
  below.
  """

  # Quoted code injected into Ecto model modules.
  def model do
    quote do
      use Ecto.Model

      import Ecto.Changeset
      import Ecto.Query, only: [from: 1, from: 2]
    end
  end

  # Quoted code injected into controllers: Phoenix.Controller plus repo and
  # query conveniences and route helpers.
  def controller do
    quote do
      use Phoenix.Controller

      alias Nussnougat.Repo
      import Ecto.Model
      import Ecto.Query, only: [from: 1, from: 2]
      import Nussnougat.Router.Helpers
    end
  end

  # Quoted code injected into views.
  def view do
    quote do
      use Phoenix.View, root: "web/templates"

      # Import convenience functions from controllers
      import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]

      # Use all HTML functionality (forms, tags, etc)
      use Phoenix.HTML

      import Nussnougat.Router.Helpers
    end
  end

  # Quoted code injected into the router.
  def router do
    quote do
      use Phoenix.Router
    end
  end

  # Quoted code injected into channels.
  def channel do
    quote do
      use Phoenix.Channel

      alias Nussnougat.Repo
      import Ecto.Model
      import Ecto.Query, only: [from: 1, from: 2]
    end
  end

  @doc """
  When used, dispatch to the appropriate controller/view/etc.
  """
  defmacro __using__(which) when is_atom(which) do
    apply(__MODULE__, which, [])
  end
end
| 20.584416 | 88 | 0.663722 |
e85a68fb2527b443f1dd9deeaadfca21e2fd4a9f | 362 | ex | Elixir | lib/parser/inform_response.ex | smiyabe/cwmp_ex | 9db322497aa3208b5985ccf496ada5286cde3925 | [
"Artistic-2.0"
] | 3 | 2017-11-29T05:07:35.000Z | 2019-12-18T17:16:41.000Z | lib/parser/inform_response.ex | smiyabe/cwmp_ex | 9db322497aa3208b5985ccf496ada5286cde3925 | [
"Artistic-2.0"
] | 1 | 2021-12-02T19:35:28.000Z | 2022-03-29T09:40:52.000Z | lib/parser/inform_response.ex | smiyabe/cwmp_ex | 9db322497aa3208b5985ccf496ada5286cde3925 | [
"Artistic-2.0"
defmodule CWMP.Protocol.Parser.Messages.InformResponse do
  use CWMP.Protocol.ParserHelpers

  alias CWMP.Protocol.Messages.InformResponse

  # Parsing starts from an empty InformResponse struct as the accumulator.
  def initial_acc, do: %InformResponse{}

  # Closing a <MaxEnvelopes> element stores its integer text content on the
  # accumulated InformResponse.
  def end_element(state, ['MaxEnvelopes']) do
    max_envelopes = integerValue(state.last_text)

    update_acc(state, fn acc -> %InformResponse{acc | max_envelopes: max_envelopes} end)
  end
end
| 22.625 | 77 | 0.754144 |
e85a8c749b6e73c48a18a219aafe108679c5d005 | 494 | exs | Elixir | config/test.exs | retgoat/band-indigo.com | b8a1cf58ec766858cc2319441fa8628ad763529a | [
"MIT"
] | null | null | null | config/test.exs | retgoat/band-indigo.com | b8a1cf58ec766858cc2319441fa8628ad763529a | [
"MIT"
] | null | null | null | config/test.exs | retgoat/band-indigo.com | b8a1cf58ec766858cc2319441fa8628ad763529a | [
"MIT"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :band_indigo, BandIndigo.Endpoint,
  http: [port: 4001],
  server: false

# Print only warnings and errors during test
config :logger, level: :warn

# Configure your database.
# The SQL.Sandbox pool wraps each test in a transaction that is rolled back,
# keeping tests isolated from one another.
config :band_indigo, BandIndigo.Repo,
  adapter: Ecto.Adapters.MySQL,
  username: "root",
  password: "",
  database: "band_indigo_test",
  hostname: "localhost",
  pool: Ecto.Adapters.SQL.Sandbox
| 24.7 | 56 | 0.734818 |
e85acdc55f3c3279fc7f99cb07256ae26e435d3b | 3,246 | exs | Elixir | mix.exs | mitjok/phoenix_live_view | b396bb83d9e02c18c58039bd86fdc4835bd8e0e9 | [
"MIT"
] | null | null | null | mix.exs | mitjok/phoenix_live_view | b396bb83d9e02c18c58039bd86fdc4835bd8e0e9 | [
"MIT"
] | null | null | null | mix.exs | mitjok/phoenix_live_view | b396bb83d9e02c18c58039bd86fdc4835bd8e0e9 | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveView.MixProject do
use Mix.Project
@version "0.15.3"
def project do
[
app: :phoenix_live_view,
version: @version,
elixir: "~> 1.7",
start_permanent: Mix.env() == :prod,
elixirc_paths: elixirc_paths(Mix.env()),
compilers: compilers(Mix.env()),
package: package(),
xref: [exclude: [Floki]],
deps: deps(),
docs: docs(),
name: "Phoenix LiveView",
homepage_url: "http://www.phoenixframework.org",
description: """
Rich, real-time user experiences with server-rendered HTML
"""
]
end
defp compilers(:test), do: [:phoenix] ++ Mix.compilers()
defp compilers(_), do: Mix.compilers()
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
def application do
[
extra_applications: [:logger],
mod: {Phoenix.LiveView.Application, []}
]
end
defp deps do
[
{:phoenix, "~> 1.6.0-dev", github: "phoenixframework/phoenix"},
{:phoenix_html, "~> 2.14"},
{:telemetry, "~> 0.4.2 or ~> 0.5"},
{:jason, "~> 1.0", optional: true},
{:ex_doc, "~> 0.22", only: :docs},
{:floki, "~> 0.27.0", only: :test},
{:html_entities, ">= 0.0.0", only: :test}
]
end
defp docs do
[
main: "Phoenix.LiveView",
source_ref: "v#{@version}",
source_url: "https://github.com/phoenixframework/phoenix_live_view",
extra_section: "GUIDES",
extras: extras(),
groups_for_extras: groups_for_extras(),
groups_for_modules: groups_for_modules()
]
end
defp extras do
[
"guides/introduction/installation.md",
"guides/client/bindings.md",
"guides/client/form-bindings.md",
"guides/client/dom-patching.md",
"guides/client/js-interop.md",
"guides/client/uploads-external.md",
"guides/server/assigns-eex.md",
"guides/server/error-handling.md",
"guides/server/live-layouts.md",
"guides/server/live-navigation.md",
"guides/server/security-model.md",
"guides/server/telemetry.md",
"guides/server/uploads.md",
"guides/server/using-gettext.md"
]
end
defp groups_for_extras do
[
Introduction: ~r/guides\/introduction\/.?/,
"Server-side features": ~r/guides\/server\/.?/,
"Client-side integration": ~r/guides\/client\/.?/
]
end
defp groups_for_modules do
[
"Upload structures": [
Phoenix.LiveView.UploadConfig,
Phoenix.LiveView.UploadEntry
],
"Testing structures": [
Phoenix.LiveViewTest.Element,
Phoenix.LiveViewTest.Upload,
Phoenix.LiveViewTest.View
],
"Live EEx Engine": [
Phoenix.LiveView.Engine,
Phoenix.LiveView.Component,
Phoenix.LiveView.Rendered,
Phoenix.LiveView.Comprehension
]
]
end
defp package do
[
maintainers: ["Chris McCord", "José Valim", "Gary Rennie", "Alex Garibay", "Scott Newcomer"],
licenses: ["MIT"],
links: %{github: "https://github.com/phoenixframework/phoenix_live_view"},
files:
~w(assets/js lib priv) ++
~w(CHANGELOG.md LICENSE.md mix.exs package.json README.md)
]
end
end
| 26.826446 | 99 | 0.597967 |
e85acf44201a79a46921f91ed54f94b507f9c8b2 | 924 | ex | Elixir | test/fixtures/my_handler.ex | NFIBrokerage/beeline_honeycomb | f64b8715289021ae0d5709a2b05a8d2d136c6481 | [
"Apache-2.0"
] | null | null | null | test/fixtures/my_handler.ex | NFIBrokerage/beeline_honeycomb | f64b8715289021ae0d5709a2b05a8d2d136c6481 | [
"Apache-2.0"
] | null | null | null | test/fixtures/my_handler.ex | NFIBrokerage/beeline_honeycomb | f64b8715289021ae0d5709a2b05a8d2d136c6481 | [
"Apache-2.0"
] | null | null | null | defmodule BeelineHoneycomb.MyHandler do
@moduledoc """
An example beeline fixture for the sake of testing which relies on the dummy
producer
"""
use Beeline
def start_link(opts) do
Beeline.start_link(__MODULE__,
name: __MODULE__,
producers: [
default: [
adapter: :dummy,
connection: nil,
stream_name: "Beeline.Honeycomb.Test"
]
],
auto_subscribe?: fn _producer -> true end,
get_stream_position: fn _producer -> -1 end,
spawn_health_checkers?: true,
health_check_interval: 100,
health_check_drift: 0,
subscribe_after: 0,
context: %{test_proc: opts[:test_proc]}
)
end
@impl GenStage
def handle_events(subscription_events, _from, state) do
events = Enum.map(subscription_events, &Beeline.decode_event/1)
send(state.test_proc, {:handled, events})
{:noreply, [], state}
end
end
| 24.315789 | 78 | 0.649351 |
e85adfb9142bead4d1413c138e2868fa45c479c7 | 3,640 | ex | Elixir | clients/vm_migration/lib/google_api/vm_migration/v1/model/vmware_vm_details.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/vm_migration/lib/google_api/vm_migration/v1/model/vmware_vm_details.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/vm_migration/lib/google_api/vm_migration/v1/model/vmware_vm_details.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.VMMigration.V1.Model.VmwareVmDetails do
  @moduledoc """
  VmwareVmDetails describes a VM in vCenter.

  ## Attributes

  *   `bootOption` (*type:* `String.t`, *default:* `nil`) - Output only. The VM Boot Option.
  *   `committedStorageMb` (*type:* `String.t`, *default:* `nil`) - The total size of the storage allocated to the VM in MB.
  *   `cpuCount` (*type:* `integer()`, *default:* `nil`) - The number of cpus in the VM.
  *   `datacenterDescription` (*type:* `String.t`, *default:* `nil`) - The descriptive name of the vCenter's datacenter this VM is contained in.
  *   `datacenterId` (*type:* `String.t`, *default:* `nil`) - The id of the vCenter's datacenter this VM is contained in.
  *   `diskCount` (*type:* `integer()`, *default:* `nil`) - The number of disks the VM has.
  *   `displayName` (*type:* `String.t`, *default:* `nil`) - The display name of the VM. Note that this is not necessarily unique.
  *   `guestDescription` (*type:* `String.t`, *default:* `nil`) - The VM's OS. See for example https://vdc-repo.vmware.com/vmwb-repository/dcr-public/da47f910-60ac-438b-8b9b-6122f4d14524/16b7274a-bf8b-4b4c-a05e-746f2aa93c8c/doc/vim.vm.GuestOsDescriptor.GuestOsIdentifier.html for types of strings this might hold.
  *   `memoryMb` (*type:* `integer()`, *default:* `nil`) - The size of the memory of the VM in MB.
  *   `powerState` (*type:* `String.t`, *default:* `nil`) - The power state of the VM at the moment list was taken.
  *   `uuid` (*type:* `String.t`, *default:* `nil`) - The unique identifier of the VM in vCenter.
  *   `vmId` (*type:* `String.t`, *default:* `nil`) - The VM's id in the source (note that this is not the MigratingVm's id). This is the moref id of the VM.
  """

  use GoogleApi.Gax.ModelBase

  # NOTE(review): `committedStorageMb` is typed String.t while the other
  # numeric fields are integer() — presumably because the API serializes
  # int64 values as JSON strings; keep in sync with the generator output.
  @type t :: %__MODULE__{
          :bootOption => String.t() | nil,
          :committedStorageMb => String.t() | nil,
          :cpuCount => integer() | nil,
          :datacenterDescription => String.t() | nil,
          :datacenterId => String.t() | nil,
          :diskCount => integer() | nil,
          :displayName => String.t() | nil,
          :guestDescription => String.t() | nil,
          :memoryMb => integer() | nil,
          :powerState => String.t() | nil,
          :uuid => String.t() | nil,
          :vmId => String.t() | nil
        }

  # One field/1 macro call per JSON attribute (macro provided by ModelBase).
  field(:bootOption)
  field(:committedStorageMb)
  field(:cpuCount)
  field(:datacenterDescription)
  field(:datacenterId)
  field(:diskCount)
  field(:displayName)
  field(:guestDescription)
  field(:memoryMb)
  field(:powerState)
  field(:uuid)
  field(:vmId)
end
defimpl Poison.Decoder, for: GoogleApi.VMMigration.V1.Model.VmwareVmDetails do
  # Delegate decoding to the model's generated decode/2.
  def decode(value, options),
    do: GoogleApi.VMMigration.V1.Model.VmwareVmDetails.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.VMMigration.V1.Model.VmwareVmDetails do
  # Encoding is shared across all generated models via ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 45.5 | 313 | 0.673626 |
e85b385b74bb9f9a4ac5a74446c1ce255e8eea93 | 2,393 | ex | Elixir | clients/cloud_private_catalog_producer/lib/google_api/cloud_private_catalog_producer/v1beta1/model/google_cloud_privatecatalogproducer_v1beta1_list_associations_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_private_catalog_producer/lib/google_api/cloud_private_catalog_producer/v1beta1/model/google_cloud_privatecatalogproducer_v1beta1_list_associations_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_private_catalog_producer/lib/google_api/cloud_private_catalog_producer/v1beta1/model/google_cloud_privatecatalogproducer_v1beta1_list_associations_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudPrivateCatalogProducer.V1beta1.Model.GoogleCloudPrivatecatalogproducerV1beta1ListAssociationsResponse do
  @moduledoc """
  Response returned from a `ListAssociations` call: one page of `Association`
  resources plus an optional continuation token.

  ## Attributes

  *   `associations` (*type:* `list(GoogleApi.CloudPrivateCatalogProducer.V1beta1.Model.GoogleCloudPrivatecatalogproducerV1beta1Association.t)`, *default:* `nil`) - The returned `Association` resources from the list call.
  *   `nextPageToken` (*type:* `String.t`, *default:* `nil`) - A pagination token returned from a previous call to
      `ListAssociations` that indicates where the listing should continue from.
      This field is optional.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :associations =>
            list(
              GoogleApi.CloudPrivateCatalogProducer.V1beta1.Model.GoogleCloudPrivatecatalogproducerV1beta1Association.t()
            ),
          :nextPageToken => String.t()
        }

  # Nested list of Association models, decoded element-wise by ModelBase.
  field(:associations,
    as:
      GoogleApi.CloudPrivateCatalogProducer.V1beta1.Model.GoogleCloudPrivatecatalogproducerV1beta1Association,
    type: :list
  )

  field(:nextPageToken)
end
defimpl Poison.Decoder,
  for:
    GoogleApi.CloudPrivateCatalogProducer.V1beta1.Model.GoogleCloudPrivatecatalogproducerV1beta1ListAssociationsResponse do
  # Delegate decoding to the model's generated decode/2.
  def decode(value, options),
    do:
      GoogleApi.CloudPrivateCatalogProducer.V1beta1.Model.GoogleCloudPrivatecatalogproducerV1beta1ListAssociationsResponse.decode(
        value,
        options
      )
end
defimpl Poison.Encoder,
  for:
    GoogleApi.CloudPrivateCatalogProducer.V1beta1.Model.GoogleCloudPrivatecatalogproducerV1beta1ListAssociationsResponse do
  # Encoding is shared across all generated models via ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 35.716418 | 221 | 0.760552 |
e85b9cecec6cbe28ae1d1c9419cf3f940e621b30 | 280 | ex | Elixir | lib/oli/repo/sorting.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 45 | 2020-04-17T15:40:27.000Z | 2022-03-25T00:13:30.000Z | lib/oli/repo/sorting.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 944 | 2020-02-13T02:37:01.000Z | 2022-03-31T17:50:07.000Z | lib/oli/repo/sorting.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 23 | 2020-07-28T03:36:13.000Z | 2022-03-17T14:29:02.000Z | defmodule Oli.Repo.Sorting do
@moduledoc """
Params for sorting queries.
"""
@enforce_keys [
:field,
:direction
]
defstruct [
:field,
:direction
]
@type t() :: %__MODULE__{
field: any(),
direction: :asc | :desc
}
end
| 13.333333 | 33 | 0.528571 |
e85bc7d5e3cd23446341615775b5f4816d46b18f | 7,978 | ex | Elixir | lib/cobol_to_elixir/elixirizer.ex | TheFirstAvenger/cobol_to_elixir | 549528aed3dd1de8a14bb3a763539b5cbf714811 | [
"MIT"
] | 65 | 2021-08-10T19:16:25.000Z | 2022-02-23T01:38:13.000Z | lib/cobol_to_elixir/elixirizer.ex | TheFirstAvenger/cobol_to_elixir | 549528aed3dd1de8a14bb3a763539b5cbf714811 | [
"MIT"
] | 1 | 2021-08-15T21:59:54.000Z | 2021-08-20T20:30:06.000Z | lib/cobol_to_elixir/elixirizer.ex | TheFirstAvenger/cobol_to_elixir | 549528aed3dd1de8a14bb3a763539b5cbf714811 | [
"MIT"
] | 1 | 2021-08-21T19:53:13.000Z | 2021-08-21T19:53:13.000Z | defmodule CobolToElixir.Elixirizer do
alias CobolToElixir.Parsed
alias CobolToElixir.Parsed.Variable
require Logger
def elixirize(%Parsed{program_id: program_id} = parsed, opts \\ []) do
namespace = Keyword.get(opts, :namespace, "ElixirFromCobol")
io_dir = Keyword.get(opts, :io_dir, "")
accept_via_message = Keyword.get(opts, :accept_via_message, false)
module_name = program_id_to_module_name(program_id)
lines =
[
~s|defmodule #{namespace}.#{module_name} do|,
~s| @moduledoc """|,
~s| author: #{parsed.author || "n/a"}|,
~s| date written: #{parsed.date_written || "n/a"}|,
~s| """|,
~s||,
~s| @io_dir #{inspect(io_dir)}|,
~s||,
~s| def main do|,
~s| try do|,
~s| do_main()|,
~s| catch|,
~s| :stop_run -> :stop_run|,
~s| end|,
~s| end|,
~s||
] ++
do_main_function(parsed) ++
do_paragraphs(parsed) ++
[
~s||,
~s| defp do_accept() do|
] ++
accept_lines(accept_via_message) ++
[
~s| end|,
~s||,
~s/ def accept({:str, _, _} = pic), do: do_accept() |> format(pic)/,
~s||,
~s| def accept(_), do: do_accept()|,
~s||,
~s/ def format(str, {:str, _, length}), do: str |> String.slice(0..(length - 1)) |> String.pad_trailing(length)/,
~s/ def format(int, {:int, _, length}), do: "\#{int}" |> String.slice(0..(length - 1)) |> String.pad_leading(length, "0")/,
~s||,
~s| def display_group_item(group_item = %{}, children_paths, pics) do|,
~s| children_paths|,
~s/ |> Enum.reduce([], fn path, values ->/,
~s| name = List.last(path)|,
~s| pic = pics[name]|,
~s| val = get_in(group_item, tl(path))|,
~s/ [format(val, pic) | values]/,
~s| end)|,
~s/ |> Enum.reverse()/,
~s/ |> Enum.join("")/,
~s| end|,
~s||,
~s|end|
]
{:ok, Enum.join(lines, "\n")}
end
defp accept_lines(false), do: [~s/ "" |> IO.gets() |> String.trim_trailing("\\n")/]
defp accept_lines(true) do
[
~s/ receive do/,
~s/ {:input, input} -> input/,
~s/ end/
]
end
defp program_id_to_module_name(program_id) do
String.capitalize(program_id)
end
def do_main_function(%Parsed{} = parsed) do
[
~s| def do_main do|
] ++
variables(parsed) ++
pics(parsed) ++
procedure(parsed) ++
[
~s| end|
]
end
def do_paragraphs(%Parsed{paragraphs: paragraphs} = parsed) do
Enum.flat_map(paragraphs, &do_paragraph(&1, parsed))
end
defp do_paragraph({name, lines}, parsed) do
[~s||, ~s| def paragraph_#{name} do|] ++ procedure(%Parsed{parsed | procedure: lines}) ++ [~s| end|]
end
def variables(%Parsed{variables: variables}) do
Enum.flat_map(variables, &variable_to_line/1)
end
def pics(%Parsed{variables: []}), do: []
def pics(%Parsed{variable_map: variable_map}) do
pics =
variable_map
|> Enum.map(fn {name, %Variable{pic: pic}} -> {name, pic} end)
|> Enum.reject(&is_nil(elem(&1, 1)))
|> Enum.into(%{})
[~s| pics = #{inspect(pics)}|]
end
def procedure(%Parsed{procedure: procedure} = parsed) do
Enum.flat_map(procedure, &procedure_to_line(&1, parsed))
end
def variable_to_line(%Variable{type: :map, value: value, children: children} = variable) do
[
~s| #{variable_name(variable)} = #{inspect(value)}|,
~s| #{group_child_paths_name(variable)} = #{inspect(children)}|
]
end
def variable_to_line(%Variable{type: :single, pic: pic, value: value} = variable) do
[
~s| # pic: #{pic_str(pic)}|,
~s| #{variable_name(variable)} = #{maybe_parens(value, pic)}|
]
end
defp maybe_parens(val, {:str, _, _}), do: ~s|"#{val}"|
defp maybe_parens(val, _), do: val
defp pic_str(nil), do: "none"
defp pic_str(pic), do: elem(pic, 1)
defp variable_name(%Variable{name: name}), do: variable_name(name)
defp variable_name(name), do: "var_#{name}"
defp group_child_paths_name(%Variable{type: :map, name: name}), do: group_child_paths_name(name)
defp group_child_paths_name(name) when is_binary(name), do: "child_paths_of_#{name}"
defp procedure_to_line({display_type, display}, %Parsed{} = parsed)
when display_type in [:display_no_advancing, :display] do
io_type =
case display_type do
:display -> "puts"
:display_no_advancing -> "write"
end
to_display =
display
|> display_vars_and_strings(parsed)
|> Enum.join(" <> ")
[~s| IO.#{io_type} #{to_display}|]
end
defp procedure_to_line({:accept, var}, %Parsed{variable_map: m}),
do: [~s| #{variable_name(m[var])} = accept(#{inspect(m[var].pic)})|]
defp procedure_to_line({:stop, :run}, _), do: [~s| throw :stop_run|]
defp procedure_to_line({:move_line, commands}, %Parsed{variable_map: m}) do
{var, from} =
commands
|> Enum.reverse()
|> then(fn [var, "TO" | rest] ->
{var, Enum.reverse(rest)}
end)
val =
case from do
[v] -> v
_ -> raise "Move from something other than a single value not currently supported"
end
case Map.fetch!(m, var) do
%Variable{type: :single} ->
[~s| #{variable_name(var)} = format(#{val}, #{inspect(m[var].pic)})|]
%Variable{type: :map_child, value: child_path} ->
path = tl(child_path) ++ [var]
[
~s| #{variable_name(hd(child_path))} = put_in(#{variable_name(hd(child_path))}, #{inspect(path)}, format(#{val}, #{inspect(m[var].pic)}))|
]
end
end
defp procedure_to_line({:perform_paragraph, paragraph_name}, _),
do: [~s| paragraph_#{paragraph_name}()|]
defp procedure_to_line({:perform, {:repeat, x, paragraph_name}}, _),
do: List.duplicate(~s| paragraph_#{paragraph_name}()|, x)
defp procedure_to_line({:open, :output, file_var_name}, %Parsed{} = parsed) do
%{file_name: file_name} = Enum.find(parsed.file_control, &(&1.var_name == file_var_name))
{file_variables, _file_variable_map} = parsed.file_variables[file_var_name]
[~s| current_file = #{file_name}| | Enum.flat_map(file_variables, &variable_to_line/1)]
end
defp procedure_to_line({:write, variable}, _) do
[
~s||,
~s| str_to_write = #{variable_name(variable)}|,
~s/ |> display_group_item(#{group_child_paths_name(variable)}, pics)/,
~s/ |> String.trim_trailing()/,
~s/ |> Kernel.<>("\\n")/,
~s||,
~s| File.write!(Path.join(@io_dir, current_file), str_to_write)|,
~s||
]
end
defp procedure_to_line(:end_write, _), do: []
defp procedure_to_line({:close, _}, _), do: [~s| current_file = nil|]
defp procedure_to_line(other, _) do
# coveralls-ignore-start
Logger.warn("No procedure_to_line for #{inspect(other)}")
[]
# coveralls-ignore-end
end
defp display_vars_and_strings([{:variable, var} | rest], %Parsed{variable_map: variable_map} = parsed) do
vars =
case Map.get(variable_map, var) do
%{type: :map} = variable ->
~s|display_group_item(#{variable_name(variable)}, #{group_child_paths_name(variable)}, pics)|
%{type: :single} ->
variable_name(var)
end
[vars | display_vars_and_strings(rest, parsed)]
end
# THIS HAS TO HANDLE A MAP
defp display_vars_and_strings([{:variable, var} | rest], parsed) do
[variable_name(var) | display_vars_and_strings(rest, parsed)]
end
defp display_vars_and_strings([{:string, str} | rest], parsed),
do: [~s|"#{str}"| | display_vars_and_strings(rest, parsed)]
defp display_vars_and_strings([], _parsed), do: []
end
| 31.533597 | 151 | 0.573076 |
e85bcc3512d97c8660e68de24b3d7ce24672b8c9 | 770 | ex | Elixir | lib/stylexer/tokenizer.ex | alganet/stylistex | b1ceb3b6456f841a03153d58153fc6ed1427a923 | [
"MIT"
] | null | null | null | lib/stylexer/tokenizer.ex | alganet/stylistex | b1ceb3b6456f841a03153d58153fc6ed1427a923 | [
"MIT"
] | null | null | null | lib/stylexer/tokenizer.ex | alganet/stylistex | b1ceb3b6456f841a03153d58153fc6ed1427a923 | [
"MIT"
] | null | null | null | defmodule Stylexer.Tokenizer do
@moduledoc """
Tokenize the CSS definition
"""
@typedoc """
Token definition when consuming CSS with Lexer
"""
@type token :: list({atom(), pos_integer(), char()})
@doc """
Consume the stream with UTF-8 encoding
"""
@spec consume(expression :: binary() | charlist()) :: {:ok, token()} | {:error, term()}
def consume(expression) do
expression =
expression
|> get_expr()
case :lexer.string(expression) do
{:ok, response, _} ->
{:ok, response}
{:error, reason} ->
{:error, reason}
end
end
defp get_expr(expression) when is_list(expression),
do: expression
defp get_expr(expression) when is_binary(expression),
do: expression |> to_charlist()
end
| 22.647059 | 89 | 0.620779 |
e85bcef87825bbd5e10ccbac09eeab991c667f2f | 19,373 | ex | Elixir | lib/elixir/lib/module/types/pattern.ex | hassan/elixir | d002f2fd1093f37b422e424014f1cb0a7e675e79 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/module/types/pattern.ex | hassan/elixir | d002f2fd1093f37b422e424014f1cb0a7e675e79 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/module/types/pattern.ex | hassan/elixir | d002f2fd1093f37b422e424014f1cb0a7e675e79 | [
"Apache-2.0"
] | null | null | null | defmodule Module.Types.Pattern do
@moduledoc false
import Module.Types.{Helpers, Infer}
@doc """
Return the type and typing context of a pattern expression or an error
in case of a typing conflict.

Each clause handles one quoted-AST pattern shape; successful clauses
return `{:ok, type, context}` where `context` may have gained new type
variables or refinements.
"""

# :atom — atoms are singleton types
def of_pattern(atom, _stack, context) when is_atom(atom) do
  {:ok, {:atom, atom}, context}
end

# 12
def of_pattern(literal, _stack, context) when is_integer(literal) do
  {:ok, :integer, context}
end

# 1.2
def of_pattern(literal, _stack, context) when is_float(literal) do
  {:ok, :float, context}
end

# "..."
def of_pattern(literal, _stack, context) when is_binary(literal) do
  {:ok, :binary, context}
end

# <<...>>> — each segment is checked by of_binary/3; the whole is :binary
def of_pattern({:<<>>, _meta, args} = expr, stack, context) do
  stack = push_expr_stack(expr, stack)

  case reduce_ok(args, context, &of_binary(&1, stack, &2)) do
    {:ok, context} -> {:ok, :binary, context}
    {:error, reason} -> {:error, reason}
  end
end

# left | [] — a cons with empty tail types as the head alone
def of_pattern({:|, _meta, [left_expr, []]} = expr, stack, context) do
  stack = push_expr_stack(expr, stack)
  of_pattern(left_expr, stack, context)
end

# left | right — improper or continued cons; the result is the union of
# head type and tail (element) type
def of_pattern({:|, _meta, [left_expr, right_expr]} = expr, stack, context) do
  stack = push_expr_stack(expr, stack)

  case of_pattern(left_expr, stack, context) do
    {:ok, left, context} ->
      case of_pattern(right_expr, stack, context) do
        # Proper tail: union head with the tail's element type
        {:ok, {:list, right}, context} ->
          {:ok, to_union([left, right], context), context}

        # Improper tail: union head with the tail type itself
        {:ok, right, context} ->
          {:ok, to_union([left, right], context), context}

        {:error, reason} ->
          {:error, reason}
      end

    {:error, reason} ->
      {:error, reason}
  end
end

# [] — the empty list has an unconstrained element type
def of_pattern([], _stack, context) do
  {:ok, {:list, :dynamic}, context}
end

# [expr, ...] — element type is the union of all element types
def of_pattern(exprs, stack, context) when is_list(exprs) do
  stack = push_expr_stack(exprs, stack)

  case map_reduce_ok(exprs, context, &of_pattern(&1, stack, &2)) do
    {:ok, types, context} -> {:ok, {:list, to_union(types, context)}, context}
    {:error, reason} -> {:error, reason}
  end
end

# left ++ right — list concatenation in a pattern; both sides contribute
# to the element union. A non-list right side makes the list improper.
def of_pattern(
      {{:., _meta1, [:erlang, :++]}, _meta2, [left_expr, right_expr]} = expr,
      stack,
      context
    ) do
  stack = push_expr_stack(expr, stack)

  case of_pattern(left_expr, stack, context) do
    {:ok, {:list, left}, context} ->
      case of_pattern(right_expr, stack, context) do
        {:ok, {:list, right}, context} ->
          {:ok, {:list, to_union([left, right], context)}, context}

        {:ok, right, context} ->
          {:ok, {:list, to_union([left, right], context)}, context}

        {:error, reason} ->
          {:error, reason}
      end

    {:error, reason} ->
      {:error, reason}
  end
end

# _ — matches anything without binding
def of_pattern({:_, _meta, atom}, _stack, context) when is_atom(atom) do
  {:ok, :dynamic, context}
end

# var — introduces a fresh type variable into the context
def of_pattern(var, _stack, context) when is_var(var) do
  {type, context} = new_var(var, context)
  {:ok, type, context}
end

# {left, right} — two-element tuples are not wrapped in :{} by the quoter,
# so normalize to the general tuple clause
def of_pattern({left, right}, stack, context) do
  of_pattern({:{}, [], [left, right]}, stack, context)
end

# {...}
def of_pattern({:{}, _meta, exprs} = expr, stack, context) do
  stack = push_expr_stack(expr, stack)

  case map_reduce_ok(exprs, context, &of_pattern(&1, stack, &2)) do
    {:ok, types, context} -> {:ok, {:tuple, types}, context}
    {:error, reason} -> {:error, reason}
  end
end

# left = right — both sides must unify to the same type
def of_pattern({:=, _meta, [left_expr, right_expr]} = expr, stack, context) do
  stack = push_expr_stack(expr, stack)

  with {:ok, left_type, context} <- of_pattern(left_expr, stack, context),
       {:ok, right_type, context} <- of_pattern(right_expr, stack, context),
       do: unify(left_type, right_type, stack, context)
end

# %{...} — duplicate keys collapse into unions (see pairs_to_unions)
def of_pattern({:%{}, _meta, args} = expr, stack, context) do
  stack = push_expr_stack(expr, stack)

  case of_pairs(args, stack, context) do
    {:ok, pairs, context} -> {:ok, {:map, pairs_to_unions(pairs, context)}, context}
    {:error, reason} -> {:error, reason}
  end
end

# %_{...} — any struct: __struct__ is constrained to :atom only
def of_pattern(
      {:%, _meta1, [{:_, _meta2, var_context}, {:%{}, _meta3, args}]} = expr,
      stack,
      context
    )
    when is_atom(var_context) do
  stack = push_expr_stack(expr, stack)

  case of_pairs(args, stack, context) do
    {:ok, pairs, context} ->
      pairs = [{{:atom, :__struct__}, :atom} | pairs]
      {:ok, {:map, pairs}, context}

    {:error, reason} ->
      {:error, reason}
  end
end

# %var{...} — the struct name variable must unify with :atom
def of_pattern({:%, _meta1, [var, {:%{}, _meta2, args}]} = expr, stack, context)
    when is_var(var) do
  stack = push_expr_stack(expr, stack)

  with {:ok, pairs, context} <- of_pairs(args, stack, context),
       {var_type, context} = new_var(var, context),
       {:ok, _, context} <- unify(var_type, :atom, stack, context) do
    pairs = [{{:atom, :__struct__}, var_type} | pairs]
    {:ok, {:map, pairs}, context}
  end
end

# %Struct{...} — __struct__ is the literal module atom
def of_pattern({:%, _meta1, [module, {:%{}, _meta2, args}]} = expr, stack, context)
    when is_atom(module) do
  stack = push_expr_stack(expr, stack)

  case of_pairs(args, stack, context) do
    {:ok, pairs, context} ->
      pairs = [{{:atom, :__struct__}, {:atom, module}} | pairs]
      {:ok, {:map, pairs}, context}

    {:error, reason} ->
      {:error, reason}
  end
end
# Types every {key, value} pair of a map pattern, threading the context.
defp of_pairs(pairs, stack, context) do
  map_reduce_ok(pairs, context, fn {key_expr, value_expr}, acc ->
    case of_pattern(key_expr, stack, acc) do
      {:ok, key_type, acc} ->
        case of_pattern(value_expr, stack, acc) do
          {:ok, value_type, acc} -> {:ok, {key_type, value_type}, acc}
          {:error, reason} -> {:error, reason}
        end

      {:error, reason} ->
        {:error, reason}
    end
  end)
end
# Folds a list of `{key_type, value_type}` pairs into a keyed list where a
# repeated key accumulates a union of all its value types.
defp pairs_to_unions(pairs, context) do
  # Maps only allow simple literal keys in patterns so
  # we do not have to do subtype checking
  Enum.reduce(pairs, [], fn {key, value}, pairs ->
    case :lists.keyfind(key, 1, pairs) do
      # Key already holds a union: extend it with this value
      {^key, {:union, union}} ->
        :lists.keystore(key, 1, pairs, {key, to_union([value | union], context)})

      # Key seen once before: merge both values into a union
      {^key, original_value} ->
        :lists.keystore(key, 1, pairs, {key, to_union([value, original_value], context)})

      # :lists.keyfind/3 returns false (not nil) when the key is absent
      false ->
        [{key, value} | pairs]
    end
  end)
end
## GUARDS

# TODO: Some guards can be changed to intersection types or higher order types

# Signatures for guard-safe Erlang BIFs: {name, arity} => {param_types, return}.
# Looked up by guard_signature/2 when typing guard expressions.
@guard_functions %{
  {:is_atom, 1} => {[:atom], :boolean},
  {:is_binary, 1} => {[:binary], :boolean},
  {:is_bitstring, 1} => {[:binary], :boolean},
  {:is_boolean, 1} => {[:boolean], :boolean},
  {:is_float, 1} => {[:float], :boolean},
  {:is_function, 1} => {[:fun], :boolean},
  {:is_function, 2} => {[:fun, :integer], :boolean},
  {:is_integer, 1} => {[:integer], :boolean},
  {:is_list, 1} => {[{:list, :dynamic}], :boolean},
  {:is_map, 1} => {[{:map, []}], :boolean},
  {:is_map_key, 2} => {[:dynamic, {:map, []}], :dynamic},
  {:is_number, 1} => {[:number], :boolean},
  {:is_pid, 1} => {[:pid], :boolean},
  {:is_port, 1} => {[:port], :boolean},
  {:is_reference, 1} => {[:reference], :boolean},
  {:is_tuple, 1} => {[:tuple], :boolean},
  {:<, 2} => {[:dynamic, :dynamic], :boolean},
  {:"=<", 2} => {[:dynamic, :dynamic], :boolean},
  {:>, 2} => {[:dynamic, :dynamic], :boolean},
  {:>=, 2} => {[:dynamic, :dynamic], :boolean},
  {:"/=", 2} => {[:dynamic, :dynamic], :boolean},
  {:"=/=", 2} => {[:dynamic, :dynamic], :boolean},
  {:==, 2} => {[:dynamic, :dynamic], :boolean},
  {:"=:=", 2} => {[:dynamic, :dynamic], :boolean},
  {:*, 2} => {[:number, :number], :number},
  {:+, 1} => {[:number], :number},
  {:+, 2} => {[:number, :number], :number},
  {:-, 1} => {[:number], :number},
  {:-, 2} => {[:number, :number], :number},
  {:/, 2} => {[:number, :number], :number},
  {:abs, 1} => {[:number], :number},
  {:ceil, 1} => {[:number], :integer},
  {:floor, 1} => {[:number], :integer},
  {:round, 1} => {[:number], :integer},
  {:trunc, 1} => {[:number], :integer},
  {:element, 2} => {[:integer, :tuple], :dynamic},
  {:hd, 1} => {[{:list, :dynamic}], :dynamic},
  {:length, 1} => {[{:list, :dynamic}], :integer},
  {:map_get, 2} => {[:dynamic, {:map, []}], :dynamic},
  {:map_size, 1} => {[{:map, []}], :integer},
  {:tl, 1} => {[{:list, :dynamic}], :dynamic},
  {:tuple_size, 1} => {[:tuple], :integer},
  {:node, 1} => {[{:union, [:pid, :reference, :port]}], :atom},
  {:binary_part, 3} => {[:binary, :integer, :integer], :binary},
  {:bit_size, 1} => {[:binary], :integer},
  {:byte_size, 1} => {[:binary], :integer},
  {:div, 2} => {[:integer, :integer], :integer},
  {:rem, 2} => {[:integer, :integer], :integer},
  {:node, 0} => {[], :atom},
  {:self, 0} => {[], :pid},
  {:bnot, 1} => {[:integer], :integer},
  {:band, 2} => {[:integer, :integer], :integer},
  {:bor, 2} => {[:integer, :integer], :integer},
  {:bxor, 2} => {[:integer, :integer], :integer},
  {:bsl, 2} => {[:integer, :integer], :integer},
  {:bsr, 2} => {[:integer, :integer], :integer},
  {:xor, 2} => {[:boolean, :boolean], :boolean},
  {:not, 1} => {[:boolean], :boolean}

  # Following guards are matched explicitly to handle
  # type guard functions such as is_atom/1
  # {:andalso, 2} => {[:boolean, :boolean], :boolean}
  # {:orelse, 2} => {[:boolean, :boolean], :boolean}
}

# Guards that refine the type of their (variable) argument, e.g.
# `is_integer(x)` narrows x to :integer. Checked by type_guard?/1.
@type_guards [
  :is_atom,
  :is_binary,
  :is_bitstring,
  :is_boolean,
  :is_float,
  :is_function,
  :is_function,
  :is_integer,
  :is_list,
  :is_map,
  :is_number,
  :is_pid,
  :is_port,
  :is_reference,
  :is_tuple
]
@doc """
Refines the type variables in the typing context using type check guards
such as `is_integer/1`.
"""
def of_guard({{:., _, [:erlang, :andalso]}, _, [left, right]} = expr, stack, context) do
stack = push_expr_stack(expr, stack)
fresh_context = fresh_context(context)
with {:ok, left_type, left_context} <- of_guard(left, stack, fresh_context),
{:ok, right_type, right_context} <- of_guard(right, stack, fresh_context),
{:ok, context} <- merge_context_and(context, stack, left_context, right_context),
{:ok, _, context} <- unify(left_type, :boolean, stack, context),
{:ok, _, context} <- unify(right_type, :boolean, stack, context),
do: {:ok, :boolean, context}
end
def of_guard({{:., _, [:erlang, :orelse]}, _, [left, right]} = expr, stack, context) do
stack = push_expr_stack(expr, stack)
fresh_context = fresh_context(context)
with {:ok, left_type, left_context} <- of_guard(left, stack, fresh_context),
{:ok, right_type, right_context} <- of_guard(right, stack, fresh_context),
{:ok, context} <- merge_context_or(context, stack, left_context, right_context),
{:ok, _, context} <- unify(left_type, :boolean, stack, context),
{:ok, _, context} <- unify(right_type, :boolean, stack, context),
do: {:ok, :boolean, context}
end
def of_guard({{:., _, [:erlang, guard]}, _, args} = expr, stack, context) do
stack = push_expr_stack(expr, stack)
{param_types, return_type} = guard_signature(guard, length(args))
type_guard? = type_guard?(guard)
# Only check type guards in the context of and/or/not,
# a type guard in the context of is_tuple(x) > :foo
# should not affect the inference of x
if not type_guard? or stack.type_guards_enabled? do
arg_stack = %{stack | type_guards_enabled?: type_guard?}
with {:ok, arg_types, context} <-
map_reduce_ok(args, context, &of_guard(&1, arg_stack, &2)),
{:ok, context} <- unify_call(param_types, arg_types, stack, context) do
{arg_types, guard_sources} =
case arg_types do
[{:var, index} | rest_arg_types] when type_guard? ->
guard_sources =
Map.update(context.guard_sources, index, [:guarded], &[:guarded | &1])
{rest_arg_types, guard_sources}
_ ->
{arg_types, context.guard_sources}
end
guard_sources =
Enum.reduce(arg_types, guard_sources, fn
{:var, index}, guard_sources ->
Map.update(guard_sources, index, [:fail], &[:fail | &1])
_, guard_sources ->
guard_sources
end)
{:ok, return_type, %{context | guard_sources: guard_sources}}
end
else
{:ok, return_type, context}
end
end
def of_guard(var, _stack, context) when is_var(var) do
type = Map.fetch!(context.vars, var_name(var))
{:ok, type, context}
end
# Catch-all clause for any expression that is not a guard call or variable.
def of_guard(expr, stack, context) do
  # Fall back to of_pattern/3 for literals
  of_pattern(expr, stack, context)
end
# Returns a copy of `context` in which every known type variable is reset to
# :unbound and its trace list emptied, while keeping the same variable set.
# Used so each branch of or/orelse starts inference from a clean slate.
defp fresh_context(context) do
  reset_types = for {var, _type} <- context.types, into: %{}, do: {var, :unbound}
  reset_traces = for {var, _trace} <- context.traces, into: %{}, do: {var, []}
  %{context | types: reset_types, traces: reset_traces}
end
# Unifies each actual argument type against the corresponding declared
# parameter type, threading the context through and stopping at the first
# {:error, reason}.
defp unify_call(params, args, stack, context) do
  params
  |> Enum.zip(args)
  |> reduce_ok(context, fn {param_type, arg_type}, acc ->
    # `with` forwards {:error, reason} from unify/4 unchanged.
    with {:ok, _type, acc} <- unify(param_type, arg_type, stack, acc), do: {:ok, acc}
  end)
end
# Merges the branch contexts produced by the operands of `and`: the type
# information learned on both sides is unified back into the original
# context (both must hold), and the guard sources are combined with AND
# semantics before being merged into the context's own sources.
defp merge_context_and(context, stack, left, right) do
  with {:ok, context} <- unify_new_types(context, stack, left),
       {:ok, context} <- unify_new_types(context, stack, right) do
    guard_sources = and_guard_sources(left.guard_sources, right.guard_sources)
    guard_sources = merge_guard_sources([context.guard_sources, guard_sources])
    {:ok, %{context | guard_sources: guard_sources}}
  end
end
# Folds the types learned in `new_context` back into `context` by unifying
# each bound variable; :unbound entries carry no new information and are
# skipped. Tracing is disabled (trace: false) because the branch's traces
# were already merged wholesale by merge_traces/2.
defp unify_new_types(context, stack, new_context) do
  context = merge_traces(context, new_context)
  reduce_ok(Map.to_list(new_context.types), context, fn
    {_index, :unbound}, context ->
      {:ok, context}
    {index, new_type}, context ->
      case unify({:var, index}, new_type, %{stack | trace: false}, context) do
        {:ok, _, context} ->
          {:ok, context}
        {:error, reason} ->
          {:error, reason}
      end
  end)
end
# Merges a list of guard-source maps (var index => list of sources),
# resolving per-variable conflicts with join_guard_source/2.
defp merge_guard_sources(sources) do
  Enum.reduce(sources, fn left, right ->
    Map.merge(left, right, fn _index, left, right -> join_guard_source(left, right) end)
  end)
end
# Collapses two source lists for the same variable into a single-tag list,
# picking the highest-priority tag present: :fail beats :guarded_fail beats
# :guarded; no recognized tag yields [].
defp join_guard_source(left, right) do
  combined = left ++ right
  Enum.find_value([:fail, :guarded_fail, :guarded], [], fn tag ->
    if tag in combined, do: [tag]
  end)
end
# Combines guard sources for `left and right`. If the left side type-guards
# a variable (:guarded) and the right side can fail on it (:fail), the
# failure is considered safe because the type check runs first; it is
# recorded as :guarded_fail instead of a plain :fail.
defp and_guard_sources(left, right) do
  Map.merge(left, right, fn _index, left, right ->
    # When the failing guard function won't fail due to the type check
    # function before it, for example: is_list(x) and length(x)
    if :guarded in left and :fail in right do
      [:guarded_fail]
    else
      join_guard_source(left, right)
    end
  end)
end
# Prepends the traces collected in `new_context` onto the matching entries
# in `context`; variables only present in `new_context` are added as-is.
defp merge_traces(context, new_context) do
  merged =
    Enum.reduce(new_context.traces, context.traces, fn {index, new_traces}, acc ->
      # Same semantics as :maps.update_with/4: append-to-front when the key
      # exists, insert the new traces otherwise.
      Map.update(acc, index, new_traces, &(new_traces ++ &1))
    end)
  %{context | traces: merged}
end
# Merges the branch contexts produced by the operands of `or`/`orelse`.
# Since only one branch is guaranteed to hold, a variable's type may only be
# refined when the information from both branches can be reconciled.
defp merge_context_or(context, stack, left, right) do
  context =
    case {Map.to_list(left.types), Map.to_list(right.types)} do
      # The repeated `index` in each pattern requires both branches to talk
      # about the same single variable.
      # Exactly one branch learned a type: adopt it directly.
      {[{index, :unbound}], [{index, type}]} ->
        refine_var(index, type, stack, context)
      {[{index, type}], [{index, :unbound}]} ->
        refine_var(index, type, stack, context)
      {[{index, left_type}], [{index, right_type}]} ->
        # Only include right side if left side is from type guard such as is_list(x),
        # do not refine in case of length(x)
        left_guard_sources = Map.get(left.guard_sources, index, [])
        if :fail in left_guard_sources do
          guard_sources = Map.put(context.guard_sources, index, [:fail])
          context = %{context | guard_sources: guard_sources}
          refine_var(index, left_type, stack, context)
        else
          guard_sources =
            merge_guard_sources([
              context.guard_sources,
              left.guard_sources,
              right.guard_sources
            ])
          context = %{context | guard_sources: guard_sources}
          # Both branches contributed: the variable's type is the union.
          refine_var(index, to_union([left_type, right_type], context), stack, context)
        end
      # Multiple variables involved: only keep left-side refinements for
      # variables whose guard may fail, leave everything else untouched.
      {left_types, _right_types} ->
        Enum.reduce(left_types, context, fn {index, left_type}, context ->
          left_guard_sources = Map.get(left.guard_sources, index, [])
          if :fail in left_guard_sources do
            guard_sources =
              merge_guard_sources([
                context.guard_sources,
                left.guard_sources,
                right.guard_sources
              ])
            context = %{context | guard_sources: guard_sources}
            refine_var(index, left_type, stack, context)
          else
            context
          end
        end)
    end
  {:ok, context}
end
# binary-pattern :: specifier
# binary-pattern :: specifier
# Type-checks one `expr::specifier` segment of a bitstring pattern.
defp of_binary({:"::", _meta, [expr, specifiers]} = full_expr, stack, context) do
  # Segments without an explicit type specifier default to integer, matching
  # the runtime semantics of <<x>>.
  {expected_type, utf?} = collect_binary_type(specifiers) || {:integer, false}
  stack = push_expr_stack(full_expr, stack)
  # Special case utf specifiers with binary literals since they allow
  # both integer and binary literals but variables are always integer
  if is_binary(expr) and utf? do
    {:ok, context}
  else
    with {:ok, type, context} <- of_pattern(expr, stack, context),
         {:ok, _type, context} <- unify(type, expected_type, stack, context),
         do: {:ok, context}
  end
end
# binary-pattern
# binary-pattern without a specifier: the inferred type of the segment must
# itself be valid inside a bitstring (integer, float, or binary).
defp of_binary(expr, stack, context) do
  # `with` forwards {:error, reason} from of_pattern/3 unchanged.
  with {:ok, type, new_context} <- of_pattern(expr, stack, context) do
    if type in [:integer, :float, :binary] do
      {:ok, new_context}
    else
      {:error, {:invalid_binary_type, type}}
    end
  end
end
# Collect binary type specifiers,
# from `<<pattern::integer-size(10)>>` collect `integer`
# Collect binary type specifiers,
# from `<<pattern::integer-size(10)>>` collect `integer`.
# Returns {expected_type, utf?} for a recognized specifier, nil otherwise.
defp collect_binary_type({:-, _meta, [left, right]}),
  do: collect_binary_type(left) || collect_binary_type(right)

defp collect_binary_type({specifier, _meta, _args}) do
  case specifier do
    :integer -> {:integer, false}
    :float -> {:float, false}
    spec when spec in [:bits, :bitstring, :bytes, :binary] -> {:binary, false}
    # utf specifiers match integers in patterns (utf? flag set to true)
    spec when spec in [:utf8, :utf16, :utf32] -> {:integer, true}
    _ -> nil
  end
end

defp collect_binary_type(_other), do: nil
# Looks up the {parameter_types, return_type} signature for an Erlang guard
# function from the compile-time @guard_functions table; raises KeyError for
# unknown guards (a compiler bug if reached).
defp guard_signature(name, arity) do
  Map.fetch!(@guard_functions, {name, arity})
end
# True for type-checking guards such as is_list/1 that refine (rather than
# merely use) a variable's type; membership in the @type_guards list.
defp type_guard?(name) do
  name in @type_guards
end
end
| 33.059727 | 91 | 0.579879 |
e85be060ce7a78dd95833ce1c29b51b29330bd38 | 2,197 | ex | Elixir | implementations/elixir/ockam/ockam_services/lib/services/tracing.ex | 0x00A5/ockam | 9710804e20606e70057d65c70d1af7236194aeeb | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam_services/lib/services/tracing.ex | 0x00A5/ockam | 9710804e20606e70057d65c70d1af7236194aeeb | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam_services/lib/services/tracing.ex | 0x00A5/ockam | 9710804e20606e70057d65c70d1af7236194aeeb | [
"Apache-2.0"
] | null | null | null | defmodule Ockam.Services.Tracing do
@moduledoc """
Tracing service.
Create watchers - workers to intercept traffic
Watchers will forward all traffic without changes
but send a copy of the payload to the tracing route.
"""
use Ockam.Worker
alias Ockam.Message
alias Ockam.Router
require Logger
@impl true
def handle_message(message, state) do
Logger.info("TRACING service\nMESSAGE: #{inspect(message)}")
tracing_route = Message.return_route(message)
payload = Message.payload(message)
{:ok, _alias_address} =
__MODULE__.Watcher.create(
tracing_route: tracing_route,
reply: payload
)
{:ok, state}
end
end
defmodule Ockam.Services.Tracing.Watcher do
  @moduledoc """
  Tracing watcher.
  Upon creation sends a message to the tracing_route to communicate its address
  On message will send a copy of the payload to the tracing_route
  """
  use Ockam.Worker

  alias Ockam.Message
  alias Ockam.Router

  require Logger

  # Worker initialization: remembers the tracing route in the state and
  # echoes the original request payload back so the subscriber learns this
  # watcher's address (carried in the return route of the trace message).
  @impl true
  def setup(options, state) do
    Logger.info("Created new watcher for #{inspect(options)}")
    tracing_route = Keyword.fetch!(options, :tracing_route)
    reply = Keyword.fetch!(options, :reply)
    state = Map.put(state, :tracing_route, tracing_route)
    Logger.info("REGISTER OK: #{inspect(reply)}")
    send_trace(reply, state)
    {:ok, state}
  end

  # Forwards the intercepted message unchanged, then sends a copy of its
  # payload to the tracing route.
  @impl true
  def handle_message(message, state) do
    route_further(message, state)
    Logger.info("Tracing #{inspect(message)} to #{inspect(state.tracing_route)}")
    send_trace(Message.payload(message), state)
    {:ok, state}
  end

  # Forwards the message along the rest of its onward route: strips this
  # watcher's own address from the head of the onward route (if present) and
  # appends the address to the return route via Message.trace/2 so replies
  # pass through the watcher as well.
  def route_further(message, %{address: address}) do
    ## TODO: use helpers for those things
    onward_route =
      case Message.onward_route(message) do
        [^address | onward_route] -> onward_route
        onward_route -> onward_route
      end
    req = Message.set_onward_route(message, onward_route) |> Message.trace(address)
    Router.route(req)
  end

  # Sends `payload` to the configured tracing route, using this watcher's
  # address as the return route.
  def send_trace(payload, %{tracing_route: tracing_route, address: address}) do
    Router.route(%{
      onward_route: tracing_route,
      return_route: [address],
      payload: payload
    })
  end
end
e85c08a2052105dae28a0946b9416af3a57b96e5 | 969 | ex | Elixir | lib/game/character/via.ex | shanesveller/ex_venture | 68507da11442a9e0423073fcd305e9021f649ca1 | [
"MIT"
] | null | null | null | lib/game/character/via.ex | shanesveller/ex_venture | 68507da11442a9e0423073fcd305e9021f649ca1 | [
"MIT"
] | null | null | null | lib/game/character/via.ex | shanesveller/ex_venture | 68507da11442a9e0423073fcd305e9021f649ca1 | [
"MIT"
] | null | null | null | defmodule Game.Character.Via do
@moduledoc """
Send to either a player (session) or an NPC
"""
alias Game.Session
@doc """
Find the player or NPC pid
Callback for :via GenServer lookup
"""
@spec whereis_name(any) :: pid
def whereis_name(who)
def whereis_name({:npc, id}) do
:global.whereis_name({Game.NPC, id})
end
def whereis_name({:user, id}) do
player =
Session.Registry.connected_players()
|> Enum.find(&(&1.user.id == id))
case player do
%{pid: pid} -> pid
_ -> :undefined
end
end
@doc """
Callback for :via GenServer lookup
"""
@spec send(any, any) :: :ok
def send(who, message)
def send({:npc, id}, message) do
:global.send({Game.NPC, id}, message)
end
def send({:user, id}, message) do
case whereis_name({:user, id}) do
:undefined ->
{:badarg, {{:user, id}, message}}
pid ->
Kernel.send(pid, message)
pid
end
end
end
| 18.634615 | 45 | 0.587203 |
e85c39b74c813fe9d63b139c4317da353ecfde7d | 2,582 | exs | Elixir | test/test_helper.exs | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null | # Copyright(c) 2015-2018 ACCESS CO., LTD. All rights reserved.
# Boot ExUnit before any test modules are loaded.
ExUnit.start()

defmodule ExecutorPoolHelper do
  @moduledoc false
  # Test helpers for asserting on executor pool settings and for tearing
  # pools (and their async job queues) down between tests.

  import ExUnit.Assertions
  alias Antikythera.Test.{ProcessHelper, GenServerHelper}
  alias AntikytheraCore.ExecutorPool
  alias AntikytheraCore.ExecutorPool.RegisteredName, as: RegName

  # Asserts the current runtime settings for `epool_id`:
  # `na` action runner pools each with `sa` reserved workers, `sj` on-demand
  # async job runners, and a websocket connection limit of `ws`.
  def assert_current_setting(epool_id, na, sa, sj, ws) do
    action_name = RegName.action_runner_pool_multi(epool_id)
    action_pools = Supervisor.which_children(action_name) |> Enum.map(fn {_, pid, _, _} -> pid end)
    assert length(action_pools) == na
    Enum.each(action_pools, fn p ->
      %{reserved: ^sa, ondemand: 0} = PoolSup.status(p)
    end)
    task_name = RegName.async_job_runner_pool(epool_id)
    %{reserved: 0, ondemand: ^sj} = PoolSup.status(task_name)
    ws_counter_name = RegName.websocket_connections_counter(epool_id)
    %{max: ^ws} = :sys.get_state(ws_counter_name)
  end

  def hurry_action_pool_multi(epool_id) do
    # Send internal message to `PoolSup.Multi` so that it terminates child pools
    action_name = RegName.action_runner_pool_multi(epool_id)
    GenServerHelper.send_message_and_wait(action_name, :check_progress_of_termination)
  end

  # Polls (100ms intervals, up to `remaining_tries` attempts) until the Raft
  # consensus group backing the async job queue reports 3 members and its
  # leader process is locally registered; flunks the test on timeout.
  def wait_until_async_job_queue_added(epool_id, remaining_tries \\ 10) do
    if remaining_tries == 0 do
      flunk "job queue for #{inspect(epool_id)} is not added!"
    else
      :timer.sleep(100)
      queue_name = RegName.async_job_queue(epool_id)
      if match?(%{^queue_name => 3}, RaftFleet.consensus_groups()) && Process.whereis(queue_name) do
        :ok
      else
        wait_until_async_job_queue_added(epool_id, remaining_tries - 1)
      end
    end
  end

  # Convenience variant that kills the pool via ExecutorPool.kill_executor_pool/1.
  def kill_and_wait(epool_id) do
    kill_and_wait(epool_id, fn ->
      assert ExecutorPool.kill_executor_pool(epool_id) == :ok
    end)
  end

  # Runs `kill_fun`, then waits for the async job queue process to die and
  # removes its Raft consensus group and persisted snapshot/log files so the
  # next test starts from a clean slate.
  def kill_and_wait(epool_id, kill_fun) do
    queue_name = RegName.async_job_queue(epool_id)
    queue_pid = Process.whereis(queue_name)
    assert queue_pid
    kill_fun.()
    case epool_id do
      {:gear, _} -> assert RaftFleet.remove_consensus_group(queue_name) == :ok
      {:tenant, _} -> :timer.sleep(150) # job queue is removed asynchronously; wait until it's removed before sending `:adjust_members`
    end
    send(RaftFleet.Manager, :adjust_members) # accelerate termination of consensus member process
    ProcessHelper.monitor_wait(queue_pid)
    # discard all generated snapshot & log files of async job queue
    persist_dir = Path.join(AntikytheraCore.Path.raft_persistence_dir_parent(), Atom.to_string(queue_name))
    File.rm_rf!(persist_dir)
  end
end
| 37.970588 | 135 | 0.732378 |
e85c4c7c3c6b32129311ac27e722dfb6438b8ca6 | 3,710 | ex | Elixir | clients/ad_experience_report/lib/google_api/ad_experience_report/v1/api/violating_sites.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/ad_experience_report/lib/google_api/ad_experience_report/v1/api/violating_sites.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/ad_experience_report/lib/google_api/ad_experience_report/v1/api/violating_sites.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdExperienceReport.V1.Api.ViolatingSites do
  @moduledoc """
  API calls for all endpoints tagged `ViolatingSites`.
  """

  alias GoogleApi.AdExperienceReport.V1.Connection
  alias GoogleApi.Gax.{Request, Response}

  # Client library version, baked in at compile time from mix.exs and sent
  # with each request for telemetry.
  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  @doc """
  Lists sites that are failing in the Ad Experience Report on at least one
  platform.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.AdExperienceReport.V1.Connection.t`) - Connection to server
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
      *   `:access_token` (*type:* `String.t`) - OAuth access token.
      *   `:alt` (*type:* `String.t`) - Data format for response.
      *   `:callback` (*type:* `String.t`) - JSONP
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
      *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
      *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.AdExperienceReport.V1.Model.ViolatingSitesResponse{}}` on success
  *   `{:error, info}` on failure
  """
  @spec adexperiencereport_violating_sites_list(Tesla.Env.client(), keyword(), keyword()) ::
          {:ok, GoogleApi.AdExperienceReport.V1.Model.ViolatingSitesResponse.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def adexperiencereport_violating_sites_list(connection, optional_params \\ [], opts \\ []) do
    # Maps each recognized option to its request location (all query params
    # for this endpoint); unknown options are dropped by
    # Request.add_optional_params/3.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/v1/violatingSites", %{})
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    # Decode the JSON body into the generated response struct.
    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.AdExperienceReport.V1.Model.ViolatingSitesResponse{}]
    )
  end
end
| 42.643678 | 196 | 0.662264 |
e85c5dcab0438d6116c255a13bb68f753ac6c4a0 | 1,605 | ex | Elixir | lib/radiator/directory/audio.ex | bhtabor/radiator | 39c137a18d36d6f418f9d1ffb7aa2c99011d66cf | [
"MIT"
] | 92 | 2019-01-03T11:46:23.000Z | 2022-02-19T21:28:44.000Z | lib/radiator/directory/audio.ex | bhtabor/radiator | 39c137a18d36d6f418f9d1ffb7aa2c99011d66cf | [
"MIT"
] | 350 | 2019-04-11T07:55:51.000Z | 2021-08-03T11:19:05.000Z | lib/radiator/directory/audio.ex | bhtabor/radiator | 39c137a18d36d6f418f9d1ffb7aa2c99011d66cf | [
"MIT"
] | 10 | 2019-04-18T12:47:27.000Z | 2022-01-25T20:49:15.000Z | defmodule Radiator.Directory.Audio do
@moduledoc """
Audio Meta Object.
An Audio contains all data required to generate a web player: file references
and audio metadata.
An Audio belongs to one or many episodes, or stand on its own in a network via AudioPublication.
"""
use Ecto.Schema
import Ecto.Changeset
import Arc.Ecto.Changeset
import Ecto.Query, warn: false
alias __MODULE__
alias Radiator.Media
alias Radiator.AudioMeta.Chapter
alias Radiator.Contribution
alias Radiator.Directory.{
Episode,
Podcast,
AudioPublication
}
schema "audios" do
field :duration, :integer
field :image, Media.AudioImage.Type
has_many :episodes, Episode
has_one :audio_publication, AudioPublication
has_many :contributions, Contribution.AudioContribution
has_many :contributors, through: [:contributions, :person]
has_many :audio_files, Media.AudioFile
has_many :chapters, Chapter
timestamps()
end
@doc false
def changeset(audio, attrs) do
audio
|> cast(attrs, [:duration])
|> cast_attachments(attrs, [:image], allow_paths: true, allow_urls: true)
end
@doc """
Convenience accessor for image URL.
Use `podcast: podcast` to get podcast image if there is no audio image
"""
def image_url(audio = %Audio{}, opts \\ []) do
with url when is_binary(url) <- Media.AudioImage.url({audio.image, audio}) do
url
else
_ ->
case opts[:podcast] do
podcast = %Podcast{} ->
Podcast.image_url(podcast)
_ ->
nil
end
end
end
end
| 23.26087 | 98 | 0.676636 |
e85c8bce6b098d3242d080154118fd58ac198e3e | 11,047 | ex | Elixir | apps/core/lib/core/services/recipes.ex | michaeljguarino/forge | 50ee583ecb4aad5dee4ef08fce29a8eaed1a0824 | [
"Apache-2.0"
] | 59 | 2021-09-16T19:29:39.000Z | 2022-03-31T20:44:24.000Z | apps/core/lib/core/services/recipes.ex | michaeljguarino/forge | 50ee583ecb4aad5dee4ef08fce29a8eaed1a0824 | [
"Apache-2.0"
] | 111 | 2021-08-15T09:56:37.000Z | 2022-03-31T23:59:32.000Z | apps/core/lib/core/services/recipes.ex | michaeljguarino/chartmart | a34c949cc29d6a1ab91c04c5e4f797e6f0daabfc | [
"Apache-2.0"
] | 4 | 2021-12-13T09:43:01.000Z | 2022-03-29T18:08:44.000Z | defmodule Core.Services.Recipes do
use Core.Services.Base
import Core.Policies.Recipe
alias Core.Schema.{Recipe, RecipeSection, RecipeItem, Installation, Terraform, Chart, User}
alias Core.Services.{Repositories, Charts, Versions}
alias Core.Services.Terraform, as: TfSvc
@spec get!(binary) :: Recipe.t
def get!(recipe_id), do: Core.Repo.get!(Recipe, recipe_id)
@spec get_by_name(binary, binary) :: Recipe.t | nil
def get_by_name(name, repo_id),
do: Core.Repo.get_by(Recipe, name: name, repository_id: repo_id)
@spec get_by_name!(binary, binary) :: Recipe.t
def get_by_name!(name, repo_id),
do: Core.Repo.get_by!(Recipe, name: name, repository_id: repo_id)
@doc """
Will persist the given recipe for repository `repository_id`
Fails if the user is not the publisher
"""
@spec create(map, binary, User.t) :: {:ok, Recipe.t} | {:error, term}
def create(%{sections: sections} = attrs, repository_id, user) do
start_transaction()
|> add_operation(:recipe, fn _ ->
%Recipe{repository_id: repository_id, id: attrs[:id]}
|> Recipe.changeset(build_dependencies(attrs))
|> allow(user, :edit)
|> when_ok(:insert)
end)
|> build_sections(sections)
|> execute(extract: :recipe)
|> when_ok(&hydrate/1)
end
@doc """
Deletes the recipe. Fails if the user is not the publisher
"""
@spec delete(binary, User.t) :: {:ok, Recipe.t} | {:error, term}
def delete(id, user) do
get!(id)
|> allow(user, :edit)
|> when_ok(:delete)
end
@doc """
Evaluates each section of the recipe, given the provided context, and installs
the repository for each section and any chart/terraform items per section. Returns
the created installations.
"""
@spec install(Recipe.t | binary, map, User.t) :: {:ok, [Installation.t]} | {:error, term}
def install(%Recipe{} = recipe, context, user) do
with {:ok, user} <- register_provider(recipe, user) do
hydrate(recipe)
|> Map.get(:recipe_sections)
|> Enum.reduce(start_transaction(), fn %{id: id, repository_id: repo_id} = section, acc ->
add_operation(acc, id, fn _ ->
installation_ctx = Map.get(context, repo_id, %{})
case Repositories.get_installation(user.id, repo_id) do
%Installation{id: id, context: ctx} ->
Repositories.update_installation(%{context: Map.merge(ctx || %{}, installation_ctx)}, id, user)
_ -> Repositories.create_installation(%{context: installation_ctx}, repo_id, user)
end
end)
|> install_items(section.recipe_items, id, user)
end)
|> execute()
|> case do
{:ok, result} ->
Enum.map(result, fn
{_, %Installation{} = inst} -> inst
_ -> nil
end)
|> Enum.filter(& &1)
|> ok()
error -> error
end
end
end
def install(recipe_id, ctx, user) when is_binary(recipe_id),
do: get!(recipe_id) |> install(ctx, user)
def register_provider(%Recipe{provider: prov}, %User{provider: nil} = user) when prov in ~w(gcp aws azure)a do
Ecto.Changeset.change(user, %{provider: prov})
|> Core.Repo.update()
|> case do
{:ok, user} -> handle_notify(Core.PubSub.UserUpdated, user)
error -> error
end
end
def register_provider(_, user), do: {:ok, user}
@doc """
Either creates a new recipe, or deletes the entire old recipe and
recreates it.
"""
@spec upsert(map, binary, User.t) :: {:ok, Recipe.t} | {:error, term}
def upsert(%{name: name} = attrs, repo_id, user) do
start_transaction()
|> add_operation(:wipe, fn _ ->
case get_by_name(name, repo_id) do
%Recipe{} = recipe -> Core.Repo.delete(recipe)
_ -> {:ok, %{id: nil}}
end
end)
|> add_operation(:create, fn %{wipe: %{id: id}} ->
Map.put(attrs, :id, id)
|> create(repo_id, user)
end)
|> execute(extract: :create)
end
@recipe_preloads [recipe_sections: [:repository, [recipe_items: [:terraform, :chart]]]]
@preloads [dependencies: [dependent_recipe: @recipe_preloads]] ++ @recipe_preloads
@doc """
Preloads a recipe, and properly topsorts the sections according to their
dependency maps.
"""
@spec hydrate(Recipe.t) :: Recipe.t
def hydrate(%Recipe{} = recipe) do
dependencies = resolve_dependencies([], recipe)
recursive_sections = Enum.flat_map(dependencies, & &1.recipe_sections || [])
deduped = Enum.reduce(recursive_sections, %{}, fn %{repository_id: repository_id, recipe_items: items} = section, acc ->
case Map.get(acc, repository_id) do
%{recipe_items: moar} = section ->
Map.put(acc, repository_id, %{section | recipe_items: items ++ moar})
_ -> Map.put(acc, repository_id, section)
end
end)
sections =
Map.values(deduped)
|> topsort_sections()
|> Enum.map(fn %{recipe_items: items} = section ->
%{section | recipe_items: topsort(Enum.dedup_by(items, &as_node/1))}
end)
|> Enum.map(&fix_configuration/1)
dependencies =
Enum.reject(dependencies, & &1.id == recipe.id)
|> Enum.uniq_by(& &1.id)
recipe
|> Map.put(:recipe_sections, sections)
|> Map.put(:recipe_dependencies, dependencies)
end
def hydrate(nil), do: nil
defp fix_configuration(%RecipeSection{configuration: conf, recipe_items: items} = section) do
configuration =
Enum.flat_map(items, & (&1.configuration || []))
|> Enum.concat(conf || [])
|> Enum.dedup_by(& &1.name)
%{section | configuration: configuration}
end
defp resolve_dependencies(prev, %Recipe{} = recipe) do
case Core.Repo.preload(recipe, @preloads) do
%{dependencies: [_ | _] = deps} = recipe ->
Enum.flat_map(deps, &resolve_dependencies([], &1.dependent_recipe))
|> Enum.concat([recipe | prev])
recipe -> [recipe | prev]
end
end
defp install_items(transaction, items, id, user) do
Enum.reduce(items, transaction, fn item, acc ->
add_operation(acc, item.id, fn %{^id => installation} ->
case item do
%{terraform: %Terraform{} = tf} -> upsert_terraform(tf, installation, user)
%{chart: %Chart{} = chart} -> upsert_chart(chart, installation, user)
end
end)
end)
end
defp upsert_terraform(%Terraform{id: id, latest_version: v}, installation, %User{id: uid} = user) do
version = Versions.get_version(:terraform, id, v)
case TfSvc.get_terraform_installation(id, uid) do
%{id: id} -> TfSvc.update_terraform_installation(%{version_id: version.id}, id, user)
_ -> TfSvc.create_terraform_installation(%{terraform_id: id, version_id: version.id}, installation.id, user)
end
end
defp upsert_chart(%Chart{id: id, latest_version: v}, installation, %User{id: uid} = user) do
version = Charts.get_chart_version(id, v)
case Charts.get_chart_installation(id, uid) do
%{id: cinst_id} -> Charts.update_chart_installation(%{version_id: version.id}, cinst_id, user)
_ -> Charts.create_chart_installation(%{chart_id: id, version_id: version.id}, installation.id, user)
end
end
defp topsort_sections(sections) do
graph = :digraph.new()
by_name = Enum.into(sections, %{}, & {&1.repository.name, &1})
try do
Enum.each(sections, fn %{repository: %{name: name}} ->
:digraph.add_vertex(graph, name)
end)
Enum.reduce(sections, MapSet.new(), fn %{recipe_items: items, repository: %{name: out_repo}}, seen ->
Enum.flat_map(items, &dependencies/1)
|> Enum.map(fn %{repo: in_repo} -> {in_repo, out_repo} end)
|> Enum.reject(&MapSet.member?(seen, &1))
|> Enum.into(seen, fn {in_repo, out_repo} = pair ->
:digraph.add_edge(graph, in_repo, out_repo)
pair
end)
end)
if sorted = :digraph_utils.topsort(graph) do
Enum.map(sorted, &Map.get(by_name, &1))
else
raise ArgumentError, message: "dependency cycle detected"
end
after
:digraph.delete(graph)
end
end
defp topsort(recipe_items) do
graph = :digraph.new()
nodes = Enum.into(recipe_items, %{}, fn item -> {as_node(item), item} end)
try do
Enum.each(recipe_items, fn item ->
:digraph.add_vertex(graph, as_node(item))
end)
Enum.map(recipe_items, fn item -> {item, dependencies(item)} end)
|> Enum.each(fn {item, deps} ->
deps
|> Enum.filter(&Map.has_key?(nodes, {&1.type, &1.name}))
|> Enum.each(fn dep ->
:digraph.add_edge(graph, {dep.type, dep.name}, as_node(item))
end)
end)
if sorted = :digraph_utils.topsort(graph) do
Enum.map(sorted, &Map.get(nodes, &1))
else
raise ArgumentError, message: "dependency cycle detected"
end
after
:digraph.delete(graph)
end
end
defp as_node(%RecipeItem{terraform: %Terraform{name: name}}), do: {:terraform, name}
defp as_node(%RecipeItem{chart: %Chart{name: name}}), do: {:helm, name}
defp dependencies(%RecipeItem{terraform: %Terraform{dependencies: %{dependencies: deps}}}),
do: deps
defp dependencies(%RecipeItem{chart: %Chart{dependencies: %{dependencies: deps}}}),
do: deps
defp dependencies(_), do: []
defp build_sections(transaction, sections) when is_list(sections) do
sections
|> Enum.with_index()
|> Enum.reduce(transaction, fn {%{name: repo_name, items: items} = attrs, ind}, acc ->
repo = Repositories.get_repository_by_name!(repo_name)
add_operation(acc, {:section, repo_name}, fn %{recipe: %{id: id}} ->
attrs = Map.drop(attrs, [:name, :items])
%RecipeSection{recipe_id: id}
|> RecipeSection.changeset(Map.merge(attrs, %{repository_id: repo.id, index: ind}))
|> Core.Repo.insert()
end)
|> build_items({:section, repo_name}, repo, items)
end)
end
defp build_dependencies(%{dependencies: [_ | _] = deps} = attrs) do
dependencies =
Enum.with_index(deps)
|> Enum.map(fn {%{repo: repo, name: recipe}, ind} ->
repo = Repositories.get_repository_by_name!(repo)
recipe = get_by_name!(recipe, repo.id)
%{dependent_recipe_id: recipe.id, index: ind}
end)
%{attrs | dependencies: dependencies}
end
defp build_dependencies(attrs), do: attrs
defp build_items(transaction, section_key, repo, items) when is_list(items) do
Enum.reduce(items, transaction, fn %{name: name} = item, acc ->
add_operation(acc, {:item, repo.id, name}, fn %{^section_key => %{id: section_id}} ->
%RecipeItem{recipe_section_id: section_id}
|> RecipeItem.changeset(Map.merge(item, item_reference(item, repo)))
|> Core.Repo.insert()
end)
end)
end
defp item_reference(%{type: :helm, name: name}, %{id: id}),
do: %{chart_id: Charts.get_chart_by_name!(id, name).id}
defp item_reference(%{type: :terraform, name: name}, %{id: id}),
do: %{terraform_id: TfSvc.get_terraform_by_name!(id, name).id}
end
| 36.219672 | 124 | 0.634561 |
e85cb3fce99f35a672e4c85c4f624282144f237b | 708 | exs | Elixir | test/features/category_change_via_drag_and_drop_test.exs | codyduval/remote_retro_crd | b3a4adda7503b4e1ca5ec135cd34e1e71ec8d247 | [
"MIT"
] | null | null | null | test/features/category_change_via_drag_and_drop_test.exs | codyduval/remote_retro_crd | b3a4adda7503b4e1ca5ec135cd34e1e71ec8d247 | [
"MIT"
] | null | null | null | test/features/category_change_via_drag_and_drop_test.exs | codyduval/remote_retro_crd | b3a4adda7503b4e1ca5ec135cd34e1e71ec8d247 | [
"MIT"
] | null | null | null | defmodule CategoryChangeViaDragAndDrop do
use RemoteRetro.IntegrationCase, async: false
alias RemoteRetro.Idea
import ShorterMaps
describe "when an idea already exists in a retro" do
setup [:persist_idea_for_retro]
@tag [
idea: %Idea{category: "sad", body: "no linter"},
]
test "users can drag the idea from one column to another", ~M{retro, session} do
retro_path = "/retros/" <> retro.id
session = visit(session, retro_path)
assert_has(session, Query.css(".sad.ideas", text: "no linter"))
session |> drag_idea("no linter", from: "sad", to: "confused")
assert_has(session, Query.css(".confused.column", text: "no linter"))
end
end
end
| 28.32 | 84 | 0.670904 |
e85cb5cdd0af73522409255bd73e5b0eff7f8f1c | 95 | ex | Elixir | lib/gen_event_sourced.ex | slashmili/gen_event_sourced | d2cfa6337256ded910b5a315169be927c9dd589b | [
"MIT"
] | null | null | null | lib/gen_event_sourced.ex | slashmili/gen_event_sourced | d2cfa6337256ded910b5a315169be927c9dd589b | [
"MIT"
] | null | null | null | lib/gen_event_sourced.ex | slashmili/gen_event_sourced | d2cfa6337256ded910b5a315169be927c9dd589b | [
"MIT"
] | null | null | null | defmodule GenEventSourced do
@moduledoc """
Documentation for `GenEventSourced`.
"""
end
| 15.833333 | 38 | 0.726316 |
e85cd56540aaf96b3c4175779f404d36bb06b607 | 612 | ex | Elixir | lib/EventStage/event_dispatcher.ex | ElinksFr/alchemy | 4c64e5c619977a62a5674dbd2b1ef29f76f6f44e | [
"MIT"
] | 163 | 2017-03-01T09:02:35.000Z | 2022-03-09T23:31:48.000Z | lib/EventStage/event_dispatcher.ex | ElinksFr/alchemy | 4c64e5c619977a62a5674dbd2b1ef29f76f6f44e | [
"MIT"
] | 91 | 2017-02-23T23:23:35.000Z | 2021-12-29T23:47:44.000Z | lib/EventStage/event_dispatcher.ex | ElinksFr/alchemy | 4c64e5c619977a62a5674dbd2b1ef29f76f6f44e | [
"MIT"
] | 38 | 2017-03-23T13:16:44.000Z | 2022-02-26T15:53:32.000Z | defmodule Alchemy.EventStage.EventDispatcher do
# Serves as a small consumer of the 3rd stage,
@moduledoc false
# forwarding events to notify processes subscribed in the EventRegistry
use GenStage
alias Alchemy.Cogs.EventRegistry
alias Alchemy.EventStage.Cacher
def start_link(limit) do
GenStage.start_link(__MODULE__, limit)
end
def init(limit) do
producers = for x <- 1..limit, do: Module.concat(Cacher, :"#{x}")
{:consumer, :ok, subscribe_to: producers}
end
# Forwards every incoming batch to the EventRegistry, which notifies the
# subscribed processes. Returns an empty event list: this is the terminal
# consumer of the pipeline.
def handle_events(events, _from, state) do
  EventRegistry.dispatch(events)
  {:noreply, [], state}
end
end
| 26.608696 | 73 | 0.72549 |
e85ce9a1c0364df4cc9134d0e07ea71e825bb805 | 1,655 | ex | Elixir | clients/civic_info/lib/google_api/civic_info/v2/model/point_proto.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/civic_info/lib/google_api/civic_info/v2/model/point_proto.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/civic_info/lib/google_api/civic_info/v2/model/point_proto.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CivicInfo.V2.Model.PointProto do
  @moduledoc """
  A latitude/longitude point. Judging by the `E7` suffix naming convention,
  coordinates are presumably fixed-point integers (degrees * 10^7) — confirm
  against the upstream Civic Info API discovery document.

  ## Attributes

  *   `latE7` (*type:* `integer()`, *default:* `nil`) - Latitude in E7 fixed-point.
  *   `lngE7` (*type:* `integer()`, *default:* `nil`) - Longitude in E7 fixed-point.
  *   `metadata` (*type:* `GoogleApi.CivicInfo.V2.Model.FieldMetadataProto.t`, *default:* `nil`) - Provenance/metadata for the field.
  """

  # `ModelBase` supplies the struct, `field/2` macro and JSON (de)coding helpers.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :latE7 => integer(),
          :lngE7 => integer(),
          :metadata => GoogleApi.CivicInfo.V2.Model.FieldMetadataProto.t()
        }

  field(:latE7)
  field(:lngE7)
  field(:metadata, as: GoogleApi.CivicInfo.V2.Model.FieldMetadataProto)
end
# Poison protocol implementations: delegate JSON decoding/encoding of
# PointProto to the generated model helpers.
defimpl Poison.Decoder, for: GoogleApi.CivicInfo.V2.Model.PointProto do
  def decode(value, options) do
    GoogleApi.CivicInfo.V2.Model.PointProto.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.CivicInfo.V2.Model.PointProto do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 31.226415 | 99 | 0.706949 |
e85cf05716e269038d33ebf5cb77fdc7d4b70ddd | 1,234 | ex | Elixir | apps/neoscan_web/lib/neoscan_web/channels/user_socket.ex | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 75 | 2017-07-23T02:45:32.000Z | 2021-12-13T11:04:17.000Z | apps/neoscan_web/lib/neoscan_web/channels/user_socket.ex | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 252 | 2017-07-13T19:36:00.000Z | 2021-07-28T18:40:00.000Z | apps/neoscan_web/lib/neoscan_web/channels/user_socket.ex | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 87 | 2017-07-23T02:45:34.000Z | 2022-03-02T14:54:27.000Z | defmodule NeoscanWeb.UserSocket do
use Phoenix.Socket
@moduledoc false
## Channels
channel("room:*", NeoscanWeb.RoomChannel)
## Transports
transport(:websocket, Phoenix.Transports.WebSocket)
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
# Accepts every connection unconditionally: params are ignored, no user is
# verified and no assigns are added. Tighten here if auth is ever required.
def connect(_params, socket) do
  {:ok, socket}
end

# Socket id's are topics that allow you to identify all sockets
# for a given user:
#
#     def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
#     NeoscanWeb.Endpoint.broadcast("user_socket:#{user.id}",
#       "disconnect", %{})
#
# Returning `nil` makes this socket anonymous (no per-user disconnects).
def id(_socket), do: nil
end
| 30.097561 | 67 | 0.679903 |
e85cf5c1c415dc99955612a3e70243c22c89dc91 | 1,549 | ex | Elixir | lib/renraku_web/views/error_helpers.ex | moroz/renraku | 845e6a72573f138094ba8f868ac91b7f3440bdab | [
"BSD-3-Clause"
] | null | null | null | lib/renraku_web/views/error_helpers.ex | moroz/renraku | 845e6a72573f138094ba8f868ac91b7f3440bdab | [
"BSD-3-Clause"
] | null | null | null | lib/renraku_web/views/error_helpers.ex | moroz/renraku | 845e6a72573f138094ba8f868ac91b7f3440bdab | [
"BSD-3-Clause"
] | null | null | null | defmodule RenrakuWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates a `<span>` tag for each inlined error on the given form input.
"""
def error_tag(form, field) do
  # One span per accumulated error on this field; `phx_feedback_for` lets
  # LiveView tie the feedback to the matching input.
  for error <- Keyword.get_values(form.errors, field) do
    content_tag(:span, translate_error(error),
      class: "invalid-feedback",
      phx_feedback_for: input_name(form, field)
    )
  end
end
@doc """
Translates an error message using gettext.

Takes the `{message, opts}` tuple produced by Ecto changesets; `opts`
carries interpolation bindings (e.g. `count` for pluralization).
"""
def translate_error({msg, opts}) do
  # When using gettext, we typically pass the strings we want
  # to translate as a static argument:
  #
  #     # Translate "is invalid" in the "errors" domain
  #     dgettext("errors", "is invalid")
  #
  #     # Translate the number of files with plural rules
  #     dngettext("errors", "1 file", "%{count} files", count)
  #
  # Because the error messages we show in our forms and APIs
  # are defined inside Ecto, we need to translate them dynamically.
  # This requires us to call the Gettext module passing our gettext
  # backend as first argument.
  #
  # Note we use the "errors" domain, which means translations
  # should be written to the errors.po file. The :count option is
  # set by Ecto and indicates we should also apply plural rules.
  if count = opts[:count] do
    Gettext.dngettext(RenrakuWeb.Gettext, "errors", msg, msg, count, opts)
  else
    Gettext.dgettext(RenrakuWeb.Gettext, "errors", msg, opts)
  end
end
end
| 32.270833 | 76 | 0.665591 |
e85cf7ac78389ffddc69541b1af1603f3e3fdb14 | 82 | exs | Elixir | test/bus_kiosk_web/views/page_view_test.exs | mitchellhenke/bus_kiosk | 9814b0b10190bb06c823b00315616391100c5bfa | [
"BSD-3-Clause"
] | 2 | 2020-02-21T15:40:27.000Z | 2020-12-06T21:50:39.000Z | test/bus_kiosk_web/views/page_view_test.exs | mitchellhenke/bus_kiosk | 9814b0b10190bb06c823b00315616391100c5bfa | [
"BSD-3-Clause"
] | 3 | 2020-02-19T17:06:24.000Z | 2020-04-20T14:33:07.000Z | test/bus_kiosk_web/views/page_view_test.exs | mitchellhenke/bus_kiosk | 9814b0b10190bb06c823b00315616391100c5bfa | [
"BSD-3-Clause"
] | null | null | null | defmodule BusKioskWeb.PageViewTest do
use BusKioskWeb.ConnCase, async: true
end
| 20.5 | 39 | 0.829268 |
e85cfc3daf112eadb51c2856f332136b9a9bb533 | 9,806 | ex | Elixir | lib/elixir/lib/integer.ex | davidsulc/elixir | dd4fd6ab742acd75862e34e26dbdb86e0cf6453f | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/integer.ex | davidsulc/elixir | dd4fd6ab742acd75862e34e26dbdb86e0cf6453f | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/integer.ex | davidsulc/elixir | dd4fd6ab742acd75862e34e26dbdb86e0cf6453f | [
"Apache-2.0"
] | null | null | null | defmodule Integer do
@moduledoc """
Functions for working with integers.
"""
import Bitwise
@doc """
Determines if `integer` is odd.

Returns `true` if the given `integer` is an odd number,
otherwise it returns `false`.

Allowed in guard clauses.

## Examples

    iex> Integer.is_odd(5)
    true

    iex> Integer.is_odd(6)
    false

    iex> Integer.is_odd(-5)
    true

    iex> Integer.is_odd(0)
    false

"""
defmacro is_odd(integer) do
  # Tests the lowest bit. Expands to `Bitwise.&&&/2` plus `==`, both of
  # which are permitted in guards — hence "allowed in guard clauses".
  quote do: (unquote(integer) &&& 1) == 1
end

@doc """
Determines if an `integer` is even.

Returns `true` if the given `integer` is an even number,
otherwise it returns `false`.

Allowed in guard clauses.

## Examples

    iex> Integer.is_even(10)
    true

    iex> Integer.is_even(5)
    false

    iex> Integer.is_even(-10)
    true

    iex> Integer.is_even(0)
    true

"""
defmacro is_even(integer) do
  # Even numbers have a cleared lowest bit; guard-safe expansion as above.
  quote do: (unquote(integer) &&& 1) == 0
end
@doc """
Computes the modulo remainder of an integer division.

`Integer.mod/2` uses floored division, which means that
the result will always have the sign of the `divisor`.

Raises an `ArithmeticError` exception if one of the arguments is not an
integer, or when the `divisor` is `0`.

## Examples

    iex> Integer.mod(5, 2)
    1

    iex> Integer.mod(6, -4)
    -2

"""
@spec mod(integer, neg_integer | pos_integer) :: integer
def mod(dividend, divisor) do
  # `rem/2` truncates toward zero; when its sign disagrees with the
  # divisor's, shift by one divisor to obtain the floored result.
  case rem(dividend, divisor) do
    remainder when remainder * divisor < 0 -> remainder + divisor
    remainder -> remainder
  end
end
@doc """
Performs a floored integer division.

Raises an `ArithmeticError` exception if one of the arguments is not an
integer, or when the `divisor` is `0`.

`Integer.floor_div/2` performs *floored* integer division. This means that
the result is always rounded towards negative infinity.

If you want to perform truncated integer division (rounding towards zero),
use `Kernel.div/2` instead.

## Examples

    iex> Integer.floor_div(5, 2)
    2

    iex> Integer.floor_div(6, -4)
    -2

    iex> Integer.floor_div(-99, 2)
    -50

"""
@spec floor_div(integer, neg_integer | pos_integer) :: integer
def floor_div(dividend, divisor) do
  # `div/2` truncates toward zero. When operands have opposite signs and
  # the division is inexact, truncation rounded up — step down by one.
  quotient = div(dividend, divisor)

  if rem(dividend, divisor) != 0 and dividend * divisor < 0 do
    quotient - 1
  else
    quotient
  end
end
@doc """
Returns the ordered digits for the given `integer`.

An optional `base` value may be provided representing the radix for the returned
digits. This one must be an integer >= 2.

## Examples

    iex> Integer.digits(123)
    [1, 2, 3]

    iex> Integer.digits(170, 2)
    [1, 0, 1, 0, 1, 0, 1, 0]

    iex> Integer.digits(-170, 2)
    [-1, 0, -1, 0, -1, 0, -1, 0]

"""
@spec digits(integer, pos_integer) :: [integer, ...]
def digits(integer, base \\ 10)
    when is_integer(integer) and is_integer(base) and base >= 2 do
  do_digits(integer, base, [])
end

# NOTE: clause order matters. The first three clauses are special cases for
# an empty accumulator; in particular the first one makes `digits(0)` return
# `[0]` instead of `[]` (the `0, _base, acc` clause below returns the
# accumulator unchanged, which would be empty).
# Single digit (including 0): done immediately.
defp do_digits(digit, base, []) when abs(digit) < base,
  do: [digit]

# Exactly minus the base, e.g. digits(-10, 10) == [-1, 0].
defp do_digits(digit, base, []) when digit == -base,
  do: [-1, 0]

# Exactly the base itself, e.g. digits(10, 10) == [1, 0].
defp do_digits(base, base, []),
  do: [1, 0]

# Quotient reached zero: the accumulator holds the digits in order.
defp do_digits(0, _base, acc),
  do: acc

# General step: prepend the least-significant digit and recurse on the
# truncated quotient (digits of negatives come out negative via `rem/2`).
defp do_digits(integer, base, acc),
  do: do_digits(div(integer, base), base, [rem(integer, base) | acc])

@doc """
Returns the integer represented by the ordered `digits`.

An optional `base` value may be provided representing the radix for the `digits`.
Base has to be an integer greater or equal than `2`.

## Examples

    iex> Integer.undigits([1, 2, 3])
    123

    iex> Integer.undigits([1, 4], 16)
    20

    iex> Integer.undigits([])
    0

"""
@spec undigits([integer], pos_integer) :: integer
def undigits(digits, base \\ 10) when is_list(digits) and is_integer(base) and base >= 2 do
  do_undigits(digits, base, 0)
end

# NOTE: clause order matters. The `acc == 0` clauses are fast paths for
# inputs that haven't accumulated anything yet (empty, one digit, `[1, 0]`,
# leading zeros); the remaining clauses implement the Horner-style fold and
# validate each digit against the base.
defp do_undigits([], _base, 0),
  do: 0

defp do_undigits([digit], base, 0) when is_integer(digit) and digit < base,
  do: digit

defp do_undigits([1, 0], base, 0),
  do: base

# Leading zeros contribute nothing while the accumulator is still zero.
defp do_undigits([0 | tail], base, 0),
  do: do_undigits(tail, base, 0)

defp do_undigits([], _base, acc),
  do: acc

# Reject digits out of range for the base.
defp do_undigits([digit | _], base, _) when is_integer(digit) and digit >= base,
  do: raise ArgumentError, "invalid digit #{digit} in base #{base}"

# Horner step: shift the accumulator by one base position and add the digit.
defp do_undigits([digit | tail], base, acc) when is_integer(digit),
  do: do_undigits(tail, base, acc * base + digit)
@doc """
Parses a text representation of an integer.

An optional `base` to the corresponding integer can be provided.
If `base` is not given, 10 will be used.

If successful, returns a tuple in the form of `{integer, remainder_of_binary}`.
Otherwise `:error`.

Raises an error if `base` is less than 2 or more than 36.

If you want to convert a string-formatted integer directly to a integer,
`String.to_integer/1` or `String.to_integer/2` can be used instead.

## Examples

    iex> Integer.parse("34")
    {34, ""}

    iex> Integer.parse("34.5")
    {34, ".5"}

    iex> Integer.parse("three")
    :error

    iex> Integer.parse("34", 10)
    {34, ""}

    iex> Integer.parse("f4", 16)
    {244, ""}

    iex> Integer.parse("Awww++", 36)
    {509216, "++"}

    iex> Integer.parse("fab", 10)
    :error

    iex> Integer.parse("a2", 38)
    ** (ArgumentError) invalid base 38

"""
@spec parse(binary, 2..36) :: {integer, binary} | :error
def parse(binary, base \\ 10)

def parse(_binary, base) when not base in 2..36 do
  raise ArgumentError, "invalid base #{inspect base}"
end

def parse(binary, base) do
  # Count how many leading bytes form a valid number, split there, and let
  # the Erlang BIF do the actual conversion on the digit prefix.
  case count_digits(binary, base) do
    0 ->
      :error
    count ->
      {digits, rem} = :erlang.split_binary(binary, count)
      {:erlang.binary_to_integer(digits, base), rem}
  end
end

# A leading sign counts toward the prefix length, but a sign with no digits
# after it is not a number — the `1 -> 0` case rejects a bare "+"/"-".
defp count_digits(<<sign, rest::binary>>, base) when sign in '+-' do
  case count_digits_nosign(rest, base, 1) do
    1 -> 0
    count -> count
  end
end

defp count_digits(<<rest::binary>>, base) do
  count_digits_nosign(rest, base, 0)
end

# Compile-time metaprogramming: for every character that can be a digit
# (0-9, A-Z, a-z) generate one `count_digits_nosign/3` clause, guarded so
# the character's value is below the requested base. At runtime this is a
# plain multi-clause dispatch with no per-call arithmetic on the char class.
digits = [{?0..?9, -?0}, {?A..?Z, 10 - ?A}, {?a..?z, 10 - ?a}]

for {chars, diff} <- digits, char <- chars do
  digit = char + diff

  defp count_digits_nosign(<<unquote(char), rest::binary>>, base, count)
       when base > unquote(digit) do
    count_digits_nosign(rest, base, count + 1)
  end
end

# First non-digit byte (or end of binary) terminates the count.
defp count_digits_nosign(<<_::binary>>, _, count), do: count
@doc """
Returns a binary which corresponds to the text representation
of `integer`.

Inlined by the compiler.

## Examples

    iex> Integer.to_string(123)
    "123"

    iex> Integer.to_string(+456)
    "456"

    iex> Integer.to_string(-789)
    "-789"

    iex> Integer.to_string(0123)
    "123"

"""
@spec to_string(integer) :: String.t
def to_string(integer) do
  # Thin wrapper over the Erlang BIF; the compiler inlines the call.
  :erlang.integer_to_binary(integer)
end

@doc """
Returns a binary which corresponds to the text representation
of `integer` in the given `base`.

`base` can be an integer between 2 and 36.

Inlined by the compiler.

## Examples

    iex> Integer.to_string(100, 16)
    "64"

    iex> Integer.to_string(-100, 16)
    "-64"

    iex> Integer.to_string(882681651, 36)
    "ELIXIR"

"""
@spec to_string(integer, 2..36) :: String.t
def to_string(integer, base) do
  :erlang.integer_to_binary(integer, base)
end

@doc """
Returns a charlist which corresponds to the text representation of the given `integer`.

Inlined by the compiler.

## Examples

    iex> Integer.to_charlist(123)
    '123'

    iex> Integer.to_charlist(+456)
    '456'

    iex> Integer.to_charlist(-789)
    '-789'

    iex> Integer.to_charlist(0123)
    '123'

"""
@spec to_charlist(integer) :: charlist
def to_charlist(integer) do
  # Charlist variant, also delegated straight to the BIF.
  :erlang.integer_to_list(integer)
end

@doc """
Returns a charlist which corresponds to the text representation of `integer` in the given `base`.

`base` can be an integer between 2 and 36.

Inlined by the compiler.

## Examples

    iex> Integer.to_charlist(100, 16)
    '64'

    iex> Integer.to_charlist(-100, 16)
    '-64'

    iex> Integer.to_charlist(882681651, 36)
    'ELIXIR'

"""
@spec to_charlist(integer, 2..36) :: charlist
def to_charlist(integer, base) do
  :erlang.integer_to_list(integer, base)
end
@doc """
Returns the greatest common divisor of the two given integers.

The greatest common divisor (GCD) of `integer1` and `integer2` is the largest positive
integer that divides both `integer1` and `integer2` without leaving a remainder.

By convention, `gcd(0, 0)` returns `0`.

## Examples

    iex> Integer.gcd(2, 3)
    1

    iex> Integer.gcd(8, 12)
    4

    iex> Integer.gcd(8, -12)
    4

    iex> Integer.gcd(10, 0)
    10

    iex> Integer.gcd(7, 7)
    7

    iex> Integer.gcd(0, 0)
    0

"""
@spec gcd(0, 0) :: 0
@spec gcd(integer, integer) :: pos_integer
def gcd(integer1, integer2) when is_integer(integer1) and is_integer(integer2) do
  # Work on absolute values so the Euclidean recursion below only ever
  # sees non-negative operands (GCD is sign-insensitive).
  euclid(abs(integer1), abs(integer2))
end

# Classic Euclidean algorithm on non-negative integers.
defp euclid(0, b), do: b
defp euclid(a, 0), do: a
defp euclid(a, b), do: euclid(b, rem(a, b))
# Legacy spelling kept for backwards compatibility; simply delegates to
# `to_charlist/1`. The deprecation warning itself is emitted by the
# compiler (elixir_dispatch), not here.
# TODO: Remove by 2.0
# (hard-deprecated in elixir_dispatch)
@doc false
@spec to_char_list(integer) :: charlist
def to_char_list(integer), do: Integer.to_charlist(integer)

# TODO: Remove by 2.0
# (hard-deprecated in elixir_dispatch)
@doc false
@spec to_char_list(integer, 2..36) :: charlist
def to_char_list(integer, base), do: Integer.to_charlist(integer, base)
end
| 22.964871 | 99 | 0.628697 |
e85d337e1e3c896c51080001c623e46b588e98fd | 12,148 | ex | Elixir | lib/pardall_markdown/cache.ex | rockchalkwushock/pardall_markdown | 82188d6090a419f346618997d40539a0435ea4d5 | [
"Apache-2.0"
] | 75 | 2021-09-11T15:36:34.000Z | 2022-03-28T17:21:55.000Z | lib/pardall_markdown/cache.ex | rockchalkwushock/pardall_markdown | 82188d6090a419f346618997d40539a0435ea4d5 | [
"Apache-2.0"
] | 33 | 2021-09-11T15:34:29.000Z | 2021-11-21T03:47:05.000Z | lib/pardall_markdown/cache.ex | rockchalkwushock/pardall_markdown | 82188d6090a419f346618997d40539a0435ea4d5 | [
"Apache-2.0"
] | 4 | 2021-09-27T03:34:35.000Z | 2022-01-31T23:42:58.000Z | defmodule PardallMarkdown.Cache do
require Logger
alias PardallMarkdown.Content.{Post, Link}
import PardallMarkdown.Content.Filters
import PardallMarkdown.Content.Utils
@cache_name Application.get_env(:pardall_markdown, PardallMarkdown.Content)[:cache_name]
@index_cache_name Application.get_env(:pardall_markdown, PardallMarkdown.Content)[
:index_cache_name
]
# Fetches a single cached item (post or link) by slug; `nil` on a miss.
def get_by_slug(slug), do: ConCache.get(@cache_name, slug_key(slug))
# Returns every cached `%Post{}`, optionally restricted to one post type.
# Non-post ETS entries are skipped by the generator pattern.
def get_all_posts(type \\ :all) do
  for {_key, %Post{type: post_type} = post} <- :ets.tab2list(ConCache.ets(@cache_name)),
      type == :all or post_type == type,
      do: post
end
# Returns every cached `%Link{}`, optionally restricted to one link type.
# Non-link ETS entries are skipped by the generator pattern.
def get_all_links(type \\ :all) do
  for {_key, %Link{type: link_type} = link} <- :ets.tab2list(ConCache.ets(@cache_name)),
      type == :all or link_type == type,
      do: link
end
# Same links as `get_all_links/1`, but shaped as a map keyed by slug for
# O(1) lookups.
def get_all_links_indexed_by_slug(type \\ :all) do
  type
  |> get_all_links()
  |> Map.new(fn %Link{slug: slug} = link -> {slug, link} end)
end
# Returns the cached taxonomy tree; on a cache miss it rebuilds the tree
# (which also stores it) and re-reads the freshly cached value.
def get_taxonomy_tree do
  case ConCache.get(@index_cache_name, taxonomy_tree_key()) do
    nil ->
      build_taxonomy_tree()
      ConCache.get(@index_cache_name, taxonomy_tree_key())

    tree ->
      tree
  end
end
# Returns the cached content tree for `slug` ("/" is the whole tree); on a
# cache miss it rebuilds all content trees and re-reads the cached value.
def get_content_tree(slug \\ "/") do
  case ConCache.get(@index_cache_name, content_tree_key(slug)) do
    nil ->
      build_content_tree()
      ConCache.get(@index_cache_name, content_tree_key(slug))

    tree ->
      tree
  end
end
# Index posts (`_index.md`) are not cached under their own slug: they only
# customize their parent taxonomy (see `upsert_taxonomy_appending_post/2`).
def save_post(%Post{type: :index} = post) do
  save_post_taxonomies(post)
  post
end

# Regular posts are cached under their slug AND appended to every taxonomy
# they belong to.
def save_post(%Post{} = post) do
  save_post_pure(post)
  save_post_taxonomies(post)
  post
end

# Caches the post under its slug key only, without touching taxonomies.
def save_post_pure(%Post{slug: _slug} = post) do
  :ok = save_slug(post)
  post
end

# Replaces one field of an already-cached post and re-saves it; returns
# `nil` (doing nothing) when no post exists for `slug`.
def update_post_field(slug, field, value) do
  case get_by_slug(slug) do
    nil -> nil
    %Post{} = post -> post |> Map.put(field, value) |> save_post_pure()
  end
end

# Stores any item carrying a `:slug` (post or link) under its slug key.
def save_slug(%{slug: slug} = item) do
  key = slug_key(slug)
  ConCache.put(@cache_name, key, item)
end
# Rebuilds the taxonomy tree and caches it under the index key.
def build_taxonomy_tree() do
  tree = do_build_taxonomy_tree()
  ConCache.put(@index_cache_name, taxonomy_tree_key(), tree)
  tree
end

@doc """
Posts are extracted from the `children` field of a taxonomy
and added to the taxonomies list as a Link, while keeping
the correct nesting under their parent taxonomy.

Posts are also sorted accordingly to their topmost taxonomy
sorting configuration.
"""
def build_content_tree do
  tree =
    do_build_taxonomy_tree(true)
    |> do_build_content_tree()

  # Embed each post `%Link{}` into their individual `%Post{}` entities
  Enum.each(tree, fn
    %Link{type: :post, slug: slug} = link ->
      update_post_field(slug, :link, link)

    _ ->
      :ignore
  end)

  # Cache the whole tree under the root ("/") content key.
  ConCache.put(@index_cache_name, content_tree_key(), tree)

  # Split each root slug and its nested children and
  # save the roots independently.
  tree
  |> Enum.reduce(%{}, fn
    # Only root taxonomies, and ignore pages
    # (pages are posts in the root, but a link of type :post)
    %{type: :taxonomy, slug: slug, level: 0} = link, acc when slug != "/" ->
      Map.put(acc, slug, %{link: link, children: []})

    # Nested links: append under their root ancestor, taken as the second
    # entry of `parents` (the head is skipped by the pattern).
    %{parents: [_ | [root_parent | _]]} = link, acc ->
      children = Map.get(acc, root_parent)[:children]
      put_in(acc[root_parent][:children], children ++ [link])

    # Ignore the very root link "/" and pages
    _, acc ->
      acc
  end)
  |> Enum.each(fn {root_slug, %{children: children}} ->
    ConCache.put(@index_cache_name, content_tree_key(root_slug), children)
  end)

  tree
end

# Removes a single cached item by slug.
def delete_slug(slug) do
  ConCache.delete(@cache_name, slug_key(slug))
end

# Wipes both caches; the ETS tables survive, only their objects are removed.
def delete_all do
  ConCache.ets(@cache_name)
  |> :ets.delete_all_objects()

  ConCache.ets(@index_cache_name)
  |> :ets.delete_all_objects()
end
#
# Internal
#

# An index post only customizes its direct parent taxonomy: the last entry
# of `taxonomies` is the post's immediate parent.
defp save_post_taxonomies(%Post{type: :index, taxonomies: taxonomies} = post) do
  taxonomies
  |> List.last()
  |> upsert_taxonomy_appending_post(post)
end

# Regular posts are appended to every taxonomy in their ancestry chain.
defp save_post_taxonomies(%Post{taxonomies: taxonomies} = post) do
  taxonomies
  |> Enum.map(&upsert_taxonomy_appending_post(&1, post))
end

# Index-post variant: overrides the taxonomy's presentation/sorting fields
# (title, position, sort_by/sort_order from the post's metadata) instead of
# appending a child. Used whether the taxonomy is already cached or not.
defp upsert_taxonomy_appending_post(
       %Link{slug: slug} = taxonomy,
       %Post{type: :index, position: position, title: post_title, metadata: metadata} = post
     ) do
  do_update = fn taxonomy ->
    {:ok,
     %{
       taxonomy
       | index_post: post,
         position: position,
         title: post_title,
         sort_by: Map.get(metadata, :sort_by, default_sort_by()) |> maybe_to_atom(),
         sort_order: Map.get(metadata, :sort_order, default_sort_order()) |> maybe_to_atom()
     }}
  end

  # `ConCache.update/3` receives the current cached value (or nil); either
  # way we apply the same override on the freshest taxonomy available.
  ConCache.update(@cache_name, slug_key(slug), fn
    nil ->
      do_update.(taxonomy)

    %Link{} = taxonomy ->
      do_update.(taxonomy)
  end)
end

# Regular-post variant: appends a slimmed-down copy of the post (content
# and table of contents stripped to keep the cache small) to the taxonomy's
# `children` list.
defp upsert_taxonomy_appending_post(
       %Link{slug: slug, children: children} = taxonomy,
       %Post{} = post
     ) do
  do_update = fn taxonomy, children ->
    stripped_post = post |> Map.put(:content, nil) |> Map.put(:toc, [])
    {:ok, %{taxonomy | children: children ++ [stripped_post]}}
  end

  ConCache.update(@cache_name, slug_key(slug), fn
    nil ->
      do_update.(taxonomy, children)

    %Link{} = taxonomy ->
      do_update.(taxonomy, children)
  end)
end
# Maps each root-level (level 0) taxonomy slug to its `{sort_by, sort_order}`
# pair; non-taxonomy links and nested taxonomies are skipped by the pattern.
defp get_taxonomy_sorting_methods_from_topmost_taxonomies do
  links =
    get_all_links()
    |> sort_by_slug()

  for %Link{
        type: :taxonomy,
        level: 0,
        sort_by: sort_by,
        sort_order: sort_order,
        slug: slug
      } <- links,
      into: %{},
      do: {slug, {sort_by, sort_order}}
end
# Builds the ordered taxonomy tree: for each taxonomy link, keep only the
# published posts whose *direct* parent is that taxonomy, then sort them by
# the sorting method of their topmost ancestor taxonomy. `with_home`
# controls whether the root "/" link is kept at the head of the list.
defp do_build_taxonomy_tree(with_home \\ false) do
  sorting_methods = get_taxonomy_sorting_methods_from_topmost_taxonomies()

  get_all_links()
  |> sort_by_slug()
  |> (fn
        # Drop the leading root link unless the caller asked for it.
        [%Link{slug: "/"} | tree] when not with_home -> tree
        tree -> tree
      end).()
  |> Enum.map(fn %Link{children: posts, slug: slug} = taxonomy ->
    posts =
      posts
      |> Enum.filter(fn
        %Post{taxonomies: [_t | _] = post_taxonomies} ->
          # The last taxonomy of a post is its parent taxonomy.
          # I.e. a post in *Blog > Art > 3D* has 3 taxonomies:
          # Blog, Blog > Art and Blog > Art > 3D,
          # where its parent is the last one.
          List.last(post_taxonomies).slug == slug

        _ ->
          true
      end)
      |> filter_by_is_published()

    taxonomy
    |> Map.put(:children, posts)
  end)
  |> Enum.map(fn %Link{children: posts} = taxonomy ->
    {sort_by, sort_order} = find_sorting_method_for_taxonomy(taxonomy, sorting_methods)

    taxonomy
    |> Map.put(:children, posts |> sort_by_custom(sort_by, sort_order))
  end)
end

# Flattens the taxonomy tree into a content tree: every taxonomy is kept
# (with its children emptied) and each of its posts is re-emitted right
# after it as a `%Link{type: :post}` wrapping the post, so the result is a
# single navigable list in display order.
defp do_build_content_tree(tree) do
  with_home =
    Application.get_env(:pardall_markdown, PardallMarkdown.Content)[:content_tree_display_home]

  # Posts attached directly to the root keep their parent's slug appended
  # to the parents chain; everyone else inherits the parents as-is.
  parent_for_level1_post = fn
    ["/"], %Link{slug: slug, parents: parents}
    when slug != "/" ->
      parents ++ [slug]

    _, %Link{parents: parents} ->
      parents
  end

  tree
  |> Enum.reduce([], fn %Link{children: posts} = taxonomy, all ->
    all
    |> Kernel.++([Map.put(taxonomy, :children, [])])
    |> Kernel.++(
      posts
      |> Enum.map(fn %{taxonomies: post_taxonomies} = post ->
        %{parents: last_taxonomy_parents} = List.last(post_taxonomies)

        %Link{
          slug: post.slug,
          title: post.title,
          level: level_for_joined_post(taxonomy.slug, taxonomy.level),
          parents: parent_for_level1_post.(last_taxonomy_parents, taxonomy),
          type: :post,
          position: post.position,
          children: [post]
        }
      end)
    )
  end)
  |> (fn
        # Drop the root "/" entry unless the app is configured to show it.
        [%Link{slug: "/"} | tree] when not with_home ->
          tree

        tree ->
          tree
      end).()
  |> sort_taxonomies_embedded_posts()
  |> build_tree_navigation()
  |> update_embedded_taxonomies()
end
# Adds previous/next navigation to every link of the flat content tree.
# Only links of type `:post` count as navigation targets; the first entry
# gets only `:next`, the last only `:previous`, everything in between both.
defp build_tree_navigation(tree) do
  total = Enum.count(tree)

  # Pair each link with its position so the scans below can compare indexes.
  indexed =
    tree
    |> Enum.with_index()

  indexed
  |> Enum.map(fn
    # Head of the tree: nothing before it, so only wire up `:next`.
    {%Link{} = link, 0} ->
      next =
        case find_next_post(indexed, 0) do
          {next, _} -> next
          _ -> nil
        end

      Map.put(link, :next, next)

    # Tail of the tree: nothing after it, so only wire up `:previous`.
    {%Link{} = link, pos} when pos == total - 1 ->
      previous =
        case find_previous_post(indexed, pos) do
          {previous, _} -> previous
          _ -> nil
        end

      link
      |> Map.put(:previous, previous)

    # Everything else gets both neighbors (either may still be nil when no
    # post exists on that side).
    {%Link{} = link, pos} ->
      previous =
        case find_previous_post(indexed, pos) do
          {previous, _} -> previous
          _ -> nil
        end

      next =
        case find_next_post(indexed, pos) do
          {next, _} -> next
          _ -> nil
        end

      link
      |> Map.put(:previous, previous)
      |> Map.put(:next, next)
  end)
end
# Scans backwards for the nearest `{link, index}` pair whose link is a post
# located strictly before `before_index`; `nil` when none exists.
defp find_previous_post(all_indexed, before_index) do
  all_indexed
  |> Enum.reverse()
  |> Enum.find(fn
    {%Link{type: :post}, position} when position < before_index -> true
    {_, _} -> false
  end)
end

# Scans forward for the first post entry located strictly after
# `after_index`; `nil` when none exists.
defp find_next_post(all_indexed, after_index) do
  Enum.find(all_indexed, fn
    {%Link{type: :post}, position} when position > after_index -> true
    {_, _} -> false
  end)
end
# Posts joined directly under the root keep the root's level; posts under
# any other taxonomy sit one level below their parent.
defp level_for_joined_post("/", parent_level), do: parent_level
defp level_for_joined_post(_parent_slug, parent_level), do: parent_level + 1
# Re-sorts the `children` of every cached taxonomy using its topmost
# ancestor's sorting method and writes each taxonomy back to the cache.
# Runs purely for the cache side effect; the tree passes through unchanged.
defp sort_taxonomies_embedded_posts(tree) do
  taxonomies = get_all_links(:taxonomy)
  sorting_methods = get_taxonomy_sorting_methods_from_topmost_taxonomies()

  taxonomies
  |> Enum.map(fn
    %Link{type: :taxonomy, children: posts} = taxonomy ->
      {sort_by, sort_order} = find_sorting_method_for_taxonomy(taxonomy, sorting_methods)

      taxonomy
      |> Map.put(:children, posts |> sort_by_custom(sort_by, sort_order))
      |> save_slug()
  end)

  tree
end

# Resolves which topmost taxonomy's sorting configuration applies to a
# link: root-level taxonomies use their own slug ("/" for the root itself),
# nested ones look up their root ancestor (second entry of `parents`).
defp find_sorting_method_for_taxonomy(
       %Link{parents: parents, slug: slug, level: level},
       sorting_methods
     ) do
  target_sort_taxonomy =
    cond do
      level == 0 and slug != "/" -> slug
      level == 0 -> "/"
      true -> Enum.at(parents, 1)
    end

  sorting_methods[target_sort_taxonomy]
end

# Updates taxonomies that are embedded into all posts's `Post.taxonomies`,
# since after the content tree is built, there may be taxonomies
# that have overriden data with their related _index.md files.
defp update_embedded_taxonomies(tree) do
  taxonomies = get_all_links_indexed_by_slug(:taxonomy)
  posts = get_all_posts()

  # Refresh the taxonomy copies embedded in every cached post.
  for post <- posts do
    do_update_post_embedded_taxonomies(post, taxonomies)
    |> save_post_pure()
  end

  # Do the same for the post copies embedded inside each taxonomy's children.
  for {_, %Link{children: children_posts} = link} <- taxonomies do
    updated =
      for post <- children_posts do
        do_update_post_embedded_taxonomies(post, taxonomies)
      end

    :ok = save_slug(%{link | children: updated})
  end

  tree
end

# Replaces each embedded taxonomy's title with the current cached title
# (looked up by slug), returning the post with the refreshed list.
defp do_update_post_embedded_taxonomies(
       %Post{taxonomies: post_taxonomies} = post,
       taxonomies_by_slug
     ) do
  updated =
    Enum.map(post_taxonomies, fn %Link{slug: slug} = taxonomy ->
      %{taxonomy | title: taxonomies_by_slug[slug].title}
    end)

  %{post | taxonomies: updated}
end
# Cache-key constructors: tagged tuples namespace the different kinds of
# entries (items by slug, taxonomy trees, content trees) inside the caches.
defp slug_key(slug), do: {:slug, slug}

defp taxonomy_tree_key(slug \\ "/"), do: {:taxonomy_tree, slug}

defp content_tree_key(slug \\ "/"),
  do: {:content_tree, slug}
end
| 26.294372 | 97 | 0.601169 |
e85d3a4eb276dd20f978f1d265a6bfb8cd1f4b91 | 1,704 | ex | Elixir | clients/games/lib/google_api/games/v1/model/player_name.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/games/lib/google_api/games/v1/model/player_name.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/games/lib/google_api/games/v1/model/player_name.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Games.V1.Model.PlayerName do
@moduledoc """
An object representation of the individual components of the player's name. For some players, these fields may not be present.
## Attributes
* `familyName` (*type:* `String.t`, *default:* `nil`) - The family name of this player. In some places, this is known as the last name.
* `givenName` (*type:* `String.t`, *default:* `nil`) - The given name of this player. In some places, this is known as the first name.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:familyName => String.t(),
:givenName => String.t()
}
field(:familyName)
field(:givenName)
end
defimpl Poison.Decoder, for: GoogleApi.Games.V1.Model.PlayerName do
def decode(value, options) do
GoogleApi.Games.V1.Model.PlayerName.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Games.V1.Model.PlayerName do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.08 | 139 | 0.721831 |
e85d9665cc552487deff782f8a8679cd1918f7d3 | 1,461 | exs | Elixir | test/gradient/elixir_checker_test.exs | esl/gradient | 822da399872a38b77440858971f4147694e85fac | [
"Apache-2.0"
] | 75 | 2021-11-17T11:55:13.000Z | 2022-03-28T04:35:04.000Z | test/gradient/elixir_checker_test.exs | esl/gradient | 822da399872a38b77440858971f4147694e85fac | [
"Apache-2.0"
] | 48 | 2021-11-15T13:56:14.000Z | 2022-03-31T15:55:47.000Z | test/gradient/elixir_checker_test.exs | esl/gradient | 822da399872a38b77440858971f4147694e85fac | [
"Apache-2.0"
] | 6 | 2021-12-22T20:41:27.000Z | 2022-03-09T09:07:38.000Z | defmodule Gradient.ElixirCheckerTest do
use ExUnit.Case
doctest Gradient.ElixirChecker
alias Gradient.ElixirChecker
import Gradient.TestHelpers
# The `ex_check` option toggles the Elixir-specific checks: the same
# (deliberately broken) module must yield no errors with it off and at
# least one with it on.
test "checker options" do
  ast = load("Elixir.SpecWrongName.beam")

  assert [] = ElixirChecker.check(ast, ex_check: false)
  assert [] != ElixirChecker.check(ast, ex_check: true)
end

# A module whose specs all match their functions produces no errors.
test "all specs are correct" do
  ast = load("Elixir.CorrectSpec.beam")

  assert [] = ElixirChecker.check(ast, ex_check: true)
end

# Specs written against functions that have default arguments must pass.
test "specs over default args are correct" do
  ast = load("Elixir.SpecDefaultArgs.beam")

  assert [] = ElixirChecker.check(ast, ex_check: true)
end

# A 2-arity spec on `foo/3` is reported via the wrong-spec-name error tuple
# (arity mismatch surfaces through the same error shape).
test "spec arity doesn't match the function arity" do
  ast = load("Elixir.SpecWrongArgsArity.beam")

  assert [{_, {:spec_error, :wrong_spec_name, 2, :foo, 3}}] =
           ElixirChecker.check(ast, ex_check: true)
end

# One error per misnamed spec/function pair, reported in source order.
test "spec name doesn't match the function name" do
  ast = load("Elixir.SpecWrongName.beam")

  assert [
           {_, {:spec_error, :wrong_spec_name, 5, :convert, 1}},
           {_, {:spec_error, :wrong_spec_name, 11, :last_two, 1}}
         ] = ElixirChecker.check(ast, [])
end

# Mixing differently-named specs over one function yields both a
# mixed-specs error and a wrong-name error for that function.
test "mixing specs names is not allowed" do
  ast = load("Elixir.SpecMixed.beam")

  assert [
           {_, {:spec_error, :mixed_specs, 3, :encode, 1}},
           {_, {:spec_error, :wrong_spec_name, 3, :encode, 1}}
         ] = ElixirChecker.check(ast, [])
end
end
| 27.566038 | 67 | 0.645448 |
e85d9bf9014f6f9cb1b81d51054457c1ab2eea7c | 2,032 | exs | Elixir | mix.exs | DylanGuedes/forensic | 16ea59bb6aeefe636012b747a7caab26e0b7f686 | [
"Apache-2.0"
] | null | null | null | mix.exs | DylanGuedes/forensic | 16ea59bb6aeefe636012b747a7caab26e0b7f686 | [
"Apache-2.0"
] | null | null | null | mix.exs | DylanGuedes/forensic | 16ea59bb6aeefe636012b747a7caab26e0b7f686 | [
"Apache-2.0"
] | null | null | null | defmodule Forensic.Mixfile do
use Mix.Project
def project do
[app: :forensic,
version: "0.0.1",
elixir: "~> 1.2",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix, :gettext] ++ Mix.compilers,
test_coverage: [tool: ExCoveralls],
preferred_cli_env: ["coveralls": :test, "coveralls.detail": :test, "coveralls.post": :test, "coveralls.html": :test],
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
aliases: aliases(),
deps: deps()]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
# OTP application configuration.
#
# Type `mix help compile.app` for more information.
def application do
  [
    mod: {Forensic, []},
    applications: [
      :phoenix,
      :phoenix_pubsub,
      :phoenix_html,
      :cowboy,
      :logger,
      :gettext,
      :phoenix_ecto,
      :postgrex,
      :kafka_ex,
      :ex_machina
    ]
  ]
end
# Specifies which paths to compile per environment.
# Compilation paths per environment; tests also compile support helpers.
defp elixirc_paths(:test) do
  ["lib", "web", "test/support"]
end

defp elixirc_paths(_env) do
  ["lib", "web"]
end
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
# Project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
  [
    {:phoenix, "~> 1.2.4"},
    {:phoenix_pubsub, "~> 1.0"},
    {:phoenix_ecto, "~> 3.0"},
    {:postgrex, ">= 0.0.0"},
    {:phoenix_html, "~> 2.6"},
    {:phoenix_live_reload, "~> 1.0", only: :dev},
    {:gettext, "~> 0.11"},
    {:cowboy, "~> 1.0"},
    {:kafka_ex, "~> 0.6.5"},
    {:ex_machina, "~> 2.0"},
    {:excoveralls, "~> 0.6", only: :test},
    {:poison, "~> 2.0"},
    {:cors_plug, "~> 1.2"},
    {:httpoison, "~> 0.11.1"}
  ]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
# Task shortcuts, e.g. `mix ecto.setup` creates, migrates, and seeds the DB.
defp aliases do
  [
    "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
    "ecto.reset": ["ecto.drop", "ecto.setup"],
    test: ["ecto.create --quiet", "ecto.migrate", "test"]
  ]
end
end
| 31.75 | 122 | 0.590059 |
e85dc090f576a5827d1f0a699f56675711d80ac5 | 324 | exs | Elixir | test/advent_of_code/day_13_test.exs | MarcusCemes/advent-of-code-2021 | 703aff2701be89031631342a094ae6976e7d3c1b | [
"MIT"
] | 1 | 2021-12-19T04:13:00.000Z | 2021-12-19T04:13:00.000Z | test/advent_of_code/day_13_test.exs | MarcusCemes/advent-of-code-2021 | 703aff2701be89031631342a094ae6976e7d3c1b | [
"MIT"
] | null | null | null | test/advent_of_code/day_13_test.exs | MarcusCemes/advent-of-code-2021 | 703aff2701be89031631342a094ae6976e7d3c1b | [
"MIT"
] | 1 | 2021-12-15T19:15:20.000Z | 2021-12-15T19:15:20.000Z | defmodule AdventOfCode.Day13Test do
use ExUnit.Case
import AdventOfCode.Day13
import AdventOfCode.Utils
# Day 13 part 1 over the sample input should count 17 visible dots.
test "part1" do
  assert 13 |> read_data(:sample) |> part1() == 17
end
# Placeholder for part 2 — skipped until implemented.
@tag :skip
test "part2" do
  assert part2(nil)
end
end
| 15.428571 | 35 | 0.669753 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.