hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1cfa851748bc49f74a80b476c9d86ba6c8eb286a | 852 | exs | Elixir | test/mix/tasks/expected.mnesia.clear_test.exs | ejpcmac/expected | c2b03bfa9bcb7efd52cf4003fb46f1309e8aa3c4 | [
"MIT"
] | 33 | 2018-01-18T12:16:19.000Z | 2021-07-30T00:33:26.000Z | test/mix/tasks/expected.mnesia.clear_test.exs | ejpcmac/expected | c2b03bfa9bcb7efd52cf4003fb46f1309e8aa3c4 | [
"MIT"
] | 5 | 2018-01-18T12:56:28.000Z | 2019-09-30T07:16:00.000Z | test/mix/tasks/expected.mnesia.clear_test.exs | ejpcmac/expected | c2b03bfa9bcb7efd52cf4003fb46f1309e8aa3c4 | [
"MIT"
] | 1 | 2018-08-08T12:02:44.000Z | 2018-08-08T12:02:44.000Z | Mix.shell(Mix.Shell.Process)
defmodule Mix.Tasks.Expected.Mnesia.ClearTest do
  use Expected.MnesiaCase

  import Mix.Tasks.Expected.Mnesia.Clear

  describe "run/1" do
    # Typo fixed: "accorting" -> "according to".
    test "clears all logins from the store according to the configuration" do
      # Seed the configured Mnesia table with a single record.
      :mnesia.create_table(@table, attributes: [:key, :value])
      record = {@table, :test, :test}
      :mnesia.dirty_write(record)
      assert :mnesia.dirty_match_object({@table, :_, :_}) == [record]

      run([])

      # The task must have removed every record from the table.
      assert :mnesia.dirty_match_object({@table, :_, :_}) == []
    end

    # Test name was previously broken across two lines with an embedded
    # newline; joined into a single-line description.
    test "prints an error message if the table name is not provided in the configuration" do
      Application.delete_env(:expected, :table)

      run([])

      # Mix.Shell.Process (set at the top of this file) forwards shell
      # output as messages to the test process.
      assert_received {:mix_shell, :error, [msg]}
      assert msg =~ ConfigurationError.message(%{reason: :no_mnesia_table})
    end
  end
end
| 28.4 | 77 | 0.664319 |
1cfa8b8a897dd7c7b5f587d017a5ef8a57797314 | 1,899 | exs | Elixir | mix.exs | blvdgroup/crater | 78d03de2eac73d90148df6c5d2d03e99b9b5ccb7 | [
"Apache-2.0"
] | 1 | 2018-03-13T08:15:50.000Z | 2018-03-13T08:15:50.000Z | mix.exs | blvdgroup/crater | 78d03de2eac73d90148df6c5d2d03e99b9b5ccb7 | [
"Apache-2.0"
] | 1 | 2018-03-17T15:45:26.000Z | 2018-03-17T15:45:26.000Z | mix.exs | blvdgroup/crater | 78d03de2eac73d90148df6c5d2d03e99b9b5ccb7 | [
"Apache-2.0"
] | 1 | 2017-08-30T16:13:09.000Z | 2017-08-30T16:13:09.000Z | defmodule Crater.Mixfile do
use Mix.Project

def project do
  [
    app: :crater,
    version: "0.0.1",
    elixir: "~> 1.4",
    elixirc_paths: elixirc_paths(Mix.env()),
    compilers: [:phoenix, :gettext] ++ Mix.compilers(),
    start_permanent: Mix.env() == :prod,
    aliases: aliases(),
    deps: deps()
  ]
end

# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
  [
    mod: {Crater.Application, []},
    extra_applications: [:logger, :runtime_tools]
  ]
end

# Specifies which paths to compile per environment.
# The test environment additionally compiles helpers in test/support.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]

# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
  [
    {:comeonin, "~> 4.0"},
    {:cowboy, "~> 1.0"},
    {:credo, "~> 0.9.0-rc1", only: [:dev, :test], runtime: false},
    {:ex_machina, "~> 2.1", only: :test},
    {:gettext, "~> 0.11"},
    {:guardian, "~> 1.0"},
    {:pbkdf2_elixir, "~> 0.12"},
    {:phoenix, "~> 1.3.2"},
    {:phoenix_ecto, "~> 3.2"},
    {:phoenix_html, "~> 2.10"},
    {:phoenix_live_reload, "~> 1.0", only: :dev},
    {:phoenix_pubsub, "~> 1.0"},
    {:plug_static_index_html, "~> 1.0"},
    {:postgrex, ">= 0.0.0"}
  ]
end

# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
#     $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
  [
    "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
    "ecto.reset": ["ecto.drop", "ecto.setup"],
    # `mix format` here also runs credo after formatting.
    format: ["format", "credo"],
    lint: ["credo --strict"],
    # `mix test` prepares the database before running the suite.
    test: ["ecto.create --quiet", "ecto.migrate", "test"]
  ]
end
end
| 27.521739 | 79 | 0.562928 |
1cfa90885c1958b3d3506dd91a8591af6298a98a | 2,290 | exs | Elixir | mix.exs | mmacai/cloudevents-ex | ef7ab9e39019112e0bec2058c3611ebcd04db605 | [
"Apache-2.0"
] | null | null | null | mix.exs | mmacai/cloudevents-ex | ef7ab9e39019112e0bec2058c3611ebcd04db605 | [
"Apache-2.0"
] | null | null | null | mix.exs | mmacai/cloudevents-ex | ef7ab9e39019112e0bec2058c3611ebcd04db605 | [
"Apache-2.0"
] | null | null | null | defmodule Cloudevents.MixProject do
@moduledoc false

use Mix.Project

def project do
  [
    app: :cloudevents,
    description: description(),
    version: "0.3.0",
    elixir: "~> 1.10",
    start_permanent: Mix.env() == :prod,
    deps: deps(),
    package: package(),
    docs: docs(),
    source_url: "https://github.com/kevinbader/cloudevents-ex",
    # Persist the dialyzer PLT inside the project to speed up re-runs.
    dialyzer: [
      plt_file: {:no_warn, "priv/plts/dialyzer.plt"}
    ],
    aliases: aliases()
  ]
end

# Run "mix help compile.app" to learn about applications.
def application do
  [
    # applications: []
  ]
end

# Run "mix help deps" to learn about dependencies.
defp deps do
  [
    # Docs:
    {:ex_doc, ">= 0.22.1", only: :dev, runtime: false},
    # Linting:
    {:credo, ">= 1.4.0", only: [:dev, :test], runtime: false},
    # Static type checks:
    {:dialyxir, ">= 1.0.0", only: [:dev], runtime: false},
    # Run all static code checks via `mix check`:
    {:ex_check, ">= 0.11.0", only: :dev, runtime: false},
    # A library for defining structs with a type without writing boilerplate code:
    {:typed_struct, "~> 0.2.0"},
    # JSON parser:
    {:jason, "~> 1.2"},
    # Avro encoding/decoding:
    {:avrora, "~> 0.11"}
  ]
end

# Hex package description.
defp description do
  """
  Elixir SDK for CloudEvents, with bindings for JSON, AVRO, HTTP, Kafka.
  """
end

# Hex package metadata.
defp package do
  [
    name: "cloudevents",
    maintainers: ["Kevin Bader"],
    licenses: ["Apache-2.0"],
    links: %{
      "GitHub" => "https://github.com/kevinbader/cloudevents-ex"
    }
  ]
end

defp docs do
  [
    main: "Cloudevents",
    # Injected into every generated docs page: removes the "badges" and
    # "status" DOM nodes (README artifacts) from the rendered HTML.
    before_closing_body_tag: fn _format ->
      """
      <script type="text/javascript">
        ["badges", "status"].forEach(function(id) {
          var element = document.getElementById(id);
          element.parentNode.removeChild(element);
        });
      </script>
      """
    end
  ]
end

# `mix release` runs all checks, tags the commit with the project
# version, pushes tags, then publishes the package to Hex.
defp aliases do
  [
    release: [
      "check",
      fn _ ->
        version = Keyword.get(project(), :version)
        Mix.shell().cmd("git tag v#{version}")
        Mix.shell().cmd("git push --tags")
      end,
      "hex.publish"
    ]
  ]
end
end
| 23.608247 | 84 | 0.535371 |
1cfab3f39b73022b0ef6e50d0c18600ac1071c29 | 340 | exs | Elixir | test/solverlview_web/live/page_live_test.exs | bokner/solverlview | 52a527bc653619fd35977d951b36207ba0ebd9cc | [
"MIT"
] | 10 | 2020-09-18T19:26:51.000Z | 2021-07-08T19:26:00.000Z | test/solverlview_web/live/page_live_test.exs | bokner/solverview | 52a527bc653619fd35977d951b36207ba0ebd9cc | [
"MIT"
] | null | null | null | test/solverlview_web/live/page_live_test.exs | bokner/solverview | 52a527bc653619fd35977d951b36207ba0ebd9cc | [
"MIT"
] | null | null | null | defmodule SolverlviewWeb.PageLiveTest do
use SolverlviewWeb.ConnCase
import Phoenix.LiveViewTest
test "disconnected and connected render", %{conn: conn} do
{:ok, page_live, disconnected_html} = live(conn, "/")
assert disconnected_html =~ "Welcome to Phoenix!"
assert render(page_live) =~ "Welcome to Phoenix!"
end
end
| 28.333333 | 60 | 0.732353 |
1cfacbcad02973073151533c08c2f174630128a1 | 1,921 | ex | Elixir | clients/document_ai/lib/google_api/document_ai/v1beta2/model/google_cloud_documentai_v1beta1_document_page_detected_language.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/document_ai/lib/google_api/document_ai/v1beta2/model/google_cloud_documentai_v1beta1_document_page_detected_language.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/document_ai/lib/google_api/document_ai/v1beta2/model/google_cloud_documentai_v1beta1_document_page_detected_language.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta1DocumentPageDetectedLanguage do
  @moduledoc """
  Detected language for a structural component.

  ## Attributes

  *   `confidence` (*type:* `number()`, *default:* `nil`) - Confidence of detected language. Range [0, 1].
  *   `languageCode` (*type:* `String.t`, *default:* `nil`) - The BCP-47 language code, such as "en-US" or "sr-Latn". For more information, see http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
  """

  # Generated model module (per the file header: do not edit manually).
  # ModelBase supplies the struct definition and the field/1 macro.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :confidence => number() | nil,
          :languageCode => String.t() | nil
        }

  field(:confidence)
  field(:languageCode)
end
defimpl Poison.Decoder,
  for: GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta1DocumentPageDetectedLanguage do
  # Delegates JSON decoding to the generated model's own decode/2.
  def decode(value, options) do
    GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta1DocumentPageDetectedLanguage.decode(
      value,
      options
    )
  end
end
defimpl Poison.Encoder,
  for: GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta1DocumentPageDetectedLanguage do
  # JSON encoding is handled generically by GoogleApi.Gax.ModelBase.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 34.927273 | 207 | 0.743363 |
1cfae5d4fedd3f4b0ad1a90d4a98db4e19054d2f | 921 | exs | Elixir | test/frank_test.exs | triptec/frank | b3cdea238e99595b5e2d8bcc23b97a6572f77d3e | [
"MIT"
] | 1 | 2015-11-26T10:27:35.000Z | 2015-11-26T10:27:35.000Z | test/frank_test.exs | triptec/frank | b3cdea238e99595b5e2d8bcc23b97a6572f77d3e | [
"MIT"
] | null | null | null | test/frank_test.exs | triptec/frank | b3cdea238e99595b5e2d8bcc23b97a6572f77d3e | [
"MIT"
] | null | null | null | defmodule FrankTest do
use ExUnit.Case
import FrankTestUtil
doctest Frank
test "publish with uri, queue and payload" do
queue = "test"
msg = "uri, queue and payload"
fun = fn (payload, _meta) ->
send :frank_test, payload
end
Process.register(self, :frank_test)
{:ok, cmp} = Frank.subscribe(mq_uri(mq_host, mq_port), queue, fun)
:ok = Frank.publish(mq_uri(mq_host, mq_port), queue, msg)
assert_receive msg
end
test "publish with uri, queue and payload and have fun" do
queue = "test"
msg = "uri, queue and payload"
fun = fn (payload, _meta) ->
send :frank_test, payload
end
Process.register(self, :frank_test)
{:ok, cmp} = Frank.subscribe(mq_uri(mq_host, mq_port), queue, fun)
Enum.map(1..1000,
fn x -> :ok = Frank.publish(mq_uri(mq_host, mq_port), queue, Integer.to_string(x)) end
)
assert_receive "1000", 1000
end
end
| 28.78125 | 92 | 0.653637 |
1cfae66bff55a30ed4a9061957142b553d14eed6 | 6,920 | ex | Elixir | lib/game/environment.ex | shanesveller/ex_venture | 68507da11442a9e0423073fcd305e9021f649ca1 | [
"MIT"
] | null | null | null | lib/game/environment.ex | shanesveller/ex_venture | 68507da11442a9e0423073fcd305e9021f649ca1 | [
"MIT"
] | null | null | null | lib/game/environment.ex | shanesveller/ex_venture | 68507da11442a9e0423073fcd305e9021f649ca1 | [
"MIT"
] | null | null | null | defmodule Game.Environment do
@moduledoc """
Look at your surroundings, whether a room or an overworld
"""

alias Game.Room
alias Game.Overworld
alias Game.Overworld.Sector

@type state :: Data.Room.t()

defmacro __using__(_opts) do
  quote do
    # Reads the configured environment module from the
    # `:ex_venture, :game` application config at compile time of the
    # using module.
    @environment Application.get_env(:ex_venture, :game)[:environment]
  end
end

@doc """
Get the type of room based on its id
"""
def room_type(room_id) do
  case room_id do
    # Overworld locations use string ids prefixed with "overworld:";
    # anything else is a plain room id.
    "overworld:" <> _id ->
      :overworld

    _ ->
      :room
  end
end

@doc """
Look around your environment
"""
@spec look(integer() | String.t()) :: state()
def look("overworld:" <> overworld_id) do
  {zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)

  # Resolve the sector process via :global so a missing process yields
  # an error tuple rather than a bad call.
  case :global.whereis_name({Sector, zone_id, sector}) do
    :undefined ->
      {:error, :room_offline}

    pid ->
      GenServer.call(pid, {:look, overworld_id})
  end
end

def look(id) do
  case :global.whereis_name({Room, id}) do
    :undefined ->
      {:error, :room_offline}

    pid ->
      GenServer.call(pid, :look)
  end
end

@doc """
Enter a room

Valid enter reasons: `:enter`, `:respawn`
"""
# Unlike look/1, the cast-based helpers below resolve the target via
# Sector.pid/2 / Room.pid/1 and do not guard against an offline process.
@spec enter(integer(), Character.t(), atom()) :: :ok
def enter("overworld:" <> overworld_id, character, reason) do
  {zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
  GenServer.cast(Sector.pid(zone_id, sector), {:enter, overworld_id, character, reason})
end

def enter(id, character, reason) do
  GenServer.cast(Room.pid(id), {:enter, character, reason})
end

@doc """
Leave a room

Valid leave reasons: `:leave`, `:death`
"""
@spec leave(integer(), Character.t(), atom()) :: :ok
def leave("overworld:" <> overworld_id, character, reason) do
  {zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
  GenServer.cast(Sector.pid(zone_id, sector), {:leave, overworld_id, character, reason})
end

def leave(id, character, reason) do
  GenServer.cast(Room.pid(id), {:leave, character, reason})
end

@doc """
Notify characters in a room of an event
"""
@spec notify(integer(), Character.t(), tuple()) :: :ok
def notify("overworld:" <> overworld_id, character, event) do
  {zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
  GenServer.cast(Sector.pid(zone_id, sector), {:notify, overworld_id, character, event})
end

def notify(id, character, event) do
  GenServer.cast(Room.pid(id), {:notify, character, event})
end

@doc """
Say to the players in the room
"""
@spec say(integer(), pid(), Message.t()) :: :ok
def say("overworld:" <> overworld_id, sender, message) do
  {zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
  GenServer.cast(Sector.pid(zone_id, sector), {:say, overworld_id, sender, message})
end

def say(id, sender, message) do
  GenServer.cast(Room.pid(id), {:say, sender, message})
end

@doc """
Emote to the players in the room
"""
@spec emote(integer(), pid(), Message.t()) :: :ok
def emote("overworld:" <> overworld_id, sender, message) do
  {zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
  GenServer.cast(Sector.pid(zone_id, sector), {:emote, overworld_id, sender, message})
end

def emote(id, sender, message) do
  GenServer.cast(Room.pid(id), {:emote, sender, message})
end

@doc """
Pick up the item
"""
# Synchronous call: the caller needs the result of the pick up.
@spec pick_up(integer(), Item.t()) :: :ok
def pick_up("overworld:" <> overworld_id, item) do
  {zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
  GenServer.call(Sector.pid(zone_id, sector), {:pick_up, overworld_id, item})
end

def pick_up(id, item) do
  GenServer.call(Room.pid(id), {:pick_up, item})
end

@doc """
Pick up currency
"""
@spec pick_up_currency(integer()) :: :ok
def pick_up_currency("overworld:" <> overworld_id) do
  {zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
  GenServer.call(Sector.pid(zone_id, sector), {:pick_up_currency, overworld_id})
end

def pick_up_currency(id) do
  GenServer.call(Room.pid(id), :pick_up_currency)
end

@doc """
Drop an item into a room
"""
@spec drop(integer(), Character.t(), Item.t()) :: :ok
def drop("overworld:" <> overworld_id, who, item) do
  {zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
  GenServer.cast(Sector.pid(zone_id, sector), {:drop, overworld_id, who, item})
end

def drop(id, who, item) do
  GenServer.cast(Room.pid(id), {:drop, who, item})
end

@doc """
Drop currency into a room
"""
@spec drop_currency(integer(), Character.t(), integer()) :: :ok
def drop_currency("overworld:" <> overworld_id, who, currency) do
  {zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
  GenServer.cast(Sector.pid(zone_id, sector), {:drop_currency, overworld_id, who, currency})
end

def drop_currency(id, who, currency) do
  GenServer.cast(Room.pid(id), {:drop_currency, who, currency})
end

@doc """
Update the character after a stats change
"""
@spec update_character(integer(), tuple()) :: :ok
def update_character("overworld:" <> overworld_id, character) do
  {zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
  GenServer.cast(Sector.pid(zone_id, sector), {:update_character, overworld_id, character})
end

def update_character(id, character) do
  GenServer.cast(Room.pid(id), {:update_character, character})
end

@doc """
Link the current process against the room's pid, finds by id
"""
def link("overworld:" <> overworld_id) do
  {zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)

  case :global.whereis_name({Sector, zone_id, sector}) do
    :undefined ->
      {:error, :room_offline}

    pid ->
      Process.link(pid)
  end
end

# NOTE(review): when the process is missing, the overworld clause above
# returns {:error, :room_offline} while this room clause returns :ok —
# confirm the asymmetry is intended (same pattern in unlink/1 below).
def link(id) do
  case :global.whereis_name({Room, id}) do
    :undefined ->
      :ok

    pid ->
      Process.link(pid)
  end
end

@doc """
Unlink the current process against the room's pid, finds by id
"""
def unlink("overworld:" <> overworld_id) do
  {zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)

  case :global.whereis_name({Sector, zone_id, sector}) do
    :undefined ->
      {:error, :room_offline}

    pid ->
      Process.unlink(pid)
  end
end

def unlink(id) do
  case :global.whereis_name({Room, id}) do
    :undefined ->
      :ok

    pid ->
      Process.unlink(pid)
  end
end

@doc """
Crash a room process with an unmatched cast

There should always remain no matching clause for this cast
"""
def crash("overworld:" <> overworld_id) do
  {zone_id, sector} = Overworld.sector_from_overworld_id(overworld_id)
  GenServer.cast(Sector.pid(zone_id, sector), :crash)
end

def crash(id) do
  GenServer.cast(Room.pid(id), :crash)
end
| 27.137255 | 94 | 0.660549 |
1cfb03ce56036833ca3a9e9c9d616f43a7ef46de | 904 | ex | Elixir | apps/general/lib/general/application.ex | bornmeyer/janus_signaling | cbab905aaa844a2762d4647f9363370cecd3db22 | [
"Apache-2.0"
] | null | null | null | apps/general/lib/general/application.ex | bornmeyer/janus_signaling | cbab905aaa844a2762d4647f9363370cecd3db22 | [
"Apache-2.0"
] | null | null | null | apps/general/lib/general/application.ex | bornmeyer/janus_signaling | cbab905aaa844a2762d4647f9363370cecd3db22 | [
"Apache-2.0"
] | null | null | null | defmodule General.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false

use Application

require Logger

def start(_type, _args) do
  children = [
    # Cowboy HTTP listener on port 4000, using the custom dispatch
    # table built by dispatch/0 below.
    Plug.Cowboy.child_spec(
      scheme: :http,
      plug: General.Router,
      options: [
        dispatch: dispatch(),
        port: 4000
      ]
    ),
    # Registry with duplicate keys: several processes may register
    # under the same key.
    Registry.child_spec(
      keys: :duplicate,
      name: Registry.General
    )
  ]

  # See https://hexdocs.pm/elixir/Supervisor.html
  # for other strategies and supported options
  opts = [strategy: :one_for_one, name: General.Supervisor]
  Supervisor.start_link(children, opts)
end

# Cowboy dispatch rules: the catch-all entry hands every request on any
# host/path to General.SocketHandler.
# NOTE(review): the Plug.Cowboy.Handler route (which would serve
# General.Router over plain HTTP) is commented out — confirm that HTTP
# routing is intentionally disabled in favor of the socket handler.
defp dispatch do
  [
    {:_,
      [
        {:_, General.SocketHandler, []},
        #{:_, Plug.Cowboy.Handler, {General.Router, []}}
      ]
    }
  ]
end
end
| 20.545455 | 61 | 0.580752 |
1cfb16028538ea652dbd2605a84d21fae4b80578 | 1,120 | ex | Elixir | packages/templates/src/project/potionx/lib/{{ appName }}_web/channels/user_socket.ex | shuv1824/potionx | a5888413b13a520d8ddf79fb26b7483e441737c3 | [
"MIT"
] | 31 | 2021-02-16T20:50:46.000Z | 2022-02-03T10:38:07.000Z | packages/templates/src/project/potionx/lib/{{ appName }}_web/channels/user_socket.ex | shuv1824/potionx | a5888413b13a520d8ddf79fb26b7483e441737c3 | [
"MIT"
] | 6 | 2021-04-07T21:50:20.000Z | 2022-02-06T21:54:04.000Z | packages/templates/src/project/potionx/lib/{{ appName }}_web/channels/user_socket.ex | shuv1824/potionx | a5888413b13a520d8ddf79fb26b7483e441737c3 | [
"MIT"
] | 4 | 2021-03-25T17:59:44.000Z | 2021-04-25T16:28:22.000Z | defmodule <%= webNamespace %>.UserSocket do
use Phoenix.Socket

# NOTE: this file is an EEx project template — the <%= appName %>-style
# markers (webNamespace, endpointModule) are substituted when a new
# project is generated from it.

## Channels
# channel "room:*", <%= webNamespace %>.RoomChannel

# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
#     {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
# @impl true
# def connect(_params, socket, _connect_info) do
#   {:ok, socket}
# end

# Socket IDs are topics that allow you to identify all sockets for a given user:
#
#     def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
#     <%= endpointModule %>.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
# @impl true
def id(_socket), do: nil
end
| 31.111111 | 84 | 0.676786 |
1cfb2b5cd3d3a39afb59d167be072ac3e2d56f76 | 3,621 | exs | Elixir | mix.exs | randaalex/pow | 2a8c8db4652f7cb2c58d3a897e02b1d47e76f27b | [
"MIT"
] | null | null | null | mix.exs | randaalex/pow | 2a8c8db4652f7cb2c58d3a897e02b1d47e76f27b | [
"MIT"
] | null | null | null | mix.exs | randaalex/pow | 2a8c8db4652f7cb2c58d3a897e02b1d47e76f27b | [
"MIT"
] | null | null | null | defmodule Pow.MixProject do
use Mix.Project

# Package version, reused below as the docs source ref ("v1.0.13").
@version "1.0.13"

def project do
  [
    app: :pow,
    version: @version,
    elixir: "~> 1.6",
    elixirc_paths: elixirc_paths(Mix.env()),
    start_permanent: Mix.env() == :prod,
    compilers: [:phoenix] ++ Mix.compilers(),
    deps: deps(),

    # Hex
    description: "Robust user authentication solution",
    package: package(),

    # Docs
    name: "Pow",
    docs: docs()
  ]
end

def application do
  [
    extra_applications: extra_applications(Mix.env()),
    mod: {Pow.Application, []}
  ]
end

# Tests additionally start :ecto.
defp extra_applications(:test), do: [:ecto, :logger]
defp extra_applications(_), do: [:logger]

defp deps do
  [
    {:ecto, "~> 2.2 or ~> 3.0"},
    {:phoenix, "~> 1.3.0 or ~> 1.4.0"},
    {:phoenix_html, ">= 2.0.0 and <= 3.0.0"},
    {:plug, ">= 1.5.0 and < 2.0.0", optional: true},

    {:phoenix_ecto, "~> 4.0.0", only: [:dev, :test]},
    {:credo, "~> 1.1.0", only: [:dev, :test]},
    # Credo requires jason to exist also in :dev
    {:jason, "~> 1.0", only: [:dev, :test]},
    {:ex_doc, "~> 0.21.0", only: :dev},

    {:ecto_sql, "~> 3.1", only: [:test]},
    {:plug_cowboy, "~> 2.0", only: [:test]},
    {:postgrex, "~> 0.15.0", only: [:test]}
  ]
end

# The test environment additionally compiles helpers in test/support.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]

# Hex package metadata.
defp package do
  [
    maintainers: ["Dan Shultzer"],
    licenses: ["MIT"],
    links: %{github: "https://github.com/danschultzer/pow"},
    files: ~w(lib LICENSE mix.exs README.md)
  ]
end

# ExDoc configuration: extension/guide READMEs are pulled in as extras,
# and modules are grouped by subsystem via regex on the module name.
defp docs do
  [
    markdown_processor: ExDoc.Pow.Markdown,
    source_ref: "v#{@version}",
    main: "README",
    canonical: "http://hexdocs.pm/pow",
    source_url: "https://github.com/danschultzer/pow",
    logo: "assets/logo.svg",
    assets: "assets",
    extras: [
      "README.md": [filename: "README"],
      "CONTRIBUTING.md": [filename: "CONTRIBUTING"],
      "CHANGELOG.md": [filename: "CHANGELOG"],
      "guides/why_pow.md": [],
      "guides/coherence_migration.md": [],
      "guides/configuring_mailer.md": [],
      "guides/user_roles.md": [],
      "guides/lock_users.md": [],
      "guides/custom_controllers.md": [],
      "guides/disable_registration.md": [],
      "guides/redis_cache_store_backend.md": [],
      "guides/umbrella_project.md": [],
      "guides/multitenancy.md": [],
      "guides/sync_user.md": [],
      "guides/api.md": [],
      "lib/extensions/email_confirmation/README.md": [filename: "pow_email_confirmation"],
      "lib/extensions/invitation/README.md": [filename: "pow_invitation"],
      "lib/extensions/persistent_session/README.md": [filename: "pow_persistent_session"],
      "lib/extensions/reset_password/README.md": [filename: "pow_reset_password"]
    ],
    groups_for_modules: [
      Plug: ~r/^Pow.Plug/,
      Ecto: ~r/^Pow.Ecto/,
      Phoenix: ~r/^Pow.Phoenix/,
      "Plug extension": ~r/^Pow.Extension.Plug/,
      "Ecto extension": ~r/^Pow.Extension.Ecto/,
      "Phoenix extension": ~r/^Pow.Extension.Phoenix/,
      "Store handling": ~r/^Pow.Store/,
      "Mix helpers": ~r/^Mix.Pow/,
      "PowEmailConfirmation": ~r/^PowEmailConfirmation/,
      "PowPersistentSession": ~r/^PowPersistentSession/,
      "PowResetPassword": ~r/^PowResetPassword/,
      "PowInvitation": ~r/^PowInvitation/
    ],
    groups_for_extras: [
      Extensions: Path.wildcard("lib/extensions/*/README.md"),
      Guides: Path.wildcard("guides/*.md")
    ]
  ]
end
end
| 30.686441 | 92 | 0.564761 |
1cfb788ea193f439987c6d460198415472d3e3b3 | 235 | exs | Elixir | binary_data_over_phoenix_sockets/test/controllers/page_controller_test.exs | StoiximanServices/blog | b164ae5e8fb4701ee40925aca9aef2297b80be95 | [
"MIT"
] | 10 | 2016-11-28T03:38:36.000Z | 2021-08-24T10:38:38.000Z | binary_data_over_phoenix_sockets/test/controllers/page_controller_test.exs | StoiximanServices/blog | b164ae5e8fb4701ee40925aca9aef2297b80be95 | [
"MIT"
] | null | null | null | binary_data_over_phoenix_sockets/test/controllers/page_controller_test.exs | StoiximanServices/blog | b164ae5e8fb4701ee40925aca9aef2297b80be95 | [
"MIT"
] | 3 | 2018-07-11T08:31:41.000Z | 2019-01-24T18:16:44.000Z | defmodule BinaryDataOverPhoenixSockets.PageControllerTest do
use BinaryDataOverPhoenixSockets.ConnCase
test "GET /", %{conn: conn} do
conn = get conn, "/"
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 26.111111 | 60 | 0.731915 |
1cfb81d17ac6eb4f08a0dff99774db7a7b67cbfa | 177 | exs | Elixir | config/test.exs | gregnar/ex_vmstats | 5cc048fc19ad1db3c24f2f70fb0555a181efc945 | [
"Apache-2.0"
] | null | null | null | config/test.exs | gregnar/ex_vmstats | 5cc048fc19ad1db3c24f2f70fb0555a181efc945 | [
"Apache-2.0"
] | null | null | null | config/test.exs | gregnar/ex_vmstats | 5cc048fc19ad1db3c24f2f70fb0555a181efc945 | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# Print only warnings and errors during test
config :logger, :console,
format: "$metadata[$level] $message\n",
level: :warn
config :logger, :truncate, 99999 | 22.125 | 44 | 0.723164 |
1cfb820ee4858dc90b44303001c6b9f6db3e230d | 4,023 | ex | Elixir | lib/protobuf.ex | wojtekmach/protobuf | 3b251d286dc94745fc6a92fcbf9be03f44bb9b46 | [
"MIT"
] | null | null | null | lib/protobuf.ex | wojtekmach/protobuf | 3b251d286dc94745fc6a92fcbf9be03f44bb9b46 | [
"MIT"
] | null | null | null | lib/protobuf.ex | wojtekmach/protobuf | 3b251d286dc94745fc6a92fcbf9be03f44bb9b46 | [
"MIT"
] | null | null | null | defmodule Protobuf do
@moduledoc """
`protoc` should always be used to generate code instead of writing the code by hand.

By `use` this module, macros defined in `Protobuf.DSL` will be injected. Most of these
macros are equal to definition in .proto files.

    defmodule Foo do
      use Protobuf, syntax: :proto3

      defstruct [:a, :b]

      field :a, 1, type: :int32
      field :b, 2, type: :string
    end

Your Protobuf message(module) is just a normal Elixir struct. Some useful functions are also injected,
see "Callbacks" for details. Examples:

    foo1 = Foo.new!(%{a: 1})
    foo1.b == ""
    bin = Foo.encode(foo1)
    foo1 == Foo.decode(bin)

Except functions in "Callbacks", some other functions may be defined:

* Extension functions when your Protobuf message use extensions. See `Protobuf.Extension` for details.
  * `put_extension(struct, extension_mod, field, value)`
  * `get_extension(struct, extension_mod, field, default \\ nil)`
"""

defmacro __using__(opts) do
  quote location: :keep do
    import Protobuf.DSL, only: [field: 3, field: 2, oneof: 2, extend: 4, extensions: 1]

    # Accumulated by the DSL macros above and consumed by
    # Protobuf.DSL's __before_compile__ hook.
    Module.register_attribute(__MODULE__, :fields, accumulate: true)
    Module.register_attribute(__MODULE__, :oneofs, accumulate: true)
    Module.register_attribute(__MODULE__, :extends, accumulate: true)
    Module.register_attribute(__MODULE__, :extensions, [])

    @options unquote(opts)
    @before_compile Protobuf.DSL
    @behaviour Protobuf

    def new() do
      Protobuf.Builder.new(__MODULE__)
    end

    def new(attrs) do
      Protobuf.Builder.new(__MODULE__, attrs)
    end

    def new!(attrs) do
      Protobuf.Builder.new!(__MODULE__, attrs)
    end

    # No transform module by default; overridable per message.
    def transform_module() do
      nil
    end

    defoverridable transform_module: 0

    unquote(def_encode_decode())
  end
end

# Injected into every message module: per-module encode/1 and decode/1
# delegating to the generic encoder/decoder with the module baked in.
defp def_encode_decode() do
  quote do
    def decode(data), do: Protobuf.Decoder.decode(data, __MODULE__)
    def encode(struct), do: Protobuf.Encoder.encode(struct)
  end
end

@doc """
Build a blank struct with default values. This and other "new" functions are
preferred than raw building struct method like `%Foo{}`.

In proto3, the zero values are the default values.
"""
@callback new() :: struct

@doc """
Build and update the struct with passed fields.
"""
@callback new(Enum.t()) :: struct

@doc """
Similar to `new/1`, but use `struct!/2` to build the struct, so
errors will be raised if unknown keys are passed.
"""
@callback new!(Enum.t()) :: struct

@doc """
Encode the struct to a protobuf binary.

Errors may be raised if there's something wrong in the struct.
"""
@callback encode(struct) :: binary

@doc """
Decode a protobuf binary to a struct.

Errors may be raised if there's something wrong in the binary.
"""
@callback decode(binary) :: struct

@doc """
Returns `nil` or a transformer module that implements the `Protobuf.TransformModule`
behaviour.

This function is overridable in your module.
"""
@callback transform_module() :: module | nil

@doc """
It's preferable to use message's `decode` function, like:

    Foo.decode(bin)
"""
@spec decode(binary, module) :: struct
def decode(data, mod) do
  Protobuf.Decoder.decode(data, mod)
end

@doc """
It's preferable to use message's `encode` function, like:

    Foo.encode(foo)
"""
@spec encode(struct) :: binary
def encode(struct) do
  Protobuf.Encoder.encode(struct)
end

@doc """
Loads extensions modules.

This function should be called in your application's `start/2` callback,
as seen in the example below, if you wish to use extensions.

## Example

    def start(_type, _args) do
      Protobuf.load_extensions()

      Supervisor.start_link([], strategy: :one_for_one)
    end

"""
@spec load_extensions() :: :ok
def load_extensions() do
  # NOTE(review): `__cal_extensions__` reads like a typo for
  # `__call_extensions__`; it must match whatever name is actually
  # defined in Protobuf.Extension — confirm before renaming.
  Protobuf.Extension.__cal_extensions__()

  :ok
end
end
| 25.788462 | 104 | 0.664678 |
1cfba4409b5029fd3efbfd7c2417bd6bbaf53e4a | 7,876 | exs | Elixir | test/protobuf/encoder_test.exs | wojtekmach/protobuf | 3b251d286dc94745fc6a92fcbf9be03f44bb9b46 | [
"MIT"
] | null | null | null | test/protobuf/encoder_test.exs | wojtekmach/protobuf | 3b251d286dc94745fc6a92fcbf9be03f44bb9b46 | [
"MIT"
] | null | null | null | test/protobuf/encoder_test.exs | wojtekmach/protobuf | 3b251d286dc94745fc6a92fcbf9be03f44bb9b46 | [
"MIT"
] | null | null | null | defmodule Protobuf.EncoderTest do
use ExUnit.Case, async: true
alias Protobuf.Encoder
test "encodes one simple field" do
bin = Encoder.encode(TestMsg.Foo.new(a: 42))
assert bin == <<8, 42>>
end
test "encodes full fields" do
bin = <<8, 42, 17, 100, 0, 0, 0, 0, 0, 0, 0, 26, 3, 115, 116, 114, 45, 0, 0, 247, 66>>
res = Encoder.encode(TestMsg.Foo.new(a: 42, b: 100, c: "str", d: 123.5))
assert res == bin
end
test "skips a field with default value" do
bin = <<8, 42, 26, 3, 115, 116, 114, 45, 0, 0, 247, 66>>
res = Encoder.encode(TestMsg.Foo.new(a: 42, c: "str", d: 123.5))
assert res == bin
end
test "skips a field without default value" do
bin = <<8, 42, 17, 100, 0, 0, 0, 0, 0, 0, 0, 45, 0, 0, 247, 66>>
res = Encoder.encode(TestMsg.Foo.new(a: 42, b: 100, d: 123.5))
assert res == bin
end
test "encodes embedded message" do
bin = Encoder.encode(TestMsg.Foo.new(a: 42, e: %TestMsg.Foo.Bar{a: 12, b: "abc"}, f: 13))
assert bin == <<8, 42, 50, 7, 8, 12, 18, 3, 97, 98, 99, 56, 13>>
end
test "encodes empty embedded message" do
bin = Encoder.encode(TestMsg.Foo.new(a: 42, e: TestMsg.Foo.Bar.new()))
assert bin == <<8, 42, 50, 0>>
end
test "encodes repeated non-packed varint fields" do
bin = Encoder.encode(TestMsg.Foo.new(a: 123, g: [12, 13, 14]))
assert bin == <<8, 123, 64, 12, 64, 13, 64, 14>>
end
test "encodes repeated varint fields with all 0" do
bin = Encoder.encode(TestMsg.Foo.new(g: [0, 0, 0]))
assert bin == <<64, 0, 64, 0, 64, 0>>
end
test "encodes repeated embedded fields" do
bin = <<74, 7, 8, 12, 18, 3, 97, 98, 99, 74, 2, 8, 13>>
res =
Encoder.encode(
TestMsg.Foo.new(h: [%TestMsg.Foo.Bar{a: 12, b: "abc"}, TestMsg.Foo.Bar.new(a: 13)])
)
assert res == bin
end
test "encodes repeated embedded fields with all empty struct" do
bin = Encoder.encode(TestMsg.Foo.new(h: [TestMsg.Foo.Bar.new(), TestMsg.Foo.Bar.new()]))
assert bin == <<74, 0, 74, 0>>
end
test "encodes packed fields" do
bin = Encoder.encode(TestMsg.Foo.new(i: [12, 13, 14]))
assert bin == <<82, 3, 12, 13, 14>>
end
test "encodes packed fields with all 0" do
bin = Encoder.encode(TestMsg.Foo.new(i: [0, 0, 0]))
assert bin == <<82, 3, 0, 0, 0>>
end
test "encodes enum type" do
bin = Encoder.encode(TestMsg.Foo.new(j: 2))
assert bin == <<88, 2>>
bin = Encoder.encode(TestMsg.Foo.new(j: :A))
assert bin == <<88, 1>>
bin = Encoder.encode(TestMsg.Foo.new(j: :B))
assert bin == <<88, 2>>
end
test "encodes repeated enum fields using packed by default" do
bin = Encoder.encode(TestMsg.Foo.new(o: [:A, :B]))
assert bin == <<130, 1, 2, 1, 2>>
end
test "encodes unknown enum type" do
bin = Encoder.encode(TestMsg.Foo.new(j: 3))
assert bin == <<88, 3>>
end
test "encodes 0" do
assert Encoder.encode(TestMsg.Foo.new(a: 0)) == <<>>
end
test "encodes empty string" do
assert Encoder.encode(TestMsg.Foo.new(c: "")) == <<>>
end
test "encodes bool" do
assert Encoder.encode(TestMsg.Foo.new(k: false)) == <<>>
assert Encoder.encode(TestMsg.Foo.new(k: true)) == <<96, 1>>
end
test "encode map type" do
bin = <<106, 12, 10, 7, 102, 111, 111, 95, 107, 101, 121, 16, 213, 1>>
struct = TestMsg.Foo.new(l: %{"foo_key" => 213})
assert Encoder.encode(struct) == bin
end
test "encodes 0 for proto2" do
assert Encoder.encode(TestMsg.Foo2.new(a: 0)) == <<8, 0, 17, 5, 0, 0, 0, 0, 0, 0, 0>>
end
test "encodes [] for proto2" do
assert Encoder.encode(TestMsg.Foo2.new(a: 0, g: [])) == <<8, 0, 17, 5, 0, 0, 0, 0, 0, 0, 0>>
end
test "encodes %{} for proto2" do
assert Encoder.encode(TestMsg.Foo2.new(a: 0, l: %{})) == <<8, 0, 17, 5, 0, 0, 0, 0, 0, 0, 0>>
end
test "encodes custom default message for proto2" do
assert Encoder.encode(TestMsg.Foo2.new(a: 0, b: 0)) == <<8, 0, 17, 0, 0, 0, 0, 0, 0, 0, 0>>
end
test "encodes oneof fields" do
msg = TestMsg.Oneof.new(%{first: {:a, 42}, second: {:d, "abc"}, other: "other"})
assert Encoder.encode(msg) == <<8, 42, 34, 3, 97, 98, 99, 42, 5, 111, 116, 104, 101, 114>>
msg = TestMsg.Oneof.new(%{first: {:b, "abc"}, second: {:c, 123}, other: "other"})
assert Encoder.encode(msg) == <<18, 3, 97, 98, 99, 24, 123, 42, 5, 111, 116, 104, 101, 114>>
end
test "encodes oneof fields zero values" do
# proto2
# int and string
msg = TestMsg.Oneof.new(first: {:a, 0}, second: {:d, ""})
assert Encoder.encode(msg) == <<8, 0, 34, 0>>
msg = TestMsg.Oneof.new(first: {:b, ""}, second: {:c, 0})
assert Encoder.encode(msg) == <<18, 0, 24, 0>>
# enum
msg = TestMsg.Oneof.new(first: {:e, :UNKNOWN})
assert Encoder.encode(msg) == <<48, 0>>
assert TestMsg.Oneof.decode(<<48, 0>>) == msg
# proto3
# int and string
msg = TestMsg.OneofProto3.new(first: {:a, 0}, second: {:d, ""})
assert Encoder.encode(msg) == <<8, 0, 34, 0>>
assert TestMsg.OneofProto3.encode(msg) == <<8, 0, 34, 0>>
msg = TestMsg.OneofProto3.new(first: {:b, ""}, second: {:c, 0})
assert Encoder.encode(msg) == <<18, 0, 24, 0>>
assert TestMsg.OneofProto3.encode(msg) == <<18, 0, 24, 0>>
# enum
msg = TestMsg.OneofProto3.new(first: {:e, :UNKNOWN})
assert Encoder.encode(msg) == <<48, 0>>
assert TestMsg.OneofProto3.decode(<<48, 0>>) == msg
end
test "encodes map with oneof" do
msg = Google.Protobuf.Struct.new(fields: %{"valid" => %{kind: {:bool_value, true}}})
bin = Google.Protobuf.Struct.encode(msg)
assert Google.Protobuf.Struct.decode(bin) ==
Google.Protobuf.Struct.new(
fields: %{"valid" => %Google.Protobuf.Value{kind: {:bool_value, true}}}
)
end
test "encodes enum default value for proto2" do
# Includes required
msg = TestMsg.EnumBar2.new(a: 0)
assert Encoder.encode(msg) == <<8, 0>>
# Missing required field `:a` occurs a runtime error
msg = TestMsg.EnumBar2.new()
assert_raise Protobuf.EncodeError, ~r/Got error when encoding TestMsg.EnumBar2/, fn ->
Encoder.encode(msg)
end
msg = TestMsg.EnumFoo2.new()
assert Encoder.encode(msg) == <<>>
# Explicitly set the enum default value should be encoded, should not return it as ""
msg = TestMsg.EnumBar2.new(a: 0)
assert Encoder.encode(msg) == <<8, 0>>
msg = TestMsg.EnumBar2.new(a: 1)
assert Encoder.encode(msg) == <<8, 1>>
msg = TestMsg.EnumBar2.new(a: 0, b: 0)
assert Encoder.encode(msg) == <<8, 0, 16, 0>>
msg = TestMsg.EnumBar2.new(a: 0, b: 1)
assert Encoder.encode(msg) == <<8, 0, 16, 1>>
msg = TestMsg.EnumFoo2.new(a: 0)
assert Encoder.encode(msg) == <<8, 0>>
msg = TestMsg.EnumFoo2.new(a: 1)
assert Encoder.encode(msg) == <<8, 1>>
msg = TestMsg.EnumFoo2.new(b: 0)
assert Encoder.encode(msg) == <<16, 0>>
msg = TestMsg.EnumFoo2.new(a: 0, b: 1)
assert Encoder.encode(msg) == <<8, 0, 16, 1>>
# Proto2 enums that are not zero-based default to their first value declared.
msg = My.Test.Request.new(deadline: nil)
assert Encoder.encode(msg) == <<32, 1>>
msg = My.Test.Request.new(deadline: nil, hat: 1)
assert Encoder.encode(msg) == <<32, 1>>
msg = My.Test.Request.new(deadline: nil, hat: :FEDORA)
assert Encoder.encode(msg) == <<32, 1>>
end
test "encodes with transformer module" do
msg = %TestMsg.ContainsTransformModule{field: 42}
assert Encoder.encode(msg) == <<10, 2, 8, 42>>
assert TestMsg.ContainsTransformModule.decode(Encoder.encode(msg)) == msg
end
test "encoding skips transformer module when field is not set" do
msg = %TestMsg.ContainsTransformModule{field: nil}
assert Encoder.encode(msg) == <<>>
assert TestMsg.ContainsTransformModule.decode(Encoder.encode(msg)) == msg
end
end
| 33.514894 | 97 | 0.599035 |
1cfbd005e72d3f3cc0d6839149bc46f263ebe01e | 1,096 | ex | Elixir | lib/bson/utils.ex | hauleth/mongodb | d1be214cb38494bfd2cb861628526b853e7b776f | [
"Apache-2.0"
] | 286 | 2017-06-06T04:21:31.000Z | 2021-09-11T16:37:59.000Z | lib/bson/utils.ex | hauleth/mongodb | d1be214cb38494bfd2cb861628526b853e7b776f | [
"Apache-2.0"
] | 202 | 2017-05-28T13:22:01.000Z | 2020-05-15T20:15:51.000Z | lib/bson/utils.ex | hauleth/mongodb | d1be214cb38494bfd2cb861628526b853e7b776f | [
"Apache-2.0"
defmodule BSON.Utils do
  @moduledoc false

  # Inclusive bounds of BSON's fixed-width signed integer types.
  @int32_min -2_147_483_648
  @int32_max 2_147_483_647
  @int64_min -9_223_372_036_854_775_808
  @int64_max 9_223_372_036_854_775_807

  defmacro __using__(_opts) do
    quote do
      import BSON.Utils
      import Mongo.BinaryUtils

      # BSON element type tags: the single byte that precedes each element
      # in a serialized document and identifies its value type.
      @type_float 0x01
      @type_string 0x02
      @type_document 0x03
      @type_array 0x04
      @type_binary 0x05
      @type_undefined 0x06
      @type_objectid 0x07
      @type_bool 0x08
      @type_datetime 0x09
      @type_null 0x0A
      @type_regex 0x0B
      @type_js 0x0D
      @type_symbol 0x0E
      @type_js_scope 0x0F
      @type_int32 0x10
      @type_timestamp 0x11
      @type_int64 0x12
      @type_decimal128 0x13
      @type_min 0xFF
      @type_max 0x7F
    end
  end

  # Guard-safe check: is `term` an integer within the signed 32-bit range?
  defmacro is_int32(term) do
    quote do
      is_integer(unquote(term)) and unquote(term) >= unquote(@int32_min) and
        unquote(term) <= unquote(@int32_max)
    end
  end

  # Guard-safe check: is `term` an integer within the signed 64-bit range?
  defmacro is_int64(term) do
    quote do
      is_integer(unquote(term)) and unquote(term) >= unquote(@int64_min) and
        unquote(term) <= unquote(@int64_max)
    end
  end
end
| 21.490196 | 66 | 0.667883 |
1cfc200a836509cb562baaae5e4214c909011b3d | 247 | exs | Elixir | server/.dialyzer_ignore.exs | BlueHotDog/buff | 0403f68867e950945a47cce2d7442974d12583b2 | [
"MIT"
] | 1 | 2020-03-18T17:29:32.000Z | 2020-03-18T17:29:32.000Z | server/.dialyzer_ignore.exs | BlueHotDog/buff | 0403f68867e950945a47cce2d7442974d12583b2 | [
"MIT"
] | 26 | 2019-06-09T18:35:45.000Z | 2020-07-30T17:05:58.000Z | server/.dialyzer_ignore.exs | BlueHotDog/buff | 0403f68867e950945a47cce2d7442974d12583b2 | [
"MIT"
] | null | null | null | [
{":0:unknown_function Function ExUnit.Callbacks.__merge__/3 does not exist."},
{":0:unknown_function Function ExUnit.Callbacks.on_exit/1 does not exist."},
{":0:unknown_function Function ExUnit.CaseTemplate.__proxy__/2 does not exist."}
]
| 41.166667 | 82 | 0.757085 |
1cfc5270aff8aa1a92f70b81c89731080ff2b0b2 | 2,491 | ex | Elixir | lib/nerves_livebook/mix_install.ex | djcarpe/nerves_livebook | 15da67d7b0dcb9e3b67696519ba671b2b477ae02 | [
"Apache-2.0"
] | null | null | null | lib/nerves_livebook/mix_install.ex | djcarpe/nerves_livebook | 15da67d7b0dcb9e3b67696519ba671b2b477ae02 | [
"Apache-2.0"
] | 16 | 2021-09-10T21:35:13.000Z | 2022-03-18T13:41:54.000Z | lib/nerves_livebook/mix_install.ex | djcarpe/nerves_livebook | 15da67d7b0dcb9e3b67696519ba671b2b477ae02 | [
"Apache-2.0"
defmodule NervesLivebook.MixInstall do
  @moduledoc """
  Simulate Mix.install for Nerves Livebook

  Nerves Livebook currently doesn't support `Mix.install/1`, but hopefully it
  will in the future. As a workaround, check whether dependencies that the user
  specifies are available and give them a help message if not.
  """

  @spec install([atom() | {atom(), keyword()} | {atom(), binary(), keyword()}], keyword()) :: :ok
  def install(deps, opts) when is_list(deps) and is_list(opts) do
    # Two passes, same order as Mix.install would see them: first reject
    # malformed entries, then verify each dependency is actually available.
    normalized = Enum.map(deps, &normalize/1)
    Enum.each(normalized, &check_dep/1)
  end

  # Bare app name: any installed version is acceptable.
  defp normalize(app) when is_atom(app), do: {app, ">= 0.0.0"}

  # {app, opts} form: options are only inspected for unsupported :path deps.
  defp normalize({app, opts}) when is_atom(app) and is_list(opts) do
    check_for_path_dep(app, opts)
    {app, ">= 0.0.0"}
  end

  defp normalize({app, requirement}) when is_atom(app) and is_binary(requirement) do
    {app, requirement}
  end

  defp normalize({app, requirement, opts})
       when is_atom(app) and is_binary(requirement) and is_list(opts) do
    check_for_path_dep(app, opts)
    {app, requirement}
  end

  defp normalize(other) do
    raise RuntimeError, """
    Don't know how to install #{inspect(other)}.
    """
  end

  defp check_for_path_dep(app, opts) do
    if Keyword.has_key?(opts, :path) do
      raise RuntimeError, """
      Path dependency for #{inspect(app)} is not supported in Nerves Livebook.
      """
    end
  end

  # A dependency is satisfied when its OTP application is present and its
  # version matches the (normalized) requirement.
  defp check_dep({app, requirement}) do
    case Application.spec(app, :vsn) do
      nil -> raise_not_installed(app)
      vsn -> ensure_version_match(app, to_string(vsn), requirement)
    end
  end

  defp raise_not_installed(app) do
    raise RuntimeError, """
    Mix.install is not supported on Nerves Livebook (yet!).
    All is not lost, but you will have to rebuild the Nerves Livebook firmware.
    Go to https://github.com/fhunleth/nerves_livebook and clone the repository.
    Add #{inspect(app)} to the dependencies listed in the `mix.exs` file, build,
    and then try again.
    """
  end

  defp ensure_version_match(app, vsn, requirement) do
    unless Version.match?(vsn, requirement) do
      raise RuntimeError, """
      The dependency #{inspect(app)} is installed in Nerves Livebook but it
      doesn't match the version requirement #{inspect(requirement)}.
      Here are ways to fix this:
      1. Change the version requirement to {#{inspect(app)}, "~> #{vsn}"}
      2. Rebuild the Nerves Livebook firmware and update the version in
      its mix.exs.
      If Nerves Livebook includes an old version, please file a PR so that
      we can update it.
      """
    end
  end
end
| 31.935897 | 97 | 0.646327 |
1cfc891e809dd8782ac928e535381cebcfd211db | 7,617 | ex | Elixir | lib/phoenix/endpoint/instrument.ex | angwoontiong/phoenix-ui | 03227b13f3c780b5626d56207bfa99c8a06525d8 | [
"MIT"
] | null | null | null | lib/phoenix/endpoint/instrument.ex | angwoontiong/phoenix-ui | 03227b13f3c780b5626d56207bfa99c8a06525d8 | [
"MIT"
] | null | null | null | lib/phoenix/endpoint/instrument.ex | angwoontiong/phoenix-ui | 03227b13f3c780b5626d56207bfa99c8a06525d8 | [
"MIT"
] | null | null | null | defmodule Phoenix.Endpoint.Instrument do
@moduledoc false
# This is the arity that event callbacks in the instrumenter modules must
# have.
@event_callback_arity 3
@doc false
# Builds the quoted code injected into the endpoint module: the public
# `instrument/3` macro plus one precompiled `instrument/4` clause per event
# any configured instrumenter is interested in.
def definstrument(otp_app, endpoint) do
app_instrumenters = app_instrumenters(otp_app, endpoint)
quote bind_quoted: [app_instrumenters: app_instrumenters] do
require Logger
@doc """
Instruments the given function.
`event` is the event identifier (usually an atom) that specifies which
instrumenting function to call in the instrumenter modules. `runtime` is
metadata to be associated with the event at runtime (e.g., the query being
issued if the event to instrument is a DB query).
## Examples
instrument :render_view, %{view: "index.html"}, fn ->
render conn, "index.html"
end
"""
defmacro instrument(event, runtime \\ Macro.escape(%{}), fun) do
compile = Macro.escape(Phoenix.Endpoint.Instrument.strip_caller(__CALLER__))
quote do
unquote(__MODULE__).instrument(
unquote(event),
unquote(compile),
unquote(runtime),
unquote(fun)
)
end
end
# For each event in any of the instrumenters, we must generate a
# clause of the `instrument/4` function. It'll look like this:
#
# def instrument(:my_event, compile, runtime, fun) do
# res0 = Inst0.my_event(:start, compile, runtime)
# ...
#
# start = :erlang.monotonic_time
# try do
# fun.()
# after
# diff = ...
# Inst0.my_event(:stop, diff, res0)
# ...
# end
# end
#
@doc false
def instrument(event, compile, runtime, fun)
for {event, instrumenters} <- app_instrumenters do
def instrument(unquote(event), var!(compile), var!(runtime), fun)
when is_map(var!(compile)) and is_map(var!(runtime)) and is_function(fun, 0) do
unquote(Phoenix.Endpoint.Instrument.compile_start_callbacks(event, instrumenters))
# Time the instrumented fun with the VM's monotonic clock; the `after`
# block guarantees stop callbacks run even if the fun raises.
start = :erlang.monotonic_time
try do
fun.()
after
var!(diff) = (:erlang.monotonic_time - start) |> :erlang.convert_time_unit(:native, :micro_seconds)
unquote(Phoenix.Endpoint.Instrument.compile_stop_callbacks(event, instrumenters))
end
end
end
# Catch-all clause
def instrument(event, compile, runtime, fun)
when is_atom(event) and is_map(compile) and is_map(runtime) and is_function(fun, 0) do
fun.()
end
end
end
# Reads a list of the instrumenters from the config of `otp_app` and finds all
# events in those instrumenters. The return value is a list of `{event,
# instrumenters}` tuples, one for each event defined by any instrumenters
# (with no duplicated events); `instrumenters` is the list of instrumenters
# interested in `event`.
defp app_instrumenters(otp_app, endpoint) do
config = Application.get_env(otp_app, endpoint, [])
instrumenters = config[:instrumenters] || []
# Fail fast on misconfiguration: every entry must be a module atom.
unless is_list(instrumenters) and Enum.all?(instrumenters, &is_atom/1) do
raise ":instrumenters must be a list of instrumenter modules"
end
events_to_instrumenters(instrumenters)
end
# Strips a `Macro.Env` struct, leaving only interesting compile-time metadata.
@doc false
@spec strip_caller(Macro.Env.t) :: %{}
def strip_caller(%Macro.Env{module: mod, function: fun, file: file, line: line}) do
caller = %{module: mod, function: form_fa(fun), file: file, line: line}
# Logger exposes the application currently being compiled; tag the event
# with it when available.
if app = Application.get_env(:logger, :compile_time_application) do
Map.put(caller, :application, app)
else
caller
end
end
# Formats a {name, arity} pair as "name/arity"; nil when the macro was
# invoked outside of a function (e.g. in a module body).
defp form_fa({name, arity}), do: Atom.to_string(name) <> "/" <> Integer.to_string(arity)
defp form_fa(nil), do: nil
# called by Phoenix.Endpoint.instrument/4, see docs there
@doc false
@spec extract_endpoint(Plug.Conn.t | Phoenix.Socket.t | module) :: module | nil
def extract_endpoint(endpoint_or_conn_or_socket) do
case endpoint_or_conn_or_socket do
# A conn that went through an endpoint carries it in conn.private.
%Plug.Conn{private: %{phoenix_endpoint: endpoint}} -> endpoint
%Phoenix.Socket{endpoint: endpoint} -> endpoint
# A conn/socket without endpoint info: nothing to instrument against.
%{__struct__: struct} when struct in [Plug.Conn, Phoenix.Socket] -> nil
# Anything else is assumed to already be an endpoint module.
endpoint -> endpoint
end
end
# Returns the AST for all the calls to the "start event" callbacks in the given
# list of `instrumenters`.
# Each function call looks like this:
#
# res0 = Instr0.my_event(:start, compile, runtime)
#
@doc false
@spec compile_start_callbacks(term, [module]) :: Macro.t
def compile_start_callbacks(event, instrumenters) do
Enum.map Enum.with_index(instrumenters), fn {inst, index} ->
error_prefix = "Instrumenter #{inspect inst}.#{event}/3 failed.\n"
quote do
# Bind each instrumenter's start result to resN so the matching stop
# callback can receive it later.
unquote(build_result_variable(index)) =
try do
unquote(inst).unquote(event)(:start, var!(compile), var!(runtime))
catch
# A broken instrumenter must never break the instrumented code path.
kind, error ->
Logger.error unquote(error_prefix) <> Exception.format(kind, error)
end
end
end
end
# Returns the AST for all the calls to the "stop event" callbacks in the given
# list of `instrumenters`.
# Each function call looks like this:
#
#     Instr0.my_event(:stop, diff, res0)
#
@doc false
# Fixed: the spec previously named compile_start_callbacks/2 (copy-paste
# error), so it attached to the wrong function and this one had no spec.
@spec compile_stop_callbacks(term, [module]) :: Macro.t
def compile_stop_callbacks(event, instrumenters) do
  Enum.map Enum.with_index(instrumenters), fn {inst, index} ->
    error_prefix = "Instrumenter #{inspect inst}.#{event}/3 failed.\n"

    quote do
      try do
        # `diff` is bound by the generated instrument/4 clause; resN was
        # bound by the corresponding start callback.
        unquote(inst).unquote(event)(:stop, var!(diff), unquote(build_result_variable(index)))
      catch
        # A broken instrumenter must never break the instrumented code path.
        kind, error ->
          Logger.error unquote(error_prefix) <> Exception.format(kind, error)
      end
    end
  end
end
# Takes a list of instrumenter modules and returns a list of `{event,
# instrumenters}` tuples where each tuple represents an event and all the
# modules interested in that event.
defp events_to_instrumenters(instrumenters) do
instrumenters # [Ins1, Ins2, ...]
|> instrumenters_and_events() # [{Ins1, e1}, {Ins2, e1}, ...]
|> Enum.group_by(fn {_inst, e} -> e end) # %{e1 => [{Ins1, e1}, ...], ...}
|> Enum.map(fn {e, insts} -> {e, strip_events(insts)} end) # [{e1, [Ins1, Ins2]}, ...]
end
# Pairs every instrumenter module with each event (exported function of the
# expected arity) it defines.
defp instrumenters_and_events(instrumenters) do
# We're only interested in functions (events) with the given arity.
for inst <- instrumenters,
{event, @event_callback_arity} <- inst.__info__(:functions),
do: {inst, event}
end
# Drops the event from each {instrumenter, event} pair.
defp strip_events(instrumenters) do
for {inst, _evt} <- instrumenters, do: inst
end
# Builds the AST variable `resN` used to thread a start callback's result
# into the matching stop callback.
defp build_result_variable(index) when is_integer(index) do
"res#{index}" |> String.to_atom() |> Macro.var(nil)
end
# Structs are passed through untouched (only plain maps/lists of request
# params are filtered).
def filter_values(%{__struct__: mod} = struct, _filter_params) when is_atom(mod) do
struct
end
# Masks the value of any binary key that contains one of `filter_params`
# (e.g. "password"); all other values are filtered recursively.
def filter_values(%{} = map, filter_params) do
Enum.into map, %{}, fn {k, v} ->
if is_binary(k) && String.contains?(k, filter_params) do
{k, "[FILTERED]"}
else
{k, filter_values(v, filter_params)}
end
end
end
# Lists are filtered element-wise.
def filter_values([_|_] = list, filter_params) do
Enum.map(list, &filter_values(&1, filter_params))
end
# Scalars (and the empty list) need no filtering.
def filter_values(other, _filter_params), do: other
end
| 35.263889 | 111 | 0.635815 |
1cfcdcd3d57ebf4c951ed1e11fed1dcf2a1b04bf | 983 | ex | Elixir | Microsoft.Azure.Management.Resources/lib/microsoft/azure/management/resources/model/provider_resource_type.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Resources/lib/microsoft/azure/management/resources/model/provider_resource_type.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.Resources/lib/microsoft/azure/management/resources/model/provider_resource_type.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.

defmodule Microsoft.Azure.Management.Resources.Model.ProviderResourceType do
  @moduledoc """
  Resource type managed by the resource provider.
  """

  # Poison serializes the struct straight to JSON using these camelCase keys.
  @derive [Poison.Encoder]
  defstruct [
    :resourceType,
    :locations,
    :aliases,
    :apiVersions,
    :properties
  ]

  @type t :: %__MODULE__{
          resourceType: String.t,
          locations: [String.t],
          aliases: [AliasType],
          apiVersions: [String.t],
          properties: %{optional(String.t) => String.t}
        }
end
defimpl Poison.Decoder, for: Microsoft.Azure.Management.Resources.Model.ProviderResourceType do
  import Microsoft.Azure.Management.Resources.Deserializer

  # Post-decode pass: rebuild the `aliases` field as a list of AliasType structs.
  def decode(value, options) do
    deserialize(value, :aliases, :list, Microsoft.Azure.Management.Resources.Model.AliasType, options)
  end
end
1cfd39f65c8e6460d7fe0292ff5b36939f7a07b9 | 1,487 | ex | Elixir | lib/ex_onixo/parser.ex | damjack/ex_onixo | 5b1f97bc65867dcf1710540264094d147722ee11 | [
"MIT"
] | 1 | 2021-12-11T06:44:18.000Z | 2021-12-11T06:44:18.000Z | lib/ex_onixo/parser.ex | damjack/ex_onixo | 5b1f97bc65867dcf1710540264094d147722ee11 | [
"MIT"
] | null | null | null | lib/ex_onixo/parser.ex | damjack/ex_onixo | 5b1f97bc65867dcf1710540264094d147722ee11 | [
"MIT"
defmodule ExOnixo.Parser do
  @moduledoc false

  import SweetXml

  alias ExOnixo.Parser.{
    Product,
    Product21,
    Sender,
    Sender21
  }

  defdelegate raw_xml(xml_tree), to: ExOnixo.Raw

  # Lazily streams every <Product> element out of the ONIX file, discarding
  # each element's DOM after it is emitted to keep memory bounded.
  defp init_stream_parser(origin) do
    init_stream(origin)
    |> SweetXml.stream_tags([:Product], discard: [:Product])
  end

  @doc """
  Parses every product record of the ONIX file at `origin`, dispatching on
  the ONIX release ("3.0" or "2.1") given in the options map.
  """
  def parse_stream_product(origin, %{release: "3.0"}) do
    init_stream_parser(origin)
    |> Stream.map(fn {_, doc} ->
      # A stray `|> IO.inspect` debug leftover was removed here; the 2.1
      # clause below never printed, so the console output was accidental.
      Product.parse_recursive(doc)
    end)
    |> Enum.to_list
  end

  def parse_stream_product(origin, %{release: "2.1"}) do
    init_stream_parser(origin)
    |> Stream.map(fn {_, doc} ->
      Product21.parse_recursive(doc)
    end)
    |> Enum.to_list
  end

  @doc """
  Streams every element named `opts[:tag]` out of the file at `origin` and
  returns the matches as a list.
  """
  def raw_stream(origin, opts) do
    # SECURITY NOTE: String.to_atom/1 on caller-supplied input creates atoms
    # at runtime and atoms are never garbage collected, so `opts[:tag]` must
    # not come from untrusted input.
    tag = String.to_atom(opts[:tag])

    init_stream(origin)
    |> SweetXml.stream_tags(tag, discard: [tag])
    |> Stream.map(fn
      {_, doc} ->
        # BUG FIX: the sigil previously contained the literal text
        # "//opts[:tag]" instead of interpolating the requested tag name,
        # so the XPath never matched the streamed elements.
        SweetXml.xpath(doc, ~x"//#{opts[:tag]}"l)
        # |> ExOnixo.raw_xml
    end)
    |> Enum.to_list
  end

  def parse_stream_sender("", _args), do: {:error, "File not found"}

  def parse_stream_sender(origin, %{release: "3.0"}),
    do: init_stream(origin) |> Sender.parse_recursive

  def parse_stream_sender(origin, %{release: "2.1"}),
    do: init_stream(origin) |> Sender21.parse_recursive

  # NOTE(review): this error tuple is only meaningful for the explicit ""
  # clause of parse_stream_sender/2 above; the product/raw pipelines would
  # pipe it straight into stream_tags and crash — confirm that is intended.
  defp init_stream(""), do: {:error, "No file found"}

  defp init_stream(origin) do
    File.stream! origin
  end
end
| 27.036364 | 96 | 0.632145 |
1cfd3f2e11ddc6ed6a123debf55001f10d78669f | 1,547 | ex | Elixir | lib/linreg_web/endpoint.ex | Tmw/linreg | b4dd10006ec875da1250cd5d2d7d21b551ed15e5 | [
"MIT"
] | 9 | 2020-05-25T19:54:41.000Z | 2022-03-09T09:57:04.000Z | lib/linreg_web/endpoint.ex | Tmw/linreg | b4dd10006ec875da1250cd5d2d7d21b551ed15e5 | [
"MIT"
] | 2 | 2020-06-04T13:25:11.000Z | 2020-06-07T14:31:30.000Z | lib/linreg_web/endpoint.ex | Tmw/linreg | b4dd10006ec875da1250cd5d2d7d21b551ed15e5 | [
"MIT"
defmodule LinregWeb.Endpoint do
  use Phoenix.Endpoint, otp_app: :linreg

  # The session will be stored in the cookie and signed,
  # this means its contents can be read but not tampered with.
  # Set :encryption_salt if you would also like to encrypt it.
  @session_options [
    store: :cookie,
    key: "_linreg_key",
    signing_salt: "90t/emVz"
  ]

  socket "/socket", LinregWeb.UserSocket,
    websocket: true,
    longpoll: false

  # LiveView socket; the signed session options above are forwarded so
  # LiveViews can read the same session data.
  socket "/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]]

  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phx.digest
  # when deploying your static files in production.
  plug Plug.Static,
    at: "/",
    from: :linreg,
    gzip: false,
    only: ~w(css fonts images js favicon.ico robots.txt)

  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
    plug Phoenix.LiveReloader
    plug Phoenix.CodeReloader
  end

  plug Phoenix.LiveDashboard.RequestLogger,
    param_key: "request_logger",
    cookie_key: "request_logger"

  plug Plug.RequestId
  plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]

  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Phoenix.json_library()

  plug Plug.MethodOverride
  plug Plug.Head
  plug Plug.Session, @session_options
  plug LinregWeb.Router
end
1cfd4688565540338d9051f9b9cd8ec8fa96be13 | 1,647 | ex | Elixir | lib/yggdrasil/distributor/distributor.ex | jsangilve/yggdrasil | a2ee905e877b6a9d918d8858153a01aadf2d5d63 | [
"MIT"
] | null | null | null | lib/yggdrasil/distributor/distributor.ex | jsangilve/yggdrasil | a2ee905e877b6a9d918d8858153a01aadf2d5d63 | [
"MIT"
] | null | null | null | lib/yggdrasil/distributor/distributor.ex | jsangilve/yggdrasil | a2ee905e877b6a9d918d8858153a01aadf2d5d63 | [
"MIT"
defmodule Yggdrasil.Distributor do
  @moduledoc """
  Supervisor for `Yggdrasil.Distributor.Adapter` and
  `Yggdrasil.Distributor.Publisher`.
  """
  use Supervisor

  alias Yggdrasil.Channel
  alias Yggdrasil.Settings

  @publisher Yggdrasil.Distributor.Publisher
  @registry Settings.registry()

  #############################################################################
  # Client API.

  @doc """
  Starts the supervisor, using `channel` as part of the identificator of its
  children in the registry. Additional `Supervisor` `options` may be given.
  """
  def start_link(%Channel{} = channel, options \\ []) do
    Supervisor.start_link(__MODULE__, channel, options)
  end

  @doc """
  Stops the `supervisor`, first asking every child to shut down.
  """
  def stop(supervisor) do
    supervisor
    |> Supervisor.which_children()
    |> Enum.each(fn {module, child, _type, _modules} ->
      # Best effort: the child may already be gone, so any failure while
      # stopping it is swallowed on purpose.
      try do
        apply(module, :stop, [child])
      catch
        _kind, _reason -> :ok
      end
    end)

    Supervisor.stop(supervisor)
  end

  #############################################################################
  # Supervisor callback.

  @doc false
  def init(%Channel{adapter: adapter_module} = channel) do
    import Supervisor.Spec

    adapter_via = {:via, @registry, {adapter_module, channel}}
    publisher_via = {:via, @registry, {@publisher, channel}}

    children = [
      worker(
        @publisher,
        [channel, [name: publisher_via]],
        restart: :transient
      ),
      worker(
        adapter_module,
        [channel, publisher_via, [name: adapter_via]],
        restart: :transient
      )
    ]

    # Publisher starts first; with :rest_for_one a publisher crash also
    # restarts the adapter, while an adapter crash leaves the publisher alone.
    supervise(children, strategy: :rest_for_one)
  end
end
| 25.338462 | 79 | 0.590771 |
1cfd497736e450d0e56e68a7f4b012f94418a9c3 | 954 | ex | Elixir | lib/phoenix/status.ex | brightroll/phoenix | b93022086322bcc1d797214a28e0c9710f537c22 | [
"MIT"
] | null | null | null | lib/phoenix/status.ex | brightroll/phoenix | b93022086322bcc1d797214a28e0c9710f537c22 | [
"MIT"
] | null | null | null | lib/phoenix/status.ex | brightroll/phoenix | b93022086322bcc1d797214a28e0c9710f537c22 | [
"MIT"
defmodule Phoenix.Status do
  @moduledoc """
  Conversion for transforming atoms to http status codes.
  """

  defmodule InvalidStatus do
    defexception [:message]

    def exception(value) do
      %InvalidStatus{message: "Invalid HTTP status #{inspect value}"}
    end
  end

  # Compile one `code/1` clause per line of statuses.txt. Each line reads
  # "<code>\t<reason phrase>"; the phrase is lowercased and squeezed into a
  # snake_case atom, e.g. "404\tNot Found" -> code(:not_found) == 404.
  for line <- File.stream!(Path.join([__DIR__, "statuses.txt"]), [], :line) do
    [code_text, message] =
      line
      |> String.split("\t")
      |> Enum.map(&String.strip/1)

    status_code = String.to_integer(code_text)

    status_atom =
      message
      |> String.downcase
      |> String.replace(~r/[^\w]+/, "_")
      |> String.to_atom

    def code(unquote(status_atom)), do: unquote(status_code)
  end

  @doc """
  Convert atom to http status code.
  When passed an integer status code, simply returns it, valid or not.
  Examples
  iex> Status.code(:ok)
  200
  iex> Status.code(200)
  200
  """
  def code(code) when is_integer(code), do: code
  # Atoms not generated above are unknown statuses and raise.
  def code(atom), do: raise(InvalidStatus, atom)
end
| 23.268293 | 79 | 0.63522 |
1cfd5a7961a7924074ad8efb9523fe36d78991cd | 2,395 | ex | Elixir | lib/bongo/converter/out.ex | bombinatetech/typed_struct | dc07cb613e24fe9e6efffe1c36bed62eee764ee2 | [
"MIT"
] | 7 | 2019-01-31T04:45:11.000Z | 2020-02-22T15:37:51.000Z | lib/bongo/converter/out.ex | yatender-oktalk/bongo | bf68a4ffd67f850d6dd1c05fd3eb01f73d677812 | [
"MIT"
] | 1 | 2019-01-11T07:07:09.000Z | 2019-01-11T07:07:09.000Z | lib/bongo/converter/out.ex | yatender-oktalk/bongo | bf68a4ffd67f850d6dd1c05fd3eb01f73d677812 | [
"MIT"
defmodule Bongo.Converter.Out do
  @moduledoc false

  import Bongo.Utilities, only: [log_and_return: 2, debug_log: 2]

  # Converts a value from its in-memory form into its outgoing form,
  # dispatching on the (quoted) type expression stored in the model.

  # Missing values stay missing.
  def convert_out(nil, _type, _lenient) do
    nil
  end

  # No type information: log and pass the value through untouched.
  def convert_out(value, nil, _lenient) do
    log_and_return(value, "This model contains an unknown field *out* type")
  end

  # `type | nil` unions: convert against the non-nil member.
  def convert_out(value, {:|, [], [type, nil]}, lenient) do
    convert_out(value, type, lenient)
  end

  # Quoted remote type (`Some.Module.t()`): expand the alias and recurse.
  def convert_out(
        value,
        {{:., meta, [{:__aliases__, _aliases, type}, :t]}, meta, []},
        lenient
      ) do
    convert_out(
      value,
      Macro.expand_once({:__aliases__, [alias: false], type}, __ENV__),
      lenient
    )
  end

  # Lists: convert element-wise.
  def convert_out(value, [type], lenient) when is_list(value) do
    Enum.map(value, &convert_out(&1, type, lenient))
  end

  # def convert_out(value, type, lenient) when is_list(value) do
  #   value
  #   |> Enum.map(fn {k, v} -> {k, convert_out(v, type, lenient)} end)
  # end
  #
  # def convert_out(value, type, lenient) when is_map(value) do
  #   value
  #   |> Enum.map(fn {k, v} -> {k, convert_out(v, type, lenient)} end)
  #   |> Map.new()
  # end

  # ObjectIds render as their hex string form.
  def convert_out(%BSON.ObjectId{} = value, :string, _lenient) do
    BSON.ObjectId.encode!(value)
  end

  def convert_out(value, :string, _lenient) do
    to_string(value)
  end

  # Pass-through primitives.
  def convert_out(value, type, _lenient) when type in [:integer, :any, :float, :double] do
    value
  end

  # NOTE(review): decode! turns a hex string into a %BSON.ObjectId{} — an odd
  # direction for an *out* conversion; confirm against the callers.
  def convert_out(value, :objectId, _lenient) do
    BSON.ObjectId.decode!(value)
  end

  # Non-boolean values for boolean fields are dropped.
  def convert_out(value, :boolean, _lenient) when is_boolean(value), do: value
  def convert_out(_value, :boolean, _lenient), do: nil

  # Anything else is assumed to be a Bongo model module.
  def convert_out(value, module, lenient) do
    module.structize(value, lenient)
  rescue
    _ ->
      debug_log({module, value}, "failed to structize {module,value} ")
      value
  end

  # Converts a keyword/map of raw fields; keys with no declared out-type are
  # replaced by :blackhole markers.
  def from(item, out_types, _defaults, lenient) do
    Enum.map(item, fn {k, v} ->
      field = String.to_atom(to_string(k))

      if Keyword.has_key?(out_types, field) do
        {k, convert_out(v, out_types[field], lenient)}
      else
        {k, :blackhole}
      end
    end)
  end
end
| 23.028846 | 76 | 0.616701 |
1cfd5ee99768112aca6e13ffc16657490b006d91 | 2,712 | exs | Elixir | template/$PROJECT_NAME$/config/prod.exs | infinitered/firebird | 679ca2f21eb0a5dc4bfc15c90f2c3c2abd2fe49b | [
"MIT"
] | 72 | 2017-04-30T05:45:22.000Z | 2021-05-20T21:00:31.000Z | template/$PROJECT_NAME$/config/prod.exs | infinitered/firebird | 679ca2f21eb0a5dc4bfc15c90f2c3c2abd2fe49b | [
"MIT"
] | 13 | 2017-04-30T07:03:59.000Z | 2017-12-15T08:19:16.000Z | template/$PROJECT_NAME$/config/prod.exs | infinitered/firebird | 679ca2f21eb0a5dc4bfc15c90f2c3c2abd2fe49b | [
"MIT"
] | 4 | 2017-09-05T14:21:29.000Z | 2018-09-30T11:11:38.000Z | use Mix.Config
# For production, we often load configuration from external
# sources, such as your system environment. For this reason,
# you won't find the :http configuration below, but set inside
# <%= @project_name_camel_case %>Web.Endpoint.load_from_system_env/1 dynamically.
# Any dynamic configuration should be moved to such function.
#
# Don't forget to configure the url host to something meaningful,
# Phoenix uses this information when generating URLs.
#
# Finally, we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the mix phoenix.digest task
# which you typically run after static files are built.
# Endpoint config for production: the real HTTP settings are injected at runtime
# via load_from_system_env (handled by Endpoint.load_from_system_env/1, per the
# note above); cache_static_manifest points at the digested-asset manifest, and
# force_ssl redirects plain-HTTP requests based on the proxy's
# x-forwarded-proto header.
config :<%= @project_name %>, <%= @project_name_camel_case %>Web.Endpoint,
  load_from_system_env: true,
  cache_static_manifest: "priv/static/cache_manifest.json",
  force_ssl: [rewrite_on: [:x_forwarded_proto]]
# Do not print debug messages in production
config :logger, level: :info
# TODO: Configure mailer for production.
#
# config :<%= @project_name %>, <%= @project_name_camel_case %>.Mailer,
# adapter: Swoosh.Adapters.Sendgrid,
# api_key: {:system, "SENDGRID_API_KEY"}
# TODO: Configure the database
#
# config :<%= @project_name %>, <%= @project_name_camel_case %>.Repo,
# adapter: Ecto.Adapters.Postgres,
# url: System.get_env("DATABASE_URL"),
# pool_size: String.to_integer(System.get_env("POOL_SIZE") || "20"),
# ssl: true
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :<%= @project_name %>, <%= @project_name_camel_case %>Web.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [:inet6,
# port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :<%= @project_name %>, <%= @project_name_camel_case %>Web.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :<%= @project_name %>, <%= @project_name_camel_case %>Web.Endpoint, server: true
#
| 36.16 | 93 | 0.70649 |
1cfd6b203a521072738711c6d76156f114419eec | 23,372 | ex | Elixir | lib/elixir/lib/access.ex | thomascchen/elixir | d19a92bbf4cb3743ce726a87e584a1741b59f5b0 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/access.ex | thomascchen/elixir | d19a92bbf4cb3743ce726a87e584a1741b59f5b0 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/access.ex | thomascchen/elixir | d19a92bbf4cb3743ce726a87e584a1741b59f5b0 | [
"Apache-2.0"
] | null | null | null | defmodule Access do
@moduledoc """
Key-based access to data structures using the `data[key]` syntax.
Elixir provides two syntaxes for accessing values. `user[:name]`
is used by dynamic structures, like maps and keywords, while
`user.name` is used by structs. The main difference is that
`user[:name]` won't raise if the key `:name` is missing but
`user.name` will raise if there is no `:name` key.
Besides the cases above, this module provides convenience
functions for accessing other structures, like `at/1` for
lists and `elem/1` for tuples. Those functions can be used
by the nested update functions in `Kernel`, such as
`Kernel.get_in/2`, `Kernel.put_in/3`, `Kernel.update_in/3`,
`Kernel.get_and_update_in/3` and friends.
## Dynamic lookups
Out of the box, `Access` works with `Keyword` and `Map`:
iex> keywords = [a: 1, b: 2]
iex> keywords[:a]
1
iex> map = %{a: 1, b: 2}
iex> map[:a]
1
iex> star_ratings = %{1.0 => "★", 1.5 => "★☆", 2.0 => "★★"}
iex> star_ratings[1.5]
"★☆"
Note that the dynamic lookup syntax (`term[key]`) roughly translates to
`Access.get(term, key, nil)`.
`Access` can be combined with `Kernel.put_in/3` to put a value
in a given key:
iex> map = %{a: 1, b: 2}
iex> put_in map[:a], 3
%{a: 3, b: 2}
This syntax is very convenient as it can be nested arbitrarily:
iex> users = %{"john" => %{age: 27}, "meg" => %{age: 23}}
iex> put_in users["john"][:age], 28
%{"john" => %{age: 28}, "meg" => %{age: 23}}
Furthermore, `Access` transparently ignores `nil` values:
iex> keywords = [a: 1, b: 2]
iex> keywords[:c][:unknown]
nil
Since `Access` is a behaviour, it can be implemented for key-value
data structures. The implementation should be added to the
module that defines the struct being accessed. `Access` requires the
key comparison to be implemented using the `===` operator.
## Static lookups
The `Access` syntax (`data[key]`) cannot be used to access fields in
structs, since structs do not implement the `Access` behaviour by
default. It is also a design decision: the dynamic access lookup
is meant to be used for dynamic key-value structures, like maps
and keywords, and not by static ones like structs (where fields are
known and not dynamic).
Therefore Elixir provides a static lookup for struct fields and for atom
fields in maps. Imagine a struct named `User` with a `:name` field.
The following would raise:
user = %User{name: "John"}
user[:name]
# ** (UndefinedFunctionError) undefined function User.fetch/2 (User does not implement the Access behaviour)
Structs instead use the `user.name` syntax to access fields:
user.name
#=> "John"
The same `user.name` syntax can also be used by `Kernel.put_in/2`
for updating structs fields:
put_in user.name, "Mary"
#=> %User{name: "Mary"}
Differently from `user[:name]`, `user.name` is not extensible via
a behaviour and is restricted only to structs and atom keys in maps.
As mentioned above, this works for atom keys in maps as well. Refer to the
`Map` module for more information on this.
Summing up:
* `user[:name]` is used by dynamic structures, is extensible and
does not raise on missing keys
* `user.name` is used by static structures, it is not extensible
and it will raise on missing keys
## Accessors
While Elixir provides built-in syntax only for traversing dynamic
and static key-value structures, this module provides convenience
functions for traversing other structures, like tuples and lists,
to be used alongside `Kernel.put_in/2` in others.
For instance, given a user map with `:name` and `:languages` keys, here is how
to deeply traverse the map and convert all language names to uppercase:
iex> languages = [
...> %{name: "elixir", type: :functional},
...> %{name: "c", type: :procedural},
...> ]
iex> user = %{name: "john", languages: languages}
iex> update_in user, [:languages, Access.all(), :name], &String.upcase/1
%{name: "john",
languages: [%{name: "ELIXIR", type: :functional},
%{name: "C", type: :procedural}]}
See the functions `key/1`, `key!/1`, `elem/1`, and `all/0` for some of the
available accessors.
## Implementing the Access behaviour for custom data structures
In order to be able to use the `Access` behaviour with custom data structures
(which have to be structs), such structures have to implement the `Access`
behaviour. For example, for a `User` struct, this would have to be done:
defmodule User do
defstruct [:name, :email]
@behaviour Access
# Implementation of the Access callbacks...
end
"""
@type container :: keyword | struct | map
@type nil_container :: nil
@type any_container :: any
@type t :: container | nil_container | any_container
@type key :: any
@type value :: any
@type get_fun(data, get_value) ::
(:get, data, (term -> term) ->
{get_value, new_data :: container})
@type get_and_update_fun(data, get_value) ::
(:get_and_update, data, (term -> term) ->
{get_value, new_data :: container} | :pop)
@type access_fun(data, get_value) ::
get_fun(data, get_value) | get_and_update_fun(data, get_value)
@doc """
Invoked in order to access the value stored under `key` in the given term `term`.
This function should return `{:ok, value}` where `value` is the value under
`key` if the key exists in the term, or `:error` if the key does not exist in
the term.
Many of the functions defined in the `Access` module internally call this
function. This function is also used when the square-brackets access syntax
(`structure[key]`) is used: the `fetch/2` callback implemented by the module
that defines the `structure` struct is invoked and if it returns `{:ok,
value}` then `value` is returned, or if it returns `:error` then `nil` is
returned.
See the `Map.fetch/2` and `Keyword.fetch/2` implementations for examples of
how to implement this callback.
"""
@callback fetch(term :: t, key) :: {:ok, value} | :error
@doc """
Invoked in order to access the value stored under `key` in the given term `term`,
defaulting to `default` if not present.
This function should return the value under `key` in `term` if there's
such key, otherwise `default`.
For most data structures, this can be implemented using `fetch/2` internally;
for example:
def get(structure, key, default) do
case fetch(structure, key) do
{:ok, value} -> value
:error -> default
end
end
See the `Map.get/3` and `Keyword.get/3` implementations for examples of
how to implement this callback.
"""
@callback get(term :: t, key, default :: value) :: value
@doc """
Invoked in order to access the value under `key` and update it at the same time.
The implementation of this callback should invoke `fun` with the value under
`key` in the passed structure `data`, or with `nil` if `key` is not present in it.
This function must return either `{get_value, update_value}` or `:pop`.
If the passed function returns `{get_value, update_value}`,
the return value of this callback should be `{get_value, new_data}`, where:
- `get_value` is the retrieved value (which can be operated on before being returned)
- `update_value` is the new value to be stored under `key`
- `new_data` is `data` after updating the value of `key` with `update_value`.
If the passed function returns `:pop`, the return value of this callback
must be `{value, new_data}` where `value` is the value under `key`
(or `nil` if not present) and `new_data` is `data` without `key`.
See the implementations of `Map.get_and_update/3` or `Keyword.get_and_update/3`
for more examples.
"""
@callback get_and_update(data, key, (value -> {get_value, value} | :pop)) ::
{get_value, data} when get_value: var, data: container | any_container
@doc """
Invoked to "pop" the value under `key` out of the given data structure.
When `key` exists in the given structure `data`, the implementation should
return a `{value, new_data}` tuple where `value` is the value that was under
`key` and `new_data` is `term` without `key`.
When `key` is not present in the given structure, a tuple `{value, data}`
should be returned, where `value` is implementation-defined.
See the implementations for `Map.pop/3` or `Keyword.pop/3` for more examples.
"""
@callback pop(data, key) :: {value, data} when data: container | any_container
defmacrop raise_undefined_behaviour(e, struct, top) do
quote do
stacktrace = System.stacktrace
e =
case stacktrace do
[unquote(top) | _] ->
%{unquote(e) | reason: "#{inspect unquote(struct)} does not implement the Access behaviour"}
_ ->
unquote(e)
end
reraise e, stacktrace
end
end
@doc """
Fetches the value for the given key in a container (a map, keyword
list, or struct that implements the `Access` behaviour).
Returns `{:ok, value}` where `value` is the value under `key` if there is such
a key, or `:error` if `key` is not found.
"""
@spec fetch(container, term) :: {:ok, term} | :error
@spec fetch(nil_container, any) :: :error
def fetch(container, key)
def fetch(%struct{} = container, key) do
struct.fetch(container, key)
rescue
e in UndefinedFunctionError ->
raise_undefined_behaviour e, struct, {^struct, :fetch, [^container, ^key], _}
end
def fetch(map, key) when is_map(map) do
case map do
%{^key => value} -> {:ok, value}
_ -> :error
end
end
def fetch(list, key) when is_list(list) and is_atom(key) do
case :lists.keyfind(key, 1, list) do
{_, value} -> {:ok, value}
false -> :error
end
end
def fetch(list, key) when is_list(list) do
raise ArgumentError,
"the Access calls for keywords expect the key to be an atom, got: " <> inspect(key)
end
def fetch(nil, _key) do
:error
end
@doc """
Gets the value for the given key in a container (a map, keyword
list, or struct that implements the `Access` behaviour).
Returns the value under `key` if there is such a key, or `default` if `key` is
not found.
"""
@spec get(container, term, term) :: term
@spec get(nil_container, any, default) :: default when default: var
def get(container, key, default \\ nil)
def get(%{__struct__: struct} = container, key, default) do
try do
struct.fetch(container, key)
rescue
e in UndefinedFunctionError ->
raise_undefined_behaviour e, struct, {^struct, :fetch, [^container, ^key], _}
else
{:ok, value} -> value
:error -> default
end
end
def get(map, key, default) when is_map(map) do
case map do
%{^key => value} -> value
_ -> default
end
end
def get(list, key, default) when is_list(list) and is_atom(key) do
case :lists.keyfind(key, 1, list) do
{_, value} -> value
false -> default
end
end
def get(list, key, _default) when is_list(list) do
raise ArgumentError,
"the Access calls for keywords expect the key to be an atom, got: " <> inspect(key)
end
def get(nil, _key, default) do
default
end
@doc """
Gets and updates the given key in a `container` (a map, a keyword list,
a struct that implements the `Access` behaviour).
The `fun` argument receives the value of `key` (or `nil` if `key` is not
present in `container`) and must return a two-element tuple `{get_value, update_value}`:
the "get" value `get_value` (the retrieved value, which can be operated on before
being returned) and the new value to be stored under `key` (`update_value`).
`fun` may also return `:pop`, which means the current value
should be removed from the container and returned.
The returned value is a two-element tuple with the "get" value returned by
`fun` and a new container with the updated value under `key`.
"""
@spec get_and_update(data, key, (value -> {get_value, value} | :pop)) ::
{get_value, data} when get_value: var, data: container
def get_and_update(container, key, fun)
def get_and_update(%{__struct__: struct} = container, key, fun) do
struct.get_and_update(container, key, fun)
rescue
e in UndefinedFunctionError ->
raise_undefined_behaviour e, struct, {^struct, :get_and_update, [^container, ^key, ^fun], _}
end
def get_and_update(map, key, fun) when is_map(map) do
Map.get_and_update(map, key, fun)
end
def get_and_update(list, key, fun) when is_list(list) do
Keyword.get_and_update(list, key, fun)
end
def get_and_update(nil, key, _fun) do
raise ArgumentError,
"could not put/update key #{inspect key} on a nil value"
end
@doc """
Removes the entry with a given key from a container (a map, keyword
list, or struct that implements the `Access` behaviour).
Returns a tuple containing the value associated with the key and the
updated container. `nil` is returned for the value if the key isn't
in the container.
## Examples
With a map:
iex> Access.pop(%{name: "Elixir", creator: "Valim"}, :name)
{"Elixir", %{creator: "Valim"}}
A keyword list:
iex> Access.pop([name: "Elixir", creator: "Valim"], :name)
{"Elixir", [creator: "Valim"]}
An unknown key:
iex> Access.pop(%{name: "Elixir", creator: "Valim"}, :year)
{nil, %{creator: "Valim", name: "Elixir"}}
"""
@spec pop(data, key) :: {value, data} when data: container
def pop(%{__struct__: struct} = container, key) do
struct.pop(container, key)
rescue
e in UndefinedFunctionError ->
raise_undefined_behaviour e, struct, {^struct, :pop, [^container, ^key], _}
end
def pop(map, key) when is_map(map) do
Map.pop(map, key)
end
def pop(list, key) when is_list(list) do
Keyword.pop(list, key)
end
def pop(nil, key) do
raise ArgumentError,
"could not pop key #{inspect key} on a nil value"
end
## Accessors
@doc """
Returns a function that accesses the given key in a map/struct.
The returned function is typically passed as an accessor to `Kernel.get_in/2`,
`Kernel.get_and_update_in/3`, and friends.
The returned function uses the default value if the key does not exist.
This can be used to specify defaults and safely traverse missing keys:
iex> get_in(%{}, [Access.key(:user, %{}), Access.key(:name)])
nil
Such is also useful when using update functions, allowing us to introduce
values as we traverse the data structure for updates:
iex> put_in(%{}, [Access.key(:user, %{}), Access.key(:name)], "Mary")
%{user: %{name: "Mary"}}
## Examples
iex> map = %{user: %{name: "john"}}
iex> get_in(map, [Access.key(:unknown, %{}), Access.key(:name, "john")])
"john"
iex> get_and_update_in(map, [Access.key(:user), Access.key(:name)], fn
...> prev -> {prev, String.upcase(prev)}
...> end)
{"john", %{user: %{name: "JOHN"}}}
iex> pop_in(map, [Access.key(:user), Access.key(:name)])
{"john", %{user: %{}}}
An error is raised if the accessed structure is not a map or a struct:
iex> get_in(nil, [Access.key(:foo)])
** (BadMapError) expected a map, got: nil
iex> get_in([], [Access.key(:foo)])
** (BadMapError) expected a map, got: []
"""
@spec key(key, term) :: access_fun(data :: struct | map, get_value :: term)
def key(key, default \\ nil) do
fn
:get, data, next ->
next.(Map.get(data, key, default))
:get_and_update, data, next ->
value = Map.get(data, key, default)
case next.(value) do
{get, update} -> {get, Map.put(data, key, update)}
:pop -> {value, Map.delete(data, key)}
end
end
end
@doc """
Returns a function that accesses the given key in a map/struct.
The returned function is typically passed as an accessor to `Kernel.get_in/2`,
`Kernel.get_and_update_in/3`, and friends.
The returned function raises if the key does not exist.
## Examples
iex> map = %{user: %{name: "john"}}
iex> get_in(map, [Access.key!(:user), Access.key!(:name)])
"john"
iex> get_and_update_in(map, [Access.key!(:user), Access.key!(:name)], fn
...> prev -> {prev, String.upcase(prev)}
...> end)
{"john", %{user: %{name: "JOHN"}}}
iex> pop_in(map, [Access.key!(:user), Access.key!(:name)])
{"john", %{user: %{}}}
iex> get_in(map, [Access.key!(:user), Access.key!(:unknown)])
** (KeyError) key :unknown not found in: %{name: \"john\"}
An error is raised if the accessed structure is not a map/struct:
iex> get_in([], [Access.key!(:foo)])
** (RuntimeError) Access.key!/1 expected a map/struct, got: []
"""
@spec key!(key) :: access_fun(data :: struct | map, get_value :: term)
def key!(key) do
fn
:get, %{} = data, next ->
next.(Map.fetch!(data, key))
:get_and_update, %{} = data, next ->
value = Map.fetch!(data, key)
case next.(value) do
{get, update} -> {get, Map.put(data, key, update)}
:pop -> {value, Map.delete(data, key)}
end
_op, data, _next ->
raise "Access.key!/1 expected a map/struct, got: #{inspect data}"
end
end
@doc ~S"""
Returns a function that accesses the element at the given index in a tuple.
The returned function is typically passed as an accessor to `Kernel.get_in/2`,
`Kernel.get_and_update_in/3`, and friends.
The returned function raises if `index` is out of bounds.
## Examples
iex> map = %{user: {"john", 27}}
iex> get_in(map, [:user, Access.elem(0)])
"john"
iex> get_and_update_in(map, [:user, Access.elem(0)], fn
...> prev -> {prev, String.upcase(prev)}
...> end)
{"john", %{user: {"JOHN", 27}}}
iex> pop_in(map, [:user, Access.elem(0)])
** (RuntimeError) cannot pop data from a tuple
An error is raised if the accessed structure is not a tuple:
iex> get_in(%{}, [Access.elem(0)])
** (RuntimeError) Access.elem/1 expected a tuple, got: %{}
"""
@spec elem(non_neg_integer) :: access_fun(data :: tuple, get_value :: term)
def elem(index) when is_integer(index) do
pos = index + 1
fn
:get, data, next when is_tuple(data) ->
next.(:erlang.element(pos, data))
:get_and_update, data, next when is_tuple(data) ->
value = :erlang.element(pos, data)
case next.(value) do
{get, update} -> {get, :erlang.setelement(pos, data, update)}
:pop -> raise "cannot pop data from a tuple"
end
_op, data, _next ->
raise "Access.elem/1 expected a tuple, got: #{inspect data}"
end
end
@doc ~S"""
Returns a function that accesses all the elements in a list.
The returned function is typically passed as an accessor to `Kernel.get_in/2`,
`Kernel.get_and_update_in/3`, and friends.
## Examples
iex> list = [%{name: "john"}, %{name: "mary"}]
iex> get_in(list, [Access.all(), :name])
["john", "mary"]
iex> get_and_update_in(list, [Access.all(), :name], fn
...> prev -> {prev, String.upcase(prev)}
...> end)
{["john", "mary"], [%{name: "JOHN"}, %{name: "MARY"}]}
iex> pop_in(list, [Access.all(), :name])
{["john", "mary"], [%{}, %{}]}
Here is an example that traverses the list dropping even
numbers and multiplying odd numbers by 2:
iex> require Integer
iex> get_and_update_in([1, 2, 3, 4, 5], [Access.all], fn
...> num -> if Integer.is_even(num), do: :pop, else: {num, num * 2}
...> end)
{[1, 2, 3, 4, 5], [2, 6, 10]}
An error is raised if the accessed structure is not a list:
iex> get_in(%{}, [Access.all()])
** (RuntimeError) Access.all/0 expected a list, got: %{}
"""
@spec all() :: access_fun(data :: list, get_value :: list)
def all() do
&all/3
end
defp all(:get, data, next) when is_list(data) do
Enum.map(data, next)
end
defp all(:get_and_update, data, next) when is_list(data) do
all(data, next, _gets = [], _updates = [])
end
defp all(_op, data, _next) do
raise "Access.all/0 expected a list, got: #{inspect data}"
end
defp all([head | rest], next, gets, updates) do
case next.(head) do
{get, update} -> all(rest, next, [get | gets], [update | updates])
:pop -> all(rest, next, [head | gets], updates)
end
end
defp all([], _next, gets, updates) do
{:lists.reverse(gets), :lists.reverse(updates)}
end
@doc ~S"""
Returns a function that accesses the element at `index` (zero based) of a list.
The returned function is typically passed as an accessor to `Kernel.get_in/2`,
`Kernel.get_and_update_in/3`, and friends.
## Examples
iex> list = [%{name: "john"}, %{name: "mary"}]
iex> get_in(list, [Access.at(1), :name])
"mary"
iex> get_and_update_in(list, [Access.at(0), :name], fn
...> prev -> {prev, String.upcase(prev)}
...> end)
{"john", [%{name: "JOHN"}, %{name: "mary"}]}
`at/1` can also be used to pop elements out of a list or
a key inside of a list:
iex> list = [%{name: "john"}, %{name: "mary"}]
iex> pop_in(list, [Access.at(0)])
{%{name: "john"}, [%{name: "mary"}]}
iex> pop_in(list, [Access.at(0), :name])
{"john", [%{}, %{name: "mary"}]}
When the index is out of bounds, `nil` is returned and the update function is never called:
iex> list = [%{name: "john"}, %{name: "mary"}]
iex> get_in(list, [Access.at(10), :name])
nil
iex> get_and_update_in(list, [Access.at(10), :name], fn
...> prev -> {prev, String.upcase(prev)}
...> end)
{nil, [%{name: "john"}, %{name: "mary"}]}
An error is raised for negative indexes:
iex> get_in([], [Access.at(-1)])
** (FunctionClauseError) no function clause matching in Access.at/1
An error is raised if the accessed structure is not a list:
iex> get_in(%{}, [Access.at(1)])
** (RuntimeError) Access.at/1 expected a list, got: %{}
"""
@spec at(non_neg_integer) :: access_fun(data :: list, get_value :: term)
def at(index) when is_integer(index) and index >= 0 do
fn(op, data, next) -> at(op, data, index, next) end
end
defp at(:get, data, index, next) when is_list(data) do
data |> Enum.at(index) |> next.()
end
defp at(:get_and_update, data, index, next) when is_list(data) do
get_and_update_at(data, index, next, [])
end
defp at(_op, data, _index, _next) do
raise "Access.at/1 expected a list, got: #{inspect data}"
end
defp get_and_update_at([head | rest], 0, next, updates) do
case next.(head) do
{get, update} -> {get, :lists.reverse([update | updates], rest)}
:pop -> {head, :lists.reverse(updates, rest)}
end
end
defp get_and_update_at([head | rest], index, next, updates) do
get_and_update_at(rest, index - 1, next, [head | updates])
end
defp get_and_update_at([], _index, _next, updates) do
{nil, :lists.reverse(updates)}
end
end
| 33.198864 | 114 | 0.634349 |
1cfd743457a7add2f8d2a073b78227300267ab51 | 331 | exs | Elixir | mix.exs | KallDrexx/elixir-media-libs | 5115c390133696a01c1b24107fa1f51eca9ce7b4 | [
"MIT"
] | 75 | 2016-12-23T14:37:18.000Z | 2021-04-26T14:07:20.000Z | mix.exs | KallDrexx/elixir-media-libs | 5115c390133696a01c1b24107fa1f51eca9ce7b4 | [
"MIT"
] | 19 | 2016-12-22T03:20:43.000Z | 2020-06-11T12:10:37.000Z | mix.exs | KallDrexx/elixir-media-libs | 5115c390133696a01c1b24107fa1f51eca9ce7b4 | [
"MIT"
] | 3 | 2018-03-29T06:40:40.000Z | 2019-02-13T09:37:19.000Z | defmodule ElixirMediaLibs.Mixfile do
use Mix.Project
def project do
[apps_path: "apps",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps(),
dialyzer: [plt_add_deps: :transitive]
]
end
defp deps do
[{:dialyxir, "~> 0.4.3", only: [:dev, :umbrella]}]
end
end
| 19.470588 | 54 | 0.613293 |
1cfd9ac24633a24518c6ddded146c06685cfacfe | 1,308 | exs | Elixir | config/prod.secret.exs | PanyPy/foundation_phoenix | 571aaa0b274c1428b4cf8e370777ae82c9167eb2 | [
"MIT"
] | null | null | null | config/prod.secret.exs | PanyPy/foundation_phoenix | 571aaa0b274c1428b4cf8e370777ae82c9167eb2 | [
"MIT"
] | 2 | 2021-03-10T11:28:36.000Z | 2021-05-11T07:00:39.000Z | config/prod.secret.exs | PanyPy/foundation_phoenix | 571aaa0b274c1428b4cf8e370777ae82c9167eb2 | [
"MIT"
] | null | null | null | # In this file, we load production configuration and secrets
# from environment variables. You can also hardcode secrets,
# although such is generally not recommended and you have to
# remember to add this file to your .gitignore.
use Mix.Config
# Fail fast when this config is evaluated: raise with a helpful message if a
# required environment variable is absent, instead of booting misconfigured.
database_url =
  System.get_env("DATABASE_URL") ||
    raise """
    environment variable DATABASE_URL is missing.
    For example: ecto://USER:PASS@HOST/DATABASE
    """
config :foundation_phoenix, FoundationPhoenix.Repo,
  # ssl: true,
  url: database_url,
  # POOL_SIZE is optional and defaults to 10 connections.
  pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")
secret_key_base =
  System.get_env("SECRET_KEY_BASE") ||
    raise """
    environment variable SECRET_KEY_BASE is missing.
    You can generate one by calling: mix phx.gen.secret
    """
config :foundation_phoenix, FoundationPhoenixWeb.Endpoint,
  http: [
    # PORT is optional and defaults to 4000; :inet6 enables IPv6 sockets.
    port: String.to_integer(System.get_env("PORT") || "4000"),
    transport_options: [socket_opts: [:inet6]]
  ],
  secret_key_base: secret_key_base
# ## Using releases (Elixir v1.9+)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start each relevant endpoint:
#
# config :foundation_phoenix, FoundationPhoenixWeb.Endpoint, server: true
#
# Then you can assemble a release by calling `mix release`.
# See `mix help release` for more information.
| 31.142857 | 77 | 0.732416 |
1cfdb971b05b8291d4ba90926800199a8b4bb296 | 693 | ex | Elixir | builds/installs/Debian/cards-500/debian/preinst.ex | bob-weber/cardGame-500 | 007ad6e1ad30f6e24ba3c5c29cfc16d42c0a9a51 | [
"Beerware"
] | null | null | null | builds/installs/Debian/cards-500/debian/preinst.ex | bob-weber/cardGame-500 | 007ad6e1ad30f6e24ba3c5c29cfc16d42c0a9a51 | [
"Beerware"
] | null | null | null | builds/installs/Debian/cards-500/debian/preinst.ex | bob-weber/cardGame-500 | 007ad6e1ad30f6e24ba3c5c29cfc16d42c0a9a51 | [
"Beerware"
] | null | null | null | #!/bin/sh
# preinst script for cards-500
#
# see: dh_installdeb(1)
set -e
# summary of how this script can be called:
# * <new-preinst> `install'
# * <new-preinst> `install' <old-version>
# * <new-preinst> `upgrade' <old-version>
# * <old-preinst> `abort-upgrade' <new-version>
# for details, see https://www.debian.org/doc/debian-policy/ or
# the debian-policy package
case "$1" in
install|upgrade)
;;
abort-upgrade)
;;
*)
echo "preinst called with unknown argument \`$1'" >&2
exit 1
;;
esac
# dh_installdeb will replace this with shell code automatically
# generated by other debhelper scripts.
#DEBHELPER#
exit 0
| 19.25 | 63 | 0.627706 |
1cfe0e755b6121f14ba037f89206a90f89f1da9e | 456 | exs | Elixir | config/prod.secret.exs | brianstorti/jan | e3e7172c672705b7d4e3912e16d5a69496a79e4a | [
"Apache-2.0"
] | 54 | 2016-02-07T21:44:14.000Z | 2022-01-06T04:10:59.000Z | config/prod.secret.exs | brianstorti/jan | e3e7172c672705b7d4e3912e16d5a69496a79e4a | [
"Apache-2.0"
] | 1 | 2016-02-13T15:52:55.000Z | 2016-02-13T15:52:55.000Z | config/prod.secret.exs | brianstorti/jan | e3e7172c672705b7d4e3912e16d5a69496a79e4a | [
"Apache-2.0"
] | 4 | 2016-02-13T11:54:00.000Z | 2022-01-06T04:19:26.000Z | use Mix.Config
# In this file, we keep production configuration that
# you likely want to automate and keep it away from
# your version control system.
config :jan, Jan.Endpoint,
secret_key_base: System.get_env("SECRET_KEY_BASE")
# Configure your database
config :jan, Jan.Repo,
adapter: Ecto.Adapters.Postgres,
username: System.get_env("DATABASE_USERNAME"),
password: System.get_env("DATABASE_PASSWORD"),
database: "jan_prod",
pool_size: 20
| 28.5 | 53 | 0.765351 |
1cfe786544fbca6a6ce419d8316405b34ac00c6b | 1,630 | ex | Elixir | elixir/epi_book/lib/chapter_8/max_stack.ex | wtfleming/data-structures-and-algorithms | f3d55b6642ee0219606c65ac6f1f8c5b402bdf70 | [
"MIT"
] | null | null | null | elixir/epi_book/lib/chapter_8/max_stack.ex | wtfleming/data-structures-and-algorithms | f3d55b6642ee0219606c65ac6f1f8c5b402bdf70 | [
"MIT"
] | null | null | null | elixir/epi_book/lib/chapter_8/max_stack.ex | wtfleming/data-structures-and-algorithms | f3d55b6642ee0219606c65ac6f1f8c5b402bdf70 | [
"MIT"
] | null | null | null | defmodule Chapter8.MaxStack do
defstruct head: []
@type t :: %Chapter8.MaxStack{head: [{integer, integer}]}
@moduledoc """
Stack that supports max() as an O(1) operation to determine the largest element on the stack
"""
@doc """
Construct an empty MaxStack
"""
@spec new() :: Chapter8.MaxStack.t
def new(), do: %Chapter8.MaxStack{}
@doc """
Add an element to the stack
"""
@spec push(Chapter8.MaxStack.t, integer) :: Chapter8.MaxStack.t
def push(stack, val) do
if stack.head == [] do
%Chapter8.MaxStack{head: [{val, val}]}
else
{_node, current_max} = List.first(stack.head)
if val < current_max do
%Chapter8.MaxStack{head: [{val, current_max}|stack.head]}
else
%Chapter8.MaxStack{head: [{val, val}|stack.head]}
end
end
end
@doc """
Remove an element from the stack
"""
@spec pop(t) :: {Chapter8.MaxStack.t, integer}
def pop(stack) do
if stack.head == [] do
{%Chapter8.MaxStack{head: []}, nil}
else
[head|tail] = stack.head
{val, _current_max} = head
{%Chapter8.MaxStack{head: tail}, val}
end
end
@doc """
Return the first element in the stack
"""
@spec peek(Chapter8.MaxStack.t) :: integer
def peek(stack) do
case List.first(stack.head) do
nil ->
nil
{val, _current_max} ->
val
end
end
@doc """
Return the maximum element in the stack
"""
@spec max(Chapter8.MaxStack.t) :: integer
def max(stack) do
case List.first(stack.head) do
nil ->
nil
{_val, current_max} ->
current_max
end
end
end
| 22.027027 | 94 | 0.598773 |
1cfe9734d17c2b0f8abf9be282a33b044a981de8 | 1,639 | ex | Elixir | clients/docs/lib/google_api/docs/v1/model/tab_stop.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/docs/lib/google_api/docs/v1/model/tab_stop.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/docs/lib/google_api/docs/v1/model/tab_stop.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Docs.V1.Model.TabStop do
  @moduledoc """
  A tab stop within a paragraph.
  ## Attributes
  *   `alignment` (*type:* `String.t`, *default:* `nil`) - The alignment of this tab stop. If unset, the value defaults to START.
  *   `offset` (*type:* `GoogleApi.Docs.V1.Model.Dimension.t`, *default:* `nil`) - The offset between this tab stop and the start margin.
  """
  # Generated model (see the "auto generated" note in the file header).
  # `use` pulls in the Gax model DSL; the field/1,2 macros below are
  # provided by it, and presumably it also injects decode/2 and encode/2
  # used by the Poison defimpls for this module.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :alignment => String.t(),
          :offset => GoogleApi.Docs.V1.Model.Dimension.t()
        }
  field(:alignment)
  # :offset deserializes into a nested Dimension model.
  field(:offset, as: GoogleApi.Docs.V1.Model.Dimension)
end
# Wire TabStop into Poison JSON (de)serialization by delegating to the
# helpers injected by GoogleApi.Gax.ModelBase.
defimpl Poison.Decoder, for: GoogleApi.Docs.V1.Model.TabStop do
  def decode(value, options) do
    GoogleApi.Docs.V1.Model.TabStop.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Docs.V1.Model.TabStop do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 32.78 | 137 | 0.718121 |
1cfea1a263f360bf215e9485a4dc61972fc75e76 | 378 | ex | Elixir | 9-elixir-phoenix/web/router.ex | smddzcy/learning-new-technologies | cf3645ff649f6dfe89c77e90ceaf089b66da2482 | [
"MIT"
] | 3 | 2017-02-22T08:45:53.000Z | 2017-04-29T13:40:23.000Z | 9-elixir-phoenix/web/router.ex | smddzcy/learning-new-technologies | cf3645ff649f6dfe89c77e90ceaf089b66da2482 | [
"MIT"
] | null | null | null | 9-elixir-phoenix/web/router.ex | smddzcy/learning-new-technologies | cf3645ff649f6dfe89c77e90ceaf089b66da2482 | [
"MIT"
] | null | null | null | defmodule AwesomeprojectBackend.Router do
  use AwesomeprojectBackend.Web, :router
  # JSON-only pipeline for API requests.
  pipeline :api do
    plug :accepts, ["json"]
  end
  # All routes live at the root scope and go through the :api pipeline.
  # Only the /posts RESTful resource is exposed.
  scope "/", AwesomeprojectBackend do
    pipe_through :api # Use the api
    resources "/posts", PostController
  end
  # Other scopes may use custom stacks.
  # scope "/api", AwesomeprojectBackend do
  #   pipe_through :api
  # end
end
| 19.894737 | 42 | 0.703704 |
1cfeab1000ebf9b91d32513dd4918479f895f9f8 | 1,556 | ex | Elixir | clients/dataflow/lib/google_api/dataflow/v1b3/model/list_snapshots_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dataflow/lib/google_api/dataflow/v1b3/model/list_snapshots_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/dataflow/lib/google_api/dataflow/v1b3/model/list_snapshots_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Dataflow.V1b3.Model.ListSnapshotsResponse do
  @moduledoc """
  List of snapshots.
  ## Attributes
  *   `snapshots` (*type:* `list(GoogleApi.Dataflow.V1b3.Model.Snapshot.t)`, *default:* `nil`) - Returned snapshots.
  """
  # Generated model (see the "auto generated" note in the file header).
  # The field/3 macro comes from the Gax model DSL pulled in by `use`.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :snapshots => list(GoogleApi.Dataflow.V1b3.Model.Snapshot.t())
        }
  # :snapshots deserializes into a list of nested Snapshot models.
  field(:snapshots, as: GoogleApi.Dataflow.V1b3.Model.Snapshot, type: :list)
end
# Wire ListSnapshotsResponse into Poison JSON (de)serialization by
# delegating to the helpers injected by GoogleApi.Gax.ModelBase.
defimpl Poison.Decoder, for: GoogleApi.Dataflow.V1b3.Model.ListSnapshotsResponse do
  def decode(value, options) do
    GoogleApi.Dataflow.V1b3.Model.ListSnapshotsResponse.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Dataflow.V1b3.Model.ListSnapshotsResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 33.106383 | 116 | 0.747429 |
1cfec2ceb8063849c7d99a8e7913468f61f0c797 | 1,812 | ex | Elixir | web/controllers/page_controller.ex | jabaraster/elixir_wiki | e8197a1f48c4ebfdbb209e74b01e76abb6d9fe68 | [
"MIT"
] | null | null | null | web/controllers/page_controller.ex | jabaraster/elixir_wiki | e8197a1f48c4ebfdbb209e74b01e76abb6d9fe68 | [
"MIT"
] | null | null | null | web/controllers/page_controller.ex | jabaraster/elixir_wiki | e8197a1f48c4ebfdbb209e74b01e76abb6d9fe68 | [
"MIT"
] | null | null | null | defmodule Wiki.PageController do
  use Wiki.Web, :controller
  alias Wiki.Page
  # Require the "page" key in params (and scrub blank values) before
  # create/update run.
  plug :scrub_params, "page" when action in [:create, :update]
  # GET /pages — list every page.
  def index(conn, _params) do
    pages = Repo.all(Page)
    render(conn, "index.html", pages: pages)
  end
  # GET /pages/new — render an empty form backed by a blank changeset.
  def new(conn, _params) do
    changeset = Page.changeset(%Page{})
    render(conn, "new.html", changeset: changeset)
  end
  # POST /pages — insert a page; re-render the form with errors on failure.
  def create(conn, %{"page" => page_params}) do
    changeset = Page.changeset(%Page{}, page_params)
    case Repo.insert(changeset) do
      {:ok, _page} ->
        conn
        |> put_flash(:info, "Page created successfully.")
        |> redirect(to: page_path(conn, :index))
      {:error, changeset} ->
        render(conn, "new.html", changeset: changeset)
    end
  end
  # GET /pages/:id — 404s (via Repo.get!) when the id does not exist.
  def show(conn, %{"id" => id}) do
    page = Repo.get!(Page, id)
    render(conn, "show.html", page: page)
  end
  # GET /pages/:id/edit — render the edit form for an existing page.
  def edit(conn, %{"id" => id}) do
    page = Repo.get!(Page, id)
    changeset = Page.changeset(page)
    render(conn, "edit.html", page: page, changeset: changeset)
  end
  # PUT/PATCH /pages/:id — update a page; re-render the form on failure.
  def update(conn, %{"id" => id, "page" => page_params}) do
    page = Repo.get!(Page, id)
    changeset = Page.changeset(page, page_params)
    case Repo.update(changeset) do
      {:ok, page} ->
        conn
        |> put_flash(:info, "Page updated successfully.")
        |> redirect(to: page_path(conn, :show, page))
      {:error, changeset} ->
        render(conn, "edit.html", page: page, changeset: changeset)
    end
  end
  # DELETE /pages/:id
  def delete(conn, %{"id" => id}) do
    page = Repo.get!(Page, id)
    # Here we use delete! (with a bang) because we expect
    # it to always work (and if it does not, it will raise).
    Repo.delete!(page)
    conn
    |> put_flash(:info, "Page deleted successfully.")
    |> redirect(to: page_path(conn, :index))
  end
end
| 26.647059 | 67 | 0.610375 |
1cfec711c7cb5b8a740ef8a06051f2f9faca0bf3 | 229 | ex | Elixir | lib/operators/type_error.ex | carmaproject/towel | 471953af77efa675beaa5055e24e7ae645d565c8 | [
"MIT"
] | 3 | 2018-01-03T12:28:22.000Z | 2021-02-04T14:17:48.000Z | lib/operators/type_error.ex | carmaproject/towel | 471953af77efa675beaa5055e24e7ae645d565c8 | [
"MIT"
] | 4 | 2017-09-27T09:04:07.000Z | 2021-10-16T14:37:52.000Z | lib/operators/type_error.ex | carmaproject/towel | 471953af77efa675beaa5055e24e7ae645d565c8 | [
"MIT"
defmodule Result.TypeError do
  # Raised when a term does not follow the `{:ok, value}` / `{:error, error}`
  # result-tuple convention. The offending term is interpolated into the
  # exception message for easier debugging.
  defexception [:message]

  @impl true
  def exception(value) do
    %__MODULE__{
      message:
        "is not in {:ok, value} or {:error, error} format, instead got: #{inspect(value)}"
    }
  end
end
| 22.9 | 92 | 0.676856 |
1cfefe487596c179737ec62562f366279fcc5d1e | 1,276 | exs | Elixir | test/web/plugs/burger_create_plug_test.exs | mbuhot/irateburgers | 3ea07805501c10ef9b04cee04c22c36582f26e1e | [
"MIT"
] | 1 | 2020-01-13T09:27:21.000Z | 2020-01-13T09:27:21.000Z | test/web/plugs/burger_create_plug_test.exs | mbuhot/irateburgers | 3ea07805501c10ef9b04cee04c22c36582f26e1e | [
"MIT"
] | null | null | null | test/web/plugs/burger_create_plug_test.exs | mbuhot/irateburgers | 3ea07805501c10ef9b04cee04c22c36582f26e1e | [
"MIT"
] | null | null | null | defmodule Irateburgers.Web.BurgerCreatePlugTest do
  use Irateburgers.Web.ConnCase
  alias Plug.Conn
  # Fixture: a complete, valid payload for the burger create endpoint.
  def big_mac_params do
    %{
      name: "Big Mac",
      price: "$6.50",
      description: "Beef, Cheese, Lettuce, Pickles, Special Sauce",
      images: ["http://imgur.com/foo/bar"]
    }
  end
  describe "Creating a burger" do
    # Every request in this describe block posts JSON, so set the
    # content-type header once here.
    setup %{conn: conn} do
      %{conn: Conn.put_req_header(conn, "content-type", "application/json")}
    end
    # Valid payload -> 201 with the created burger echoed back (plus a
    # server-assigned id and an empty reviews list).
    test "Succeeds with valid params", %{conn: conn} do
      response =
        conn
        |> post(burger_path(conn, :create), Poison.encode!(big_mac_params()))
        |> json_response(201)
      assert %{
               "description" => "Beef, Cheese, Lettuce, Pickles, Special Sauce",
               "id" => id,
               "images" => ["http://imgur.com/foo/bar"],
               "name" => "Big Mac",
               "price" => "$6.50",
               "reviews" => []
             } = response
      assert id != nil
    end
    # Empty payload -> 422 with per-field validation errors.
    test "fails with missing fields", %{conn: conn} do
      response =
        conn
        |> post(burger_path(conn, :create), "{}")
        |> json_response(422)
      assert %{
               "price" => ["can't be blank"],
               "name" => ["can't be blank"],
               "description" => ["can't be blank"]
             } = response
    end
  end
end
| 25.52 | 77 | 0.548589 |
1cff0328f0c28e8252a087b092dc4c79d7cbacc5 | 591 | exs | Elixir | elixir/prime-factors/prime_factors.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | [
"MIT"
] | null | null | null | elixir/prime-factors/prime_factors.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | [
"MIT"
] | null | null | null | elixir/prime-factors/prime_factors.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | [
"MIT"
defmodule PrimeFactors do
  @doc """
  Compute the prime factors for 'number'.
  The prime factors are prime numbers that when multiplied give the desired
  number.
  The prime factors of 'number' will be ordered lowest to highest.
  """
  @spec factors_for(pos_integer) :: [pos_integer]
  def factors_for(number), do: divide_out(number, 2)

  # Repeatedly divides `remaining` by the smallest divisor that fits,
  # emitting each divisor as it is found. Because divisors are tried in
  # increasing order, every emitted divisor is prime and the result is
  # already sorted ascending.
  defp divide_out(1, _divisor), do: []

  defp divide_out(remaining, divisor) do
    if rem(remaining, divisor) == 0 do
      [divisor | divide_out(div(remaining, divisor), divisor)]
    else
      divide_out(remaining, divisor + 1)
    end
  end
end
| 29.55 | 75 | 0.681895 |
1cff04dec32ba9f4e0ced573413bcc0f56749e0f | 2,010 | exs | Elixir | implementations/elixir/ockam/ockam_hub/mix.exs | illaz/ockam | d1073799a140f5f7e3312ea2d5d8b86b4e94154c | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam_hub/mix.exs | illaz/ockam | d1073799a140f5f7e3312ea2d5d8b86b4e94154c | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam_hub/mix.exs | illaz/ockam | d1073799a140f5f7e3312ea2d5d8b86b4e94154c | [
"Apache-2.0"
] | null | null | null | defmodule Ockam.Hub.MixProject do
  use Mix.Project
  # Single source of truth for the version; also interpolated into the
  # ExDoc source URL pattern below.
  @version "0.10.0-dev"
  @elixir_requirement "~> 1.10"
  @ockam_github_repo "https://github.com/ockam-network/ockam"
  @ockam_github_repo_path "implementations/elixir/ockam/ockam_hub"
  # Project definition: compilation, lint, test, hex and docs settings.
  def project do
    [
      app: :ockam_hub,
      version: @version,
      elixir: @elixir_requirement,
      consolidate_protocols: Mix.env() != :test,
      elixirc_options: [warnings_as_errors: true],
      deps: deps(),
      aliases: aliases(),
      # lint
      dialyzer: [flags: ["-Wunmatched_returns", :error_handling, :underspecs]],
      # test
      test_coverage: [output: "_build/cover"],
      preferred_cli_env: ["test.cover": :test],
      # hex
      description: "Ockam Hub.",
      package: package(),
      # docs
      name: "Ockam Hub",
      docs: docs()
    ]
  end
  # mix help compile.app for more
  def application do
    [
      mod: {Ockam.Hub, []},
      extra_applications: [:logger, :ockam, :ockam_vault_software, :ranch]
    ]
  end
  # Sibling umbrella packages are path deps; the rest come from hex.
  defp deps do
    [
      {:ockam, path: "../ockam"},
      {:ockam_vault_software, path: "../ockam_vault_software"},
      {:ranch, "~> 2.0"},
      {:ex_doc, "~> 0.23.0", only: :dev, runtime: false},
      {:credo, "~> 1.5", only: [:dev, :test], runtime: false},
      {:dialyxir, "~> 1.0", only: [:dev], runtime: false}
    ]
  end
  # used by hex
  defp package do
    [
      links: %{"GitHub" => @ockam_github_repo},
      licenses: ["Apache-2.0"]
    ]
  end
  # used by ex_doc
  defp docs do
    [
      main: "Ockam.Hub",
      source_url_pattern:
        "#{@ockam_github_repo}/blob/v#{@version}/#{@ockam_github_repo_path}/%{path}#L%{line}"
    ]
  end
  # Convenience mix aliases for docs, coverage and the lint suite.
  defp aliases do
    [
      docs: "docs --output _build/docs --formatter html",
      "test.cover": "test --no-start --cover",
      "lint.format": "format --check-formatted",
      "lint.credo": "credo --strict",
      "lint.dialyzer": "dialyzer --format dialyxir",
      lint: ["lint.format", "lint.credo"]
    ]
  end
end
| 23.647059 | 93 | 0.5801 |
1cff3dc6ac71be42730349f0d7e7bd112e004467 | 290 | ex | Elixir | lib/Database.ex | ArchaicArhcon/elmud | 85730e0dc4da80826c943c0229dd621801ce5a4c | [
"BSD-2-Clause"
] | 1 | 2015-09-12T00:43:20.000Z | 2015-09-12T00:43:20.000Z | lib/Database.ex | ArchaicArhcon/elmud | 85730e0dc4da80826c943c0229dd621801ce5a4c | [
"BSD-2-Clause"
] | null | null | null | lib/Database.ex | ArchaicArhcon/elmud | 85730e0dc4da80826c943c0229dd621801ce5a4c | [
"BSD-2-Clause"
] | 1 | 2017-11-19T13:36:48.000Z | 2017-11-19T13:36:48.000Z | require Amnesia
use Amnesia
# Mnesia schema declared via the Amnesia DSL: two ordered_set tables,
# each a simple key -> value mapping backed by a generated struct.
defdatabase Database do
  deftable Item, [:key, :value], type: :ordered_set do
    @type t :: %Item{key: String.t, value: String.t}
  end
  deftable Color, [:name, :color], type: :ordered_set do
    @type t :: %Color{name: String.t, color: String.t}
  end
end
| 22.307692 | 56 | 0.668966 |
1cff485a3a3d27f0c51ca4608268f0259188b073 | 2,402 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/live_chat_message.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/live_chat_message.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/live_chat_message.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.LiveChatMessage do
  @moduledoc """
  A liveChatMessage resource represents a chat message in a YouTube Live Chat.
  ## Attributes
  - authorDetails (LiveChatMessageAuthorDetails): The authorDetails object contains basic details about the user that posted this message. Defaults to: `null`.
  - etag (String.t): Etag of this resource. Defaults to: `null`.
  - id (String.t): The ID that YouTube assigns to uniquely identify the message. Defaults to: `null`.
  - kind (String.t): Identifies what kind of resource this is. Value: the fixed string \"youtube#liveChatMessage\". Defaults to: `null`.
  - snippet (LiveChatMessageSnippet): The snippet object contains basic details about the message. Defaults to: `null`.
  """
  # Generated model (see the "auto generated" note in the file header).
  # The field/1,2 macros below come from the Gax model DSL pulled in here.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :authorDetails => GoogleApi.YouTube.V3.Model.LiveChatMessageAuthorDetails.t(),
          :etag => any(),
          :id => any(),
          :kind => any(),
          :snippet => GoogleApi.YouTube.V3.Model.LiveChatMessageSnippet.t()
        }
  # authorDetails and snippet deserialize into nested generated models;
  # the scalar fields are passed through as-is.
  field(:authorDetails, as: GoogleApi.YouTube.V3.Model.LiveChatMessageAuthorDetails)
  field(:etag)
  field(:id)
  field(:kind)
  field(:snippet, as: GoogleApi.YouTube.V3.Model.LiveChatMessageSnippet)
end
# Wire LiveChatMessage into Poison JSON (de)serialization by delegating
# to the helpers injected by GoogleApi.Gax.ModelBase.
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.LiveChatMessage do
  def decode(value, options) do
    GoogleApi.YouTube.V3.Model.LiveChatMessage.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.LiveChatMessage do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 40.033333 | 159 | 0.738551 |
1cff749f4413c9f3c892ee652eb24ae426564d44 | 6,524 | ex | Elixir | lib/oban/config.ex | superhawk610/oban | 9e87ca0e45451efee05db1d430739d348c67acdb | [
"Apache-2.0"
] | null | null | null | lib/oban/config.ex | superhawk610/oban | 9e87ca0e45451efee05db1d430739d348c67acdb | [
"Apache-2.0"
] | null | null | null | lib/oban/config.ex | superhawk610/oban | 9e87ca0e45451efee05db1d430739d348c67acdb | [
"Apache-2.0"
] | null | null | null | defmodule Oban.Config do
  @moduledoc false
  alias Oban.Crontab.Cron
  # The config is held in an Agent so supervised processes can read it at
  # runtime via get/1.
  use Agent
  # A parsed crontab entry: {parsed cron expression, worker module, opts}.
  @type cronjob :: {Cron.t(), module(), Keyword.t()}
  @type t :: %__MODULE__{
          circuit_backoff: timeout(),
          crontab: [cronjob()],
          dispatch_cooldown: pos_integer(),
          name: atom(),
          node: binary(),
          plugins: [module() | {module() | Keyword.t()}],
          poll_interval: pos_integer(),
          prefix: binary(),
          queues: [{atom(), Keyword.t()}],
          repo: module(),
          shutdown_grace_period: timeout(),
          timezone: Calendar.time_zone(),
          log: false | Logger.level()
        }
  @type option :: {:name, module()} | {:conf, t()}
  # :node and :repo have no usable defaults, so they are mandatory.
  @enforce_keys [:node, :repo]
  defstruct circuit_backoff: :timer.seconds(30),
            crontab: [],
            dispatch_cooldown: 5,
            name: Oban,
            node: nil,
            plugins: [Oban.Plugins.Pruner],
            poll_interval: :timer.seconds(1),
            prefix: "public",
            queues: [],
            repo: nil,
            shutdown_grace_period: :timer.seconds(15),
            timezone: "Etc/UTC",
            log: false
  @spec start_link([option()]) :: GenServer.on_start()
  # Stores an already-built config (passed as :conf) in an Agent; the
  # remaining opts (e.g. :name) are forwarded to Agent.start_link/2.
  def start_link(opts) when is_list(opts) do
    {conf, opts} = Keyword.pop(opts, :conf)
    Agent.start_link(fn -> conf end, opts)
  end
  @spec new(Keyword.t()) :: t()
  # Builds a validated config struct from raw options. Defaults are filled
  # in first, every option is validated (raising ArgumentError on bad
  # input), then crontab/queues are normalized into their parsed forms.
  def new(opts) when is_list(opts) do
    opts =
      opts
      |> Keyword.put_new(:node, node_name())
      |> Keyword.put(:crontab, opts[:crontab] || [])
      |> Keyword.put(:plugins, opts[:plugins] || [])
      |> Keyword.put(:queues, opts[:queues] || [])
    Enum.each(opts, &validate_opt!/1)
    opts =
      opts
      |> Keyword.update!(:crontab, &parse_crontab/1)
      |> Keyword.update!(:queues, &parse_queues/1)
    struct!(__MODULE__, opts)
  end
  @spec get(atom()) :: t()
  # Reads the stored config back out of the Agent registered under `name`.
  def get(name), do: Agent.get(name, & &1)
  @spec node_name(%{optional(binary()) => binary()}) :: binary()
  # Picks a stable node identifier: the distributed-erlang node name when
  # available, then the Heroku DYNO env var, then the OS hostname.
  # `env` is injectable for testing.
  def node_name(env \\ System.get_env()) do
    cond do
      Node.alive?() ->
        to_string(node())
      Map.has_key?(env, "DYNO") ->
        Map.get(env, "DYNO")
      true ->
        :inet.gethostname()
        |> elem(1)
        |> to_string()
    end
  end
  @spec to_ident(t()) :: binary()
  # "name.node" string uniquely identifying this Oban instance.
  def to_ident(%__MODULE__{name: name, node: node}) do
    to_string(name) <> "." <> to_string(node)
  end
  @spec match_ident?(t(), binary()) :: boolean()
  def match_ident?(%__MODULE__{} = conf, ident) when is_binary(ident) do
    to_ident(conf) == ident
  end
  # Helpers
  # One validate_opt!/1 clause per known option; each raises ArgumentError
  # with a descriptive message on invalid input. The catch-all at the
  # bottom rejects unknown options entirely.
  defp validate_opt!({:circuit_backoff, interval}) do
    unless is_integer(interval) and interval > 0 do
      raise ArgumentError, "expected :circuit_backoff to be a positive integer"
    end
  end
  defp validate_opt!({:crontab, crontab}) do
    unless is_list(crontab) and Enum.all?(crontab, &valid_crontab?/1) do
      raise ArgumentError,
            "expected :crontab to be a list of {expression, worker} or " <>
              "{expression, worker, options} tuples"
    end
  end
  defp validate_opt!({:dispatch_cooldown, period}) do
    unless is_integer(period) and period > 0 do
      raise ArgumentError, "expected :dispatch_cooldown to be a positive integer"
    end
  end
  defp validate_opt!({:name, name}) do
    unless is_atom(name) do
      raise ArgumentError, "expected :name to be a module or atom"
    end
  end
  defp validate_opt!({:node, node}) do
    unless is_binary(node) and node != "" do
      raise ArgumentError, "expected :node to be a non-empty binary"
    end
  end
  defp validate_opt!({:plugins, plugins}) do
    unless is_list(plugins) and Enum.all?(plugins, &valid_plugin?/1) do
      raise ArgumentError, "expected a list of modules or {module, keyword} tuples"
    end
  end
  defp validate_opt!({:poll_interval, interval}) do
    unless is_integer(interval) and interval > 0 do
      raise ArgumentError, "expected :poll_interval to be a positive integer"
    end
  end
  defp validate_opt!({:prefix, prefix}) do
    # The prefix is interpolated into SQL, so restrict it to a safe
    # alphanumeric/underscore character set.
    unless is_binary(prefix) and Regex.match?(~r/^[a-z0-9_]+$/i, prefix) do
      raise ArgumentError, "expected :prefix to be a binary with alphanumeric characters"
    end
  end
  defp validate_opt!({:queues, queues}) do
    unless Keyword.keyword?(queues) and Enum.all?(queues, &valid_queue?/1) do
      raise ArgumentError, "expected :queues to be a keyword list of {atom, integer} pairs"
    end
  end
  defp validate_opt!({:repo, repo}) do
    # Duck-typed check: any loaded module exporting __adapter__/0 is
    # accepted as an Ecto.Repo.
    unless Code.ensure_loaded?(repo) and function_exported?(repo, :__adapter__, 0) do
      raise ArgumentError, "expected :repo to be an Ecto.Repo"
    end
  end
  defp validate_opt!({:shutdown_grace_period, interval}) do
    unless is_integer(interval) and interval > 0 do
      raise ArgumentError, "expected :shutdown_grace_period to be a positive integer"
    end
  end
  defp validate_opt!({:timezone, timezone}) do
    unless is_binary(timezone) and match?({:ok, _}, DateTime.now(timezone)) do
      raise ArgumentError, "expected :timezone to be a known timezone"
    end
  end
  defp validate_opt!({:log, log}) do
    unless log in ~w(false error warn info debug)a do
      raise ArgumentError, "expected :log to be `false` or a log level"
    end
  end
  defp validate_opt!(option) do
    raise ArgumentError, "unknown option provided #{inspect(option)}"
  end
  # A 2-tuple crontab entry is just a 3-tuple with empty options.
  defp valid_crontab?({expression, worker}) do
    valid_crontab?({expression, worker, []})
  end
  # Entry is valid when the expression is a binary and the worker is a
  # loaded module exporting perform/1.
  defp valid_crontab?({expression, worker, opts}) do
    is_binary(expression) and
      Code.ensure_loaded?(worker) and
      function_exported?(worker, :perform, 1) and
      Keyword.keyword?(opts)
  end
  defp valid_crontab?(_crontab), do: false
  # A queue value is either a positive concurrency limit or a keyword list
  # of queue options.
  defp valid_queue?({_name, opts}) do
    (is_integer(opts) and opts > 0) or Keyword.keyword?(opts)
  end
  # A plugin must be a loaded module exporting init/1; a bare module is
  # normalized to {module, []} by the second clause.
  defp valid_plugin?({plugin, opts}) do
    is_atom(plugin) and
      Code.ensure_loaded?(plugin) and
      function_exported?(plugin, :init, 1) and
      Keyword.keyword?(opts)
  end
  defp valid_plugin?(plugin), do: valid_plugin?({plugin, []})
  # Normalize crontab entries to {parsed_cron, worker, opts}; Cron.parse!/1
  # raises on malformed expressions.
  defp parse_crontab(crontab) do
    for tuple <- crontab do
      case tuple do
        {expression, worker} ->
          {Cron.parse!(expression), worker, []}
        {expression, worker, opts} ->
          {Cron.parse!(expression), worker, opts}
      end
    end
  end
  # Normalize queue values so a bare integer becomes [limit: integer].
  defp parse_queues(queues) do
    for {name, value} <- queues do
      opts = if is_integer(value), do: [limit: value], else: value
      {name, opts}
    end
  end
end
| 28.365217 | 91 | 0.624617 |
1cff84215820e90bf80cf78cba1704b62f27b216 | 4,678 | exs | Elixir | test/phoenix/tracker/pool_test.exs | grindrlabs/phoenix_pubsub | 27a9113dfa8a2e125ae74cfff36e6a322c8b97f4 | [
"MIT"
] | 3 | 2020-06-08T03:47:03.000Z | 2021-09-17T21:48:37.000Z | test/phoenix/tracker/pool_test.exs | grindrlabs/phoenix_pubsub | 27a9113dfa8a2e125ae74cfff36e6a322c8b97f4 | [
"MIT"
] | null | null | null | test/phoenix/tracker/pool_test.exs | grindrlabs/phoenix_pubsub | 27a9113dfa8a2e125ae74cfff36e6a322c8b97f4 | [
"MIT"
] | null | null | null | defmodule Phoenix.Tracker.PoolTest do
  use Phoenix.PubSub.NodeCase
  alias Phoenix.Tracker
  # Starts a tracker pool named after the test, sized by the test's
  # @tag pool_size; start_pool/1 presumably comes from NodeCase.
  setup config do
    server = config.test
    {:ok, _pid} = start_pool(name: server, pool_size: config.pool_size)
    {:ok, server: server}
  end
  # Compile-time loop: every test below is generated once per pool size so
  # behavior is exercised across 1, 2, 8 and 512 shards.
  for n <- [1,2,8,512] do
    @tag pool_size: n
    test "pool #{n}: A track/5 call results in the id being tracked",
      %{server: server} do
      {:ok, ref} = Tracker.track(server, self(), "topic", "me", %{name: "me"})
      assert [{"me", %{name: "me", phx_ref: ^ref}}]
             = Tracker.list(server, "topic")
    end
    @tag pool_size: n
    test "pool #{n}: dirty_list/2 returns tracked ids", %{server: server} do
      {:ok, ref} = Tracker.track(server, self(), "topic", "me", %{name: "me"})
      assert [{"me", %{name: "me", phx_ref: ^ref}}]
             = Tracker.dirty_list(server, "topic")
    end
    @tag pool_size: n
    test "pool #{n}: Track/5 results in all ids being tracked",
      %{server: server} do
      # 100 distinct topics spread presences across the pool's shards.
      topics = for i <- 1..100, do: "topic_#{i}"
      refs = for topic <- topics do
        {:ok, ref} = Tracker.track(server, self(), topic, "me", %{name: "me"})
        ref
      end
      for {t, ref} <- List.zip([topics, refs]) do
        assert Tracker.list(server, t) == [{"me", %{name: "me", phx_ref: ref}}]
      end
    end
    @tag pool_size: n
    test "pool #{n}: Untrack/4 results in all ids being untracked",
      %{server: server} do
      topics = for i <- 1..100, do: "topic_#{i}"
      for t <- topics do
        {:ok, _ref} = Tracker.track(server, self(), t, "me", %{a: "b"})
      end
      for t <- topics, do: :ok = Tracker.untrack(server, self(), t, "me")
      for t <- topics, do: assert Tracker.list(server, t) == []
    end
    @tag pool_size: n
    test "pool #{n}: Untrack/2 results in all ids being untracked",
      %{server: server} do
      topics = for i <- 1..100, do: "topic_#{i}"
      for t <- topics do
        {:ok, _ref} = Tracker.track(server, self(), t, "me", %{a: "b"})
      end
      # untrack/2 drops every presence owned by the pid across all topics.
      :ok = Tracker.untrack(server, self())
      for t <- topics, do: assert Tracker.list(server, t) == []
    end
    @tag pool_size: n
    test "pool #{n}: Update/5 updates a given trackees metas",
      %{server: server} do
      topics = for i <- 1..100, do: "topic_#{i}"
      old_refs = for t <- topics do
        {:ok, ref} = Tracker.track(server, self(), t, "me", %{a: "b"})
        ref
      end
      new_refs = for t <- topics do
        {:ok, new_ref} = Tracker.update(server, self(), t, "me", %{new: "thing"})
        new_ref
      end
      expected_changes = List.zip([topics, old_refs, new_refs])
      # Updated metas replace the old map and carry the old ref in
      # phx_ref_prev alongside the fresh phx_ref.
      for {t, old_ref, new_ref} <- expected_changes do
        assert [{"me", %{new: "thing",
                         phx_ref: ^new_ref,
                         phx_ref_prev: ^old_ref}}]
               = Tracker.list(server, t)
      end
    end
    @tag pool_size: n
    test "pool #{n}: Update/5 applies fun to given trackees metas",
      %{server: server} do
      topics = for i <- 1..100, do: "topic_#{i}"
      old_refs = for t <- topics do
        {:ok, ref} = Tracker.track(server, self(), t, "me", %{a: "oldval"})
        ref
      end
      # The function form of update/5 receives the current meta map.
      update_fun = fn(m) -> Map.put(m, :a, "newval") end
      new_refs = for t <- topics do
        {:ok, new_ref} = Tracker.update(server, self(), t, "me", update_fun)
        new_ref
      end
      expected_changes = List.zip([topics, old_refs, new_refs])
      for {t, old_ref, new_ref} <- expected_changes do
        assert [{"me", %{a: "newval",
                         phx_ref: ^new_ref,
                         phx_ref_prev: ^old_ref}}]
               = Tracker.list(server, t)
      end
    end
    @tag pool_size: n
    test "pool #{n}: Graceful_permdown/2 results in all ids being untracked",
      %{server: server, pool_size: n} do
      topics = for i <- 1..100, do: "topic_#{i}"
      for t <- topics do
        {:ok, _ref} = Tracker.track(server, self(), t, "me", %{a: "b"})
      end
      :ok = Tracker.graceful_permdown(server)
      # Sleep scaled by pool size to let all shards finish shutting down.
      :timer.sleep(n)
      for t <- topics, do: assert Tracker.list(server, t) == []
    end
    @tag pool_size: n
    test "pool #{n}: dirty_list_all() dumps state of all shards",
      %{server: server, pool_size: n} do
      topics = for i <- 1..100, do: "topic_#{i}"
      self = self()
      for t <- topics do
        {:ok, _ref} = Tracker.track(server, self, t, "me", %{a: "b"})
      end
      # One inner list per shard; together they hold all 100 presences.
      list_of_lists = Tracker.dirty_list_all(server)
      assert Enum.count(list_of_lists) == n
      assert Enum.concat(list_of_lists) |> Enum.count() == 100
      assert {{_topic, ^self, "me"},
              %{a: "b", phx_ref: _}} = Enum.concat(list_of_lists) |> hd()
    end
  end
end
| 31.395973 | 81 | 0.543395 |
1cffa24b68772cf5db1b3c28ecd60ee618bed5bf | 1,924 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/update_sheet_properties_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/sheets/lib/google_api/sheets/v4/model/update_sheet_properties_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/update_sheet_properties_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Sheets.V4.Model.UpdateSheetPropertiesRequest do
  @moduledoc """
  Updates properties of the sheet with the specified
  sheetId.
  ## Attributes
  *   `fields` (*type:* `String.t`, *default:* `nil`) - The fields that should be updated. At least one field must be specified.
      The root `properties` is implied and should not be specified.
      A single `"*"` can be used as short-hand for listing every field.
  *   `properties` (*type:* `GoogleApi.Sheets.V4.Model.SheetProperties.t`, *default:* `nil`) - The properties to update.
  """
  # Generated model (see the "auto generated" note in the file header).
  # The field/1,2 macros below come from the Gax model DSL pulled in here.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :fields => String.t(),
          :properties => GoogleApi.Sheets.V4.Model.SheetProperties.t()
        }
  field(:fields)
  # :properties deserializes into a nested SheetProperties model.
  field(:properties, as: GoogleApi.Sheets.V4.Model.SheetProperties)
end
# Wire UpdateSheetPropertiesRequest into Poison JSON (de)serialization by
# delegating to the helpers injected by GoogleApi.Gax.ModelBase.
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.UpdateSheetPropertiesRequest do
  def decode(value, options) do
    GoogleApi.Sheets.V4.Model.UpdateSheetPropertiesRequest.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.UpdateSheetPropertiesRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 36.301887 | 129 | 0.735447 |
1cffb4241ffc984682762a32b26dfa2763699f52 | 1,074 | exs | Elixir | exercises/grains/grains_test.exs | martinsvalin/xelixir | 9469d92e7eecb528a05a8da923d8271ed303c058 | [
"MIT"
] | null | null | null | exercises/grains/grains_test.exs | martinsvalin/xelixir | 9469d92e7eecb528a05a8da923d8271ed303c058 | [
"MIT"
] | null | null | null | exercises/grains/grains_test.exs | martinsvalin/xelixir | 9469d92e7eecb528a05a8da923d8271ed303c058 | [
"MIT"
] | null | null | null | if !System.get_env("EXERCISM_TEST_EXAMPLES") do
Code.load_file("grains.exs")
end
ExUnit.start
ExUnit.configure exclude: :pending, trace: true
# NOTE: :math.pow/2 doesn't do what you'd expect:
# `:math.pow(2, 64) == :math.pow(2, 64) - 1` is true.
#
# It's best to avoid functions operating on floating point numbers for very
# large numbers.
defmodule GrainsTest do
  # Exercism test suite for the wheat-and-chessboard problem: square/1
  # returns 2^(n-1) grains for square n; total/0 sums all 64 squares
  # (2^64 - 1). All but the first test are excluded by default via the
  # :pending tag configured above.
  use ExUnit.Case

  # @tag :pending
  test "square 1" do
    assert Grains.square(1) === 1
  end
  @tag :pending
  test "square 2" do
    assert Grains.square(2) === 2
  end
  @tag :pending
  test "square 3" do
    assert Grains.square(3) === 4
  end
  @tag :pending
  test "square 4" do
    assert Grains.square(4) === 8
  end
  @tag :pending
  test "square 16" do
    assert Grains.square(16) === 32768
  end
  @tag :pending
  test "square 32" do
    assert Grains.square(32) === 2147483648
  end
  @tag :pending
  test "square 64" do
    assert Grains.square(64) === 9223372036854775808
  end
  @tag :pending
  test "total grains" do
    assert Grains.total === 18446744073709551615
  end
end
| 18.842105 | 75 | 0.66108 |
1cffbc3068495dceb32d568d4d16566ae462e5cb | 1,977 | ex | Elixir | clients/street_view_publish/lib/google_api/street_view_publish/v1/model/photo_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/street_view_publish/lib/google_api/street_view_publish/v1/model/photo_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/street_view_publish/lib/google_api/street_view_publish/v1/model/photo_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.StreetViewPublish.V1.Model.PhotoResponse do
  @moduledoc """
  Response payload for a single Photo in batch operations including
  BatchGetPhotos and BatchUpdatePhotos.

  ## Attributes

  *   `photo` (*type:* `GoogleApi.StreetViewPublish.V1.Model.Photo.t`, *default:* `nil`) -
      The Photo resource, if the request was successful.
  *   `status` (*type:* `GoogleApi.StreetViewPublish.V1.Model.Status.t`, *default:* `nil`) -
      The status for the operation to get or update a single photo in the
      batch request.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :photo => GoogleApi.StreetViewPublish.V1.Model.Photo.t(),
          :status => GoogleApi.StreetViewPublish.V1.Model.Status.t()
        }

  # field/2 declarations drive JSON (de)serialization via GoogleApi.Gax.ModelBase.
  field(:photo, as: GoogleApi.StreetViewPublish.V1.Model.Photo)
  field(:status, as: GoogleApi.StreetViewPublish.V1.Model.Status)
end
defimpl Poison.Decoder, for: GoogleApi.StreetViewPublish.V1.Model.PhotoResponse do
  # Delegate JSON decoding to the decode/2 generated by GoogleApi.Gax.ModelBase.
  def decode(value, options),
    do: GoogleApi.StreetViewPublish.V1.Model.PhotoResponse.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.StreetViewPublish.V1.Model.PhotoResponse do
  # Encoding is handled generically for all models by GoogleApi.Gax.ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 34.684211 | 166 | 0.741528 |
1cffcc46f53e181fcbfeec12fca7e0da4e3f4a28 | 1,464 | exs | Elixir | test/fixtures/umbrella_app/rel/config.exs | arikai/distillery | 65ddbcc143f2849a6ed5574e8c397a68ca92eb81 | [
"MIT"
] | 3,097 | 2016-07-18T13:59:00.000Z | 2022-03-29T00:27:23.000Z | test/fixtures/umbrella_app/rel/config.exs | arikai/distillery | 65ddbcc143f2849a6ed5574e8c397a68ca92eb81 | [
"MIT"
] | 672 | 2016-07-18T18:25:29.000Z | 2022-02-24T17:39:30.000Z | test/fixtures/umbrella_app/rel/config.exs | arikai/distillery | 65ddbcc143f2849a6ed5574e8c397a68ca92eb81 | [
"MIT"
] | 483 | 2016-07-22T14:08:49.000Z | 2022-03-21T09:35:23.000Z | # Import all plugins from `rel/plugins`
# They can then be used by adding `plugin MyPlugin` to
# either an environment, or release definition, where
# `MyPlugin` is the name of the plugin module.
~w(rel plugins *.exs)
|> Path.join()
|> Path.wildcard()
|> Enum.map(&Code.eval_file(&1))
use Distillery.Releases.Config,
# This sets the default release built by `mix distillery.release`
default_release: :default,
# This sets the default environment used by `mix distillery.release`
default_environment: Mix.env()
environment :dev do
# If you are running Phoenix, you should make sure that
# server: true is set and the code reloader is disabled,
# even in dev mode.
# It is recommended that you build with MIX_ENV=prod and pass
# the --env flag to Distillery explicitly if you want to use
# dev mode.
set dev_mode: true
set include_erts: false
set cookie: :"f:okQO{}o8:7Hi^&jI4ssu{71FoJ5dFE!2Bmg}~dtzxyzpY]dmDSc!epwJ`e*k_S"
end
environment :prod do
set include_erts: true
set include_src: false
set cookie: :"$^{@jVal*|$,)nJPdZNlUsMQMUEDBh7A?~U2x^>/f`J72xpa@kbm}`}QwLIHF1yR"
set vm_args: "rel/vm.args"
end
release :umbrella do
set version: "0.1.0"
set applications: [
:runtime_tools,
web: :permanent
]
set config_providers: [
{Distillery.Releases.Config.Providers.Elixir, ["${RELEASE_ROOT_DIR}/etc/config.exs"]}
]
set overlays: [
{:copy, "rel/config/config.exs", "etc/config.exs"}
]
end
| 28.153846 | 89 | 0.705601 |
1cffe0079da226378ab4e70d7675bde69338724d | 938 | exs | Elixir | test/credo/check/readability/module_names_test.exs | kanmaniselvan/credo | 276e0fc24d1bf56c8fc2902a9e933c8f208ce391 | [
"MIT"
] | 1 | 2021-12-01T13:37:43.000Z | 2021-12-01T13:37:43.000Z | test/credo/check/readability/module_names_test.exs | kanmaniselvan/credo | 276e0fc24d1bf56c8fc2902a9e933c8f208ce391 | [
"MIT"
] | null | null | null | test/credo/check/readability/module_names_test.exs | kanmaniselvan/credo | 276e0fc24d1bf56c8fc2902a9e933c8f208ce391 | [
"MIT"
] | 1 | 2019-10-08T16:42:40.000Z | 2019-10-08T16:42:40.000Z | defmodule Credo.Check.Readability.ModuleNamesTest do
use Credo.Test.Case
@described_check Credo.Check.Readability.ModuleNames
#
# cases NOT raising issues
#
test "it should NOT report expected code" do
"""
defmodule CredoSampleModule do
end
"""
|> to_source_file
|> run_check(@described_check)
|> refute_issues()
end
test "it should NOT report if module name cannot be determinated" do
"""
defmacro foo(quoted_module) do
{module, []} = Code.eval_quoted(quoted_module)
quote do
defmodule unquote(module).Bar do
end
end
end
"""
|> to_source_file
|> run_check(@described_check)
|> refute_issues()
end
#
# cases raising issues
#
test "it should report a violation /2" do
"""
defmodule Credo_SampleModule do
end
"""
|> to_source_file
|> run_check(@described_check)
|> assert_issue()
end
end
| 19.142857 | 70 | 0.639659 |
1cffe05f773aa1b89b4a10ac404d8d88477f4702 | 2,435 | ex | Elixir | lib/songmate_web/controllers/page_controller.ex | jimytc/music-dating-app | ec46ef2ffa4fb263a8b283a96495b0643467697c | [
"MIT"
] | null | null | null | lib/songmate_web/controllers/page_controller.ex | jimytc/music-dating-app | ec46ef2ffa4fb263a8b283a96495b0643467697c | [
"MIT"
] | null | null | null | lib/songmate_web/controllers/page_controller.ex | jimytc/music-dating-app | ec46ef2ffa4fb263a8b283a96495b0643467697c | [
"MIT"
] | null | null | null | defmodule SongmateWeb.PageController do
use SongmateWeb, :controller
alias Songmate.Repo
alias Songmate.Accounts
alias Songmate.MusicProfile
plug :check_tokens
def check_tokens(conn, _params) do
if Spotify.Authentication.tokens_present?(conn) do
{:ok, conn} = Spotify.Authentication.refresh(conn)
conn
else
redirect(conn, to: "/authorize")
end
end
def index(conn, _params) do
{user, profile} = build_current_profile(conn)
render(
conn,
"index.html",
name: user.name,
top_tracks: profile.track_preferences
|> Enum.map(&(&1.track.name)),
top_artists: profile.artist_preferences
|> Enum.map(&(&1.artist.name)),
top_genres: profile.genre_preferences
|> Enum.map(&(&1.genre.name)),
top_matches: []
)
end
defp build_current_profile(conn) do
profile_attrs = SpotifyService.fetch_user_info(conn)
user = case Accounts.get_user_by_username(profile_attrs[:username]) do
nil ->
{:ok, user} = Accounts.create_user(
%{
name: profile_attrs[:display_name],
avatar: profile_attrs[:avatar_url],
credential: %{
email: profile_attrs[:email],
username: profile_attrs[:username],
provider: :spotify
}
}
)
user
user -> user
end
tops = SpotifyService.fetch_tops(conn)
profile = MusicProfile.create_or_update_profile(
%{
user: user,
artist_preferences: build_preferences(:artist, tops[:artists]),
track_preferences: build_preferences(:track, tops[:tracks]),
genre_preferences: build_preferences(:genre, tops[:genres])
}
)
|> Repo.preload([[artist_preferences: :artist], [track_preferences: :track], [genre_preferences: :genre]])
{user, profile}
end
def build_preferences(label, data) do
data
|> Enum.with_index()
|> Enum.map(fn {row, idx} -> %{label => row, :rank => idx} end)
end
def info(conn, _params) do
render(conn, "info.html")
end
def login(conn, _params) do
render(conn, "login.html")
end
def music(conn, _params) do
render(conn, "music.html", top_matches: [])
end
def chat(conn, _params) do
render(conn, "chat.html")
end
end
| 26.182796 | 120 | 0.595483 |
e800009b80634f7ac769e1e4ec5f6956e7118c42 | 244 | ex | Elixir | lib/gps/last_known_location.ex | JesseHerrick/phoenix_liveview_gps | 711b0cd3eb7277e12be3bac26aa950914c4cf8e7 | [
"MIT"
] | 1 | 2021-03-28T08:22:40.000Z | 2021-03-28T08:22:40.000Z | lib/gps/last_known_location.ex | JesseHerrick/phoenix_liveview_gps | 711b0cd3eb7277e12be3bac26aa950914c4cf8e7 | [
"MIT"
] | null | null | null | lib/gps/last_known_location.ex | JesseHerrick/phoenix_liveview_gps | 711b0cd3eb7277e12be3bac26aa950914c4cf8e7 | [
"MIT"
] | 1 | 2021-01-15T12:40:12.000Z | 2021-01-15T12:40:12.000Z | defmodule Gps.LastKnownLocation do
use GenServer
def start_link(_) do
GenServer.start_link(__MODULE__, [])
end
@impl true
def init(_) do
:ets.new(:last_known_location, [:set, :public, :named_table])
{:ok, []}
end
end
| 16.266667 | 65 | 0.663934 |
e8002785ce759d88a179a41f35cdf2d7b9a33653 | 366 | exs | Elixir | config/dev.secret.example.exs | savekirk/elixir_jobs | d7ec0f088a1365f3ae5cbbd6c07c2b3fdde9a946 | [
"MIT"
] | null | null | null | config/dev.secret.example.exs | savekirk/elixir_jobs | d7ec0f088a1365f3ae5cbbd6c07c2b3fdde9a946 | [
"MIT"
] | null | null | null | config/dev.secret.example.exs | savekirk/elixir_jobs | d7ec0f088a1365f3ae5cbbd6c07c2b3fdde9a946 | [
"MIT"
] | null | null | null | use Mix.Config
# Configure your database
config :elixir_jobs, ElixirJobs.Repo,
adapter: Ecto.Adapters.Postgres,
username: "DB_USERNAME",
password: "DB_PASSWORD",
database: "elixir_jobs_dev",
hostname: "localhost",
pool_size: 10
config :extwitter, :oauth, [
consumer_key: "",
consumer_secret: "",
access_token: "",
access_token_secret: ""
]
| 20.333333 | 37 | 0.704918 |
e800423ccd4d23ee7648156d38d4343d88088feb | 62,550 | ex | Elixir | clients/secret_manager/lib/google_api/secret_manager/v1/api/projects.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/secret_manager/lib/google_api/secret_manager/v1/api/projects.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/secret_manager/lib/google_api/secret_manager/v1/api/projects.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SecretManager.V1.Api.Projects do
@moduledoc """
API calls for all endpoints tagged `Projects`.
"""
alias GoogleApi.SecretManager.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Gets information about a location.
## Parameters
* `connection` (*type:* `GoogleApi.SecretManager.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Resource name for the location.
* `locations_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SecretManager.V1.Model.Location{}}` on success
* `{:error, info}` on failure
"""
@spec secretmanager_projects_locations_get(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SecretManager.V1.Model.Location.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def secretmanager_projects_locations_get(
connection,
projects_id,
locations_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/projects/{projectsId}/locations/{locationsId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"locationsId" => URI.encode(locations_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.SecretManager.V1.Model.Location{}])
end
@doc """
Lists information about the supported locations for this service.
## Parameters
* `connection` (*type:* `GoogleApi.SecretManager.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. The resource that owns the locations collection, if applicable.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - A filter to narrow down results to a preferred subset. The filtering language accepts strings like `"displayName=tokyo"`, and is documented in more detail in [AIP-160](https://google.aip.dev/160).
* `:pageSize` (*type:* `integer()`) - The maximum number of results to return. If not set, the service selects a default.
* `:pageToken` (*type:* `String.t`) - A page token received from the `next_page_token` field in the response. Send that page token to receive the subsequent page.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SecretManager.V1.Model.ListLocationsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec secretmanager_projects_locations_list(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SecretManager.V1.Model.ListLocationsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def secretmanager_projects_locations_list(
connection,
projects_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:filter => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/projects/{projectsId}/locations", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.SecretManager.V1.Model.ListLocationsResponse{}]
)
end
@doc """
Creates a new SecretVersion containing secret data and attaches it to an existing Secret.
## Parameters
* `connection` (*type:* `GoogleApi.SecretManager.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. Required. The resource name of the Secret to associate with the SecretVersion in the format `projects/*/secrets/*`.
* `secrets_id` (*type:* `String.t`) - Part of `parent`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.SecretManager.V1.Model.AddSecretVersionRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SecretManager.V1.Model.SecretVersion{}}` on success
* `{:error, info}` on failure
"""
@spec secretmanager_projects_secrets_add_version(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SecretManager.V1.Model.SecretVersion.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def secretmanager_projects_secrets_add_version(
connection,
projects_id,
secrets_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/projects/{projectsId}/secrets/{secretsId}:addVersion", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"secretsId" => URI.encode(secrets_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.SecretManager.V1.Model.SecretVersion{}])
end
@doc """
Creates a new Secret containing no SecretVersions.
## Parameters
* `connection` (*type:* `GoogleApi.SecretManager.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. Required. The resource name of the project to associate with the Secret, in the format `projects/*`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:secretId` (*type:* `String.t`) - Required. This must be unique within the project. A secret ID is a string with a maximum length of 255 characters and can contain uppercase and lowercase letters, numerals, and the hyphen (`-`) and underscore (`_`) characters.
* `:body` (*type:* `GoogleApi.SecretManager.V1.Model.Secret.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SecretManager.V1.Model.Secret{}}` on success
* `{:error, info}` on failure
"""
@spec secretmanager_projects_secrets_create(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SecretManager.V1.Model.Secret.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def secretmanager_projects_secrets_create(
connection,
projects_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:secretId => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/projects/{projectsId}/secrets", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.SecretManager.V1.Model.Secret{}])
end
@doc """
Deletes a Secret.
## Parameters
* `connection` (*type:* `GoogleApi.SecretManager.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of the Secret to delete in the format `projects/*/secrets/*`.
* `secrets_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:etag` (*type:* `String.t`) - Optional. Etag of the Secret. The request succeeds if it matches the etag of the currently stored secret object. If the etag is omitted, the request succeeds.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SecretManager.V1.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec secretmanager_projects_secrets_delete(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SecretManager.V1.Model.Empty.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def secretmanager_projects_secrets_delete(
connection,
projects_id,
secrets_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:etag => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/v1/projects/{projectsId}/secrets/{secretsId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"secretsId" => URI.encode(secrets_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.SecretManager.V1.Model.Empty{}])
end
@doc """
Gets metadata for a given Secret.
## Parameters
* `connection` (*type:* `GoogleApi.SecretManager.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of the Secret, in the format `projects/*/secrets/*`.
* `secrets_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SecretManager.V1.Model.Secret{}}` on success
* `{:error, info}` on failure
"""
@spec secretmanager_projects_secrets_get(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SecretManager.V1.Model.Secret.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def secretmanager_projects_secrets_get(
connection,
projects_id,
secrets_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/projects/{projectsId}/secrets/{secretsId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"secretsId" => URI.encode(secrets_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.SecretManager.V1.Model.Secret{}])
end
@doc """
Gets the access control policy for a secret. Returns empty policy if the secret exists and does not have a policy set.
## Parameters
* `connection` (*type:* `GoogleApi.SecretManager.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `resource`. REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field.
* `secrets_id` (*type:* `String.t`) - Part of `resource`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:"options.requestedPolicyVersion"` (*type:* `integer()`) - Optional. The maximum policy version that will be used to format the policy. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. Requests for policies with any conditional role bindings must specify version 3. Policies with no conditional role bindings may specify any valid value or leave the field unset. The policy in the response might use the policy version that you specified, or it might use a lower policy version. For example, if you specify version 3, but the policy has no conditional role bindings, the response uses version 1. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SecretManager.V1.Model.Policy{}}` on success
* `{:error, info}` on failure
"""
@spec secretmanager_projects_secrets_get_iam_policy(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SecretManager.V1.Model.Policy.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def secretmanager_projects_secrets_get_iam_policy(
connection,
projects_id,
secrets_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:"options.requestedPolicyVersion" => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/projects/{projectsId}/secrets/{secretsId}:getIamPolicy", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"secretsId" => URI.encode(secrets_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.SecretManager.V1.Model.Policy{}])
end
@doc """
Lists Secrets.
## Parameters
* `connection` (*type:* `GoogleApi.SecretManager.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. Required. The resource name of the project associated with the Secrets, in the format `projects/*`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - Optional. Filter string, adhering to the rules in [List-operation filtering](https://cloud.google.com/secret-manager/docs/filtering). List only secrets matching the filter. If filter is empty, all secrets are listed.
* `:pageSize` (*type:* `integer()`) - Optional. The maximum number of results to be returned in a single page. If set to 0, the server decides the number of results to return. If the number is greater than 25000, it is capped at 25000.
* `:pageToken` (*type:* `String.t`) - Optional. Pagination token, returned earlier via ListSecretsResponse.next_page_token.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SecretManager.V1.Model.ListSecretsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec secretmanager_projects_secrets_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.SecretManager.V1.Model.ListSecretsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def secretmanager_projects_secrets_list(
connection,
projects_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:filter => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/projects/{projectsId}/secrets", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.SecretManager.V1.Model.ListSecretsResponse{}])
end
@doc """
Updates metadata of an existing Secret.
## Parameters
* `connection` (*type:* `GoogleApi.SecretManager.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `secret.name`. Output only. The resource name of the Secret in the format `projects/*/secrets/*`.
* `secrets_id` (*type:* `String.t`) - Part of `secret.name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:updateMask` (*type:* `String.t`) - Required. Specifies the fields to be updated.
* `:body` (*type:* `GoogleApi.SecretManager.V1.Model.Secret.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SecretManager.V1.Model.Secret{}}` on success
* `{:error, info}` on failure
"""
@spec secretmanager_projects_secrets_patch(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SecretManager.V1.Model.Secret.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def secretmanager_projects_secrets_patch(
connection,
projects_id,
secrets_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/v1/projects/{projectsId}/secrets/{secretsId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"secretsId" => URI.encode(secrets_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.SecretManager.V1.Model.Secret{}])
end
@doc """
Sets the access control policy on the specified secret. Replaces any existing policy. Permissions on SecretVersions are enforced according to the policy set on the associated Secret.
## Parameters
* `connection` (*type:* `GoogleApi.SecretManager.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `resource`. REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field.
* `secrets_id` (*type:* `String.t`) - Part of `resource`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.SecretManager.V1.Model.SetIamPolicyRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SecretManager.V1.Model.Policy{}}` on success
* `{:error, info}` on failure
"""
@spec secretmanager_projects_secrets_set_iam_policy(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SecretManager.V1.Model.Policy.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def secretmanager_projects_secrets_set_iam_policy(
connection,
projects_id,
secrets_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/projects/{projectsId}/secrets/{secretsId}:setIamPolicy", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"secretsId" => URI.encode(secrets_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.SecretManager.V1.Model.Policy{}])
end
@doc """
Returns permissions that a caller has for the specified secret. If the secret does not exist, this call returns an empty set of permissions, not a NOT_FOUND error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning.
## Parameters
* `connection` (*type:* `GoogleApi.SecretManager.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `resource`. REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field.
* `secrets_id` (*type:* `String.t`) - Part of `resource`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.SecretManager.V1.Model.TestIamPermissionsRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SecretManager.V1.Model.TestIamPermissionsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec secretmanager_projects_secrets_test_iam_permissions(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SecretManager.V1.Model.TestIamPermissionsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def secretmanager_projects_secrets_test_iam_permissions(
connection,
projects_id,
secrets_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v1/projects/{projectsId}/secrets/{secretsId}:testIamPermissions", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"secretsId" => URI.encode(secrets_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.SecretManager.V1.Model.TestIamPermissionsResponse{}]
)
end
@doc """
Accesses a SecretVersion. This call returns the secret data. `projects/*/secrets/*/versions/latest` is an alias to the most recently created SecretVersion.
## Parameters
* `connection` (*type:* `GoogleApi.SecretManager.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of the SecretVersion in the format `projects/*/secrets/*/versions/*`. `projects/*/secrets/*/versions/latest` is an alias to the most recently created SecretVersion.
* `secrets_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `versions_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SecretManager.V1.Model.AccessSecretVersionResponse{}}` on success
* `{:error, info}` on failure
"""
@spec secretmanager_projects_secrets_versions_access(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SecretManager.V1.Model.AccessSecretVersionResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def secretmanager_projects_secrets_versions_access(
connection,
projects_id,
secrets_id,
versions_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url(
"/v1/projects/{projectsId}/secrets/{secretsId}/versions/{versionsId}:access",
%{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"secretsId" => URI.encode(secrets_id, &URI.char_unreserved?/1),
"versionsId" => URI.encode(versions_id, &URI.char_unreserved?/1)
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.SecretManager.V1.Model.AccessSecretVersionResponse{}]
)
end
@doc """
Destroys a SecretVersion. Sets the state of the SecretVersion to DESTROYED and irrevocably destroys the secret data.
## Parameters
* `connection` (*type:* `GoogleApi.SecretManager.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of the SecretVersion to destroy in the format `projects/*/secrets/*/versions/*`.
* `secrets_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `versions_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.SecretManager.V1.Model.DestroySecretVersionRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SecretManager.V1.Model.SecretVersion{}}` on success
* `{:error, info}` on failure
"""
@spec secretmanager_projects_secrets_versions_destroy(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SecretManager.V1.Model.SecretVersion.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def secretmanager_projects_secrets_versions_destroy(
connection,
projects_id,
secrets_id,
versions_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url(
"/v1/projects/{projectsId}/secrets/{secretsId}/versions/{versionsId}:destroy",
%{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"secretsId" => URI.encode(secrets_id, &URI.char_unreserved?/1),
"versionsId" => URI.encode(versions_id, &URI.char_unreserved?/1)
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.SecretManager.V1.Model.SecretVersion{}])
end
@doc """
Disables a SecretVersion. Sets the state of the SecretVersion to DISABLED.
## Parameters
* `connection` (*type:* `GoogleApi.SecretManager.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of the SecretVersion to disable in the format `projects/*/secrets/*/versions/*`.
* `secrets_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `versions_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.SecretManager.V1.Model.DisableSecretVersionRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SecretManager.V1.Model.SecretVersion{}}` on success
* `{:error, info}` on failure
"""
@spec secretmanager_projects_secrets_versions_disable(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SecretManager.V1.Model.SecretVersion.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def secretmanager_projects_secrets_versions_disable(
connection,
projects_id,
secrets_id,
versions_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url(
"/v1/projects/{projectsId}/secrets/{secretsId}/versions/{versionsId}:disable",
%{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"secretsId" => URI.encode(secrets_id, &URI.char_unreserved?/1),
"versionsId" => URI.encode(versions_id, &URI.char_unreserved?/1)
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.SecretManager.V1.Model.SecretVersion{}])
end
@doc """
Enables a SecretVersion. Sets the state of the SecretVersion to ENABLED.
## Parameters
* `connection` (*type:* `GoogleApi.SecretManager.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of the SecretVersion to enable in the format `projects/*/secrets/*/versions/*`.
* `secrets_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `versions_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.SecretManager.V1.Model.EnableSecretVersionRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SecretManager.V1.Model.SecretVersion{}}` on success
* `{:error, info}` on failure
"""
@spec secretmanager_projects_secrets_versions_enable(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SecretManager.V1.Model.SecretVersion.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def secretmanager_projects_secrets_versions_enable(
connection,
projects_id,
secrets_id,
versions_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url(
"/v1/projects/{projectsId}/secrets/{secretsId}/versions/{versionsId}:enable",
%{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"secretsId" => URI.encode(secrets_id, &URI.char_unreserved?/1),
"versionsId" => URI.encode(versions_id, &URI.char_unreserved?/1)
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.SecretManager.V1.Model.SecretVersion{}])
end
@doc """
Gets metadata for a SecretVersion. `projects/*/secrets/*/versions/latest` is an alias to the most recently created SecretVersion.
## Parameters
* `connection` (*type:* `GoogleApi.SecretManager.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of the SecretVersion in the format `projects/*/secrets/*/versions/*`. `projects/*/secrets/*/versions/latest` is an alias to the most recently created SecretVersion.
* `secrets_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `versions_id` (*type:* `String.t`) - Part of `name`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SecretManager.V1.Model.SecretVersion{}}` on success
* `{:error, info}` on failure
"""
@spec secretmanager_projects_secrets_versions_get(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SecretManager.V1.Model.SecretVersion.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def secretmanager_projects_secrets_versions_get(
connection,
projects_id,
secrets_id,
versions_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/projects/{projectsId}/secrets/{secretsId}/versions/{versionsId}", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"secretsId" => URI.encode(secrets_id, &URI.char_unreserved?/1),
"versionsId" => URI.encode(versions_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.SecretManager.V1.Model.SecretVersion{}])
end
@doc """
Lists SecretVersions. This call does not return secret data.
## Parameters
* `connection` (*type:* `GoogleApi.SecretManager.V1.Connection.t`) - Connection to server
* `projects_id` (*type:* `String.t`) - Part of `parent`. Required. The resource name of the Secret associated with the SecretVersions to list, in the format `projects/*/secrets/*`.
* `secrets_id` (*type:* `String.t`) - Part of `parent`. See documentation of `projectsId`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:filter` (*type:* `String.t`) - Optional. Filter string, adhering to the rules in [List-operation filtering](https://cloud.google.com/secret-manager/docs/filtering). List only secret versions matching the filter. If filter is empty, all secret versions are listed.
* `:pageSize` (*type:* `integer()`) - Optional. The maximum number of results to be returned in a single page. If set to 0, the server decides the number of results to return. If the number is greater than 25000, it is capped at 25000.
      *   `:pageToken` (*type:* `String.t`) - Optional. Pagination token, returned earlier via ListSecretVersionsResponse.next_page_token.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SecretManager.V1.Model.ListSecretVersionsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec secretmanager_projects_secrets_versions_list(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SecretManager.V1.Model.ListSecretVersionsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def secretmanager_projects_secrets_versions_list(
connection,
projects_id,
secrets_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:filter => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/projects/{projectsId}/secrets/{secretsId}/versions", %{
"projectsId" => URI.encode(projects_id, &URI.char_unreserved?/1),
"secretsId" => URI.encode(secrets_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.SecretManager.V1.Model.ListSecretVersionsResponse{}]
)
end
end
| 46.299038 | 802 | 0.615316 |
e8005b4ba7d492aee12cbaccbb471d05519a5536 | 1,054 | exs | Elixir | test/time_zone_info/downloader_test.exs | hrzndhrn/time_zone_info | 18fa4e7aefd68d256202de8e0f96b69b8a9dc618 | [
"MIT"
] | 5 | 2020-04-05T16:03:03.000Z | 2022-02-07T22:11:04.000Z | test/time_zone_info/downloader_test.exs | hrzndhrn/time_zone_info | 18fa4e7aefd68d256202de8e0f96b69b8a9dc618 | [
"MIT"
] | 16 | 2020-03-28T17:46:13.000Z | 2021-08-25T08:35:48.000Z | test/time_zone_info/downloader_test.exs | hrzndhrn/time_zone_info | 18fa4e7aefd68d256202de8e0f96b69b8a9dc618 | [
"MIT"
] | null | null | null | defmodule TimeZoneInfo.DownloaderTest do
use ExUnit.Case
import Mox
import TimeZoneInfo.TestUtils
alias TimeZoneInfo.{Downloader, DownloaderMock}
setup do
on_exit(&delete_env/0)
end
setup :verify_on_exit!
test "download/1" do
env =
put_env(
downloader: [
module: DownloaderMock,
mode: :etf,
uri: "http://localhost:123/data.etf",
headers: [
{"Content-Type", "application/tar+gzip"},
{"User-Agent", "Elixir.TimeZoneInfo.Mint"}
]
]
)
expect(DownloaderMock, :download, fn uri, opts ->
assert uri == URI.parse(env[:downloader][:uri])
assert opts ==
env
|> Keyword.fetch!(:downloader)
|> Keyword.delete(:module)
|> Keyword.delete(:uri)
end)
assert Downloader.download([])
end
test "returns error tuple if config is unavailable" do
delete_env()
assert Downloader.download([]) == {:error, {:invalid_config, :downloader}}
end
end
| 22.425532 | 78 | 0.580645 |
e80082ae930d2bfe572e1becd1c73f7377e29776 | 637 | ex | Elixir | lib/docusign/model/envelope_attachments_result.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 4 | 2020-12-21T12:50:13.000Z | 2022-01-12T16:50:43.000Z | lib/docusign/model/envelope_attachments_result.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 12 | 2018-09-18T15:26:34.000Z | 2019-09-28T15:29:39.000Z | lib/docusign/model/envelope_attachments_result.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 15 | 2020-04-29T21:50:16.000Z | 2022-02-11T18:01:51.000Z | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule DocuSign.Model.EnvelopeAttachmentsResult do
  @moduledoc """
  Result container returned by the envelope-attachments endpoints, holding
  the list of attachments for an envelope.
  """
  @derive [Poison.Encoder]
  defstruct [
    :attachments
  ]
  # Fix: the original spec said `[EnvelopeAttachments]`, which in a typespec
  # is a list of the literal module atom, not the struct type. The field holds
  # decoded EnvelopeAttachments structs (see the Poison.Decoder impl) and
  # defaults to nil in the struct, so `| nil` is included.
  @type t :: %__MODULE__{
          :attachments => [DocuSign.Model.EnvelopeAttachments.t()] | nil
        }
end
defimpl Poison.Decoder, for: DocuSign.Model.EnvelopeAttachmentsResult do
  import DocuSign.Deserializer

  # Decode the :attachments field into a list of EnvelopeAttachments structs.
  def decode(value, options) do
    deserialize(value, :attachments, :list, DocuSign.Model.EnvelopeAttachments, options)
  end
end
| 22.75 | 84 | 0.720565 |
e8008e2165c7db2a40f6c5acb57f71af9b3e1f59 | 8,122 | ex | Elixir | lib/nerves_time/ntpd.ex | LostKobrakai/nerves_time | c7e49f436c40a571d9213e5c19f41ee1b71062cf | [
"Apache-2.0"
] | 17 | 2018-08-12T03:50:39.000Z | 2020-01-05T06:47:04.000Z | lib/nerves_time/ntpd.ex | LostKobrakai/nerves_time | c7e49f436c40a571d9213e5c19f41ee1b71062cf | [
"Apache-2.0"
] | 38 | 2018-08-12T12:07:46.000Z | 2020-02-11T10:11:48.000Z | lib/nerves_time/ntpd.ex | LostKobrakai/nerves_time | c7e49f436c40a571d9213e5c19f41ee1b71062cf | [
"Apache-2.0"
] | 6 | 2018-08-12T03:52:35.000Z | 2019-10-28T21:19:39.000Z | defmodule NervesTime.Ntpd do
use GenServer
require Logger
@moduledoc false
# If restarting ntpd due to a crash, delay its start to avoid pegging
# ntp servers. This delay can be long since the clock has either been
# set (i.e., it's not far off from the actual time) or there is a problem
# setting the time that has a low probability of being fixed by trying
# again immediately. Plus ntp server admins get annoyed by misbehaving
# IoT devices pegging their servers and we don't want that.
@ntpd_restart_delay 60_000
@ntpd_clean_start_delay 10
@default_ntpd_path "/usr/sbin/ntpd"
@default_ntp_servers [
"0.pool.ntp.org",
"1.pool.ntp.org",
"2.pool.ntp.org",
"3.pool.ntp.org"
]
defmodule State do
@moduledoc false
@type t() :: %__MODULE__{
socket: :gen_udp.socket(),
servers: [String.t()],
daemon: nil | pid(),
synchronized?: boolean(),
clean_start?: boolean()
}
defstruct socket: nil,
servers: [],
daemon: nil,
synchronized?: false,
clean_start?: true
end
@spec start_link(any()) :: GenServer.on_start()
def start_link(_args) do
GenServer.start_link(__MODULE__, [], name: __MODULE__)
end
@doc """
Return whether ntpd has synchronized with a time server
"""
@spec synchronized?() :: boolean()
def synchronized?() do
GenServer.call(__MODULE__, :synchronized?)
end
@doc """
Return whether ntpd was started cleanly
If ntpd crashes or this GenServer crashes, then the run is considered
unclean and there's a delay in starting ntpd. This is intended to
prevent abusive polling of public ntpd servers.
"""
@spec clean_start?() :: boolean()
def clean_start?() do
GenServer.call(__MODULE__, :clean_start?)
end
@doc """
Update the list of NTP servers to poll
"""
@spec set_ntp_servers([String.t()]) :: :ok
def set_ntp_servers(servers) when is_list(servers) do
GenServer.call(__MODULE__, {:set_ntp_servers, servers})
end
@doc """
Get the list of NTP servers
"""
@spec ntp_servers() :: [String.t()] | {:error, term()}
def ntp_servers() do
GenServer.call(__MODULE__, :ntp_servers)
end
@doc """
Manually restart ntpd
"""
@spec restart_ntpd() :: :ok | {:error, term()}
def restart_ntpd() do
GenServer.call(__MODULE__, :restart_ntpd)
end
@impl GenServer
def init(_args) do
app_env = Application.get_all_env(:nerves_time)
ntp_servers = Keyword.get(app_env, :servers, @default_ntp_servers)
{:ok, %State{servers: ntp_servers}, {:continue, :continue}}
end
@impl GenServer
def handle_continue(:continue, state) do
new_state =
state
|> prep_ntpd_start()
|> schedule_ntpd_start()
{:noreply, new_state}
end
@impl GenServer
def handle_call(:synchronized?, _from, state) do
{:reply, state.synchronized?, state}
end
@impl GenServer
def handle_call(:clean_start?, _from, state) do
{:reply, state.clean_start?, state}
end
@impl GenServer
def handle_call({:set_ntp_servers, servers}, _from, state) do
new_state = %{state | servers: servers} |> stop_ntpd() |> schedule_ntpd_start()
{:reply, :ok, new_state}
end
@impl GenServer
def handle_call(:ntp_servers, _from, %State{servers: servers} = state) do
{:reply, servers, state}
end
@impl GenServer
def handle_call(:restart_ntpd, _from, state) do
new_state =
%{state | clean_start?: true}
|> stop_ntpd()
|> schedule_ntpd_start()
{:reply, :ok, new_state}
end
@impl GenServer
def handle_info(:start_ntpd, %State{daemon: nil, servers: servers} = state)
when servers != [] do
new_state = start_ntpd(state)
{:noreply, new_state}
end
@impl GenServer
def handle_info(:start_ntpd, state) do
# Ignore since ntpd is already running or there are no servers
{:noreply, state}
end
@impl GenServer
def handle_info({:udp, socket, _, 0, data}, %{socket: socket} = state) do
report = :erlang.binary_to_term(data)
handle_ntpd_report(report, state)
end
def handle_info({:EXIT, _pid, :normal}, state) do
# Normal exits come from the ntpd daemon and calls to set the time.
# They're initiated by us, so they can be safely ignored.
{:noreply, state}
end
def handle_info({:EXIT, from, reason}, state) do
# Log abnormal exits to aide debugging.
Logger.info("NervesTime.Ntpd: unexpected :EXIT #{inspect(from)}/#{inspect(reason)}")
{:stop, reason, state}
end
defp prep_ntpd_start(state) do
path = socket_path()
# Cleanup the socket file in case of a restart
clean_start =
case File.rm(path) do
{:error, :enoent} ->
# This is the expected case. There's no stale socket file sitting around
true
:ok ->
Logger.warn("ntpd crash detected. Delaying next start...")
false
end
{:ok, socket} = :gen_udp.open(0, [:local, :binary, {:active, true}, {:ip, {:local, path}}])
%State{state | socket: socket, clean_start?: clean_start}
end
defp schedule_ntpd_start(%State{servers: []} = state) do
# Don't schedule ntpd to start if no servers configured.
Logger.warn("Not scheduling ntpd to start since no servers configured")
state
end
defp schedule_ntpd_start(state) do
delay = ntpd_restart_delay(state)
Process.send_after(self(), :start_ntpd, delay)
state
end
defp ntpd_restart_delay(%State{clean_start?: false}), do: @ntpd_restart_delay
defp ntpd_restart_delay(%State{clean_start?: true}), do: @ntpd_clean_start_delay
defp stop_ntpd(%State{daemon: nil} = state), do: state
defp stop_ntpd(%State{daemon: pid} = state) do
GenServer.stop(pid)
%State{state | daemon: nil, synchronized?: false}
end
defp handle_ntpd_report({"stratum", _freq_drift_ppm, _offset, stratum, _poll_interval}, state) do
{:noreply, %State{state | synchronized?: maybe_update_rtc(stratum)}}
end
defp handle_ntpd_report({"periodic", _freq_drift_ppm, _offset, stratum, _poll_interval}, state) do
{:noreply, %State{state | synchronized?: maybe_update_rtc(stratum)}}
end
defp handle_ntpd_report({"step", _freq_drift_ppm, _offset, _stratum, _poll_interval}, state) do
# Ignore
{:noreply, state}
end
defp handle_ntpd_report({"unsync", _freq_drift_ppm, _offset, _stratum, _poll_interval}, state) do
Logger.error("ntpd reports that it is unsynchronized; restarting")
# According to the Busybox ntpd docs, if you get an `unsync` notification, then
# you should restart ntpd to be safe. This is stated to be due to name resolution
# only being done at initialization.
new_state =
state
|> stop_ntpd()
|> schedule_ntpd_start()
{:noreply, new_state}
end
defp handle_ntpd_report(report, state) do
Logger.error("ntpd ignored unexpected report #{inspect(report)}")
{:noreply, state}
end
defp start_ntpd(%State{servers: []} = state), do: state
defp start_ntpd(%State{servers: servers} = state) do
ntpd_path = Application.get_env(:nerves_time, :ntpd, @default_ntpd_path)
ntpd_script_path = Application.app_dir(:nerves_time, ["priv", "ntpd_script"])
server_args = Enum.flat_map(servers, fn s -> ["-p", s] end)
# Add "-d" and enable log_output below for more verbose prints from ntpd.
args = ["-n", "-S", ntpd_script_path | server_args]
Logger.debug("Starting #{ntpd_path} with: #{inspect(args)}")
{:ok, pid} =
MuonTrap.Daemon.start_link(ntpd_path, args,
env: [{"SOCKET_PATH", socket_path()}],
stderr_to_stdout: true
# log_output: :debug
)
%{state | daemon: pid, synchronized?: false}
end
# Only update the RTC if synchronized. I.e., ignore stratum > 4
@spec maybe_update_rtc(integer()) :: boolean()
defp maybe_update_rtc(stratum)
when stratum <= 4 do
NervesTime.SystemTime.update_rtc()
true
end
defp maybe_update_rtc(_stratum), do: false
defp socket_path() do
Path.join(System.tmp_dir!(), "nerves_time_comm")
end
end
| 28.903915 | 100 | 0.667939 |
e800c23d49aedaa810e7afdd2e493cb4af0fcf1d | 41,272 | ex | Elixir | lib/ecto/query/builder.ex | revati/ecto | 75b4c4179724af8ae2582aa8adae21eeb111ec78 | [
"Apache-2.0"
] | null | null | null | lib/ecto/query/builder.ex | revati/ecto | 75b4c4179724af8ae2582aa8adae21eeb111ec78 | [
"Apache-2.0"
] | null | null | null | lib/ecto/query/builder.ex | revati/ecto | 75b4c4179724af8ae2582aa8adae21eeb111ec78 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Query.Builder do
@moduledoc false
alias Ecto.Query
  # Comparison operators and their arities; expanded below into
  # call_type/2 clauses that all return :boolean.
  @comparisons [
    is_nil: 1,
    ==: 2,
    !=: 2,
    <: 2,
    >: 2,
    <=: 2,
    >=: 2
  ]
  # Aggregates/window functions whose return type depends on their input
  # (expanded into call_type/2 clauses returning {:any, :any}).
  @dynamic_aggregates [
    max: 1,
    min: 1,
    first_value: 1,
    last_value: 1,
    nth_value: 2,
    lag: 3,
    lead: 3,
    lag: 2,
    lead: 2,
    lag: 1,
    lead: 1
  ]
  # Aggregates/window functions with a fixed return type, as {arity, type}.
  @static_aggregates [
    count: {0, :integer},
    count: {1, :integer},
    count: {2, :integer},
    avg: {1, :any},
    sum: {1, :any},
    row_number: {0, :integer},
    rank: {0, :integer},
    dense_rank: {0, :integer},
    percent_rank: {0, :any},
    cume_dist: {0, :any},
    ntile: {1, :integer}
  ]
  @typedoc """
  Quoted types store primitive types and types in the format
  {source, quoted}. The latter are handled directly in the planner,
  never forwarded to Ecto.Type.
  The Ecto.Type module concerns itself only with runtime types,
  which include all primitive types and custom user types. Also
  note custom user types do not show up during compilation time.
  """
  @type quoted_type :: Ecto.Type.primitive | {non_neg_integer, atom | Macro.t}
  @doc """
  Smart escapes a query expression and extracts interpolated values in
  a map.
  Everything that is a query expression will be escaped, interpolated
  expressions (`^foo`) will be moved to a map unescaped and replaced
  with `^index` in the query where index is a number indexing into the
  map.
  """
  @spec escape(Macro.t, quoted_type | {:in, quoted_type} | {:out, quoted_type}, {list, term},
               Keyword.t, Macro.Env.t | {Macro.Env.t, fun}) :: {Macro.t, {list, term}}
  def escape(expr, type, params_acc, vars, env)
  # var.x - where var is bound
  def escape({{:., _, [callee, field]}, _, []}, _type, params_acc, vars, _env) when is_atom(field) do
    {escape_field!(callee, field, vars), params_acc}
  end
  # field macro
  def escape({:field, _, [callee, field]}, _type, params_acc, vars, _env) do
    {escape_field!(callee, field, vars), params_acc}
  end
  # param interpolation
  # The interpolated value goes into the params list; the AST only keeps a
  # positional `^index` placeholder.
  def escape({:^, _, [arg]}, type, {params, acc}, _vars, _env) do
    expr = {:{}, [], [:^, [], [length(params)]]}
    params = [{arg, type} | params]
    {expr, {params, acc}}
  end
  # tagged types
  def escape({:type, _, [{:^, _, [arg]}, type]}, _type, {params, acc}, vars, env) do
    type = validate_type!(type, vars, env)
    expr = {:{}, [], [:type, [], [{:{}, [], [:^, [], [length(params)]]}, type]]}
    params = [{arg, type} | params]
    {expr, {params, acc}}
  end
  def escape({:type, _, [{{:., _, [{var, _, context}, field]}, _, []} = expr, type]}, _type, params_acc, vars, env)
      when is_atom(var) and is_atom(context) and is_atom(field) do
    escape_with_type(expr, type, params_acc, vars, env)
  end
  def escape({:type, _, [{:field, _, [_ | _]} = expr, type]}, _type, params_acc, vars, env) do
    escape_with_type(expr, type, params_acc, vars, env)
  end
  def escape({:type, _, [{math_op, _, [_, _]} = op_expr, type]}, _type, params_acc, vars, env)
      when math_op in ~w(+ - * /)a do
    escape_with_type(op_expr, type, params_acc, vars, env)
  end
  def escape({:type, _, [{fun, _, args} = expr, type]}, _type, params_acc, vars, env)
      when is_list(args) and fun in ~w(fragment avg count max min sum over filter)a do
    escape_with_type(expr, type, params_acc, vars, env)
  end
  # Fallback for type/2: try macro expansion once before giving up.
  def escape({:type, meta, [expr, type]}, given_type, params_acc, vars, env) do
    case Macro.expand_once(expr, get_env(env)) do
      ^expr ->
        error! """
        the first argument of type/2 must be one of:
          * interpolations, such as ^value
          * fields, such as p.foo or field(p)
          * fragments, such fragment("foo(?)", value)
          * an arithmetic expression (+, -, *, /)
          * an aggregation or window expression (avg, count, min, max, sum, over, filter)
        Got: #{Macro.to_string(expr)}
        """
      expanded ->
        escape({:type, meta, [expanded, type]}, given_type, params_acc, vars, env)
    end
  end
  # fragments
  def escape({:fragment, _, [query]}, _type, params_acc, vars, env) when is_list(query) do
    {escaped, params_acc} =
      Enum.map_reduce(query, params_acc, &escape_fragment(&1, :any, &2, vars, env))
    {{:{}, [], [:fragment, [], [escaped]]}, params_acc}
  end
  # fragment(^var) defers validation of the fragment to runtime (fragment!/1).
  def escape({:fragment, _, [{:^, _, [var]} = _expr]}, _type, params_acc, _vars, _env) do
    expr = quote do
      Ecto.Query.Builder.fragment!(unquote(var))
    end
    {{:{}, [], [:fragment, [], [expr]]}, params_acc}
  end
  def escape({:fragment, _, [query | frags]}, _type, params_acc, vars, env) do
    pieces = expand_and_split_fragment(query, env)
    if length(pieces) != length(frags) + 1 do
      error! "fragment(...) expects extra arguments in the same amount of question marks in string. " <>
               "It received #{length(frags)} extra argument(s) but expected #{length(pieces) - 1}"
    end
    {frags, params_acc} = Enum.map_reduce(frags, params_acc, &escape(&1, :any, &2, vars, env))
    {{:{}, [], [:fragment, [], merge_fragments(pieces, frags)]}, params_acc}
  end
  # interval
  # from_now/ago are rewritten into datetime_add on the current UTC time.
  def escape({:from_now, meta, [count, interval]}, type, params_acc, vars, env) do
    utc = quote do: ^DateTime.utc_now()
    escape({:datetime_add, meta, [utc, count, interval]}, type, params_acc, vars, env)
  end
  def escape({:ago, meta, [count, interval]}, type, params_acc, vars, env) do
    utc = quote do: ^DateTime.utc_now()
    count =
      case count do
        {:^, meta, [value]} ->
          negate = quote do: Ecto.Query.Builder.negate!(unquote(value))
          {:^, meta, [negate]}
        value ->
          {:-, [], [value]}
      end
    escape({:datetime_add, meta, [utc, count, interval]}, type, params_acc, vars, env)
  end
  def escape({:datetime_add, _, [datetime, count, interval]} = expr, type, params_acc, vars, env) do
    assert_type!(expr, type, {:param, :any_datetime})
    {datetime, params_acc} = escape(datetime, {:param, :any_datetime}, params_acc, vars, env)
    {count, interval, params_acc} = escape_interval(count, interval, params_acc, vars, env)
    {{:{}, [], [:datetime_add, [], [datetime, count, interval]]}, params_acc}
  end
  def escape({:date_add, _, [date, count, interval]} = expr, type, params_acc, vars, env) do
    assert_type!(expr, type, :date)
    {date, params_acc} = escape(date, :date, params_acc, vars, env)
    {count, interval, params_acc} = escape_interval(count, interval, params_acc, vars, env)
    {{:{}, [], [:date_add, [], [date, count, interval]]}, params_acc}
  end
  # json
  def escape({:json_extract_path, _, [field, path]} = expr, type, params_acc, vars, env) do
    case field do
      {{:., _, _}, _, _} ->
        path = escape_json_path(path)
        {field, params_acc} = escape(field, type, params_acc, vars, env)
        {{:{}, [], [:json_extract_path, [], [field, path]]}, params_acc}
      _ ->
        error!("`#{Macro.to_string(expr)}` is not a valid query expression")
    end
  end
  # p.field["a"][0] access syntax is rewritten into json_extract_path.
  def escape({{:., meta, [Access, :get]}, _, [left, _]} = expr, type, params_acc, vars, env) do
    case left do
      {{:., _, _}, _, _} ->
        {expr, path} = parse_access_get(expr, [])
        escape({:json_extract_path, meta, [expr, path]}, type, params_acc, vars, env)
      _ ->
        error!("`#{Macro.to_string(expr)}` is not a valid query expression")
    end
  end
  # sigils
  def escape({name, _, [_, []]} = sigil, type, params_acc, vars, _env)
      when name in ~w(sigil_s sigil_S sigil_w sigil_W)a do
    {literal(sigil, type, vars), params_acc}
  end
  # lists
  def escape(list, type, params_acc, vars, env) when is_list(list) do
    if Enum.all?(list, &is_binary(&1) or is_number(&1) or is_boolean(&1)) do
      {literal(list, type, vars), params_acc}
    else
      fun =
        case type do
          {:array, inner_type} ->
            &escape(&1, inner_type, &2, vars, env)
          _ ->
            # In case we don't have an array nor a literal at compile-time,
            # such as p.links == [^value], we don't do any casting nor validation.
            # We may want to tackle this if the expression above is ever used.
            &escape(&1, :any, &2, vars, env)
        end
      Enum.map_reduce(list, params_acc, fun)
    end
  end
  # literals
  # Binaries: only literal segments are allowed, so that no dynamic value
  # can sneak into the query unescaped.
  def escape({:<<>>, _, args} = expr, type, params_acc, vars, _env) do
    valid? = Enum.all?(args, fn
      {:::, _, [left, _]} -> is_integer(left) or is_binary(left)
      left -> is_integer(left) or is_binary(left)
    end)
    unless valid? do
      error! "`#{Macro.to_string(expr)}` is not a valid query expression. " <>
               "Only literal binaries and strings are allowed, " <>
               "dynamic values need to be explicitly interpolated in queries with ^"
    end
    {literal(expr, type, vars), params_acc}
  end
  def escape({:-, _, [number]}, type, params_acc, vars, _env) when is_number(number),
    do: {literal(-number, type, vars), params_acc}
  def escape(number, type, params_acc, vars, _env) when is_number(number),
    do: {literal(number, type, vars), params_acc}
  def escape(binary, type, params_acc, vars, _env) when is_binary(binary),
    do: {literal(binary, type, vars), params_acc}
  def escape(nil, _type, params_acc, _vars, _env),
    do: {nil, params_acc}
  def escape(atom, type, params_acc, vars, _env) when is_atom(atom),
    do: {literal(atom, type, vars), params_acc}
  # negate any expression
  def escape({:-, meta, arg}, type, params_acc, vars, env) do
    {escaped_arg, params_acc} = escape(arg, type, params_acc, vars, env)
    expr = {:{}, [], [:-, meta, escaped_arg]}
    {expr, params_acc}
  end
  # comparison operators
  # Each side is escaped with the other side's quoted type so interpolated
  # values can be cast against the field they are compared to.
  def escape({comp_op, _, [left, right]} = expr, type, params_acc, vars, env)
      when comp_op in ~w(== != < > <= >=)a do
    assert_type!(expr, type, :boolean)
    if is_nil(left) or is_nil(right) do
      error! "comparison with nil is forbidden as it is unsafe. " <>
               "If you want to check if a value is nil, use is_nil/1 instead"
    end
    ltype = quoted_type(right, vars)
    rtype = quoted_type(left, vars)
    {left, params_acc} = escape(left, ltype, params_acc, vars, env)
    {right, params_acc} = escape(right, rtype, params_acc, vars, env)
    {params, acc} = params_acc
    {{:{}, [], [comp_op, [], [left, right]]},
     {params |> wrap_nil(left) |> wrap_nil(right), acc}}
  end
  # mathematical operators
  def escape({math_op, _, [left, right]}, type, params_acc, vars, env)
      when math_op in ~w(+ - * /)a do
    {left, params_acc} = escape(left, type, params_acc, vars, env)
    {right, params_acc} = escape(right, type, params_acc, vars, env)
    {{:{}, [], [math_op, [], [left, right]]}, params_acc}
  end
  # in operator
  # `x in [...]` or `x in ~w(...)` - right side is a literal list.
  def escape({:in, _, [left, right]} = expr, type, params_acc, vars, env)
      when is_list(right)
      when is_tuple(right) and elem(right, 0) in ~w(sigil_w sigil_W)a do
    assert_type!(expr, type, :boolean)
    {:array, ltype} = quoted_type(right, vars)
    rtype = {:array, quoted_type(left, vars)}
    {left, params_acc} = escape(left, ltype, params_acc, vars, env)
    {right, params_acc} = escape(right, rtype, params_acc, vars, env)
    {{:{}, [], [:in, [], [left, right]]}, params_acc}
  end
  # `x in expr` - right side may be an interpolation or a subquery.
  def escape({:in, _, [left, right]} = expr, type, params_acc, vars, env) do
    assert_type!(expr, type, :boolean)
    ltype = {:out, quoted_type(right, vars)}
    rtype = {:in, quoted_type(left, vars)}
    {left, params_acc} = escape(left, ltype, params_acc, vars, env)
    {right, params_acc} = escape_subquery(right, rtype, params_acc, vars, env)
    # Remove any type wrapper from the right side
    right =
      case right do
        {:{}, [], [:type, [], [right, _]]} -> right
        _ -> right
      end
    {{:{}, [], [:in, [], [left, right]]}, params_acc}
  end
  # count(x, :distinct)
  def escape({:count, _, [arg, :distinct]}, type, params_acc, vars, env) do
    {arg, params_acc} = escape(arg, type, params_acc, vars, env)
    expr = {:{}, [], [:count, [], [arg, :distinct]]}
    {expr, params_acc}
  end
  # filter/1 without a condition is just the aggregate itself.
  def escape({:filter, _, [aggregate]}, type, params_acc, vars, env) do
    escape(aggregate, type, params_acc, vars, env)
  end
  def escape({:filter, _, [aggregate, filter_expr]}, type, params_acc, vars, env) do
    {aggregate, params_acc} = escape(aggregate, type, params_acc, vars, env)
    {filter_expr, params_acc} = escape(filter_expr, :boolean, params_acc, vars, env)
    {{:{}, [], [:filter, [], [aggregate, filter_expr]]}, params_acc}
  end
  def escape({:coalesce, _, [left, right]}, type, params_acc, vars, env) do
    {left, params_acc} = escape(left, type, params_acc, vars, env)
    {right, params_acc} = escape(right, type, params_acc, vars, env)
    {{:{}, [], [:coalesce, [], [left, right]]}, params_acc}
  end
  # over/1-2: a window function applied over a window name or inline window.
  def escape({:over, _, [{agg_name, _, agg_args} | over_args]}, type, params_acc, vars, env) do
    aggregate = {agg_name, [], agg_args || []}
    {aggregate, params_acc} = escape_window_function(aggregate, type, params_acc, vars, env)
    {window, params_acc} = escape_window_description(over_args, params_acc, vars, env)
    {{:{}, [], [:over, [], [aggregate, window]]}, params_acc}
  end
  # all/any/exists subquery quantifiers
  def escape({quantifier, meta, [subquery]}, type, params_acc, vars, env) when quantifier in [:all, :any, :exists] do
    {subquery, params_acc} = escape_subquery({:subquery, meta, [subquery]}, type, params_acc, vars, env)
    {{:{}, [], [quantifier, [], [subquery]]}, params_acc}
  end
  def escape({:=, _, _} = expr, _type, _params_acc, _vars, _env) do
    error! "`#{Macro.to_string(expr)}` is not a valid query expression. " <>
             "The match operator is not supported: `=`. " <>
             "Did you mean to use `==` instead?"
  end
  def escape({op, _, _}, _type, _params_acc, _vars, _env) when op in ~w(|| && !)a do
    error! "short-circuit operators are not supported: `#{op}`. " <>
             "Instead use boolean operators: `and`, `or`, and `not`"
  end
  # Tuple
  # Two-element tuples are normalized to the general {:{}, _, list} form.
  def escape({left, right}, type, params_acc, vars, env) do
    escape({:{}, [], [left, right]}, type, params_acc, vars, env)
  end
  # Tuple
  def escape({:{}, _, list}, {:tuple, types}, params_acc, vars, env) do
    if Enum.count(list) == Enum.count(types) do
      {list, params_acc} =
        list
        |> Enum.zip(types)
        |> Enum.map_reduce(params_acc, fn {expr, type}, params_acc ->
          escape(expr, type, params_acc, vars, env)
        end)
      expr = {:{}, [], [:{}, [], list]}
      {expr, params_acc}
    else
      escape({:{}, [], list}, :any, params_acc, vars, env)
    end
  end
  # Tuple
  def escape({:{}, _, _}, _, _, _, _) do
    error! "Tuples can only be used in comparisons with literal tuples of the same size"
  end
  # Other functions - no type casting
  def escape({name, _, args} = expr, type, params_acc, vars, env) when is_atom(name) and is_list(args) do
    case call_type(name, length(args)) do
      {in_type, out_type} ->
        assert_type!(expr, type, out_type)
        escape_call(expr, in_type, params_acc, vars, env)
      nil ->
        try_expansion(expr, type, params_acc, vars, env)
    end
  end
  # Finally handle vars
  def escape({var, _, context}, _type, params_acc, vars, _env) when is_atom(var) and is_atom(context) do
    {escape_var!(var, vars), params_acc}
  end
  # Raise nice error messages for fun calls.
  def escape({fun, _, args} = other, _type, _params_acc, _vars, _env)
      when is_atom(fun) and is_list(args) do
    error! """
    `#{Macro.to_string(other)}` is not a valid query expression. \
    If you are trying to invoke a function that is not supported by Ecto, \
    you can use fragments:
        fragment("some_function(?, ?, ?)", m.some_field, 1)
    See Ecto.Query.API to learn more about the supported functions and \
    Ecto.Query.API.fragment/1 to learn more about fragments.
    """
  end
  # Raise nice error message for remote calls
  def escape({{:., _, [mod, fun]}, _, args} = other, _type, _params_acc, _vars, _env)
      when is_atom(fun) do
    fun_arity = "#{fun}/#{length(args)}"
    error! """
    `#{Macro.to_string(other)}` is not a valid query expression. \
    If you want to invoke #{Macro.to_string(mod)}.#{fun_arity} in \
    a query, make sure that the module #{Macro.to_string(mod)} \
    is required and that #{fun_arity} is a macro
    """
  end
  # For everything else we raise
  def escape(other, _type, _params_acc, _vars, _env) do
    error! "`#{Macro.to_string(other)}` is not a valid query expression"
  end
  # Wraps an escaped expression in a type/2 node. An interpolated type
  # (^type) is kept as-is for runtime validation; a literal type is
  # validated at compile time.
  defp escape_with_type(expr, {:^, _, [type]}, params_acc, vars, env) do
    {expr, params_acc} = escape(expr, :any, params_acc, vars, env)
    {{:{}, [], [:type, [], [expr, type]]}, params_acc}
  end
  defp escape_with_type(expr, type, params_acc, vars, env) do
    type = validate_type!(type, vars, env)
    {expr, params_acc} = escape(expr, type, params_acc, vars, env)
    {{:{}, [], [:type, [], [expr, Macro.escape(type)]]}, params_acc}
  end
  # A subquery is collected into the subqueries accumulator and replaced by a
  # {:subquery, index} placeholder; anything else goes through normal escape.
  defp escape_subquery({:subquery, _, [expr]}, _, {params, subqueries}, _vars, _env) do
    subquery = quote(do: Ecto.Query.subquery(unquote(expr)))
    index = length(subqueries)
    expr = {:subquery, index} # used both in ast and in parameters, as a placeholder.
    {expr, {[expr | params], [subquery | subqueries]}}
  end
  defp escape_subquery(expr, type, params, vars, env) do
    escape(expr, type, params, vars, env)
  end
  # For a `^param` used in a comparison, wraps the corresponding params entry
  # in not_nil!/1 so a nil value raises at runtime instead of silently
  # comparing. Note params is built in reverse, hence length - ix - 1.
  defp wrap_nil(params, {:{}, _, [:^, _, [ix]]}), do: wrap_nil(params, length(params) - ix - 1, [])
  defp wrap_nil(params, _other), do: params
  defp wrap_nil([{val, type} | params], 0, acc) do
    val = quote do: Ecto.Query.Builder.not_nil!(unquote(val))
    Enum.reverse(acc, [{val, type} | params])
  end
  defp wrap_nil([pair | params], i, acc) do
    wrap_nil(params, i - 1, [pair | acc])
  end
  # Expands the fragment string at compile time and splits it on `?`
  # placeholders. Anything that does not expand to a binary is rejected
  # (guards against interpolated strings / SQL injection).
  defp expand_and_split_fragment(query, env) do
    case Macro.expand(query, get_env(env)) do
      binary when is_binary(binary) ->
        split_fragment(binary, "")
      _ ->
        error! bad_fragment_message(Macro.to_string(query))
    end
  end
  defp bad_fragment_message(arg) do
    "to prevent SQL injection attacks, fragment(...) does not allow strings " <>
      "to be interpolated as the first argument via the `^` operator, got: `#{arg}`"
  end
  # Splits a fragment string into the raw pieces between `?` placeholders.
  # `\?` is an escaped literal question mark and does not split.
  defp split_fragment(<<>>, consumed),
    do: [consumed]
  defp split_fragment(<<??, rest :: binary>>, consumed),
    do: [consumed | split_fragment(rest, "")]
  defp split_fragment(<<?\\, ??, rest :: binary>>, consumed),
    do: split_fragment(rest, consumed <> <<??>>)
  defp split_fragment(<<first :: utf8, rest :: binary>>, consumed),
    do: split_fragment(rest, consumed <> <<first :: utf8>>)
@doc "Returns fragment pieces, given a fragment string and arguments."
def fragment_pieces(frag, args) do
frag
|> split_fragment("")
|> merge_fragments(args)
end
  # over/1 with no window - empty description.
  defp escape_window_description([], params_acc, _vars, _env),
    do: {[], params_acc}
  # over/2 with a named window defined via Ecto.Query.windows/3.
  defp escape_window_description([window_name], params_acc, _vars, _env) when is_atom(window_name),
    do: {window_name, params_acc}
  # over/2 with an inline keyword window definition; interpolations at the
  # root of the definition are not allowed here.
  defp escape_window_description([kw], params_acc, vars, env) do
    case Ecto.Query.Builder.Windows.escape(kw, params_acc, vars, env) do
      {runtime, [], params_acc} ->
        {runtime, params_acc}
      {_, [{key, _} | _], _} ->
        error! "windows definitions given to over/2 do not allow interpolations at the root of " <>
                 "`#{key}`. Please use Ecto.Query.windows/3 to explicitly define a window instead"
    end
  end
  defp escape_window_function(expr, type, params_acc, vars, env) do
    expr
    |> validate_window_function!(env)
    |> escape(type, params_acc, vars, env)
  end
  defp validate_window_function!({:fragment, _, _} = expr, _env), do: expr
  # Accepts any function listed in Ecto.Query.WindowAPI; otherwise tries a
  # single macro expansion before raising.
  defp validate_window_function!({agg, _, args} = expr, env)
       when is_atom(agg) and is_list(args) do
    if Code.ensure_loaded?(Ecto.Query.WindowAPI) and
         function_exported?(Ecto.Query.WindowAPI, agg, length(args)) do
      expr
    else
      case Macro.expand_once(expr, get_env(env)) do
        ^expr ->
          error! "unknown window function #{agg}/#{length(args)}. " <>
                   "See Ecto.Query.WindowAPI for all available functions"
        expr ->
          validate_window_function!(expr, env)
      end
    end
  end
  defp validate_window_function!(expr, _), do: expr
defp escape_call({name, _, args}, type, params_acc, vars, env) do
{args, params_acc} = Enum.map_reduce(args, params_acc, &escape(&1, type, &2, vars, env))
expr = {:{}, [], [name, [], args]}
{expr, params_acc}
end
defp escape_field!({var, _, context}, field, vars)
when is_atom(var) and is_atom(context) do
var = escape_var!(var, vars)
field = quoted_field!(field)
dot = {:{}, [], [:., [], [var, field]]}
{:{}, [], [dot, [], []]}
end
defp escape_field!({kind, _, [atom]}, field, _vars)
when kind in [:as, :parent_as] and is_atom(atom) do
as = {:{}, [], [kind, [], [atom]]}
field = quoted_field!(field)
dot = {:{}, [], [:., [], [as, field]]}
{:{}, [], [dot, [], []]}
end
defp escape_field!(expr, field, _vars) do
error!("""
cannot fetch field `#{field}` from `#{Macro.to_string(expr)}`. Can only fetch fields from:
* sources, such as `p` in `from p in Post`
* named bindings, such as `as(:post)` in `from Post, as: :post`
* parent named bindings, such as `parent_as(:post)` in a subquery
""")
end
defp escape_interval(count, interval, params_acc, vars, env) do
type =
cond do
is_float(count) -> :float
is_integer(count) -> :integer
true -> :decimal
end
{count, params_acc} = escape(count, type, params_acc, vars, env)
{count, quoted_interval!(interval), params_acc}
end
defp escape_fragment({key, [{_, _}|_] = exprs}, type, params_acc, vars, env) when is_atom(key) do
{escaped, params_acc} = Enum.map_reduce(exprs, params_acc, &escape_fragment(&1, type, &2, vars, env))
{{key, escaped}, params_acc}
end
defp escape_fragment({key, expr}, type, params_acc, vars, env) when is_atom(key) do
{escaped, params_acc} = escape(expr, type, params_acc, vars, env)
{{key, escaped}, params_acc}
end
defp escape_fragment({key, _expr}, _type, _params_acc, _vars, _env) do
error! "fragment(...) with keywords accepts only atoms as keys, got `#{Macro.to_string(key)}`"
end
# Interleaves the raw SQL chunks of a fragment with the escaped
# interpolation expressions. The raw list is always exactly one element
# longer than the expression list, so the final clause consumes the
# trailing raw chunk.
defp merge_fragments([raw | raw_rest], [expr | expr_rest]) do
  [{:raw, raw}, {:expr, expr} | merge_fragments(raw_rest, expr_rest)]
end

defp merge_fragments([raw], []) do
  [{:raw, raw}]
end
# call_type/2 maps a known call to its `{argument_type, return_type}`
# pair. The first three groups of clauses are generated from module
# attributes listing the aggregates and comparison operators.
for {agg, arity} <- @dynamic_aggregates do
  defp call_type(unquote(agg), unquote(arity)), do: {:any, :any}
end

for {agg, {arity, return}} <- @static_aggregates do
  defp call_type(unquote(agg), unquote(arity)), do: {:any, unquote(return)}
end

for {comp, arity} <- @comparisons do
  defp call_type(unquote(comp), unquote(arity)), do: {:any, :boolean}
end

defp call_type(:or, 2),    do: {:boolean, :boolean}
defp call_type(:and, 2),   do: {:boolean, :boolean}
defp call_type(:not, 1),   do: {:boolean, :boolean}
defp call_type(:like, 2),  do: {:string, :boolean}
defp call_type(:ilike, 2), do: {:string, :boolean}
# Unknown calls carry no static type information.
defp call_type(_, _), do: nil

# Raises unless `actual` matches the expected `type`. Non-primitive
# (custom) types are not checked at build time.
defp assert_type!(expr, type, actual) do
  cond do
    not is_atom(type) and not Ecto.Type.primitive?(type) ->
      :ok
    Ecto.Type.match?(type, actual) ->
      :ok
    true ->
      error! "expression `#{Macro.to_string(expr)}` does not type check. " <>
               "It returns a value of type #{inspect actual} but a value of " <>
               "type #{inspect type} is expected"
  end
end

@doc """
Validates the type with the given vars.
"""
# A composite type validates its inner type; `^type` interpolations are
# taken as-is; aliases are expanded; `var.field` / `field(var, f)` become
# `{bind_index, field}` pairs.
def validate_type!({composite, type}, vars, env),
  do: {composite, validate_type!(type, vars, env)}
def validate_type!({:^, _, [type]}, _vars, _env),
  do: type
def validate_type!({:__aliases__, _, _} = type, _vars, env),
  do: Macro.expand(type, get_env(env))
def validate_type!(type, _vars, _env) when is_atom(type),
  do: type
def validate_type!({{:., _, [{var, _, context}, field]}, _, []}, vars, _env)
    when is_atom(var) and is_atom(context) and is_atom(field),
    do: {find_var!(var, vars), field}
def validate_type!({:field, _, [{var, _, context}, field]}, vars, _env)
    when is_atom(var) and is_atom(context) and is_atom(field),
    do: {find_var!(var, vars), field}
def validate_type!(type, _vars, _env) do
  error! "type/2 expects an alias, atom or source.field as second argument, got: `#{Macro.to_string(type)}`"
end

# Types whose literals are always wrapped in an Ecto.Query.Tagged struct.
@always_tagged [:binary]

# Wraps a literal in Ecto.Query.Tagged when its inferred type must be
# carried along (binaries) or when the expected type differs from the
# inferred one; otherwise returns the literal untouched.
defp literal(value, expected, vars),
  do: do_literal(value, expected, quoted_type(value, vars))

defp do_literal(value, _, current) when current in @always_tagged,
  do: {:%, [], [Ecto.Query.Tagged, {:%{}, [], [value: value, type: current]}]}
defp do_literal(value, :any, _current),
  do: value
defp do_literal(value, expected, expected),
  do: value
defp do_literal(value, expected, _current),
  do: {:%, [], [Ecto.Query.Tagged, {:%{}, [], [value: value, type: expected]}]}
@doc """
Escape the params entries list.

Entries are accumulated in reverse order while the query expression is
escaped, so emitting them is a single reversal.
"""
@spec escape_params(list()) :: list()
def escape_params(entries) do
  Enum.reverse(entries)
end
@doc """
Escapes a variable according to the given binds.

A escaped variable is represented internally as
`&0`, `&1` and so on.
"""
@spec escape_var!(atom, Keyword.t) :: Macro.t
def escape_var!(var, vars) do
  # Produces the escaped AST for `&ix`, where `ix` is the variable's
  # position in the binding list (raises if the var is unbound).
  {:{}, [], [:&, [], [find_var!(var, vars)]]}
end
@doc """
Escapes a list of bindings as a list of atoms.

Only variables or `{:atom, value}` tuples are allowed in the `bindings` list,
otherwise an `Ecto.Query.CompileError` is raised.

## Examples

    iex> escape_binding(%Ecto.Query{}, quote(do: [x, y, z]), __ENV__)
    {%Ecto.Query{}, [x: 0, y: 1, z: 2]}

    iex> escape_binding(%Ecto.Query{}, quote(do: [{x, 0}, {z, 2}]), __ENV__)
    {%Ecto.Query{}, [x: 0, z: 2]}

    iex> escape_binding(%Ecto.Query{}, quote(do: [x, y, x]), __ENV__)
    ** (Ecto.Query.CompileError) variable `x` is bound twice

    iex> escape_binding(%Ecto.Query{}, quote(do: [a, b, :foo]), __ENV__)
    ** (Ecto.Query.CompileError) binding list should contain only variables or `{as, var}` tuples, got: :foo
"""
@spec escape_binding(Macro.t, list, Macro.Env.t) :: {Macro.t, Keyword.t}
def escape_binding(query, binding, _env) when is_list(binding) do
  vars = binding |> Enum.with_index |> Enum.map(&escape_bind/1)
  assert_no_duplicate_binding!(vars)

  # Positional binds come first; named `{as, var}` binds must be the tail.
  # FIX: the split predicate had been corrupted by HTML-entity decoding
  # (`&not …` had become `¬ …`, which does not compile); restored here as
  # an explicit capture.
  {positional_vars, named_vars} = Enum.split_while(vars, &(not named_bind?(&1)))
  assert_named_binds_in_tail!(named_vars, binding)

  {query, positional_binds} = calculate_positional_binds(query, positional_vars)
  {query, named_binds} = calculate_named_binds(query, named_vars)
  {query, positional_binds ++ named_binds}
end

def escape_binding(_query, bind, _env) do
  error! "binding should be list of variables and `{as, var}` tuples " <>
           "at the end, got: #{Macro.to_string(bind)}"
end
# Returns true for `{:named, var, name}` entries produced by escape_bind/1.
defp named_bind?({kind, _, _}), do: kind == :named

# Named binds (`{as, var}`) are only allowed as the trailing section of
# the binding list; raises otherwise. `named_vars` is the tail produced
# by split_while in escape_binding/3.
defp assert_named_binds_in_tail!(named_vars, binding) do
  if Enum.all?(named_vars, &named_bind?/1) do
    :ok
  else
    error! "named binds in the form of `{as, var}` tuples must be at the end " <>
             "of the binding list, got: #{Macro.to_string(binding)}"
  end
end

# Raises if the same variable (other than `_`) is bound more than once.
defp assert_no_duplicate_binding!(vars) do
  bound_vars = for {_, var, _} <- vars, var != :_, do: var

  case bound_vars -- Enum.uniq(bound_vars) do
    [] -> :ok
    [var | _] -> error! "variable `#{var}` is bound twice"
  end
end

# Computes `{var, index}` pairs for positional binds. When a `...`
# placeholder is present, the binds after it are numbered from the end,
# relative to the query's bind count computed at runtime via
# count_binds/1 (hence the quoted `escape_count + n` expressions).
defp calculate_positional_binds(query, vars) do
  case Enum.split_while(vars, &elem(&1, 1) != :...) do
    {vars, []} ->
      vars = for {:pos, var, count} <- vars, do: {var, count}
      {query, vars}
    {vars, [_ | tail]} ->
      query =
        quote do
          query = Ecto.Queryable.to_query(unquote(query))
          escape_count = Ecto.Query.Builder.count_binds(query)
          query
        end
      tail =
        tail
        |> Enum.with_index(-length(tail))
        |> Enum.map(fn {{:pos, k, _}, count} -> {k, quote(do: escape_count + unquote(count))} end)
      vars = for {:pos, var, count} <- vars, do: {var, count}
      {query, vars ++ tail}
  end
end

# Computes `{var, index}` pairs for named binds; each index is resolved
# at runtime by looking the alias up in the query via count_alias!/2.
def calculate_named_binds(query, []), do: {query, []}
def calculate_named_binds(query, vars) do
  query =
    quote do
      query = Ecto.Queryable.to_query(unquote(query))
    end
  vars =
    for {:named, key, name} <- vars do
      {key,
       quote do
         Ecto.Query.Builder.count_alias!(query, unquote(name))
       end}
    end
  {query, vars}
end

@doc """
Count the alias for the given query.
"""
def count_alias!(%{aliases: aliases} = query, name) do
  case aliases do
    %{^name => ix} ->
      ix
    %{} ->
      raise Ecto.QueryError, message: "unknown bind name `#{inspect name}`", query: query
  end
end

# Normalizes a single binding-list entry into `{:pos, var, index}` or
# `{:named, var, name}`; raises on anything else. The `^expr` clause
# supports runtime-interpolated alias names.
defp escape_bind({{{var, _, context}, ix}, _}) when is_atom(var) and is_atom(context),
  do: {:pos, var, ix}
defp escape_bind({{var, _, context}, ix}) when is_atom(var) and is_atom(context),
  do: {:pos, var, ix}
defp escape_bind({{name, {var, _, context}}, _ix}) when is_atom(name) and is_atom(var) and is_atom(context),
  do: {:named, var, name}
defp escape_bind({{{:^, _, [expr]}, {var, _, context}}, _ix}) when is_atom(var) and is_atom(context),
  do: {:named, var, expr}
defp escape_bind({bind, _ix}),
  do: error!("binding list should contain only variables or " <>
               "`{as, var}` tuples, got: #{Macro.to_string(bind)}")
# Tries to macro-expand an unrecognized expression once and escape the
# result via `fun`; raises a helpful error when expansion makes no
# progress. The `{env, fun}` form lets callers supply a custom escaper.
defp try_expansion(expr, type, params, vars, %Macro.Env{} = env) do
  try_expansion(expr, type, params, vars, {env, &escape/5})
end

defp try_expansion(expr, type, params, vars, {env, fun}) do
  case Macro.expand_once(expr, env) do
    ^expr ->
      error! """
      `#{Macro.to_string(expr)}` is not a valid query expression.
      * If you intended to call a database function, please check the documentation
      for Ecto.Query to see the supported database expressions
      * If you intended to call an Elixir function or introduce a value,
      you need to explicitly interpolate it with ^
      """
    expanded ->
      fun.(expanded, type, params, vars, env)
  end
end
@doc """
Finds the index value for the given var in vars or raises.
"""
def find_var!(var, vars) do
  # Bind indexes are never nil/false, so `||` safely detects a miss.
  index = vars[var]
  index || error! "unbound variable `#{var}` in query. If you are attempting to interpolate a value, use ^var"
end
@doc """
Checks if the field is an atom at compilation time or
delegate the check to runtime for interpolation.
"""
# Interpolated fields (`^expr`) are validated at runtime by field!/1.
def quoted_field!({:^, _, [expr]}),
  do: quote(do: Ecto.Query.Builder.field!(unquote(expr)))
def quoted_field!(atom) when is_atom(atom),
  do: atom
def quoted_field!(other),
  do: error!("expected literal atom or interpolated value in field/2, got: `#{Macro.to_string(other)}`")
@doc """
Called by escaper at runtime to verify that value is an atom.
"""
def field!(atom) when is_atom(atom) do
  atom
end

def field!(other) do
  error!("expected atom in field/2, got: `#{inspect other}`")
end
# Escapes a literal JSON path, validating each element at compile time
# when possible and deferring `^`-interpolated elements to runtime.
#
# FIX: the mapper had been corrupted by HTML-entity decoding —
# `&quoted_json_path_element!/1` had become `"ed_json_path_element!/1`,
# which does not compile. Restored the capture below.
defp escape_json_path(path) when is_list(path) do
  Enum.map(path, &quoted_json_path_element!/1)
end

defp escape_json_path(other) do
  error!("expected JSON path to be compile-time list, got: `#{Macro.to_string(other)}`")
end

# Literal strings and integers pass through; `^expr` interpolations are
# checked at runtime by json_path_element!/1; anything else raises.
defp quoted_json_path_element!({:^, _, [expr]}),
  do: quote(do: Ecto.Query.Builder.json_path_element!(unquote(expr)))
defp quoted_json_path_element!(binary) when is_binary(binary),
  do: binary
defp quoted_json_path_element!(integer) when is_integer(integer),
  do: integer
defp quoted_json_path_element!(other),
  do:
    error!(
      "expected JSON path to contain literal strings, literal integers, or interpolated values, got: " <>
        "`#{Macro.to_string(other)}`"
    )
@doc """
Called by escaper at runtime to verify that value is a string or an integer.
"""
def json_path_element!(element) when is_binary(element) or is_integer(element) do
  element
end

def json_path_element!(other) do
  error!("expected string or integer in json_extract_path/2, got: `#{inspect other}`")
end
@doc """
Called by escaper at runtime to verify that a value is not nil.
"""
def not_nil!(value) when is_nil(value) do
  raise ArgumentError,
        "comparison with nil is forbidden as it is unsafe. " <>
          "If you want to check if a value is nil, use is_nil/1 instead"
end

def not_nil!(value), do: value
@doc """
Checks if the field is a valid interval at compilation time or
delegate the check to runtime for interpolation.
"""
def quoted_interval!({:^, _, [expr]}),
  do: quote(do: Ecto.Query.Builder.interval!(unquote(expr)))
def quoted_interval!(other),
  do: interval!(other)

@doc """
Called by escaper at runtime to verify keywords.
"""
def fragment!(kw) do
  if Keyword.keyword?(kw) do
    kw
  else
    raise ArgumentError, bad_fragment_message(inspect(kw))
  end
end

@doc """
Called by escaper at runtime to verify that value is a valid interval.
"""
# The interval units accepted by datetime_add/date_add.
@interval ~w(year month week day hour minute second millisecond microsecond)
def interval!(interval) when interval in @interval,
  do: interval
def interval!(other_string) when is_binary(other_string),
  do: error!("invalid interval: `#{inspect other_string}` (expected one of #{Enum.join(@interval, ", ")})")
def interval!(not_string),
  do: error!("invalid interval: `#{inspect not_string}` (expected a string)")

@doc """
Negates the given number.
"""
# TODO: Remove check when we depend on decimal v2.0
# Decimal v2 renamed `minus/1` to `negate/1`; pick whichever exists at
# compile time.
if Code.ensure_loaded?(Decimal) and function_exported?(Decimal, :negate, 1) do
  def negate!(%Decimal{} = decimal), do: Decimal.negate(decimal)
else
  def negate!(%Decimal{} = decimal), do: Decimal.minus(decimal)
end
def negate!(number) when is_number(number), do: -number
@doc """
Returns the type of an expression at build time.
"""
@spec quoted_type(Macro.t, Keyword.t) :: quoted_type
# Fields
def quoted_type({{:., _, [{var, _, context}, field]}, _, []}, vars)
    when is_atom(var) and is_atom(context) and is_atom(field),
    do: {find_var!(var, vars), field}
def quoted_type({:field, _, [{var, _, context}, field]}, vars)
    when is_atom(var) and is_atom(context) and is_atom(field),
    do: {find_var!(var, vars), field}
# Unquoting code here means the second argument of field will
# always be unquoted twice, one by the type checking and another
# in the query itself. We are assuming this is not an issue
# as the solution is somewhat complicated.
def quoted_type({:field, _, [{var, _, context}, {:^, _, [code]}]}, vars)
    when is_atom(var) and is_atom(context),
    do: {find_var!(var, vars), code}
# Interval
def quoted_type({:datetime_add, _, [_, _, _]}, _vars), do: :naive_datetime
def quoted_type({:date_add, _, [_, _, _]}, _vars), do: :date
# Tagged
def quoted_type({:<<>>, _, _}, _vars), do: :binary
def quoted_type({:type, _, [_, type]}, _vars), do: type
# Sigils
def quoted_type({sigil, _, [_, []]}, _vars) when sigil in ~w(sigil_s sigil_S)a, do: :string
def quoted_type({sigil, _, [_, []]}, _vars) when sigil in ~w(sigil_w sigil_W)a, do: {:array, :string}
# Lists: a homogeneous list types as `{:array, t}`, otherwise `{:array, :any}`.
# FIX: the mapper had been corrupted by HTML-entity decoding —
# `&quoted_type(&1, vars)` had become `"ed_type(&1, vars)`, which does
# not compile. Restored the capture below.
def quoted_type(list, vars) when is_list(list) do
  case list |> Enum.map(&quoted_type(&1, vars)) |> Enum.uniq() do
    [type] -> {:array, type}
    _ -> {:array, :any}
  end
end
# Negative numbers
def quoted_type({:-, _, [number]}, _vars) when is_integer(number), do: :integer
def quoted_type({:-, _, [number]}, _vars) when is_float(number), do: :float
# Dynamic aggregates: the aggregate's type is the type of its first argument.
for {agg, arity} <- @dynamic_aggregates do
  args = 1..arity |> Enum.map(fn _ -> Macro.var(:_, __MODULE__) end) |> tl()
  def quoted_type({unquote(agg), _, [expr, unquote_splicing(args)]}, vars) do
    quoted_type(expr, vars)
  end
end
# Literals
def quoted_type(literal, _vars) when is_float(literal), do: :float
def quoted_type(literal, _vars) when is_binary(literal), do: :string
def quoted_type(literal, _vars) when is_boolean(literal), do: :boolean
def quoted_type(literal, _vars) when is_atom(literal) and not is_nil(literal), do: :atom
def quoted_type(literal, _vars) when is_integer(literal), do: :integer
# Tuples
def quoted_type({left, right}, vars), do: quoted_type({:{}, [], [left, right]}, vars)
# FIX: same HTML-entity corruption as the list clause; restored
# `&quoted_type(&1, vars)`.
def quoted_type({:{}, _, elems}, vars), do: {:tuple, Enum.map(elems, &quoted_type(&1, vars))}
# Calls: consult call_type/2 for the return type, defaulting to :any.
def quoted_type({name, _, args}, _vars) when is_atom(name) and is_list(args) do
  case call_type(name, length(args)) do
    {_in, out} -> out
    nil -> :any
  end
end
def quoted_type(_, _vars), do: :any
# The env may arrive wrapped as `{env, fun}` (see try_expansion/5);
# unwrap it when needed.
defp get_env(env_or_pair) do
  case env_or_pair do
    {env, _fun} -> env
    env -> env
  end
end
@doc """
Raises a query building error.
"""
def error!(message) when is_binary(message) do
  # Drop leading stacktrace frames belonging to the query builder (and
  # Enum) so the reported error points at user code.
  {:current_stacktrace, [_|t]} = Process.info(self(), :current_stacktrace)

  t = Enum.drop_while t, fn
    {mod, _, _, _} ->
      String.starts_with?(Atom.to_string(mod), ["Elixir.Ecto.Query.", "Elixir.Enum"])
    _ ->
      false
  end

  reraise Ecto.Query.CompileError, [message: message], t
end

@doc """
Counts the bindings in a query expression.

## Examples

    iex> count_binds(%Ecto.Query{joins: [1,2,3]})
    4

"""
@spec count_binds(Ecto.Query.t) :: non_neg_integer
def count_binds(%Query{joins: joins}) do
  # The from source is one bind, plus one per join.
  1 + length(joins)
end
@doc """
Bump interpolations by the length of parameters.
"""
def bump_interpolations(expr, []), do: expr

def bump_interpolations(expr, params) do
  offset = length(params)

  # Shift every integer `^n` interpolation marker by the param count.
  bump = fn
    {:^, meta, [counter]} when is_integer(counter) -> {:^, meta, [offset + counter]}
    node -> node
  end

  Macro.prewalk(expr, bump)
end
@doc """
Applies a query at compilation time or at runtime.

This function is responsible for checking if a given query is an
`Ecto.Query` struct at compile time. If it is not it will act
accordingly.

If a query is available, it invokes the `apply` function in the
given `module`, otherwise, it delegates the call to runtime.

It is important to keep in mind the complexities introduced
by this function. In particular, a %Query{} is a mixture of escaped
and unescaped expressions which makes it impossible for this
function to properly escape or unescape it at compile/runtime.
For this reason, the apply function should be ready to handle
arguments in both escaped and unescaped form.

For example, take into account the `Builder.OrderBy`:

    select = %Ecto.Query.QueryExpr{expr: expr, file: env.file, line: env.line}
    Builder.apply_query(query, __MODULE__, [order_by], env)

`expr` is already an escaped expression and we must not escape
it again. However, it is wrapped in an Ecto.Query.QueryExpr,
which must be escaped! Furthermore, the `apply/2` function
in `Builder.OrderBy` very likely will inject the QueryExpr inside
Query, which again, is a mixture of escaped and unescaped expressions.

That said, you need to obey the following rules:

  1. In order to call this function, the arguments must be escapable
     values supported by the `escape/1` function below;
  2. The apply function may not manipulate the given arguments,
     with exception to the query.

In particular, when invoked at compilation time, all arguments
(except the query) will be escaped, so they can be injected into
the query properly, but they will be in their runtime form
when invoked at runtime.
"""
@spec apply_query(Macro.t, Macro.t, Macro.t, Macro.Env.t) :: Macro.t
def apply_query(query, module, args, env) do
  case Macro.expand(query, env) |> unescape_query() do
    # Known at compile time: apply now and re-escape the result.
    %Query{} = compiletime_query ->
      apply(module, :apply, [compiletime_query | args])
      |> escape_query()
    # Otherwise defer the whole application to runtime.
    runtime_query ->
      quote do
        # Unquote the query before `module.apply()` for any binding variable.
        query = unquote(runtime_query)
        unquote(module).apply(query, unquote_splicing(args))
      end
  end
end
# Unescapes an `Ecto.Query` struct: turns the escaped (AST) form of a
# %Query{} back into the struct itself; other AST passes through.
@spec unescape_query(Macro.t) :: Query.t | Macro.t
defp unescape_query({:%, _, [Query, {:%{}, _, list}]}) do
  struct(Query, list)
end
defp unescape_query({:%{}, _, list} = ast) do
  # A plain map literal carrying Query's __struct__ key is also a query.
  if List.keyfind(list, :__struct__, 0) == {:__struct__, Query} do
    Map.new(list)
  else
    ast
  end
end
defp unescape_query(other) do
  other
end

# Escapes an `Ecto.Query` and associated structs.
defp escape_query(%Query{} = query), do: {:%{}, [], Map.to_list(query)}

# Unrolls nested `Access.get` calls (`x[:a][:b]`) into the base field
# expression plus the accumulated path elements.
defp parse_access_get({{:., _, [Access, :get]}, _, [left, right]}, acc) do
  parse_access_get(left, [right | acc])
end
defp parse_access_get({{:., _, [{var, _, context}, field]}, _, []} = expr, acc)
     when is_atom(var) and is_atom(context) and is_atom(field) do
  {expr, acc}
end
end
| 34.946655 | 117 | 0.627496 |
e800fbd6829f7763665d80a35cad4a7d007fa1a7 | 2,127 | exs | Elixir | test/controllers/task_controller_test.exs | wsmoak/my_app_802337 | a863bdd16c909b12a598bf06b8901e1c8c0d3f7c | [
"MIT"
] | null | null | null | test/controllers/task_controller_test.exs | wsmoak/my_app_802337 | a863bdd16c909b12a598bf06b8901e1c8c0d3f7c | [
"MIT"
] | null | null | null | test/controllers/task_controller_test.exs | wsmoak/my_app_802337 | a863bdd16c909b12a598bf06b8901e1c8c0d3f7c | [
"MIT"
] | null | null | null | defmodule MyApp_802337.TaskControllerTest do
use MyApp_802337.ConnCase
alias MyApp_802337.Task
@valid_attrs %{due_at: %{day: 17, hour: 14, min: 0, month: 4, year: 2010}, title: "some content"}
@invalid_attrs %{}
setup do
conn = conn() |> put_req_header("accept", "application/json")
{:ok, conn: conn}
end
test "lists all entries on index", %{conn: conn} do
conn = get conn, task_path(conn, :index)
assert json_response(conn, 200)["data"] == []
end
test "shows chosen resource", %{conn: conn} do
task = Repo.insert! %Task{}
conn = get conn, task_path(conn, :show, task)
assert json_response(conn, 200)["data"] == %{
"id" => task.id
}
end
test "does not show resource and instead throw error when id is nonexistent", %{conn: conn} do
assert_raise Ecto.NoResultsError, fn ->
get conn, task_path(conn, :show, -1)
end
end
test "creates and renders resource when data is valid", %{conn: conn} do
conn = post conn, task_path(conn, :create), task: @valid_attrs
assert json_response(conn, 200)["data"]["id"]
assert Repo.get_by(Task, @valid_attrs)
end
test "does not create resource and renders errors when data is invalid", %{conn: conn} do
conn = post conn, task_path(conn, :create), task: @invalid_attrs
assert json_response(conn, 422)["errors"] != %{}
end
test "updates and renders chosen resource when data is valid", %{conn: conn} do
task = Repo.insert! %Task{}
conn = put conn, task_path(conn, :update, task), task: @valid_attrs
assert json_response(conn, 200)["data"]["id"]
assert Repo.get_by(Task, @valid_attrs)
end
test "does not update chosen resource and renders errors when data is invalid", %{conn: conn} do
task = Repo.insert! %Task{}
conn = put conn, task_path(conn, :update, task), task: @invalid_attrs
assert json_response(conn, 422)["errors"] != %{}
end
test "deletes chosen resource", %{conn: conn} do
task = Repo.insert! %Task{}
conn = delete conn, task_path(conn, :delete, task)
assert response(conn, 204)
refute Repo.get(Task, task.id)
end
end
| 33.761905 | 99 | 0.661965 |
e800fc2fdaf793f419fdaebaf7808bc6ce42e0a3 | 4,018 | exs | Elixir | test/mustache_feature_test.exs | newaperio/Mustache.ex | a1314b202c59ab7c3c48113431a982fbee6fe50c | [
"MIT"
] | null | null | null | test/mustache_feature_test.exs | newaperio/Mustache.ex | a1314b202c59ab7c3c48113431a982fbee6fe50c | [
"MIT"
] | null | null | null | test/mustache_feature_test.exs | newaperio/Mustache.ex | a1314b202c59ab7c3c48113431a982fbee6fe50c | [
"MIT"
] | null | null | null | defmodule MustacheFeatureTest do
use ExUnit.Case
test "No Interpolation" do
assert Mustache.render("Hello from {Mustache}!\n") == "Hello from {Mustache}!\n"
end
test "Basic Interpolation" do
assert Mustache.render("Hello, {{subject}}!\n", %{subject: "world"}) == "Hello, world!\n"
end
test "HTML Escaping" do
assert Mustache.render("These characters should be HTML escaped: {{forbidden}}\n", %{forbidden: "& \" < >"}) == "These characters should be HTML escaped: & " < >\n"
end
test "Triple Mustache" do
assert Mustache.render("These characters should not be HTML escaped: {{{forbidden}}}\n", %{forbidden: "& \" < >"}) == "These characters should not be HTML escaped: & \" < >\n"
end
test "Ampersand" do
assert Mustache.render("These characters should not be HTML escaped: {{&forbidden}}\n", %{forbidden: "& \" < >"}) == "These characters should not be HTML escaped: & \" < >\n"
end
test "Integers should interpolate seamlessly." do
assert Mustache.render("\"{{mph}} miles an hour!\"", %{mph: 85}) == "\"85 miles an hour!\""
end
test "Triple Mustache Integer Interpolation" do
assert Mustache.render("\"{{{mph}}} miles an hour!\"", %{mph: 85}) == "\"85 miles an hour!\""
end
test "Ampersand Integer Interpolation" do
assert Mustache.render("\"{{&mph}} miles an hour!\"", %{mph: 85}) == "\"85 miles an hour!\""
end
test "Basic Decimal Interpolation" do
assert Mustache.render("\"{{power}} jiggawatts!\"", %{power: 1.21}) == "\"1.21 jiggawatts!\""
end
test "Triple Mustache Decimal Interpolation" do
assert Mustache.render("\"{{{power}}} jiggawatts!\"", %{power: 1.21}) == "\"1.21 jiggawatts!\""
end
test "Ampersand Decimal Interpolation" do
assert Mustache.render("\"{{&power}} jiggawatts!\"", %{power: 1.21}) == "\"1.21 jiggawatts!\""
end
test "Basic Context Miss Interpolation" do
assert Mustache.render("I ({{cannot}}) be seen!", %{}) == "I () be seen!"
end
test "Triple Mustache Context Miss Interpolation" do
assert Mustache.render("I ({{{cannot}}}) be seen!", %{}) == "I () be seen!"
end
test "Ampersand Context Miss Interpolation" do
assert Mustache.render("I ({{&cannot}}) be seen!", %{}) == "I () be seen!"
end
#Dotted Names
test "Dotted Names" do
assert Mustache.render("\"{{person.name}}\" == \"Joe\"",
%{person: %{name: "Joe"}}) == "\"Joe\" == \"Joe\""
assert Mustache.render("\"{{person.name.first}}\" == \"Joe\"",
%{person: %{name: %{first: "Joe"}}}) == "\"Joe\" == \"Joe\""
end
@tag :pending
test "Dotted Names - Basic Interpolation" do
assert Mustache.render("\"{{person.name}}\" == \"{{#person}}{{name}}{{/person}}\"",
%{person: %{name: "Joe"}}) == "\"Joe\" == \"Joe\""
end
#Whitespace sensitivity
test "Interpolation - Surrounding Whitespace" do
assert Mustache.render("| {{string}} |", %{string: '---'}) == "| --- |"
end
test "Triple Mustache - Surrounding Whitespace" do
assert Mustache.render("| {{{string}}} |", %{ string: '---' }) == "| --- |"
end
test "Ampersand - Surrounding Whitespace" do
assert Mustache.render("| {{&string}} |", %{string: '---' }) == "| --- |"
end
test "Interpolation - Standalone" do
assert Mustache.render(" {{string}}\n", %{string: '---' }) == " ---\n"
end
test "Triple Mustache - Standalone" do
assert Mustache.render(" {{{string}}}\n", %{ string: '---' }) == " ---\n"
end
test "Ampersand - Standalone" do
assert Mustache.render(" {{&string}}\n", %{ string: '---' }) == " ---\n"
end
# Whitespace Insensitivity
test "Interpolation With Padding" do
assert Mustache.render("|{{ string }}|", %{ string: "---" }) == "|---|"
end
test "Triple Mustache With Padding" do
assert Mustache.render("|{{{ string }}}|", %{ string: "---" }) == "|---|"
end
test "Ampersand With Padding" do
assert Mustache.render("|{{& string }}|", %{ string: "---" }) == "|---|"
end
end
| 34.637931 | 183 | 0.582379 |
e80111c8155ddce52802927f2cb866af4403c932 | 10,608 | ex | Elixir | lib/ecto/migration/runner.ex | arcz/ecto_sql | 22ecbe3782af32f50c36387c861db77041247de8 | [
"Apache-2.0"
] | null | null | null | lib/ecto/migration/runner.ex | arcz/ecto_sql | 22ecbe3782af32f50c36387c861db77041247de8 | [
"Apache-2.0"
] | null | null | null | lib/ecto/migration/runner.ex | arcz/ecto_sql | 22ecbe3782af32f50c36387c861db77041247de8 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Migration.Runner do
# A GenServer responsible for running migrations
# in either `:forward` or `:backward` directions.
@moduledoc false
require Logger
alias Ecto.Migration.Table
alias Ecto.Migration.Index
alias Ecto.Migration.Constraint
alias Ecto.Migration.Command
@doc """
Runs the given migration.
"""
def run(repo, module, direction, operation, migrator_direction, opts) do
  level = Keyword.get(opts, :log, :info)
  sql = Keyword.get(opts, :log_sql, false)
  log = %{level: level, sql: sql}
  args  = [self(), repo, direction, migrator_direction, log]

  # Start a runner Agent under the migration supervisor and register it
  # in the process dictionary so the migration DSL can find it.
  {:ok, runner} = Supervisor.start_child(Ecto.Migration.Supervisor, args)
  metadata(runner, opts)

  log(level, "== Running #{inspect module}.#{operation}/0 #{direction}")
  # Time both the migration callback and the flush of queued commands.
  {time1, _} = :timer.tc(module, operation, [])
  {time2, _} = :timer.tc(&flush/0, [])
  time = time1 + time2
  log(level, "== Migrated in #{inspect(div(time, 100_000) / 10)}s")

  stop()
end

@doc """
Stores the runner metadata.
"""
def metadata(runner, opts) do
  prefix = opts[:prefix]
  # The runner pid and prefix live in the process dictionary of the
  # process executing the migration.
  Process.put(:ecto_migration, %{runner: runner, prefix: prefix && to_string(prefix)})
end

@doc """
Starts the runner for the specified repo.
"""
def start_link(parent, repo, direction, migrator_direction, log) do
  Agent.start_link(fn ->
    # Link to the migrating process so the runner dies with it.
    Process.link(parent)

    %{direction: direction, repo: repo, migrator_direction: migrator_direction,
      command: nil, subcommands: [], log: log, commands: [], config: repo.config()}
  end)
end

@doc """
Stops the runner.
"""
def stop() do
  Agent.stop(runner())
end

@doc """
Accesses the given repository configuration.
"""
def repo_config(key, default) do
  Agent.get(runner(), &Keyword.get(&1.config, key, default))
end

@doc """
Returns the migrator command (up or down).

  * forward + up: up
  * forward + down: down
  * forward + change: up
  * backward + change: down

"""
def migrator_direction do
  Agent.get(runner(), & &1.migrator_direction)
end

@doc """
Gets the prefix for this migration
"""
def prefix do
  case Process.get(:ecto_migration) do
    %{prefix: prefix} -> prefix
    _ -> raise "could not find migration runner process for #{inspect self()}"
  end
end

@doc """
Executes queue migration commands.

Reverses the order commands are executed when doing a rollback
on a change/0 function and resets commands queue.
"""
def flush do
  %{commands: commands, direction: direction} = Agent.get_and_update(runner(), fn (state) ->
    {state, %{state | commands: []}}
  end)

  # Commands are queued in reverse; keep that order when rolling back.
  commands = if direction == :backward, do: commands, else: Enum.reverse(commands)

  for command <- commands do
    {repo, direction, log} = runner_config()
    execute_in_direction(repo, direction, log, command)
  end
end
@doc """
Queues command tuples or strings for execution.

Ecto.MigrationError will be raised when the server
is in `:backward` direction and `command` is irreversible.
"""
def execute(command) do
  reply =
    Agent.get_and_update(runner(), fn
      # No command in progress: queue the new command.
      %{command: nil} = state ->
        {:ok, %{state | subcommands: [], commands: [command|state.commands]}}
      # A command is already open: nesting is not allowed.
      %{command: _} = state ->
        {:error, %{state | command: nil}}
    end)

  case reply do
    :ok ->
      :ok
    :error ->
      raise Ecto.MigrationError, "cannot execute nested commands"
  end
end

@doc """
Starts a command.
"""
def start_command(command) do
  reply =
    Agent.get_and_update(runner(), fn
      %{command: nil} = state ->
        {:ok, %{state | command: command}}
      %{command: _} = state ->
        {:error, %{state | command: command}}
    end)

  case reply do
    :ok ->
      :ok
    :error ->
      raise Ecto.MigrationError, "cannot execute nested commands"
  end
end

@doc """
Queues and clears current command. Must call `start_command/1` first.
"""
def end_command do
  Agent.update runner(), fn state ->
    # Subcommands were accumulated in reverse order; restore it here.
    {operation, object} = state.command
    command = {operation, object, Enum.reverse(state.subcommands)}
    %{state | command: nil, subcommands: [], commands: [command|state.commands]}
  end
end

@doc """
Adds a subcommand to the current command. Must call `start_command/1` first.
"""
def subcommand(subcommand) do
  reply =
    Agent.get_and_update(runner(), fn
      # Subcommands are only valid inside an open command block.
      %{command: nil} = state ->
        {:error, state}
      state ->
        {:ok, update_in(state.subcommands, &[subcommand|&1])}
    end)

  case reply do
    :ok ->
      :ok
    :error ->
      raise Ecto.MigrationError, message: "cannot execute command outside of block"
  end
end
## Execute

# Executes a queued command in the requested direction. `%Command{}`
# carries explicit up/down bodies; any other command must be reversed
# automatically when rolling back, or raises if irreversible.
defp execute_in_direction(repo, :forward, log, %Command{up: up}) do
  log_and_execute_ddl(repo, log, up)
end

defp execute_in_direction(repo, :forward, log, command) do
  log_and_execute_ddl(repo, log, command)
end

defp execute_in_direction(repo, :backward, log, %Command{down: down}) do
  log_and_execute_ddl(repo, log, down)
end

defp execute_in_direction(repo, :backward, log, command) do
  if reversed = reverse(command) do
    log_and_execute_ddl(repo, log, reversed)
  else
    raise Ecto.MigrationError, message:
      "cannot reverse migration command: #{command command}. " <>
      "You will need to explicitly define up/1 and down/1 in your migration"
  end
end

# Computes the inverse of a reversible DDL command; returns false for
# irreversible ones (e.g. plain drops, which lose column information).
defp reverse({:create, %Index{} = index}),
  do: {:drop, index}
defp reverse({:create_if_not_exists, %Index{} = index}),
  do: {:drop_if_exists, index}
defp reverse({:drop, %Index{} = index}),
  do: {:create, index}
defp reverse({:create, %Table{} = table, _columns}),
  do: {:drop, table}
defp reverse({:create_if_not_exists, %Table{} = table, _columns}),
  do: {:drop_if_exists, table}
defp reverse({:rename, %Table{} = table_current, %Table{} = table_new}),
  do: {:rename, table_new, table_current}
defp reverse({:rename, %Table{} = table, current_column, new_column}),
  do: {:rename, table, new_column, current_column}
defp reverse({:alter, %Table{} = table, changes}) do
  # An alter is reversible only if every subcommand is.
  if reversed = table_reverse(changes, []) do
    {:alter, table, reversed}
  end
end
defp reverse({:create_if_not_exists, %Constraint{} = constraint}),
  do: {:drop_if_exists, constraint}
defp reverse({:create, %Constraint{} = constraint}),
  do: {:drop, constraint}
defp reverse(_command), do: false
# Attempts to reverse a list of `alter table` subcommands, accumulating
# the reversed subcommands (which come out in reverse order). Returns
# `false` as soon as an irreversible change is found: `:modify` without
# a `:from` option, a bare `:remove` (no type to re-add), or any other
# unknown subcommand.
defp table_reverse([], reversed), do: reversed

defp table_reverse([change | rest], reversed) do
  case change do
    {:remove, name, type, opts} ->
      table_reverse(rest, [{:add, name, type, opts} | reversed])

    {:modify, name, type, opts} ->
      case opts[:from] do
        nil -> false
        from -> table_reverse(rest, [{:modify, name, from, Keyword.put(opts, :from, type)} | reversed])
      end

    {:add, name, _type, _opts} ->
      table_reverse(rest, [{:remove, name} | reversed])

    _other ->
      false
  end
end
## Helpers
# Fetches the migration runner pid stashed in the process dictionary,
# raising when called outside of a running migration.
defp runner do
  migration = Process.get(:ecto_migration)

  if is_map(migration) and Map.has_key?(migration, :runner) do
    Map.fetch!(migration, :runner)
  else
    raise "could not find migration runner process for #{inspect self()}"
  end
end
# Reads the {repo, direction, log} triple out of the runner agent's state.
defp runner_config do
  extract = fn %{repo: repo, direction: direction, log: log} ->
    {repo, direction, log}
  end

  Agent.get(runner(), extract)
end
# Logs a human-readable description of the DDL command, runs it through
# the repo's adapter, then forwards any adapter-produced log entries to
# Logger.
defp log_and_execute_ddl(repo, %{level: level, sql: sql}, command) do
  log(level, command(command))
  meta = Ecto.Adapter.lookup_meta(repo)
  # timeout: :infinity — DDL (e.g. building an index) can legitimately
  # run for a long time.
  {:ok, logs} = repo.__adapter__.execute_ddl(meta, command, timeout: :infinity, log: sql)
  Enum.each(logs, fn {level, message, metadata} ->
    Logger.log(level, message, metadata)
  end)
  :ok
end
# Emits `msg` at the configured level; a level of `false` disables logging.
defp log(level, msg) do
  if level == false do
    :ok
  else
    Logger.log(level, msg)
  end
end
# Builds a human-readable description of a DDL command; used for logging
# and in the "cannot reverse" error message.
defp command(ddl) when is_binary(ddl) or is_list(ddl),
  do: "execute #{inspect ddl}"
# Tables
defp command({:create, %Table{} = table, _}),
  do: "create table #{quote_name(table.prefix, table.name)}"
defp command({:create_if_not_exists, %Table{} = table, _}),
  do: "create table if not exists #{quote_name(table.prefix, table.name)}"
defp command({:alter, %Table{} = table, _}),
  do: "alter table #{quote_name(table.prefix, table.name)}"
defp command({:drop, %Table{} = table}),
  do: "drop table #{quote_name(table.prefix, table.name)}"
defp command({:drop_if_exists, %Table{} = table}),
  do: "drop table if exists #{quote_name(table.prefix, table.name)}"
# Indexes
defp command({:create, %Index{} = index}),
  do: "create index #{quote_name(index.prefix, index.name)}"
defp command({:create_if_not_exists, %Index{} = index}),
  do: "create index if not exists #{quote_name(index.prefix, index.name)}"
defp command({:drop, %Index{} = index}),
  do: "drop index #{quote_name(index.prefix, index.name)}"
defp command({:drop_if_exists, %Index{} = index}),
  do: "drop index if exists #{quote_name(index.prefix, index.name)}"
# Renames
defp command({:rename, %Table{} = current_table, %Table{} = new_table}),
  do: "rename table #{quote_name(current_table.prefix, current_table.name)} to #{quote_name(new_table.prefix, new_table.name)}"
defp command({:rename, %Table{} = table, current_column, new_column}),
  do: "rename column #{current_column} to #{new_column} on table #{quote_name(table.prefix, table.name)}"
# Constraints: creating one requires exactly one of check/exclude.
defp command({:create, %Constraint{check: nil, exclude: nil}}),
  do: raise ArgumentError, "a constraint must have either a check or exclude option"
defp command({:create, %Constraint{check: check, exclude: exclude}}) when is_binary(check) and is_binary(exclude),
  do: raise ArgumentError, "a constraint must not have both check and exclude options"
defp command({:create, %Constraint{check: check} = constraint}) when is_binary(check),
  do: "create check constraint #{constraint.name} on table #{quote_name(constraint.prefix, constraint.table)}"
defp command({:create, %Constraint{exclude: exclude} = constraint}) when is_binary(exclude),
  do: "create exclude constraint #{constraint.name} on table #{quote_name(constraint.prefix, constraint.table)}"
defp command({:drop, %Constraint{} = constraint}),
  do: "drop constraint #{constraint.name} from table #{quote_name(constraint.prefix, constraint.table)}"
# Builds a (possibly prefix-qualified) object name, converting atoms to
# strings along the way.
defp quote_name(prefix, name) do
  case prefix do
    nil -> quote_name(name)
    _prefix -> quote_name(prefix) <> "." <> quote_name(name)
  end
end

defp quote_name(name) when is_atom(name) do
  name |> Atom.to_string() |> quote_name()
end

defp quote_name(name), do: name
end
| 32.440367 | 129 | 0.65083 |
e80125279f2af7326205c6f7df7be5d0e4f60771 | 1,317 | ex | Elixir | test/helper/check_case.ex | acac99/credo-module-function-order-rule | a60a8641e682dde1517bc38df37c91bb23359b2b | [
"MIT"
] | 1 | 2019-09-19T10:29:24.000Z | 2019-09-19T10:29:24.000Z | test/helper/check_case.ex | acac99/credo-module-function-ordering | a60a8641e682dde1517bc38df37c91bb23359b2b | [
"MIT"
] | null | null | null | test/helper/check_case.ex | acac99/credo-module-function-ordering | a60a8641e682dde1517bc38df37c91bb23359b2b | [
"MIT"
] | null | null | null | defmodule Test.Helper.CredoModuleFunctionOrdering.CheckCase do
import ExUnit.Assertions
alias Credo.Execution
alias Credo.SourceFile
@doc """
Runs `check` against `source_file` and asserts that it reports no issues.

Returns the (empty) issue list for further inspection.
"""
def refute_issues(source_file, check, params \\ []) do
  issues = issues_for(source_file, check, create_config(), params)
  assert [] == issues,
         "There should be no issues, got #{Enum.count(issues)}: #{to_inspected(issues)}"
  issues
end
@doc """
Runs `check` against `source_file` and asserts that exactly one issue is
reported.

When `callback` is given it is invoked with the single issue, allowing
further assertions on it. Returns the issue list.
"""
def assert_issue(source_file, check \\ nil, params \\ [], callback \\ nil) do
  issues = issues_for(source_file, check, create_config(), params)
  refute Enum.empty?(issues), "There should be one issue, got none."
  assert Enum.count(issues) == 1,
         "There should be only 1 issue, got #{Enum.count(issues)}: #{to_inspected(issues)}"
  if callback do
    issues |> List.first() |> callback.()
  end
  issues
end
@doc """
Renders any term via the `Inspect` protocol at a line width of 50 and
returns the result as a binary. Used to embed terms in assertion
messages.
"""
def to_inspected(value) do
  value
  |> Inspect.Algebra.to_doc(%Inspect.Opts{})
  |> Inspect.Algebra.format(50)
  # format/2 returns iodata (possibly nested); IO.iodata_to_binary/1
  # flattens it correctly, unlike Enum.join/2 which assumes a flat
  # list of binaries.
  |> IO.iodata_to_binary()
end
# Runs the check against the source file and returns its issues.
# The execution context argument is currently unused by the checks under
# test. The previous dead `_issues =` binding on the returned expression
# has been dropped.
defp issues_for(%SourceFile{} = source_file, check, _exec, params) do
  check.run(source_file, params)
end
# Builds a minimal Credo execution context with the servers the checks
# expect to be running (source files, issues, timing).
defp create_config do
  %Execution{}
  |> Execution.ExecutionSourceFiles.start_server()
  |> Execution.ExecutionIssues.start_server()
  |> Execution.ExecutionTiming.start_server()
end
end
| 27.4375 | 93 | 0.678815 |
e80133696d096413fe76a5785a9069adbf6eaa0a | 128 | ex | Elixir | debian/cron.d.ex | winjer/squeal | 20401986e0d1698776f5b482b28e14c57b11833c | [
"Apache-2.0"
] | 2 | 2015-01-30T10:22:12.000Z | 2015-11-05T15:37:23.000Z | debian/cron.d.ex | winjer/squeal | 20401986e0d1698776f5b482b28e14c57b11833c | [
"Apache-2.0"
] | null | null | null | debian/cron.d.ex | winjer/squeal | 20401986e0d1698776f5b482b28e14c57b11833c | [
"Apache-2.0"
] | null | null | null | #
# Regular cron jobs for the squeal package
#
0 4 * * * root [ -x /usr/bin/squeal_maintenance ] && /usr/bin/squeal_maintenance
| 25.6 | 80 | 0.695313 |
e8018aa0020fddf5f164508fbb1b7acd81b2dbdd | 221 | exs | Elixir | test/distributed_elixir/web/controllers/page_controller_test.exs | odarriba/distributed-elixir-example | ef3075d8d14ff3c0f3cd6c06145659129d80f6d1 | [
"MIT"
] | 11 | 2017-08-08T08:38:28.000Z | 2019-09-16T09:20:06.000Z | test/distributed_elixir/web/controllers/page_controller_test.exs | odarriba/distributed-elixir-example | ef3075d8d14ff3c0f3cd6c06145659129d80f6d1 | [
"MIT"
] | null | null | null | test/distributed_elixir/web/controllers/page_controller_test.exs | odarriba/distributed-elixir-example | ef3075d8d14ff3c0f3cd6c06145659129d80f6d1 | [
"MIT"
] | null | null | null | defmodule DistributedElixir.Web.PageControllerTest do
use DistributedElixir.Web.ConnCase
# Smoke test: requesting the root path renders the default Phoenix
# welcome page with a 200 response.
test "GET /", %{conn: conn} do
  conn = get conn, "/"
  assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 24.555556 | 60 | 0.705882 |
e801c1b41e0bc70e140f2f37cd14a037048b7bc2 | 567 | ex | Elixir | lib/phoenix/transports/serializer.ex | raspo/phoenix | 438b74255e7a4d68b4eaf1a295d0fcd201c71421 | [
"MIT"
] | 2 | 2017-06-08T23:28:13.000Z | 2017-06-08T23:28:16.000Z | lib/phoenix/transports/serializer.ex | raspo/phoenix | 438b74255e7a4d68b4eaf1a295d0fcd201c71421 | [
"MIT"
] | null | null | null | lib/phoenix/transports/serializer.ex | raspo/phoenix | 438b74255e7a4d68b4eaf1a295d0fcd201c71421 | [
"MIT"
] | null | null | null | defmodule Phoenix.Transports.Serializer do
@moduledoc """
Defines a behaviour for `Phoenix.Socket.Message` serialization.
"""
@doc "Translates a `Phoenix.Socket.Broadcast` struct to fastlane format"
@callback fastlane!(Phoenix.Socket.Broadcast.t) :: term
@doc "Encodes `Phoenix.Socket.Message` struct to transport representation"
@callback encode!(Phoenix.Socket.Message.t | Phoenix.Socket.Reply.t) :: term
@doc "Decodes iodata into `Phoenix.Socket.Message` struct"
@callback decode!(iodata, options :: Keyword.t) :: Phoenix.Socket.Message.t
end
| 37.8 | 78 | 0.749559 |
e801d81d12e3d69e1529f0f613d753945ad00d91 | 1,526 | ex | Elixir | test/support/data_case.ex | ATechnoHazard/katbin | 20a0b45954cf7819cd9d51c401db06be0f47666b | [
"MIT"
] | 4 | 2020-08-05T20:05:34.000Z | 2020-10-01T10:01:56.000Z | test/support/data_case.ex | ATechnoHazard/katbin | 20a0b45954cf7819cd9d51c401db06be0f47666b | [
"MIT"
] | 1 | 2020-07-08T05:02:12.000Z | 2020-09-25T10:05:11.000Z | test/support/data_case.ex | ATechnoHazard/katbin | 20a0b45954cf7819cd9d51c401db06be0f47666b | [
"MIT"
] | 1 | 2020-08-30T12:59:49.000Z | 2020-08-30T12:59:49.000Z | defmodule Ketbin.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use Ketbin.DataCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
# Injected into every test module that `use`s this case: the repo alias
# plus the Ecto helpers commonly needed in data-layer tests.
using do
  quote do
    alias Ketbin.Repo
    import Ecto
    import Ecto.Changeset
    import Ecto.Query
    import Ketbin.DataCase
  end
end
# Checks out a sandboxed DB connection per test. For non-async tests the
# connection is shared with processes spawned by the test; async tests
# keep their own connection.
setup tags do
  :ok = Ecto.Adapters.SQL.Sandbox.checkout(Ketbin.Repo)
  unless tags[:async] do
    Ecto.Adapters.SQL.Sandbox.mode(Ketbin.Repo, {:shared, self()})
  end
  :ok
end
@doc """
A helper that transforms changeset errors into a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
  Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
    # Interpolate bindings such as %{count} into the raw error message,
    # falling back to the literal key name when no binding exists.
    Regex.replace(~r"%{(\w+)}", message, fn _, key ->
      opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
    end)
  end)
end
| 27.25 | 77 | 0.687418 |
e801ea36eac431d2387e0f374e604eb037bc5caa | 98 | exs | Elixir | test/notifications_test.exs | rogaz/thesis-phoenix | 8ad24cdc7e24bf312139a527db5a3bf07e05820f | [
"MIT"
] | 681 | 2016-06-21T20:49:21.000Z | 2022-02-19T04:08:38.000Z | test/notifications_test.exs | rogaz/thesis-phoenix | 8ad24cdc7e24bf312139a527db5a3bf07e05820f | [
"MIT"
] | 125 | 2016-06-21T21:14:49.000Z | 2020-12-12T20:15:48.000Z | test/notifications_test.exs | rogaz/thesis-phoenix | 8ad24cdc7e24bf312139a527db5a3bf07e05820f | [
"MIT"
] | 76 | 2016-09-06T03:40:55.000Z | 2022-01-20T21:29:22.000Z | defmodule NotificationsTest do
use ExUnit.Case
doctest Thesis.Notifications, import: true
end
| 19.6 | 44 | 0.816327 |
e801f810973afe2bf6946934d1bc6ccf11bac74d | 141 | ex | Elixir | test_projects/phx_1.6/lib/phx_proj_web/controllers/page_controller.ex | qhwa/dockerize | d930f06da89a686961da7a5b5bdadb4c9b01ec32 | [
"MIT"
] | 47 | 2020-03-04T00:24:26.000Z | 2022-01-14T23:34:52.000Z | test_projects/phx_1.6/lib/phx_proj_web/controllers/page_controller.ex | qhwa/dockerize | d930f06da89a686961da7a5b5bdadb4c9b01ec32 | [
"MIT"
] | 2 | 2020-06-09T22:25:06.000Z | 2020-06-30T21:18:37.000Z | test_projects/phx_1.6/lib/phx_proj_web/controllers/page_controller.ex | qhwa/dockerize | d930f06da89a686961da7a5b5bdadb4c9b01ec32 | [
"MIT"
] | null | null | null | defmodule PhxProjWeb.PageController do
use PhxProjWeb, :controller
# Renders the static landing page.
def index(conn, _params) do
  render(conn, "index.html")
end
end
| 17.625 | 38 | 0.737589 |
e801f99c2d00210bbce290bebb66783eeb293a38 | 2,471 | ex | Elixir | clients/proximity_beacon/lib/google_api/proximity_beacon/v1beta1/model/ephemeral_id_registration_params.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/proximity_beacon/lib/google_api/proximity_beacon/v1beta1/model/ephemeral_id_registration_params.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/proximity_beacon/lib/google_api/proximity_beacon/v1beta1/model/ephemeral_id_registration_params.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.ProximityBeacon.V1beta1.Model.EphemeralIdRegistrationParams do
@moduledoc """
Information a client needs to provision and register beacons that
broadcast Eddystone-EID format beacon IDs, using Elliptic curve
Diffie-Hellman key exchange. See
[the Eddystone specification](https://github.com/google/eddystone/tree/master/eddystone-eid) at GitHub.
## Attributes
* `maxRotationPeriodExponent` (*type:* `integer()`, *default:* `nil`) - Indicates the maximum rotation period supported by the service.
See EddystoneEidRegistration.rotation_period_exponent
* `minRotationPeriodExponent` (*type:* `integer()`, *default:* `nil`) - Indicates the minimum rotation period supported by the service.
See EddystoneEidRegistration.rotation_period_exponent
* `serviceEcdhPublicKey` (*type:* `String.t`, *default:* `nil`) - The beacon service's public key for use by a beacon to derive its
Identity Key using Elliptic Curve Diffie-Hellman key exchange.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:maxRotationPeriodExponent => integer(),
:minRotationPeriodExponent => integer(),
:serviceEcdhPublicKey => String.t()
}
field(:maxRotationPeriodExponent)
field(:minRotationPeriodExponent)
field(:serviceEcdhPublicKey)
end
# Poison protocol implementations for the generated model: decoding is
# delegated to the model's own `decode/2`, encoding to the shared
# `GoogleApi.Gax.ModelBase.encode/2` helper.
defimpl Poison.Decoder, for: GoogleApi.ProximityBeacon.V1beta1.Model.EphemeralIdRegistrationParams do
  def decode(value, options) do
    GoogleApi.ProximityBeacon.V1beta1.Model.EphemeralIdRegistrationParams.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.ProximityBeacon.V1beta1.Model.EphemeralIdRegistrationParams do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 41.881356 | 139 | 0.760826 |
e802064203619a1d8a5554d8fda73a8bf7a1ddc1 | 1,144 | ex | Elixir | lib/rocketpay_web/router.ex | Pliavi/NLW-4-Rocketpay | fc146eb534e8dac634d618c7779b928b6172cbb2 | [
"MIT"
] | null | null | null | lib/rocketpay_web/router.ex | Pliavi/NLW-4-Rocketpay | fc146eb534e8dac634d618c7779b928b6172cbb2 | [
"MIT"
] | 1 | 2021-03-05T12:39:11.000Z | 2021-03-05T12:39:11.000Z | lib/rocketpay_web/router.ex | Pliavi/NLW-4-Rocketpay | fc146eb534e8dac634d618c7779b928b6172cbb2 | [
"MIT"
] | null | null | null | defmodule RocketpayWeb.Router do
use RocketpayWeb, :router
pipeline :api do
plug :accepts, ["json"]
end
scope "/api", RocketpayWeb do
pipe_through :api
post "/user", UserController, :create
post "/user/close_account", UserController, :delete
post "/operation/withdraw", OperationController, :withdraw
post "/operation/deposit", OperationController, :deposit
post "/operation/transfer", OperationController, :transfer
post "/account/block", AccountController, :set_block
end
# Enables LiveDashboard only for development
#
# If you want to use the LiveDashboard in production, you should put
# it behind authentication and allow only admins to access it.
# If your application does not have an admins-only section yet,
# you can use Plug.BasicAuth to set up some basic authentication
# as long as you are also using SSL (which you should anyway).
if Mix.env() in [:dev, :test] do
import Phoenix.LiveDashboard.Router
scope "/" do
pipe_through [:fetch_session, :protect_from_forgery]
live_dashboard "/dashboard", metrics: RocketpayWeb.Telemetry
end
end
end
| 30.918919 | 70 | 0.722902 |
e802199440c3bcea07b21a1a9e7e660ecc662d26 | 1,697 | ex | Elixir | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/ssl_certs_create_ephemeral_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/ssl_certs_create_ephemeral_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/ssl_certs_create_ephemeral_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SQLAdmin.V1beta4.Model.SslCertsCreateEphemeralRequest do
@moduledoc """
SslCerts create ephemeral certificate request.
## Attributes
* `access_token` (*type:* `String.t`, *default:* `nil`) - Access token to include in the signed certificate.
* `public_key` (*type:* `String.t`, *default:* `nil`) - PEM encoded public key to include in the signed certificate.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:access_token => String.t(),
:public_key => String.t()
}
field(:access_token)
field(:public_key)
end
# Poison protocol implementations for the generated model: decoding is
# delegated to the model's own `decode/2`, encoding to the shared
# `GoogleApi.Gax.ModelBase.encode/2` helper.
defimpl Poison.Decoder, for: GoogleApi.SQLAdmin.V1beta4.Model.SslCertsCreateEphemeralRequest do
  def decode(value, options) do
    GoogleApi.SQLAdmin.V1beta4.Model.SslCertsCreateEphemeralRequest.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.SQLAdmin.V1beta4.Model.SslCertsCreateEphemeralRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 33.94 | 120 | 0.741897 |
e8021e7bdcd1bc2e071076b2e52796f8293b5677 | 4,783 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/bubble_chart_spec.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/bubble_chart_spec.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/bubble_chart_spec.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Sheets.V4.Model.BubbleChartSpec do
  @moduledoc """
  A bubble chart.

  ## Attributes

  *   `bubbleBorderColor` (*type:* `GoogleApi.Sheets.V4.Model.Color.t`, *default:* `nil`) - The bubble border color.
  *   `bubbleBorderColorStyle` (*type:* `GoogleApi.Sheets.V4.Model.ColorStyle.t`, *default:* `nil`) - The bubble border color. If bubble_border_color is also set, this field takes precedence.
  *   `bubbleLabels` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data containing the bubble labels. These do not need to be unique.
  *   `bubbleMaxRadiusSize` (*type:* `integer()`, *default:* `nil`) - The max radius size of the bubbles, in pixels. If specified, the field must be a positive value.
  *   `bubbleMinRadiusSize` (*type:* `integer()`, *default:* `nil`) - The minimum radius size of the bubbles, in pixels. If specified, the field must be a positive value.
  *   `bubbleOpacity` (*type:* `number()`, *default:* `nil`) - The opacity of the bubbles between 0 and 1.0. 0 is fully transparent and 1 is fully opaque.
  *   `bubbleSizes` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data containing the bubble sizes. Bubble sizes are used to draw the bubbles at different sizes relative to each other. If specified, group_ids must also be specified. This field is optional.
  *   `bubbleTextStyle` (*type:* `GoogleApi.Sheets.V4.Model.TextFormat.t`, *default:* `nil`) - The format of the text inside the bubbles. Underline and Strikethrough are not supported.
  *   `domain` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data containing the bubble x-values. These values locate the bubbles in the chart horizontally.
  *   `groupIds` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data containing the bubble group IDs. All bubbles with the same group ID are drawn in the same color. If bubble_sizes is specified then this field must also be specified but may contain blank values. This field is optional.
  *   `legendPosition` (*type:* `String.t`, *default:* `nil`) - Where the legend of the chart should be drawn.
  *   `series` (*type:* `GoogleApi.Sheets.V4.Model.ChartData.t`, *default:* `nil`) - The data containing the bubble y-values. These values locate the bubbles in the chart vertically.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :bubbleBorderColor => GoogleApi.Sheets.V4.Model.Color.t(),
          :bubbleBorderColorStyle => GoogleApi.Sheets.V4.Model.ColorStyle.t(),
          :bubbleLabels => GoogleApi.Sheets.V4.Model.ChartData.t(),
          :bubbleMaxRadiusSize => integer(),
          :bubbleMinRadiusSize => integer(),
          :bubbleOpacity => number(),
          :bubbleSizes => GoogleApi.Sheets.V4.Model.ChartData.t(),
          :bubbleTextStyle => GoogleApi.Sheets.V4.Model.TextFormat.t(),
          :domain => GoogleApi.Sheets.V4.Model.ChartData.t(),
          :groupIds => GoogleApi.Sheets.V4.Model.ChartData.t(),
          :legendPosition => String.t(),
          :series => GoogleApi.Sheets.V4.Model.ChartData.t()
        }

  field(:bubbleBorderColor, as: GoogleApi.Sheets.V4.Model.Color)
  field(:bubbleBorderColorStyle, as: GoogleApi.Sheets.V4.Model.ColorStyle)
  field(:bubbleLabels, as: GoogleApi.Sheets.V4.Model.ChartData)
  field(:bubbleMaxRadiusSize)
  field(:bubbleMinRadiusSize)
  field(:bubbleOpacity)
  field(:bubbleSizes, as: GoogleApi.Sheets.V4.Model.ChartData)
  field(:bubbleTextStyle, as: GoogleApi.Sheets.V4.Model.TextFormat)
  field(:domain, as: GoogleApi.Sheets.V4.Model.ChartData)
  field(:groupIds, as: GoogleApi.Sheets.V4.Model.ChartData)
  field(:legendPosition)
  field(:series, as: GoogleApi.Sheets.V4.Model.ChartData)
end
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.BubbleChartSpec do
def decode(value, options) do
GoogleApi.Sheets.V4.Model.BubbleChartSpec.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.BubbleChartSpec do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 59.7875 | 314 | 0.720259 |
e8023343b5e382b1249321acaf3ef5b76bd807a3 | 4,290 | ex | Elixir | lib/game/world/master.ex | stevegrossi/ex_venture | e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa | [
"MIT"
] | 2 | 2019-05-14T11:36:44.000Z | 2020-07-01T08:54:04.000Z | lib/game/world/master.ex | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | null | null | null | lib/game/world/master.ex | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | 1 | 2021-01-29T14:12:40.000Z | 2021-01-29T14:12:40.000Z | defmodule Game.World.Master do
@moduledoc """
Master process for the world
Help orchestrate startup of zones
"""
use GenServer
alias Game.World.ZoneController
alias Game.Zone
require Logger
@behaviour Squabble.Leader
@group :world_leaders
@table :world_leader
# rebalance every 15 minutes
@rebalance_delay 15 * 60 * 1000
@start_world Application.get_env(:ex_venture, :game)[:world]
@impl true
def leader_selected(term) do
Logger.info("#{node()} chosen as the leader for term #{term}.", type: :leader)
if @start_world do
GenServer.cast(__MODULE__, :rebalance_zones)
end
end
@impl true
def node_down() do
if @start_world do
GenServer.cast(__MODULE__, :rebalance_zones)
end
end
def start_link(_) do
GenServer.start_link(__MODULE__, [], name: __MODULE__)
end
@doc """
Check if the world is online
"""
@spec is_world_online?() :: boolean()
def is_world_online?() do
case :ets.lookup(@table, :world_online) do
[{_, status}] ->
status
_ ->
false
end
end
@doc """
Update a local cache
"""
def update_cache(type, item) do
members = :pg2.get_members(@group)
Enum.map(members, fn member ->
GenServer.cast(member, {:update_cache, type, item})
end)
end
@impl true
def init(_) do
:ok = :pg2.create(@group)
:ok = :pg2.join(@group, self())
:ets.new(@table, [:set, :protected, :named_table])
schedule_rebalance()
{:ok, %{}}
end
def handle_cast({:update_cache, type, item}, state) do
Cachex.put(type, item.id, item)
{:noreply, state}
end
# This is started by the squabble leader
@impl true
def handle_cast(:rebalance_zones, state) do
Logger.info("Starting zones", type: :leader)
rebalance_zones()
members = :pg2.get_members(@group)
Enum.each(members, fn member ->
send(member, {:set, :world_online, true})
end)
{:noreply, state}
end
@impl true
def handle_info(:maybe_rebalance_zones, state) do
if Squabble.node_is_leader?() do
GenServer.cast(__MODULE__, :rebalance_zones)
end
schedule_rebalance()
{:noreply, state}
end
@impl true
def handle_info({:set, :world_online, status}, state) do
:ets.insert(@table, {:world_online, status})
Logger.info("World is online? #{status}")
{:noreply, state}
end
# filter the member list down to connected nodes
# pg2 may not have caught up with the node falling off yet
defp master_pids() do
:world
|> :pg2.get_members()
|> Enum.map(&{&1, node(&1)})
|> Enum.filter(fn {_pid, controller_node} ->
controller_node == node() || controller_node in Node.list()
end)
|> Enum.map(&elem(&1, 0))
end
defp rebalance_zones() do
members = master_pids()
hosted_zones = get_member_zones(members)
zones = Zone.all()
zone_count = length(zones)
member_count = length(members)
max_zones = round(Float.ceil(zone_count / member_count))
zones
|> Enum.reject(fn zone ->
Enum.any?(hosted_zones, fn {_, zone_ids} ->
Enum.member?(zone_ids, zone.id)
end)
end)
|> restart_zones(hosted_zones, max_zones)
end
defp get_member_zones(members) do
Enum.map(members, fn controller ->
{controller, ZoneController.hosted_zones(controller)}
end)
end
defp restart_zones(zones, [], _max_zones) do
raise "Something bad happened, ran out of nodes to place these zones #{inspect(zones)}"
end
defp restart_zones([], _controllers, _max_zones), do: :ok
defp restart_zones(
[zone | zones],
[{controller, controller_zones} | controllers_with_zones],
max_zones
) do
case length(controller_zones) >= max_zones do
true ->
restart_zones([zone | zones], controllers_with_zones, max_zones)
false ->
Logger.info("Starting zone #{zone.id} on #{inspect(controller)}", type: :leader)
ZoneController.start_zone(controller, zone)
controller_zones = [zone | controller_zones]
restart_zones(zones, [{controller, controller_zones} | controllers_with_zones], max_zones)
end
end
defp schedule_rebalance() do
Process.send_after(self(), :maybe_rebalance_zones, @rebalance_delay)
end
end
| 23.189189 | 98 | 0.653147 |
e8027a99a5ef6419580de839f92e594ca3c26bb0 | 1,605 | exs | Elixir | apps/snitch_api/mix.exs | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 456 | 2018-09-20T02:40:59.000Z | 2022-03-07T08:53:48.000Z | apps/snitch_api/mix.exs | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 273 | 2018-09-19T06:43:43.000Z | 2021-08-07T12:58:26.000Z | apps/snitch_api/mix.exs | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 122 | 2018-09-26T16:32:46.000Z | 2022-03-13T11:44:19.000Z | defmodule SnitchApi.Mixfile do
use Mix.Project
  # Umbrella app configuration: build, config, deps and lockfile paths
  # point at the umbrella root so all apps share a single build and
  # lockfile.
  def project do
    [
      app: :snitch_api,
      version: "0.0.1",
      build_path: "../../_build",
      config_path: "../../config/config.exs",
      deps_path: "../../deps",
      lockfile: "../../mix.lock",
      elixir: "~> 1.7.2",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [:phoenix, :gettext] ++ Mix.compilers(),
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {SnitchApi.Application, []},
extra_applications: [:logger, :runtime_tools, :sentry]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
  # Project dependencies: the Phoenix web stack, the umbrella core app,
  # serialization, and authentication/HTTP/parsing helpers.
  defp deps do
    [
      {:phoenix, "~> 1.3.0"},
      {:phoenix_pubsub, "~> 1.0"},
      {:gettext, "~> 0.11"},
      {:cowboy, "~> 1.0"},
      # Umbrella sibling providing the domain logic.
      {:snitch_core, "~> 0.0.1", in_umbrella: true},
      {:plug, "~> 1.0"},
      {:corsica, "~> 1.0"},
      {:uuid, "~> 1.1"},
      {:ja_serializer, "~> 0.13.0"},
      {:recase, "~> 0.2"},
      # Authentication
      {:guardian, "~> 1.0"},
      {:inflex, "~> 1.10.0"},
      # http client
      {:httpoison, "~> 0.13"},
      {:snitch_payments, github: "aviacommerce/avia_payments", branch: "develop"},
      # html parser
      {:floki, "~> 0.20.0"},
      {:jason, "~> 1.1"}
    ]
  end
end
| 25.078125 | 82 | 0.535826 |
e8027b540652bb74cb66d479c828d4d10e7e8924 | 1,016 | exs | Elixir | nashelixir+elixir/apps/cqrs_bank/mix.exs | NashFP/cqrs-bank | 6071db4a0c5ec681554aea8bebe783310c5d1525 | [
"MIT"
] | 7 | 2017-05-17T00:36:50.000Z | 2021-11-02T03:02:03.000Z | nashelixir+elixir/apps/cqrs_bank/mix.exs | NashFP/cqrs-bank | 6071db4a0c5ec681554aea8bebe783310c5d1525 | [
"MIT"
] | 3 | 2017-05-17T00:31:28.000Z | 2017-05-18T04:50:37.000Z | joshcrews+elixir/apps/cqrs_bank/mix.exs | NashFP/cqrs-bank | 6071db4a0c5ec681554aea8bebe783310c5d1525 | [
"MIT"
] | 5 | 2017-05-16T23:13:19.000Z | 2021-12-02T06:45:25.000Z | defmodule CqrsBank.Mixfile do
use Mix.Project
  # Umbrella app configuration; build, deps and lockfile are shared at
  # the umbrella root.
  def project do
    [app: :cqrs_bank,
     version: "0.1.0",
     build_path: "../../_build",
     config_path: "../../config/config.exs",
     deps_path: "../../deps",
     lockfile: "../../mix.lock",
     elixir: "~> 1.4",
     build_embedded: Mix.env == :prod,
     start_permanent: Mix.env == :prod,
     deps: deps()]
  end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
# Specify extra applications you'll use from Erlang/Elixir
[extra_applications: [:logger],
mod: {CqrsBank.Application, []}]
end
# Dependencies can be Hex packages:
#
# {:my_dep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:my_dep, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
#
# To depend on another app inside the umbrella:
#
# {:my_app, in_umbrella: true}
#
# Type "mix help deps" for more examples and options
  # No external dependencies yet; see the commented examples above for
  # the supported formats.
  defp deps do
    []
  end
end
| 23.627907 | 79 | 0.606299 |
e802d90a2f0d2288bba2957a2faf640f70f9e13a | 3,071 | ex | Elixir | lib/join_maker.ex | m0rt3nlund/query_builder | 68bfdf7b642ded1a9056a85776c3fc5296b2b7b7 | [
"Apache-2.0"
] | 47 | 2019-12-20T08:35:20.000Z | 2022-03-23T15:11:45.000Z | lib/join_maker.ex | m0rt3nlund/query_builder | 68bfdf7b642ded1a9056a85776c3fc5296b2b7b7 | [
"Apache-2.0"
] | 7 | 2020-05-14T09:25:53.000Z | 2021-10-01T04:58:22.000Z | lib/join_maker.ex | m0rt3nlund/query_builder | 68bfdf7b642ded1a9056a85776c3fc5296b2b7b7 | [
"Apache-2.0"
] | 4 | 2020-04-21T13:54:46.000Z | 2021-08-12T22:15:22.000Z | defmodule QueryBuilder.JoinMaker do
@moduledoc false
require Ecto.Query
@doc ~S"""
Options may be:
* `:mode`: if set to `:if_preferable`, schemas are joined only if it is better
performance-wise; this happens only for one case: when the association has a
one-to-one cardinality, it is better to join and include the association's result
in the result set of the query, rather than emitting a new DB query.
* `:type`: see `Ecto.Query.join/5`'s qualifier argument for possible values.
"""
  # Entry point: walks `assoc_list`, adding joins to `ecto_query` as needed.
  # The original association list is passed through unchanged as the last
  # argument so join filters can be resolved against the full list.
  def make_joins(ecto_query, assoc_list) do
    do_make_joins(ecto_query, assoc_list, [], [], assoc_list)
    # returns {ecto_query, new_assoc_list}
  end
  # Base case: nothing left to process at this level; return the query and
  # the assoc list accumulated so far.
  defp do_make_joins(ecto_query, [], _, new_assoc_list, _original_assoc_list),
    do: {ecto_query, new_assoc_list}
  # Recursive case: possibly join `assoc_data` into the query, recurse into
  # its nested associations (only when the parent was actually joined), then
  # process the remaining siblings.
  defp do_make_joins(ecto_query, [assoc_data | tail], bindings, new_assoc_list, original_assoc_list) do
    {ecto_query, assoc_data, bindings} =
      maybe_join(ecto_query, assoc_data, bindings, original_assoc_list)
    # Nested associations are only joinable once their parent has been joined.
    {ecto_query, nested_assocs} =
      if assoc_data.has_joined do
        do_make_joins(ecto_query, assoc_data.nested_assocs, bindings, [], original_assoc_list)
      else
        {ecto_query, assoc_data.nested_assocs}
      end
    # Record the (possibly updated) nested assocs back on this node.
    assoc_data = %{assoc_data | nested_assocs: nested_assocs}
    {ecto_query, new_assoc_list} =
      do_make_joins(ecto_query, tail, bindings, new_assoc_list, original_assoc_list)
    {ecto_query, [assoc_data | new_assoc_list]}
  end
  # Skip joining a to-many association whose join is only wanted when the
  # cardinality is one (presumably the `:if_preferable` mode described in the
  # @doc above — joining here would not improve performance).
  defp maybe_join(ecto_query, %{cardinality: :many, join_type: :inner_if_cardinality_is_one} = assoc_data, bindings, _original_assoc_list),
    do: {ecto_query, assoc_data, bindings}
  # Joins `assoc_data` into the query unless its binding is already present.
  # Returns {updated_query, updated_assoc_data, updated_bindings}.
  defp maybe_join(ecto_query, assoc_data, bindings, original_assoc_list) do
    %{
      source_binding: source_binding,
      source_schema: source_schema,
      assoc_binding: assoc_binding,
      assoc_field: assoc_field,
      assoc_schema: assoc_schema,
      join_type: join_type
    } = assoc_data
    # Guard against duplicate named bindings in the Ecto query itself.
    if Ecto.Query.has_named_binding?(ecto_query, assoc_binding) do
      raise "has already joined"
    end
    # Only :left and :inner are supported; anything else falls back to :inner.
    join_type = if(join_type == :left, do: :left, else: :inner)
    # Build the ON condition by AND-ing all join filters; [] means "no filters"
    # (the generated _join helpers accept an empty list).
    on =
      if assoc_data.join_filters != [] do
        assoc_data.join_filters
        |> Enum.map(fn [filters, or_filters] ->
          QueryBuilder.Query.Where.build_dynamic_query(ecto_query, original_assoc_list, filters, or_filters)
        end)
        |> Enum.reduce(&Ecto.Query.dynamic(^&1 and ^&2))
      else
        []
      end
    unless Enum.member?(bindings, assoc_binding) do
      # see schema.ex's module doc in order to understand what's going on here
      ecto_query =
        if String.contains?(to_string(assoc_binding), "__") do
          source_schema._join(ecto_query, join_type, source_binding, assoc_field, on)
        else
          assoc_schema._join(ecto_query, join_type, source_binding, assoc_field, on)
        end
      {
        ecto_query,
        %{assoc_data | has_joined: true},
        [assoc_binding | bindings]
      }
    else
      # Binding already tracked locally: nothing to do.
      {ecto_query, assoc_data, bindings}
    end
  end
end
| 34.505618 | 139 | 0.694236 |
e802ef3a1922b65643aa51ab3dbbe6d8d3d22126 | 23,792 | exs | Elixir | installer/test/phx_new_umbrella_test.exs | TheMushrr00m/phoenix | 5f15107d6e102d5ceafc92b1ec1c397b601b9b66 | [
"MIT"
] | null | null | null | installer/test/phx_new_umbrella_test.exs | TheMushrr00m/phoenix | 5f15107d6e102d5ceafc92b1ec1c397b601b9b66 | [
"MIT"
] | null | null | null | installer/test/phx_new_umbrella_test.exs | TheMushrr00m/phoenix | 5f15107d6e102d5ceafc92b1ec1c397b601b9b66 | [
"MIT"
] | null | null | null | Code.require_file "mix_helper.exs", __DIR__
defmodule Mix.Tasks.Phx.New.UmbrellaTest do
use ExUnit.Case, async: false
import MixHelper
@app "phx_umb"
  # Per-test setup: pre-seed the Mix process shell with a "no" answer for the
  # deps-install prompt and derive a unique tmp dir name from the test name.
  setup config do
    # The shell asks to install deps.
    # We will politely say not.
    decline_prompt()
    {:ok, tmp_dir: to_string(config.test)}
  end
  # Queue a `false` answer for the next Mix shell yes?/1 prompt;
  # Mix.Shell.Process reads prompt answers from the test process mailbox.
  defp decline_prompt do
    send self(), {:mix_shell_input, :yes?, false}
  end
defp root_path(app, path \\ "") do
Path.join(["#{app}_umbrella", path])
end
defp app_path(app, path) do
Path.join(["#{app}_umbrella/apps/#{app}", path])
end
defp web_path(app, path) do
Path.join(["#{app}_umbrella/apps/#{app}_web", path])
end
  # Full happy-path generation: runs the generator with only --umbrella and
  # checks the layout of both child apps, the shared config, webpack assets,
  # declared deps, Ecto files, shell prompts/instructions, channels and gettext.
  test "new with umbrella and defaults" do
    in_tmp "new with umbrella and defaults", fn ->
      Mix.Tasks.Phx.New.run([@app, "--umbrella"])
      assert_file root_path(@app, "README.md")
      assert_file root_path(@app, ".gitignore")
      assert_file app_path(@app, "README.md")
      assert_file app_path(@app, ".gitignore"), "#{@app}-*.tar"
      assert_file web_path(@app, "README.md")
      assert_file root_path(@app, "mix.exs"), fn file ->
        assert file =~ "apps_path: \"apps\""
      end
      # The context app shares build/config/deps/lockfile with the umbrella root.
      assert_file app_path(@app, "mix.exs"), fn file ->
        assert file =~ "app: :phx_umb"
        assert file =~ ~S{build_path: "../../_build"}
        assert file =~ ~S{config_path: "../../config/config.exs"}
        assert file =~ ~S{deps_path: "../../deps"}
        assert file =~ ~S{lockfile: "../../mix.lock"}
      end
      assert_file root_path(@app, "config/config.exs"), fn file ->
        assert file =~ ~S[import_config "#{Mix.env()}.exs"]
        assert file =~ "config :phoenix, :json_library, Jason"
        assert file =~ "ecto_repos: [PhxUmb.Repo]"
        assert file =~ ":phx_umb_web, PhxUmbWeb.Endpoint"
        assert file =~ "generators: [context_app: :phx_umb]\n"
        refute file =~ "namespace"
      end
      assert_file root_path(@app, "config/dev.exs"), fn file ->
        assert file =~ ~r/watchers: \[\s+node:/
        assert file =~ "lib/#{@app}_web/{live,views}/.*(ex)"
        assert file =~ "lib/#{@app}_web/templates/.*(eex)"
      end
      assert_file root_path(@app, "config/prod.exs"), fn file ->
        assert file =~ "port: 80"
        assert file =~ ":inet6"
      end
      assert_file app_path(@app, ".formatter.exs"), fn file ->
        assert file =~ "import_deps: [:ecto]"
        assert file =~ "inputs: [\"*.{ex,exs}\", \"priv/*/seeds.exs\", \"{config,lib,test}/**/*.{ex,exs}\"]"
        assert file =~ "subdirectories: [\"priv/*/migrations\"]"
      end
      assert_file web_path(@app, ".formatter.exs"), fn file ->
        assert file =~ "inputs: [\"*.{ex,exs}\", \"{config,lib,test}/**/*.{ex,exs}\"]"
        refute file =~ "import_deps: [:ecto]"
        refute file =~ "subdirectories:"
      end
      assert_file app_path(@app, "lib/#{@app}/application.ex"), ~r/defmodule PhxUmb.Application do/
      assert_file app_path(@app, "lib/#{@app}/application.ex"), ~r/PhxUmb.Repo/
      assert_file app_path(@app, "lib/#{@app}.ex"), ~r/defmodule PhxUmb do/
      assert_file app_path(@app, "mix.exs"), ~r/mod: {PhxUmb.Application, \[\]}/
      assert_file app_path(@app, "test/test_helper.exs")
      assert_file web_path(@app, "lib/#{@app}_web/application.ex"), ~r/defmodule PhxUmbWeb.Application do/
      assert_file web_path(@app, "mix.exs"), fn file ->
        assert file =~ "mod: {PhxUmbWeb.Application, []}"
        assert file =~ "{:jason, \"~> 1.0\"}"
      end
      assert_file web_path(@app, "lib/#{@app}_web.ex"), fn file ->
        assert file =~ "defmodule PhxUmbWeb do"
        assert file =~ ~r/use Phoenix.View,\s+root: "lib\/phx_umb_web\/templates"/
      end
      assert_file web_path(@app, "lib/#{@app}_web/endpoint.ex"), ~r/defmodule PhxUmbWeb.Endpoint do/
      assert_file web_path(@app, "test/#{@app}_web/controllers/page_controller_test.exs")
      assert_file web_path(@app, "test/#{@app}_web/views/page_view_test.exs")
      assert_file web_path(@app, "test/#{@app}_web/views/error_view_test.exs")
      assert_file web_path(@app, "test/#{@app}_web/views/layout_view_test.exs")
      assert_file web_path(@app, "test/support/conn_case.ex")
      assert_file web_path(@app, "test/test_helper.exs")
      assert_file web_path(@app, "lib/#{@app}_web/controllers/page_controller.ex"),
        ~r/defmodule PhxUmbWeb.PageController/
      assert_file web_path(@app, "lib/#{@app}_web/views/page_view.ex"),
        ~r/defmodule PhxUmbWeb.PageView/
      assert_file web_path(@app, "lib/#{@app}_web/router.ex"), "defmodule PhxUmbWeb.Router"
      assert_file web_path(@app, "lib/#{@app}_web/templates/layout/app.html.eex"),
        "<title>PhxUmb · Phoenix Framework</title>"
      assert_file web_path(@app, "test/#{@app}_web/views/page_view_test.exs"),
        "defmodule PhxUmbWeb.PageViewTest"
      # webpack
      assert_file web_path(@app, ".gitignore"), "/assets/node_modules/"
      assert_file web_path(@app, ".gitignore"), "#{@app}_web-*.tar"
      assert_file( web_path(@app, ".gitignore"), ~r/\n$/)
      assert_file web_path(@app, "assets/webpack.config.js"), "js/app.js"
      assert_file web_path(@app, "assets/.babelrc"), "env"
      assert_file web_path(@app, "assets/static/favicon.ico")
      assert_file web_path(@app, "assets/static/images/phoenix.png")
      assert_file web_path(@app, "assets/css/app.css")
      assert_file web_path(@app, "assets/css/phoenix.css")
      assert_file web_path(@app, "assets/js/app.js"),
        ~s[import socket from "./socket"]
      assert_file web_path(@app, "assets/js/socket.js"),
        ~s[import {Socket} from "phoenix"]
      assert_file web_path(@app, "assets/package.json"), fn file ->
        assert file =~ ~s["file:../../../deps/phoenix"]
        assert file =~ ~s["file:../../../deps/phoenix_html"]
      end
      # Static artifacts are only produced by a real webpack build, not by the generator.
      refute File.exists?(web_path(@app, "priv/static/css/app.css"))
      refute File.exists?(web_path(@app, "priv/static/css/phoenix.css"))
      refute File.exists?(web_path(@app, "priv/static/js/phoenix.js"))
      refute File.exists?(web_path(@app, "priv/static/js/app.js"))
      assert File.exists?(web_path(@app, "assets/vendor"))
      # web deps
      assert_file web_path(@app, "mix.exs"), fn file ->
        assert file =~ "{:phx_umb, in_umbrella: true}"
        assert file =~ "{:phoenix,"
        assert file =~ "{:phoenix_pubsub,"
        assert file =~ "{:gettext,"
        assert file =~ "{:plug_cowboy,"
      end
      # app deps
      assert_file web_path(@app, "mix.exs"), fn file ->
        assert file =~ "{:phoenix_ecto,"
        assert file =~ "{:jason,"
      end
      # Ecto
      config = ~r/config :phx_umb, PhxUmb.Repo,/
      assert_file root_path(@app, "config/dev.exs"), config
      assert_file root_path(@app, "config/test.exs"), config
      assert_file root_path(@app, "config/prod.secret.exs"), config
      assert_file app_path(@app, "mix.exs"), fn file ->
        assert file =~ "aliases: aliases()"
        assert file =~ "ecto.setup"
        assert file =~ "ecto.reset"
        assert file =~ "{:jason,"
      end
      assert_file app_path(@app, "lib/#{@app}/repo.ex"), ~r"defmodule PhxUmb.Repo"
      assert_file app_path(@app, "priv/repo/seeds.exs"), ~r"PhxUmb.Repo.insert!"
      assert_file app_path(@app, "test/support/data_case.ex"), ~r"defmodule PhxUmb.DataCase"
      assert_file app_path(@app, "priv/repo/migrations/.formatter.exs"), ~r"import_deps: \[:ecto_sql\]"
      # Install dependencies?
      assert_received {:mix_shell, :yes?, ["\nFetch and install dependencies?"]}
      # Instructions
      assert_received {:mix_shell, :info, ["\nWe are almost there" <> _ = msg]}
      assert msg =~ "$ cd phx_umb"
      assert msg =~ "$ mix deps.get"
      assert_received {:mix_shell, :info, ["Then configure your database in config/dev.exs" <> _]}
      assert_received {:mix_shell, :info, ["Start your Phoenix app" <> _]}
      # Channels
      assert File.exists?(web_path(@app, "/lib/#{@app}_web/channels"))
      assert_file web_path(@app, "lib/#{@app}_web/channels/user_socket.ex"), ~r"defmodule PhxUmbWeb.UserSocket"
      assert_file web_path(@app, "lib/#{@app}_web/endpoint.ex"), ~r"socket \"/socket\", PhxUmbWeb.UserSocket"
      # Gettext
      assert_file web_path(@app, "lib/#{@app}_web/gettext.ex"), ~r"defmodule PhxUmbWeb.Gettext"
      assert File.exists?(web_path(@app, "priv/gettext/errors.pot"))
      assert File.exists?(web_path(@app, "priv/gettext/en/LC_MESSAGES/errors.po"))
    end
  end
  # Verifies that --no-html, --no-webpack and --no-ecto each remove the
  # corresponding generated files, deps and config entries.
  test "new without defaults" do
    in_tmp "new without defaults", fn ->
      Mix.Tasks.Phx.New.run([@app, "--umbrella", "--no-html", "--no-webpack", "--no-ecto"])
      # No webpack
      assert_file web_path(@app, ".gitignore"), fn file ->
        assert file =~ ~r/\n$/
        refute file =~ "/assets/node_modules/"
      end
      assert_file root_path(@app, "config/dev.exs"), ~r/watchers: \[\]/
      # No webpack & No HTML
      refute_file web_path(@app, "priv/static/css/app.css")
      refute_file web_path(@app, "priv/static/css/phoenix.css")
      refute_file web_path(@app, "priv/static/favicon.ico")
      refute_file web_path(@app, "priv/static/images/phoenix.png")
      refute_file web_path(@app, "priv/static/js/phoenix.js")
      refute_file web_path(@app, "priv/static/js/app.js")
      # No Ecto
      config = ~r/config :phx_umb, PhxUmb.Repo,/
      refute File.exists?(app_path(@app, "lib/#{@app}_web/repo.ex"))
      assert_file app_path(@app, "mix.exs"), &refute(&1 =~ ~r":phoenix_ecto")
      assert_file root_path(@app, "config/config.exs"), fn file ->
        refute file =~ "config :phx_blog_web, :generators"
        refute file =~ "ecto_repos:"
      end
      assert_file root_path(@app, "config/dev.exs"), &refute(&1 =~ config)
      assert_file root_path(@app, "config/test.exs"), &refute(&1 =~ config)
      assert_file root_path(@app, "config/prod.secret.exs"), &refute(&1 =~ config)
      assert_file app_path(@app, "lib/#{@app}/application.ex"), ~r/Supervisor.start_link\(/
      # No HTML
      assert File.exists?(web_path(@app, "test/#{@app}_web/controllers"))
      assert File.exists?(web_path(@app, "lib/#{@app}_web/controllers"))
      assert File.exists?(web_path(@app, "lib/#{@app}_web/views"))
      refute File.exists?(web_path(@app, "test/controllers/pager_controller_test.exs"))
      refute File.exists?(web_path(@app, "test/views/layout_view_test.exs"))
      refute File.exists?(web_path(@app, "test/views/page_view_test.exs"))
      refute File.exists?(web_path(@app, "lib/#{@app}_web/controllers/page_controller.ex"))
      refute File.exists?(web_path(@app, "lib/#{@app}_web/templates/layout/app.html.eex"))
      refute File.exists?(web_path(@app, "lib/#{@app}_web/templates/page/index.html.eex"))
      refute File.exists?(web_path(@app, "lib/#{@app}_web/views/layout_view.ex"))
      refute File.exists?(web_path(@app, "lib/#{@app}_web/views/page_view.ex"))
      assert_file web_path(@app, "mix.exs"), &refute(&1 =~ ~r":phoenix_html")
      assert_file web_path(@app, "mix.exs"), &refute(&1 =~ ~r":phoenix_live_reload")
      assert_file web_path(@app, "lib/#{@app}_web/endpoint.ex"),
        &refute(&1 =~ ~r"Phoenix.LiveReloader")
      assert_file web_path(@app, "lib/#{@app}_web/endpoint.ex"),
        &refute(&1 =~ ~r"Phoenix.LiveReloader.Socket")
      assert_file web_path(@app, "lib/#{@app}_web/views/error_view.ex"), ~r".json"
      assert_file web_path(@app, "lib/#{@app}_web/router.ex"), &refute(&1 =~ ~r"pipeline :browser")
    end
  end
  # Without webpack, the generator copies pre-built static assets directly
  # into priv/static instead of creating an assets/ build pipeline.
  test "new with no_webpack" do
    in_tmp "new with no_webpack", fn ->
      Mix.Tasks.Phx.New.run([@app, "--umbrella", "--no-webpack"])
      assert_file web_path(@app, ".gitignore")
      assert_file( web_path(@app, ".gitignore"), ~r/\n$/)
      assert_file web_path(@app, "priv/static/css/app.css")
      assert_file web_path(@app, "priv/static/css/phoenix.css")
      assert_file web_path(@app, "priv/static/favicon.ico")
      assert_file web_path(@app, "priv/static/images/phoenix.png")
      assert_file web_path(@app, "priv/static/js/phoenix.js")
      assert_file web_path(@app, "priv/static/js/app.js")
    end
  end
  # --binary-id must be recorded in the shared generators config.
  test "new with binary_id" do
    in_tmp "new with binary_id", fn ->
      Mix.Tasks.Phx.New.run([@app, "--umbrella", "--binary-id"])
      assert_file root_path(@app, "config/config.exs"),
                  ~r/generators: \[context_app: :phx_umb, binary_id: true\]/
    end
  end
  # Mixed-case app names are kept for app/dir names, but the database name
  # is downcased.
  test "new with uppercase" do
    in_tmp "new with uppercase", fn ->
      Mix.Tasks.Phx.New.run(["phxUmb", "--umbrella"])
      assert_file "phxUmb_umbrella/README.md"
      assert_file "phxUmb_umbrella/apps/phxUmb/mix.exs", fn file ->
        assert file =~ "app: :phxUmb"
      end
      assert_file "phxUmb_umbrella/apps/phxUmb_web/mix.exs", fn file ->
        assert file =~ "app: :phxUmb_web"
      end
      assert_file "phxUmb_umbrella/config/dev.exs", fn file ->
        assert file =~ ~r/config :phxUmb, PhxUmb.Repo,/
        assert file =~ "database: \"phxumb_dev\""
      end
    end
  end
  # Path, OTP app name and root module can all be overridden independently;
  # the module name drives both app namespaces in config and generated code.
  test "new with path, app and module" do
    in_tmp "new with path, app and module", fn ->
      project_path = Path.join(File.cwd!, "custom_path")
      Mix.Tasks.Phx.New.run([project_path, "--umbrella", "--app", @app, "--module", "PhoteuxBlog"])
      assert_file "custom_path_umbrella/apps/phx_umb/mix.exs", ~r/app: :phx_umb/
      assert_file "custom_path_umbrella/apps/phx_umb_web/lib/phx_umb_web/endpoint.ex", ~r/app: :#{@app}_web/
      assert_file "custom_path_umbrella/apps/phx_umb_web/lib/#{@app}_web.ex", ~r/use Phoenix.Controller, namespace: PhoteuxBlogWeb/
      assert_file "custom_path_umbrella/apps/phx_umb/lib/phx_umb/application.ex", ~r/defmodule PhoteuxBlog.Application/
      assert_file "custom_path_umbrella/apps/phx_umb/mix.exs", ~r/mod: {PhoteuxBlog.Application, \[\]}/
      assert_file "custom_path_umbrella/apps/phx_umb_web/lib/phx_umb_web/application.ex", ~r/defmodule PhoteuxBlogWeb.Application/
      assert_file "custom_path_umbrella/apps/phx_umb_web/mix.exs", ~r/mod: {PhoteuxBlogWeb.Application, \[\]}/
      assert_file "custom_path_umbrella/config/config.exs", ~r/namespace: PhoteuxBlogWeb/
      assert_file "custom_path_umbrella/config/config.exs", ~r/namespace: PhoteuxBlog/
    end
  end
  # Generating a new umbrella inside an existing umbrella's apps/ dir must
  # be rejected.
  test "new inside umbrella" do
    in_tmp "new inside umbrella", fn ->
      File.write! "mix.exs", MixHelper.umbrella_mixfile_contents()
      File.mkdir! "apps"
      File.cd! "apps", fn ->
        assert_raise Mix.Error, "Unable to nest umbrella project within apps", fn ->
          Mix.Tasks.Phx.New.run([@app, "--umbrella"])
        end
      end
    end
  end
  # With no --database flag, Postgres is the default adapter across deps,
  # repo module, env configs and the SQL sandbox test helpers.
  test "new defaults to pg adapter" do
    in_tmp "new defaults to pg adapter", fn ->
      app = "custom_path"
      project_path = Path.join(File.cwd!, app)
      Mix.Tasks.Phx.New.run([project_path, "--umbrella"])
      assert_file app_path(app, "mix.exs"), ":postgrex"
      assert_file app_path(app, "lib/custom_path/repo.ex"), "Ecto.Adapters.Postgres"
      assert_file root_path(app, "config/dev.exs"), [~r/username: "postgres"/, ~r/password: "postgres"/, ~r/hostname: "localhost"/]
      assert_file root_path(app, "config/test.exs"), [~r/username: "postgres"/, ~r/password: "postgres"/, ~r/hostname: "localhost"/]
      assert_file root_path(app, "config/prod.secret.exs"), [~r/url: database_url/]
      assert_file web_path(app, "test/support/conn_case.ex"), "Ecto.Adapters.SQL.Sandbox.checkout"
      assert_file web_path(app, "test/support/channel_case.ex"), "Ecto.Adapters.SQL.Sandbox.checkout"
    end
  end
  # --database mysql switches the driver dep to :myxql and the adapter to
  # Ecto.Adapters.MyXQL, with MySQL default credentials.
  test "new with mysql adapter" do
    in_tmp "new with mysql adapter", fn ->
      app = "custom_path"
      project_path = Path.join(File.cwd!, app)
      Mix.Tasks.Phx.New.run([project_path, "--umbrella", "--database", "mysql"])
      assert_file app_path(app, "mix.exs"), ":myxql"
      assert_file app_path(app, "lib/custom_path/repo.ex"), "Ecto.Adapters.MyXQL"
      assert_file root_path(app, "config/dev.exs"), [~r/username: "root"/, ~r/password: ""/]
      assert_file root_path(app, "config/test.exs"), [~r/username: "root"/, ~r/password: ""/]
      assert_file root_path(app, "config/prod.secret.exs"), [~r/url: database_url/]
      assert_file web_path(app, "test/support/conn_case.ex"), "Ecto.Adapters.SQL.Sandbox.checkout"
      assert_file web_path(app, "test/support/channel_case.ex"), "Ecto.Adapters.SQL.Sandbox.checkout"
    end
  end
  # Unknown --database values raise a Mix.Error naming the bad value.
  test "new with invalid database adapter" do
    in_tmp "new with invalid database adapter", fn ->
      project_path = Path.join(File.cwd!, "custom_path")
      assert_raise Mix.Error, ~s(Unknown database "invalid"), fn ->
        Mix.Tasks.Phx.New.run([project_path, "--umbrella", "--database", "invalid"])
      end
    end
  end
  # App names must start with a letter; module names must be valid aliases
  # and must not collide with already-loaded modules (e.g. String, Mix).
  test "new with invalid args" do
    assert_raise Mix.Error, ~r"Application name must start with a letter and ", fn ->
      Mix.Tasks.Phx.New.run ["007invalid", "--umbrella"]
    end
    assert_raise Mix.Error, ~r"Application name must start with a letter and ", fn ->
      Mix.Tasks.Phx.New.run ["valid1", "--app", "007invalid", "--umbrella"]
    end
    assert_raise Mix.Error, ~r"Module name must be a valid Elixir alias", fn ->
      Mix.Tasks.Phx.New.run ["valid2", "--module", "not.valid", "--umbrella"]
    end
    assert_raise Mix.Error, ~r"Module name \w+ is already taken", fn ->
      Mix.Tasks.Phx.New.run ["string", "--umbrella"]
    end
    assert_raise Mix.Error, ~r"Module name \w+ is already taken", fn ->
      Mix.Tasks.Phx.New.run ["valid3", "--app", "mix", "--umbrella"]
    end
    assert_raise Mix.Error, ~r"Module name \w+ is already taken", fn ->
      Mix.Tasks.Phx.New.run ["valid4", "--module", "String", "--umbrella"]
    end
  end
  # A single-dash long option ("-database") is rejected as an invalid switch.
  test "invalid options" do
    assert_raise Mix.Error, ~r/Invalid option: -d/, fn ->
      Mix.Tasks.Phx.New.run(["valid5", "-database", "mysql", "--umbrella"])
    end
  end
  # phx.new.ecto is only valid inside an umbrella's apps/ directory; running
  # it from the original cwd or from the umbrella root must raise.
  describe "ecto task" do
    test "can only be run within an umbrella app dir", %{tmp_dir: tmp_dir} do
      in_tmp tmp_dir, fn ->
        cwd = File.cwd!()
        umbrella_path = root_path(@app)
        Mix.Tasks.Phx.New.run([@app, "--umbrella"])
        flush()
        for dir <- [cwd, umbrella_path] do
          File.cd!(dir, fn ->
            assert_raise Mix.Error, ~r"The ecto task can only be run within an umbrella's apps directory", fn ->
              Mix.Tasks.Phx.New.Ecto.run(["valid"])
            end
          end)
        end
      end
    end
  end
  # phx.new.web: must be run inside an umbrella's apps/ directory, and
  # generates only the web app (endpoint, router, views, assets, channels,
  # gettext) without repo/seeds/data_case files.
  describe "web task" do
    test "can only be run within an umbrella app dir", %{tmp_dir: tmp_dir} do
      in_tmp tmp_dir, fn ->
        cwd = File.cwd!()
        umbrella_path = root_path(@app)
        Mix.Tasks.Phx.New.run([@app, "--umbrella"])
        flush()
        for dir <- [cwd, umbrella_path] do
          File.cd!(dir, fn ->
            assert_raise Mix.Error, ~r"The web task can only be run within an umbrella's apps directory", fn ->
              Mix.Tasks.Phx.New.Web.run(["valid"])
            end
          end)
        end
      end
    end
    test "generates web-only files", %{tmp_dir: tmp_dir} do
      in_tmp tmp_dir, fn ->
        umbrella_path = root_path(@app)
        Mix.Tasks.Phx.New.run([@app, "--umbrella"])
        flush()
        File.cd!(Path.join(umbrella_path, "apps"))
        decline_prompt()
        Mix.Tasks.Phx.New.Web.run(["another"])
        assert_file "another/README.md"
        assert_file "another/mix.exs", fn file ->
          assert file =~ "app: :another"
          assert file =~ "deps_path: \"../../deps\""
          assert file =~ "lockfile: \"../../mix.lock\""
        end
        assert_file "../config/config.exs", fn file ->
          assert file =~ "ecto_repos: [Another.Repo]"
        end
        assert_file "../config/prod.exs", fn file ->
          assert file =~ "port: 80"
          assert file =~ ":inet6"
        end
        assert_file "another/lib/another/application.ex", ~r/defmodule Another.Application do/
        assert_file "another/mix.exs", ~r/mod: {Another.Application, \[\]}/
        assert_file "another/lib/another.ex", ~r/defmodule Another do/
        assert_file "another/lib/another/endpoint.ex", ~r/defmodule Another.Endpoint do/
        assert_file "another/test/another/controllers/page_controller_test.exs"
        assert_file "another/test/another/views/page_view_test.exs"
        assert_file "another/test/another/views/error_view_test.exs"
        assert_file "another/test/another/views/layout_view_test.exs"
        assert_file "another/test/support/conn_case.ex"
        assert_file "another/test/test_helper.exs"
        assert_file "another/lib/another/controllers/page_controller.ex",
          ~r/defmodule Another.PageController/
        assert_file "another/lib/another/views/page_view.ex",
          ~r/defmodule Another.PageView/
        assert_file "another/lib/another/router.ex", "defmodule Another.Router"
        assert_file "another/lib/another.ex", "defmodule Another"
        assert_file "another/lib/another/templates/layout/app.html.eex",
          "<title>Another · Phoenix Framework</title>"
        # webpack
        assert_file "another/.gitignore", "/assets/node_modules"
        assert_file "another/.gitignore", ~r/\n$/
        assert_file "another/assets/webpack.config.js", "js/app.js"
        assert_file "another/assets/.babelrc", "env"
        assert_file "another/assets/static/favicon.ico"
        assert_file "another/assets/static/images/phoenix.png"
        assert_file "another/assets/css/app.css"
        assert_file "another/assets/css/phoenix.css"
        assert_file "another/assets/js/app.js",
          ~s[import socket from "./socket"]
        assert_file "another/assets/js/socket.js",
          ~s[import {Socket} from "phoenix"]
        assert_file "another/assets/package.json", fn file ->
          assert file =~ ~s["file:../../../deps/phoenix"]
          assert file =~ ~s["file:../../../deps/phoenix_html"]
        end
        refute File.exists? "another/priv/static/css/app.css"
        refute File.exists? "another/priv/static/js/phoenix.js"
        refute File.exists? "another/priv/static/css/phoenix.css"
        refute File.exists? "another/priv/static/js/app.js"
        assert File.exists?("another/assets/vendor")
        # Ecto
        assert_file "another/mix.exs", fn file ->
          assert file =~ "{:phoenix_ecto,"
        end
        assert_file "another/lib/another.ex", ~r"defmodule Another"
        refute_file "another/lib/another/repo.ex"
        refute_file "another/priv/repo/seeds.exs"
        refute_file "another/test/support/data_case.ex"
        # Install dependencies?
        assert_received {:mix_shell, :yes?, ["\nFetch and install dependencies?"]}
        # Instructions
        assert_received {:mix_shell, :info, ["\nWe are almost there" <> _ = msg]}
        assert msg =~ "$ cd another"
        assert msg =~ "$ mix deps.get"
        refute_received {:mix_shell, :info, ["Then configure your database" <> _]}
        assert_received {:mix_shell, :info, ["Start your Phoenix app" <> _]}
        # Channels
        assert File.exists?("another/lib/another/channels")
        assert_file "another/lib/another/channels/user_socket.ex", ~r"defmodule Another.UserSocket"
        assert_file "another/lib/another/endpoint.ex", ~r"socket \"/socket\", Another.UserSocket"
        # Gettext
        assert_file "another/lib/another/gettext.ex", ~r"defmodule Another.Gettext"
        assert File.exists?("another/priv/gettext/errors.pot")
        assert File.exists?("another/priv/gettext/en/LC_MESSAGES/errors.po")
      end
    end
  end
end
| 42.259325 | 132 | 0.628867 |
e802f18334b242f12aab36b7b2bfea38e986714e | 421 | exs | Elixir | apps/cronitex_web/test/cronitex_web/views/error_view_test.exs | alayers2/cronitex | cf8da9553b8e93b9171f5e9a0f0a08a24a324689 | [
"MIT"
] | 1 | 2020-11-05T15:38:53.000Z | 2020-11-05T15:38:53.000Z | apps/cronitex_web/test/cronitex_web/views/error_view_test.exs | alayers2/cronitex | cf8da9553b8e93b9171f5e9a0f0a08a24a324689 | [
"MIT"
] | 36 | 2020-10-24T01:28:42.000Z | 2022-02-07T11:11:37.000Z | apps/cronitex_web/test/cronitex_web/views/error_view_test.exs | alayers2/cronitex | cf8da9553b8e93b9171f5e9a0f0a08a24a324689 | [
"MIT"
] | null | null | null | defmodule CronitexWeb.ErrorViewTest do
use CronitexWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
  # The 404 template renders the canonical "Not Found" text.
  test "renders 404.html" do
    assert render_to_string(CronitexWeb.ErrorView, "404.html", []) == "Not Found"
  end
  # The 500 template renders the canonical "Internal Server Error" text.
  test "renders 500.html" do
    assert render_to_string(CronitexWeb.ErrorView, "500.html", []) == "Internal Server Error"
  end
end
| 28.066667 | 93 | 0.736342 |
e803116279acd9973f80e4975dc7110e70da4301 | 123 | exs | Elixir | elixir/47.exs | merxer/kata | 5dbbca8b4173029f9311398148de9437a329cf9a | [
"MIT"
] | null | null | null | elixir/47.exs | merxer/kata | 5dbbca8b4173029f9311398148de9437a329cf9a | [
"MIT"
] | null | null | null | elixir/47.exs | merxer/kata | 5dbbca8b4173029f9311398148de9437a329cf9a | [
"MIT"
] | null | null | null | plusTwo = Enum.map [1,2,3,4], &(&1 + 2)
# Print the mapped list [3, 4, 5, 6].
plusTwo
|> IO.inspect()

# Enum.each/2 is run purely for its side effect (printing each element) and
# always returns :ok, so binding/evaluating its result was a no-op — invoke
# it directly instead.
Enum.each([1, 2, 3, 4], &IO.inspect/1)
| 15.375 | 49 | 0.601626 |
e8033a4102a4763163ee77e7b87bb2a3000e6659 | 295 | ex | Elixir | lib/earmark/alt_parser.ex | aforward-oss/earmark | b44d4817aa2b4047b07f91d633ae83ed27c695ed | [
"Apache-2.0"
] | null | null | null | lib/earmark/alt_parser.ex | aforward-oss/earmark | b44d4817aa2b4047b07f91d633ae83ed27c695ed | [
"Apache-2.0"
] | null | null | null | lib/earmark/alt_parser.ex | aforward-oss/earmark | b44d4817aa2b4047b07f91d633ae83ed27c695ed | [
"Apache-2.0"
] | 1 | 2019-11-23T12:09:14.000Z | 2019-11-23T12:09:14.000Z | # # Inspired by https://github.com/MFP/OcsiBlog/blob/master/simple_markup.ml
# # Trying to port it to Elixir and later adapt it maybe to line oriented
# # parsing (lines being pre processed in parallel and then _unified_ ino tokens
# # by the parser.
# defmodule Earmark.AltParser do
# end
| 32.777778 | 80 | 0.749153 |
e8036c4a995f50f5c4fd32a266835e5ae3420186 | 13,589 | ex | Elixir | lib/surface/translator/component_translator_helper.ex | alexandrubagu/surface | b8ef74444e53f8325fb793fa1399966c2a4b1e7a | [
"MIT"
] | null | null | null | lib/surface/translator/component_translator_helper.ex | alexandrubagu/surface | b8ef74444e53f8325fb793fa1399966c2a4b1e7a | [
"MIT"
] | null | null | null | lib/surface/translator/component_translator_helper.ex | alexandrubagu/surface | b8ef74444e53f8325fb793fa1399966c2a4b1e7a | [
"MIT"
] | null | null | null | defmodule Surface.Translator.ComponentTranslatorHelper do
@moduledoc false
alias Surface.Translator.SlotableTranslator
alias Surface.Translator.IOHelper
def add_render_call(renderer, args, has_children? \\ false) do
["<%= ", renderer, "(", Enum.join(args, ", "), ") ", maybe_add("do ", has_children?), "%>"]
end
def maybe_add(value, condition) do
if condition, do: value, else: ""
end
def add_require(mod_str) do
["<% require ", mod_str, " %>"]
end
def add_begin_context(_mod, mod_str) do
["<% {props, context} = begin_context(props, context, ", mod_str, ") %>"]
end
def add_end_context(_mod, mod_str) do
["<% context = end_context(context, ", mod_str, ") %><% _ = context %>"]
end
  # When `condition` is truthy, appends a catch-all content-function clause
  # that raises a descriptive error for any {prop, index, argument} combination
  # no generated clause matched; otherwise contributes nothing.
  def maybe_add_fallback_content(condition) do
    maybe_add(
      [
        "<% {prop, i, arg} -> %>",
        ~S[<%= raise "no matching content function for #{inspect(prop)}\##{i} with argument #{inspect(arg)}" %>]
      ],
      condition
    )
  end
  # For every attribute whose expression is a generator ("lhs <- rhs") bound
  # to a prop of type :list on `module`, collects %{prop_name => lhs_string}.
  defp find_bindings_from_lists(module, attributes) do
    # TODO: Warn if :binding is not defined and we have lhs
    for {name, {:attribute_expr, [expr], _}, _line} <- attributes,
        # Only expressions that split into exactly [lhs, rhs] qualify.
        [lhs, _] <- [String.split(expr, "<-")],
        prop_info = module.__get_prop__(String.to_atom(name)),
        prop_info.type == :list,
        into: %{} do
      {String.to_atom(name), String.trim(lhs)}
    end
  end
  # Extracts the `:let` directive, if present, as {bindings, line} where
  # bindings is a keyword list of {key, source_string} pairs parsed from the
  # directive expression. Returns {[], nil} when no :let directive exists.
  defp find_let_bindings(attributes) do
    case Enum.find(attributes, fn attr -> match?({":let", _, _}, attr) end) do
      {":let", {:attribute_expr, [expr], _}, %{line: line}} ->
        bindings =
          # Wrap in brackets so the expression parses as a keyword list.
          "[#{String.trim(expr)}]"
          |> Code.string_to_quoted!()
          |> List.flatten()
          |> Enum.map(fn {k, v} -> {k, Macro.to_string(v)} end)
        {bindings, line}
      _ ->
        {[], nil}
    end
  end
  # Translates a component's children into slot data: returns
  # {children_props, slots_meta_string, contents} where children_props are the
  # rendered per-slot prop lists, slots_meta records how many entries each slot
  # received, and contents are the generated content-function clauses.
  # Relies on get_slots_with_args/2 and validate_let_bindings!/6 (defined
  # elsewhere in this module — not visible in this chunk).
  def translate_children(mod_str, mod, attributes, directives, children, caller) do
    {parent_bindings, line} = find_let_bindings(directives)
    slots_with_args = get_slots_with_args(mod, attributes)
    # The parent-level :let bindings apply to the default slot.
    validate_let_bindings!(
      :default,
      parent_bindings,
      slots_with_args[:default] || [],
      mod,
      caller,
      line
    )
    opts = %{
      parent: mod,
      parent_mod_str: mod_str,
      parent_args: parent_bindings,
      slots_with_args: slots_with_args,
      caller: caller
    }
    init_slots_meta = %{__default__: %{size: 0}}
    # Fold every child through handle_child/2, then flush any trailing run of
    # default-slot content.
    {slots_props, slots_meta, contents, _temp_contents, _opts} =
      children
      |> Enum.reduce({%{}, init_slots_meta, [], [], opts}, &handle_child/2)
      |> maybe_add_default_content()
    children_props =
      for {prop_name, value} <- slots_props do
        # Values were accumulated in reverse; restore source order here.
        [to_string(prop_name), ": [", Enum.join(Enum.reverse(value), ", "), "]"]
      end
    {children_props, inspect(slots_meta), Enum.reverse(contents)}
  end
  # Translates a component's attributes into the string of an Elixir map
  # literal (optionally wrapped in put_default_props/2). Warns on properties
  # not declared by `mod`. Returns "%{}" when `mod` exports no props.
  # NOTE(review): String.to_atom/1 is called on template attribute names;
  # presumably safe because templates are compile-time input — confirm.
  def translate_attributes(attributes, mod, mod_str, space, caller, opts \\ []) do
    put_default_props = Keyword.get(opts, :put_default_props, true)
    if function_exported?(mod, :__props__, 0) do
      # Each entry is {key, translated_value, spaces, separator}; built in
      # reverse, so the head is the *last* source attribute.
      translated_values =
        Enum.reduce(attributes, [], fn {key, value, %{line: line, spaces: spaces}},
                                       translated_values ->
          key_atom = String.to_atom(key)
          prop = mod.__get_prop__(key_atom)
          if mod.__props__() != [] && !mod.__validate_prop__(key_atom) do
            message = "Unknown property \"#{key}\" for component <#{mod_str}>"
            IOHelper.warn(message, caller, &(&1 + line))
          end
          value = translate_value(prop[:type], key, value, caller, line)
          [{key, value, spaces, ","} | translated_values]
        end)
      # Drop the trailing comma from the last attribute (head of the list).
      translated_values =
        case translated_values do
          [{key, value, spaces, _} | rest] ->
            [{key, value, spaces, ""} | rest]
          _ ->
            translated_values
        end
      # Second reverse-fold restores original source order.
      translated_props =
        Enum.reduce(translated_values, [], fn {key, value, spaces, comma}, translated_props ->
          [translate_prop(key, value, spaces, comma) | translated_props]
        end)
      props = ["%{", translated_props, space, "}"]
      if put_default_props do
        ["put_default_props(", props, ", ", mod_str, ")"]
      else
        props
      end
    else
      "%{}"
    end
  end
  # :event props: expressions are wrapped in event_value/3; literal event
  # names are additionally recorded on the caller module (while it is still
  # open) so they can be validated later.
  def translate_value(:event, key, value, caller, line) do
    case value do
      {:attribute_expr, [expr], meta} ->
        {:attribute_expr, ["event_value(\"#{key}\", [#{expr}], assigns[:myself])"], meta}
      event ->
        if Module.open?(caller.module) do
          event_reference = {to_string(event), caller.line + line}
          Module.put_attribute(caller.module, :event_references, event_reference)
        end
        # TODO: We need to validate if this line is correct after the compiler is ready
        {:attribute_expr, ["event_value(\"#{key}\", \"#{event}\", assigns[:myself])"],
         %{line: line}}
    end
  end
  # :list props: if the expression is a generator ("lhs <- rhs"), only the
  # right-hand side (the list itself) is kept.
  def translate_value(:list, _key, {:attribute_expr, [expr], meta}, _caller, _line) do
    value =
      case String.split(expr, "<-") do
        [_lhs, value] ->
          value
        [value] ->
          value
      end
    {:attribute_expr, [value], meta}
  end
  # :css_class props are wrapped in css_class/1 with the expression as a list.
  def translate_value(:css_class, _key, {:attribute_expr, [expr], meta}, _caller, _line) do
    # TODO: Validate expression
    {:attribute_expr, ["css_class([", expr, "])"], meta}
  end
  # :keyword props are wrapped in keyword_value/2 (runtime validation helper).
  def translate_value(:keyword, key, {:attribute_expr, [expr], meta}, _caller, _line) do
    # TODO: Validate expression
    {:attribute_expr, ["keyword_value(\"", key, "\", ", expr, ")"], meta}
  end
  # :map props are wrapped in map_value/2 (runtime validation helper).
  def translate_value(:map, key, {:attribute_expr, [expr], meta}, _caller, _line) do
    # TODO: Validate expression
    {:attribute_expr, ["map_value(\"", key, "\", ", expr, ")"], meta}
  end
  # Mixed literal/expression values (a list of chunks): expressions become
  # string interpolations, literal chunks pass through unchanged.
  def translate_value(_type, _key, value, _caller, _line) when is_list(value) do
    for item <- value do
      case item do
        {:attribute_expr, [expr], _} ->
          ["\#{", expr, "}"]
        _ ->
          item
      end
    end
  end
  # Fallback: any other type/value combination is passed through untouched.
  def translate_value(_type, _key, value, _caller, _line) do
    value
  end
  # Characters treated as blank: space plus common control characters.
  @blanks ' \n\r\t\v\b\f\e\d\a'
  # True when the given iodata (nested lists/binaries/chars) contains only
  # blank characters. Clause order matters: list cases, then binary cases,
  # then the single-character guard, with a catch-all returning false.
  def blank?([]), do: true
  def blank?([h | t]), do: blank?(h) && blank?(t)
  def blank?(""), do: true
  def blank?(char) when char in @blanks, do: true
  def blank?(<<h, t::binary>>) when h in @blanks, do: blank?(t)
  def blank?(_), do: false
  # Handles a child node rendered by a slotable translator: flushes any
  # buffered default content, resolves the slot name, validates the slot and
  # its :let bindings, then appends this child's props and content to the
  # accumulator. Returns the updated {slots_props, slots_meta, contents,
  # temp_contents, opts} tuple with the temp buffer cleared.
  defp handle_child({_, _, _, %{translator: SlotableTranslator}} = child, acc) do
    {mod_str, attributes, children, meta} = child
    %{module: module, space: space, directives: directives, line: child_line} = meta
    {slots_props, slots_meta, contents, _, opts} = maybe_add_default_content(acc)
    # Slot name comes from the slotable module when present, otherwise from
    # the node's :slot metadata (template syntax).
    {slot_name, slot_name_line} =
      if module do
        {module.__slot_name__(), child_line}
      else
        case Map.get(meta, :slot) do
          {value, line} ->
            {value, line}
          _ ->
            {nil, nil}
        end
      end
    slot_args = opts.slots_with_args[slot_name] || []
    # Only args backed by a generator contribute default bindings.
    slot_args_with_generators = Enum.filter(slot_args, fn {_k, v} -> v end)
    {child_bindings, line} = find_let_bindings(directives)
    validate_slot!(slot_name, opts.parent, opts.parent_mod_str, opts.caller, slot_name_line)
    validate_let_bindings!(
      slot_name,
      child_bindings,
      slot_args,
      module || opts.parent,
      opts.caller,
      line
    )
    # Explicit :let bindings override the generator-provided ones.
    merged_args = Keyword.merge(slot_args_with_generators, child_bindings)
    args = args_to_map_string(merged_args)
    slots_meta = Map.put_new(slots_meta, slot_name, %{size: 0})
    meta = slots_meta[slot_name]
    # Emit a "<% {slot, index, args} -> %>" clause header followed by the
    # child's rendered content; `meta.size` is the per-slot occurrence index.
    content = [
      "<% {",
      inspect(slot_name),
      ", ",
      to_string(meta.size),
      ", ",
      args,
      "} -> %>",
      children
    ]
    slots_meta = Map.put(slots_meta, slot_name, %{size: meta.size + 1})
    slots_props = Map.put_new(slots_props, slot_name, [])
    props = translate_attributes(attributes, module, mod_str, space, opts.caller)
    slots_props = Map.update(slots_props, slot_name, [], &[props | &1])
    {slots_props, slots_meta, [content | contents], [], opts}
  end
defp handle_child(child, acc) do
{slots_props, slots_meta, contents, temp_contents, opts} = acc
{slots_props, slots_meta, contents, [child | temp_contents], opts}
end
  # Flushes the temporary content buffer into `contents`. Whitespace-only
  # buffers are carried through unwrapped; anything else is wrapped in a
  # "<% {:__default__, index, args} -> %>" clause header and the
  # :__default__ slot occurrence counter is bumped. Always clears the buffer.
  defp maybe_add_default_content(acc) do
    {slots_props, slots_meta, contents, temp_contents, opts} = acc
    {slots_meta, contents} =
      if blank?(temp_contents) do
        # NOTE(review): blank buffers are appended as-is (preserving the raw
        # whitespace) rather than dropped — confirm this is intended.
        {slots_meta, [Enum.reverse(temp_contents) | contents]}
      else
        meta = slots_meta[:__default__]
        args = args_to_map_string(opts.parent_args)
        content = [
          "<% {:__default__, ",
          to_string(meta.size),
          ", ",
          args,
          "} -> %>",
          Enum.reverse(temp_contents)
        ]
        slots_meta = update_in(slots_meta, [:__default__, :size], &(&1 + 1))
        {slots_meta, [content | contents]}
      end
    {slots_props, slots_meta, contents, [], opts}
  end
defp get_slots_with_args(mod, attributes) do
bindings = find_bindings_from_lists(mod, attributes)
for %{name: name, opts: opts} <- mod.__slots__(),
args_list = Keyword.get(opts, :props, []),
into: %{} do
args =
for %{name: name, generator: generator} <- args_list do
{name, bindings[generator]}
end
{name, args}
end
end
defp args_to_map_string(args) do
map_content =
args
|> Enum.map(fn {k, v} -> "#{k}: #{v}" end)
|> Enum.join(", ")
["%{", map_content, "}"]
end
  # Renders one prop as "key: value" iodata for a map literal, preserving the
  # original surrounding whitespace and trailing comma so generated code maps
  # back to the source layout. Expressions are parenthesized; integers and
  # booleans are rendered bare; everything else is double-quoted.
  defp translate_prop(key, value, spaces, comma) do
    rhs =
      case value do
        {:attribute_expr, value, _} ->
          expr = value |> IO.iodata_to_binary() |> String.trim()
          ["(", expr, ")"]
        value when is_integer(value) ->
          to_string(value)
        value when is_boolean(value) ->
          inspect(value)
        _ ->
          [~S("), value, ~S(")]
      end
    # Three captured spaces: around the original "=" sign — collapse the two
    # after the key into one (defaulting to a single space). Two spaces:
    # shorthand attribute (no "="); the second goes after the comma.
    case spaces do
      [space1, space2, space3] ->
        space = space2 <> space3
        space = if space != "", do: space, else: " "
        [space1, key, ":", space, rhs, comma]
      [space1, space2] ->
        [space1, key, ": ", rhs, comma, space2]
    end
  end
  # No owning module/parent to validate against — accept anything for now.
  defp validate_let_bindings!(_slot_name, _child_bindings, _slot_args, nil, _caller, _line) do
    # TODO
    :ok
  end
  # Raises a compile error when :let bindings reference props the slot does
  # not define; returns nil when everything checks out.
  defp validate_let_bindings!(slot_name, child_bindings, slot_args, mod, caller, line) do
    child_bindings_keys = Keyword.keys(child_bindings)
    slot_args_keys = Keyword.keys(slot_args)
    undefined_keys = child_bindings_keys -- slot_args_keys
    cond do
      # :let used on a slot that declares no props at all.
      child_bindings_keys != [] && slot_args_keys == [] ->
        message = """
        there's no `#{slot_name}` slot defined in `#{inspect(mod)}`.
        Directive :let can only be used on explicitly defined slots.
        Hint: You can define a `#{slot_name}` slot and its props using: \
        `slot #{slot_name}, props: #{inspect(child_bindings_keys)}\
        """
        IOHelper.compile_error(message, caller.file, caller.line + line)
      # At least one bound name is not a declared prop; report the first.
      undefined_keys != [] ->
        [prop | _] = undefined_keys
        message = """
        undefined prop `#{inspect(prop)}` for slot `#{slot_name}` in `#{inspect(mod)}`.
        Available props: #{inspect(slot_args_keys)}.
        Hint: You can define a new slot prop using the `props` option: \
        `slot #{slot_name}, props: [..., #{inspect(prop)}]`\
        """
        IOHelper.compile_error(message, caller.file, caller.line + line)
      true ->
        nil
    end
  end
  # Validates that `slot_name` exists on the parent component. A parent with
  # no slots at all is a hard compile error; an unknown slot name only warns,
  # suggesting close matches (Jaro similarity > 0.8) and listing what exists.
  defp validate_slot!(slot_name, parent_mod, parent_alias, caller, line) do
    cond do
      !function_exported?(parent_mod, :__slots__, 0) ->
        message = """
        parent component `#{inspect(parent_mod)}` does not define any slots. \
        Cannot insert component #{inspect(caller.module)} here.
        """
        IOHelper.compile_error(message, caller.file, caller.line)
      parent_mod.__get_slot__(slot_name) == nil ->
        parent_slots = parent_mod.__slots__() |> Enum.map(& &1.name)
        similar_slot_message =
          case did_you_mean(slot_name, parent_slots) do
            {similar, score} when score > 0.8 ->
              "\n\n  Did you mean #{inspect(to_string(similar))}?"
            _ ->
              ""
          end
        existing_slots_message =
          if parent_slots == [] do
            ""
          else
            slots = Enum.map(parent_slots, &to_string/1)
            available = list_to_string("slot:", "slots:", slots)
            "\n\n  Available #{available}"
          end
        message = """
        no slot "#{slot_name}" defined in parent component <#{parent_alias}>\
        #{similar_slot_message}\
        #{existing_slots_message}\
        """
        IOHelper.warn(message, caller, fn _ -> caller.line + line end)
      true ->
        :ok
    end
  end
defp did_you_mean(target, list) do
Enum.reduce(list, {nil, 0}, &max_similar(&1, to_string(target), &2))
end
defp max_similar(source, target, {_, current} = best) do
score = source |> to_string() |> String.jaro_distance(target)
if score < current, do: best, else: {source, score}
end
defp list_to_string(_singular, _plural, []) do
""
end
defp list_to_string(singular, _plural, [item]) do
"#{singular} #{inspect(item)}"
end
defp list_to_string(_singular, plural, items) do
[last | rest] = items |> Enum.map(&inspect/1) |> Enum.reverse()
"#{plural} #{rest |> Enum.reverse() |> Enum.join(", ")} and #{last}"
end
end
| 28.912766 | 112 | 0.588932 |
e803b3a31308238849c7f1e0b600253ace542ede | 1,774 | exs | Elixir | test/api_test.exs | Voronchuk/kraken | 4b5d310ebeab5ddd97039773dc0acd8b566a1236 | [
"MIT"
] | 2 | 2019-02-07T18:36:04.000Z | 2020-07-18T13:53:02.000Z | test/api_test.exs | Voronchuk/kraken | 4b5d310ebeab5ddd97039773dc0acd8b566a1236 | [
"MIT"
] | null | null | null | test/api_test.exs | Voronchuk/kraken | 4b5d310ebeab5ddd97039773dc0acd8b566a1236 | [
"MIT"
] | null | null | null | defmodule Kraken.ApiTest do
use ExUnit.Case
setup do
api_key = System.get_env("KRAKEN_API_KEY")
private_key = System.get_env("KRAKEN_PRIVATE_KEY")
{:ok, %{api_key: api_key, private_key: private_key}}
end
  # NOTE(review): these tests call the live Kraken API over the network, so
  # they are slow, require real credentials, and can fail for reasons
  # unrelated to the code under test. Consider tagging them :external.
  test "fetching balance for some account", %{api_key: api_key, private_key: private_key} do
    opts = [api_key: api_key, private_key: private_key]
    {:ok, balance} = Kraken.API.get_balance(opts)
    assert is_map(balance)
  end
  # Asserts the documented trade-balance keys are all present.
  test "fetching trade balance for some account", %{api_key: api_key, private_key: private_key} do
    opts = [api_key: api_key, private_key: private_key]
    {:ok, balance} = Kraken.API.get_trade_balance(opts)
    assert match?(%{
      "c" => _,
      "e" => _,
      "eb" => _,
      "m" => _,
      "mf" => _,
      "n" => _,
      "tb" => _,
      "v" => _
    }, balance)
  end
  test "fetching open orders for some account", %{api_key: api_key, private_key: private_key} do
    opts = [api_key: api_key, private_key: private_key]
    {:ok, orders} = Kraken.API.get_open_orders(opts)
    assert match?(%{"open" => _}, orders)
  end
  test "fetching closed orders for some account", %{api_key: api_key, private_key: private_key} do
    opts = [api_key: api_key, private_key: private_key]
    {:ok, orders} = Kraken.API.get_closed_orders(opts)
    assert match?(%{"closed" => _}, orders)
  end
  # Uses validate: "true" so the order is only checked, never placed.
  test "post new limit order for some account", %{api_key: api_key, private_key: private_key} do
    opts = [
      api_key: api_key,
      private_key: private_key,
      pair: "XXBTZUSD",
      type: "buy",
      ordertype: "limit",
      price: 1,
      volume: 1,
      validate: "true",
      oflags: "post"
    ]
    {:ok, order} = Kraken.API.add_order opts
    assert match?(%{"descr" => _}, order)
  end
end
| 30.067797 | 98 | 0.629087 |
e803c93720b8149287198591c4599661e4738f7e | 1,997 | ex | Elixir | clients/data_migration/lib/google_api/data_migration/v1beta1/model/audit_log_config.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/data_migration/lib/google_api/data_migration/v1beta1/model/audit_log_config.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/data_migration/lib/google_api/data_migration/v1beta1/model/audit_log_config.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DataMigration.V1beta1.Model.AuditLogConfig do
  @moduledoc """
  Provides the configuration for logging a type of permissions. Example: { "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:jose@example.com" ] }, { "log_type": "DATA_WRITE" } ] } This enables 'DATA_READ' and 'DATA_WRITE' logging, while exempting jose@example.com from DATA_READ logging.
  ## Attributes
  * `exemptedMembers` (*type:* `list(String.t)`, *default:* `nil`) - Specifies the identities that do not cause logging for this type of permission. Follows the same format of Binding.members.
  * `logType` (*type:* `String.t`, *default:* `nil`) - The log type that this config enables.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :exemptedMembers => list(String.t()),
          :logType => String.t()
        }
  # Field metadata consumed by the Gax JSON (de)serializer; keys intentionally
  # keep Google's camelCase wire names.
  field(:exemptedMembers, type: :list)
  field(:logType)
end
defimpl Poison.Decoder, for: GoogleApi.DataMigration.V1beta1.Model.AuditLogConfig do
  # Delegates JSON decoding to the generated model's own decode/2.
  def decode(value, options),
    do: GoogleApi.DataMigration.V1beta1.Model.AuditLogConfig.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.DataMigration.V1beta1.Model.AuditLogConfig do
  # All generated models share the Gax base encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 39.94 | 316 | 0.7331 |
e803f17aebe79e47145d634a3e23d6c5337b60af | 3,112 | ex | Elixir | lib/stripe/connect/transfer_reversal.ex | Rutaba/stripity_stripe | 12c525301c781f9c8c7e578cc0d933f5d35183d5 | [
"BSD-3-Clause"
] | 555 | 2016-11-29T05:02:27.000Z | 2022-03-30T00:47:59.000Z | lib/stripe/connect/transfer_reversal.ex | Rutaba/stripity_stripe | 12c525301c781f9c8c7e578cc0d933f5d35183d5 | [
"BSD-3-Clause"
] | 532 | 2016-11-28T18:22:25.000Z | 2022-03-30T17:04:32.000Z | lib/stripe/connect/transfer_reversal.ex | Rutaba/stripity_stripe | 12c525301c781f9c8c7e578cc0d933f5d35183d5 | [
"BSD-3-Clause"
] | 296 | 2016-12-05T14:04:09.000Z | 2022-03-28T20:39:37.000Z | defmodule Stripe.TransferReversal do
@moduledoc """
Work with Stripe transfer_reversal objects.
Stripe API reference: https://stripe.com/docs/api#transfer_reversal_object
"""
use Stripe.Entity
import Stripe.Request
  # Shape of a Stripe transfer-reversal object as returned by the API.
  # NOTE(review): `description` is typed `boolean` here but Stripe documents it
  # as a string (or null) — verify against the API before relying on the spec.
  @type t :: %__MODULE__{
          id: Stripe.id(),
          object: String.t(),
          amount: integer,
          balance_transaction: String.t() | Stripe.BalanceTransaction.t(),
          created: Stripe.timestamp(),
          currency: String.t(),
          description: boolean,
          destination_payment_refund: Stripe.id() | Stripe.Refund.t() | nil,
          metadata: Stripe.Types.metadata(),
          source_refund: Stripe.id() | Stripe.Refund.t() | nil,
          transfer: Stripe.id() | Stripe.Transfer.t()
        }
  defstruct [
    :id,
    :object,
    :amount,
    :balance_transaction,
    :created,
    :currency,
    :description,
    :destination_payment_refund,
    :metadata,
    :source_refund,
    :transfer
  ]
  # Reversals live under the /transfers/{id}/reversals resource.
  @endpoint "transfers"
@doc """
Create a transfer reversal
"""
@spec create(Stripe.id() | t, params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
when params: %{
optional(:amount) => pos_integer,
optional(:description) => String.t(),
optional(:metadata) => Stripe.Types.metadata(),
optional(:refund_application_fee) => boolean
}
def create(id, params, opts \\ []) do
new_request(opts)
|> put_endpoint(@endpoint <> "/#{id}/reversals")
|> put_params(params)
|> put_method(:post)
|> make_request()
end
@doc """
Retrieve a transfer reversal.
"""
@spec retrieve(Stripe.id() | t, Stripe.id() | t, Stripe.options()) ::
{:ok, t} | {:error, Stripe.Error.t()}
def retrieve(id, reversal_id, opts \\ []) do
new_request(opts)
|> put_endpoint(@endpoint <> "/#{id}/reversals/#{reversal_id}")
|> put_method(:get)
|> make_request()
end
@doc """
Update a transfer.
Takes the `id` and a map of changes.
"""
@spec update(Stripe.id() | t, Stripe.id() | t, params, Stripe.options()) ::
{:ok, t} | {:error, Stripe.Error.t()}
when params: %{
optional(:metadata) => Stripe.Types.metadata()
}
def update(id, reversal_id, params, opts \\ []) do
new_request(opts)
|> put_endpoint(@endpoint <> "/#{id}/reversals/#{reversal_id}")
|> put_method(:post)
|> put_params(params)
|> make_request()
end
@doc """
List all transfers.
"""
@spec list(Stripe.id() | t, params, Stripe.options()) ::
{:ok, Stripe.List.t(t)} | {:error, Stripe.Error.t()}
when params: %{
optional(:ending_before) => t | Stripe.id(),
optional(:limit) => 1..100,
optional(:starting_after) => t | Stripe.id()
}
def list(id, params \\ %{}, opts \\ []) do
new_request(opts)
|> put_endpoint(@endpoint <> "/#{id}/reversals")
|> put_method(:get)
|> put_params(params)
|> cast_to_id([:ending_before, :starting_after])
|> make_request()
end
end
| 28.814815 | 98 | 0.56973 |
e803f8ac9364d822b45ba3410cb8b26261c1e233 | 5,202 | ex | Elixir | apps/omg_db/lib/omg_db/level_db.ex | PinkDiamond1/elixir-omg | 70dfd24a0a1ddf5d1d9d71aab61ea25300f889f7 | [
"Apache-2.0"
] | null | null | null | apps/omg_db/lib/omg_db/level_db.ex | PinkDiamond1/elixir-omg | 70dfd24a0a1ddf5d1d9d71aab61ea25300f889f7 | [
"Apache-2.0"
] | null | null | null | apps/omg_db/lib/omg_db/level_db.ex | PinkDiamond1/elixir-omg | 70dfd24a0a1ddf5d1d9d71aab61ea25300f889f7 | [
"Apache-2.0"
] | 1 | 2021-12-04T00:37:46.000Z | 2021-12-04T00:37:46.000Z | # Copyright 2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.DB.LevelDB do
  @moduledoc """
  Our-types-aware port/adapter to a database backend.
  Contains functions to access data stored in the database
  """
  alias OMG.DB
  @behaviour OMG.DB
  require Logger
  # Default GenServer the public API talks to when no server is passed in.
  @server_name OMG.DB.LevelDB.Server
  # GenServer.call timeouts for long-running reads, in milliseconds.
  @one_minute 60_000
  @ten_minutes 10 * @one_minute
  # {blknum, txindex, oindex} triple identifying a UTXO position.
  @type utxo_pos_db_t :: {pos_integer, non_neg_integer, non_neg_integer}
def start_link(args) do
@server_name.start_link(args)
end
def child_spec do
db_path = Application.fetch_env!(:omg_db, :path)
[server_module: server_module, server_name: server_name] = Application.fetch_env!(:omg_db, :leveldb)
args = [db_path: db_path, name: server_name]
%{
id: server_module,
start: {server_module, :start_link, [args]},
type: :worker
}
end
def child_spec([db_path: _db_path, name: server_name] = args) do
[server_module: server_module, server_name: _] = Application.fetch_env!(:omg_db, :leveldb)
%{
id: server_name,
start: {server_module, :start_link, [args]},
type: :worker
}
end
def multi_update(db_updates, server_name \\ @server_name) do
GenServer.call(server_name, {:multi_update, db_updates})
end
  # Fetches the requested blocks from storage; an empty request short-circuits
  # to {:ok, []} without a GenServer round-trip.
  @spec blocks(block_to_fetch :: list(), atom) :: {:ok, list()} | {:error, any}
  def blocks(blocks_to_fetch, server_name \\ @server_name)
  def blocks([], _server_name), do: {:ok, []}
  def blocks(blocks_to_fetch, server_name) do
    GenServer.call(server_name, {:blocks, blocks_to_fetch})
  end
  # --- Read API: each function is a synchronous GenServer.call to the DB
  # server; bulk reads use extended timeouts because they scan large sets. ---
  def utxos(server_name \\ @server_name) do
    _ = Logger.info("Reading UTXO set, this might take a while. Allowing #{inspect(@ten_minutes)} ms")
    GenServer.call(server_name, :utxos, @ten_minutes)
  end
  def exit_infos(server_name \\ @server_name) do
    _ = Logger.info("Reading exits' info, this might take a while. Allowing #{inspect(@one_minute)} ms")
    GenServer.call(server_name, :exit_infos, @one_minute)
  end
  def in_flight_exits_info(server_name \\ @server_name) do
    _ = Logger.info("Reading in flight exits' info, this might take a while. Allowing #{inspect(@one_minute)} ms")
    GenServer.call(server_name, :in_flight_exits_info, @one_minute)
  end
  def competitors_info(server_name \\ @server_name) do
    _ = Logger.info("Reading competitors' info, this might take a while. Allowing #{inspect(@one_minute)} ms")
    GenServer.call(server_name, :competitors_info, @one_minute)
  end
  # Single-record lookups below use the default call timeout.
  @spec exit_info({pos_integer, non_neg_integer, non_neg_integer}, atom) :: {:ok, map} | {:error, atom}
  def exit_info(utxo_pos, server_name \\ @server_name) do
    GenServer.call(server_name, {:exit_info, utxo_pos})
  end
  @spec spent_blknum(utxo_pos_db_t(), atom) :: {:ok, pos_integer} | {:error, atom}
  def spent_blknum(utxo_pos, server_name \\ @server_name) do
    GenServer.call(server_name, {:spent_blknum, utxo_pos})
  end
  def block_hashes(block_numbers_to_fetch, server_name \\ @server_name) do
    GenServer.call(server_name, {:block_hashes, block_numbers_to_fetch})
  end
  def last_deposit_child_blknum(server_name \\ @server_name) do
    GenServer.call(server_name, :last_deposit_child_blknum)
  end
  def child_top_block_number(server_name \\ @server_name) do
    GenServer.call(server_name, :child_top_block_number)
  end
  # Note: *_eth_height values below denote actual Ethereum height service has processed.
  # It might differ from "latest" Ethereum block.
  def get_single_value(parameter_name, server_name \\ @server_name) do
    GenServer.call(server_name, {:get_single_value, parameter_name})
  end
def initiation_multiupdate(server_name \\ @server_name) do
# setting a number of markers to zeroes
DB.single_value_parameter_names()
|> Enum.map(&{:put, &1, 0})
|> multi_update(server_name)
end
  @doc """
  Does all of the initialization of `OMG.DB` based on the configured path
  """
  def init, do: do_init(@server_name, Application.fetch_env!(:omg_db, :path))
  # The binary guard disambiguates init(path) from init(server_name).
  def init(path) when is_binary(path), do: do_init(@server_name, path)
  def init(server_name), do: do_init(server_name, Application.fetch_env!(:omg_db, :path))
  def init(server_name, path), do: do_init(server_name, path)
  # Creates the storage directory, initializes it, briefly boots :omg_db to
  # zero the single-value markers, then stops the started apps in reverse
  # order. Any failing step is returned (and logged) as-is.
  defp do_init(server_name, path) do
    :ok = File.mkdir_p(path)
    with :ok <- server_name.init_storage(path),
         {:ok, started_apps} <- Application.ensure_all_started(:omg_db),
         :ok <- initiation_multiupdate(server_name) do
      started_apps |> Enum.reverse() |> Enum.each(fn app -> :ok = Application.stop(app) end)
      :ok
    else
      error ->
        _ = Logger.error("Unable to init: #{inspect(error)}")
        error
    end
  end
end
| 34.450331 | 114 | 0.713379 |
e8042098a03373530c53d83f94e394e89a536c29 | 10,502 | ex | Elixir | lib/imagine/cms_pages/cms_page.ex | nacengineer/imagine_cms | 00a73790f9de6c409bb87e1fdfb3b41010a9d222 | [
"Apache-2.0"
] | 25 | 2020-09-08T05:13:40.000Z | 2022-03-15T13:35:59.000Z | lib/imagine/cms_pages/cms_page.ex | nacengineer/imagine_cms | 00a73790f9de6c409bb87e1fdfb3b41010a9d222 | [
"Apache-2.0"
] | null | null | null | lib/imagine/cms_pages/cms_page.ex | nacengineer/imagine_cms | 00a73790f9de6c409bb87e1fdfb3b41010a9d222 | [
"Apache-2.0"
] | 3 | 2021-08-11T03:48:54.000Z | 2022-02-09T20:11:06.000Z | defmodule Imagine.CmsPages.CmsPage do
@moduledoc """
CMS Page (versioned, parent-child tree)
has_many objects, tags, sub_pages
belongs_to cms_template, parent
"""
use Ecto.Schema
import Ecto.Changeset
import Ecto.Query, warn: false
alias Imagine.Repo
alias Imagine.CmsPages
alias Imagine.CmsPages.{CmsPage, CmsPageObject, CmsPageTag, CmsPageVersion}
# alias Imagine.CmsTemplates
alias Imagine.CmsTemplates.CmsTemplate
alias Imagine.Accounts.User
@derive {Jason.Encoder,
only: [
:version,
:path,
:name,
:title,
:published_date,
:article_date,
:article_end_date,
:expiration_date,
:summary,
:thumbnail_path,
:feature_image_path,
:position,
:index
]}
schema "cms_pages" do
has_many :versions, CmsPageVersion, on_delete: :delete_all
has_many :objects, CmsPageObject
has_many :tags, CmsPageTag
belongs_to :cms_template, CmsTemplate
field :cms_template_version, :integer
belongs_to :parent, CmsPage
has_many :sub_pages, CmsPage, foreign_key: :parent_id, where: [discarded_at: nil]
belongs_to :author, User, foreign_key: :updated_by
field :updated_by_username, :string
field :layout, :string
field :version, :integer
field :path, :string
field :name, :string
field :title, :string
field :published_version, :integer
field :published_date, :naive_datetime
field :article_date, :naive_datetime
field :article_end_date, :naive_datetime
field :expiration_date, :naive_datetime
field :expires, :boolean, default: false
field :summary, :string
field :html_head, :string
field :thumbnail_path, :string
field :feature_image_path, :string
field :redirect_enabled, :boolean, default: false
field :redirect_to, :string
field :position, :integer
field :index, :integer, virtual: true
# field :comment_count, :integer
field :search_index, :string
field :discarded_at, :naive_datetime
timestamps(inserted_at: :created_on, updated_at: :updated_on)
end
def is_home_page?(cms_page) do
cms_page.id && (cms_page.path == "" || cms_page.id == 1)
end
  @doc false
  # Builds the changeset for creating/updating a page. `save_new_version`
  # controls whether the version counter is bumped; `current_user` (a %User{})
  # is stamped into updated_by/updated_by_username when given. File uploads in
  # attrs ("thumbnail_file"/"feature_image_file") are stored and linked last.
  def changeset(
        cms_page,
        attrs,
        save_new_version \\ false,
        current_user \\ nil
      ) do
    cms_page
    |> cast(attrs, [
      :title,
      :name,
      :cms_template_id,
      :parent_id,
      :published_version,
      :expires,
      :summary,
      :html_head,
      :thumbnail_path,
      :feature_image_path,
      :redirect_enabled,
      :redirect_to,
      :position
    ])
    # Recompute the full path from the (possibly new) parent chain + name.
    |> put_path
    |> put_date_with_default(:published_date, attrs)
    |> put_date_with_default(:article_date, attrs)
    |> put_date(:article_end_date, attrs)
    |> put_date(:expiration_date, attrs)
    # NOTE(review): search index is built from the *pre-change* struct, so it
    # lags one save behind a renamed page — confirm this is intended.
    |> put_change(:search_index, generate_search_index(cms_page))
    |> validate_required([
      :cms_template_id,
      :title,
      :published_date,
      :expires,
      :redirect_enabled,
      :position
    ])
    |> put_template_version
    |> validate_required([
      :cms_template_version
    ])
    |> put_version(cms_page, save_new_version)
    |> put_updated_by(current_user)
    |> validate_parent(cms_page.id)
    |> receive_file(attrs["thumbnail_file"], :thumbnail_path)
    |> receive_file(attrs["feature_image_file"], :feature_image_path)
  end
  # No upload given: leave the changeset untouched.
  def receive_file(changeset, nil, _attr), do: changeset
  # Stores the upload content-addressably, links it into the page's public
  # assets directory and writes the resulting public path into `attr`.
  # Only store_original/1 can produce the {:error, _} handled in `else`.
  def receive_file(changeset, upload, attr) do
    with cms_page_id <- get_field(changeset, :id),
         {:ok, _new_or_existing, original_path, filename} <- store_original(upload),
         final_path <- link_original_to_page_path(original_path, filename, cms_page_id) do
      put_change(changeset, attr, final_path)
    else
      {:error, _reason} = error -> error
    end
  end
  # Convenience clause unpacking a Plug.Upload into filename + temp path.
  def store_original(%Plug.Upload{filename: filename, path: tmp_path}),
    do: store_original(filename, tmp_path)
  # Content-addressed storage: the file is copied to
  # uploads/originals/<h0>/<h1>/<sha256> so identical content is stored once.
  # Returns {:ok, :new | :existing, stored_path, original_filename} or the
  # {:error, reason} from File.stat (e.g. the temp file vanished).
  def store_original(filename, tmp_path) do
    with {:ok, %File.Stat{size: _size}} <- File.stat(tmp_path),
         hash <- hash_from_file(tmp_path, :sha256),
         original_path <-
           Path.join(["uploads", "originals"] ++ hashed_path(hash)),
         original_filepath <- Path.join(original_path, hash) do
      if File.exists?(original_filepath) do
        {:ok, :existing, original_filepath, filename}
      else
        File.mkdir_p!(original_path)
        File.cp!(tmp_path, original_filepath)
        {:ok, :new, original_filepath, filename}
      end
    else
      {:error, _reason} = error -> error
    end
  end
def hashed_path(name, levels \\ 2) do
name |> String.split("", trim: true) |> Enum.take(levels)
end
  # Symlinks a stored original into public/assets/<page_id>/<filename>,
  # de-duplicating filename collisions by appending "-N", and returns the
  # web-facing path ("/assets/..."). The link target is made relative so the
  # public tree can be moved/deployed as a whole.
  # TODO: in the event that the file is *not* new and a link pointing to the
  # target already exists, no need to increment the filename
  def link_original_to_page_path(original_path, filename, cms_page_id) do
    public_dir = Path.join(["public"])
    local_path = Path.join([public_dir, "assets", to_string(cms_page_id)])
    unique_filename = unique_filename_for_path(filename, local_path)
    final_filepath = Path.join(local_path, unique_filename)
    File.mkdir_p!(local_path)
    File.ln_s!(build_relative_path_to(original_path, from: local_path), final_filepath)
    "/" <> Path.relative_to(final_filepath, public_dir)
  end
defp build_relative_path_to(path, from: cwd) do
cwd_segments = Path.split(cwd)
Path.join(Enum.map(cwd_segments, fn _ -> ".." end) ++ [path])
end
  # Returns `filename` unchanged when it is free in `path`; otherwise starts
  # the "-N" suffix search at 1.
  defp unique_filename_for_path(filename, path) do
    # FIXME: check to see whether file is identical before incrementing
    if File.exists?(Path.join(path, filename)),
      do: unique_filename_for_path(filename, path, 1),
      else: filename
  end
  # Tries "base-N.ext" with increasing N until an unused name is found.
  defp unique_filename_for_path(filename, path, iteration) do
    extension = Path.extname(filename)
    basename = Path.basename(filename, extension)
    new_filename = "#{basename}-#{iteration}#{extension}"
    # FIXME: check to see whether file is identical before incrementing
    if File.exists?(Path.join(path, new_filename)),
      do: unique_filename_for_path(filename, path, iteration + 1),
      else: new_filename
  end
def hash_from_file(path, algorithm, chunk_size \\ 2048) do
path |> File.stream!([], chunk_size) |> hash_from_chunks(algorithm)
end
def hash_from_chunks(chunks_enum, algorithm) do
chunks_enum
|> Enum.reduce(:crypto.hash_init(algorithm), &:crypto.hash_update(&2, &1))
|> :crypto.hash_final()
|> Base.encode16()
|> String.downcase()
end
defp put_date(changeset, attr, attrs) when is_atom(attr) do
if val = attrs[Atom.to_string(attr)],
do: put_change(changeset, attr, format_date(val)),
else: changeset
end
defp put_date_with_default(changeset, attr, attrs) when is_atom(attr) do
if val = attrs[Atom.to_string(attr)],
do: put_change(changeset, attr, format_date(val)),
else: put_change(changeset, attr, NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second))
end
defp put_template_version(changeset) do
case get_field(changeset, :cms_template_id) do
nil ->
changeset
id ->
cms_template = Imagine.CmsTemplates.get_cms_template!(id)
put_change(changeset, :cms_template_version, cms_template.version)
end
end
defp put_version(changeset, cms_page, false) do
put_change(changeset, :version, cms_page.version || 0)
end
defp put_version(changeset, cms_page, true) do
put_change(changeset, :version, (cms_page.version || 0) + 1)
end
defp put_updated_by(changeset, nil) do
changeset
end
defp put_updated_by(changeset, %User{id: user_id, username: username}) do
changeset
|> put_change(:updated_by, user_id)
|> put_change(:updated_by_username, username)
|> validate_required([:updated_by, :updated_by_username])
end
defp put_updated_by(changeset, _current_user), do: changeset
  # Entry point: validates against the parent_id currently on the changeset.
  defp validate_parent(changeset, my_id),
    do: validate_parent(changeset, my_id, get_field(changeset, :parent_id))
  # New (unsaved) page or no parent selected: nothing to check.
  defp validate_parent(changeset, nil, _), do: changeset
  defp validate_parent(changeset, _, nil), do: changeset
  # Walking the ancestor chain reached this page itself: reparenting would
  # create a cycle.
  defp validate_parent(changeset, my_id, parent_id) when my_id == parent_id do
    add_error(
      changeset,
      :parent_id,
      "cannot be this page or one of its descendants (would create a cycle)"
    )
  end
  # Recurse upward through the ancestor chain until the root (or a missing
  # page) is reached.
  defp validate_parent(changeset, my_id, parent_id) do
    parent = CmsPages.get_cms_page(parent_id)
    case parent do
      nil -> changeset
      _ -> validate_parent(changeset, my_id, parent.parent_id)
    end
  end
def get_all_parents(page, parents \\ []) do
page = Imagine.Repo.preload(page, [:parent])
if page.parent do
get_all_parents(page.parent, [page.parent | parents])
else
parents
end
end
  def update_descendant_paths(nil), do: {:error, nil}
  # Recomputes and persists the `path` of every descendant after a page was
  # renamed or moved, including the paths stored on each page's versions.
  def update_descendant_paths(cms_page) do
    update_descendant_paths(Imagine.Repo.preload(cms_page, :sub_pages).sub_pages, cms_page.path)
  end
  def update_descendant_paths([], _path), do: :ok
  def update_descendant_paths([cms_page | cms_pages], path) do
    # New path = parent path + own name, skipping nil/empty segments.
    new_path =
      [path, cms_page.name]
      |> Enum.reject(fn p -> p in [nil, ""] end)
      |> Enum.join("/")
    {:ok, updated_cms_page} =
      cms_page
      |> change(%{path: new_path})
      |> Imagine.Repo.update()
    new_id = updated_cms_page.id
    # Keep historical versions in sync with the page's new path.
    from(v in CmsPageVersion, where: v.cms_page_id == ^new_id)
    |> Repo.update_all(set: [path: updated_cms_page.path])
    # Depth-first into this page's children, then continue with siblings.
    Imagine.Repo.preload(updated_cms_page, :sub_pages).sub_pages
    |> update_descendant_paths(new_path)
    update_descendant_paths(cms_pages, path)
  end
def format_date(nil), do: nil
def format_date(str) do
case Timex.parse(str, "{YYYY}-{0M}-{0D}") do
{:ok, date} -> date
{:error, _} -> nil
end
end
  # Derives the page's full path from its parent chain and name, and writes
  # it onto the changeset.
  def put_path(changeset) do
    path = calculate_path(get_field(changeset, :parent_id), get_field(changeset, :name), [])
    put_change(changeset, :path, path)
  end
  # Reached the root: join the accumulated segments, dropping blanks, so a
  # root page with no name yields "".
  def calculate_path(nil, name, path) do
    [name | path] |> Enum.reject(fn p -> p in [nil, ""] end) |> Enum.join("/")
  end
  # Recurse upward, prepending each ancestor's name (raises if an ancestor id
  # does not resolve to a page).
  def calculate_path(parent_id, name, path) do
    parent = CmsPages.get_cms_page!(parent_id)
    calculate_path(parent.parent_id, parent.name, [name | path])
  end
  # Placeholder search index: currently only the page name is indexed; the
  # FIXME marks the intent to include more content (title, summary, body).
  def generate_search_index(cms_page) do
    # FIXME
    cms_page.name
  end
end
| 29.335196 | 97 | 0.67549 |
e804231135851df016af4ef76d1975e6b18da57e | 459 | ex | Elixir | Elixr/euler.ex | Nebopolis/project_euler | a1493c7fc6b6ee33ec6c584da5ffaf10032b4fbf | [
"MIT"
] | null | null | null | Elixr/euler.ex | Nebopolis/project_euler | a1493c7fc6b6ee33ec6c584da5ffaf10032b4fbf | [
"MIT"
] | null | null | null | Elixr/euler.ex | Nebopolis/project_euler | a1493c7fc6b6ee33ec6c584da5ffaf10032b4fbf | [
"MIT"
] | null | null | null | defmodule Fib do
def fib do
Stream.unfold({0, 1}, fn {a, b} -> {a, {b, a + b}} end)
end
end
defmodule Euler do
  @moduledoc "Solutions to Project Euler problems 1 and 2."

  # Problem 1: sum of all natural numbers below `max` divisible by 3 or 5.
  def euler1(max) do
    1..(max - 1)
    |> Enum.filter(fn n -> rem(n, 3) == 0 or rem(n, 5) == 0 end)
    |> Enum.sum()
  end

  # Problem 2: sum of the even Fibonacci numbers below `max`.
  def euler2(max) do
    Fib.fib()
    |> Stream.take_while(fn n -> n < max end)
    |> Stream.filter(fn n -> rem(n, 2) == 0 end)
    |> Enum.sum()
  end
end
# Smoke-test output for the solutions above: prints 23, 233168 and 4613732.
IO.puts Euler.euler1(10)
IO.puts Euler.euler1(1000)
IO.puts Euler.euler2(4_000_000)
| 17 | 59 | 0.56427 |
e80426c61ef6195564fae1b0dcdc29e740959b85 | 3,884 | exs | Elixir | apps/ewallet/test/ewallet/web/v1/serializers/export_serializer_test.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | apps/ewallet/test/ewallet/web/v1/serializers/export_serializer_test.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | apps/ewallet/test/ewallet/web/v1/serializers/export_serializer_test.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Tests for the V1 export serializer (single export and paginated lists).
defmodule EWallet.Web.V1.ExportSerializerTest do
  use EWallet.Web.SerializerCase, :v1
  alias Ecto.Association.NotLoaded
  alias EWallet.Web.Paginator
  alias EWallet.Web.V1.ExportSerializer
  alias Utils.Helpers.{Assoc, DateFormatter}
  describe "serialize/1 for a single export" do
    # Builds a factory export and checks every serialized field, including
    # the ISO-8601 timestamp formatting and association-id lookups.
    test "serializes into correct V1 export format" do
      export = build(:export)
      expected = %{
        object: "export",
        id: export.id,
        filename: export.filename,
        schema: export.schema,
        status: export.status,
        completion: export.completion,
        download_url: export.url,
        adapter: export.adapter,
        user_id: Assoc.get(export, [:user, :id]),
        key_id: Assoc.get(export, [:key, :id]),
        params: export.params,
        pid: export.pid,
        failure_reason: nil,
        created_at: DateFormatter.to_iso8601(export.inserted_at),
        updated_at: DateFormatter.to_iso8601(export.updated_at)
      }
      assert ExportSerializer.serialize(export) == expected
    end
    # An unloaded Ecto association serializes to nil rather than raising.
    test "serializes to nil if the export is not loaded" do
      assert ExportSerializer.serialize(%NotLoaded{}) == nil
    end
    test "serializes nil to nil" do
      assert ExportSerializer.serialize(nil) == nil
    end
  end
describe "serialize/1 for an export list" do
test "serialize into list of V1 export" do
export_1 = build(:export)
export_2 = build(:export)
paginator = %Paginator{
data: [export_1, export_2],
pagination: %{
current_page: 9,
per_page: 7,
is_first_page: false,
is_last_page: true
}
}
expected = %{
object: "list",
data: [
%{
object: "export",
id: export_1.id,
filename: export_1.filename,
schema: export_1.schema,
status: export_1.status,
completion: export_1.completion,
download_url: export_1.url,
adapter: export_1.adapter,
user_id: Assoc.get(export_1, [:user, :id]),
key_id: Assoc.get(export_1, [:key, :id]),
params: export_1.params,
pid: export_1.pid,
failure_reason: nil,
created_at: DateFormatter.to_iso8601(export_1.inserted_at),
updated_at: DateFormatter.to_iso8601(export_1.updated_at)
},
%{
object: "export",
id: export_2.id,
filename: export_2.filename,
schema: export_2.schema,
status: export_2.status,
completion: export_2.completion,
download_url: export_2.url,
adapter: export_2.adapter,
user_id: Assoc.get(export_2, [:user, :id]),
key_id: Assoc.get(export_2, [:key, :id]),
params: export_2.params,
pid: export_2.pid,
failure_reason: nil,
created_at: DateFormatter.to_iso8601(export_2.inserted_at),
updated_at: DateFormatter.to_iso8601(export_2.updated_at)
}
],
pagination: %{
current_page: 9,
per_page: 7,
is_first_page: false,
is_last_page: true
}
}
assert ExportSerializer.serialize(paginator) == expected
end
end
end
| 32.099174 | 74 | 0.613543 |
e8042a0e1f8548b5bb6472b055df556ad7ffe552 | 1,986 | ex | Elixir | lib/redix/exceptions.ex | duksis/redix | 2bda17516b6a702b4054dbd101ad3ba00464c707 | [
"MIT"
] | null | null | null | lib/redix/exceptions.ex | duksis/redix | 2bda17516b6a702b4054dbd101ad3ba00464c707 | [
"MIT"
] | null | null | null | lib/redix/exceptions.ex | duksis/redix | 2bda17516b6a702b4054dbd101ad3ba00464c707 | [
"MIT"
] | 1 | 2020-11-23T17:32:40.000Z | 2020-11-23T17:32:40.000Z | defmodule Redix.Error do
@moduledoc """
Error returned by Redis.
This exception represents semantic errors returned by Redis: for example,
non-existing commands or operations on keys with the wrong type (`INCR
not_an_integer`).
"""
defexception [:message]
@type t :: %__MODULE__{message: binary}
end
defmodule Redix.ConnectionError do
  @moduledoc """
  Error in the connection to Redis.
  This exception represents errors in the connection to Redis: for example,
  request timeouts, disconnections, and similar.
  ## Exception fields
  This exception has the following public fields:
    * `:reason` - (atom) the error reason. It can be one of the Redix-specific
      reasons described in the "Error reasons" section below, or any error
      reason returned by functions in the `:gen_tcp` module (see the
      [`:inet.posix/0`](http://www.erlang.org/doc/man/inet.html#type-posix) type).
  ## Error reasons
  The `:reason` field can assume a few Redix-specific values:
    * `:closed`: when the connection to Redis is closed (and Redix is
      reconnecting) and the user attempts to talk to Redis
    * `:disconnected`: when the connection drops while a request to Redis is in
      flight.
    * `:timeout`: when Redis doesn't reply to the request in time.
  """
  defexception [:reason]
  # The exception stores only the reason atom; the human-readable message is
  # derived lazily when the exception is formatted.
  def message(%__MODULE__{reason: reason}) do
    format_reason(reason)
  end
  @doc false
  @spec format_reason(term) :: binary
  def format_reason(reason)
  # :inet.format_error/1 doesn't format :tcp_closed.
  def format_reason(:tcp_closed) do
    "TCP connection closed"
  end
  # Manually returned by us when the connection is closed and someone tries to
  # send a command to Redis.
  def format_reason(:closed) do
    "the connection to Redis is closed"
  end
  # Fallback clause: let :inet.format_error/1 translate POSIX reasons; anything
  # it doesn't recognize is simply inspected.
  def format_reason(reason) do
    case :inet.format_error(reason) do
      'unknown POSIX error' ->
        inspect(reason)
      message ->
        List.to_string(message)
    end
  end
end
e8043edf9688eaddba177dd973bbb34166708dcd | 1,006 | ex | Elixir | web/views/error_helpers.ex | raincrash/pasteboard | e6b6f027bd18e1e4494c62c8127b89363b7466f5 | [
"MIT"
] | null | null | null | web/views/error_helpers.ex | raincrash/pasteboard | e6b6f027bd18e1e4494c62c8127b89363b7466f5 | [
"MIT"
] | null | null | null | web/views/error_helpers.ex | raincrash/pasteboard | e6b6f027bd18e1e4494c62c8127b89363b7466f5 | [
"MIT"
] | null | null | null | defmodule Pasteboard.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
if error = form.errors[field] do
content_tag :span, translate_error(error), class: "help-block"
end
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# Because error messages were defined within Ecto, we must
# call the Gettext module passing our Gettext backend. We
# also use the "errors" domain as translations are placed
# in the errors.po file. On your own code and templates,
# this could be written simply as:
#
# dngettext "errors", "1 file", "%{count} files", count
#
Gettext.dngettext(Pasteboard.Gettext, "errors", msg, msg, opts[:count], opts)
end
def translate_error(msg) do
Gettext.dgettext(Pasteboard.Gettext, "errors", msg)
end
end
| 27.944444 | 81 | 0.681909 |
e8045d85b9fae71cf199c972f00e70997943fc57 | 647 | exs | Elixir | test/slacker/parsers/prefix_test.exs | mikfreedman/slacker | 9e13b2ab232828ae7050f1c460233e572984d508 | [
"MIT"
] | 1 | 2016-05-19T05:10:16.000Z | 2016-05-19T05:10:16.000Z | test/slacker/parsers/prefix_test.exs | pivotal-sydney/slacker | 9e13b2ab232828ae7050f1c460233e572984d508 | [
"MIT"
] | 3 | 2016-01-17T09:07:17.000Z | 2016-01-19T21:55:21.000Z | test/slacker/parsers/prefix_test.exs | mikfreedman/slacker | 9e13b2ab232828ae7050f1c460233e572984d508 | [
"MIT"
] | null | null | null | defmodule Slacker.Parsers.PrefixTest do
use ExUnit.Case
test "#try_parse parses out echo command" do
assert Slacker.Parsers.Prefix.try_parse("echo hello world") == {:command, "echo", "hello world"}
end
test "#try_parse supports commands with hyphens" do
assert Slacker.Parsers.Prefix.try_parse("foo-bar hello world") == {:command, "foo-bar", "hello world"}
end
test "#try_parse supports empty message after command" do
assert Slacker.Parsers.Prefix.try_parse("ping") == {:command, "ping", ""}
end
test "#try_parse returns nil for invalid command" do
assert Slacker.Parsers.Prefix.try_parse("") == nil
end
end
| 32.35 | 106 | 0.712519 |
e8048e7eb18f18c399dee1f3b63c9376efafcf2f | 12,840 | ex | Elixir | lib/aws/kinesis_analytics_v2.ex | ahsandar/aws-elixir | 25de8b6c3a1401bde737cfc26b0679b14b058f23 | [
"Apache-2.0"
] | null | null | null | lib/aws/kinesis_analytics_v2.ex | ahsandar/aws-elixir | 25de8b6c3a1401bde737cfc26b0679b14b058f23 | [
"Apache-2.0"
] | null | null | null | lib/aws/kinesis_analytics_v2.ex | ahsandar/aws-elixir | 25de8b6c3a1401bde737cfc26b0679b14b058f23 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.KinesisAnalyticsV2 do
  @moduledoc """
  Amazon Kinesis Data Analytics is a fully managed service that you can use
  to process and analyze streaming data using SQL or Java. The service
  enables you to quickly author and run SQL or Java code against streaming
  sources to perform time series analytics, feed real-time dashboards, and
  create real-time metrics.
  """
  @doc """
  Adds an Amazon CloudWatch log stream to monitor application configuration
  errors.
  """
  def add_application_cloud_watch_logging_option(client, input, options \\ []) do
    request(client, "AddApplicationCloudWatchLoggingOption", input, options)
  end
  @doc """
  Adds a streaming source to your SQL-based Amazon Kinesis Data Analytics
  application.
  You can add a streaming source when you create an application, or you can
  use this operation to add a streaming source after you create an
  application. For more information, see `CreateApplication`.
  Any configuration update, including adding a streaming source using this
  operation, results in a new version of the application. You can use the
  `DescribeApplication` operation to find the current application version.
  """
  def add_application_input(client, input, options \\ []) do
    request(client, "AddApplicationInput", input, options)
  end
  @doc """
  Adds an `InputProcessingConfiguration` to an SQL-based Kinesis Data
  Analytics application. An input processor pre-processes records on the
  input stream before the application's SQL code executes. Currently, the
  only input processor available is [AWS
  Lambda](https://aws.amazon.com/documentation/lambda/).
  """
  def add_application_input_processing_configuration(client, input, options \\ []) do
    request(client, "AddApplicationInputProcessingConfiguration", input, options)
  end
  @doc """
  Adds an external destination to your SQL-based Amazon Kinesis Data
  Analytics application.
  If you want Kinesis Data Analytics to deliver data from an in-application
  stream within your application to an external destination (such as an
  Kinesis data stream, a Kinesis Data Firehose delivery stream, or an AWS
  Lambda function), you add the relevant configuration to your application
  using this operation. You can configure one or more outputs for your
  application. Each output configuration maps an in-application stream and an
  external destination.
  You can use one of the output configurations to deliver data from your
  in-application error stream to an external destination so that you can
  analyze the errors.
  Any configuration update, including adding a streaming source using this
  operation, results in a new version of the application. You can use the
  `DescribeApplication` operation to find the current application version.
  """
  def add_application_output(client, input, options \\ []) do
    request(client, "AddApplicationOutput", input, options)
  end
  @doc """
  Adds a reference data source to an existing SQL-based Amazon Kinesis Data
  Analytics application.
  Kinesis Data Analytics reads reference data (that is, an Amazon S3 object)
  and creates an in-application table within your application. In the
  request, you provide the source (S3 bucket name and object key name), name
  of the in-application table to create, and the necessary mapping
  information that describes how data in an Amazon S3 object maps to columns
  in the resulting in-application table.
  """
  def add_application_reference_data_source(client, input, options \\ []) do
    request(client, "AddApplicationReferenceDataSource", input, options)
  end
  @doc """
  Adds a Virtual Private Cloud (VPC) configuration to the application.
  Applications can use VPCs to store and access resources securely.
  Note the following about VPC configurations for Kinesis Data Analytics
  applications:
  <ul> <li> VPC configurations are not supported for SQL applications.
  </li> <li> When a VPC is added to a Kinesis Data Analytics application, the
  application can no longer be accessed from the Internet directly. To enable
  Internet access to the application, add an Internet gateway to your VPC.
  </li> </ul>
  """
  def add_application_vpc_configuration(client, input, options \\ []) do
    request(client, "AddApplicationVpcConfiguration", input, options)
  end
  @doc """
  Creates an Amazon Kinesis Data Analytics application. For information about
  creating a Kinesis Data Analytics application, see [Creating an
  Application](https://docs.aws.amazon.com/kinesisanalytics/latest/java/getting-started.html).
  """
  def create_application(client, input, options \\ []) do
    request(client, "CreateApplication", input, options)
  end
  @doc """
  Creates a snapshot of the application's state data.
  """
  def create_application_snapshot(client, input, options \\ []) do
    request(client, "CreateApplicationSnapshot", input, options)
  end
  @doc """
  Deletes the specified application. Kinesis Data Analytics halts application
  execution and deletes the application.
  """
  def delete_application(client, input, options \\ []) do
    request(client, "DeleteApplication", input, options)
  end
  @doc """
  Deletes an Amazon CloudWatch log stream from an Amazon Kinesis Data
  Analytics application.
  """
  def delete_application_cloud_watch_logging_option(client, input, options \\ []) do
    request(client, "DeleteApplicationCloudWatchLoggingOption", input, options)
  end
  @doc """
  Deletes an `InputProcessingConfiguration` from an input.
  """
  def delete_application_input_processing_configuration(client, input, options \\ []) do
    request(client, "DeleteApplicationInputProcessingConfiguration", input, options)
  end
  @doc """
  Deletes the output destination configuration from your SQL-based Amazon
  Kinesis Data Analytics application's configuration. Kinesis Data Analytics
  will no longer write data from the corresponding in-application stream to
  the external output destination.
  """
  def delete_application_output(client, input, options \\ []) do
    request(client, "DeleteApplicationOutput", input, options)
  end
  @doc """
  Deletes a reference data source configuration from the specified SQL-based
  Amazon Kinesis Data Analytics application's configuration.
  If the application is running, Kinesis Data Analytics immediately removes
  the in-application table that you created using the
  `AddApplicationReferenceDataSource` operation.
  """
  def delete_application_reference_data_source(client, input, options \\ []) do
    request(client, "DeleteApplicationReferenceDataSource", input, options)
  end
  @doc """
  Deletes a snapshot of application state.
  """
  def delete_application_snapshot(client, input, options \\ []) do
    request(client, "DeleteApplicationSnapshot", input, options)
  end
  @doc """
  Removes a VPC configuration from a Kinesis Data Analytics application.
  """
  def delete_application_vpc_configuration(client, input, options \\ []) do
    request(client, "DeleteApplicationVpcConfiguration", input, options)
  end
  @doc """
  Returns information about a specific Amazon Kinesis Data Analytics
  application.
  If you want to retrieve a list of all applications in your account, use the
  `ListApplications` operation.
  """
  def describe_application(client, input, options \\ []) do
    request(client, "DescribeApplication", input, options)
  end
  @doc """
  Returns information about a snapshot of application state data.
  """
  def describe_application_snapshot(client, input, options \\ []) do
    request(client, "DescribeApplicationSnapshot", input, options)
  end
  @doc """
  Infers a schema for an SQL-based Amazon Kinesis Data Analytics application
  by evaluating sample records on the specified streaming source (Kinesis
  data stream or Kinesis Data Firehose delivery stream) or Amazon S3 object.
  In the response, the operation returns the inferred schema and also the
  sample records that the operation used to infer the schema.
  You can use the inferred schema when configuring a streaming source for
  your application. When you create an application using the Kinesis Data
  Analytics console, the console uses this operation to infer a schema and
  show it in the console user interface.
  """
  def discover_input_schema(client, input, options \\ []) do
    request(client, "DiscoverInputSchema", input, options)
  end
  @doc """
  Lists information about the current application snapshots.
  """
  def list_application_snapshots(client, input, options \\ []) do
    request(client, "ListApplicationSnapshots", input, options)
  end
  @doc """
  Returns a list of Amazon Kinesis Data Analytics applications in your
  account. For each application, the response includes the application name,
  Amazon Resource Name (ARN), and status.
  If you want detailed information about a specific application, use
  `DescribeApplication`.
  """
  def list_applications(client, input, options \\ []) do
    request(client, "ListApplications", input, options)
  end
  @doc """
  Retrieves the list of key-value tags assigned to the application. For more
  information, see [Using
  Tagging](https://docs.aws.amazon.com/kinesisanalytics/latest/java/how-tagging.html).
  """
  def list_tags_for_resource(client, input, options \\ []) do
    request(client, "ListTagsForResource", input, options)
  end
  @doc """
  Starts the specified Amazon Kinesis Data Analytics application. After
  creating an application, you must exclusively call this operation to start
  your application.
  """
  def start_application(client, input, options \\ []) do
    request(client, "StartApplication", input, options)
  end
  @doc """
  Stops the application from processing data. You can stop an application
  only if it is in the running state. You can use the `DescribeApplication`
  operation to find the application state.
  """
  def stop_application(client, input, options \\ []) do
    request(client, "StopApplication", input, options)
  end
  @doc """
  Adds one or more key-value tags to a Kinesis Analytics application. Note
  that the maximum number of application tags includes system tags. The
  maximum number of user-defined application tags is 50. For more
  information, see [Using
  Tagging](https://docs.aws.amazon.com/kinesisanalytics/latest/java/how-tagging.html).
  """
  def tag_resource(client, input, options \\ []) do
    request(client, "TagResource", input, options)
  end
  @doc """
  Removes one or more tags from a Kinesis Analytics application. For more
  information, see [Using
  Tagging](https://docs.aws.amazon.com/kinesisanalytics/latest/java/how-tagging.html).
  """
  def untag_resource(client, input, options \\ []) do
    request(client, "UntagResource", input, options)
  end
  @doc """
  Updates an existing Amazon Kinesis Data Analytics application. Using this
  operation, you can update application code, input configuration, and output
  configuration.
  Kinesis Data Analytics updates the `ApplicationVersionId` each time you
  update your application.
  """
  def update_application(client, input, options \\ []) do
    request(client, "UpdateApplication", input, options)
  end
  # Shared plumbing: every public function above funnels into this dispatcher.
  # All Kinesis Analytics operations POST to the same endpoint; the
  # "X-Amz-Target" header selects which API action is invoked.
  @spec request(AWS.Client.t(), binary(), map(), list()) ::
          {:ok, Poison.Parser.t() | nil, Poison.Response.t()}
          | {:error, Poison.Parser.t()}
          | {:error, HTTPoison.Error.t()}
  defp request(client, action, input, options) do
    client = %{client | service: "kinesisanalytics"}
    host = build_host("kinesisanalytics", client)
    url = build_url(host, client)
    headers = [
      {"Host", host},
      {"Content-Type", "application/x-amz-json-1.1"},
      {"X-Amz-Target", "KinesisAnalytics_20180523.#{action}"}
    ]
    payload = Poison.Encoder.encode(input, %{})
    # Requests are signed with AWS Signature Version 4 before dispatch.
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
    case HTTPoison.post(url, payload, headers, options) do
      {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
        # Some actions return an empty 200 body; surface that as `nil`.
        {:ok, nil, response}
      {:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
        {:ok, Poison.Parser.parse!(body, %{}), response}
      {:ok, %HTTPoison.Response{body: body}} ->
        # Non-200 responses carry a JSON error document from AWS.
        error = Poison.Parser.parse!(body, %{})
        {:error, error}
      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end
  # The special "local" region targets localhost, e.g. for integration tests.
  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end
  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end
  defp build_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end
end
e8049e152e1b25f0be9c1e53aa93153d920e6c9c | 14,960 | ex | Elixir | deps/phoenix/lib/phoenix/test/channel_test.ex | Hajto/hangmanelixir | a05cc0b73e0a5d464405f63d274ae622565728ca | [
"MIT"
] | null | null | null | deps/phoenix/lib/phoenix/test/channel_test.ex | Hajto/hangmanelixir | a05cc0b73e0a5d464405f63d274ae622565728ca | [
"MIT"
] | null | null | null | deps/phoenix/lib/phoenix/test/channel_test.ex | Hajto/hangmanelixir | a05cc0b73e0a5d464405f63d274ae622565728ca | [
"MIT"
] | null | null | null | defmodule Phoenix.ChannelTest do
@moduledoc """
Conveniences for testing Phoenix channels.
In channel tests, we interact with channels via process
communication, sending and receiving messages. It is also
common to subscribe to the same topic the channel subscribes
to, allowing us to assert if a given message was broadcast
or not.
## Channel testing
To get started, define the module attribute `@endpoint`
in your test case pointing to your application endpoint.
Then you can directly create a socket and
`subscribe_and_join/4` topics and channels:
{:ok, _, socket} =
socket("user:id", %{some_assigns: 1})
|> subscribe_and_join(RoomChannel, "rooms:lobby", %{"id" => 3})
You usually want to set the same ID and assigns your
`UserSocket.connect/2` callback would set. Alternatively,
you can use the `connect/3` helper to call your `UserSocket.connect/2`
callback and initialize the socket with the socket id:
{:ok, socket} = connect(UserSocket, %{"some" => "params"})
{:ok, _, socket} = subscribe_and_join(socket, "rooms:lobby", %{"id" => 3})
Once called, `subscribe_and_join/4` will subscribe the
current test process to the "rooms:lobby" topic and start a
channel in another process. It returns `{:ok, reply, socket}`
or `{:error, reply}`.
Now, in the same way the channel has a socket representing
communication it will push to the client. Our test has a
socket representing communication to be pushed to the server.
For example, we can use the `push/3` function in the test
to push messages to the channel (it will invoke `handle_in/3`):
push socket, "my_event", %{"some" => "data"}
Similarly, we can broadcast messages from the test itself
on the topic that both test and channel are subscribed to,
triggering `handle_out/3` on the channel:
broadcast_from socket, "my_event", %{"some" => "data"}
> Note only `broadcast_from/3` and `broadcast_from!/3` are
available in tests to avoid broadcast messages to be resent
to the test process.
While the functions above are pushing data to the channel
(server) we can use `assert_push/3` to verify the channel
pushed a message to the client:
assert_push "my_event", %{"some" => "data"}
Or even assert something was broadcast into pubsub:
assert_broadcast "my_event", %{"some" => "data"}
Finally, every time a message is pushed to the channel,
a reference is returned. We can use this reference to
assert a particular reply was sent from the server:
ref = push socket, "counter", %{}
assert_reply ref, :ok, %{"counter" => 1}
## Checking side-effects
Often one may want to do side-effects inside channels,
like writing to the database, and verify those side-effects
during their tests.
Imagine the following `handle_in/3` inside a channel:
def handle_in("publish", %{"id" => id}, socket) do
Repo.get!(Post, id) |> Post.publish() |> Repo.update!()
{:noreply, socket}
end
Because the whole communication is asynchronous, the
following test would be very brittle:
push socket, "publish", %{"id" => 3}
assert Repo.get_by(Post, id: 3, published: true)
The issue is that we have no guarantees the channel has
done processing our message after calling `push/3`. The
best solution is to assert the channel sent us a reply
before doing any other assertion. First change the
channel to send replies:
def handle_in("publish", %{"id" => id}, socket) do
Repo.get!(Post, id) |> Post.publish() |> Repo.update!()
{:reply, :ok, socket}
end
Then expect them in the test:
ref = push socket, "publish", %{"id" => 3}
assert_reply ref, :ok
assert Repo.get_by(Post, id: 3, published: true)
## Leave and close
This module also provides functions to simulate leaving
and closing a channel. Once you leave or close a channel,
because the channel is linked to the test process on join,
it will crash the test process:
leave(socket)
** (EXIT from #PID<...>) {:shutdown, :leave}
You can avoid this by unlinking the channel process in
the test:
Process.unlink(socket.channel_pid)
Notice `leave/1` is async, so it will also return a
reference which you can use to check for a reply:
ref = leave(socket)
assert_reply ref, :ok
On the other hand, close is always sync and it will
return only after the channel process is guaranteed to
have been terminated:
:ok = close(socket)
This mimics the behaviour existing in clients.
"""
alias Phoenix.Socket
alias Phoenix.Socket.Message
alias Phoenix.Socket.Broadcast
alias Phoenix.Socket.Reply
alias Phoenix.Socket.Transport
alias Phoenix.Channel.Server
defmodule NoopSerializer do
@behaviour Phoenix.Transports.Serializer
@moduledoc false
def fastlane!(%Broadcast{} = msg) do
%Message{
topic: msg.topic,
event: msg.event,
payload: msg.payload
}
end
def encode!(%Reply{} = reply), do: reply
def encode!(%Message{} = msg), do: msg
def decode!(message, _opts), do: message
end
@doc false
defmacro __using__(_) do
quote do
import Phoenix.ChannelTest
end
end
@doc """
Builds a socket.
The socket is then used to subscribe and join channels.
Use this function when you want to create a blank socket
to pass to functions like `UserSocket.connect/2`.
Otherwise, use `socket/2` if you want to build a socket with
id and assigns.
The socket endpoint is read from the `@endpoint` variable.
"""
defmacro socket() do
if endpoint = Module.get_attribute(__CALLER__.module, :endpoint) do
quote do
%Socket{serializer: NoopSerializer,
transport_pid: self(),
endpoint: unquote(endpoint),
pubsub_server: unquote(endpoint).__pubsub_server__(),
transport: unquote(__MODULE__),
transport_name: :channel_test}
end
else
raise "module attribute @endpoint not set for socket/0"
end
end
@doc """
Builds a socket with given id and assigns.
The socket endpoint is read from the `@endpoint` variable.
"""
defmacro socket(id, assigns) do
if endpoint = Module.get_attribute(__CALLER__.module, :endpoint) do
quote do
%Socket{serializer: NoopSerializer,
transport_pid: self(),
endpoint: unquote(endpoint),
pubsub_server: unquote(endpoint).__pubsub_server__(),
id: unquote(id),
assigns: Enum.into(unquote(assigns), %{}),
transport: unquote(__MODULE__),
transport_name: :channel_test}
end
else
raise "module attribute @endpoint not set for socket/2"
end
end
@doc """
Initiates a transport connection for the socket handler.
Useful for testing UserSocket authentication. Returns
the result of the handler's `connect/2` callback.
"""
defmacro connect(handler, params) do
if endpoint = Module.get_attribute(__CALLER__.module, :endpoint) do
quote do
Transport.connect(unquote(endpoint), unquote(handler), :channel_test,
unquote(__MODULE__), NoopSerializer, unquote(params))
end
else
raise "module attribute @endpoint not set for socket/2"
end
end
@doc "See `subscribe_and_join!/4`."
def subscribe_and_join!(%Socket{} = socket, topic) when is_binary(topic) do
subscribe_and_join!(socket, topic, %{})
end
@doc "See `subscribe_and_join!/4`."
def subscribe_and_join!(%Socket{} = socket, topic, payload)
when is_binary(topic) and is_map(payload) do
channel = match_topic_to_channel!(socket, topic)
subscribe_and_join!(socket, channel, topic, payload)
end
@doc """
Same as `subscribe_and_join/4` but returns either the socket
or throws an error.
This is helpful when you are not testing joining the channel
and just need the socket.
"""
def subscribe_and_join!(%Socket{} = socket, channel, topic, payload \\ %{})
when is_atom(channel) and is_binary(topic) and is_map(payload) do
case subscribe_and_join(socket, channel, topic, payload) do
{:ok, _, socket} -> socket
{:error, error} -> raise "could not join channel, got error: #{inspect(error)}"
end
end
@doc "See `subscribe_and_join/4`."
def subscribe_and_join(%Socket{} = socket, topic) when is_binary(topic) do
subscribe_and_join(socket, topic, %{})
end
@doc "See `subscribe_and_join/4`."
def subscribe_and_join(%Socket{} = socket, topic, payload)
when is_binary(topic) and is_map(payload) do
channel = match_topic_to_channel!(socket, topic)
subscribe_and_join(socket, channel, topic, payload)
end
@doc """
Subscribes to the given topic and joins the channel
under the given topic and payload.
By subscribing to the topic, we can use `assert_broadcast/3`
to verify a message has been sent through the pubsub layer.
By joining the channel, we can interact with it directly.
The given channel is joined in a separate process which is
linked to the test process.
If no channel module is provided, the socket's handler is used to
lookup the matching channel for the given topic.
It returns `{:ok, reply, socket}` or `{:error, reply}`.
"""
def subscribe_and_join(%Socket{} = socket, channel, topic, payload \\ %{})
when is_atom(channel) and is_binary(topic) and is_map(payload) do
socket.endpoint.subscribe(self(), topic)
join(socket, channel, topic, payload)
end
@doc "See `join/4`."
def join(%Socket{} = socket, topic) when is_binary(topic) do
join(socket, topic, %{})
end
@doc "See `join/4`."
def join(%Socket{} = socket, topic, payload)
when is_binary(topic) and is_map(payload) do
channel = match_topic_to_channel!(socket, topic)
join(socket, channel, topic, payload)
end
@doc """
Joins the channel under the given topic and payload.
The given channel is joined in a separate process
which is linked to the test process.
It returns `{:ok, reply, socket}` or `{:error, reply}`.
"""
def join(%Socket{} = socket, channel, topic, payload \\ %{})
when is_atom(channel) and is_binary(topic) and is_map(payload) do
socket = %Socket{socket | topic: topic, channel: channel}
case Server.join(socket, payload) do
{:ok, reply, pid} ->
{:ok, reply, Server.socket(pid)}
{:error, _} = error ->
error
end
end
@doc """
Pushes a message into the channel.
The triggers the `handle_in/3` callback in the channel.
## Examples
iex> push socket, "new_message", %{id: 1, content: "hello"}
:ok
"""
def push(socket, event, payload \\ %{}) do
ref = make_ref()
send(socket.channel_pid,
%Message{event: event, topic: socket.topic, ref: ref, payload: payload})
ref
end
@doc """
Emulates the client leaving the channel.
"""
def leave(socket) do
push(socket, "phx_leave", %{})
end
@doc """
Emulates the client closing the channel.
Closing channels is synchronous and has a default timeout
of 5000 miliseconds.
"""
def close(socket, timeout \\ 5000) do
Server.close(socket.channel_pid, timeout)
end
@doc """
Broadcast event from pid to all subscribers of the socket topic.
The test process will not receive the published message. This triggers
the `handle_out/3` callback in the channel.
## Examples
iex> broadcast_from socket, "new_message", %{id: 1, content: "hello"}
:ok
"""
def broadcast_from(socket, event, message) do
%{pubsub_server: pubsub_server, topic: topic, transport_pid: transport_pid} = socket
Server.broadcast_from pubsub_server, transport_pid, topic, event, message
end
@doc """
Same as `broadcast_from/3` but raises if broadcast fails.
"""
def broadcast_from!(socket, event, message) do
%{pubsub_server: pubsub_server, topic: topic, transport_pid: transport_pid} = socket
Server.broadcast_from! pubsub_server, transport_pid, topic, event, message
end
@doc """
Asserts the channel has pushed a message back to the client
with the given event and payload under `timeout`.
Notice event and payload are patterns. This means one can write:
assert_push "some_event", %{"data" => _}
In the assertion above, we don't particularly care about
the data being sent, as long as something was sent.
The timeout is in miliseconds and defaults to 100ms.
"""
defmacro assert_push(event, payload, timeout \\ 100) do
quote do
assert_receive %Phoenix.Socket.Message{
event: unquote(event),
payload: unquote(payload)}, unquote(timeout)
end
end
@doc """
Asserts the channel has replied to the given message within
`timeout`.

Notice status and payload are patterns. This means one can write:

    ref = push channel, "some_event"
    assert_reply ref, :ok, %{"data" => _}

In the assertion above, we don't particularly care about
the data being sent, as long as something was replied.

The timeout is in milliseconds and defaults to 100ms.
"""
defmacro assert_reply(ref, status, payload \\ Macro.escape(%{}), timeout \\ 100) do
  quote do
    # Rebind so the pin below matches the caller's reference value,
    # rather than treating it as a pattern.
    ref = unquote(ref)

    assert_receive %Phoenix.Socket.Reply{
                     ref: ^ref,
                     status: unquote(status),
                     payload: unquote(payload)
                   },
                   unquote(timeout)
  end
end
@doc """
Asserts the channel has broadcast a message within `timeout`.

Before asserting anything was broadcast, we must first
subscribe to the topic of the channel in the test process:

    @endpoint.subscribe(self(), "foo:ok")

Now we can match on event and payload as patterns:

    assert_broadcast "some_event", %{"data" => _}

In the assertion above, we don't particularly care about
the data being sent, as long as something was sent.

The timeout is in milliseconds and defaults to 100ms.
"""
defmacro assert_broadcast(event, payload, timeout \\ 100) do
  quote do
    assert_receive %Phoenix.Socket.Broadcast{event: unquote(event), payload: unquote(payload)},
                   unquote(timeout)
  end
end
# Resolves the channel module registered for `topic` on the socket's handler.
#
# Raises with guidance when the socket has no handler (i.e. `connect/2` was
# never called), and raises when the handler has no channel for the topic.
defp match_topic_to_channel!(socket, topic) do
  unless socket.handler do
    raise """
    No socket handler found to lookup channel for topic #{inspect topic}.
    Use `connect/2` when calling `subscribe_and_join` without a channel, for example:

        {:ok, socket} = connect(UserSocket, %{})
        socket = subscribe_and_join!(socket, "foo:bar", %{})
    """
  end

  case socket.handler.__channel__(topic, socket.transport_name) do
    # BUGFIX: `nil` is itself an atom, so a bare `is_atom/1` guard used to
    # return `nil` when no channel matched, instead of raising below.
    channel when is_atom(channel) and not is_nil(channel) -> channel
    _ -> raise "no channel found for topic #{inspect topic} in #{inspect socket.handler}"
  end
end
end
| 31.965812 | 91 | 0.673529 |
e804a4f6578282812fd9665cd8c960e92a3ffadb | 1,561 | ex | Elixir | apps/maintenance_web/lib/maintenance_web/views/error_helpers.ex | eksperimental/maintenance | 5d332f4886b59510e37d32ad89c724840989ada0 | [
"CC0-1.0",
"MIT-0"
] | 3 | 2021-11-13T04:50:40.000Z | 2022-01-09T21:27:33.000Z | apps/maintenance_web/lib/maintenance_web/views/error_helpers.ex | eksperimental/maintenance | 5d332f4886b59510e37d32ad89c724840989ada0 | [
"CC0-1.0",
"MIT-0"
] | null | null | null | apps/maintenance_web/lib/maintenance_web/views/error_helpers.ex | eksperimental/maintenance | 5d332f4886b59510e37d32ad89c724840989ada0 | [
"CC0-1.0",
"MIT-0"
] | null | null | null | defmodule MaintenanceWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
  # One <span class="invalid-feedback"> per error registered for `field`.
  form.errors
  |> Keyword.get_values(field)
  |> Enum.map(fn error ->
    content_tag(:span, translate_error(error),
      class: "invalid-feedback",
      phx_feedback_for: input_name(form, field)
    )
  end)
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
  # With gettext the translatable string is normally a static argument
  # (e.g. `dgettext("errors", "is invalid")`). Ecto produces its error
  # messages dynamically, so we call the Gettext module directly, passing
  # our backend as the first argument.
  #
  # The "errors" domain means translations live in errors.po. Ecto sets
  # the :count option when plural rules should apply.
  count = opts[:count]

  if count do
    Gettext.dngettext(MaintenanceWeb.Gettext, "errors", msg, msg, count, opts)
  else
    Gettext.dgettext(MaintenanceWeb.Gettext, "errors", msg, opts)
  end
end
end
| 32.520833 | 80 | 0.668161 |
e804ec08353a1bdafe3a32178d2a9ac28b583240 | 1,894 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_publish_to_pub_sub.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_publish_to_pub_sub.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_publish_to_pub_sub.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# NOTE: generated model (see the code-generator header above) — regeneration
# will overwrite manual edits.
defmodule GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2PublishToPubSub do
  @moduledoc """
  Publish a message into given Pub/Sub topic when DlpJob has completed. The message contains a single field, `DlpJobName`, which is equal to the finished job's [`DlpJob.name`](https://cloud.google.com/dlp/docs/reference/rest/v2/projects.dlpJobs#DlpJob). Compatible with: Inspect, Risk

  ## Attributes

  * `topic` (*type:* `String.t`, *default:* `nil`) - Cloud Pub/Sub topic to send notifications to. The topic must have given publishing access rights to the DLP API service account executing the long running DlpJob sending the notifications. Format is projects/{project}/topics/{topic}.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :topic => String.t() | nil
        }

  # `field/1` is provided by GoogleApi.Gax.ModelBase.
  field(:topic)
end

# JSON decoding for Poison delegates to the Gax-generated decode/2.
defimpl Poison.Decoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2PublishToPubSub do
  def decode(value, options) do
    GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2PublishToPubSub.decode(value, options)
  end
end

# JSON encoding for Poison reuses the shared ModelBase implementation.
defimpl Poison.Encoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2PublishToPubSub do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 40.297872 | 288 | 0.755544 |
e804f91e18139fc3d50e2c19fca5d1038d33f297 | 50 | exs | Elixir | .medic/update.exs | dbrody/playwright-elixir | 48611c08dbdb8e36aa4dd8aa2d97a4014b753815 | [
"MIT"
] | 30 | 2021-06-01T16:59:35.000Z | 2022-03-25T16:56:19.000Z | .medic/update.exs | dbrody/playwright-elixir | 48611c08dbdb8e36aa4dd8aa2d97a4014b753815 | [
"MIT"
] | 35 | 2021-06-10T17:05:31.000Z | 2022-02-11T22:30:36.000Z | .medic/update.exs | dbrody/playwright-elixir | 48611c08dbdb8e36aa4dd8aa2d97a4014b753815 | [
"MIT"
] | 4 | 2021-08-13T20:38:18.000Z | 2022-01-31T04:32:35.000Z | [:update_code, :update_mix, :update_npm, :doctor]
| 25 | 49 | 0.74 |
e80520035d1c43992f19bb87bdb53831d8d8bed7 | 3,239 | exs | Elixir | test/support/testverifier.exs | axelson/inky | f9112e239040010b5336f144d407bd1492cae387 | [
"Apache-2.0"
] | 51 | 2019-06-23T21:03:59.000Z | 2022-03-17T04:57:57.000Z | test/support/testverifier.exs | axelson/inky | f9112e239040010b5336f144d407bd1492cae387 | [
"Apache-2.0"
] | 24 | 2019-06-17T05:56:34.000Z | 2021-12-05T15:06:37.000Z | test/support/testverifier.exs | axelson/inky | f9112e239040010b5336f144d407bd1492cae387 | [
"Apache-2.0"
] | 9 | 2019-07-26T13:06:13.000Z | 2021-11-27T13:12:24.000Z | defmodule Inky.TestVerifier do
@moduledoc false
defmodule MockInteractionException do
@moduledoc false
defexception message: "Interaction with mock did not match expectation.",
description: nil,
issue: nil,
item: -1
def unmet(reason, item) do
%MockInteractionException{
description: "Wrong interaction",
issue: {:unmet, reason},
item: item
}
end
def expected(spec, item) do
%MockInteractionException{
description: "Missing interactions",
issue: {:expected, spec},
item: item
}
end
def unexpected(actual, item) do
%MockInteractionException{
description: "Unexpected interactions",
issue: {:unexpected, actual},
item: item
}
end
end
# Reads a spec file from `prefix`/`path` and parses it into comparable terms.
def load_spec(path, prefix \\ "") do
  full_path = Path.join(prefix, path)

  full_path
  |> File.read!()
  |> parse_spec()
end
# Walks `spec` and `actual` in lock step, counting matched interactions.
# Returns {:ok, count} on full agreement; throws a MockInteractionException
# on the first divergence (extra, missing, or mismatched interaction).
def check(spec, actual, items \\ 0), do: do_check(spec, actual, items)

defp do_check([], [], items), do: {:ok, items}

defp do_check(spec, [], items),
  do: throw(MockInteractionException.expected(spec, items + 1))

defp do_check([], actual, items),
  do: throw(MockInteractionException.unexpected(actual, items + 1))

defp do_check([s | spec_rest], [a | actual_rest], items) do
  case check_step(s, a) do
    :cont -> do_check(spec_rest, actual_rest, items + 1)
    {:halt, reason} -> throw(MockInteractionException.unmet(reason, items + 1))
  end
end
# Parses a spec's source text into plain Elixir terms via the AST.
def parse_spec(str) when is_binary(str) do
  quoted = Code.string_to_quoted!(str)
  do_parse_spec(quoted)
end
# Atomic terms pass through unchanged.
defp do_parse_spec(value) when is_atom(value), do: value
defp do_parse_spec(value) when is_integer(value), do: value
defp do_parse_spec(value) when is_float(value), do: value
defp do_parse_spec(value) when is_binary(value), do: value

# Lists and two-element tuples recurse element-wise. (Two-element tuples
# are literal in the quoted AST, so no `:{}` wrapper node appears.)
defp do_parse_spec([]), do: []
defp do_parse_spec([head | tail]), do: [do_parse_spec(head) | do_parse_spec(tail)]
defp do_parse_spec({left, right}), do: {do_parse_spec(left), do_parse_spec(right)}

# Binary-construction node, e.g. `<<1, 2, 3>>`.
defp do_parse_spec({:<<>>, _meta, segments}) do
  :erlang.list_to_binary(segments)
end

# n-ary tuple node.
defp do_parse_spec({:{}, _meta, elements}) do
  elements
  |> Enum.map(&do_parse_spec/1)
  |> List.to_tuple()
end

# Map node: both keys and values are parsed.
defp do_parse_spec({:%{}, _meta, pairs}) do
  for {key, value} <- pairs, into: %{}, do: {do_parse_spec(key), do_parse_spec(value)}
end

# Any other call/operator node collapses to its raw argument list,
# effectively ignoring functions and operators. Must stay last.
defp do_parse_spec({_node_type, _meta, args}), do: args
# check "don't care"
defp check_step(:_, _), do: :cont
# check bitstrings
defp check_step(:bitstring, b) when is_bitstring(b), do: :cont
defp check_step(:bitstring, bad), do: mismatch(:bitstring, bad)
# check keys in tuples
defp check_step({:_, sv}, {_ak, av}), do: check_step(sv, av)
defp check_step({:bitstring, sv}, {b, av}) when is_bitstring(b), do: check_step(sv, av)
defp check_step({spec_key, sv}, {spec_key, av}), do: check_step(sv, av)
defp check_step({spec_key, _sv}, {bad_key, _av}), do: mismatch(spec_key, bad_key)
# check exact match, last resort
defp check_step(term, term), do: :cont
defp check_step(term, bad), do: mismatch(term, bad)
defp mismatch(spec, actual), do: {:halt, %{expected: spec, actual: actual}}
end
| 27.922414 | 89 | 0.62581 |
e80546a6e53e66d40b1aaed12c87e4c783b0a360 | 912 | ex | Elixir | lib/fake/headway/request.ex | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
] | 1 | 2022-01-24T12:39:05.000Z | 2022-01-24T12:39:05.000Z | lib/fake/headway/request.ex | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
] | 40 | 2021-05-05T10:14:25.000Z | 2022-03-31T18:34:15.000Z | lib/fake/headway/request.ex | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
] | 1 | 2022-03-20T21:08:12.000Z | 2022-03-20T21:08:12.000Z | defmodule Fake.Headway.Request do
@times [
~N[2017-07-04 09:05:00],
~N[2017-07-04 08:55:00],
~N[2017-07-04 08:45:00],
~N[2017-07-04 09:20:00]
]
# Builds one fake schedule entry per (stop, time) pair, flattened in order:
# all @times for the first stop, then all @times for the next, and so on.
def get_schedules(stop_list) do
  for stop_id <- stop_list, time <- @times do
    iso_time = Timex.format!(Timex.to_datetime(time, "America/New_York"), "{ISO:Extended}")

    %{
      "relationships" => %{
        "prediction" => %{},
        "route" => %{"data" => %{"id" => "743", "type" => "route"}},
        "stop" => %{"data" => %{"id" => stop_id, "type" => "stop"}},
        "trip" => %{"data" => %{"id" => "36684269", "type" => "trip"}}
      },
      "attributes" => %{
        "arrival_time" => iso_time,
        "departure_time" => iso_time
      }
    }
  end
end
end
| 30.4 | 91 | 0.469298 |
e805947b26765d72280ce45f36e803664b1f5df3 | 254 | exs | Elixir | config/test.exs | 03juan/ecto_cellar | 0adbf48207947927ab133754a7cfdb4a45d4da61 | [
"Apache-2.0"
] | null | null | null | config/test.exs | 03juan/ecto_cellar | 0adbf48207947927ab133754a7cfdb4a45d4da61 | [
"Apache-2.0"
] | null | null | null | config/test.exs | 03juan/ecto_cellar | 0adbf48207947927ab133754a7cfdb4a45d4da61 | [
"Apache-2.0"
] | null | null | null | import Config
# Select the Ecto repo for the adapter under test:
# MySQL when DB_ADAPTER=mysql, PostgreSQL otherwise.
repo =
  if System.get_env("DB_ADAPTER") == "mysql" do
    MyXQL.Repo
  else
    Postgres.Repo
  end

config :ecto_cellar, :repo, repo
config :ecto_cellar, ecto_repos: [repo]
| 25.4 | 50 | 0.744094 |
e80596ae37bc5521da5500b789bc716c0a257d64 | 395 | ex | Elixir | lib/rocketseat_nlw_heat_impulse_elixir/messages/create.ex | jobsonita/rocketseat_nlw_heat_impulse_elixir | 13b5dc0fe2137c1acbc42afee1d40c43e7f9883f | [
"MIT"
] | null | null | null | lib/rocketseat_nlw_heat_impulse_elixir/messages/create.ex | jobsonita/rocketseat_nlw_heat_impulse_elixir | 13b5dc0fe2137c1acbc42afee1d40c43e7f9883f | [
"MIT"
] | null | null | null | lib/rocketseat_nlw_heat_impulse_elixir/messages/create.ex | jobsonita/rocketseat_nlw_heat_impulse_elixir | 13b5dc0fe2137c1acbc42afee1d40c43e7f9883f | [
"MIT"
] | null | null | null | defmodule RocketseatNlwHeatImpulseElixir.Messages.Create do
alias RocketseatNlwHeatImpulseElixir.{Message, Repo}
# Validates `params` through the Message changeset, inserts the record,
# and normalizes the repo result via handle_insert/1.
def call(params) do
  changeset = Message.changeset(params)

  changeset
  |> Repo.insert()
  |> handle_insert()
end
# Successful inserts pass through untouched.
defp handle_insert({:ok, %Message{}} = success), do: success

# Failed inserts (changeset errors) are tagged with a :bad_request status.
defp handle_insert({:error, changeset}),
  do: {:error, %{result: changeset, status: :bad_request}}
end
| 23.235294 | 60 | 0.693671 |
e8059ef91761cf48c86aa0fca87efef79a1d1ee4 | 3,233 | exs | Elixir | elixir/triangle/triangle_test.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | [
"MIT"
] | null | null | null | elixir/triangle/triangle_test.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | [
"MIT"
] | null | null | null | elixir/triangle/triangle_test.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | [
"MIT"
] | null | null | null | if !System.get_env("EXERCISM_TEST_EXAMPLES") do
Code.load_file("triangle.exs")
end
ExUnit.start
ExUnit.configure exclude: :pending, trace: true
# defmodule IsTriangleTests do
# use ExUnit.Case
#
# test "triangle (4,5,6) is valid triangle" do
# assert Triangle.is_inequal?(4, 5, 6) == :ok
# end
#
# test "triangle (1, 1, 10) is not a triangle" do
# assert Triangle.is_inequal?(1, 1, 10) == {:error, "side lengths violate triangle inequality"}
# end
# end
#
# defmodule WithTests do
# use ExUnit.Case
#
# test "when triangle has negative side should return error with message" do
# assert Triangle.is_triangle?(-1, 1, 1) == {:error, "all side lengths must be positive"}
# end
#
# test "when arguments not make up valid triangle" do
# assert Triangle.is_triangle?(1, 1, 10) == {:error, "side lengths violate triangle inequality"}
# end
# end
defmodule TriangleTest do
  use ExUnit.Case

  # Equilateral: all three sides equal.
  test "equilateral triangles have equal sides",
    do: assert Triangle.kind(2, 2, 2) == {:ok, :equilateral}

  test "larger equilateral triangles also have equal sides",
    do: assert Triangle.kind(10, 10, 10) == {:ok, :equilateral}

  # Isosceles: exactly two sides equal, in any position.
  test "isosceles triangles have last two sides equal",
    do: assert Triangle.kind(3, 4, 4) == {:ok, :isosceles}

  test "isosceles triangles have first and last sides equal",
    do: assert Triangle.kind(4, 3, 4) == {:ok, :isosceles}

  test "isosceles triangles have two first sides equal",
    do: assert Triangle.kind(4, 4, 3) == {:ok, :isosceles}

  test "isosceles triangles have in fact exactly two sides equal",
    do: assert Triangle.kind(10, 10, 2) == {:ok, :isosceles}

  # Scalene: no equal sides.
  test "scalene triangles have no equal sides",
    do: assert Triangle.kind(3, 4, 5) == {:ok, :scalene}

  test "scalene triangles have no equal sides at a larger scale too",
    do: assert Triangle.kind(10, 11, 12) == {:ok, :scalene}

  test "scalene triangles have no equal sides in descending order either",
    do: assert Triangle.kind(5, 4, 2) == {:ok, :scalene}

  test "very small triangles are legal",
    do: assert Triangle.kind(0.4, 0.6, 0.3) == {:ok, :scalene}

  # Degenerate inputs: non-positive sides.
  test "triangles with no size are illegal",
    do: assert Triangle.kind(0, 0, 0) == {:error, "all side lengths must be positive"}

  test "triangles with negative sides are illegal",
    do: assert Triangle.kind(3, 4, -5) == {:error, "all side lengths must be positive"}

  # Triangle inequality: each side must be shorter than the sum of the others.
  test "triangles violating triangle inequality are illegal",
    do: assert Triangle.kind(1, 1, 3) == {:error, "side lengths violate triangle inequality"}

  test "triangles violating triangle inequality are illegal 2",
    do: assert Triangle.kind(2, 4, 2) == {:error, "side lengths violate triangle inequality"}

  test "triangles violating triangle inequality are illegal 3",
    do: assert Triangle.kind(7, 3, 2) == {:error, "side lengths violate triangle inequality"}
end
e805b5e259a0e690ee523a04c2f7e5ac8f310dfa | 1,469 | exs | Elixir | lib/iex/test/iex/server_test.exs | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | lib/iex/test/iex/server_test.exs | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | lib/iex/test/iex/server_test.exs | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | Code.require_file "../test_helper.exs", __DIR__
# NOTE(review): this file targets a pre-1.0 Elixir — `//` below is that era's
# default-argument operator (modern Elixir uses `\\`), and `IEx.Options` /
# `IEx.Server.take_over/4` are old APIs. Confirm the target version before
# modernizing.
defmodule IEx.ServerTest do
  use IEx.Case

  # Disable ANSI colors so captured IO can be matched as plain text.
  setup do
    IEx.Options.set :colors, enabled: false
    :ok
  end

  # Options

  test "prefix option" do
    assert capture_io(fn ->
      boot([prefix: "pry"])
    end) =~ "pry(1)> "
  end

  test "delegate_locals_to option" do
    # Unqualified `sort/1` should resolve to Enum.sort/1 via delegation.
    assert capture_io("sort([:foo, :bar])", fn ->
      boot([delegate_locals_to: Enum])
    end) =~ "[:bar, :foo]"
  end

  test "env option" do
    assert capture_io("__ENV__.file", fn ->
      boot([env: __ENV__])
    end) =~ "server_test.exs"
  end

  # Take over

  test "allows take over of the shell during boot" do
    # "Y" accepts the take-over prompt; "a+b" evaluates in the given binding.
    assert capture_io("Y\na+b", fn ->
      server = self
      boot([], fn ->
        opts = [prefix: "dbg", binding: [a: 1, b: 2]]
        IEx.Server.take_over("iex:13", opts, 1000, server)
      end)
    end) =~ "dbg(1)> "
  end

  test "does not operate if callback during boot fails" do
    assert capture_io(fn ->
      boot([], fn -> exit(0) end)
    end) == nil
  end

  test "take over fails when there is no shell" do
    assert IEx.Server.take_over("iex:13", [], 10) == { :error, :no_iex }
  end

  test "pry wraps around take over" do
    require IEx
    assert capture_io(fn ->
      assert IEx.pry == { :error, :no_iex }
    end) =~ "Is an IEx shell running?"
  end

  # Helpers

  # Starts an IEx server with an empty .iex path; `//` is this Elixir
  # version's default-argument syntax (see NOTE at top of module).
  defp boot(opts, callback // fn -> end) do
    IEx.Server.start(Keyword.merge([dot_iex_path: ""], opts), callback)
  end
end
| 22.257576 | 72 | 0.596324 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.