hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
08119751f9a39e79cf64d967ae51e58729dd632a | 2,529 | ex | Elixir | lib/ory/hydra/request.ex | sitch/ory-hydra-elixir | 529b5d120e0e857f9fadecf0e05f023f56394826 | [
"MIT"
] | null | null | null | lib/ory/hydra/request.ex | sitch/ory-hydra-elixir | 529b5d120e0e857f9fadecf0e05f023f56394826 | [
"MIT"
] | null | null | null | lib/ory/hydra/request.ex | sitch/ory-hydra-elixir | 529b5d120e0e857f9fadecf0e05f023f56394826 | [
"MIT"
] | null | null | null | defmodule ORY.Hydra.Request do
alias ORY.Hydra.{Helpers, Response}
@type t ::
%__MODULE__{
attempt: integer,
body: binary,
headers: ORY.Hydra.http_headers_t(),
method: ORY.Hydra.http_method_t(),
result: any,
url: String.t()
}
defstruct attempt: 0,
body: nil,
headers: nil,
method: nil,
result: nil,
url: nil
# Form-encoded requests pass the operation params through untouched; the
# configured HTTP client is responsible for urlencoding them on the wire.
defp form_encode(operation), do: operation.params
@doc """
Builds a request struct for `operation` and dispatches it with the HTTP
client configured in `config`.

JSON-encodes the body for `:delete`, `:get`, `:post` and `:put` operations,
and form-encodes the params for `:form` operations, setting the matching
`content-type` header in each case.
"""
@spec send(ORY.Hydra.Operation.t(), ORY.Hydra.Config.t()) :: ORY.Hydra.response_t()
def send(%{method: method} = operation, config) do
  url = Helpers.URL.to_string(operation, config)

  {body, headers} =
    case method do
      method when method in [:delete, :get, :post, :put] ->
        {Helpers.Body.encode!(operation, config), [{"content-type", "application/json"}]}

      :form ->
        {form_encode(operation), [{"content-type", "application/x-www-form-urlencoded"}]}
    end

  # Build the struct in a single literal instead of chained `Map.put/3` calls:
  # `Map.put/3` bypasses struct key validation, while the literal is checked
  # at compile time.
  request = %__MODULE__{url: url, body: body, method: method, headers: headers}

  dispatch(request, config)
end
# Executes `request` with the configured HTTP client, records the attempt
# count and raw result on the struct, then runs the retry/finish pipeline.
defp dispatch(request, config) do
  result =
    config.http_client.request(
      request.method,
      request.url,
      request.headers,
      request.body,
      config.http_client_opts
    )

  # Struct update syntax validates keys at compile time, unlike `Map.put/3`,
  # and updates both fields in one expression.
  request = %{request | attempt: request.attempt + 1, result: result}

  request
  |> retry(config)
  |> finish(config)
end
# Re-dispatches the request when retries are enabled, the attempt budget is
# not exhausted, and the result is retryable (a 5xx response or a transport
# error). In every other case the request is returned unchanged.
defp retry(request, config) do
  budget = Keyword.get(config.retry_opts, :max_attempts, 3)

  cond do
    !config.retry or request.attempt >= budget -> request
    retryable?(request.result) -> dispatch(request, config)
    true -> request
  end
end

# Server errors (status >= 500) and client-level failures are worth retrying;
# anything else — including responses without a status_code — is not.
defp retryable?({:ok, %{status_code: status_code}}), do: status_code >= 500
defp retryable?({:error, _reason}), do: true
defp retryable?(_other), do: false
# Translates the raw HTTP result into the public contract: >= 400 becomes
# `{:error, Response.t}`, 200..399 becomes `{:ok, Response.t}`, and anything
# else (transport errors, 1xx statuses) is passed through untouched.
defp finish(request, config) do
  case request.result do
    {:ok, %{status_code: code} = response} when code >= 400 ->
      {:error, Response.new(response, config)}

    {:ok, %{status_code: code} = response} when code >= 200 ->
      {:ok, Response.new(response, config)}

    passthrough ->
      passthrough
  end
end
end
| 25.545455 | 91 | 0.591934 |
0811eeef20bfeb6ee0b5ee7345df66ac7f66f974 | 2,149 | ex | Elixir | clients/private_ca/lib/google_api/private_ca/v1beta1/model/subject_config.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/private_ca/lib/google_api/private_ca/v1beta1/model/subject_config.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/private_ca/lib/google_api/private_ca/v1beta1/model/subject_config.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.PrivateCA.V1beta1.Model.SubjectConfig do
  @moduledoc """
  These values are used to create the distinguished name and subject alternative name fields in an X.509 certificate.

  ## Attributes

  *   `commonName` (*type:* `String.t`, *default:* `nil`) - Optional. The "common name" of the distinguished name.
  *   `subject` (*type:* `GoogleApi.PrivateCA.V1beta1.Model.Subject.t`, *default:* `nil`) - Required. Contains distinguished name fields such as the location and organization.
  *   `subjectAltName` (*type:* `GoogleApi.PrivateCA.V1beta1.Model.SubjectAltNames.t`, *default:* `nil`) - Optional. The subject alternative name fields.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :commonName => String.t(),
          :subject => GoogleApi.PrivateCA.V1beta1.Model.Subject.t(),
          :subjectAltName => GoogleApi.PrivateCA.V1beta1.Model.SubjectAltNames.t()
        }

  # Field definitions drive JSON (de)serialization via GoogleApi.Gax.ModelBase.
  # NOTE(review): this file is auto-generated — change the generator, not this file.
  field(:commonName)
  field(:subject, as: GoogleApi.PrivateCA.V1beta1.Model.Subject)
  field(:subjectAltName, as: GoogleApi.PrivateCA.V1beta1.Model.SubjectAltNames)
end
# Delegates Poison decoding to the model's own `decode/2` (from ModelBase).
defimpl Poison.Decoder, for: GoogleApi.PrivateCA.V1beta1.Model.SubjectConfig do
  def decode(value, options) do
    GoogleApi.PrivateCA.V1beta1.Model.SubjectConfig.decode(value, options)
  end
end
# Delegates Poison encoding to the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.PrivateCA.V1beta1.Model.SubjectConfig do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 40.54717 | 175 | 0.743136 |
0811f418e55b284d06f1a580b6563e8210378159 | 1,609 | exs | Elixir | config/dev.exs | DaveMuirhead/naboo-server | daf15d9b92ad6a3f85eb42bdbe7125c489a079c1 | [
"Apache-2.0"
] | null | null | null | config/dev.exs | DaveMuirhead/naboo-server | daf15d9b92ad6a3f85eb42bdbe7125c489a079c1 | [
"Apache-2.0"
] | null | null | null | config/dev.exs | DaveMuirhead/naboo-server | daf15d9b92ad6a3f85eb42bdbe7125c489a079c1 | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :naboo, NabooWeb.Endpoint,
http: [port: 4004],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: []
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
# Database connection for the development environment.
# NOTE(review): with Ecto >= 3 the `:adapter` option here is ignored — the
# adapter is declared on the Repo module itself; confirm before removing.
config :naboo, Naboo.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: "naboo",
  password: "naboo",
  database: "naboo_dev",
  hostname: "localhost",
  # non-default Postgres port (default is 5432), e.g. a Docker-mapped instance
  port: 5434,
  pool_size: 10
| 27.741379 | 68 | 0.717216 |
0811f838c814c27071ea9dd219c5f794262a5ba4 | 1,176 | exs | Elixir | test/command_test.exs | lubien/nats.ex | 9ddb11acbca7727e97c6624425a2cb22243cfafe | [
"MIT"
] | 102 | 2019-04-17T06:19:48.000Z | 2022-03-31T02:36:11.000Z | test/command_test.exs | lubien/nats.ex | 9ddb11acbca7727e97c6624425a2cb22243cfafe | [
"MIT"
] | 68 | 2017-03-23T04:54:00.000Z | 2019-03-04T03:30:28.000Z | test/command_test.exs | lubien/nats.ex | 9ddb11acbca7727e97c6624425a2cb22243cfafe | [
"MIT"
] | 17 | 2019-05-07T19:33:17.000Z | 2022-03-24T18:27:25.000Z | defmodule Gnat.CommandTest do
use ExUnit.Case, async: true
alias Gnat.Command
# Wire format: "PUB <subject> <byte-count>\r\n<payload>\r\n"
test "formatting a simple pub message" do
  iodata = Command.build(:pub, "topic", "payload", [])
  assert IO.iodata_to_binary(iodata) == "PUB topic 7\r\npayload\r\n"
end
# With a reply-to inbox: "PUB <subject> <reply-to> <byte-count>\r\n<payload>\r\n"
test "formatting a pub with reply_to set" do
  iodata = Command.build(:pub, "topic", "payload", reply_to: "INBOX")
  assert IO.iodata_to_binary(iodata) == "PUB topic INBOX 7\r\npayload\r\n"
end
# Wire format: "SUB <subject> <sid>\r\n"
test "formatting a basic sub message" do
  iodata = Command.build(:sub, "foobar", 4, [])
  assert IO.iodata_to_binary(iodata) == "SUB foobar 4\r\n"
end
# With a queue group: "SUB <subject> <queue-group> <sid>\r\n"
test "formatting a sub with a queue group" do
  iodata = Command.build(:sub, "foobar", 5, queue_group: "us")
  assert IO.iodata_to_binary(iodata) == "SUB foobar us 5\r\n"
end
# Wire format: "UNSUB <sid>\r\n"
test "formatting a simple unsub message" do
  iodata = Command.build(:unsub, 12, [])
  assert IO.iodata_to_binary(iodata) == "UNSUB 12\r\n"
end
# With a message cap: "UNSUB <sid> <max-messages>\r\n"
test "formatting an unsub message with max messages" do
  iodata = Command.build(:unsub, 12, max_messages: 3)
  assert IO.iodata_to_binary(iodata) == "UNSUB 12 3\r\n"
end
end
| 33.6 | 97 | 0.67432 |
081220245a67e9bf5731b60e0ba76a1a2365771f | 1,356 | exs | Elixir | test/mastani_server/cms/video_test.exs | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | 1 | 2019-05-07T15:03:54.000Z | 2019-05-07T15:03:54.000Z | test/mastani_server/cms/video_test.exs | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null | test/mastani_server/cms/video_test.exs | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null | defmodule MastaniServer.Test.CMS.Video do
use MastaniServer.TestTools
alias Helper.ORM
alias MastaniServer.CMS
setup do
{:ok, user} = db_insert(:user)
{:ok, community} = db_insert(:community)
video_attrs = mock_attrs(:video, %{community_id: community.id})
{:ok, ~m(user community video_attrs)a}
end
describe "[cms video curd]" do
alias CMS.{Author, Community}
test "can create video with valid attrs", ~m(user community video_attrs)a do
assert {:error, _} = ORM.find_by(Author, user_id: user.id)
{:ok, video} = CMS.create_content(community, :video, video_attrs, user)
assert video.title == video_attrs.title
end
test "add user to cms authors, if the user is not exsit in cms authors",
~m(user community video_attrs)a do
assert {:error, _} = ORM.find_by(Author, user_id: user.id)
{:ok, _} = CMS.create_content(community, :video, video_attrs, user)
{:ok, author} = ORM.find_by(Author, user_id: user.id)
assert author.user_id == user.id
end
test "create post with on exsit community fails", ~m(user)a do
invalid_attrs = mock_attrs(:video, %{community_id: non_exsit_id()})
ivalid_community = %Community{id: non_exsit_id()}
assert {:error, _} = CMS.create_content(ivalid_community, :video, invalid_attrs, user)
end
end
end
| 31.534884 | 92 | 0.675516 |
0812473e65ecf2f2e61e61c4113d9f26a2faac97 | 1,785 | ex | Elixir | clients/plus_domains/lib/google_api/plus_domains/v1/model/videostream.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/plus_domains/lib/google_api/plus_domains/v1/model/videostream.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/plus_domains/lib/google_api/plus_domains/v1/model/videostream.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.PlusDomains.V1.Model.Videostream do
  @moduledoc """
  ## Attributes

  *   `height` (*type:* `integer()`, *default:* `nil`) - The height, in pixels, of the video resource.
  *   `type` (*type:* `String.t`, *default:* `nil`) - MIME type of the video stream.
  *   `url` (*type:* `String.t`, *default:* `nil`) - URL of the video stream.
  *   `width` (*type:* `integer()`, *default:* `nil`) - The width, in pixels, of the video resource.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :height => integer(),
          :type => String.t(),
          :url => String.t(),
          :width => integer()
        }

  # Field definitions drive JSON (de)serialization via GoogleApi.Gax.ModelBase.
  # NOTE(review): this file is auto-generated — change the generator, not this file.
  field(:height)
  field(:type)
  field(:url)
  field(:width)
end
# Delegates Poison decoding to the model's own `decode/2` (from ModelBase).
defimpl Poison.Decoder, for: GoogleApi.PlusDomains.V1.Model.Videostream do
  def decode(value, options) do
    GoogleApi.PlusDomains.V1.Model.Videostream.decode(value, options)
  end
end
# Delegates Poison encoding to the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.PlusDomains.V1.Model.Videostream do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 31.875 | 102 | 0.686275 |
0812616ba8dd676b3620eeda35bc947d5e10bdb7 | 61 | exs | Elixir | config/dev.exs | dominique-vassard/boltex | acf71d9c97782d17ad9c07f7ea2a7e7f12dbb311 | [
"Apache-2.0"
] | 31 | 2016-07-19T00:05:56.000Z | 2020-04-28T12:28:43.000Z | config/dev.exs | dominique-vassard/boltex | acf71d9c97782d17ad9c07f7ea2a7e7f12dbb311 | [
"Apache-2.0"
] | 24 | 2016-07-31T20:17:51.000Z | 2019-03-21T15:45:59.000Z | config/dev.exs | dominique-vassard/boltex | acf71d9c97782d17ad9c07f7ea2a7e7f12dbb311 | [
"Apache-2.0"
] | 8 | 2016-08-01T12:56:29.000Z | 2017-03-22T23:02:49.000Z | use Mix.Config
# Development logging: `log` prints driver activity, `log_hex` additionally
# dumps the raw Bolt packet bytes in hex.
config :boltex,
  log: true,
  log_hex: true
| 10.166667 | 15 | 0.688525 |
08126df225e1525783621ccd1e44bf76013080b7 | 38,436 | exs | Elixir | lib/elixir/test/elixir/stream_test.exs | mguimas/elixir | 161f5f9a3b6bc38faa152e08c567486b71b0602a | [
"Apache-2.0"
] | 1 | 2018-02-24T19:48:35.000Z | 2018-02-24T19:48:35.000Z | lib/elixir/test/elixir/stream_test.exs | mguimas/elixir | 161f5f9a3b6bc38faa152e08c567486b71b0602a | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/stream_test.exs | mguimas/elixir | 161f5f9a3b6bc38faa152e08c567486b71b0602a | [
"Apache-2.0"
] | null | null | null | Code.require_file("test_helper.exs", __DIR__)
defmodule StreamTest do
use ExUnit.Case, async: true
doctest Stream
defmodule Pdict do
  # Test collectable: records `Collectable.into/1` events in the process
  # dictionary (`:stream_cont` accumulates values in reverse, `:stream_done` /
  # `:stream_halt` flag termination) so tests can observe how a stream drove
  # the collector.
  defstruct []

  defimpl Collectable do
    def into(struct) do
      fun = fn
        _, {:cont, x} -> Process.put(:stream_cont, [x | Process.get(:stream_cont)])
        _, :done -> Process.put(:stream_done, true)
        _, :halt -> Process.put(:stream_halt, true)
      end

      {struct, fun}
    end
  end
end
defmodule HaltAcc do
  # Test enumerable whose `reduce/3` immediately reports `:halted`, returning
  # the wrapped `:acc` as the partial result — used to exercise stream code
  # paths that must cope with inner enumerables halting on their own.
  defstruct [:acc]

  defimpl Enumerable do
    def count(_lazy), do: {:error, __MODULE__}
    def member?(_lazy, _value), do: {:error, __MODULE__}
    def slice(_lazy), do: {:error, __MODULE__}

    def reduce(lazy, _acc, _fun) do
      {:halted, Enum.to_list(lazy.acc)}
    end
  end
end
test "streams as enumerables" do
  doubled = Stream.map([1, 2, 3], fn n -> n * 2 end)

  # reduce protocol callback
  assert Enum.map(doubled, fn n -> n + 1 end) == [3, 5, 7]

  # member? protocol callback
  assert Enum.member?(doubled, 4)
  refute Enum.member?(doubled, 1)

  # count protocol callback
  assert Enum.count(doubled) == 3
end
test "streams are composable" do
  doubled = Stream.map([1, 2, 3], fn n -> n * 2 end)
  assert lazy?(doubled)

  # composing two maps stays lazy until a terminal Enum call forces it
  incremented = Stream.map(doubled, fn n -> n + 1 end)
  assert lazy?(incremented)

  assert Enum.to_list(incremented) == [3, 5, 7]
end
test "chunk_every/2, chunk_every/3 and chunk_every/4" do
assert Stream.chunk_every([1, 2, 3, 4, 5], 2) |> Enum.to_list() == [[1, 2], [3, 4], [5]]
assert Stream.chunk_every([1, 2, 3, 4, 5], 2, 2, [6]) |> Enum.to_list() ==
[[1, 2], [3, 4], [5, 6]]
assert Stream.chunk_every([1, 2, 3, 4, 5, 6], 3, 2, :discard) |> Enum.to_list() ==
[[1, 2, 3], [3, 4, 5]]
assert Stream.chunk_every([1, 2, 3, 4, 5, 6], 2, 3, :discard) |> Enum.to_list() ==
[[1, 2], [4, 5]]
assert Stream.chunk_every([1, 2, 3, 4, 5, 6], 3, 2, []) |> Enum.to_list() ==
[[1, 2, 3], [3, 4, 5], [5, 6]]
assert Stream.chunk_every([1, 2, 3, 4, 5, 6], 3, 3, []) |> Enum.to_list() ==
[[1, 2, 3], [4, 5, 6]]
assert Stream.chunk_every([1, 2, 3, 4, 5], 4, 4, 6..10) |> Enum.to_list() ==
[[1, 2, 3, 4], [5, 6, 7, 8]]
end
test "chunk_every/4 is zippable" do
stream = Stream.chunk_every([1, 2, 3, 4, 5, 6], 3, 2, [])
list = Enum.to_list(stream)
assert Enum.zip(list, list) == Enum.zip(stream, stream)
end
test "chunk_every/4 is haltable" do
assert 1..10 |> Stream.take(6) |> Stream.chunk_every(4, 4, [7, 8]) |> Enum.to_list() ==
[[1, 2, 3, 4], [5, 6, 7, 8]]
assert 1..10
|> Stream.take(6)
|> Stream.chunk_every(4, 4, [7, 8])
|> Stream.take(3)
|> Enum.to_list() == [[1, 2, 3, 4], [5, 6, 7, 8]]
assert 1..10
|> Stream.take(6)
|> Stream.chunk_every(4, 4, [7, 8])
|> Stream.take(2)
|> Enum.to_list() == [[1, 2, 3, 4], [5, 6, 7, 8]]
assert 1..10
|> Stream.take(6)
|> Stream.chunk_every(4, 4, [7, 8])
|> Stream.take(1)
|> Enum.to_list() == [[1, 2, 3, 4]]
assert 1..6 |> Stream.take(6) |> Stream.chunk_every(4, 4, [7, 8]) |> Enum.to_list() ==
[[1, 2, 3, 4], [5, 6, 7, 8]]
end
test "chunk_by/2" do
stream = Stream.chunk_by([1, 2, 2, 3, 4, 4, 6, 7, 7], &(rem(&1, 2) == 1))
assert lazy?(stream)
assert Enum.to_list(stream) == [[1], [2, 2], [3], [4, 4, 6], [7, 7]]
assert stream |> Stream.take(3) |> Enum.to_list() == [[1], [2, 2], [3]]
assert 1..10 |> Stream.chunk_every(2) |> Enum.take(2) == [[1, 2], [3, 4]]
end
test "chunk_by/2 is zippable" do
stream = Stream.chunk_by([1, 2, 2, 3], &(rem(&1, 2) == 1))
list = Enum.to_list(stream)
assert Enum.zip(list, list) == Enum.zip(stream, stream)
end
test "chunk_while/4" do
chunk_fun = fn i, acc ->
cond do
i > 10 -> {:halt, acc}
rem(i, 2) == 0 -> {:cont, Enum.reverse([i | acc]), []}
true -> {:cont, [i | acc]}
end
end
after_fun = fn
[] -> {:cont, []}
acc -> {:cont, Enum.reverse(acc), []}
end
assert Stream.chunk_while([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], [], chunk_fun, after_fun)
|> Enum.to_list() == [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]]
assert Stream.chunk_while(0..9, [], chunk_fun, after_fun) |> Enum.to_list() ==
[[0], [1, 2], [3, 4], [5, 6], [7, 8], [9]]
assert Stream.chunk_while(0..10, [], chunk_fun, after_fun) |> Enum.to_list() ==
[[0], [1, 2], [3, 4], [5, 6], [7, 8], [9, 10]]
assert Stream.chunk_while(0..11, [], chunk_fun, after_fun) |> Enum.to_list() ==
[[0], [1, 2], [3, 4], [5, 6], [7, 8], [9, 10]]
assert Stream.chunk_while([5, 7, 9, 11], [], chunk_fun, after_fun) |> Enum.to_list() ==
[[5, 7, 9]]
end
test "chunk_while/4 with inner halt" do
chunk_fun = fn
i, [] ->
{:cont, [i]}
i, chunk ->
if rem(i, 2) == 0 do
{:cont, Enum.reverse(chunk), [i]}
else
{:cont, [i | chunk]}
end
end
after_fun = fn
[] -> {:cont, []}
chunk -> {:cont, Enum.reverse(chunk), []}
end
assert Stream.chunk_while([1, 2, 3, 4, 5], [], chunk_fun, after_fun) |> Enum.at(0) == [1]
end
test "concat/1" do
stream = Stream.concat([1..3, [], [4, 5, 6], [], 7..9])
assert is_function(stream)
assert Enum.to_list(stream) == [1, 2, 3, 4, 5, 6, 7, 8, 9]
assert Enum.take(stream, 5) == [1, 2, 3, 4, 5]
stream = Stream.concat([1..3, [4, 5, 6], Stream.cycle(7..100)])
assert is_function(stream)
assert Enum.take(stream, 13) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
end
test "concat/2" do
stream = Stream.concat(1..3, 4..6)
assert is_function(stream)
assert Stream.cycle(stream) |> Enum.take(16) ==
[1, 2, 3, 4, 5, 6, 1, 2, 3, 4, 5, 6, 1, 2, 3, 4]
stream = Stream.concat(1..3, [])
assert is_function(stream)
assert Stream.cycle(stream) |> Enum.take(5) == [1, 2, 3, 1, 2]
stream = Stream.concat(1..6, Stream.cycle(7..9))
assert is_function(stream)
assert Stream.drop(stream, 3) |> Enum.take(13) == [4, 5, 6, 7, 8, 9, 7, 8, 9, 7, 8, 9, 7]
stream = Stream.concat(Stream.cycle(1..3), Stream.cycle(4..6))
assert is_function(stream)
assert Enum.take(stream, 13) == [1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1]
end
test "concat/2 is zippable" do
stream = 1..2 |> Stream.take(2) |> Stream.concat(3..4)
assert Enum.zip(1..4, [1, 2, 3, 4]) == Enum.zip(1..4, stream)
end
test "concat/2 does not intercept wrapped lazy enumeration" do
# concat returns a lazy enumeration that does not halt
assert Stream.concat([[0], Stream.map([1, 2, 3], & &1), [4]])
|> Stream.take_while(fn x -> x <= 4 end)
|> Enum.to_list() == [0, 1, 2, 3, 4]
# concat returns a lazy enumeration that does halts
assert Stream.concat([[0], Stream.take_while(1..6, &(&1 <= 3)), [4]])
|> Stream.take_while(fn x -> x <= 4 end)
|> Enum.to_list() == [0, 1, 2, 3, 4]
end
test "cycle/1" do
stream = Stream.cycle([1, 2, 3])
assert is_function(stream)
assert_raise ArgumentError, "cannot cycle over empty enumerable", fn ->
Stream.cycle([])
end
assert_raise ArgumentError, "cannot cycle over empty enumerable", fn ->
Stream.cycle(%{}) |> Enum.to_list()
end
assert Stream.cycle([1, 2, 3]) |> Stream.take(5) |> Enum.to_list() == [1, 2, 3, 1, 2]
assert Enum.take(stream, 5) == [1, 2, 3, 1, 2]
end
test "cycle/1 is zippable" do
stream = Stream.cycle([1, 2, 3])
assert Enum.zip(1..6, [1, 2, 3, 1, 2, 3]) == Enum.zip(1..6, stream)
end
test "cycle/1 with inner stream" do
assert [1, 2, 3] |> Stream.take(2) |> Stream.cycle() |> Enum.take(4) == [1, 2, 1, 2]
end
test "dedup/1 is lazy" do
assert lazy?(Stream.dedup([1, 2, 3]))
end
test "dedup/1" do
  assert Enum.to_list(Stream.dedup([])) == []
  assert Enum.to_list(Stream.dedup([nil])) == [nil]
  assert Enum.to_list(Stream.dedup([1, 2, 3, 4])) == [1, 2, 3, 4]
  assert Enum.to_list(Stream.dedup([1, 1, 2, 1, 1, 2, 1])) == [1, 2, 1, 2, 1]
  assert Enum.to_list(Stream.dedup([2, 1, 1, 2, 1])) == [2, 1, 2, 1]

  # strict (===) comparison: 1 and 1.0 are distinct elements
  assert Enum.to_list(Stream.dedup([1, 1.0, 2.0, 2])) == [1, 1.0, 2.0, 2]

  assert Enum.to_list(Stream.dedup([nil, nil, true, {:value, true}])) ==
           [nil, true, {:value, true}]
end
test "dedup_by/2" do
assert Stream.dedup_by([{1, :x}, {2, :y}, {2, :z}, {1, :x}], fn {x, _} -> x end)
|> Enum.to_list() == [{1, :x}, {2, :y}, {1, :x}]
end
test "drop/2" do
stream = Stream.drop(1..10, 5)
assert lazy?(stream)
assert Enum.to_list(stream) == [6, 7, 8, 9, 10]
assert Enum.to_list(Stream.drop(1..5, 0)) == [1, 2, 3, 4, 5]
assert Enum.to_list(Stream.drop(1..3, 5)) == []
nats = Stream.iterate(1, &(&1 + 1))
assert Stream.drop(nats, 2) |> Enum.take(5) == [3, 4, 5, 6, 7]
end
test "drop/2 with negative count" do
stream = Stream.drop(1..10, -5)
assert lazy?(stream)
assert Enum.to_list(stream) == [1, 2, 3, 4, 5]
stream = Stream.drop(1..10, -5)
list = Enum.to_list(stream)
assert Enum.zip(list, list) == Enum.zip(stream, stream)
end
test "drop/2 with negative count stream entries" do
par = self()
pid =
spawn_link(fn ->
Enum.each(Stream.drop(&inbox_stream/2, -3), fn x -> send(par, {:stream, x}) end)
end)
send(pid, {:stream, 1})
send(pid, {:stream, 2})
send(pid, {:stream, 3})
refute_receive {:stream, 1}
send(pid, {:stream, 4})
assert_receive {:stream, 1}
send(pid, {:stream, 5})
assert_receive {:stream, 2}
refute_receive {:stream, 3}
end
test "drop_every/2" do
assert 1..10
|> Stream.drop_every(2)
|> Enum.to_list() == [2, 4, 6, 8, 10]
assert 1..10
|> Stream.drop_every(3)
|> Enum.to_list() == [2, 3, 5, 6, 8, 9]
assert 1..10
|> Stream.drop(2)
|> Stream.drop_every(2)
|> Stream.drop(1)
|> Enum.to_list() == [6, 8, 10]
assert 1..10
|> Stream.drop_every(0)
|> Enum.to_list() == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
assert []
|> Stream.drop_every(10)
|> Enum.to_list() == []
end
test "drop_every/2 without non-negative integer" do
assert_raise FunctionClauseError, fn ->
Stream.drop_every(1..10, -1)
end
assert_raise FunctionClauseError, fn ->
Stream.drop_every(1..10, 3.33)
end
end
test "drop_while/2" do
stream = Stream.drop_while(1..10, &(&1 <= 5))
assert lazy?(stream)
assert Enum.to_list(stream) == [6, 7, 8, 9, 10]
assert Enum.to_list(Stream.drop_while(1..5, &(&1 <= 0))) == [1, 2, 3, 4, 5]
assert Enum.to_list(Stream.drop_while(1..3, &(&1 <= 5))) == []
nats = Stream.iterate(1, &(&1 + 1))
assert Stream.drop_while(nats, &(&1 <= 5)) |> Enum.take(5) == [6, 7, 8, 9, 10]
end
test "each/2" do
Process.put(:stream_each, [])
stream =
Stream.each([1, 2, 3], fn x ->
Process.put(:stream_each, [x | Process.get(:stream_each)])
end)
assert lazy?(stream)
assert Enum.to_list(stream) == [1, 2, 3]
assert Process.get(:stream_each) == [3, 2, 1]
end
test "filter/2" do
stream = Stream.filter([1, 2, 3], fn x -> rem(x, 2) == 0 end)
assert lazy?(stream)
assert Enum.to_list(stream) == [2]
nats = Stream.iterate(1, &(&1 + 1))
assert Stream.filter(nats, &(rem(&1, 2) == 0)) |> Enum.take(5) == [2, 4, 6, 8, 10]
end
test "flat_map/2" do
stream = Stream.flat_map([1, 2, 3], &[&1, &1 * 2])
assert lazy?(stream)
assert Enum.to_list(stream) == [1, 2, 2, 4, 3, 6]
nats = Stream.iterate(1, &(&1 + 1))
assert Stream.flat_map(nats, &[&1, &1 * 2]) |> Enum.take(6) == [1, 2, 2, 4, 3, 6]
end
test "flat_map/2 does not intercept wrapped lazy enumeration" do
# flat_map returns a lazy enumeration that does not halt
assert [1, 2, 3, -1, -2]
|> Stream.flat_map(fn x -> Stream.map([x, x + 1], & &1) end)
|> Stream.take_while(fn x -> x >= 0 end)
|> Enum.to_list() == [1, 2, 2, 3, 3, 4]
# flat_map returns a lazy enumeration that does halts
assert [1, 2, 3, -1, -2]
|> Stream.flat_map(fn x -> Stream.take_while([x, x + 1, x + 2], &(&1 <= x + 1)) end)
|> Stream.take_while(fn x -> x >= 0 end)
|> Enum.to_list() == [1, 2, 2, 3, 3, 4]
# flat_map returns a lazy enumeration that does halts wrapped in an enumerable
assert [1, 2, 3, -1, -2]
|> Stream.flat_map(fn x ->
Stream.concat([x], Stream.take_while([x + 1, x + 2], &(&1 <= x + 1)))
end)
|> Stream.take_while(fn x -> x >= 0 end)
|> Enum.to_list() == [1, 2, 2, 3, 3, 4]
end
test "flat_map/2 is zippable" do
stream =
[1, 2, 3, -1, -2]
|> Stream.flat_map(fn x -> Stream.map([x, x + 1], & &1) end)
|> Stream.take_while(fn x -> x >= 0 end)
list = Enum.to_list(stream)
assert Enum.zip(list, list) == Enum.zip(stream, stream)
end
test "flat_map/2 does not leave inner stream suspended" do
stream =
Stream.flat_map([1, 2, 3], fn i ->
Stream.resource(fn -> i end, fn acc -> {[acc], acc + 1} end, fn _ ->
Process.put(:stream_flat_map, true)
end)
end)
Process.put(:stream_flat_map, false)
assert stream |> Enum.take(3) == [1, 2, 3]
assert Process.get(:stream_flat_map)
end
test "flat_map/2 does not leave outer stream suspended" do
stream =
Stream.resource(fn -> 1 end, fn acc -> {[acc], acc + 1} end, fn _ ->
Process.put(:stream_flat_map, true)
end)
stream = Stream.flat_map(stream, fn i -> [i, i + 1, i + 2] end)
Process.put(:stream_flat_map, false)
assert stream |> Enum.take(3) == [1, 2, 3]
assert Process.get(:stream_flat_map)
end
test "flat_map/2 closes on error" do
stream =
Stream.resource(fn -> 1 end, fn acc -> {[acc], acc + 1} end, fn _ ->
Process.put(:stream_flat_map, true)
end)
stream = Stream.flat_map(stream, fn _ -> throw(:error) end)
Process.put(:stream_flat_map, false)
assert catch_throw(Enum.to_list(stream)) == :error
assert Process.get(:stream_flat_map)
end
test "flat_map/2 with inner flat_map/2" do
stream =
Stream.flat_map(1..5, fn x ->
Stream.flat_map([x], fn x ->
x..(x * x)
end)
|> Stream.map(&(&1 * 1))
end)
assert Enum.take(stream, 5) == [1, 2, 3, 4, 3]
end
test "flat_map/2 properly halts both inner and outer stream when inner stream is halted" do
# Fixes a bug that, when the inner stream was done,
# sending it a halt would cause it to return the
# inner stream was halted, forcing flat_map to get
# the next value from the outer stream, evaluate it,
# get another inner stream, just to halt it.
# 2 should never be used
assert [1, 2]
|> Stream.flat_map(fn 1 -> Stream.repeatedly(fn -> 1 end) end)
|> Stream.flat_map(fn 1 -> Stream.repeatedly(fn -> 1 end) end)
|> Enum.take(1) == [1]
end
test "interval/1" do
stream = Stream.interval(10)
now = :os.timestamp()
assert Enum.take(stream, 5) == [0, 1, 2, 3, 4]
assert :timer.now_diff(:os.timestamp(), now) >= 50000
end
test "into/2 and run/1" do
Process.put(:stream_cont, [])
Process.put(:stream_done, false)
Process.put(:stream_halt, false)
stream = Stream.into([1, 2, 3], %Pdict{})
assert lazy?(stream)
assert Stream.run(stream) == :ok
assert Process.get(:stream_cont) == [3, 2, 1]
assert Process.get(:stream_done)
refute Process.get(:stream_halt)
stream = Stream.into(fn _, _ -> raise "error" end, %Pdict{})
catch_error(Stream.run(stream))
assert Process.get(:stream_halt)
end
test "into/3" do
Process.put(:stream_cont, [])
Process.put(:stream_done, false)
Process.put(:stream_halt, false)
stream = Stream.into([1, 2, 3], %Pdict{}, fn x -> x * 2 end)
assert lazy?(stream)
assert Enum.to_list(stream) == [1, 2, 3]
assert Process.get(:stream_cont) == [6, 4, 2]
assert Process.get(:stream_done)
refute Process.get(:stream_halt)
end
test "into/2 with halting" do
Process.put(:stream_cont, [])
Process.put(:stream_done, false)
Process.put(:stream_halt, false)
stream = Stream.into([1, 2, 3], %Pdict{})
assert lazy?(stream)
assert Enum.take(stream, 1) == [1]
assert Process.get(:stream_cont) == [1]
assert Process.get(:stream_done)
refute Process.get(:stream_halt)
end
test "transform/3" do
stream = Stream.transform([1, 2, 3], 0, &{[&1, &2], &1 + &2})
assert lazy?(stream)
assert Enum.to_list(stream) == [1, 0, 2, 1, 3, 3]
nats = Stream.iterate(1, &(&1 + 1))
assert Stream.transform(nats, 0, &{[&1, &2], &1 + &2}) |> Enum.take(6) == [1, 0, 2, 1, 3, 3]
end
test "transform/3 with early halt" do
stream =
fn -> throw(:error) end
|> Stream.repeatedly()
|> Stream.transform(nil, &{[&1, &2], &1})
assert {:halted, nil} = Enumerable.reduce(stream, {:halt, nil}, fn _, _ -> throw(:error) end)
end
test "transform/3 with early suspend" do
stream =
Stream.repeatedly(fn -> throw(:error) end)
|> Stream.transform(nil, &{[&1, &2], &1})
assert {:suspended, nil, _} =
Enumerable.reduce(stream, {:suspend, nil}, fn _, _ -> throw(:error) end)
end
test "transform/3 with halt" do
stream =
Stream.resource(fn -> 1 end, fn acc -> {[acc], acc + 1} end, fn _ ->
Process.put(:stream_transform, true)
end)
stream =
Stream.transform(stream, 0, fn i, acc ->
if acc < 3, do: {[i], acc + 1}, else: {:halt, acc}
end)
Process.put(:stream_transform, false)
assert Enum.to_list(stream) == [1, 2, 3]
assert Process.get(:stream_transform)
end
test "transform/3 (via flat_map) handles multiple returns from suspension" do
assert [false]
|> Stream.take(1)
|> Stream.concat([true])
|> Stream.flat_map(&[&1])
|> Enum.to_list() == [false, true]
end
test "iterate/2" do
  evens = Stream.iterate(0, fn n -> n + 2 end)
  assert Enum.take(evens, 5) == [0, 2, 4, 6, 8]

  odds = Stream.iterate(5, fn n -> n + 2 end)
  assert Enum.take(odds, 5) == [5, 7, 9, 11, 13]

  # laziness: the successor fun must never run when only the seed is taken
  explosive = Stream.iterate("HELLO", &raise/1)
  assert Enum.take(explosive, 1) == ["HELLO"]
end
test "map/2" do
stream = Stream.map([1, 2, 3], &(&1 * 2))
assert lazy?(stream)
assert Enum.to_list(stream) == [2, 4, 6]
nats = Stream.iterate(1, &(&1 + 1))
assert Stream.map(nats, &(&1 * 2)) |> Enum.take(5) == [2, 4, 6, 8, 10]
assert Stream.map(nats, &(&1 - 2)) |> Stream.map(&(&1 * 2)) |> Enum.take(3) == [-2, 0, 2]
end
test "map_every/3" do
assert 1..10
|> Stream.map_every(2, &(&1 * 2))
|> Enum.to_list() == [2, 2, 6, 4, 10, 6, 14, 8, 18, 10]
assert 1..10
|> Stream.map_every(3, &(&1 * 2))
|> Enum.to_list() == [2, 2, 3, 8, 5, 6, 14, 8, 9, 20]
assert 1..10
|> Stream.drop(2)
|> Stream.map_every(2, &(&1 * 2))
|> Stream.drop(1)
|> Enum.to_list() == [4, 10, 6, 14, 8, 18, 10]
assert 1..5
|> Stream.map_every(0, &(&1 * 2))
|> Enum.to_list() == [1, 2, 3, 4, 5]
assert []
|> Stream.map_every(10, &(&1 * 2))
|> Enum.to_list() == []
assert_raise FunctionClauseError, fn ->
Stream.map_every(1..10, -1, &(&1 * 2))
end
assert_raise FunctionClauseError, fn ->
Stream.map_every(1..10, 3.33, &(&1 * 2))
end
end
test "reject/2" do
stream = Stream.reject([1, 2, 3], fn x -> rem(x, 2) == 0 end)
assert lazy?(stream)
assert Enum.to_list(stream) == [1, 3]
nats = Stream.iterate(1, &(&1 + 1))
assert Stream.reject(nats, &(rem(&1, 2) == 0)) |> Enum.take(5) == [1, 3, 5, 7, 9]
end
test "repeatedly/1" do
stream = Stream.repeatedly(fn -> 1 end)
assert Enum.take(stream, 5) == [1, 1, 1, 1, 1]
stream = Stream.repeatedly(&:rand.uniform/0)
[r1, r2] = Enum.take(stream, 2)
assert r1 != r2
end
  # resource/3 cleanup contract: the after_fun must run exactly once no matter
  # how enumeration ends (normal halt, thrown error, or interleaved zipping),
  # and regardless of whether each step emits a list or a nested enumerable.
  # The Process dictionary records whether cleanup actually ran.
  test "resource/3 closes on outer errors" do
    stream =
      Stream.resource(
        fn -> 1 end,
        fn
          2 -> throw(:error)
          acc -> {[acc], acc + 1}
        end,
        # Matching on 2 also asserts the accumulator value seen at cleanup time.
        fn 2 -> Process.put(:stream_resource, true) end
      )
    Process.put(:stream_resource, false)
    assert catch_throw(Enum.to_list(stream)) == :error
    assert Process.get(:stream_resource)
  end
  test "resource/3 is zippable" do
    transform_fun = fn
      10 -> {:halt, 10}
      acc -> {[acc], acc + 1}
    end
    after_fun = fn _ -> Process.put(:stream_resource, true) end
    stream = Stream.resource(fn -> 1 end, transform_fun, after_fun)
    list = Enum.to_list(stream)
    Process.put(:stream_resource, false)
    # Zipping enumerates the same resource twice; cleanup must still run.
    assert Enum.zip(list, list) == Enum.zip(stream, stream)
    assert Process.get(:stream_resource)
  end
  test "resource/3 halts with inner list" do
    transform_fun = fn acc -> {[acc, acc + 1, acc + 2], acc + 1} end
    after_fun = fn _ -> Process.put(:stream_resource, true) end
    stream = Stream.resource(fn -> 1 end, transform_fun, after_fun)
    Process.put(:stream_resource, false)
    # take/2 halts mid-way through the second emitted chunk.
    assert Enum.take(stream, 5) == [1, 2, 3, 2, 3]
    assert Process.get(:stream_resource)
  end
  test "resource/3 closes on errors with inner list" do
    transform_fun = fn acc -> {[acc, acc + 1, acc + 2], acc + 1} end
    after_fun = fn _ -> Process.put(:stream_resource, true) end
    stream = Stream.resource(fn -> 1 end, transform_fun, after_fun)
    Process.put(:stream_resource, false)
    # The error comes from a downstream map/2 stage, not the resource itself.
    stream = Stream.map(stream, fn x -> if x > 2, do: throw(:error), else: x end)
    assert catch_throw(Enum.to_list(stream)) == :error
    assert Process.get(:stream_resource)
  end
  test "resource/3 is zippable with inner list" do
    transform_fun = fn
      10 -> {:halt, 10}
      acc -> {[acc, acc + 1, acc + 2], acc + 1}
    end
    after_fun = fn _ -> Process.put(:stream_resource, true) end
    stream = Stream.resource(fn -> 1 end, transform_fun, after_fun)
    list = Enum.to_list(stream)
    Process.put(:stream_resource, false)
    assert Enum.zip(list, list) == Enum.zip(stream, stream)
    assert Process.get(:stream_resource)
  end
  # The next three repeat the inner-list scenarios, but each step emits a Range
  # instead of a list, exercising the nested-enumerable code path.
  test "resource/3 halts with inner enum" do
    transform_fun = fn acc -> {acc..(acc + 2), acc + 1} end
    after_fun = fn _ -> Process.put(:stream_resource, true) end
    stream = Stream.resource(fn -> 1 end, transform_fun, after_fun)
    Process.put(:stream_resource, false)
    assert Enum.take(stream, 5) == [1, 2, 3, 2, 3]
    assert Process.get(:stream_resource)
  end
  test "resource/3 closes on errors with inner enum" do
    transform_fun = fn acc -> {acc..(acc + 2), acc + 1} end
    after_fun = fn _ -> Process.put(:stream_resource, true) end
    stream = Stream.resource(fn -> 1 end, transform_fun, after_fun)
    Process.put(:stream_resource, false)
    stream = Stream.map(stream, fn x -> if x > 2, do: throw(:error), else: x end)
    assert catch_throw(Enum.to_list(stream)) == :error
    assert Process.get(:stream_resource)
  end
  test "resource/3 is zippable with inner enum" do
    transform_fun = fn
      10 -> {:halt, 10}
      acc -> {acc..(acc + 2), acc + 1}
    end
    after_fun = fn _ -> Process.put(:stream_resource, true) end
    stream = Stream.resource(fn -> 1 end, transform_fun, after_fun)
    list = Enum.to_list(stream)
    Process.put(:stream_resource, false)
    assert Enum.zip(list, list) == Enum.zip(stream, stream)
    assert Process.get(:stream_resource)
  end
  # transform/4 mirrors the resource/3 suite for the stateful variant: the
  # after_fun must run exactly once with the final accumulator, across normal
  # completion, halts, thrown errors, nesting, zipping, and both list and
  # enumerable inner emissions. Cleanup is recorded in the Process dictionary.
  test "transform/4" do
    transform_fun = fn x, acc -> {[x, x + acc], x} end
    # Matching on 10 asserts the accumulator handed to cleanup is the last x.
    after_fun = fn 10 -> Process.put(:stream_transform, true) end
    stream = Stream.transform(1..10, fn -> 0 end, transform_fun, after_fun)
    Process.put(:stream_transform, false)
    assert Enum.to_list(stream) ==
             [1, 1, 2, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, 13, 8, 15, 9, 17, 10, 19]
    assert Process.get(:stream_transform)
  end
  test "transform/4 with early halt" do
    after_fun = fn nil -> Process.put(:stream_transform, true) end
    stream =
      fn -> throw(:error) end
      |> Stream.repeatedly()
      |> Stream.transform(fn -> nil end, &{[&1, &2], &1}, after_fun)
    Process.put(:stream_transform, false)
    # Reducing with {:halt, _} before any element is produced must still
    # trigger the after_fun.
    assert {:halted, nil} = Enumerable.reduce(stream, {:halt, nil}, fn _, _ -> throw(:error) end)
    assert Process.get(:stream_transform)
  end
  test "transform/4 with early suspend" do
    after_fun = fn nil -> Process.put(:stream_transform, true) end
    stream =
      fn -> throw(:error) end
      |> Stream.repeatedly()
      |> Stream.transform(fn -> nil end, &{[&1, &2], &1}, after_fun)
    refute Process.get(:stream_transform)
    # Suspension is resumable, so cleanup must NOT run yet.
    assert {:suspended, nil, _} =
             Enumerable.reduce(stream, {:suspend, nil}, fn _, _ -> throw(:error) end)
  end
  test "transform/4 closes on outer errors" do
    transform_fun = fn
      3, _ -> throw(:error)
      x, acc -> {[x + acc], x}
    end
    # Matching on 2 asserts the accumulator value at the moment of the error.
    after_fun = fn 2 -> Process.put(:stream_transform, true) end
    stream = Stream.transform(1..10, fn -> 0 end, transform_fun, after_fun)
    Process.put(:stream_transform, false)
    assert catch_throw(Enum.to_list(stream)) == :error
    assert Process.get(:stream_transform)
  end
  test "transform/4 closes on nested errors" do
    transform_fun = fn
      3, _ -> throw(:error)
      x, acc -> {[x + acc], x}
    end
    after_fun = fn _ -> Process.put(:stream_transform_inner, true) end
    outer_after_fun = fn 0 -> Process.put(:stream_transform_outer, true) end
    stream =
      1..10
      |> Stream.transform(fn -> 0 end, transform_fun, after_fun)
      |> Stream.transform(fn -> 0 end, fn x, acc -> {[x], acc} end, outer_after_fun)
    Process.put(:stream_transform_inner, false)
    Process.put(:stream_transform_outer, false)
    # An error raised in the inner transform must close BOTH transform layers.
    assert catch_throw(Enum.to_list(stream)) == :error
    assert Process.get(:stream_transform_inner)
    assert Process.get(:stream_transform_outer)
  end
  test "transform/4 is zippable" do
    transform_fun = fn
      10, acc -> {:halt, acc}
      x, acc -> {[x + acc], x}
    end
    after_fun = fn 9 -> Process.put(:stream_transform, true) end
    stream = Stream.transform(1..20, fn -> 0 end, transform_fun, after_fun)
    list = Enum.to_list(stream)
    Process.put(:stream_transform, false)
    assert Enum.zip(list, list) == Enum.zip(stream, stream)
    assert Process.get(:stream_transform)
  end
  test "transform/4 halts with inner list" do
    transform_fun = fn x, acc -> {[x, x + 1, x + 2], acc} end
    after_fun = fn :acc -> Process.put(:stream_transform, true) end
    stream = Stream.transform(1..10, fn -> :acc end, transform_fun, after_fun)
    Process.put(:stream_transform, false)
    # take/2 halts mid-way through the second emitted chunk.
    assert Enum.take(stream, 5) == [1, 2, 3, 2, 3]
    assert Process.get(:stream_transform)
  end
  test "transform/4 closes on errors with inner list" do
    transform_fun = fn x, acc -> {[x, x + 1, x + 2], acc} end
    after_fun = fn :acc -> Process.put(:stream_transform, true) end
    stream = Stream.transform(1..10, fn -> :acc end, transform_fun, after_fun)
    Process.put(:stream_transform, false)
    # The error comes from a downstream map/2 stage, not the transform itself.
    stream = Stream.map(stream, fn x -> if x > 2, do: throw(:error), else: x end)
    assert catch_throw(Enum.to_list(stream)) == :error
    assert Process.get(:stream_transform)
  end
  test "transform/4 is zippable with inner list" do
    transform_fun = fn
      10, acc -> {:halt, acc}
      x, acc -> {[x, x + 1, x + 2], acc}
    end
    after_fun = fn :inner -> Process.put(:stream_transform, true) end
    stream = Stream.transform(1..20, fn -> :inner end, transform_fun, after_fun)
    list = Enum.to_list(stream)
    Process.put(:stream_transform, false)
    assert Enum.zip(list, list) == Enum.zip(stream, stream)
    assert Process.get(:stream_transform)
  end
  # The next three repeat the inner-list scenarios with Range emissions,
  # exercising the nested-enumerable code path.
  test "transform/4 halts with inner enum" do
    transform_fun = fn x, acc -> {x..(x + 2), acc} end
    after_fun = fn :acc -> Process.put(:stream_transform, true) end
    stream = Stream.transform(1..10, fn -> :acc end, transform_fun, after_fun)
    Process.put(:stream_transform, false)
    assert Enum.take(stream, 5) == [1, 2, 3, 2, 3]
    assert Process.get(:stream_transform)
  end
  test "transform/4 closes on errors with inner enum" do
    transform_fun = fn x, acc -> {x..(x + 2), acc} end
    after_fun = fn :acc -> Process.put(:stream_transform, true) end
    stream = Stream.transform(1..10, fn -> :acc end, transform_fun, after_fun)
    Process.put(:stream_transform, false)
    stream = Stream.map(stream, fn x -> if x > 2, do: throw(:error), else: x end)
    assert catch_throw(Enum.to_list(stream)) == :error
    assert Process.get(:stream_transform)
  end
  test "transform/4 is zippable with inner enum" do
    transform_fun = fn
      10, acc -> {:halt, acc}
      x, acc -> {x..(x + 2), acc}
    end
    after_fun = fn :inner -> Process.put(:stream_transform, true) end
    stream = Stream.transform(1..20, fn -> :inner end, transform_fun, after_fun)
    list = Enum.to_list(stream)
    Process.put(:stream_transform, false)
    assert Enum.zip(list, list) == Enum.zip(stream, stream)
    assert Process.get(:stream_transform)
  end
test "scan/2" do
stream = Stream.scan(1..5, &(&1 + &2))
assert lazy?(stream)
assert Enum.to_list(stream) == [1, 3, 6, 10, 15]
assert Stream.scan([], &(&1 + &2)) |> Enum.to_list() == []
end
test "scan/3" do
stream = Stream.scan(1..5, 0, &(&1 + &2))
assert lazy?(stream)
assert Enum.to_list(stream) == [1, 3, 6, 10, 15]
assert Stream.scan([], 0, &(&1 + &2)) |> Enum.to_list() == []
end
  test "take/2" do
    stream = Stream.take(1..1000, 5)
    assert lazy?(stream)
    assert Enum.to_list(stream) == [1, 2, 3, 4, 5]
    assert Enum.to_list(Stream.take(1..1000, 0)) == []
    assert Enum.to_list(Stream.take([], 5)) == []
    # Taking more than is available yields everything without raising.
    assert Enum.to_list(Stream.take(1..3, 5)) == [1, 2, 3]
    nats = Stream.iterate(1, &(&1 + 1))
    assert Enum.to_list(Stream.take(nats, 5)) == [1, 2, 3, 4, 5]
    stream = Stream.drop(1..100, 5)
    assert Stream.take(stream, 5) |> Enum.to_list() == [6, 7, 8, 9, 10]
    # Dropping past the end of a capped stream leaves nothing for split/2.
    stream = 1..5 |> Stream.take(10) |> Stream.drop(15)
    assert {[], []} = Enum.split(stream, 5)
    stream = 1..20 |> Stream.take(10 + 5) |> Stream.drop(4)
    assert Enum.to_list(stream) == [5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
  end
  # In the next two tests, the each/2 hook raises on the second (truthy)
  # element, so they pass only if take/1 stops pulling from upstream the
  # moment its count is exhausted — under both halt and suspend protocols.
  test "take/2 does not consume next element on halt" do
    assert [false, true]
           |> Stream.each(&(&1 && raise("oops")))
           |> Stream.take(1)
           |> Stream.take_while(& &1)
           |> Enum.to_list() == []
  end
  test "take/2 does not consume next element on suspend" do
    assert [false, true]
           |> Stream.each(&(&1 && raise("oops")))
           |> Stream.take(1)
           |> Stream.flat_map(&[&1])
           |> Enum.to_list() == [false]
  end
  test "take/2 with negative count" do
    Process.put(:stream_each, [])
    stream = Stream.take(1..100, -5)
    assert lazy?(stream)
    stream = Stream.each(stream, &Process.put(:stream_each, [&1 | Process.get(:stream_each)]))
    # A negative count takes from the end; the each/2 side effects confirm that
    # only those five elements ever reach downstream stages.
    assert Enum.to_list(stream) == [96, 97, 98, 99, 100]
    assert Process.get(:stream_each) == [100, 99, 98, 97, 96]
  end
  test "take/2 is zippable" do
    stream = Stream.take(1..1000, 5)
    list = Enum.to_list(stream)
    assert Enum.zip(list, list) == Enum.zip(stream, stream)
  end
  test "take_every/2" do
    # Always includes the first element, then every nth one afterwards.
    assert 1..10
           |> Stream.take_every(2)
           |> Enum.to_list() == [1, 3, 5, 7, 9]
    assert 1..10
           |> Stream.take_every(3)
           |> Enum.to_list() == [1, 4, 7, 10]
    # The stride counter starts where take_every sits in the pipeline,
    # independent of surrounding drop/2 stages.
    assert 1..10
           |> Stream.drop(2)
           |> Stream.take_every(2)
           |> Stream.drop(1)
           |> Enum.to_list() == [5, 7, 9]
    # nth = 0 selects nothing.
    assert 1..10
           |> Stream.take_every(0)
           |> Enum.to_list() == []
    assert []
           |> Stream.take_every(10)
           |> Enum.to_list() == []
  end
  test "take_every/2 without non-negative integer" do
    # nth must be a non-negative integer; anything else fails the guard.
    assert_raise FunctionClauseError, fn ->
      Stream.take_every(1..10, -1)
    end
    assert_raise FunctionClauseError, fn ->
      Stream.take_every(1..10, 3.33)
    end
  end
test "take_while/2" do
stream = Stream.take_while(1..1000, &(&1 <= 5))
assert lazy?(stream)
assert Enum.to_list(stream) == [1, 2, 3, 4, 5]
assert Enum.to_list(Stream.take_while(1..1000, &(&1 <= 0))) == []
assert Enum.to_list(Stream.take_while(1..3, &(&1 <= 5))) == [1, 2, 3]
nats = Stream.iterate(1, &(&1 + 1))
assert Enum.to_list(Stream.take_while(nats, &(&1 <= 5))) == [1, 2, 3, 4, 5]
stream = Stream.drop(1..100, 5)
assert Stream.take_while(stream, &(&1 < 11)) |> Enum.to_list() == [6, 7, 8, 9, 10]
end
  test "timer/1" do
    # timer/1 emits a single 0 after (at least) the given number of ms.
    stream = Stream.timer(10)
    now = :os.timestamp()
    assert Enum.to_list(stream) == [0]
    # We check for >= 5000 (us) instead of >= 10000 (us)
    # because the resolution on Windows system is not high
    # enough and we would get a difference of 9000 from
    # time to time. So a value halfway is good enough.
    assert :timer.now_diff(:os.timestamp(), now) >= 5000
  end
  # unfold/2 produces {element, next_acc} tuples until the fun returns nil
  # (here via the implicit else of `if`), which terminates the stream.
  test "unfold/2" do
    stream = Stream.unfold(10, fn x -> if x > 0, do: {x, x - 1} end)
    assert Enum.take(stream, 5) == [10, 9, 8, 7, 6]
    stream = Stream.unfold(5, fn x -> if x > 0, do: {x, x - 1} end)
    assert Enum.to_list(stream) == [5, 4, 3, 2, 1]
  end
  test "unfold/2 only calculates values if needed" do
    # The throwing branch is only reachable when producing the element after
    # the seed, so taking a single element must never hit it.
    stream = Stream.unfold(1, fn x -> if x > 0, do: {x, x - 1}, else: throw(:boom) end)
    assert Enum.take(stream, 1) == [1]
    stream = Stream.unfold(5, fn x -> if x > 0, do: {x, x - 1} end)
    assert Enum.to_list(Stream.take(stream, 2)) == [5, 4]
  end
  test "unfold/2 is zippable" do
    stream = Stream.unfold(10, fn x -> if x > 0, do: {x, x - 1} end)
    list = Enum.to_list(stream)
    assert Enum.zip(list, list) == Enum.zip(stream, stream)
  end
  test "uniq/1 & uniq/2" do
    assert Stream.uniq([1, 2, 3, 2, 1]) |> Enum.to_list() == [1, 2, 3]
  end
  test "uniq_by/2" do
    # Keeps the first element seen for every distinct key returned by the fun.
    assert Stream.uniq_by([{1, :x}, {2, :y}, {1, :z}], fn {x, _} -> x end) |> Enum.to_list() ==
             [{1, :x}, {2, :y}]
    assert Stream.uniq_by([a: {:tea, 2}, b: {:tea, 2}, c: {:coffee, 1}], fn {_, y} -> y end)
           |> Enum.to_list() == [a: {:tea, 2}, c: {:coffee, 1}]
  end
  test "zip/2" do
    concat = Stream.concat(1..3, 4..6)
    cycle = Stream.cycle([:a, :b, :c])
    # The infinite cycle is truncated to the length of the finite stream.
    assert Stream.zip(concat, cycle) |> Enum.to_list() ==
             [{1, :a}, {2, :b}, {3, :c}, {4, :a}, {5, :b}, {6, :c}]
  end
  test "zip/1" do
    concat = Stream.concat(1..3, 4..6)
    cycle = Stream.cycle([:a, :b, :c])
    assert Stream.zip([concat, cycle]) |> Enum.to_list() ==
             [{1, :a}, {2, :b}, {3, :c}, {4, :a}, {5, :b}, {6, :c}]
    assert Stream.chunk_every([0, 1, 2, 3], 2) |> Stream.zip() |> Enum.to_list() ==
             [{0, 2}, {1, 3}]
    # HaltAcc is a test helper defined elsewhere in this file — presumably an
    # enumerable whose reducer halts; zip must cope with halting members.
    stream = %HaltAcc{acc: 1..3}
    assert Stream.zip([1..3, stream]) |> Enum.to_list() == [{1, 1}, {2, 2}, {3, 3}]
    range_cycle = Stream.cycle(1..2)
    assert Stream.zip([1..3, range_cycle]) |> Enum.to_list() == [{1, 1}, {2, 2}, {3, 1}]
  end
  test "zip/1 does not leave streams suspended" do
    stream =
      Stream.resource(fn -> 1 end, fn acc -> {[acc], acc + 1} end, fn _ ->
        Process.put(:stream_zip, true)
      end)
    Process.put(:stream_zip, false)
    # Whichever position the infinite resource occupies, its cleanup must run
    # once the shorter member is exhausted.
    assert Stream.zip([[:a, :b, :c], stream]) |> Enum.to_list() == [a: 1, b: 2, c: 3]
    assert Process.get(:stream_zip)
    Process.put(:stream_zip, false)
    assert Stream.zip([stream, [:a, :b, :c]]) |> Enum.to_list() == [{1, :a}, {2, :b}, {3, :c}]
    assert Process.get(:stream_zip)
  end
  test "zip/1 does not leave streams suspended on halt" do
    stream =
      Stream.resource(fn -> 1 end, fn acc -> {[acc], acc + 1} end, fn _ ->
        Process.put(:stream_zip, :done)
      end)
    # Halting early via take/2 must also close the zipped resource.
    assert Stream.zip([[:a, :b, :c, :d, :e], stream]) |> Enum.take(3) == [a: 1, b: 2, c: 3]
    assert Process.get(:stream_zip) == :done
  end
  test "zip/1 closes on inner error" do
    # Pdict is a test collectable defined elsewhere in this file; the
    # :stream_done flag records that the into/2 target was properly closed.
    stream = Stream.into([1, 2, 3], %Pdict{})
    stream = Stream.zip([stream, Stream.map([:a, :b, :c], fn _ -> throw(:error) end)])
    Process.put(:stream_done, false)
    assert catch_throw(Enum.to_list(stream)) == :error
    assert Process.get(:stream_done)
  end
  test "zip/1 closes on outer error" do
    stream =
      Stream.zip([Stream.into([1, 2, 3], %Pdict{}), [:a, :b, :c]])
      |> Stream.map(fn _ -> throw(:error) end)
    Process.put(:stream_done, false)
    assert catch_throw(Enum.to_list(stream)) == :error
    assert Process.get(:stream_done)
  end
test "with_index/2" do
stream = Stream.with_index([1, 2, 3])
assert lazy?(stream)
assert Enum.to_list(stream) == [{1, 0}, {2, 1}, {3, 2}]
stream = Stream.with_index([1, 2, 3], 10)
assert Enum.to_list(stream) == [{1, 10}, {2, 11}, {3, 12}]
nats = Stream.iterate(1, &(&1 + 1))
assert Stream.with_index(nats) |> Enum.take(3) == [{1, 0}, {2, 1}, {3, 2}]
end
  # intersperse/2: one focused test per scenario, mirroring Enum.intersperse/2.
  test "intersperse/2 is lazy" do
    assert lazy?(Stream.intersperse([], 0))
  end
  test "intersperse/2 on an empty list" do
    assert Enum.to_list(Stream.intersperse([], 0)) == []
  end
  test "intersperse/2 on a single element list" do
    # No separator is emitted around a lone element.
    assert Enum.to_list(Stream.intersperse([1], 0)) == [1]
  end
  test "intersperse/2 on a multiple elements list" do
    assert Enum.to_list(Stream.intersperse(1..3, 0)) == [1, 0, 2, 0, 3]
  end
  test "intersperse/2 is zippable" do
    stream = Stream.intersperse(1..10, 0)
    list = Enum.to_list(stream)
    assert Enum.zip(list, list) == Enum.zip(stream, stream)
  end
defp lazy?(stream) do
match?(%Stream{}, stream) or is_function(stream, 2)
end
  # Minimal hand-rolled Enumerable reducer backed by the process inbox: every
  # {:stream, element} message becomes one element. Implements the three
  # Enumerable command states ({:suspend | :halt | :cont, acc}).
  #
  # Suspension hands back a continuation so enumeration can resume later.
  defp inbox_stream({:suspend, acc}, f) do
    {:suspended, acc, &inbox_stream(&1, f)}
  end
  defp inbox_stream({:halt, acc}, _f) do
    {:halted, acc}
  end
  # Blocks until the next {:stream, element} message arrives, then feeds it
  # to the reducer and recurses on the reducer's next command.
  defp inbox_stream({:cont, acc}, f) do
    receive do
      {:stream, element} ->
        inbox_stream(f.(element, acc), f)
    end
  end
end
| 31.504918 | 97 | 0.56611 |
08126fc274545b8fdef43c41b7b57ffbb449e512 | 481 | ex | Elixir | lib/charlex/application.ex | uduDudu/Charlex | 67da95ed547a5b558373477ed08cacdedb446bfe | [
"MIT"
] | 2 | 2021-08-12T01:15:52.000Z | 2021-10-11T18:28:14.000Z | lib/charlex/application.ex | uduDudu/Charlex | 67da95ed547a5b558373477ed08cacdedb446bfe | [
"MIT"
] | null | null | null | lib/charlex/application.ex | uduDudu/Charlex | 67da95ed547a5b558373477ed08cacdedb446bfe | [
"MIT"
] | null | null | null | defmodule Charlex.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
children = [
Charlex.Consumer,
Charlex.Server
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Charlex.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 24.05 | 61 | 0.711019 |
0812a23e739f576922ea5f2bc60348e04814cdcd | 1,768 | ex | Elixir | clients/tool_results/lib/google_api/tool_results/v1/model/upgrade_insight.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/tool_results/lib/google_api/tool_results/v1/model/upgrade_insight.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/tool_results/lib/google_api/tool_results/v1/model/upgrade_insight.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ToolResults.V1.Model.UpgradeInsight do
@moduledoc """
This insight is a recommendation to upgrade a given library to the specified
version, in order to avoid dependencies on non-SDK APIs.
## Attributes
* `packageName` (*type:* `String.t`, *default:* `nil`) - The name of the package to be upgraded.
* `upgradeToVersion` (*type:* `String.t`, *default:* `nil`) - The suggested version to upgrade to.
Optional: In case we are not sure which version solves this problem
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:packageName => String.t(),
:upgradeToVersion => String.t()
}
field(:packageName)
field(:upgradeToVersion)
end
defimpl Poison.Decoder, for: GoogleApi.ToolResults.V1.Model.UpgradeInsight do
def decode(value, options) do
GoogleApi.ToolResults.V1.Model.UpgradeInsight.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ToolResults.V1.Model.UpgradeInsight do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34 | 102 | 0.7319 |
0812c39f2a562cabfc66a7fecebf7711c1dfe487 | 1,294 | ex | Elixir | clients/vm_migration/lib/google_api/vm_migration/v1/model/cancel_clone_job_response.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/vm_migration/lib/google_api/vm_migration/v1/model/cancel_clone_job_response.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/vm_migration/lib/google_api/vm_migration/v1/model/cancel_clone_job_response.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.VMMigration.V1.Model.CancelCloneJobResponse do
@moduledoc """
Response message for 'CancelCloneJob' request.
## Attributes
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{}
end
defimpl Poison.Decoder, for: GoogleApi.VMMigration.V1.Model.CancelCloneJobResponse do
def decode(value, options) do
GoogleApi.VMMigration.V1.Model.CancelCloneJobResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.VMMigration.V1.Model.CancelCloneJobResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 30.809524 | 85 | 0.767388 |
0812d0dc43a00a6e353f3264ce9700b37e7d33b1 | 3,683 | ex | Elixir | clients/chat/lib/google_api/chat/v1/model/deprecated_event.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/chat/lib/google_api/chat/v1/model/deprecated_event.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/chat/lib/google_api/chat/v1/model/deprecated_event.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Chat.V1.Model.DeprecatedEvent do
@moduledoc """
Hangouts Chat events.
## Attributes
* `action` (*type:* `GoogleApi.Chat.V1.Model.FormAction.t`, *default:* `nil`) - The form action data associated with an interactive card that was clicked. Only populated for CARD_CLICKED events. See the [Interactive Cards guide](/hangouts/chat/how-tos/cards-onclick) for more information.
* `configCompleteRedirectUrl` (*type:* `String.t`, *default:* `nil`) - The URL the bot should redirect the user to after they have completed an authorization or configuration flow outside of Hangouts Chat. See the [Authorizing access to 3p services guide](/hangouts/chat/how-tos/auth-3p) for more information.
* `eventTime` (*type:* `DateTime.t`, *default:* `nil`) - The timestamp indicating when the event was dispatched.
* `message` (*type:* `GoogleApi.Chat.V1.Model.Message.t`, *default:* `nil`) - The message that triggered the event, if applicable.
* `space` (*type:* `GoogleApi.Chat.V1.Model.Space.t`, *default:* `nil`) - The room or DM in which the event occurred.
* `threadKey` (*type:* `String.t`, *default:* `nil`) - The bot-defined key for the thread related to the event. See the thread_key field of the `spaces.message.create` request for more information.
* `token` (*type:* `String.t`, *default:* `nil`) - A secret value that bots can use to verify if a request is from Google. The token is randomly generated by Google, remains static, and can be obtained from the Hangouts Chat API configuration page in the Cloud Console. Developers can revoke/regenerate it if needed from the same page.
* `type` (*type:* `String.t`, *default:* `nil`) - The type of the event.
* `user` (*type:* `GoogleApi.Chat.V1.Model.User.t`, *default:* `nil`) - The user that triggered the event.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:action => GoogleApi.Chat.V1.Model.FormAction.t(),
:configCompleteRedirectUrl => String.t(),
:eventTime => DateTime.t(),
:message => GoogleApi.Chat.V1.Model.Message.t(),
:space => GoogleApi.Chat.V1.Model.Space.t(),
:threadKey => String.t(),
:token => String.t(),
:type => String.t(),
:user => GoogleApi.Chat.V1.Model.User.t()
}
field(:action, as: GoogleApi.Chat.V1.Model.FormAction)
field(:configCompleteRedirectUrl)
field(:eventTime, as: DateTime)
field(:message, as: GoogleApi.Chat.V1.Model.Message)
field(:space, as: GoogleApi.Chat.V1.Model.Space)
field(:threadKey)
field(:token)
field(:type)
field(:user, as: GoogleApi.Chat.V1.Model.User)
end
defimpl Poison.Decoder, for: GoogleApi.Chat.V1.Model.DeprecatedEvent do
def decode(value, options) do
GoogleApi.Chat.V1.Model.DeprecatedEvent.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Chat.V1.Model.DeprecatedEvent do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 51.873239 | 339 | 0.707575 |
08131499f9b8b08ae01e54a1ebd597a4b7393430 | 285 | ex | Elixir | lib/protobuf/wire_types.ex | gialib/protobuf-elixir | 25598a2ae13eee56550467901d789dc9b21aab17 | [
"MIT"
] | 2 | 2019-06-01T05:52:52.000Z | 2019-06-01T06:03:10.000Z | lib/protobuf/wire_types.ex | gialib/protobuf-elixir | 25598a2ae13eee56550467901d789dc9b21aab17 | [
"MIT"
] | 3 | 2019-09-19T14:52:12.000Z | 2019-09-24T15:45:07.000Z | lib/protobuf/wire_types.ex | gialib/protobuf-elixir | 25598a2ae13eee56550467901d789dc9b21aab17 | [
"MIT"
] | 1 | 2018-12-03T07:04:27.000Z | 2018-12-03T07:04:27.000Z | defmodule Protobuf.WireTypes do
@moduledoc """
Protocol buffer wire types.
"""
defmacro wire_varint, do: 0
defmacro wire_64bits, do: 1
defmacro wire_delimited, do: 2
# defmacro wire_start_group, do: 3
# defmacro wire_end_group, do: 4
defmacro wire_32bits, do: 5
end
| 21.923077 | 36 | 0.722807 |
081329881ebdbe00f1c41112463ad0cd07ae63c8 | 2,152 | ex | Elixir | clients/content/lib/google_api/content/v2/model/orders_custom_batch_request_entry_set_line_item_metadata.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/content/lib/google_api/content/v2/model/orders_custom_batch_request_entry_set_line_item_metadata.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/orders_custom_batch_request_entry_set_line_item_metadata.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Content.V2.Model.OrdersCustomBatchRequestEntrySetLineItemMetadata do
@moduledoc """
## Attributes
* `annotations` (*type:* `list(GoogleApi.Content.V2.Model.OrderMerchantProvidedAnnotation.t)`, *default:* `nil`) -
* `lineItemId` (*type:* `String.t`, *default:* `nil`) - The ID of the line item to set metadata. Either lineItemId or productId is required.
* `productId` (*type:* `String.t`, *default:* `nil`) - The ID of the product to set metadata. This is the REST ID used in the products service. Either lineItemId or productId is required.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:annotations => list(GoogleApi.Content.V2.Model.OrderMerchantProvidedAnnotation.t()),
:lineItemId => String.t(),
:productId => String.t()
}
field(:annotations, as: GoogleApi.Content.V2.Model.OrderMerchantProvidedAnnotation, type: :list)
field(:lineItemId)
field(:productId)
end
defimpl Poison.Decoder,
for: GoogleApi.Content.V2.Model.OrdersCustomBatchRequestEntrySetLineItemMetadata do
def decode(value, options) do
GoogleApi.Content.V2.Model.OrdersCustomBatchRequestEntrySetLineItemMetadata.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Content.V2.Model.OrdersCustomBatchRequestEntrySetLineItemMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.103448 | 191 | 0.738848 |
081336e79e29246405a3904ffa62d96389631c1d | 64 | ex | Elixir | web/views/admin_view.ex | thluiz/yehudimtv | 71aba0ee537b4bba28474fb20d3209fc261a03d7 | [
"MIT"
] | null | null | null | web/views/admin_view.ex | thluiz/yehudimtv | 71aba0ee537b4bba28474fb20d3209fc261a03d7 | [
"MIT"
] | null | null | null | web/views/admin_view.ex | thluiz/yehudimtv | 71aba0ee537b4bba28474fb20d3209fc261a03d7 | [
"MIT"
] | null | null | null | defmodule Yehudimtv.AdminView do
use Yehudimtv.Web, :view
end
| 16 | 32 | 0.796875 |
08134d31825906c1b578b96df412019602596849 | 294 | exs | Elixir | priv/repo/migrations/20171029081608_create_tag.exs | OSM-Browser/points-api | 7022d4e61318fc8da507809302a57bf38223207a | [
"MIT"
] | 2 | 2018-01-18T20:38:08.000Z | 2020-01-19T17:44:20.000Z | priv/repo/migrations/20171029081608_create_tag.exs | OSM-Browser/points-api | 7022d4e61318fc8da507809302a57bf38223207a | [
"MIT"
] | 85 | 2018-02-26T05:30:01.000Z | 2021-07-26T06:22:02.000Z | priv/repo/migrations/20171029081608_create_tag.exs | OSM-Browser/points-api | 7022d4e61318fc8da507809302a57bf38223207a | [
"MIT"
] | 1 | 2018-10-07T06:57:34.000Z | 2018-10-07T06:57:34.000Z | defmodule OsmPoints.Repo.Migrations.CreateTag do
use Ecto.Migration
def change do
create table(:tags, primary_key: false) do
add :point_id, references(:points), primary_key: true
add :key, :string, primary_key: true
add :value, :string, null: false
end
end
end
| 24.5 | 59 | 0.693878 |
0813516c5515d0c17f6534f1ccf16bb4cbfb6e44 | 2,978 | exs | Elixir | test/xdr/transactions/operations/set_trust_line_flags_test.exs | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 3 | 2021-08-17T20:32:45.000Z | 2022-03-13T20:26:02.000Z | test/xdr/transactions/operations/set_trust_line_flags_test.exs | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 45 | 2021-08-12T20:19:41.000Z | 2022-03-27T21:00:10.000Z | test/xdr/transactions/operations/set_trust_line_flags_test.exs | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 2 | 2021-09-22T23:11:13.000Z | 2022-01-23T03:19:11.000Z | defmodule StellarBase.XDR.Operations.SetTrustLineFlagsTest do
use ExUnit.Case
alias StellarBase.XDR.{
AccountID,
AlphaNum4,
Asset,
AssetCode4,
AssetType,
PublicKey,
PublicKeyType,
UInt32,
UInt256
}
alias StellarBase.XDR.Operations.SetTrustLineFlags
alias StellarBase.StrKey
  describe "SetTrustLineFlags Operation" do
    setup do
      # Both accounts are ed25519 public keys wrapped in the XDR
      # PublicKey/AccountID structures; the strkeys are decoded to raw
      # 32-byte values first.
      pk_type = PublicKeyType.new(:PUBLIC_KEY_TYPE_ED25519)

      trustor =
        "GCNY5OXYSY4FKHOPT2SPOQZAOEIGXB5LBYW3HVU3OWSTQITS65M5RCNY"
        |> StrKey.decode!(:ed25519_public_key)
        |> UInt256.new()
        |> PublicKey.new(pk_type)
        |> AccountID.new()

      issuer =
        "GBZNLMUQMIN3VGUJISKZU7GNY3O3XLMYEHJCKCSMDHKLGSMKALRXOEZD"
        |> StrKey.decode!(:ed25519_public_key)
        |> UInt256.new()
        |> PublicKey.new(pk_type)
        |> AccountID.new()

      # 4-character alphanumeric asset "BTCN" issued by `issuer`.
      asset =
        "BTCN"
        |> AssetCode4.new()
        |> AlphaNum4.new(issuer)
        |> Asset.new(AssetType.new(:ASSET_TYPE_CREDIT_ALPHANUM4))

      clear_flags = UInt32.new(0)
      set_flags = UInt32.new(2)

      %{
        trustor: trustor,
        asset: asset,
        clear_flags: clear_flags,
        set_flags: set_flags,
        set_trust_line_flags: SetTrustLineFlags.new(trustor, asset, clear_flags, set_flags),
        # Expected XDR wire encoding of the operation built above; used as the
        # fixture for both encode and decode tests.
        binary:
          <<0, 0, 0, 0, 155, 142, 186, 248, 150, 56, 85, 29, 207, 158, 164, 247, 67, 32, 113, 16,
            107, 135, 171, 14, 45, 179, 214, 155, 117, 165, 56, 34, 114, 247, 89, 216, 0, 0, 0, 1,
            66, 84, 67, 78, 0, 0, 0, 0, 114, 213, 178, 144, 98, 27, 186, 154, 137, 68, 149, 154,
            124, 205, 198, 221, 187, 173, 152, 33, 210, 37, 10, 76, 25, 212, 179, 73, 138, 2, 227,
            119, 0, 0, 0, 0, 0, 0, 0, 2>>
      }
    end

    # The constructor keeps each component unchanged in the resulting struct.
    test "new/1", %{
      trustor: trustor,
      asset: asset,
      clear_flags: clear_flags,
      set_flags: set_flags
    } do
      %SetTrustLineFlags{
        trustor: ^trustor,
        asset: ^asset,
        clear_flags: ^clear_flags,
        set_flags: ^set_flags
      } = SetTrustLineFlags.new(trustor, asset, clear_flags, set_flags)
    end

    test "encode_xdr/1", %{set_trust_line_flags: set_trust_line_flags, binary: binary} do
      {:ok, ^binary} = SetTrustLineFlags.encode_xdr(set_trust_line_flags)
    end

    test "encode_xdr!/1", %{set_trust_line_flags: set_trust_line_flags, binary: binary} do
      ^binary = SetTrustLineFlags.encode_xdr!(set_trust_line_flags)
    end

    test "decode_xdr/2", %{set_trust_line_flags: set_trust_line_flags, binary: binary} do
      {:ok, {^set_trust_line_flags, ""}} = SetTrustLineFlags.decode_xdr(binary)
    end

    test "decode_xdr/2 with an invalid binary" do
      {:error, :not_binary} = SetTrustLineFlags.decode_xdr(123)
    end

    # Decoding a doubled binary consumes one operation and returns the
    # untouched second copy as the rest.
    test "decode_xdr!/2", %{set_trust_line_flags: set_trust_line_flags, binary: binary} do
      {^set_trust_line_flags, ^binary} = SetTrustLineFlags.decode_xdr!(binary <> binary)
    end
  end
end
| 30.387755 | 98 | 0.629617 |
0813a1591af12d33d0d0023f68d2b52eeee81be5 | 1,040 | exs | Elixir | day6/test/day6/part1_test.exs | ryanbillingsley/advent_of_code | 9219a4b763966b70caed85865bb073a26bb3a705 | [
"MIT"
] | null | null | null | day6/test/day6/part1_test.exs | ryanbillingsley/advent_of_code | 9219a4b763966b70caed85865bb073a26bb3a705 | [
"MIT"
] | null | null | null | day6/test/day6/part1_test.exs | ryanbillingsley/advent_of_code | 9219a4b763966b70caed85865bb073a26bb3a705 | [
"MIT"
] | null | null | null | defmodule Day6.Part1Test do
use ExUnit.Case
doctest Day6.Part1
  test "Calculates ranges" do
    # 1-2-3-4-5
    # 1_________
    # 2_________
    # 3__*_* * *
    # 4__*_* * *
    # 5__*_* * *
    # Both corners are inclusive (x in 2..5, y in 3..5); x varies fastest
    # within each y row, as the expected list below shows.
    expect = [
      {2,3}, {3,3}, {4,3}, {5,3},
      {2,4}, {3,4}, {4,4}, {5,4},
      {2,5}, {3,5}, {4,5}, {5,5}
    ]
    result = Day6.Part1.calculate_range({2,3},{5,5})
    assert expect == result
  end
  test "Parse range string" do
    # An "x,y" coordinate string parses into an {x, y} integer tuple.
    expect = {5,2}
    input = "5,2"
    result = Day6.Part1.convert_range_to_tuple input
    assert expect == result
  end
  test "Parses instruction from line" do
    # An instruction line parses into [action_atom, from_corner, to_corner].
    input = "toggle 461,550 through 564,900"
    expect = [:toggle, {461,550}, {564,900}]
    result = Day6.Part1.parse_instruction(input)
    assert expect == result
  end
test "Runs instructions" do
input = [
[:on, {2,3}, {2,4}],
[:off, {2,2}, {2,3}],
[:toggle, {2,3}, {2,5}]
]
expect = Enum.into [{2,3}], HashSet.new
result = Day6.Part1.run_instructions(input, HashSet.new)
end
end
| 19.259259 | 60 | 0.546154 |
0813bdf4dfb0e0e69c4ce7161f88e44aefc4c4ba | 922 | ex | Elixir | lib/my_sensors/broadcast.ex | ConnorRigby/my_sensors | 25296a7e5629b58462576f7c8042bb15c1a0f7e7 | [
"MIT"
] | 6 | 2018-02-21T21:13:24.000Z | 2020-07-06T12:18:24.000Z | lib/my_sensors/broadcast.ex | ConnorRigby/my_sensors | 25296a7e5629b58462576f7c8042bb15c1a0f7e7 | [
"MIT"
] | 2 | 2018-02-09T22:04:46.000Z | 2018-10-09T16:34:00.000Z | lib/my_sensors/broadcast.ex | ConnorRigby/my_sensors | 25296a7e5629b58462576f7c8042bb15c1a0f7e7 | [
"MIT"
] | 1 | 2018-02-09T20:29:09.000Z | 2018-02-09T20:29:09.000Z | defmodule MySensors.Broadcast do
@moduledoc """
Elixir Broadcast mechanism for MySensors data.
Will receive messages in the shape of:
`{:my_sensors, {type, data}}`
where `type` will be:
* `insert_or_update`
* `delete`
and `data` will be a `Node` struct.
"""
@name MySensorsRegistry
@doc false
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
type: :worker,
restart: :permanent,
shutdown: 500
}
end
@doc false
  def start_link(_args) do
    # `keys: :duplicate` lets many processes register under the same key, so
    # every subscriber receives the messages fanned out by dispatch/1.
    Registry.start_link(keys: :duplicate, name: @name)
  end
@doc """
Subscribe to events about MySensors Data.
"""
def subscribe() do
{:ok, _} = Registry.register(@name, __MODULE__, [])
:ok
end
def dispatch(msg) do
Registry.dispatch(@name, __MODULE__, fn entries ->
for {pid, _} <- entries, do: send(pid, {:my_sensors, msg})
end)
end
end
| 20.954545 | 64 | 0.625813 |
0813cc69c18a41b2ebc63946b1572317536cae59 | 3,028 | exs | Elixir | exercises/parallel-letter-frequency/parallel_letter_frequency_test.exs | jerith/elixir | 9a3f2a2fbee26a7b6a6b3ad74a9e6d1ff2495ed4 | [
"Apache-2.0"
] | null | null | null | exercises/parallel-letter-frequency/parallel_letter_frequency_test.exs | jerith/elixir | 9a3f2a2fbee26a7b6a6b3ad74a9e6d1ff2495ed4 | [
"Apache-2.0"
] | null | null | null | exercises/parallel-letter-frequency/parallel_letter_frequency_test.exs | jerith/elixir | 9a3f2a2fbee26a7b6a6b3ad74a9e6d1ff2495ed4 | [
"Apache-2.0"
] | 1 | 2018-07-19T23:43:56.000Z | 2018-07-19T23:43:56.000Z | if !System.get_env("EXERCISM_TEST_EXAMPLES") do
Code.load_file("frequency.exs", __DIR__)
end
ExUnit.start()
ExUnit.configure(exclude: :pending, trace: true)
# Your code should contain a frequency(texts, workers) function which accepts a
# list of texts and the number of workers to use in parallel.
defmodule FrequencyTest do
  @moduledoc """
  Exercism test suite for `Frequency.frequency/2` (parallel letter counting).

  All tests except the first are tagged `:pending` and excluded by default;
  remove the tag (or change the `ExUnit.configure` call at the top of this
  file) to enable them.
  """

  use ExUnit.Case

  # Poem by Friedrich Schiller. The corresponding music is the European Anthem.
  @ode_an_die_freude """
  Freude schöner Götterfunken
  Tochter aus Elysium,
  Wir betreten feuertrunken,
  Himmlische, dein Heiligtum!
  Deine Zauber binden wieder
  Was die Mode streng geteilt;
  Alle Menschen werden Brüder,
  Wo dein sanfter Flügel weilt.
  """

  # Dutch national anthem
  @wilhelmus """
  Wilhelmus van Nassouwe
  ben ik, van Duitsen bloed,
  den vaderland getrouwe
  blijf ik tot in den dood.
  Een Prinse van Oranje
  ben ik, vrij, onverveerd,
  den Koning van Hispanje
  heb ik altijd geëerd.
  """

  # American national anthem
  @star_spangled_banner """
  O say can you see by the dawn's early light,
  What so proudly we hailed at the twilight's last gleaming,
  Whose broad stripes and bright stars through the perilous fight,
  O'er the ramparts we watched, were so gallantly streaming?
  And the rockets' red glare, the bombs bursting in air,
  Gave proof through the night that our flag was still there;
  O say does that star-spangled banner yet wave,
  O'er the land of the free and the home of the brave?
  """

  # Returns the frequencies in a sorted list. This means it doesn't matter if
  # your frequency() function returns a list of pairs or a map, the
  # testing code will handle it.
  defp freq(texts, workers \\ 4) do
    Frequency.frequency(texts, workers) |> Enum.sort() |> Enum.into(%{})
  end

  # @tag :pending
  test "no texts mean no letters" do
    assert freq([]) == %{}
  end

  @tag :pending
  test "one letter" do
    assert freq(["a"]) == %{"a" => 1}
  end

  @tag :pending
  test "case insensitivity" do
    assert freq(["aA"]) == %{"a" => 2}
  end

  @tag :pending
  test "many empty texts still mean no letters" do
    assert freq(List.duplicate("  ", 10000)) == %{}
  end

  @tag :pending
  test "many times the same text gives a predictable result" do
    assert freq(List.duplicate("abc", 1000)) == %{"a" => 1000, "b" => 1000, "c" => 1000}
  end

  @tag :pending
  test "punctuation doesn't count" do
    assert freq([@ode_an_die_freude])[","] == nil
  end

  @tag :pending
  test "numbers don't count" do
    assert freq(["Testing, 1, 2, 3"])["1"] == nil
  end

  # The two anthem tests below must agree: worker count may change scheduling
  # but never the resulting counts.
  @tag :pending
  test "all three anthems, together, 1 worker" do
    freqs = freq([@ode_an_die_freude, @wilhelmus, @star_spangled_banner], 1)
    assert freqs["a"] == 49
    assert freqs["t"] == 56
    assert freqs["ü"] == 2
  end

  @tag :pending
  test "all three anthems, together, 4 workers" do
    freqs = freq([@ode_an_die_freude, @wilhelmus, @star_spangled_banner], 4)
    assert freqs["a"] == 49
    assert freqs["t"] == 56
    assert freqs["ü"] == 2
  end
end
| 28.037037 | 88 | 0.679657 |
0813d02ecf6232620284b03e4185b8b5b8ea4c34 | 2,954 | ex | Elixir | clients/private_ca/lib/google_api/private_ca/v1/model/audit_config.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/private_ca/lib/google_api/private_ca/v1/model/audit_config.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/private_ca/lib/google_api/private_ca/v1/model/audit_config.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.PrivateCA.V1.Model.AuditConfig do
@moduledoc """
Specifies the audit configuration for a service. The configuration determines which permission types are logged, and what identities, if any, are exempted from logging. An AuditConfig must have one or more AuditLogConfigs. If there are AuditConfigs for both `allServices` and a specific service, the union of the two AuditConfigs is used for that service: the log_types specified in each AuditConfig are enabled, and the exempted_members in each AuditLogConfig are exempted. Example Policy with multiple AuditConfigs: { "audit_configs": [ { "service": "allServices", "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:jose@example.com" ] }, { "log_type": "DATA_WRITE" }, { "log_type": "ADMIN_READ" } ] }, { "service": "sampleservice.googleapis.com", "audit_log_configs": [ { "log_type": "DATA_READ" }, { "log_type": "DATA_WRITE", "exempted_members": [ "user:aliya@example.com" ] } ] } ] } For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ logging. It also exempts jose@example.com from DATA_READ logging, and aliya@example.com from DATA_WRITE logging.
## Attributes
* `auditLogConfigs` (*type:* `list(GoogleApi.PrivateCA.V1.Model.AuditLogConfig.t)`, *default:* `nil`) - The configuration for logging of each type of permission.
* `service` (*type:* `String.t`, *default:* `nil`) - Specifies a service that will be enabled for audit logging. For example, `storage.googleapis.com`, `cloudsql.googleapis.com`. `allServices` is a special value that covers all services.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:auditLogConfigs => list(GoogleApi.PrivateCA.V1.Model.AuditLogConfig.t()) | nil,
:service => String.t() | nil
}
field(:auditLogConfigs, as: GoogleApi.PrivateCA.V1.Model.AuditLogConfig, type: :list)
field(:service)
end
defimpl Poison.Decoder, for: GoogleApi.PrivateCA.V1.Model.AuditConfig do
def decode(value, options) do
GoogleApi.PrivateCA.V1.Model.AuditConfig.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.PrivateCA.V1.Model.AuditConfig do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 59.08 | 1,106 | 0.745091 |
0813d437a76b847c949014342a16fe725e9a1844 | 1,679 | exs | Elixir | test/pumpkin_web/controllers/bug_controller_test.exs | krasio/pumpkin.ex | bfa8d1ae1854ba6c5f7c3df877157656a67e99f9 | [
"BSD-3-Clause"
] | null | null | null | test/pumpkin_web/controllers/bug_controller_test.exs | krasio/pumpkin.ex | bfa8d1ae1854ba6c5f7c3df877157656a67e99f9 | [
"BSD-3-Clause"
] | null | null | null | test/pumpkin_web/controllers/bug_controller_test.exs | krasio/pumpkin.ex | bfa8d1ae1854ba6c5f7c3df877157656a67e99f9 | [
"BSD-3-Clause"
] | null | null | null | defmodule PumpkinWeb.BugControllerTest do
use PumpkinWeb.ConnCase, async: false
alias Pumpkin.Repo
alias Pumpkin.Exceptions
alias Pumpkin.Exceptions.Tasks
alias Pumpkin.Exceptions.Environment
  describe "index/2" do
    test "responds with list of bugs for environment", %{conn: conn} do
      staging = create_environment("pumpkin-staging")
      stable = create_environment("pumpkin-stable")

      # Two identical occurrences in staging (should be grouped into one bug
      # with a count of 2); the stable occurrences must not leak into the
      # staging listing.
      create_occurrence(staging, %{message: "Ooops, something went wrong!"})
      create_occurrence(staging, %{message: "Ooops, something went wrong!", occurred_at: ~N[2017-04-17 14:27:39.000000]})
      create_occurrence(stable, %{message: "Ooops, something went wrong!"})
      create_occurrence(stable, %{message: "Something else went wrong!"})

      conn = get(conn, environment_bug_path(conn, :index, staging.id))
      html = html_response(conn, 200)

      assert html =~ "pumpkin-staging"
      assert html =~ "Ooops, something went wrong! (2)"
      assert html =~ "Started at 14:25:34 on 2017-04-17"
      assert html =~ "Last seen at 14:27:39 on 2017-04-17"

      refute html =~ "pumpkin-stable"
      refute html =~ "Something else went wrong!"
    end

    # Inserts an Environment row keyed by the given id string.
    defp create_environment(id) do
      %Environment{}
      |> Environment.changeset(%{id: id})
      |> Repo.insert!
    end

    # Inserts an occurrence into `environment`, merging `attrs` over sensible
    # defaults, then assigns it to a bug so it shows up in the index.
    defp create_occurrence(environment, attrs) do
      defaults = %{
        message: "Opps!",
        occurred_at: ~N[2017-04-17 14:25:34.000000],
        data: %{}
      }
      attrs = Map.merge(defaults, attrs)

      {:ok, occurrence} = Exceptions.create_occurrence(Map.put(attrs, :environment_id, environment.id))
      Tasks.assign_bug(occurrence)
    end
  end
end
| 34.265306 | 121 | 0.666468 |
0813ddbdbef1e108fb0ca79aa12ee7f382b5d83e | 271 | ex | Elixir | lib/hl7/2.5.1/segments/erq.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.5.1/segments/erq.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.5.1/segments/erq.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | defmodule HL7.V2_5_1.Segments.ERQ do
@moduledoc false
require Logger
alias HL7.V2_5_1.{DataTypes}
use HL7.Segment,
fields: [
segment: nil,
query_tag: nil,
event_identifier: DataTypes.Ce,
input_parameter_list: DataTypes.Qip
]
end
| 18.066667 | 41 | 0.678967 |
08141ac7f910bdbb1b4e1a5f94d27727c32e0ba6 | 2,049 | exs | Elixir | test/mysimplelist/accounts_test.exs | ScorpionResponse/mysimplelist | 3c792373cc372ab5e196fe109b9dae68b97b7220 | [
"Apache-2.0"
] | null | null | null | test/mysimplelist/accounts_test.exs | ScorpionResponse/mysimplelist | 3c792373cc372ab5e196fe109b9dae68b97b7220 | [
"Apache-2.0"
] | null | null | null | test/mysimplelist/accounts_test.exs | ScorpionResponse/mysimplelist | 3c792373cc372ab5e196fe109b9dae68b97b7220 | [
"Apache-2.0"
] | null | null | null | defmodule Mysimplelist.AccountsTest do
use Mysimplelist.DataCase
alias Mysimplelist.Accounts
describe "users" do
alias Mysimplelist.Accounts.User
@valid_attrs %{email: "some@valid.email", name: "some name"}
@update_attrs %{email: "some@updated.email", name: "some updated name"}
@invalid_attrs %{email: nil, name: nil}
def user_fixture(attrs \\ %{}) do
{:ok, user} =
attrs
|> Enum.into(@valid_attrs)
|> Accounts.create_user()
user
end
test "list_users/0 returns all users" do
user = user_fixture()
assert Accounts.list_users() == [user]
end
test "get_user!/1 returns the user with given id" do
user = user_fixture()
assert Accounts.get_user!(user.id) == user
end
test "create_user/1 with valid data creates a user" do
assert {:ok, %User{} = user} = Accounts.create_user(@valid_attrs)
assert user.email == "some@valid.email"
assert user.name == "some name"
end
test "create_user/1 with invalid data returns error changeset" do
assert {:error, %Ecto.Changeset{}} = Accounts.create_user(@invalid_attrs)
end
test "update_user/2 with valid data updates the user" do
user = user_fixture()
assert {:ok, %User{} = user} = Accounts.update_user(user, @update_attrs)
assert user.email == "some@updated.email"
assert user.name == "some updated name"
end
test "update_user/2 with invalid data returns error changeset" do
user = user_fixture()
assert {:error, %Ecto.Changeset{}} = Accounts.update_user(user, @invalid_attrs)
assert user == Accounts.get_user!(user.id)
end
test "delete_user/1 deletes the user" do
user = user_fixture()
assert {:ok, %User{}} = Accounts.delete_user(user)
assert_raise Ecto.NoResultsError, fn -> Accounts.get_user!(user.id) end
end
test "change_user/1 returns a user changeset" do
user = user_fixture()
assert %Ecto.Changeset{} = Accounts.change_user(user)
end
end
end
| 30.58209 | 85 | 0.654954 |
08141ec1133432ecf593450ad714f5ca43170933 | 1,037 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/url_map_reference.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/url_map_reference.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/url_map_reference.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.UrlMapReference do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:"urlMap"
]
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.UrlMapReference do
def decode(value, _options) do
value
end
end
| 28.027027 | 77 | 0.745419 |
081440073d8e6e152cc58b2d7c539d85d9e50049 | 1,178 | ex | Elixir | lib/extensions/invitation/ecto/context.ex | foggy1/pow | c46cf4c4c2d59d10861783faa46f3979d54343a9 | [
"MIT"
] | null | null | null | lib/extensions/invitation/ecto/context.ex | foggy1/pow | c46cf4c4c2d59d10861783faa46f3979d54343a9 | [
"MIT"
] | null | null | null | lib/extensions/invitation/ecto/context.ex | foggy1/pow | c46cf4c4c2d59d10861783faa46f3979d54343a9 | [
"MIT"
] | null | null | null | defmodule PowInvitation.Ecto.Context do
@moduledoc false
alias Ecto.Changeset
alias Pow.Ecto.Context
alias PowInvitation.Ecto.Schema
@doc """
Creates an invited user
"""
@spec create(map(), map(), Config.t()) :: {:ok, map()} | {:error, Changeset.t()}
def create(inviter_user, params, config) do
user_mod = Context.user_schema_mod(config)
user_mod
|> struct()
|> user_mod.invite_changeset(inviter_user, params)
|> Context.do_insert(config)
end
@doc """
Updates an invited user and accepts invitation.
"""
@spec update(map(), map(), Config.t()) :: {:ok, map()} | {:error, Changeset.t()}
def update(user, params, config) do
user
|> Schema.accept_invitation_changeset(params)
|> Context.do_update(config)
end
@doc """
Finds an invited user by the `invitation_token` column.
Ignores users with `:invitation_accepted_at` set.
"""
@spec get_by_invitation_token(binary(), Config.t()) :: map() | nil
def get_by_invitation_token(token, config) do
[invitation_token: token]
|> Context.get_by(config)
|> case do
%{invitation_accepted_at: nil} = user -> user
_ -> nil
end
end
end
| 26.177778 | 82 | 0.66129 |
081442c463f09fc4b9a9e4c5231814835d9c098b | 1,274 | ex | Elixir | deps/credo/lib/credo/execution/task_runner.ex | BandanaPandey/nary_tree | fb1eeb69e38e43c9f9ffb54297cef52dff5c928d | [
"MIT"
] | 13 | 2018-09-19T21:03:29.000Z | 2022-01-27T04:06:32.000Z | deps/credo/lib/credo/execution/task_runner.ex | BandanaPandey/nary_tree | fb1eeb69e38e43c9f9ffb54297cef52dff5c928d | [
"MIT"
] | 1 | 2020-05-26T04:16:57.000Z | 2020-05-26T04:16:57.000Z | deps/credo/lib/credo/execution/task_runner.ex | BandanaPandey/nary_tree | fb1eeb69e38e43c9f9ffb54297cef52dff5c928d | [
"MIT"
] | 3 | 2020-05-21T04:32:08.000Z | 2021-07-28T05:14:01.000Z | defmodule Credo.Execution.TaskRunner do
@moduledoc """
This module defines all Tasks which make up the execution of Credo.
Each task receives an Execution struct and returns an Execution struct upon completion.
Any Task can mark the Execution as "halted" to stop Credo's execution.
Subsequent Tasks won't be run.
Tasks are organized in named groups.
"""
use Credo.Execution.TaskRunnerBuilder
group :parse_cli_options do
task Credo.Execution.Task.ParseOptions
end
group :validate_cli_options do
task Credo.Execution.Task.ValidateOptions
end
group :convert_cli_options_to_config do
task Credo.Execution.Task.ConvertCLIOptionsToConfig
end
group :determine_command do
task Credo.Execution.Task.DetermineCommand
end
group :set_default_command do
task Credo.Execution.Task.SetDefaultCommand
end
group :validate_config do
task Credo.Execution.Task.ValidateConfig
end
group :resolve_config do
task Credo.Execution.Task.UseColors
task Credo.Execution.Task.RequireRequires
end
group :run_command do
task Credo.Execution.Task.RunCommand
end
group :halt_execution do
task Credo.Execution.Task.AssignExitStatusForIssues
task Credo.Execution.Task.HaltIfExitStatusAssigned
end
end
| 24.5 | 89 | 0.776295 |
08144ed77865b1ff0d000210c1ba70ca6e081880 | 825 | exs | Elixir | priv/repo/migrations/20151202133406_create_user.exs | yknx4/opencov | dc961a41e29b41b0657bc2a64bb67350a65477b8 | [
"MIT"
] | 8 | 2021-08-22T10:37:57.000Z | 2022-01-10T11:27:06.000Z | priv/repo/migrations/20151202133406_create_user.exs | yknx4/librecov | dc961a41e29b41b0657bc2a64bb67350a65477b8 | [
"MIT"
] | 109 | 2021-08-20T04:08:04.000Z | 2022-01-03T07:39:18.000Z | priv/repo/migrations/20151202133406_create_user.exs | Librecov/librecov | dc961a41e29b41b0657bc2a64bb67350a65477b8 | [
"MIT"
] | null | null | null | defmodule Librecov.Repo.Migrations.CreateUser do
use Ecto.Migration
def change do
create table(:users) do
add(:name, :string)
add(:email, :string)
add(:password_digest, :string)
add(:admin, :boolean, default: false)
add(:password_initialized, :boolean, default: false)
add(:github_access_token, :string)
add(:confirmation_token, :string)
add(:confirmed_at, :utc_datetime_usec)
add(:unconfirmed_email, :string)
add(:password_reset_token, :string)
add(:password_reset_sent_at, :utc_datetime_usec)
timestamps()
end
create(unique_index(:users, [:email]))
create(unique_index(:users, [:unconfirmed_email]))
create(unique_index(:users, [:confirmation_token]))
create(unique_index(:users, [:password_reset_token]))
end
end
| 26.612903 | 58 | 0.682424 |
081472df0254fcaf491bf07eabf614ebd1d28834 | 1,133 | ex | Elixir | lib/howhow_speak_web/router.ex | pastleo/howhow_speak | c298cc6624cd41da3d3f045ab92d4bcf50b84898 | [
"MIT"
] | 1 | 2020-06-19T11:23:44.000Z | 2020-06-19T11:23:44.000Z | lib/howhow_speak_web/router.ex | pastleo/howhow_speak | c298cc6624cd41da3d3f045ab92d4bcf50b84898 | [
"MIT"
] | null | null | null | lib/howhow_speak_web/router.ex | pastleo/howhow_speak | c298cc6624cd41da3d3f045ab92d4bcf50b84898 | [
"MIT"
] | null | null | null | defmodule HowhowSpeakWeb.Router do
use HowhowSpeakWeb, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", HowhowSpeakWeb do
pipe_through :browser
get "/", PageController, :index
end
# Other scopes may use custom stacks.
scope "/api", HowhowSpeakWeb do
pipe_through :api
get "/sounds", PageController, :sounds
end
# Enables LiveDashboard only for development
#
# If you want to use the LiveDashboard in production, you should put
# it behind authentication and allow only admins to access it.
# If your application does not have an admins-only section yet,
# you can use Plug.BasicAuth to set up some basic authentication
# as long as you are also using SSL (which you should anyway).
if Mix.env() in [:dev, :test] do
import Phoenix.LiveDashboard.Router
scope "/" do
pipe_through :browser
live_dashboard "/dashboard", metrics: HowhowSpeakWeb.Telemetry
end
end
end
| 25.177778 | 70 | 0.70609 |
08147d56a818146451a6df267f54f6ee846eca53 | 468 | exs | Elixir | apps/device_fake/mix.exs | rockwood/ExAir | 3944f4b0ea66fc2179784b728e033e7eeaad664f | [
"Apache-2.0"
] | 4 | 2016-09-21T11:22:27.000Z | 2017-12-09T02:06:52.000Z | apps/device_fake/mix.exs | rockwood/ExAir | 3944f4b0ea66fc2179784b728e033e7eeaad664f | [
"Apache-2.0"
] | null | null | null | apps/device_fake/mix.exs | rockwood/ExAir | 3944f4b0ea66fc2179784b728e033e7eeaad664f | [
"Apache-2.0"
] | 2 | 2017-12-09T02:07:22.000Z | 2021-01-06T15:30:03.000Z | defmodule Device.Mixfile do
use Mix.Project
def project do
[app: :device_fake,
version: "0.1.0",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.3",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps]
end
def application do
[applications: [:logger]]
end
defp deps do
[]
end
end
| 18.72 | 44 | 0.564103 |
081499649d796cc0bb0a8fc62827b6ac096f992c | 894 | ex | Elixir | test/support/conn_case.ex | JPYamamoto/PhoenixLiveView-GameOfLife | 20e671c178c91ad1ccf6d7572ff6c51421f878bd | [
"MIT"
] | null | null | null | test/support/conn_case.ex | JPYamamoto/PhoenixLiveView-GameOfLife | 20e671c178c91ad1ccf6d7572ff6c51421f878bd | [
"MIT"
] | null | null | null | test/support/conn_case.ex | JPYamamoto/PhoenixLiveView-GameOfLife | 20e671c178c91ad1ccf6d7572ff6c51421f878bd | [
"MIT"
] | null | null | null | defmodule GameOfLifeWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
alias GameOfLifeWeb.Router.Helpers, as: Routes
# The default endpoint for testing
@endpoint GameOfLifeWeb.Endpoint
end
end
setup _tags do
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 27.090909 | 59 | 0.731544 |
0814cc7d7baee4c59343a70a47f2dd6f4ce18a81 | 1,298 | ex | Elixir | apps/omg_api/lib/root_chain_coordinator/service.ex | hoardexchange/elixir-omg | 423528699d467f1cc0d02c596290ab907af38c2c | [
"Apache-2.0"
] | null | null | null | apps/omg_api/lib/root_chain_coordinator/service.ex | hoardexchange/elixir-omg | 423528699d467f1cc0d02c596290ab907af38c2c | [
"Apache-2.0"
] | null | null | null | apps/omg_api/lib/root_chain_coordinator/service.ex | hoardexchange/elixir-omg | 423528699d467f1cc0d02c596290ab907af38c2c | [
"Apache-2.0"
] | 2 | 2020-06-07T11:14:54.000Z | 2020-08-02T07:36:32.000Z | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.API.RootChainCoordinator.Service do
  @moduledoc """
  Represents a service that is coordinated by root chain coordinator.
  Such a service is expected to get root chain height by calling `RootChainCoordinator.get_height()` function
  and report processed height by calling `RootChainCoordinator.check_in(height, service_name)`
  where `service_name` is a unique name of that service.
  Service is expected to handle `:sync` message that notifies about necessity to check synchronization status by calling
  `OMG.API.RootChainCoordinator.get_height/0`.
  """

  # synced_height: last root-chain height the service reported as processed.
  # pid: process id of the checked-in service.
  defstruct synced_height: nil, pid: nil

  @type t() :: %__MODULE__{
          synced_height: pos_integer(),
          pid: pid()
        }
end
| 41.870968 | 120 | 0.753467 |
0814ea1fe36208b225d0b3e58bc61b8daf3114e3 | 1,647 | ex | Elixir | clients/script/lib/google_api/script/v1/model/google_apps_script_type_function_set.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/script/lib/google_api/script/v1/model/google_apps_script_type_function_set.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/script/lib/google_api/script/v1/model/google_apps_script_type_function_set.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Script.V1.Model.GoogleAppsScriptTypeFunctionSet do
@moduledoc """
A set of functions. No duplicates are permitted.
## Attributes
* `values` (*type:* `list(GoogleApi.Script.V1.Model.GoogleAppsScriptTypeFunction.t)`, *default:* `nil`) - A list of functions composing the set.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:values => list(GoogleApi.Script.V1.Model.GoogleAppsScriptTypeFunction.t())
}
field(:values, as: GoogleApi.Script.V1.Model.GoogleAppsScriptTypeFunction, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Script.V1.Model.GoogleAppsScriptTypeFunctionSet do
def decode(value, options) do
GoogleApi.Script.V1.Model.GoogleAppsScriptTypeFunctionSet.decode(value, options)
end
end
# JSON encoding is handled by the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.Script.V1.Model.GoogleAppsScriptTypeFunctionSet do
  def encode(struct, opts), do: GoogleApi.Gax.ModelBase.encode(struct, opts)
end
| 35.042553 | 148 | 0.757134 |
0814fc7cfa63d817a631f3817ccea14b3488bbb9 | 2,540 | ex | Elixir | lib/cforum/messages/read_messages.ex | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | null | null | null | lib/cforum/messages/read_messages.ex | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | null | null | null | lib/cforum/messages/read_messages.ex | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | null | null | null | defmodule Cforum.Messages.ReadMessages do
use Appsignal.Instrumentation.Decorators
import Ecto.Query, warn: false
alias Cforum.Repo
alias Cforum.Messages.Message
alias Cforum.Messages.ReadMessage
alias Cforum.Threads.InvisibleThread
@doc """
Marks a single message or list of messages as read
## Parameters
user: the current user
message: the message to mark read, **or**
messages: the list of messages to mark read
## Examples
iex> mark_messages_read(%User{}, %Message{})
"""
def mark_messages_read(nil, _), do: nil
def mark_messages_read(user, message) when not is_list(message), do: mark_messages_read(user, [message])
@decorate transaction()
def mark_messages_read(user, messages) do
messages = Enum.reject(messages, & &1.attribs[:is_read])
for msg <- messages do
ret =
%ReadMessage{}
|> ReadMessage.changeset(%{message_id: msg.message_id, user_id: user.user_id})
|> Repo.insert()
case ret do
{:ok, rm} ->
rm
{:error, _} ->
nil
end
end
|> Enum.filter(&(!is_nil(&1)))
|> notify_user(user)
end
defp notify_user(read_messages, user) do
message_ids = Enum.map(read_messages, & &1.message_id)
CforumWeb.Endpoint.broadcast("users:#{user.user_id}", "message_marked_read", %{"message_ids" => message_ids})
read_messages
end
@doc """
Counts the number of unread threads and messages for a user. Returns a tuple
`{number of unread threads, number of unread messages}`. A thread is counted
as unread if it contains unread messages; so a return value of `{1, 5}` means
five unread messages in one thread.
## Examples
iex> count_unread_messages(%User{})
{1, 5}
"""
def count_unread_messages(user, visible_forums)
def count_unread_messages(nil, _), do: {0, 0}
def count_unread_messages(user, visible_forums) do
forum_ids = Enum.map(visible_forums, & &1.forum_id)
from(
msg in Message,
select: {fragment("COUNT(DISTINCT ?)", msg.thread_id), count("*")},
inner_join: thr in assoc(msg, :thread),
left_join: rm in ReadMessage,
on: rm.message_id == msg.message_id and rm.user_id == ^user.user_id,
left_join: inv in InvisibleThread,
on: inv.thread_id == thr.thread_id and inv.user_id == ^user.user_id,
where: msg.deleted == false and thr.archived == false,
where: is_nil(rm.message_id) and is_nil(inv.thread_id),
where: msg.forum_id in ^forum_ids
)
|> Repo.one()
end
end
| 29.195402 | 113 | 0.668504 |
081509c5f6724a82df0a1bd6ec1951ff8220d4a6 | 8,893 | ex | Elixir | lib/mix/lib/mix/project.ex | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/project.ex | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/project.ex | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Project do
@moduledoc """
A module that provides conveniences for defining and working
with projects.
In order to configure Mix, a developer needs to use
`Mix.Project` in a module and define a function named
`project` that returns a keyword list with configuration.
defmodule MyApp do
use Mix.Project
def project do
[
app: :my_app,
vsn: "0.6.0"
]
end
end
After being defined, the configuration for this project can be read
as `Mix.project/0`. Notice that `Mix.project/0` won't fail if a
project is not defined; this allows many mix tasks to work
even without a project.
In case the developer needs a project or wants to access a special
function in the project, he/she can call `Mix.Project.get!/0`
which fails with `Mix.NoProjectError` in case a project is not
defined.
"""
@doc false
defmacro __using__(_) do
quote do
@after_compile Mix.Project
end
end
@private_config [:build_path, :app_path]
# Invoked after each Mix.Project is compiled.
@doc false
def __after_compile__(env, _binary) do
push env.module, env.file
end
# Push a project onto the project stack. Only
# the top of the stack can be accessed.
@doc false
def push(atom, file // "nofile") when is_atom(atom) do
config = default_config
|> Keyword.merge(get_project_config(atom))
|> Keyword.drop(@private_config)
case Mix.ProjectStack.push(atom, config, file) do
:ok ->
:ok
{ :error, other } when is_binary(other) ->
raise Mix.Error, message: "Trying to load #{inspect atom} from #{inspect file}" <>
" but another project with the same name was already defined at #{inspect other}"
end
end
# Pops a project from the stack.
@doc false
def pop do
Mix.ProjectStack.pop
end
# The configuration that is pushed down to dependencies.
@doc false
def deps_config(config // config()) do
[ build_path: build_path(config),
build_per_environment: config[:build_per_environment],
deps_path: deps_path(config) ]
end
@doc """
Retrieves the current project, `nil` if there is no
current project (i.e. there is no mixfile in the current
project).
If you expect a project to be defined, i.e. it is a
requirement of the current task, you should call
`get!/0` instead.
"""
def get do
case Mix.ProjectStack.peek do
{ name, _config, _file } -> name
_ -> nil
end
end
@doc """
Same as `get/0`, but raises an exception if there is no current project.
This is usually called by tasks that need additional
functions on the project to be defined. Since such
tasks usually depend on a project being defined, this
function raises `Mix.NoProjectError` in case no project
is available.
"""
def get! do
get || raise Mix.NoProjectError
end
@doc """
Returns the project configuration for the current environment.
This configuration is cached once the project is pushed into the stack.
"""
def config do
case Mix.ProjectStack.peek do
{ _name, config, _file } -> config
_ -> default_config
end
end
@doc """
Returns a list of project configuration files as known by
this project. This function is usually used in compilation
tasks to trigger a full recompilation whenever such
configuration files change.
By default it includes the mix.exs file and the lock manifest.
"""
def config_files do
project = get
opts = [Mix.Deps.Lock.manifest]
if project && (source = project.__info__(:compile)[:source]) do
opts = [String.from_char_list!(source)|opts]
end
opts
end
@doc """
Returns `true` if project is an umbrella project.
"""
def umbrella? do
config[:apps_path] != nil
end
@doc """
Runs the given `fun` inside the given project by changing
the current working directory and loading the given project
onto the project stack.
"""
def in_project(app, path, post_config // [], fun)
def in_project(app, ".", post_config, fun) do
cached = load_project(app, post_config)
result = try do
fun.(cached)
after
Mix.Project.pop
end
result
end
def in_project(app, path, post_config, fun) do
File.cd! path, fn ->
in_project(app, ".", post_config, fun)
end
end
@doc """
Returns the path to store dependencies for this project.
The returned path will be expanded.
## Examples
Mix.Project.deps_path
#=> "/path/to/project/deps"
"""
def deps_path(config // config()) do
Path.expand config[:deps_path]
end
@doc """
Returns the build path for this project.
The returned path will be expanded.
## Examples
Mix.Project.build_path
#=> "/path/to/project/_build/shared"
If :build_per_environment is set to true, it
will create a new build per environment:
Mix.env
#=> :dev
Mix.Project.build_path
#=> "/path/to/project/_build/dev"
"""
def build_path(config // config()) do
config[:build_path] || if config[:build_per_environment] do
Path.expand("_build/#{Mix.env}")
else
Path.expand("_build/shared")
end
end
@doc """
The path to store manifests. By default they are
stored in the same app path but it may be changed
in future releases.
The returned path will be expanded.
## Examples
Mix.Project.manifest_path
#=> "/path/to/project/_build/shared/lib/app"
"""
def manifest_path(config // config()) do
app_path(config)
end
@doc """
Returns the application path inside the build.
The returned path will be expanded.
## Examples
Mix.Project.app_path
#=> "/path/to/project/_build/shared/lib/app"
"""
def app_path(config // config()) do
config[:app_path] || cond do
app = config[:app] ->
Path.join([build_path(config), "lib", app])
config[:apps_path] ->
raise "Trying to access app_path for an umbrella project but umbrellas have no app"
true ->
raise Mix.Error, message: "Cannot access build without an application name, " <>
"please ensure you are in a directory with a mix.exs file and it defines " <>
"an :app name under the project configuration"
end
end
@doc """
Returns the paths this project compiles to.
The returned path will be expanded.
## Examples
Mix.Project.compile_path
#=> "/path/to/project/_build/shared/lib/app/priv"
"""
def compile_path(config // config()) do
Path.join(app_path(config), "ebin")
end
@doc """
Builds the project structure for the current application.
## Options
* `:symlink_ebin` - Symlink ebin instead of copying it
"""
def build_structure(config // config(), opts // []) do
app = app_path(config)
File.mkdir_p!(app)
source = Path.expand("ebin")
target = Path.join(app, "ebin")
cond do
opts[:symlink_ebin] ->
Mix.Utils.symlink_or_copy(source, target)
match?({ :ok, _ }, :file.read_link(target)) ->
File.rm_rf!(target)
File.mkdir_p!(target)
true ->
File.mkdir_p!(target)
end
Mix.Utils.symlink_or_copy(Path.expand("include"), Path.join(app, "include"))
Mix.Utils.symlink_or_copy(Path.expand("priv"), Path.join(app, "priv"))
end
@doc """
Returns all load paths for this project.
"""
def load_paths do
if umbrella? do
[]
else
[compile_path]
end
end
# Loads mix.exs in the current directory or loads the project from the
# mixfile cache and pushes the project to the project stack.
defp load_project(app, post_config) do
Mix.ProjectStack.post_config(post_config)
if cached = Mix.ProjectStack.read_cache(app) do
{ project, file } = cached
push(project, file)
project
else
file = Path.expand("mix.exs")
old_proj = get
if File.regular?(file) do
Code.load_file(file)
end
new_proj = get
if old_proj == new_proj do
file = "nofile"
new_proj = nil
push new_proj, file
end
Mix.ProjectStack.write_cache(app, { new_proj, file })
new_proj
end
end
defp default_config do
[ build_per_environment: false,
default_task: "run",
deps: [],
deps_path: "deps",
elixirc_exts: [:ex],
elixirc_paths: ["lib"],
elixirc_watch_exts: [:ex, :eex, :exs],
erlc_paths: ["src"],
erlc_include_path: "include",
erlc_options: [:debug_info],
lockfile: "mix.lock",
preferred_cli_env: [{ "test", :test }] ]
end
defp get_project_config(nil), do: []
defp get_project_config(atom) do
config = atom.project
if env = config[:env][Mix.env] do
config |> Keyword.delete(:env) |> Keyword.merge(env)
else
config
end
end
end
| 24.910364 | 91 | 0.646688 |
08150c63eeeb9795ee9d16a92446a1482e126ff5 | 3,062 | exs | Elixir | test/protobuf/protoc/cli_test.exs | matehat/protobuf-elixir | 3f27218842979037c31f355488e5c3350f641b41 | [
"MIT"
] | null | null | null | test/protobuf/protoc/cli_test.exs | matehat/protobuf-elixir | 3f27218842979037c31f355488e5c3350f641b41 | [
"MIT"
] | 3 | 2019-09-19T14:52:12.000Z | 2019-09-24T15:45:07.000Z | test/protobuf/protoc/cli_test.exs | demilletech/protobuf-elixir | 0e79aa7f8dbed615a1c917488b61a84728843bdd | [
"MIT"
] | null | null | null | defmodule Protobuf.Protoc.CLITest do
use ExUnit.Case, async: true
import Protobuf.Protoc.CLI
alias Protobuf.Protoc.Context
alias Google.Protobuf.FileDescriptorProto
alias Google.Protobuf.DescriptorProto
alias Google.Protobuf.EnumDescriptorProto
test "parse_params/2 parse plugins" do
ctx = %Context{}
ctx = parse_params(ctx, "plugins=grpc")
assert ctx == %Context{plugins: ["grpc"]}
end
test "find_types/2 returns multiple files" do
ctx = %Context{}
descs = [FileDescriptorProto.new(name: "file1"), FileDescriptorProto.new(name: "file2")]
assert %{global_type_mapping: %{"file1" => %{}, "file2" => %{}}} = find_types(ctx, descs)
end
test "find_types_in_proto/1 merge message and enum" do
desc =
FileDescriptorProto.new(
name: "file1",
package: "pkg",
message_type: [DescriptorProto.new(name: "Msg")],
enum_type: [EnumDescriptorProto.new(name: "Enum")]
)
assert %{".pkg.Msg" => %{type_name: "Pkg.Msg"}, ".pkg.Enum" => %{type_name: "Pkg.Enum"}} =
find_types_in_proto(desc)
end
test "find_types_in_proto/1 have nested message types" do
desc =
FileDescriptorProto.new(
name: "file1",
package: "pkg",
message_type: [
DescriptorProto.new(
name: "Msg",
nested_type: [DescriptorProto.new(name: "NestedMsg")],
enum_type: [EnumDescriptorProto.new(name: "NestedEnumMsg")]
)
]
)
assert %{
".pkg.Msg" => %{type_name: "Pkg.Msg"},
".pkg.Msg.NestedMsg" => %{type_name: "Pkg.Msg.NestedMsg"},
".pkg.Msg.NestedEnumMsg" => %{type_name: "Pkg.Msg.NestedEnumMsg"}
} = find_types_in_proto(desc)
end
test "find_types_in_proto/1 have deeper nested message types" do
desc =
FileDescriptorProto.new(
name: "file1",
package: "pkg",
message_type: [
DescriptorProto.new(
name: "Msg",
nested_type: [
DescriptorProto.new(
name: "NestedMsg",
nested_type: [DescriptorProto.new(name: "NestedMsg2")]
)
]
)
]
)
assert %{
".pkg.Msg" => %{type_name: "Pkg.Msg"},
".pkg.Msg.NestedMsg" => %{type_name: "Pkg.Msg.NestedMsg"},
".pkg.Msg.NestedMsg.NestedMsg2" => %{type_name: "Pkg.Msg.NestedMsg.NestedMsg2"}
} = find_types_in_proto(desc)
end
test "find_types_in_proto/1 supports elixir_module_prefix" do
desc =
FileDescriptorProto.new(
name: "file1",
package: "pkg",
message_type: [DescriptorProto.new(name: "Msg")],
enum_type: [EnumDescriptorProto.new(name: "Enum")],
options: Google.Protobuf.FileOptions.new(elixir_module_prefix: "foo_bar.prefix")
)
assert %{
".pkg.Msg" => %{type_name: "FooBar.Prefix.Msg"},
".pkg.Enum" => %{type_name: "FooBar.Prefix.Enum"}
} = find_types_in_proto(desc)
end
end
| 31.244898 | 94 | 0.586545 |
081514fdd3191c2367536f4796314b62559b9b01 | 1,504 | ex | Elixir | lib/gateway_web/presence/controller.ex | mmacai/reactive-interaction-gateway | edb9262c65b10a8a5dc21ebf326cf73638e97d36 | [
"Apache-2.0"
] | 1 | 2019-11-06T13:35:35.000Z | 2019-11-06T13:35:35.000Z | lib/gateway_web/presence/controller.ex | mmacai/reactive-interaction-gateway | edb9262c65b10a8a5dc21ebf326cf73638e97d36 | [
"Apache-2.0"
] | null | null | null | lib/gateway_web/presence/controller.ex | mmacai/reactive-interaction-gateway | edb9262c65b10a8a5dc21ebf326cf73638e97d36 | [
"Apache-2.0"
] | null | null | null | defmodule GatewayWeb.Presence.Controller do
@moduledoc """
HTTP-accessible API for client connections.
"""
use GatewayWeb, :controller
use Gateway.Config, [:session_role]
require Logger
alias GatewayWeb.Presence.Channel
alias GatewayWeb.Endpoint
alias Gateway.Blacklist
alias Gateway.Utils.Jwt
def list_channels(conn, _params) do
conf = config()
channels =
"role:#{conf.session_role}"
|> Channel.channels_list
|> Map.keys
json(conn, channels)
end
def list_channel_connections(conn, params) do
%{"id" => id} = params
connections =
"user:#{id}"
|> Channel.channels_list
|> Enum.map(fn(user) -> elem(user, 1).metas end)
|> List.flatten
json(conn, connections)
end
def disconnect_channel_connection(conn, %{"jti" => jti}) do
Blacklist.add_jti(Blacklist, jti, jwt_expiry(conn))
Endpoint.broadcast(jti, "disconnect", %{})
conn
|> put_status(204)
|> json(%{})
end
defp jwt_expiry(conn) do
conn
|> get_req_header("authorization")
|> jwt_expiry_from_tokens
rescue
e ->
Logger.warn("No token (expiration) found, using default blacklist expiration timeout (#{inspect e}).")
nil
end
defp jwt_expiry_from_tokens([]), do: nil
defp jwt_expiry_from_tokens([token]) do
{:ok, %{"exp" => expiry}} = Jwt.decode(token)
expiry
|> Integer.to_string # Comes as int, convert to string
|> Timex.parse!("{s-epoch}") # Parse as UTC epoch
end
end
| 23.873016 | 108 | 0.652926 |
08151f9d24d713d4f1fdf9718674ab5e434208e7 | 974 | ex | Elixir | debian/postinst.ex | sharkwouter/vaporos-flatpak-manager | da1dce2a806fdb51aa9366408ace50c28fbc3ff6 | [
"MIT"
] | null | null | null | debian/postinst.ex | sharkwouter/vaporos-flatpak-manager | da1dce2a806fdb51aa9366408ace50c28fbc3ff6 | [
"MIT"
] | 21 | 2019-09-15T08:42:58.000Z | 2021-08-28T16:48:54.000Z | debian/postinst.ex | sharkwouter/vaporos-flatpak-manager | da1dce2a806fdb51aa9366408ace50c28fbc3ff6 | [
"MIT"
] | null | null | null | #!/bin/sh
# postinst script for vaporos-flatpak-manager
#
# see: dh_installdeb(1)
set -e
# summary of how this script can be called:
# * <postinst> `configure' <most-recently-configured-version>
# * <old-postinst> `abort-upgrade' <new version>
# * <conflictor's-postinst> `abort-remove' `in-favour' <package>
# <new-version>
# * <postinst> `abort-remove'
# * <deconfigured's-postinst> `abort-deconfigure' `in-favour'
# <failed-install-package> <version> `removing'
# <conflicting-package> <version>
# for details, see https://www.debian.org/doc/debian-policy/ or
# the debian-policy package
case "$1" in
configure)
;;
abort-upgrade|abort-remove|abort-deconfigure)
;;
*)
echo "postinst called with unknown argument \`$1'" >&2
exit 1
;;
esac
# dh_installdeb will replace this with shell code automatically
# generated by other debhelper scripts.
#DEBHELPER#
exit 0
| 24.35 | 71 | 0.643737 |
0815288c2b1336647990e4891caef0c107adc185 | 493 | exs | Elixir | test/helpers/query_test.exs | mahcloud/medium-sdk-elixir | dbdf3ec8bdd3e8224891444cfc64b331660c8933 | [
"MIT"
] | 26 | 2016-11-26T06:17:39.000Z | 2020-03-23T15:20:02.000Z | test/helpers/query_test.exs | mahcloud/medium-sdk-elixir | dbdf3ec8bdd3e8224891444cfc64b331660c8933 | [
"MIT"
] | 3 | 2017-03-09T18:44:15.000Z | 2020-01-03T15:10:36.000Z | test/helpers/query_test.exs | mahcloud/medium-sdk-elixir | dbdf3ec8bdd3e8224891444cfc64b331660c8933 | [
"MIT"
] | 3 | 2017-03-09T18:39:51.000Z | 2020-03-23T15:24:50.000Z | defmodule Medium.QueryTest do
use ExUnit.Case, async: true
alias Medium.Helpers.Query
test "delegates the encoding of simple structures to URI.encode_query/1" do
simple_query = %{one: 1, two: 2}
encoded_query = Query.encode(simple_query)
assert encoded_query == URI.encode_query(simple_query)
end
test "encodes lists" do
complex_query = %{one: ["one", "uno"]}
encoded_query = Query.encode(complex_query)
assert encoded_query == "one=one%2Cuno"
end
end
| 24.65 | 77 | 0.711968 |
08152e468a2c6ff6e9ad59420a7c3fea90c66e33 | 2,432 | ex | Elixir | DL-5TM/DL-5TM.ex | johannesE/decentlab-decoders | c290ea1218de2c82d665fdc9f71f16682e12d917 | [
"MIT"
] | 1 | 2021-06-25T10:36:07.000Z | 2021-06-25T10:36:07.000Z | DL-5TM/DL-5TM.ex | johannesE/decentlab-decoders | c290ea1218de2c82d665fdc9f71f16682e12d917 | [
"MIT"
] | null | null | null | DL-5TM/DL-5TM.ex | johannesE/decentlab-decoders | c290ea1218de2c82d665fdc9f71f16682e12d917 | [
"MIT"
] | null | null | null |
# https://www.decentlab.com/support
defmodule DecentlabDecoder do
  @moduledoc """
  Decoder for Decentlab sensor payloads (see https://www.decentlab.com/support).
  """

  @protocol_version 2

  # Sensor layout: each entry consumes `length` 16-bit words from the
  # payload and produces the listed named values.
  defp sensor_defs do
    [
      %{
        length: 2,
        values: [
          %{
            :name => "Dielectric permittivity",
            :convert => fn words -> Enum.at(words, 0) / 50 end,
            :unit => nil
          },
          %{
            :name => "Volumetric water content",
            :convert => fn words ->
              permittivity = Enum.at(words, 0) / 50

              0.0000043 * :math.pow(permittivity, 3) - 0.00055 * :math.pow(permittivity, 2) +
                0.0292 * permittivity - 0.053
            end,
            :unit => "m³⋅m⁻³"
          },
          %{
            :name => "Soil temperature",
            :convert => fn words -> (Enum.at(words, 1) - 400) / 10 end,
            :unit => "°C"
          }
        ]
      },
      %{
        length: 1,
        values: [
          %{
            :name => "Battery voltage",
            :convert => fn words -> Enum.at(words, 0) / 1000 end,
            :unit => "V"
          }
        ]
      }
    ]
  end

  @doc """
  Decodes a payload given as a raw binary, a charlist (converted via
  `to_string/1`) or — with the `:hex` flag — a hex-encoded string.
  """
  def decode(msg, :hex) do
    {:ok, raw} = Base.decode16(msg, case: :mixed)
    decode(raw)
  end

  def decode(msg) when is_binary(msg), do: decode_binary(msg)
  def decode(msg), do: msg |> to_string() |> decode()

  # Payload layout: 1 byte protocol version, 2 bytes device id,
  # 2 bytes sensor flags, then big-endian 16-bit data words.
  defp decode_binary(<<@protocol_version, device_id::size(16), flags::binary-size(2), payload::binary>>) do
    payload
    |> to_words()
    |> read_sensors(flags, sensor_defs())
    |> Map.put("Device ID", device_id)
    |> Map.put("Protocol version", @protocol_version)
  end

  # Splits a binary into a list of 16-bit big-endian integers.
  defp to_words(<<>>), do: []
  defp to_words(<<word::size(16), tail::binary>>), do: [word | to_words(tail)]

  # Walks the flag bits from least to most significant; a set bit means the
  # corresponding sensor's words are present in the payload.
  defp read_sensors(words, <<prefix::size(15), 1::size(1)>>, [%{length: len, values: defs} | defs_rest]) do
    {sensor_words, words_rest} = Enum.split(words, len)

    defs
    |> decode_values(sensor_words)
    |> Map.merge(read_sensors(words_rest, <<0::size(1), prefix::size(15)>>, defs_rest))
  end

  defp read_sensors(words, <<prefix::size(15), 0::size(1)>>, [_skipped | defs_rest]) do
    read_sensors(words, <<0::size(1), prefix::size(15)>>, defs_rest)
  end

  defp read_sensors([], _flags, []), do: %{}

  # Applies every value definition to the sensor's words, skipping entries
  # without a conversion function.
  defp decode_values(defs, words) do
    Enum.reduce(defs, %{}, fn
      %{convert: nil}, acc ->
        acc

      %{name: name, unit: unit, convert: convert}, acc ->
        Map.put(acc, name, %{"unit" => unit, "value" => convert.(words)})
    end)
  end
end
# Print the decoded result for the two sample payloads.
Enum.each(["02023b0003003702710c60", "02023b00020c60"], fn payload ->
  payload |> DecentlabDecoder.decode(:hex) |> IO.inspect()
end)
| 26.434783 | 160 | 0.528783 |
0815329709dabe03a152c038cf54cf6c2077727c | 2,874 | exs | Elixir | exercises/concept/log-level/test/log_level_test.exs | herminiotorres/exercism-elixir | 0464a968f3e37680b2bf34868b889d6f8de5581e | [
"MIT"
] | 200 | 2019-12-12T13:50:59.000Z | 2022-02-20T22:38:42.000Z | exercises/concept/log-level/test/log_level_test.exs | herminiotorres/exercism-elixir | 0464a968f3e37680b2bf34868b889d6f8de5581e | [
"MIT"
] | 1,938 | 2019-12-12T08:07:10.000Z | 2021-01-29T12:56:13.000Z | exercises/concept/log-level/test/log_level_test.exs | herminiotorres/exercism-elixir | 0464a968f3e37680b2bf34868b889d6f8de5581e | [
"MIT"
] | 239 | 2019-12-12T14:09:08.000Z | 2022-03-18T00:04:07.000Z | defmodule LogLevelTest do
use ExUnit.Case
describe "LogLevel.to_label/1" do
# @tag :pending
test "level 0 has label trace only in a non-legacy app" do
assert LogLevel.to_label(0, false) == :trace
assert LogLevel.to_label(0, true) == :unknown
end
@tag :pending
test "level 1 has label debug" do
assert LogLevel.to_label(1, false) == :debug
assert LogLevel.to_label(1, true) == :debug
end
@tag :pending
test "level 2 has label info" do
assert LogLevel.to_label(2, false) == :info
assert LogLevel.to_label(2, true) == :info
end
@tag :pending
test "level 3 has label warning" do
assert LogLevel.to_label(3, false) == :warning
assert LogLevel.to_label(3, true) == :warning
end
@tag :pending
test "level 4 has label error" do
assert LogLevel.to_label(4, false) == :error
assert LogLevel.to_label(4, true) == :error
end
@tag :pending
test "level 5 has label fatal only in a non-legacy app" do
assert LogLevel.to_label(5, false) == :fatal
assert LogLevel.to_label(5, true) == :unknown
end
@tag :pending
test "level 6 has label unknown" do
assert LogLevel.to_label(6, false) == :unknown
assert LogLevel.to_label(6, true) == :unknown
end
@tag :pending
test "level -1 has label unknown" do
assert LogLevel.to_label(-1, false) == :unknown
assert LogLevel.to_label(-1, true) == :unknown
end
end
describe "LogLevel.alert_recipient/2" do
@tag :pending
test "fatal code sends alert to ops" do
assert LogLevel.alert_recipient(5, false) == :ops
end
@tag :pending
test "error code sends alert to ops" do
assert LogLevel.alert_recipient(4, false) == :ops
assert LogLevel.alert_recipient(4, true) == :ops
end
@tag :pending
test "unknown code sends alert to dev team 1 for a legacy app" do
assert LogLevel.alert_recipient(6, true) == :dev1
assert LogLevel.alert_recipient(0, true) == :dev1
assert LogLevel.alert_recipient(5, true) == :dev1
end
@tag :pending
test "unknown code sends alert to dev team 2" do
assert LogLevel.alert_recipient(6, false) == :dev2
end
@tag :pending
test "trace code does not send alert" do
refute LogLevel.alert_recipient(0, false)
end
@tag :pending
test "debug code does not send alert" do
refute LogLevel.alert_recipient(1, false)
refute LogLevel.alert_recipient(1, true)
end
@tag :pending
test "info code does not send alert" do
refute LogLevel.alert_recipient(2, false)
refute LogLevel.alert_recipient(2, true)
end
@tag :pending
test "warning code does not send alert" do
refute LogLevel.alert_recipient(3, false)
refute LogLevel.alert_recipient(3, true)
end
end
end
| 28.176471 | 69 | 0.651009 |
081540759e4c988893867d74133c481e87a86f7d | 1,371 | ex | Elixir | talks-articles/frameworks/phoenix/book--programming-phoenix-ge-1.4/videologue/lib/videologue_web/controllers/auth.ex | abhishekkr/tutorials_as_code | f355dc62a5025b710ac6d4a6ac2f9610265fad54 | [
"MIT"
] | 37 | 2015-02-01T23:16:39.000Z | 2021-12-22T16:50:48.000Z | talks-articles/frameworks/phoenix/book--programming-phoenix-ge-1.4/videologue/lib/videologue_web/controllers/auth.ex | abhishekkr/tutorials_as_code | f355dc62a5025b710ac6d4a6ac2f9610265fad54 | [
"MIT"
] | 1 | 2017-03-02T04:55:48.000Z | 2018-01-14T10:51:11.000Z | talks-articles/frameworks/phoenix/book--programming-phoenix-ge-1.4/videologue/lib/videologue_web/controllers/auth.ex | abhishekkr/tutorials_as_code | f355dc62a5025b710ac6d4a6ac2f9610265fad54 | [
"MIT"
] | 15 | 2015-03-02T08:09:01.000Z | 2021-06-10T03:25:41.000Z | defmodule VideologueWeb.Auth do
import Plug.Conn
alias Videologue.Accounts
alias VideologueWeb.Router.Helpers, as: Routes
@token_salt Application.get_env(:videologue, :phoenix_token_salt)
def init(opts), do: opts
def call(conn, _opts), do: do_call(conn, conn.assigns[:current_user])
def do_call(conn, nil) do
user_id = get_session(conn, :user_id)
user = user_id && Accounts.get_user(user_id)
assign(conn, :current_user, user)
|> put_user_token(user)
end
def do_call(conn, user), do: put_user_token(conn, user)
def login(conn, user) do
conn
|> assign(:current_user, user)
|> put_user_token(user)
|> put_session(:user_id, user.id)
|> configure_session(renew: true)
end
def logout(conn), do: configure_session(conn, drop: true)
def authenticate_user(conn, _opts), do: do_authenticate_user(conn, conn.assigns[:current_user])
def do_authenticate_user(conn, nil) do
conn
|> Phoenix.Controller.put_flash(:error, "You are not logged in.")
|> Phoenix.Controller.redirect(to: Routes.page_path(conn, :index))
|> halt()
end
def do_authenticate_user(conn, _current_user), do: conn
defp put_user_token(conn, nil), do: assign(conn, :user_token, nil)
defp put_user_token(conn, user) do
token = Phoenix.Token.sign(conn, @token_salt, user.id)
assign(conn, :user_token, token)
end
end
| 31.159091 | 97 | 0.71116 |
081560d478c51678ab81afa52d190d5061f94fd3 | 3,219 | ex | Elixir | apps/nightcrawler/lib/nightcrawler/marvel/series.ex | ZucchiniZe/nightcrawler_elixir | be82b0e7c25f55c7d2c2d3aa2860371cb68aeeb6 | [
"MIT"
] | null | null | null | apps/nightcrawler/lib/nightcrawler/marvel/series.ex | ZucchiniZe/nightcrawler_elixir | be82b0e7c25f55c7d2c2d3aa2860371cb68aeeb6 | [
"MIT"
] | null | null | null | apps/nightcrawler/lib/nightcrawler/marvel/series.ex | ZucchiniZe/nightcrawler_elixir | be82b0e7c25f55c7d2c2d3aa2860371cb68aeeb6 | [
"MIT"
] | null | null | null | defmodule Nightcrawler.Marvel.Series do
@moduledoc false
@behaviour Nightcrawler.Marvel.Entity
use Ecto.Schema
import Ecto.Changeset
alias Nightcrawler.Parser
import Ecto.Query, only: [from: 2]
alias Nightcrawler.Marvel.Comic
schema "series" do
field(:title, :string)
field(:description, :string)
field(:end_year, :integer)
field(:start_year, :integer)
field(:rating, :string)
field(:modified, :utc_datetime)
embeds_one(:thumbnail, Nightcrawler.Marvel.Common.Image)
has_many(:comics, Nightcrawler.Marvel.Comic)
many_to_many(:events, Nightcrawler.Marvel.Event, join_through: "series_events")
many_to_many(:creators, Nightcrawler.Marvel.Creator, join_through: "series_creators")
many_to_many(:characters, Nightcrawler.Marvel.Character, join_through: "series_characters")
timestamps()
end
def changeset(attrs), do: changeset(%__MODULE__{}, attrs)
def changeset(series, attrs) do
series
|> cast(attrs, [:title, :id, :description, :start_year, :end_year, :modified, :rating])
|> validate_required([:title, :id, :start_year, :end_year])
|> cast_embed(:thumbnail)
end
def transform do
%{
id: &Parser.integer_or_string/1,
title: &Parser.integer_or_string/1,
description: &Parser.integer_or_string/1,
startYear: &Parser.underscore_key/1,
endYear: &Parser.underscore_key/1,
rating: &transform_rating/1,
modified: &Parser.maybe_datetime/1,
thumbnail: &Parser.thumbnail/1
}
end
defp transform_rating({key, val}) do
key_atom = String.to_existing_atom(key)
{key_atom, parse_rating(val)}
end
def parse_rating(rating) do
cond do
rating in ["ALL AGES", "All Ages", "PG"] ->
"All Ages"
rating in [
"RATED T",
"Rated T",
"T",
"RATED A",
"Rated a",
"Rated A",
"A",
"MARVEL PSR",
"Marvel Psr",
"Parental Guidance",
"PARENTAL SUPERVISION"
] ->
"T"
rating in [
"RATED T+",
"Rated T+",
"T+",
"13 & Up",
"AGES 12 & UP",
"MARVEL AGE (12+)",
"MARVEL PSR+"
] ->
"T+"
rating in [
"Parental Advisory",
"PARENTAL ADVISORY",
"PARENTAL ADVISORYSLC",
"PARENTAL ADVISORY/EXPLICIT CONTENT",
"Parental Advisory/Explicit Content",
"Parental Advisoryslc"
] ->
"Parental Advisory"
rating in [
"Mature",
"EXPLICIT CONTENT",
"Explicit Content",
"MAX: EXPLICIT CONTENT",
"Max: Explicit Content",
"17 AND UP",
"17 & UP"
] ->
"Max: Explicit Content"
rating in [
"NO RATING",
"No Rating",
"NOT IN ORACLE",
"Not in Oracle",
" "
] ->
"No Rating"
true ->
rating
end
end
# queries
@doc """
Returns an `Ecto.Query` listing all of the series without any comics attached to them
"""
def with_no_comics do
from(
series in __MODULE__,
left_join: c in Comic,
on: [series_id: series.id],
where: is_nil(c.series_id)
)
end
end
| 23.49635 | 95 | 0.58745 |
0815995d823eb1518ded315d00cc2a7cd707bda3 | 819 | exs | Elixir | apps/core/test/buffers/token_audit_test.exs | michaeljguarino/forge | 50ee583ecb4aad5dee4ef08fce29a8eaed1a0824 | [
"Apache-2.0"
] | 59 | 2021-09-16T19:29:39.000Z | 2022-03-31T20:44:24.000Z | apps/core/test/buffers/token_audit_test.exs | svilenkov/plural | ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026 | [
"Apache-2.0"
] | 111 | 2021-08-15T09:56:37.000Z | 2022-03-31T23:59:32.000Z | apps/core/test/buffers/token_audit_test.exs | svilenkov/plural | ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026 | [
"Apache-2.0"
] | 4 | 2021-12-13T09:43:01.000Z | 2022-03-29T18:08:44.000Z | defmodule Core.Buffers.TokenAuditTest do
use Core.SchemaCase, async: false
alias Core.Buffers.TokenAudit
describe "Core.Buffers.TokenAudit" do
test "it will collect token audits and aggregate conuts" do
tok = insert(:persisted_token)
now = Timex.now()
|> Timex.set(minute: 0, second: 0, microsecond: {0, 6})
ip = '1.2.3.4'
{:ok, pid} = TokenAudit.start()
Process.monitor(pid)
TokenAudit.submit(pid, {tok.id, now, ip})
TokenAudit.submit(pid, {tok.id, now, ip})
send(pid, :flush)
assert_receive {:DOWN, _, :process, ^pid, _}
[audit] = Core.Repo.all(Core.Schema.AccessTokenAudit)
assert audit.ip == "#{ip}"
assert audit.token_id == tok.id
assert audit.timestamp == now
assert audit.count == 2
end
end
end
| 28.241379 | 67 | 0.623932 |
0815ad3e0bfa5e7f232e433de95a2010f0366090 | 50,696 | ex | Elixir | clients/compute/lib/google_api/compute/v1/api/region_disks.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-10-01T09:20:41.000Z | 2021-10-01T09:20:41.000Z | clients/compute/lib/google_api/compute/v1/api/region_disks.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/api/region_disks.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Api.RegionDisks do
@moduledoc """
API calls for all endpoints tagged `RegionDisks`.
"""
alias GoogleApi.Compute.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Adds existing resource policies to a regional disk. You can only add one policy which will be applied to this disk for scheduling snapshot creation.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - The name of the region for this request.
* `disk` (*type:* `String.t`) - The disk name for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:userIp` (*type:* `String.t`) - Legacy name for parameter that has been superseded by `quotaUser`.
* `:requestId` (*type:* `String.t`) - An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* `:body` (*type:* `GoogleApi.Compute.V1.Model.RegionDisksAddResourcePoliciesRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec compute_region_disks_add_resource_policies(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def compute_region_disks_add_resource_policies(
connection,
project,
region,
disk,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:userIp => :query,
:requestId => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/projects/{project}/regions/{region}/disks/{disk}/addResourcePolicies", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"region" => URI.encode(region, &URI.char_unreserved?/1),
"disk" => URI.encode(disk, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Operation{}])
end
@doc """
Creates a snapshot of this regional disk.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - Name of the region for this request.
* `disk` (*type:* `String.t`) - Name of the regional persistent disk to snapshot.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:userIp` (*type:* `String.t`) - Legacy name for parameter that has been superseded by `quotaUser`.
* `:requestId` (*type:* `String.t`) - An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* `:body` (*type:* `GoogleApi.Compute.V1.Model.Snapshot.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec compute_region_disks_create_snapshot(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def compute_region_disks_create_snapshot(
connection,
project,
region,
disk,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:userIp => :query,
:requestId => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/projects/{project}/regions/{region}/disks/{disk}/createSnapshot", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"region" => URI.encode(region, &URI.char_unreserved?/1),
"disk" => URI.encode(disk, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Operation{}])
end
@doc """
Deletes the specified regional persistent disk. Deleting a regional disk removes all the replicas of its data permanently and is irreversible. However, deleting a disk does not delete any snapshots previously made from the disk. You must separately delete snapshots.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - Name of the region for this request.
* `disk` (*type:* `String.t`) - Name of the regional persistent disk to delete.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:userIp` (*type:* `String.t`) - Legacy name for parameter that has been superseded by `quotaUser`.
* `:requestId` (*type:* `String.t`) - An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec compute_region_disks_delete(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def compute_region_disks_delete(
connection,
project,
region,
disk,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:userIp => :query,
:requestId => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/projects/{project}/regions/{region}/disks/{disk}", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"region" => URI.encode(region, &URI.char_unreserved?/1),
"disk" => URI.encode(disk, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Operation{}])
end
@doc """
Returns a specified regional persistent disk.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - Name of the region for this request.
* `disk` (*type:* `String.t`) - Name of the regional persistent disk to return.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:userIp` (*type:* `String.t`) - Legacy name for parameter that has been superseded by `quotaUser`.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Disk{}}` on success
* `{:error, info}` on failure
"""
@spec compute_region_disks_get(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.Disk.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def compute_region_disks_get(
connection,
project,
region,
disk,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:userIp => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/projects/{project}/regions/{region}/disks/{disk}", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"region" => URI.encode(region, &URI.char_unreserved?/1),
"disk" => URI.encode(disk, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Disk{}])
end
@doc """
Gets the access control policy for a resource. May be empty if no such policy or resource exists.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - The name of the region for this request.
* `resource` (*type:* `String.t`) - Name or id of the resource for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:userIp` (*type:* `String.t`) - Legacy name for parameter that has been superseded by `quotaUser`.
* `:optionsRequestedPolicyVersion` (*type:* `integer()`) - Requested IAM Policy version.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Policy{}}` on success
* `{:error, info}` on failure
"""
@spec compute_region_disks_get_iam_policy(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.Policy.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def compute_region_disks_get_iam_policy(
connection,
project,
region,
resource,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:userIp => :query,
:optionsRequestedPolicyVersion => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/projects/{project}/regions/{region}/disks/{resource}/getIamPolicy", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"region" => URI.encode(region, &URI.char_unreserved?/1),
"resource" => URI.encode(resource, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Policy{}])
end
@doc """
Creates a persistent regional disk in the specified project using the data included in the request.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - Name of the region for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:userIp` (*type:* `String.t`) - Legacy name for parameter that has been superseded by `quotaUser`.
* `:requestId` (*type:* `String.t`) - An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* `:sourceImage` (*type:* `String.t`) - Source image to restore onto a disk. This field is optional.
* `:body` (*type:* `GoogleApi.Compute.V1.Model.Disk.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec compute_region_disks_insert(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def compute_region_disks_insert(connection, project, region, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:userIp => :query,
:requestId => :query,
:sourceImage => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/projects/{project}/regions/{region}/disks", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"region" => URI.encode(region, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Operation{}])
end
@doc """
Retrieves the list of persistent disks contained within the specified region.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - Name of the region for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:userIp` (*type:* `String.t`) - Legacy name for parameter that has been superseded by `quotaUser`.
* `:filter` (*type:* `String.t`) - A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The comparison operator must be either `=`, `!=`, `>`, or `<`. For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`. You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels. To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ```
* `:maxResults` (*type:* `integer()`) - The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
* `:orderBy` (*type:* `String.t`) - Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name. You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first. Currently, only sorting by `name` or `creationTimestamp desc` is supported.
* `:pageToken` (*type:* `String.t`) - Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
* `:returnPartialSuccess` (*type:* `boolean()`) - Opt-in for partial success behavior which provides partial results in case of failure. The default value is false.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.DiskList{}}` on success
* `{:error, info}` on failure
"""
@spec compute_region_disks_list(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.DiskList.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def compute_region_disks_list(connection, project, region, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:userIp => :query,
:filter => :query,
:maxResults => :query,
:orderBy => :query,
:pageToken => :query,
:returnPartialSuccess => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/projects/{project}/regions/{region}/disks", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"region" => URI.encode(region, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.DiskList{}])
end
@doc """
Removes resource policies from a regional disk.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - The name of the region for this request.
* `disk` (*type:* `String.t`) - The disk name for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:userIp` (*type:* `String.t`) - Legacy name for parameter that has been superseded by `quotaUser`.
* `:requestId` (*type:* `String.t`) - An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* `:body` (*type:* `GoogleApi.Compute.V1.Model.RegionDisksRemoveResourcePoliciesRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec compute_region_disks_remove_resource_policies(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def compute_region_disks_remove_resource_policies(
connection,
project,
region,
disk,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:userIp => :query,
:requestId => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url(
"/projects/{project}/regions/{region}/disks/{disk}/removeResourcePolicies",
%{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"region" => URI.encode(region, &URI.char_unreserved?/1),
"disk" => URI.encode(disk, &URI.char_unreserved?/1)
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Operation{}])
end
@doc """
Resizes the specified regional persistent disk.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - The project ID for this request.
* `region` (*type:* `String.t`) - Name of the region for this request.
* `disk` (*type:* `String.t`) - Name of the regional persistent disk.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:userIp` (*type:* `String.t`) - Legacy name for parameter that has been superseded by `quotaUser`.
* `:requestId` (*type:* `String.t`) - An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* `:body` (*type:* `GoogleApi.Compute.V1.Model.RegionDisksResizeRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec compute_region_disks_resize(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def compute_region_disks_resize(
connection,
project,
region,
disk,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:userIp => :query,
:requestId => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/projects/{project}/regions/{region}/disks/{disk}/resize", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"region" => URI.encode(region, &URI.char_unreserved?/1),
"disk" => URI.encode(disk, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Operation{}])
end
@doc """
Sets the access control policy on the specified resource. Replaces any existing policy.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - The name of the region for this request.
* `resource` (*type:* `String.t`) - Name or id of the resource for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:userIp` (*type:* `String.t`) - Legacy name for parameter that has been superseded by `quotaUser`.
* `:body` (*type:* `GoogleApi.Compute.V1.Model.RegionSetPolicyRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Policy{}}` on success
* `{:error, info}` on failure
"""
@spec compute_region_disks_set_iam_policy(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.Policy.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def compute_region_disks_set_iam_policy(
connection,
project,
region,
resource,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:userIp => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/projects/{project}/regions/{region}/disks/{resource}/setIamPolicy", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"region" => URI.encode(region, &URI.char_unreserved?/1),
"resource" => URI.encode(resource, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Policy{}])
end
@doc """
Sets the labels on the target regional disk.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - The region for this request.
* `resource` (*type:* `String.t`) - Name or id of the resource for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:userIp` (*type:* `String.t`) - Legacy name for parameter that has been superseded by `quotaUser`.
* `:requestId` (*type:* `String.t`) - An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* `:body` (*type:* `GoogleApi.Compute.V1.Model.RegionSetLabelsRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.Operation{}}` on success
* `{:error, info}` on failure
"""
@spec compute_region_disks_set_labels(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.Operation.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def compute_region_disks_set_labels(
connection,
project,
region,
resource,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:userIp => :query,
:requestId => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/projects/{project}/regions/{region}/disks/{resource}/setLabels", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"region" => URI.encode(region, &URI.char_unreserved?/1),
"resource" => URI.encode(resource, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Operation{}])
end
@doc """
Returns permissions that a caller has on the specified resource.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `region` (*type:* `String.t`) - The name of the region for this request.
* `resource` (*type:* `String.t`) - Name or id of the resource for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:userIp` (*type:* `String.t`) - Legacy name for parameter that has been superseded by `quotaUser`.
* `:body` (*type:* `GoogleApi.Compute.V1.Model.TestPermissionsRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.TestPermissionsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec compute_region_disks_test_iam_permissions(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.TestPermissionsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def compute_region_disks_test_iam_permissions(
connection,
project,
region,
resource,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:userIp => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url(
"/projects/{project}/regions/{region}/disks/{resource}/testIamPermissions",
%{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"region" => URI.encode(region, &URI.char_unreserved?/1),
"resource" => URI.encode(resource, &URI.char_unreserved?/1)
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.TestPermissionsResponse{}])
end
end
| 49.799607 | 1,174 | 0.621943 |
0815af8c4efb2a2c8e85cd808e18ffc88bcb97ae | 1,035 | exs | Elixir | config/config.exs | smpallen99/exsyslog | 64c7933d551f6a17cf042443f3ec79898aec59f2 | [
"MIT"
] | 2 | 2015-01-14T14:01:03.000Z | 2016-09-13T08:04:33.000Z | config/config.exs | smpallen99/exsyslog | 64c7933d551f6a17cf042443f3ec79898aec59f2 | [
"MIT"
] | null | null | null | config/config.exs | smpallen99/exsyslog | 64c7933d551f6a17cf042443f3ec79898aec59f2 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application and
# its dependencies. It must return a keyword list containing the
# application name and have as value another keyword list with
# the application key-value pairs.
# Note this configuration is loaded before any dependency and is
# restricted to this project. If another project depends on this
# project, this file won't be loaded nor affect the parent project.
# You can customize the configuration path by setting :config_path
# in your mix.exs file. For example, you can emulate configuration
# per environment by setting:
#
# config_path: "config/#{Mix.env}.exs"
#
# Changing any file inside the config directory causes the whole
# project to be recompiled.
# Sample configuration:
#
# [dep1: [key: :value],
# dep2: [key: :value]]
# [exsyslog: [
# level: :info,
# host: '127.0.0.1',
# facility: :local2,
# appid: "exsyslog",
# max_term_size: 8192,
# max_message_size: 16000
# ]]
# Active configuration: log at :info and above to the local syslog daemon
# on facility local1. See the commented sample above for the full option set.
[exsyslog: [level: :info, host: '127.0.0.1', facility: :local1]]
| 30.441176 | 67 | 0.720773 |
0815b19b0171f33deb76de7e628c7ab308ff2062 | 484 | ex | Elixir | lib/absinthe/language/null_value.ex | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 4,101 | 2016-03-02T03:49:20.000Z | 2022-03-31T05:46:01.000Z | lib/absinthe/language/null_value.ex | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 889 | 2016-03-02T16:06:59.000Z | 2022-03-31T20:24:12.000Z | lib/absinthe/language/null_value.ex | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 564 | 2016-03-02T07:49:59.000Z | 2022-03-06T14:40:59.000Z | defmodule Absinthe.Language.NullValue do
@moduledoc false
alias Absinthe.{Blueprint, Language}
  # A parsed `null` literal carries no payload of its own; only the
  # parser-recorded source location is retained.
  defstruct [
    :loc
  ]
  @type t :: %__MODULE__{
          loc: Language.loc_t()
        }
defimpl Blueprint.Draft do
def convert(node, _doc) do
%Blueprint.Input.Null{
source_location: source_location(node)
}
end
defp source_location(%{loc: nil}), do: nil
defp source_location(%{loc: loc}), do: Blueprint.SourceLocation.at(loc)
end
end
| 19.36 | 75 | 0.644628 |
0815bb894cc83fa320a788c81c1230390fa25254 | 567 | ex | Elixir | lib/postoffice_web/views/changeset_view.ex | IgorRodriguez/postoffice | 9012193e0780f2403bd3db90b8f6258656780fee | [
"Apache-2.0"
] | 15 | 2020-01-24T10:33:57.000Z | 2020-10-24T07:57:14.000Z | lib/postoffice_web/views/changeset_view.ex | IgorRodriguez/postoffice | 9012193e0780f2403bd3db90b8f6258656780fee | [
"Apache-2.0"
] | 24 | 2020-01-24T09:52:56.000Z | 2021-02-19T09:15:12.000Z | lib/postoffice_web/views/changeset_view.ex | IgorRodriguez/postoffice | 9012193e0780f2403bd3db90b8f6258656780fee | [
"Apache-2.0"
] | 5 | 2020-01-25T18:03:44.000Z | 2021-02-23T10:07:03.000Z | defmodule PostofficeWeb.ChangesetView do
use PostofficeWeb, :view
@doc """
Traverses and translates changeset errors.
See `Ecto.Changeset.traverse_errors/2` and
`PostofficeWeb.ErrorHelpers.translate_error/1` for more details.
"""
def translate_errors(changeset) do
Ecto.Changeset.traverse_errors(changeset, &translate_error/1)
end
def render("error.json", %{changeset: changeset}) do
# When encoded, the changeset returns its errors
# as a JSON object. So we just pass it forward.
%{errors: translate_errors(changeset)}
end
end
| 28.35 | 66 | 0.742504 |
0815cafcf0ce8cf0822c852b0b9417ae163408fa | 3,257 | ex | Elixir | lib/fryse/content.ex | fryse/fryse | 56e25ff2dc3dad56593849af7e8786225b918934 | [
"MIT"
] | 7 | 2018-02-03T01:04:22.000Z | 2020-07-19T16:47:03.000Z | lib/fryse/content.ex | fryse/fryse | 56e25ff2dc3dad56593849af7e8786225b918934 | [
"MIT"
] | 3 | 2020-07-16T09:28:38.000Z | 2020-07-22T09:40:45.000Z | lib/fryse/content.ex | fryse/fryse | 56e25ff2dc3dad56593849af7e8786225b918934 | [
"MIT"
] | null | null | null | defmodule Fryse.Content do
@moduledoc false
alias Fryse.File
alias Fryse.Folder
alias Fryse.Sort
def find_page(path, %Fryse{} = fryse) when is_binary(path) do
pieces =
case String.starts_with?(path, "/") do
true -> path |> Path.split() |> Enum.drop(1)
false -> path |> Path.split()
end
get_content_item(pieces, fryse.content.children)
end
def find_pages(path, %Fryse{} = fryse, options \\ []) when is_binary(path) do
pieces =
case String.starts_with?(path, "/") do
true -> path |> Path.split() |> Enum.drop(1)
false -> path |> Path.split()
end
get_content_items(pieces, fryse.content.children, options)
end
defp get_content_item([name | []], content) do
files = for %File{} = file <- content, do: file
found = Enum.filter(files, fn %File{path: path} -> String.ends_with?(path, name) end)
case found do
[file] -> {:ok, file}
[] -> {:error, :not_found}
end
end
defp get_content_item([name | path], content) do
folder = for %Folder{} = folder <- content, do: folder
sub =
folder
|> Enum.filter(fn %Folder{name: f_name} -> f_name == name end)
|> Enum.at(0)
case sub do
%Folder{children: children} -> get_content_item(path, children)
nil -> {:error, :not_found}
end
end
defp get_content_items([], content, options) do
excluded = Keyword.get(options, :excluded, false)
index = Keyword.get(options, :index, false)
sort = Keyword.get(options, :sort, false)
offset = Keyword.get(options, :offset, false)
limit = Keyword.get(options, :limit, false)
items =
content
|> files_filter_excluded(excluded)
|> files_filter_index(index)
|> files_sort(sort)
|> files_offset(offset)
|> files_limit(limit)
{:ok, items}
end
defp get_content_items([name | path], content, options) do
folder = for %Folder{} = folder <- content, do: folder
sub =
folder
|> Enum.filter(fn %Folder{name: f_name} -> f_name == name end)
|> Enum.at(0)
case sub do
%Folder{children: children} -> get_content_items(path, children, options)
nil -> {:error, :not_found}
end
end
defp files_filter_excluded(content, excluded) do
case excluded do
true -> for %File{} = file <- content, do: file
false -> for %File{excluded: false} = file <- content, do: file
end
end
defp files_filter_index(files, true), do: files
defp files_filter_index(files, false) do
files
|> Enum.reject(fn %File{name: name} -> name == "index" end)
end
defp files_sort(files, false), do: files
defp files_sort(files, arg) do
[key, sort] = Sort.parse(arg)
mapper = fn file -> Map.get(file.document.frontmatter, key, 0) end
sorter = Sort.function(sort)
Enum.sort_by(files, mapper, sorter)
end
defp files_offset(files, false), do: files
defp files_offset(files, offset) when is_integer(offset) do
files |> Enum.drop(offset)
end
defp files_offset(files, _), do: files
defp files_limit(files, false), do: files
defp files_limit(files, limit) when is_integer(limit) do
files |> Enum.take(limit)
end
defp files_limit(files, _), do: files
end
| 25.849206 | 89 | 0.629721 |
0815cb71d50687937824071677cf5975882239be | 2,148 | ex | Elixir | plugins/ucc_chat/lib/ucc_chat_web/views/admin_view.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | plugins/ucc_chat/lib/ucc_chat_web/views/admin_view.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | plugins/ucc_chat/lib/ucc_chat_web/views/admin_view.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | defmodule UccChatWeb.AdminView do
use UccChatWeb, :view
import UccAdminWeb.View.Utils, warn: false
alias UcxUcc.Accounts
  # Maps the numeric room type code to a translated label:
  # 0 = channel, 1 = private group, 2 = direct message.
  def room_type(0), do: ~g"Channel"
  def room_type(1), do: ~g"Private Group"
  def room_type(2), do: ~g"Direct Message"
def render_user_action_button(user, "admin") do
if Accounts.has_role? user, "admin" do
render "user_action_buttons.html", opts: %{
fun: "change-admin", user: user, type: :danger, action: "remove-admin", icon: :shield, label: ~g(REMOVE ADMIN)
}
else
render "user_action_buttons.html", opts: %{
fun: "change-admin", user: user, type: :secondary, action: "make-admin", icon: :shield, label: ~g(MAKE ADMIN)
}
end
end
def render_user_action_button(user, "activate") do
if user.active do
render "user_action_buttons.html", opts: %{
fun: "change-active", user: user, type: :danger, action: "deactivate", icon: :block, label: ~g(DEACTIVATE)
}
else
render "user_action_buttons.html", opts: %{
fun: "change-active", user: user, type: :secondary, action: "activate", icon: "ok-circled", label: ~g(ACTIVATE)
}
end
end
def render_user_action_button(user, "edit") do
render "user_action_buttons.html", opts: %{user: user, type: :primary, action: "edit-user", icon: :edit, label: ~g(EDIT)}
end
def render_user_action_button(user, "delete") do
render "user_action_buttons.html", opts: %{user: user, type: :danger, action: "delete", icon: :trash, label: ~g(DELETE)}
end
  # Translated label for a record's numeric `type` field:
  # 0 = channel, 1 = private group, 2 = direct message.
  def admin_type_label(%{type: 0}), do: ~g(Channel)
  def admin_type_label(%{type: 1}), do: ~g(Private Group)
  def admin_type_label(%{type: 2}), do: ~g(Direct Message)
  # A record flagged `archived: true` is labeled "Archived"; anything else
  # (including records without the flag) is labeled "Active".
  def admin_state_label(%{archived: true}), do: ~g(Archived)
  def admin_state_label(_), do: ~g(Active)
def admin_label(channel, field) do
channel
|> Map.get(field)
|> do_admin_label
end
def admin_label(item), do: do_admin_label(item)
  # Translates a boolean into its localized display string.
  defp do_admin_label(true), do: ~g(True)
  defp do_admin_label(false), do: ~g(False)
def get_slash_commands(item, field) do
item |> Map.get(field) |> String.split("\n")
end
end
| 33.5625 | 125 | 0.662477 |
081647587e036d42379d795cbaf1521a984e7559 | 309 | ex | Elixir | archive/ivy/transaction.ex | naramore/ivy | 171c81a23e0657088c6c5156fb181f58313c03d7 | [
"MIT"
] | null | null | null | archive/ivy/transaction.ex | naramore/ivy | 171c81a23e0657088c6c5156fb181f58313c03d7 | [
"MIT"
] | null | null | null | archive/ivy/transaction.ex | naramore/ivy | 171c81a23e0657088c6c5156fb181f58313c03d7 | [
"MIT"
] | null | null | null | defmodule Ivy.Transaction do
  alias Ivy.Datom
  # A transaction must always be built with its basis `t` and its datoms.
  # NOTE(review): :data is enforced but also carries a default ([]) —
  # confirm this combination compiles on the targeted Elixir version.
  @enforce_keys [:t, :data]
  defstruct [:t, :instant, :id, data: [], attributes: %{}]
  # `instant` is the wall-clock time of the transaction, `data` the list of
  # datoms it asserts, and `attributes` a lookup keyed by ident.
  @type t :: %__MODULE__{
    t: non_neg_integer,
    instant: DateTime.t,
    id: Ivy.id,
    data: [Datom.t],
    attributes: %{optional(Ivy.ident) => Datom.value}
  }
end
| 22.071429 | 58 | 0.618123 |
081648796dc3cc52a4c3c324ab970f2494893e44 | 512 | ex | Elixir | lib/ethereumex.ex | Boyuan-Chen/ethereumex | 8b269a0651e7904589c5f659a1426ff6b7fe2980 | [
"MIT"
] | 158 | 2018-11-05T23:45:47.000Z | 2022-03-21T18:58:31.000Z | lib/ethereumex.ex | hawku-com/ethereumex | 19a5cf1a4fcfd6c06aae3f97554c8a9af3f2d89b | [
"MIT"
] | 43 | 2018-11-02T18:45:50.000Z | 2022-02-28T09:09:46.000Z | lib/ethereumex.ex | hawku-com/ethereumex | 19a5cf1a4fcfd6c06aae3f97554c8a9af3f2d89b | [
"MIT"
] | 28 | 2018-11-13T16:33:31.000Z | 2022-03-31T17:49:11.000Z | defmodule Ethereumex do
@external_resource "README.md"
@moduledoc "README.md"
|> File.read!()
|> String.split("<!-- MDOC !-->")
|> Enum.fetch!(1)
use Application
import Supervisor.Spec, warn: false
alias Ethereumex.Config
alias Ethereumex.Counter
def start(_type, _args) do
:ok = Counter.setup()
children = Config.setup_children()
opts = [strategy: :one_for_one, name: Ethereumex.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 24.380952 | 64 | 0.642578 |
08164ed8030924969e7f9dc56c205c3f7e29cf57 | 2,784 | ex | Elixir | test/support/case_helper.ex | blakedietz/courtbot | b18d7eb84fd1405b359c8da980e1175bb1738841 | [
"ISC"
] | 6 | 2018-07-25T18:35:57.000Z | 2018-11-03T17:01:02.000Z | test/support/case_helper.ex | blakedietz/courtbot | b18d7eb84fd1405b359c8da980e1175bb1738841 | [
"ISC"
] | 19 | 2018-11-12T05:29:44.000Z | 2020-04-12T01:04:41.000Z | test/support/case_helper.ex | blakedietz/courtbot | b18d7eb84fd1405b359c8da980e1175bb1738841 | [
"ISC"
] | 1 | 2020-04-12T00:36:50.000Z | 2020-04-12T00:36:50.000Z | ExUnit.start()
defmodule CourtbotTest.Helper.Case do
@moduledoc false
use ExUnit.CaseTemplate
alias Courtbot.Case
  # Injected into every module that does `use CourtbotTest.Helper.Case`:
  # the conversation DSL, route helpers, ConnCase plumbing, and a
  # `new_conversation/0` helper that yields a fresh test connection.
  using do
    quote do
      import CourtbotTest.Helper.Case.Conversation
      import CourtbotWeb.Router.Helpers
      use CourtbotWeb.ConnCase, async: true
      # The default endpoint for testing
      @endpoint CourtbotWeb.Endpoint
      defp new_conversation(), do: build_conn()
    end
  end
def debug_case(),
do: %Case{
case_number: "BEEPBOOP",
formatted_case_number: "BEEPBOOP"
}
defmodule Conversation do
@moduledoc false
alias Courtbot.{Case, Subscriber, Repo}
alias CourtbotWeb.Response
use Phoenix.ConnTest
import Ecto.Query
@endpoint CourtbotWeb.Endpoint
defmacro for_case(case_details, do: block) do
quote do
unquote(
block
|> Macro.prewalk(&prewalk(&1, case_details))
)
end
end
def text(conn, case, message) do
message = replace_properties(case, message)
conn = post(conn, "/sms/en", %{"From" => "+12025550170", "Body" => message})
assert(conn.status === 200, "Request failed with a non 200 error: #{conn.status}")
conn
end
def response(conn, case, message) do
message =
case
|> replace_properties(message)
|> HtmlEntities.encode()
assert "<?xml version=\"1.0\" encoding=\"UTF-8\"?><Response><Sms>#{message}</Sms></Response>" ===
conn.resp_body
conn
end
defp replace_properties(case_details = %Courtbot.Case{id: case_id}, original_message) do
subscribers =
Repo.all(from(s in Subscriber, where: s.case_id == ^case_id, preload: :case))
case_properties =
case_details
|> case_properties()
|> Map.merge(Response.custom_variables())
|> Map.merge(Response.cases_context(%{context: %{cases: subscribers}}))
Enum.reduce(case_properties, original_message, fn {k, v}, message ->
key = Atom.to_string(k)
if String.contains?(original_message, "{#{key}}") do
assert String.trim(v) != "", "#{key} is empty but is expected in: #{original_message}"
assert v != nil, "#{key} is null but is expected in: #{original_message}"
String.replace(message, "{#{key}}", v)
else
message
end
end)
end
defp case_properties(case_details) do
with case = %{hearings: [hearing]} <- Case.format(case_details) do
case |> Map.delete(:hearings) |> Map.merge(hearing)
else
case -> case
end
end
defp prewalk({type, meta, [message]}, case) when type === :text or type === :response do
{type, meta, [case, message]}
end
defp prewalk(ast, _), do: ast
end
end
| 26.018692 | 103 | 0.613865 |
0816620a85fa4722a73639704e9dddeab5b55ffa | 718 | ex | Elixir | lib/fancy_chat_web/gettext.ex | LKlemens/fancy_chat | 15ecd005a9b4cc8e3bb5548a942069896811d234 | [
"MIT"
] | 1 | 2020-07-27T14:33:02.000Z | 2020-07-27T14:33:02.000Z | lib/fancy_chat_web/gettext.ex | LKlemens/fancy_chat | 15ecd005a9b4cc8e3bb5548a942069896811d234 | [
"MIT"
] | null | null | null | lib/fancy_chat_web/gettext.ex | LKlemens/fancy_chat | 15ecd005a9b4cc8e3bb5548a942069896811d234 | [
"MIT"
] | null | null | null | defmodule FancyChatWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import FancyChatWeb.Gettext
# Simple translation
gettext("Here is the string to translate")
# Plural translation
ngettext("Here is the string to translate",
"Here are the strings to translate",
3)
# Domain-based translation
dgettext("errors", "Here is the error message to translate")
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :fancy_chat
end
| 28.72 | 72 | 0.681058 |
0817136ac5197d976374c963da523ab28b0d1c84 | 7,615 | ex | Elixir | lib/challenge_gov/message_context_statuses.ex | jennstein2017/Challenge_gov | e0820df8b124a32ff8b78cb827ae43551492988b | [
"CC0-1.0"
] | 9 | 2020-02-26T20:24:38.000Z | 2022-03-22T21:14:52.000Z | lib/challenge_gov/message_context_statuses.ex | jennstein2017/Challenge_gov | e0820df8b124a32ff8b78cb827ae43551492988b | [
"CC0-1.0"
] | 15 | 2020-04-22T19:33:24.000Z | 2022-03-26T15:11:17.000Z | lib/challenge_gov/message_context_statuses.ex | jennstein2017/Challenge_gov | e0820df8b124a32ff8b78cb827ae43551492988b | [
"CC0-1.0"
] | 4 | 2020-04-27T22:58:57.000Z | 2022-01-14T13:42:09.000Z | defmodule ChallengeGov.MessageContextStatuses do
@moduledoc """
Context for MessageContextStatuses
"""
@behaviour Stein.Filter
import Ecto.Query
alias Ecto.Multi
alias ChallengeGov.Repo
alias ChallengeGov.Accounts
alias ChallengeGov.Challenges
alias ChallengeGov.Challenges.Challenge
alias ChallengeGov.Submissions
alias ChallengeGov.Messages.MessageContextStatus
alias Stein.Filter
def all_for_user(user, opts \\ []) do
MessageContextStatus
|> preload(context: [:messages, last_message: [:author]])
|> join(:inner, [mcs], mc in assoc(mcs, :context))
|> order_by([mcs, mc], desc: mc.updated_at)
|> where([mcs], mcs.user_id == ^user.id)
|> maybe_filter_archived(opts[:filter])
|> Filter.filter(opts[:filter], __MODULE__)
|> Repo.all()
end
defp maybe_filter_archived(query, %{"archived" => _}), do: query
defp maybe_filter_archived(query, _filter), do: where(query, [mcs], mcs.archived != true)
def get(id) do
MessageContextStatus
|> Repo.get(id)
|> case do
nil ->
{:error, :not_found}
message_context_status ->
{:ok, message_context_status}
end
end
def get(user, context) do
MessageContextStatus
|> Repo.get_by(user_id: user.id, message_context_id: context.id)
|> case do
nil ->
{:error, :not_found}
message_context_status ->
{:ok, message_context_status}
end
end
def get_by_ids(user_id, context_id) do
MessageContextStatus
|> Repo.get_by(user_id: user_id, message_context_id: context_id)
|> case do
nil ->
{:error, :not_found}
message_context_status ->
{:ok, message_context_status}
end
end
def create(user, context) do
%MessageContextStatus{}
|> MessageContextStatus.create_changeset(user.id, context.id)
|> Repo.insert()
end
def create_all_for_message_context(message_context) do
message_context
|> get_user_ids_for_message_context
|> Enum.reduce(Multi.new(), fn user_id, multi ->
case get_by_ids(user_id, message_context.id) do
{:ok, _message_context} ->
multi
{:error, :not_found} ->
changeset =
MessageContextStatus.create_changeset(
%MessageContextStatus{},
user_id,
message_context.id
)
Multi.insert(multi, {:message_context_status, user_id, message_context.id}, changeset)
end
end)
end
def get_user_ids_for_message_context(
message_context = %{context: "challenge", audience: "challenge_managers"}
) do
%{context_id: context_id, audience: _audience} = message_context
{:ok, challenge} = Challenges.get(context_id)
admin_user_ids =
Accounts.all_admins()
|> Enum.map(& &1.id)
challenge_manager_user_ids =
challenge.challenge_managers
|> Enum.map(& &1.user_id)
user_ids = admin_user_ids ++ challenge_manager_user_ids
Enum.uniq(user_ids)
end
def get_user_ids_for_message_context(message_context = %{context: "challenge"}) do
%{context_id: context_id, audience: _audience} = message_context
{:ok, challenge} = Challenges.get(context_id)
admin_user_ids =
Accounts.all_admins()
|> Enum.map(& &1.id)
challenge_manager_user_ids =
challenge.challenge_managers
|> Enum.map(& &1.user_id)
solver_user_ids =
%{filter: %{"challenge_id" => context_id}}
|> Submissions.all()
|> Enum.map(& &1.submitter_id)
message_context = Repo.preload(message_context, [:contexts])
# Should exclude solvers that already have a child context that is one of their submissions
solver_user_ids_to_exclude =
message_context.contexts
|> Enum.map(fn context ->
{:ok, solver} = Accounts.get(context.context_id)
solver.id
end)
user_ids =
admin_user_ids ++
challenge_manager_user_ids ++ (solver_user_ids -- solver_user_ids_to_exclude)
Enum.uniq(user_ids)
end
def get_user_ids_for_message_context(message_context = %{context: "submission"}) do
%{context_id: context_id, audience: _audience} = message_context
{:ok, submission} = Submissions.get(context_id)
submission = Repo.preload(submission, challenge: [:challenge_managers])
admin_user_ids =
Accounts.all_admins()
|> Enum.map(& &1.id)
challenge_manager_user_ids =
submission.challenge.challenge_managers
|> Enum.map(& &1.user_id)
solver_user_ids = [submission.submitter_id]
user_ids = admin_user_ids ++ challenge_manager_user_ids ++ solver_user_ids
Enum.uniq(user_ids)
end
def get_user_ids_for_message_context(message_context = %{context: "solver"}) do
message_context = Repo.preload(message_context, [:parent])
%{context_id: context_id, audience: _audience} = message_context
{:ok, challenge} = Challenges.get(message_context.parent.context_id)
challenge = Repo.preload(challenge, [:challenge_managers])
admin_user_ids =
Accounts.all_admins()
|> Enum.map(& &1.id)
challenge_manager_user_ids =
challenge.challenge_managers
|> Enum.map(& &1.user_id)
solver_user_ids = [context_id]
user_ids = admin_user_ids ++ challenge_manager_user_ids ++ solver_user_ids
Enum.uniq(user_ids)
end
def get_challenges_for_user(user) do
MessageContextStatus
|> join(:inner, [mcs], mc in assoc(mcs, :context))
|> join(:inner, [mcs, mc], c in Challenge,
on: mc.context == "challenge" and mc.context_id == c.id
)
|> where([mcs], mcs.user_id == ^user.id)
|> select([mcs, mc, c], c)
|> Repo.all()
end
# Flips the `read` flag on the given message-context status.
def toggle_read(message_context_status) do
  attrs = %{read: !message_context_status.read}
  Repo.update(MessageContextStatus.changeset(message_context_status, attrs))
end
# Marks the given message-context status as read.
def mark_read(message_context_status) do
  Repo.update(MessageContextStatus.changeset(message_context_status, %{read: true}))
end
# Marks the given message-context status as unread.
def mark_unread(message_context_status) do
  Repo.update(MessageContextStatus.changeset(message_context_status, %{read: false}))
end
# Flips the `starred` flag on the given message-context status.
def toggle_starred(message_context_status) do
  attrs = %{starred: !message_context_status.starred}
  Repo.update(MessageContextStatus.changeset(message_context_status, attrs))
end
# Flips the `archived` flag on the given message-context status.
def toggle_archived(message_context_status) do
  attrs = %{archived: !message_context_status.archived}
  Repo.update(MessageContextStatus.changeset(message_context_status, attrs))
end
# Archives the given message-context status.
def archive(message_context_status) do
  Repo.update(MessageContextStatus.changeset(message_context_status, %{archived: true}))
end
# Un-archives the given message-context status.
def unarchive(message_context_status) do
  Repo.update(MessageContextStatus.changeset(message_context_status, %{archived: false}))
end
# Returns true when the user has at least one message-context status row.
def has_messages?(user) do
  count =
    MessageContextStatus
    |> where([mcs], mcs.user_id == ^user.id)
    |> Repo.aggregate(:count, :id)

  count > 0
end
@impl Stein.Filter
# Filters message-context statuses by their starred flag.
def filter_on_attribute({"starred", value}, query) do
  where(query, [mcs], mcs.starred == ^value)
end
# Filters message-context statuses by their archived flag.
def filter_on_attribute({"archived", value}, query) do
  where(query, [mcs], mcs.archived == ^value)
end
# Filters message-context statuses by their read flag.
def filter_on_attribute({"read", value}, query) do
  where(query, [mcs], mcs.read == ^value)
end
# Restricts the query to statuses whose context is the given challenge.
def filter_on_attribute({"challenge_id", value}, query) do
  joined = join(query, :inner, [mcs], mc in assoc(mcs, :context))
  where(joined, [mcs, mc], mc.context == "challenge" and mc.context_id == ^value)
end
end
| 27.293907 | 96 | 0.673539 |
0817473ddd6b3451285abc0f9a6ec3737e516867 | 1,834 | ex | Elixir | clients/pub_sub/lib/google_api/pub_sub/v1/model/expiration_policy.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/pub_sub/lib/google_api/pub_sub/v1/model/expiration_policy.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/pub_sub/lib/google_api/pub_sub/v1/model/expiration_policy.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.PubSub.V1.Model.ExpirationPolicy do
  @moduledoc """
  A policy that specifies the conditions for resource expiration (i.e.,
  automatic resource deletion).

  ## Attributes

  * `ttl` (*type:* `String.t`, *default:* `nil`) - Specifies the "time-to-live" duration for an associated resource. The
    resource expires if it is not active for a period of `ttl`. The definition
    of "activity" depends on the type of the associated resource. The minimum
    and maximum allowed values for `ttl` depend on the type of the associated
    resource, as well. If `ttl` is not set, the associated resource never
    expires.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :ttl => String.t()
        }

  # `field/1` is provided by GoogleApi.Gax.ModelBase and generates the struct
  # key plus the JSON (de)serialization plumbing for `ttl`.
  field(:ttl)
end
# Poison decoder implementation — delegates to the model's generated decoder.
defimpl Poison.Decoder, for: GoogleApi.PubSub.V1.Model.ExpirationPolicy do
  def decode(value, options),
    do: GoogleApi.PubSub.V1.Model.ExpirationPolicy.decode(value, options)
end
# Poison encoder implementation — delegates to the shared model-base encoder.
defimpl Poison.Encoder, for: GoogleApi.PubSub.V1.Model.ExpirationPolicy do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 34.603774 | 122 | 0.731734 |
081763334492edbdb082739e703c920f00b8f5d8 | 1,002 | ex | Elixir | lib/changelog/files/logo.ex | gustavoarmoa/changelog.com | e898a9979a237ae66962714821ed8633a4966f37 | [
"MIT"
] | null | null | null | lib/changelog/files/logo.ex | gustavoarmoa/changelog.com | e898a9979a237ae66962714821ed8633a4966f37 | [
"MIT"
] | null | null | null | lib/changelog/files/logo.ex | gustavoarmoa/changelog.com | e898a9979a237ae66962714821ed8633a4966f37 | [
"MIT"
] | null | null | null | defmodule Changelog.Files.Logo do
defmacro __using__(prefix) do
quote do
use Changelog.File, [:jpg, :jpeg, :png, :svg]
@versions [:original, :large, :medium, :small]
def storage_dir(_, {_, scope}), do: "uploads/logos/#{hashed(scope.id)}"
def filename(version, _), do: "#{unquote(prefix)}_logo_#{version}"
def transform(:original, _), do: :noaction
def transform(version, {file, _scope}) do
if file_type(file) == :svg do
:noaction
else
{:convert, "-strip -resize #{dimensions(version)} -format png", :png}
end
end
defp dimensions(:large), do: "800x800"
defp dimensions(:medium), do: "400x400"
defp dimensions(:small), do: "200x200"
end
end
end
# Concrete logo variants; each reuses the shared Logo macro with a distinct
# filename prefix.
defmodule Changelog.Files.ColorLogo do
  use Changelog.Files.Logo, "color"
end

defmodule Changelog.Files.DarkLogo do
  use Changelog.Files.Logo, "dark"
end

defmodule Changelog.Files.LightLogo do
  use Changelog.Files.Logo, "light"
end
| 25.692308 | 79 | 0.643713 |
0817ab793eeba428873683bbd8bbaa51b13f98ab | 196 | ex | Elixir | lib/trakki.ex | agschaid/trakki | 28cc4a1bda70c1136089f033eb38220ab557f14c | [
"MIT"
] | null | null | null | lib/trakki.ex | agschaid/trakki | 28cc4a1bda70c1136089f033eb38220ab557f14c | [
"MIT"
] | null | null | null | lib/trakki.ex | agschaid/trakki | 28cc4a1bda70c1136089f033eb38220ab557f14c | [
"MIT"
] | null | null | null | defmodule Trakki do
@moduledoc """
Documentation for Trakki.
"""
@doc """
Hello world.
## Examples
iex> Trakki.hello()
:world
"""
# Returns the canonical greeting atom (kept in sync with the doctest above).
def hello, do: :world
end
| 10.315789 | 27 | 0.556122 |
0817b7681cf79b9f399bef27916444b0b398bd0c | 575 | ex | Elixir | lib/remote_retro_web/channels/retro_management_handlers.ex | codyduval/remote_retro_crd | b3a4adda7503b4e1ca5ec135cd34e1e71ec8d247 | [
"MIT"
] | null | null | null | lib/remote_retro_web/channels/retro_management_handlers.ex | codyduval/remote_retro_crd | b3a4adda7503b4e1ca5ec135cd34e1e71ec8d247 | [
"MIT"
] | null | null | null | lib/remote_retro_web/channels/retro_management_handlers.ex | codyduval/remote_retro_crd | b3a4adda7503b4e1ca5ec135cd34e1e71ec8d247 | [
"MIT"
] | null | null | null | defmodule RemoteRetroWeb.RetroManagementHandlers do
import Phoenix.Channel
alias RemoteRetro.{Repo}
alias RemoteRetroWeb.{RetroManagement}
# Handles the "retro_edited" channel event: persists the edit and rebroadcasts
# the payload to all subscribers. The reply atom (:ok | :error) mirrors the
# transaction outcome.
def handle_in("retro_edited" = message, payload, socket) do
  {reply_atom, _} = atomic_update_and_broadcast(message, payload, socket)
  {:reply, reply_atom, socket}
end
# Runs the retro update and the broadcast inside one DB transaction so the
# broadcast only happens when the write succeeds. Returns Repo.transaction's
# {:ok, _} | {:error, _}; any raised exception is converted to {:error, %{}}.
defp atomic_update_and_broadcast(message, payload, socket) do
  Repo.transaction(fn ->
    RetroManagement.update!(socket.assigns.retro_id, payload)
    broadcast!(socket, message, payload)
  end)
rescue
  # NOTE(review): rescuing all exceptions hides the failure cause from the
  # client; consider rescuing specific errors or logging before swallowing.
  _ -> {:error, %{}}
end
| 26.136364 | 75 | 0.728696 |
0817caa165bf0b1a798fd3e3f07049be7d97952b | 3,322 | ex | Elixir | lib/tabular/test_support.ex | sparta-developers/tabular | 39c4a97c6378efa274c351dc435afd4bc011c616 | [
"BlueOak-1.0.0"
] | null | null | null | lib/tabular/test_support.ex | sparta-developers/tabular | 39c4a97c6378efa274c351dc435afd4bc011c616 | [
"BlueOak-1.0.0"
] | null | null | null | lib/tabular/test_support.ex | sparta-developers/tabular | 39c4a97c6378efa274c351dc435afd4bc011c616 | [
"BlueOak-1.0.0"
] | null | null | null | defmodule Tabular.TestSupport do
@moduledoc """
Functions to simplify testing with ascii tables.
`compare/2` compares two ascii tables, producing a
matrix of the individual comparisons. `equal?/1`
examines the matrix generated by `compare/2` and
returns true if all cells are true and false
otherwise.
"""
@doc ~S'''
Compares two ascii tables cell by cell. The result is a list of rows whose
first row is the (actual) header; each following cell is either the actual
value (when it matched) or an `{actual, expected}` tuple (when it did not).
Optional comparator functions can be provided per column header.

## Examples

    iex> table1 = """
    ...> +---------+-------+
    ...> | name    | count |
    ...> +---------+-------+
    ...> | Malcolm | 10    |
    ...> +---------+-------+
    ...> | Zoe     | 5     |
    ...> +---------+-------+
    ...> """
    ...>
    ...> table2 = """
    ...> +---------+-------+
    ...> | name    | count |
    ...> +---------+-------+
    ...> | Mike    | 11    |
    ...> +---------+-------+
    ...> | Zoe     | 20    |
    ...> +---------+-------+
    ...> """
    ...>
    ...> comparators = %{"count" => &(abs(String.to_integer(&1) - String.to_integer(&2)) < 2)}
    ...>
    ...> Tabular.TestSupport.compare(table1, table2, comparators: comparators)
    [
      ["name", "count"],
      [{"Malcolm", "Mike"}, "10"],
      ["Zoe", {"5", "20"}]
    ]
'''
# NOTE(review): rows are paired with Enum.zip, so extra rows in either table
# are silently dropped rather than reported as mismatches — confirm intended.
def compare(actual_table, expected_table, opts \\ [comparators: %{}]) do
  actual_table_data = actual_table |> Tabular.to_list_of_lists()
  expected_table_data = expected_table |> Tabular.to_list_of_lists()
  [actual_headers | actual_table_rows] = actual_table_data
  [expected_header_row | expected_table_rows] = expected_table_data
  [actual_headers] ++
    (Enum.zip(actual_table_rows, expected_table_rows)
     |> Enum.map(fn {actual_row, expected_row} ->
       # Walk header/actual/expected triples so each cell can pick up a
       # column-specific comparator by header name.
       Enum.zip([expected_header_row, actual_row, expected_row])
       |> Enum.map(fn {header, actual_value, expected_value} ->
         match? =
           case opts[:comparators][header] do
             nil -> actual_value == expected_value
             comparator -> comparator.(actual_value, expected_value)
           end
         # Matching cells keep the actual value; mismatches keep both sides
         # so callers (equal?/format_cells) can detect and render them.
         case match? do
           true -> actual_value
           false -> {actual_value, expected_value}
         end
       end)
     end))
end
@doc ~S'''
Examines the matrix produced by `compare/2`. Returns false
if any cell is an `{actual, expected}` mismatch tuple, true otherwise.

## Examples

    iex> results_table = [
    ...>   ["name", "count"],
    ...>   ["Zoe", {"5", "20"}]
    ...> ]
    ...>
    ...> Tabular.TestSupport.equal?(results_table)
    false
'''
def equal?(compare_result) do
  Enum.all?(compare_result, fn row ->
    # Any two-tuple cell marks a mismatch; every other value is a match.
    Enum.all?(row, fn
      {_, _} -> false
      _ -> true
    end)
  end)
end
# Flunks the current ExUnit test with a rendered diff table when any cell of
# `results_table` is a mismatch tuple; returns true when everything matched.
def assert_equal(results_table) do
  if equal?(results_table) do
    true
  else
    generate_error_message(results_table)
    |> ExUnit.Assertions.flunk()
  end
end
# Renders the comparison result as an ascii table (via TableRex), with
# mismatch cells highlighted, for use as a flunk message.
def generate_error_message([header | body]) do
  format_cells(body) |> TableRex.quick_render!(header)
end
# Renders mismatch tuples as ">>> left <=> right <<<" markers, leaving
# matching cells untouched.
def format_cells(results_table) do
  for row <- results_table do
    for cell <- row do
      case cell do
        {left, right} -> ">>> #{left} <=> #{right} <<<"
        other -> other
      end
    end
  end
end
end
| 27.00813 | 96 | 0.528597 |
0817e9fc153819eaba210cd69b41530b1d9e456a | 2,075 | ex | Elixir | test/fixtures/good_handler.ex | NFIBrokerage/beeline | 8280a351a8c36634ced9088f90fe8103b47d685b | [
"Apache-2.0"
] | null | null | null | test/fixtures/good_handler.ex | NFIBrokerage/beeline | 8280a351a8c36634ced9088f90fe8103b47d685b | [
"Apache-2.0"
] | null | null | null | test/fixtures/good_handler.ex | NFIBrokerage/beeline | 8280a351a8c36634ced9088f90fe8103b47d685b | [
"Apache-2.0"
] | null | null | null | defmodule Beeline.Fixtures.GoodHandler do
@moduledoc """
A good-example fixture which sets up a handler that subscribes to two
different versions of EventStoreDB
Note that this handler stores stream positions in GenStage state which
is not a good strategy for an actual process-manager or read-model.
Ideally stream positions should be stored somewhere reliable like a
PostgreSQL table.
"""
use Beeline
# Synchronously fetches the handler's full GenStage state (test helper).
def get_state, do: GenServer.call(__MODULE__, :get_state)
# Returns the last stream position recorded for `producer`, or -1 (start of
# stream) when the handler process is not running.
def get_stream_position(producer) do
  case GenServer.whereis(__MODULE__) do
    pid when is_pid(pid) -> GenServer.call(pid, {:get_stream_position, producer})
    _not_running -> -1
  end
end
defstruct [:opts, events: [], stream_positions: %{}]
# Starts the Beeline topology with two producers — one over the TCP adapter
# (:kelvin/Extreme) and one over gRPC (:volley/Spear) — both feeding this
# module as the consumer. Stream positions are looked up via
# get_stream_position/1; opts supply the stream names and are kept in state.
def start_link(opts) do
  Beeline.start_link(__MODULE__,
    name: __MODULE__,
    producers: [
      tcp: [
        connection: Beeline.Fixtures.ExtremeClient,
        stream_name: opts[:tcp_stream_name],
        adapter: :kelvin
      ],
      grpc: [
        connection: Beeline.Fixtures.SpearClient,
        stream_name: opts[:grpc_stream_name],
        adapter: :volley
      ]
    ],
    get_stream_position: {__MODULE__, :get_stream_position, []},
    subscribe_after: 0,
    health_check_interval: 1_000,
    health_check_drift: 0,
    context: %__MODULE__{opts: Map.new(opts)}
  )
end
@impl GenStage
def handle_events([subscription_event], _from, state) do
event = Beeline.decode_event(subscription_event)
producer = Beeline.producer(subscription_event)
stream_position = Beeline.stream_position(subscription_event)
state =
state
|> put_in([Access.key(:stream_positions), producer], stream_position)
|> update_in([Access.key(:events)], &[event | &1])
{:noreply, [], state}
end
@impl GenStage
# Exposes the whole state for assertions in tests.
def handle_call(:get_state, _from, state), do: {:reply, state, [], state}

# Returns the stored position for a producer, defaulting to -1 (stream start)
# when that producer has not delivered any event yet.
def handle_call({:get_stream_position, producer}, _from, state) do
  {:reply, state.stream_positions[producer] || -1, [], state}
end
end
| 28.040541 | 75 | 0.670843 |
0817f0c3d86478d0bbf30acddf774f62f99022c4 | 1,193 | ex | Elixir | lib/monocle/options.ex | brianbroderick/monocle | eeabecea658468479c04a02352271f6304447736 | [
"Apache-2.0"
] | 2 | 2018-02-11T01:18:24.000Z | 2020-01-12T17:19:22.000Z | lib/monocle/options.ex | brianbroderick/monocle | eeabecea658468479c04a02352271f6304447736 | [
"Apache-2.0"
] | null | null | null | lib/monocle/options.ex | brianbroderick/monocle | eeabecea658468479c04a02352271f6304447736 | [
"Apache-2.0"
] | null | null | null | defmodule Monocle.Options do
@type t :: %__MODULE__{}
# What we use to render
defstruct renderer: Monocle.HtmlRenderer,
# Inline style options
gfm: true, breaks: false, pedantic: false,
smartypants: true, sanitize: false,
footnotes: false, footnote_offset: 1,
# additional prefies for class of code blocks
code_class_prefix: nil,
# Internal—only override if you're brave
do_smartypants: nil, do_sanitize: nil,
# Very internal—the callback used to perform
# parallel rendering. Set to &Enum.map/2
# to keep processing in process and
# serial
mapper: &Monocle.pmap/2,
render_code: &Monocle.HtmlRenderer.render_code/1,
# Filename and initial line number of the markdown block passed in
# for meaningfull error messages
file: "<no file>",
line: 1,
messages: [], # [{:error|:warning, lnb, text},...]
plugins: %{}
def plugin_for_prefix(options, plugin_name) do
Map.get(options.plugins, plugin_name, false)
end
end
| 30.589744 | 79 | 0.58005 |
08180a6a30a9796acd6a981ced6139c1ef4ed018 | 1,596 | ex | Elixir | web/controllers/registration_controller.ex | timrourke/colorstorm-api | fee60a52701a4f773fcd2c8c5c70a472d0d52f09 | [
"MIT"
] | null | null | null | web/controllers/registration_controller.ex | timrourke/colorstorm-api | fee60a52701a4f773fcd2c8c5c70a472d0d52f09 | [
"MIT"
] | null | null | null | web/controllers/registration_controller.ex | timrourke/colorstorm-api | fee60a52701a4f773fcd2c8c5c70a472d0d52f09 | [
"MIT"
] | null | null | null | defmodule Colorstorm.RegistrationController do
use Colorstorm.Web, :controller
import Ecto.Query, only: [from: 2]
alias Colorstorm.User
alias Colorstorm.Redis
alias JaSerializer.Params
require Logger
plug :scrub_params, "data" when action in [:create]
plug :put_view, Colorstorm.RegistrationView
# Registers a user from a JSON:API "users" payload.
# On success: sends the welcome email, stores a Redis confirmation token,
# and replies 201 with the user plus a Location header. On validation
# failure: replies 422 with the changeset errors.
def create(conn, %{"data" => data = %{"type" => "users", "attributes" => _user_params}}) do
  changeset = User.changeset(%User{}, Params.to_attributes(data))

  case Repo.insert(changeset) do
    {:ok, user} ->
      conn
      |> send_welcome_email(user)
      |> test_redis(user)
      |> put_status(:created)
      |> put_resp_header("location", user_path(conn, :show, user))
      |> render(Colorstorm.UserView, "show.json-api", data: user)
    {:error, changeset} ->
      conn
      |> put_status(:unprocessable_entity)
      |> render(Colorstorm.UserView, "errors.json-api", data: changeset)
  end
end
# Builds and sends the welcome email asynchronously (deliver_later), then
# returns `conn` unchanged so the call composes in the controller pipeline.
defp send_welcome_email(conn, user) do
  email_assigns = %{
    "hero_text" => "Welcome to ColorStorm!",
    "user" => user,
    # NOTE(review): hard-coded placeholder URL — presumably this should be a
    # real signed confirmation link; verify before shipping.
    "email_confirmation_link_url" => "https://fake.com/link?foo=bar"
  }
  Colorstorm.Email.welcome_html_email(email_assigns)
  |> Colorstorm.Mailer.deliver_later
  conn
end
# Stores a freshly generated confirmation UUID in Redis, mapped to
# "<user id>_<user email>", logging the command outcome; returns `conn`
# unchanged so the call composes in the pipeline.
# NOTE(review): the name suggests a leftover experiment. Also, `timeout:`
# here is the client call timeout, not a key TTL — a plain SET never
# expires; if a 30-minute expiry was intended, use SET with EX. Verify.
defp test_redis(conn, user) do
  uuid = Ecto.UUID.generate()
  save_confirm_uuid = ~w(SET #{uuid} #{user.id}_#{user.email})
  case Redis.command(save_confirm_uuid, timeout: 1800000) do
    {:ok, message} -> Logger.info message
    {:error, error} -> Logger.error error
  end
  conn
end
end | 28.5 | 93 | 0.649749 |
08180b2998d69c97d02f0acf646000c36b8596d1 | 831 | ex | Elixir | server/lib/blogsley_web/helpers/auth_helper.ex | blogsley/blogsley-absinthe | 7266f80a0638f8c30c37b37296ba5d78dcf8fb8b | [
"MIT"
] | 3 | 2020-01-23T06:37:18.000Z | 2020-12-05T08:31:00.000Z | server/lib/blogsley_web/helpers/auth_helper.ex | blogsley/blogsley-absinthe | 7266f80a0638f8c30c37b37296ba5d78dcf8fb8b | [
"MIT"
] | 16 | 2020-04-04T05:13:52.000Z | 2022-03-31T11:19:20.000Z | server/lib/blogsley_web/helpers/auth_helper.ex | blogsley/blogsley | 7266f80a0638f8c30c37b37296ba5d78dcf8fb8b | [
"MIT"
] | null | null | null | defmodule Blogsley.AuthHelper do
@moduledoc false
#import Comeonin.Bcrypt, only: [checkpw: 2]
alias Blogsley.Repo
alias Blogsley.Accounts.User
@doc """
Authenticates a user by username and password.

Returns `{:ok, user}` on success and `{:error, reason}` (a string) when the
user does not exist or the password does not match.
"""
def login_with_username_pass(username, given_pass) do
  user = Repo.get_by(User, username: username)

  cond do
    user && Bcrypt.verify_pass(given_pass, user.password_hash) ->
      {:ok, user}

    user ->
      {:error, "Incorrect login credentials"}

    true ->
      # Run a dummy hash so response timing does not reveal whether the
      # username exists (same cost as a real verification).
      Bcrypt.no_user_verify()
      # Fixed: was the quoted atom :"User not found", inconsistent with the
      # string reason returned by the branch above.
      {:error, "User not found"}
  end
end
@doc """
Authenticates a user by email (case-insensitively) and password.

Returns `{:ok, user}` on success and `{:error, reason}` (a string) when the
user does not exist or the password does not match.
"""
def login_with_email_pass(email, given_pass) do
  # Emails are stored/compared lowercased.
  user = Repo.get_by(User, email: String.downcase(email))

  cond do
    user && Bcrypt.verify_pass(given_pass, user.password_hash) ->
      {:ok, user}

    user ->
      {:error, "Incorrect login credentials"}

    true ->
      # Run a dummy hash so response timing does not reveal whether the
      # email exists (same cost as a real verification).
      Bcrypt.no_user_verify()
      # Fixed: was the quoted atom :"User not found", inconsistent with the
      # string reason returned by the branch above.
      {:error, "User not found"}
  end
end
end
| 21.868421 | 67 | 0.632972 |
08180fba5d809af29a1562ff6d0297cbd6e4a8c4 | 2,206 | exs | Elixir | test/raxx/request_test.exs | varnerac/raxx | 0ca9b87cb2b10ca8b0eecaa86f10a9628da8fd93 | [
"Apache-2.0"
] | 401 | 2016-06-13T13:40:53.000Z | 2022-03-29T17:18:50.000Z | test/raxx/request_test.exs | varnerac/raxx | 0ca9b87cb2b10ca8b0eecaa86f10a9628da8fd93 | [
"Apache-2.0"
] | 148 | 2016-06-11T09:59:43.000Z | 2021-03-20T11:28:19.000Z | test/raxx/request_test.exs | varnerac/raxx | 0ca9b87cb2b10ca8b0eecaa86f10a9628da8fd93 | [
"Apache-2.0"
] | 31 | 2017-02-23T14:06:29.000Z | 2021-03-22T06:06:53.000Z | defmodule Raxx.RequestTest do
use ExUnit.Case
alias Raxx.Request
describe "uri/1" do
test "handles a basic https case correctly" do
request = Raxx.request(:GET, "https://example.com/")
uri = Request.uri(request)
assert %URI{
authority: "example.com",
fragment: nil,
host: "example.com",
path: "/",
port: 443,
query: nil,
scheme: "https",
userinfo: nil
} == uri
end
test "handles a basic http case correctly" do
request = Raxx.request(:GET, "http://example.com/")
uri = Request.uri(request)
assert %URI{
authority: "example.com",
fragment: nil,
host: "example.com",
path: "/",
port: 80,
query: nil,
scheme: "http",
userinfo: nil
} == uri
end
test "handles the case with normal path" do
request = Raxx.request(:GET, "https://example.com/foo/bar")
uri = Request.uri(request)
assert uri.path == "/foo/bar"
end
test "handles the case with duplicate slashes" do
request = Raxx.request(:GET, "https://example.com/foo//bar")
uri = Request.uri(request)
assert uri.path == "/foo//bar"
end
test "passes through the query" do
request = Raxx.request(:GET, "https://example.com?foo=bar&baz=ban")
uri = Request.uri(request)
assert uri.query == "foo=bar&baz=ban"
end
test "if there's a port number in the request, it is contained in the authority, but not the host" do
url = "https://example.com:4321/foo/bar"
request = Raxx.request(:GET, url)
uri = Request.uri(request)
assert uri.host == "example.com"
assert uri.authority == "example.com:4321"
assert uri.port == 4321
end
test "the uri won't contain userinfo" do
url = "https://example.com/"
request =
Raxx.request(:GET, url)
|> Raxx.set_header("authorization", "Basic YWxhZGRpbjpvcGVuc2VzYW1l")
uri = Request.uri(request)
assert uri.userinfo == nil
end
end
end
| 28.649351 | 105 | 0.552584 |
081816b69989dd81d7812685cb79abd70ce5581d | 141 | exs | Elixir | test/test_helper.exs | Raulkumar/bamboo | 0fcff1bf61e595eb821bc691ed9370b6a092afbf | [
"MIT"
] | 1,845 | 2016-03-29T23:36:36.000Z | 2022-03-31T19:23:38.000Z | test/test_helper.exs | Raulkumar/bamboo | 0fcff1bf61e595eb821bc691ed9370b6a092afbf | [
"MIT"
] | 407 | 2016-03-29T14:55:19.000Z | 2022-02-02T13:53:50.000Z | test/test_helper.exs | Raulkumar/bamboo | 0fcff1bf61e595eb821bc691ed9370b6a092afbf | [
"MIT"
] | 361 | 2016-03-31T13:33:22.000Z | 2022-02-25T12:38:43.000Z | ExUnit.start()
Application.ensure_all_started(:phoenix)
Application.ensure_all_started(:cowboy)
Application.ensure_all_started(:ex_machina)
| 23.5 | 43 | 0.858156 |
08181e98e63cfb34ead6c7de09c5720a398bdcaa | 4,282 | exs | Elixir | test/test_helper.exs | harlantwood/credo | 4569c9c8cfcf74a2074d6911541da0265e52ae99 | [
"MIT"
] | null | null | null | test/test_helper.exs | harlantwood/credo | 4569c9c8cfcf74a2074d6911541da0265e52ae99 | [
"MIT"
] | null | null | null | test/test_helper.exs | harlantwood/credo | 4569c9c8cfcf74a2074d6911541da0265e52ae99 | [
"MIT"
] | null | null | null | Code.require_file("support/test_application.exs", __DIR__)
Credo.Test.Application.start([], [])
ExUnit.start()
# Build an ExUnit exclusion list for tests that need a newer Elixir than the
# running VM: each listed version the VM does not satisfy adds a
# `needs_elixir: version` tag to exclude.
# NOTE(review): the word list is currently empty (~w()), so this reduce is a
# no-op until versions are added.
check_version =
  ~w()
  |> Enum.reduce([], fn version, acc ->
    # allow -dev versions so we can test before the Elixir release.
    if System.version() |> Version.match?("< #{version}-dev") do
      acc ++ [needs_elixir: version]
    else
      acc
    end
  end)

# Always exclude unimplemented tests; merge in any version-based exclusions.
exclude = Keyword.merge([to_be_implemented: true], check_version)

ExUnit.configure(exclude: exclude)
defmodule Credo.TestHelper do
  # Convenience `use` target for Credo's tests: pulls in ExUnit plus the
  # source-file and check helpers defined below.
  defmacro __using__(_) do
    quote do
      use ExUnit.Case
      import CredoSourceFileCase
      import CredoCheckCase
    end
  end
end
defmodule CredoSourceFileCase do
  alias Credo.Test.FilenameGenerator

  # Parses `source` into a %Credo.SourceFile{} under a generated filename.
  def to_source_file(source) do
    to_source_file(source, FilenameGenerator.next())
  end

  # Parses `source` under the given filename; raises when the snippet is not
  # valid Elixir so a broken test fixture fails fast.
  def to_source_file(source, filename) do
    case Credo.SourceFile.parse(source, filename) do
      %{valid?: true} = source_file ->
        source_file
      _ ->
        raise "Source could not be parsed!"
    end
  end

  # Parses a list of snippets, each under its own generated filename.
  def to_source_files(list) do
    Enum.map(list, &to_source_file/1)
  end
end
defmodule CredoCheckCase do
use ExUnit.Case
alias Credo.Execution.Issues
alias Credo.SourceFile
def refute_issues(source_file, check \\ nil, params \\ []) do
issues = issues_for(source_file, check, create_config(), params)
assert [] == issues,
"There should be no issues, got #{Enum.count(issues)}: #{to_inspected(issues)}"
issues
end
def assert_issue(source_file, callback) when is_function(callback) do
assert_issue(source_file, nil, [], callback)
end
def assert_issue(source_file, check, callback) when is_function(callback) do
assert_issue(source_file, check, [], callback)
end
def assert_issue(source_file, check \\ nil, params \\ [], callback \\ nil) do
issues = issues_for(source_file, check, create_config(), params)
refute Enum.count(issues) == 0, "There should be one issue, got none."
assert Enum.count(issues) == 1,
"There should be only 1 issue, got #{Enum.count(issues)}: #{to_inspected(issues)}"
if callback do
issues |> List.first() |> callback.()
end
issues
end
def assert_issues(source_file, callback) when is_function(callback) do
assert_issues(source_file, nil, [], callback)
end
def assert_issues(source_file, check, callback) when is_function(callback) do
assert_issues(source_file, check, [], callback)
end
def assert_issues(source_file, check \\ nil, params \\ [], callback \\ nil) do
issues = issues_for(source_file, check, create_config(), params)
assert Enum.count(issues) > 0, "There should be multiple issues, got none."
assert Enum.count(issues) > 1,
"There should be more than one issue, got: #{to_inspected(issues)}"
if callback, do: callback.(issues)
issues
end
defp issues_for(source_files, nil, exec, _) when is_list(source_files) do
Enum.flat_map(source_files, &(&1 |> get_issues_from_source_file(exec)))
end
defp issues_for(source_files, check, _exec, params)
when is_list(source_files) do
exec = create_config()
if check.run_on_all? do
:ok = check.run(source_files, exec, params)
source_files
|> Enum.flat_map(&(&1 |> get_issues_from_source_file(exec)))
else
source_files
|> check.run(params)
|> Enum.flat_map(&(&1 |> get_issues_from_source_file(exec)))
end
end
defp issues_for(%SourceFile{} = source_file, nil, exec, _) do
source_file |> get_issues_from_source_file(exec)
end
defp issues_for(%SourceFile{} = source_file, check, _exec, params) do
_issues = check.run(source_file, params)
end
def assert_trigger([issue], trigger), do: [assert_trigger(issue, trigger)]
def assert_trigger(issue, trigger) do
assert trigger == issue.trigger
issue
end
# Renders any term through the Inspect protocol into a flat string (wrapped
# at width 50), for embedding values in failure messages.
def to_inspected(value) do
  doc = Inspect.Algebra.to_doc(value, %Inspect.Opts{})
  doc |> Inspect.Algebra.format(50) |> Enum.join("")
end
defp create_config do
%Credo.Execution{}
|> Credo.Execution.SourceFiles.start_server()
|> Credo.Execution.Issues.start_server()
end
defp get_issues_from_source_file(source_file, exec) do
Issues.get(exec, source_file)
end
end
| 25.795181 | 93 | 0.686362 |
0818270fecbc74bffaaa971aaddb55fb5102ec0f | 676 | exs | Elixir | src/test/inmana_web/controllers/restaurants_controller_test.exs | igormaraujo/inmana-NLW5-elixir | 819209fd6af6556d941b4a142d8d614b2a05d585 | [
"MIT"
] | null | null | null | src/test/inmana_web/controllers/restaurants_controller_test.exs | igormaraujo/inmana-NLW5-elixir | 819209fd6af6556d941b4a142d8d614b2a05d585 | [
"MIT"
] | null | null | null | src/test/inmana_web/controllers/restaurants_controller_test.exs | igormaraujo/inmana-NLW5-elixir | 819209fd6af6556d941b4a142d8d614b2a05d585 | [
"MIT"
] | null | null | null | defmodule InmanaWeb.RestaurantsControllerTest do
use InmanaWeb.ConnCase, async: true
describe "create/2" do
test "when all params are valid, creates the user", %{conn: conn} do
params = %{name: "Restaurant 01", email: "restaurant01@mail.com"}
response =
conn
|> post(Routes.restaurants_path(conn, :create, params))
|> json_response(:created)
assert %{
"message" => "Restaurant Created",
"restaurant" => %{
"email" => "restaurant01@mail.com",
"id" => _id,
"name" => "Restaurant 01"
}
} = response
end
end
end
| 28.166667 | 72 | 0.535503 |
081830787f1c169895539c8e58b3e3a3ee3bb16e | 717 | ex | Elixir | lib/runosaari_web/gettext.ex | codevictory/runosaari.net | 2462409ccf49987fa562b0ae189247c1fe6f1ccc | [
"MIT"
] | 1 | 2021-11-02T07:43:13.000Z | 2021-11-02T07:43:13.000Z | lib/runosaari_web/gettext.ex | codevictory/runosaari.net | 2462409ccf49987fa562b0ae189247c1fe6f1ccc | [
"MIT"
] | null | null | null | lib/runosaari_web/gettext.ex | codevictory/runosaari.net | 2462409ccf49987fa562b0ae189247c1fe6f1ccc | [
"MIT"
] | 1 | 2021-05-19T18:43:09.000Z | 2021-05-19T18:43:09.000Z | defmodule RunosaariWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import RunosaariWeb.Gettext
# Simple translation
gettext("Here is the string to translate")
# Plural translation
ngettext("Here is the string to translate",
"Here are the strings to translate",
3)
# Domain-based translation
dgettext("errors", "Here is the error message to translate")
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :runosaari
end
| 28.68 | 72 | 0.680614 |
08184016e5031e012216315a9725582d669f6a73 | 353 | exs | Elixir | priv/repo/seeds.exs | Jesterovskiy/geo-tasks | 65fbfdc3d1604084aa288fefba9c548e087a387e | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | Jesterovskiy/geo-tasks | 65fbfdc3d1604084aa288fefba9c548e087a387e | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | Jesterovskiy/geo-tasks | 65fbfdc3d1604084aa288fefba9c548e087a387e | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# GeoTasks.Repo.insert!(%GeoTasks.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.416667 | 61 | 0.708215 |
0818a5a702161a9f60ca44f77e03a5b4512c6c02 | 2,344 | exs | Elixir | test/ecto/sql_dust_test.exs | rodrigo-dap/sql_dust | 8fc06bbdb103bd013509b3f5baf071c4584f0775 | [
"MIT",
"Unlicense"
] | 66 | 2016-02-04T21:31:43.000Z | 2016-03-14T19:41:26.000Z | test/ecto/sql_dust_test.exs | rodrigo-dap/sql_dust | 8fc06bbdb103bd013509b3f5baf071c4584f0775 | [
"MIT",
"Unlicense"
] | 11 | 2016-03-22T11:17:43.000Z | 2018-04-03T07:30:10.000Z | test/ecto/sql_dust_test.exs | rodrigo-dap/sql_dust | 8fc06bbdb103bd013509b3f5baf071c4584f0775 | [
"MIT",
"Unlicense"
] | 17 | 2016-05-09T16:42:14.000Z | 2021-09-21T22:30:23.000Z | defmodule Test do
# Minimal Ecto schemas used to exercise SqlDust's association-path
# resolution: weather belongs to a city, a city has weather and belongs to a
# country, a country has many cities.
defmodule Weather do
  use Ecto.Schema
  schema "weather" do
    belongs_to :city, Test.City
  end
end

defmodule City do
  use Ecto.Schema
  schema "cities" do
    has_many :local_weather, Test.Weather
    belongs_to :country, Test.Country
  end
end

defmodule Country do
  use Ecto.Schema
  schema "countries" do
    has_many :cities, Test.City
    # has_many :weather, through: [:cities, :local_weather] ???
  end
end
end
defmodule Ecto.SqlDustTest do
use ExUnit.Case
doctest Ecto.SqlDust
import Ecto.SqlDust
test "generates simple SQL based on Ecto model schemas" do
sql = Test.Weather
|> to_sql
assert sql == {"""
SELECT "w".*
FROM "weather" "w"
""", []}
end
test "generates simple SQL based on Ecto model schemas (using from)" do
sql = from(Test.Weather)
|> to_sql
assert sql == {"""
SELECT "w".*
FROM "weather" "w"
""", []}
end
test "generates SQL based on Ecto model schemas" do
sql = Test.City
|> select("id, name, country.name, local_weather.temp_lo, local_weather.temp_hi")
|> where("local_weather.wdate = '2015-09-12'")
|> to_sql
assert sql == {"""
SELECT
"c"."id",
"c"."name",
"country"."name",
"local_weather"."temp_lo",
"local_weather"."temp_hi"
FROM "cities" "c"
LEFT JOIN "countries" "country" ON "country"."id" = "c"."country_id"
LEFT JOIN "weather" "local_weather" ON "local_weather"."city_id" = "c"."id"
WHERE ("local_weather"."wdate" = '2015-09-12')
""", []}
end
test "support generating SQL based on Ecto model schemas for MySQL" do
sql = Test.City
|> select("id, name, country.name, local_weather.temp_lo, local_weather.temp_hi")
|> where(["local_weather.wdate = ?", "2015-09-12"])
|> adapter(:mysql)
|> to_sql
assert sql == {"""
SELECT
`c`.`id`,
`c`.`name`,
`country`.`name`,
`local_weather`.`temp_lo`,
`local_weather`.`temp_hi`
FROM `cities` `c`
LEFT JOIN `countries` `country` ON `country`.`id` = `c`.`country_id`
LEFT JOIN `weather` `local_weather` ON `local_weather`.`city_id` = `c`.`id`
WHERE (`local_weather`.`wdate` = ?)
""", ["2015-09-12"]}
end
end
| 25.478261 | 87 | 0.584898 |
0818b654cd97eae34c319fe7ef2f86563c9ea89d | 621 | ex | Elixir | lib/radiator/constants.ex | djschilling/radiator | 382e22904d7e400a8ffba54e9ddfd2845bc2b623 | [
"MIT"
] | null | null | null | lib/radiator/constants.ex | djschilling/radiator | 382e22904d7e400a8ffba54e9ddfd2845bc2b623 | [
"MIT"
] | null | null | null | lib/radiator/constants.ex | djschilling/radiator | 382e22904d7e400a8ffba54e9ddfd2845bc2b623 | [
"MIT"
] | null | null | null | defmodule Radiator.Constants do
@moduledoc false
# Injects shared constants and guards into the using module: the closed sets
# of permission and user-status atoms, canonical error tuples for matching
# and for API responses, plus guards over the two atom sets.
defmacro __using__(_) do
  quote do
    @otp_app Mix.Project.config()[:app]
    @permission_values [:readonly, :edit, :manage, :own]
    @auth_user_status_values [:unverified, :active, :suspended]

    # Internal match values vs. human-readable response payloads.
    @not_authorized_match {:error, :not_authorized}
    @not_authorized_response {:error, "Not Authorized"}
    @not_found_match {:error, :not_found}
    @not_found_response {:error, "Entity not found"}

    defguard is_permission(p) when p in @permission_values
    defguard is_user_status(s) when s in @auth_user_status_values
  end
end
| 28.227273 | 67 | 0.700483 |
0818db61a1037045c1514921af879838278df74c | 55 | ex | Elixir | lib/rps_web/views/layout_view.ex | cabol/rps | 813028c4b7bb18f8c7cbcf4422249a81ccb9f059 | [
"MIT"
] | 3 | 2018-05-28T15:10:11.000Z | 2020-08-02T21:14:25.000Z | lib/rps_web/views/layout_view.ex | cabol/rps | 813028c4b7bb18f8c7cbcf4422249a81ccb9f059 | [
"MIT"
] | null | null | null | lib/rps_web/views/layout_view.ex | cabol/rps | 813028c4b7bb18f8c7cbcf4422249a81ccb9f059 | [
"MIT"
] | null | null | null | defmodule RpsWeb.LayoutView do
  # View backing the app-wide layout templates; all rendering helpers
  # come in through `use RpsWeb, :view`.
  use RpsWeb, :view
end
| 13.75 | 30 | 0.781818 |
0818ebc541beb6535ab123f6fb80d47cd31aa5d8 | 1,147 | exs | Elixir | clients/you_tube_analytics/config/config.exs | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/you_tube_analytics/config/config.exs | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/you_tube_analytics/config/config.exs | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
# NOTE(review): `Mix.Config` is deprecated since Elixir 1.9 in favor of
# `import Config` (the `Config` module); consider migrating when the
# minimum supported Elixir version allows.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
#     config :you_tube_analytics_api, key: :value
#
# And access this configuration in your application as:
#
#     Application.get_env(:you_tube_analytics_api, :key)
#
# Or configure a 3rd-party app:
#
#     config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
#     import_config "#{Mix.env}.exs"
0818f561926df6ea395a00fc2515e660f9237f96 | 5,319 | exs | Elixir | apps/omg_child_chain/test/omg_child_chain/release_tasks/set_fee_feed_adapter_opts_test.exs | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | apps/omg_child_chain/test/omg_child_chain/release_tasks/set_fee_feed_adapter_opts_test.exs | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | apps/omg_child_chain/test/omg_child_chain/release_tasks/set_fee_feed_adapter_opts_test.exs | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019-2020 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.ChildChain.ReleaseTasks.SetFeeFeedAdapterOptsTest do
  # Tests for the release task that configures the fee feed adapter from
  # environment variables.
  #
  # Fixes applied in review:
  #  * the "raises" test names claimed `ArgumentError` while the assertions
  #    check `RuntimeError` — names corrected;
  #  * "stingified" typo in two test names;
  #  * `STORED_FEE_UPDATE_INTERVAL_MINUTES = "not a number"` was set and
  #    immediately overwritten without being asserted on — it now gets its
  #    own assertion, mirroring the FEE_CHANGE_TOLERANCE_PERCENT test.
  use ExUnit.Case, async: true

  alias OMG.ChildChain.Fees.FeedAdapter
  alias OMG.ChildChain.ReleaseTasks.SetFeeFeedAdapterOpts

  @app :omg_child_chain
  @config_key :fee_adapter

  # Environment variables consumed by the release task under test.
  @env_fee_adapter "FEE_ADAPTER"
  @env_fee_feed_url "FEE_FEED_URL"
  @env_fee_change_tolerance_percent "FEE_CHANGE_TOLERANCE_PERCENT"
  @env_stored_fee_update_interval_minutes "STORED_FEE_UPDATE_INTERVAL_MINUTES"

  setup do
    original_config = Application.get_all_env(@app)

    on_exit(fn ->
      # Delete all related env vars so tests stay isolated from each other
      :ok = System.delete_env(@env_fee_adapter)
      :ok = System.delete_env(@env_fee_feed_url)
      :ok = System.delete_env(@env_fee_change_tolerance_percent)
      :ok = System.delete_env(@env_stored_fee_update_interval_minutes)

      # configuration is global state so we reset it to known values in case it got fiddled before
      :ok = Enum.each(original_config, fn {key, value} -> Application.put_env(@app, key, value, persistent: true) end)
    end)

    :ok
  end

  test "sets the fee adapter to FeedAdapter and configures it with its related env vars" do
    :ok = System.put_env(@env_fee_adapter, "feed")
    :ok = System.put_env(@env_fee_feed_url, "http://example.com/fee-feed-url")
    :ok = System.put_env(@env_fee_change_tolerance_percent, "10")
    :ok = System.put_env(@env_stored_fee_update_interval_minutes, "600")

    config = SetFeeFeedAdapterOpts.load([], [])
    {adapter, opts: adapter_opts} = config[@app][@config_key]

    assert adapter == FeedAdapter
    assert adapter_opts[:fee_feed_url] == "http://example.com/fee-feed-url"
    assert adapter_opts[:fee_change_tolerance_percent] == 10
    assert adapter_opts[:stored_fee_update_interval_minutes] == 600
  end

  test "raises a RuntimeError when FEE_CHANGE_TOLERANCE_PERCENT is not a stringified integer" do
    :ok = System.put_env(@env_fee_adapter, "feed")

    :ok = System.put_env(@env_fee_change_tolerance_percent, "1.5")
    assert_raise RuntimeError, "Invalid integer: 1.5", fn -> SetFeeFeedAdapterOpts.load([], []) end

    :ok = System.put_env(@env_fee_change_tolerance_percent, "not integer")
    assert_raise RuntimeError, "Invalid integer: not integer", fn -> SetFeeFeedAdapterOpts.load([], []) end
  end

  test "raises a RuntimeError when STORED_FEE_UPDATE_INTERVAL_MINUTES is not a stringified integer" do
    :ok = System.put_env(@env_fee_adapter, "feed")

    # This value was previously set and immediately overwritten without an
    # assertion; it is now asserted on like the other invalid inputs.
    :ok = System.put_env(@env_stored_fee_update_interval_minutes, "not a number")
    assert_raise RuntimeError, "Invalid integer: not a number", fn -> SetFeeFeedAdapterOpts.load([], []) end

    :ok = System.put_env(@env_stored_fee_update_interval_minutes, "100.20")
    assert_raise RuntimeError, "Invalid integer: 100.20", fn -> SetFeeFeedAdapterOpts.load([], []) end

    :ok = System.put_env(@env_stored_fee_update_interval_minutes, "not integer")
    assert_raise RuntimeError, "Invalid integer: not integer", fn -> SetFeeFeedAdapterOpts.load([], []) end
  end

  test "does not touch the configuration that's not present as env var" do
    adapter_opts = [
      fee_feed_url: "http://example.com/fee-feed-url-original",
      fee_change_tolerance_percent: 10,
      stored_fee_update_interval_minutes: 30
    ]

    config = [
      omg_child_chain: [fee_adapter: {OMG.ChildChain.Fees.FeedAdapter, opts: adapter_opts}]
    ]

    # Intentionally not configuring @env_fee_feed_url and @env_stored_fee_update_interval_minutes
    :ok = System.put_env(@env_fee_adapter, "feed")
    :ok = System.put_env(@env_fee_change_tolerance_percent, "50")

    config = SetFeeFeedAdapterOpts.load(config, [])
    {adapter, opts: new_opts} = config[:omg_child_chain][:fee_adapter]

    assert adapter == FeedAdapter
    assert new_opts[:fee_feed_url] == adapter_opts[:fee_feed_url]
    assert new_opts[:fee_change_tolerance_percent] == 50
    assert new_opts[:stored_fee_update_interval_minutes] == adapter_opts[:stored_fee_update_interval_minutes]
  end

  test "does not change the configuration when FEE_ADAPTER is not \"feed\"" do
    :ok = System.put_env(@env_fee_adapter, "not_feed_adapter")
    :ok = System.put_env(@env_fee_feed_url, "http://example.com/fee-feed-url")
    :ok = System.put_env(@env_fee_change_tolerance_percent, "10")
    :ok = System.put_env(@env_stored_fee_update_interval_minutes, "60000")

    assert SetFeeFeedAdapterOpts.load([], []) == []
  end

  test "no other configurations got affected" do
    :ok = System.put_env(@env_fee_adapter, "feed")
    :ok = System.put_env(@env_fee_feed_url, "http://example.com/fee-feed-url")
    :ok = System.put_env(@env_fee_change_tolerance_percent, "10")
    :ok = System.put_env(@env_stored_fee_update_interval_minutes, "60000")

    config = SetFeeFeedAdapterOpts.load([], [])
    assert Keyword.delete(config[@app], @config_key) == []
  end
end
| 43.598361 | 118 | 0.740929 |
081918b0c681ebbbef68bf6c61a2ca56c08d1122 | 2,757 | ex | Elixir | lib/nostrum/shard/supervisor.ex | mckethanor/nostrum | 13a3927c872c1540266e9f1ba4bcad4182baa9bf | [
"MIT"
] | null | null | null | lib/nostrum/shard/supervisor.ex | mckethanor/nostrum | 13a3927c872c1540266e9f1ba4bcad4182baa9bf | [
"MIT"
] | null | null | null | lib/nostrum/shard/supervisor.ex | mckethanor/nostrum | 13a3927c872c1540266e9f1ba4bcad4182baa9bf | [
"MIT"
] | 1 | 2021-09-13T20:59:42.000Z | 2021-09-13T20:59:42.000Z | defmodule Nostrum.Shard.Supervisor do
  @moduledoc false
  # Supervises one `Nostrum.Shard` per Discord gateway shard plus the
  # event Producer/Cache stages, and fans commands (status updates,
  # voice-state updates) out to the shard sessions.

  use Supervisor

  alias Nostrum.Cache.Mapping.GuildShard
  alias Nostrum.Error.CacheError
  alias Nostrum.Shard
  alias Nostrum.Shard.Session
  alias Nostrum.Shard.Stage.{Cache, Producer}
  alias Nostrum.Util

  require Logger

  # Starts the supervisor (registered as `ShardSupervisor`). The shard
  # count comes from the `:num_shards` app config; `:auto` (the default)
  # uses the count recommended by the Discord gateway.
  def start_link(_args) do
    {url, gateway_shard_count} = Util.gateway()

    num_shards =
      case Application.get_env(:nostrum, :num_shards, :auto) do
        :auto ->
          gateway_shard_count

        # Configured count matches the gateway recommendation exactly.
        ^gateway_shard_count ->
          gateway_shard_count

        shard_count when is_integer(shard_count) and shard_count > 0 ->
          # Honor the configured count, but warn about the mismatch.
          Logger.warn(
            "Configured shard count (#{shard_count}) " <>
              "differs from Discord Gateway's recommended shard count (#{gateway_shard_count}). " <>
              "Consider using `num_shards: :auto` option in your Nostrum config."
          )

          shard_count

        value ->
          raise ~s("#{value}" is not a valid shard count)
      end

    Supervisor.start_link(
      __MODULE__,
      [url, num_shards],
      name: ShardSupervisor
    )
  end

  # Broadcasts a presence/status update to every shard session under this
  # supervisor by walking the two-level supervision tree.
  def update_status(status, game, stream, type) do
    ShardSupervisor
    |> Supervisor.which_children()
    |> Enum.filter(fn {_id, _pid, _type, [modules]} -> modules == Nostrum.Shard end)
    |> Enum.map(fn {_id, pid, _type, _modules} -> Supervisor.which_children(pid) end)
    |> List.flatten()
    |> Enum.map(fn {_id, pid, _type, _modules} ->
      Session.update_status(pid, status, game, stream, type)
    end)
  end

  # Sends a voice-state update through the one shard that owns `guild_id`
  # (looked up via the guild->shard cache mapping). Raises `CacheError`
  # when no shard mapping exists for the guild.
  def update_voice_state(guild_id, channel_id, self_mute, self_deaf) do
    case GuildShard.get_shard(guild_id) do
      {:ok, shard_id} ->
        ShardSupervisor
        |> Supervisor.which_children()
        |> Enum.filter(fn {id, _pid, _type, [modules]} ->
          modules == Nostrum.Shard and id == shard_id
        end)
        |> Enum.map(fn {_id, pid, _type, _modules} -> Supervisor.which_children(pid) end)
        |> List.flatten()
        |> Enum.filter(fn {_id, _pid, _type, [modules]} -> modules == Nostrum.Shard.Session end)
        |> List.first()
        # NOTE(review): `List.first/1` returns nil when no Session child is
        # found, making `elem/2` raise ArgumentError — presumably a shard
        # always has a running Session; confirm.
        |> elem(1)
        |> Session.update_voice_state(guild_id, channel_id, self_mute, self_deaf)

      {:error, :id_not_found} ->
        raise CacheError, key: guild_id, cache_name: GuildShardMapping
    end
  end

  @doc false
  # Children: the event pipeline stages plus one shard worker per shard id.
  def init([url, num_shards]) do
    children =
      [
        Producer,
        Cache
      ] ++ for i <- 0..(num_shards - 1), do: create_worker(url, i)

    Supervisor.init(children, strategy: :one_for_one, max_restarts: 3, max_seconds: 60)
  end

  @doc false
  # Child spec for a single shard, keyed by its shard number.
  def create_worker(gateway, shard_num) do
    Supervisor.child_spec(
      {Shard, [gateway, shard_num]},
      id: shard_num
    )
  end
end
| 28.71875 | 100 | 0.630758 |
08192e9925ef9c8f85c1838181aba7ecee45fb64 | 1,611 | ex | Elixir | apps/raptor/lib/raptor/schemas/user_org_assoc.ex | smartcitiesdata/smartcitiesdata | c926c25003a8ee2d09b933c521c49f674841c0b6 | [
"Apache-2.0"
] | 26 | 2019-09-20T23:54:45.000Z | 2020-08-20T14:23:32.000Z | apps/raptor/lib/raptor/schemas/user_org_assoc.ex | smartcitiesdata/smartcitiesdata | c926c25003a8ee2d09b933c521c49f674841c0b6 | [
"Apache-2.0"
] | 757 | 2019-08-15T18:15:07.000Z | 2020-09-18T20:55:31.000Z | apps/raptor/lib/raptor/schemas/user_org_assoc.ex | smartcitiesdata/smartcitiesdata | c926c25003a8ee2d09b933c521c49f674841c0b6 | [
"Apache-2.0"
] | 9 | 2019-11-12T16:43:46.000Z | 2020-03-25T16:23:16.000Z | defmodule Raptor.Schemas.UserOrgAssoc do
  @moduledoc """
  This module defines the structure for a user organization association
  """

  @derive Jason.Encoder

  # NOTE(review): `from_disassociate_event/1` sets `email` to nil, so the
  # type is effectively `String.t() | nil`; confirm and adjust the spec.
  @type t :: %__MODULE__{
          user_id: String.t(),
          org_id: String.t(),
          email: String.t()
        }

  defstruct [
    :user_id,
    :org_id,
    :email
  ]

  @doc """
  Converts a `SmartCity.UserOrganizationAssociate` event to a
  `Raptor.Schemas.UserOrgAssoc`
  """
  @spec from_associate_event(SmartCity.UserOrganizationAssociate.t()) ::
          {:ok, Raptor.Schemas.UserOrgAssoc.t()}
  def from_associate_event(%SmartCity.UserOrganizationAssociate{} = assoc) do
    struct = %__MODULE__{
      user_id: assoc.subject_id,
      org_id: assoc.org_id,
      email: assoc.email
    }

    {:ok, struct}
  end

  @doc """
  Converts a `SmartCity.UserOrganizationDisassociate` event to a
  `Raptor.Schemas.UserOrgAssoc` (the `email` field is cleared to `nil`)
  """
  @spec from_disassociate_event(SmartCity.UserOrganizationDisassociate.t()) ::
          {:ok, Raptor.Schemas.UserOrgAssoc.t()}
  def from_disassociate_event(%SmartCity.UserOrganizationDisassociate{} = assoc) do
    struct = %__MODULE__{
      user_id: assoc.subject_id,
      org_id: assoc.org_id,
      email: nil
    }

    {:ok, struct}
  end

  @doc """
  Convert a `Raptor.Schemas.UserOrgAssoc` into JSON
  """
  @spec encode(Raptor.Schemas.UserOrgAssoc.t()) ::
          {:ok, String.t()} | {:error, Jason.EncodeError.t() | Exception.t()}
  def encode(%__MODULE__{} = user_org_assoc) do
    Jason.encode(user_org_assoc)
  end

  # Bang variant of `encode/1`: returns the JSON binary or raises
  # `Jason.EncodeError`.
  def encode!(%__MODULE__{} = user_org_assoc) do
    Jason.encode!(user_org_assoc)
  end
end
| 25.571429 | 83 | 0.657976 |
08194809ebfec02ca72e6c2cad9cafe9e8563fe5 | 2,614 | ex | Elixir | lib/rfx_cli/main.ex | andyl/rfxi | 9007c75693d643555c45a20e9634dd4b3867deba | [
"MIT"
] | 1 | 2021-08-10T14:46:10.000Z | 2021-08-10T14:46:10.000Z | lib/rfx_cli/main.ex | andyl/rfxi | 9007c75693d643555c45a20e9634dd4b3867deba | [
"MIT"
] | 2 | 2021-06-22T14:12:37.000Z | 2021-06-28T05:06:23.000Z | lib/rfx_cli/main.ex | andyl/rfxi | 9007c75693d643555c45a20e9634dd4b3867deba | [
"MIT"
defmodule RfxCli.Main do
  @moduledoc """
  Main CLI module.

  Drives the CLI pipeline: build state, generate the Optimus command
  spec, parse `argv`, extract/validate/execute the command, then encode
  the resulting changeset as JSON and print it. Each stage passes the
  state map along and short-circuits on `{:error, reason}`.
  """

  # READING FROM STDIO
  # defp get_stdin do
  #   case IO.read(:stdio, :line) do
  #     :eof -> ""
  #     {:error, _} -> ""
  #     data -> data
  #   end
  # end

  # Fixed in review: `alias RfxCli.Main` was declared twice.
  alias RfxCli.Main
  alias RfxCli.State

  # Escript-style entry points; delegate to `run/1` / `run/2`.
  def main(argv), do: run(argv)
  def main(argv, state), do: run(argv, state)

  # Accepts a raw command-line string and splits it into an argv list.
  def run(argv) when is_binary(argv) do
    argv
    |> OptionParser.split()
    |> run()
  end

  # Runs the whole pipeline and prints the JSON result.
  def run(argv) do
    %{argv: argv}
    |> run_core()
    |> print_output()
  end

  # Like `run/1` but seeds the pipeline with an initial state map.
  def run(argv, initial_state) when is_map(initial_state) do
    initial_state
    |> Map.merge(%{argv: argv})
    |> run_core()
    |> print_output()
  end

  # Pipeline without the printing step; returns the final state (with
  # `:json` assigned) or `{:error, reason}`.
  def run_core(argv) when is_binary(argv) do
    argv
    |> OptionParser.split()
    |> run_core()
  end

  def run_core(argv) when is_list(argv) do
    %{argv: argv}
    |> run_core()
  end

  def run_core(initial_state) do
    initial_state
    |> new_state()
    |> gen_optimus()
    |> parse_argv()
    |> validate_parse()
    |> extract_command()
    |> validate_command()
    |> execute_command()
    |> encode_changeset()
  end

  def new_state(initial_state) do
    initial_state
    |> State.new()
  end

  # Generates the Optimus CLI specification for parsing.
  def gen_optimus(state) do
    state
    |> Main.GenOptimus.run()
  end

  def parse_argv({:error, msg}), do: {:error, msg}

  # Convenience: parse a raw command-line string from scratch.
  def parse_argv(input) when is_binary(input) do
    argv = input |> OptionParser.split()

    %{argv: argv}
    |> new_state()
    |> gen_optimus()
    |> parse_argv()
  end

  def parse_argv(state) do
    state
    |> Main.ParseArgv.run()
  end

  # Placeholder stage — currently performs no validation.
  def validate_parse(state) do
    state
  end

  def extract_command({:error, msg}), do: {:error, msg}

  def extract_command(state) do
    state
    |> Main.ExtractCommand.run()
  end

  # Placeholder stage — currently performs no validation.
  def validate_command(state) do
    state
  end

  def execute_command({:error, msg}), do: {:error, msg}

  def execute_command(state) do
    state
    |> Main.ExecuteCommand.run()
  end

  def encode_changeset({:error, msg}), do: {:error, msg}

  # NOTE(review): a bare `:ok` changeset is mapped to `{:error, "Closed"}`;
  # the intent isn't clear from this file — confirm.
  def encode_changeset(%{changeset: :ok}) do
    {:error, "Closed"}
  end

  def encode_changeset(state) do
    json =
      state.changeset
      |> Enum.map(&unstruct/1)
      |> Jason.encode!()

    State.assign(state, :json, json)
  end

  def print_output({:error, msg}), do: {:error, msg}

  def print_output(state) do
    # NOTE(review): this matches only `true`/`false`; a missing
    # `:op_oneline` key (nil) would raise CaseClauseError — presumably
    # Optimus always sets the flag; confirm.
    case state.command_args[:op_oneline] do
      true -> state.json |> IO.puts()
      false -> state.json |> Jason.Formatter.pretty_print() |> IO.puts()
    end
  end

  # Converts a struct to a plain map so Jason can encode it.
  defp unstruct(struct) do
    struct
    |> Map.from_struct()
  end
end
| 18.408451 | 72 | 0.607116 |
08194aa64a3d5e76141f358d27f4fa34d028ab9b | 1,877 | ex | Elixir | lib/fun_land/traversable.ex | gerbal/elixir_fun_land | 86adc20dc6af8c7c84d39dffd92195c3e9571009 | [
"MIT"
] | null | null | null | lib/fun_land/traversable.ex | gerbal/elixir_fun_land | 86adc20dc6af8c7c84d39dffd92195c3e9571009 | [
"MIT"
] | null | null | null | lib/fun_land/traversable.ex | gerbal/elixir_fun_land | 86adc20dc6af8c7c84d39dffd92195c3e9571009 | [
"MIT"
] | null | null | null | defmodule FunLand.Traversable do
  @moduledoc """
  WARNING: This module is not completely stable yet, and should therefore
  not be used in practice yet.
  """

  # A traversable is any algebraic data type FunLand knows about.
  @type traversable(_) :: FunLand.adt

  # Implementations walk the structure, mapping each element into an
  # Applicative and collecting the results inside that Applicative.
  @callback traverse(traversable(a), module(), (a -> FunLand.Applicative.applicative(b))) :: FunLand.Applicative.applicative(traversable(b)) when a: any, b: any

  defmacro __using__(_opts) do
    quote do
      use FunLand.Mappable
      use FunLand.Reducable
      @behaviour FunLand.Traversable
    end
  end

  @doc """
  The `Module` argument is there to know what kind of
  Applicative structure to return in the case the `traversable`
  that is passed is empty (meaning that `fun` is never called).
  As many definitions of `traverse` are recursive, this case frequently occurs.
  Thus: `result_module` should be the same name as the name of thing you return from `fun`.
  """
  # stdlib structs
  # Compile-time loop: generates one `traverse/3` clause per known
  # standard-library struct, dispatching to its FunLand wrapper module.
  for {stdlib_module, module} <- FunLand.Builtin.__stdlib_struct_modules__ do
    def traverse(traversable = %unquote(stdlib_module){}, result_module_or_datatype, fun) do
      result_module = FunLand.Helper.map_datatype_to_module(result_module_or_datatype)
      apply(unquote(module), :traverse, [traversable, result_module, fun])
    end
  end

  # Custom Structs
  # Any other struct is expected to implement the behaviour itself.
  def traverse(traversable = %module{}, result_module_or_datatype, fun) do
    result_module = FunLand.Helper.map_datatype_to_module(result_module_or_datatype)
    module.traverse(traversable, result_module, fun)
  end

  # Builtin
  use FunLand.Helper.GuardMacros
  # Compile-time loop: one guarded clause per builtin type (lists, maps,
  # etc.), dispatching to the matching FunLand builtin wrapper module.
  for {guard, module} <- FunLand.Builtin.__builtin__ do
    def traverse(traversable, result_module_or_datatype, fun) when unquote(guard)(traversable) do
      result_module = FunLand.Helper.map_datatype_to_module(result_module_or_datatype)
      apply(unquote(module), :traverse, [traversable, result_module, fun])
    end
  end
end
| 34.127273 | 160 | 0.741609 |
0819549e00b3890535d67afaf39aa094f2b076cd | 659 | ex | Elixir | lib/pigeon/tasks.ex | VoiSmart/pigeon | 54735f869dd5500991dfa0b13dc71ca2191cd0ff | [
"MIT"
] | 545 | 2015-09-06T15:50:32.000Z | 2022-03-21T08:21:11.000Z | lib/pigeon/tasks.ex | VoiSmart/pigeon | 54735f869dd5500991dfa0b13dc71ca2191cd0ff | [
"MIT"
] | 178 | 2016-01-14T22:21:20.000Z | 2022-02-18T22:34:30.000Z | lib/pigeon/tasks.ex | VoiSmart/pigeon | 54735f869dd5500991dfa0b13dc71ca2191cd0ff | [
"MIT"
defmodule Pigeon.Tasks do
  @moduledoc false

  # Runs a notification's `on_response` callback (stored under
  # `__meta__.on_response`) on the `Pigeon.Tasks` task supervisor.
  # Supported callback shapes: `{module, fun}`, `{module, fun, extra_args}`,
  # or a 1-arity anonymous function. A nil callback is a no-op.

  def process_on_response(%{__meta__: %{on_response: nil}}), do: :ok

  def process_on_response(%{__meta__: %{on_response: {mod, fun}}} = notification) do
    start_task(fn -> apply(mod, fun, [notification]) end)
  end

  def process_on_response(%{__meta__: %{on_response: {mod, fun, extra_args}}} = notification) do
    # The notification is always the first argument, followed by the extras.
    start_task(fn -> apply(mod, fun, [notification | extra_args]) end)
  end

  def process_on_response(%{__meta__: %{on_response: callback}} = notification)
      when is_function(callback, 1) do
    start_task(fn -> callback.(notification) end)
  end

  # Fire-and-forget: run the callback under the named task supervisor.
  defp start_task(fun), do: Task.Supervisor.start_child(Pigeon.Tasks, fun)
end
| 28.652174 | 76 | 0.660091 |
08195eb30f3569e740dc135c70cffc67406bf2ce | 2,570 | ex | Elixir | lib/real_world/account.ex | mihkelal/mihkels-elixir-phoenix-realworld-example-app | f241093522b394c816e7bab8ccf5f18fdd854ac6 | [
"MIT"
] | null | null | null | lib/real_world/account.ex | mihkelal/mihkels-elixir-phoenix-realworld-example-app | f241093522b394c816e7bab8ccf5f18fdd854ac6 | [
"MIT"
] | 4 | 2021-03-23T19:01:21.000Z | 2021-03-30T18:27:24.000Z | lib/real_world/account.ex | mihkelal/mihkels-elixir-phoenix-realworld-example-app | f241093522b394c816e7bab8ccf5f18fdd854ac6 | [
"MIT"
] | null | null | null | defmodule RealWorld.Account do
  @moduledoc """
  The Account context.
  """

  import Ecto.Query, warn: false
  alias RealWorld.Repo

  alias RealWorld.Account.{Following, User}

  @doc """
  Returns the list of users.

  ## Examples

      iex> list_users()
      [%User{}, ...]

  """
  def list_users do
    Repo.all(User)
  end

  @doc """
  Gets a single user.

  Raises `Ecto.NoResultsError` if the User does not exist.

  ## Examples

      iex> get_user!(123)
      %User{}

      iex> get_user!(456)
      ** (Ecto.NoResultsError)

  """
  def get_user!(id), do: Repo.get!(User, id)

  # Returns the `%User{}` with the given username, or nil when none exists.
  def get_user_by_username(username), do: Repo.get_by(User, username: username)

  @doc """
  Creates a user.

  ## Examples

      iex> create_user(%{field: value})
      {:ok, %User{}}

      iex> create_user(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_user(attrs \\ %{}) do
    %User{}
    |> User.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a user.

  ## Examples

      iex> update_user(user, %{field: new_value})
      {:ok, %User{}}

      iex> update_user(user, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_user(%User{} = user, attrs) do
    user
    |> User.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking user changes.

  ## Examples

      iex> change_user(user)
      %Ecto.Changeset{data: %User{}}

  """
  def change_user(%User{} = user, attrs \\ %{}) do
    User.changeset(user, attrs)
  end

  # Looks up the user by email and verifies the plaintext password against
  # the stored Argon2 hash in the `:password` field.
  # NOTE(review): the early "User not found" return skips hashing entirely,
  # which leaks account existence via response timing — `Argon2.no_user_verify/1`
  # would equalize it; confirm whether that matters here.
  def find_user_and_check_password(%{"email" => email, "password" => password}) do
    case Repo.get_by(User, email: email) do
      nil -> {:error, "User not found"}
      user -> Argon2.check_pass(user, password, hash_key: :password)
    end
  end

  # Issues a Guardian access JWT for the given user, dropping the claims
  # from the `{:ok, jwt, claims}` success tuple.
  def encode_and_sign_user_id(user) do
    case RealWorldWeb.Guardian.encode_and_sign(user, %{}, token_type: :access) do
      {:ok, jwt, _} -> {:ok, jwt}
      {:error, message} -> {:error, message}
    end
  end

  # Records that `follower` follows `followee`.
  def create_following(%User{} = follower, %User{} = followee) do
    %Following{}
    |> Following.changeset(%{follower_id: follower.id, followee_id: followee.id})
    |> Repo.insert()
  end

  # Removes the follow relationship; raises `Ecto.NoResultsError` when it
  # does not exist (note the bang on `get_by!`).
  def delete_following(%User{} = follower, %User{} = followee) do
    Following
    |> Repo.get_by!(follower_id: follower.id, followee_id: followee.id)
    |> Repo.delete()
  end

  # True when `follower` already follows `followee`.
  def following_exists?(%User{} = follower, %User{} = followee) do
    Following
    |> from
    |> where(follower_id: ^follower.id, followee_id: ^followee.id)
    |> Repo.exists?()
  end
end
| 20.725806 | 82 | 0.607393 |
08196fc2b73e8e5ed7b4a6d923e45752f7e6fb61 | 6,548 | ex | Elixir | lib/b2_client/backend/memory.ex | jgelens/b2_client | 568d33921b841ff6f363ab19bf79e13541e07557 | [
"MIT"
] | 12 | 2016-06-12T06:56:36.000Z | 2021-12-09T21:58:25.000Z | lib/b2_client/backend/memory.ex | jgelens/b2_client | 568d33921b841ff6f363ab19bf79e13541e07557 | [
"MIT"
] | 9 | 2016-07-18T14:16:07.000Z | 2021-12-05T02:07:57.000Z | lib/b2_client/backend/memory.ex | jgelens/b2_client | 568d33921b841ff6f363ab19bf79e13541e07557 | [
"MIT"
] | 12 | 2016-07-18T14:05:48.000Z | 2021-12-06T22:00:04.000Z | defmodule B2Client.Backend.Memory do
  @moduledoc """
  A mock to store test data.

  Implements the `B2Client.Backend` behaviour entirely in memory behind a
  single named GenServer, so tests can exercise B2 interactions without
  any network access. `init/1` seeds one valid account, auth token and
  bucket (see `initial_state/0`); `reset!/0` restores that seed state.
  """

  use GenServer

  alias B2Client.{Authorization, Bucket, File, UploadAuthorization}

  @behaviour B2Client.Backend

  def start_link do
    GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
  end

  @doc """
  Resets the backend back to the initial state.
  """
  def reset! do
    GenServer.call(__MODULE__, :reset)
  end

  ## B2Client.Backend callbacks ##
  # Each public callback simply forwards to the GenServer; the real work
  # happens in `handle_call/3` / `execute_rpc/3` below.

  def authenticate(application_key_id, application_key) do
    GenServer.call(__MODULE__, {:authenticate, application_key_id, application_key})
  end

  def get_bucket(b2, bucket_name) do
    GenServer.call(__MODULE__, {:get_bucket, b2, bucket_name})
  end

  def get_upload_url(b2, bucket) do
    GenServer.call(__MODULE__, {:get_upload_url, b2, bucket})
  end

  # Uploading to a bucket first obtains an upload authorization, then
  # delegates to the `%UploadAuthorization{}` clause.
  def upload(b2, %Bucket{} = bucket, iodata, filename) do
    {:ok, auth} = get_upload_url(b2, bucket)
    upload(b2, auth, iodata, filename)
  end

  def upload(b2, %UploadAuthorization{} = auth, iodata, filename) do
    GenServer.call(__MODULE__, {:upload, b2, auth, iodata, filename})
  end

  def download(b2, bucket, path) do
    GenServer.call(__MODULE__, {:download, b2, bucket, path})
  end

  def download_head(b2, bucket, path) do
    GenServer.call(__MODULE__, {:download_head, b2, bucket, path})
  end

  def delete(b2, bucket, path) do
    GenServer.call(__MODULE__, {:delete, b2, bucket, path})
  end

  # def delete(b2, file) do
  #   GenServer.call(__MODULE__, {:delete, b2, file})
  # end

  # def list_file_versions(b2, bucket, path) do
  #   GenServer.call(__MODULE__, {:list_file_versions, b2, bucket, path})
  # end

  ## GenServer Callbacks ##

  def init(:ok) do
    {:ok, initial_state()}
  end

  # Seed data: one account keyed by application key id, one pre-issued
  # auth token, one private bucket, and no files.
  defp initial_state do
    %{
      accounts: %{
        "valid_application_key_id" => %{
          api_url: "https://api900.backblaze.example",
          download_url: "https://f900.backblaze.example",
          account_id: "valid_account_id",
          application_key: "valid_application_key"
        }
      },
      authorizations: %{
        "fake_auth_token" => "valid_application_key_id"
      },
      buckets: %{
        "valid_account_id" => [
          %Bucket{
            bucket_name: "ex-b2-client-test-bucket",
            bucket_id: "8c654d7edfe71ef507ed5c27d6b787a5",
            bucket_type: "allPrivate",
            account_id: "valid_account_id"
          }
        ]
      },
      files: %{}
    }
  end

  # Authentication: verify the application key, strip the secret and hand
  # back an `%Authorization{}` carrying the canned auth token.
  def handle_call({:authenticate, application_key_id, application_key}, _from, state) do
    r =
      case Map.fetch(state.accounts, application_key_id) do
        {:ok, %{application_key: ^application_key} = acct} ->
          client = Map.drop(acct, [:application_key])

          client =
            Map.merge(%Authorization{}, client)
            |> Map.put(:authorization_token, "fake_auth_token")

          {:ok, client}

        _ ->
          {:error, :authentication_failed}
      end

    {:reply, r, state}
  end

  def handle_call(:reset, _from, _state) do
    {:reply, :ok, initial_state()}
  end

  # Catch-all for the remaining RPC tuples: element 1 is always the caller's
  # `%Authorization{}`; validate its token before dispatching.
  def handle_call(rpc, _from, state) do
    b2 = elem(rpc, 1)
    auth = validate_authentication!(b2, state)
    {r, state} = execute_rpc(auth, rpc, state)
    {:reply, r, state}
  end

  # NOTE(review): despite the bang, this returns `false` for an unknown
  # token rather than raising; `execute_rpc(false, ...)` turns that into
  # `{:error, :token_wrong}`.
  defp validate_authentication!(b2, state) do
    case Map.fetch(state.authorizations, b2.authorization_token) do
      {:ok, account_id} ->
        client = Map.drop(state.accounts[account_id], [:application_key])

        Map.merge(%Authorization{}, client)
        |> Map.put(:authorization_token, "fake_auth_token")

      :error ->
        false
    end
  end

  # Every `execute_rpc/3` clause returns `{reply, new_state}`.

  defp execute_rpc(false, _, state), do: {{:error, :token_wrong}, state}

  defp execute_rpc(b2, {:get_bucket, _, bucket_name}, state) do
    b =
      Enum.find(state.buckets[b2.account_id], fn
        %{bucket_name: ^bucket_name} -> true
        _ -> false
      end)

    if b do
      {{:ok, b}, state}
    else
      {{:error, :bucket_not_found}, state}
    end
  end

  # Upload URLs are canned: the mock only needs the bucket id round-tripped.
  defp execute_rpc(_b2, {:get_upload_url, _, bucket}, state) do
    {{:ok,
      %UploadAuthorization{
        bucket_id: bucket.bucket_id,
        upload_url:
          "https://pod-000-1000-00.backblaze.example/b2api/v1/b2_upload_file?cvt=c000-1000-00&bucket=#{
            bucket.bucket_id
          }",
        authorization_token: "fake_upload_auth_token"
      }}, state}
  end

  # Stores the uploaded bytes keyed by their would-be download URL and
  # returns a `%File{}` with computed length/SHA1 metadata.
  defp execute_rpc(b2, {:upload, _, auth, iodata, filename}, state) do
    bucket_id = auth.bucket_id

    bucket =
      Enum.find(state.buckets[b2.account_id], fn
        %{bucket_id: ^bucket_id} -> true
        _ -> false
      end)

    uri = get_download_url(b2, bucket, filename)
    bytes = IO.iodata_to_binary(iodata)

    file =
      %File{
        bucket_id: auth.bucket_id,
        file_id: "some-random-file-id",
        file_name: filename,
        content_length: byte_size(bytes),
        content_sha1: sha1hash(bytes),
        content_type: "text/plain",
        file_info: %{}
      }
      |> Map.put(:file_contents, bytes)

    state = update_in(state.files, &Map.put(&1, uri, file))

    {
      {:ok, file},
      state
    }
  end

  defp execute_rpc(b2, {:download, _, bucket, path}, state) do
    uri = get_download_url(b2, bucket, path)

    case Map.fetch(state.files, uri) do
      {:ok, file} ->
        {{:ok, file.file_contents}, state}

      _ ->
        # Mirrors the HTTP backend's 404 error shape.
        {{:error, {:http_404, ""}}, state}
    end
  end

  # HEAD is implemented in terms of :download; success maps to the byte size.
  defp execute_rpc(b2, {:download_head, _, bucket, path}, state) do
    case execute_rpc(b2, {:download, nil, bucket, path}, state) do
      {{:ok, contents}, state} ->
        file_size = byte_size(contents)
        {{:ok, file_size}, state}

      other ->
        other
    end
  end

  # Deleting is idempotent (Map.delete on a missing key is a no-op).
  # Note the reply here is the bare `:ok` atom, not an `{:ok, _}` tuple.
  defp execute_rpc(b2, {:delete, _, bucket, path}, state) do
    uri = get_download_url(b2, bucket, path)
    state = update_in(state.files, &Map.delete(&1, uri))
    {:ok, state}
  end

  # defp execute_rpc(b2, {:list_file_versions, _, bucket, path}, state) do
  #   uri = get_download_url(b2, bucket, path)

  #   file_versions = Enum.filter(state.files, fn
  #     {^uri, _} -> true
  #     _ -> false
  #   end)

  #   {{:ok, file_versions}, state}
  # end

  # Fallback for unrecognized RPC tuples; the inspect aids test debugging.
  defp execute_rpc(_, bad_rpc, state) do
    IO.inspect(bad_rpc)
    {{:error, :badrpc}, state}
  end

  # Lowercase hex SHA-1 of the uploaded bytes, matching B2's content_sha1.
  defp sha1hash(bytes) do
    :crypto.hash(:sha, bytes) |> Base.encode16(case: :lower)
  end

  # Canonical download URL; doubles as the key for the in-memory file store.
  defp get_download_url(%{download_url: download_url}, %{bucket_name: bucket_name}, filename) do
    download_url <> "/file/" <> bucket_name <> "/" <> filename
  end
end
| 25.881423 | 103 | 0.61744 |
0819785cd1285521cfdfe279306ec6ef491c374d | 2,007 | ex | Elixir | clients/domains/lib/google_api/domains/v1beta1/model/configure_management_settings_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/domains/lib/google_api/domains/v1beta1/model/configure_management_settings_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/domains/lib/google_api/domains/v1beta1/model/configure_management_settings_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Domains.V1beta1.Model.ConfigureManagementSettingsRequest do
@moduledoc """
Request for the `ConfigureManagementSettings` method.
## Attributes
* `managementSettings` (*type:* `GoogleApi.Domains.V1beta1.Model.ManagementSettings.t`, *default:* `nil`) - Fields of the `ManagementSettings` to update.
* `updateMask` (*type:* `String.t`, *default:* `nil`) - Required. The field mask describing which fields to update as a comma-separated list. For example, if only the transfer lock is being updated, the `update_mask` would be `"transfer_lock_state"`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:managementSettings => GoogleApi.Domains.V1beta1.Model.ManagementSettings.t(),
:updateMask => String.t()
}
field(:managementSettings, as: GoogleApi.Domains.V1beta1.Model.ManagementSettings)
field(:updateMask)
end
defimpl Poison.Decoder, for: GoogleApi.Domains.V1beta1.Model.ConfigureManagementSettingsRequest do
def decode(value, options) do
GoogleApi.Domains.V1beta1.Model.ConfigureManagementSettingsRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Domains.V1beta1.Model.ConfigureManagementSettingsRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 40.14 | 254 | 0.758346 |
081989342fa0ae9e693c636b8f6eb11084463784 | 421 | exs | Elixir | priv/repo/migrations/20181228232731_create_craigslist_results_table.exs | ChristopherJHart/disclist | 3a1ce880856ad87f588e902e72004b887064eeb8 | [
"Apache-2.0"
] | 1 | 2021-11-18T03:25:47.000Z | 2021-11-18T03:25:47.000Z | priv/repo/migrations/20181228232731_create_craigslist_results_table.exs | ChristopherJHart/disclist | 3a1ce880856ad87f588e902e72004b887064eeb8 | [
"Apache-2.0"
] | null | null | null | priv/repo/migrations/20181228232731_create_craigslist_results_table.exs | ChristopherJHart/disclist | 3a1ce880856ad87f588e902e72004b887064eeb8 | [
"Apache-2.0"
] | 1 | 2021-11-18T03:25:48.000Z | 2021-11-18T03:25:48.000Z | defmodule Disclist.Repo.Migrations.CreateCraigslistResultsTable do
  use Ecto.Migration

  # Creates the table backing scraped Craigslist search results.
  def change do
    create table("craigslist_results") do
      # Craigslist's own listing identifier — presumably unique per post;
      # TODO(review): consider a unique index if so.
      add :data_id, :bigint
      add :datetime, :naive_datetime
      # NOTE(review): floats are lossy for monetary values; :decimal is the
      # usual choice — confirm precision requirements.
      add :price, :float
      add :title, :text
      add :url, :string
      add :postingbody, :text
      add :image_urls, {:array, :string}
      # Presumably marks results already pushed/announced downstream — confirm.
      add :published, :boolean, default: false
    end
  end
end
| 24.764706 | 66 | 0.662708 |
0819893b5cb9fc469436fddd3b8f88894483bf45 | 11,341 | ex | Elixir | lib/phoenix/tracker.ex | Goose97/phoenix_pubsub | 35e7c54a6f32babea1638f6c5b74575a66a2fe6f | [
"MIT"
] | 524 | 2016-01-22T23:51:56.000Z | 2022-03-26T00:33:43.000Z | lib/phoenix/tracker.ex | Goose97/phoenix_pubsub | 35e7c54a6f32babea1638f6c5b74575a66a2fe6f | [
"MIT"
] | 136 | 2016-01-22T01:51:57.000Z | 2022-03-23T11:13:56.000Z | lib/phoenix/tracker.ex | Goose97/phoenix_pubsub | 35e7c54a6f32babea1638f6c5b74575a66a2fe6f | [
"MIT"
] | 132 | 2016-01-21T21:20:12.000Z | 2022-03-22T09:33:12.000Z | defmodule Phoenix.Tracker do
@moduledoc ~S"""
Provides distributed presence tracking to processes.
Tracker shards use a heartbeat protocol and CRDT to replicate presence
information across a cluster in an eventually consistent, conflict-free
manner. Under this design, there is no single source of truth or global
process. Each node runs a pool of trackers and node-local changes are
replicated across the cluster and handled locally as a diff of changes.
## Implementing a Tracker
To start a tracker, first add the tracker to your supervision tree:
children = [
# ...
{MyTracker, [name: MyTracker, pubsub_server: MyApp.PubSub]}
]
Next, implement `MyTracker` with support for the `Phoenix.Tracker`
behaviour callbacks. An example of a minimal tracker could include:
defmodule MyTracker do
use Phoenix.Tracker
def start_link(opts) do
opts = Keyword.merge([name: __MODULE__], opts)
Phoenix.Tracker.start_link(__MODULE__, opts, opts)
end
def init(opts) do
server = Keyword.fetch!(opts, :pubsub_server)
{:ok, %{pubsub_server: server, node_name: Phoenix.PubSub.node_name(server)}}
end
def handle_diff(diff, state) do
for {topic, {joins, leaves}} <- diff do
for {key, meta} <- joins do
IO.puts "presence join: key \"#{key}\" with meta #{inspect meta}"
msg = {:join, key, meta}
Phoenix.PubSub.direct_broadcast!(state.node_name, state.pubsub_server, topic, msg)
end
for {key, meta} <- leaves do
IO.puts "presence leave: key \"#{key}\" with meta #{inspect meta}"
msg = {:leave, key, meta}
Phoenix.PubSub.direct_broadcast!(state.node_name, state.pubsub_server, topic, msg)
end
end
{:ok, state}
end
end
Trackers must implement `start_link/1`, `c:init/1`, and `c:handle_diff/2`.
The `c:init/1` callback allows the tracker to manage its own state when
running within the `Phoenix.Tracker` server. The `handle_diff` callback
is invoked with a diff of presence join and leave events, grouped by
topic. As replicas heartbeat and replicate data, the local tracker state is
merged with the remote data, and the diff is sent to the callback. The
handler can use this information to notify subscribers of events, as
done above.
## Special Considerations
Operations within `handle_diff/2` happen *in the tracker server's context*.
Therefore, blocking operations should be avoided when possible, and offloaded
to a supervised task when required. Also, a crash in the `handle_diff/2` will
crash the tracker server, so operations that may crash the server should be
offloaded with a `Task.Supervisor` spawned process.
"""
use Supervisor
require Logger
alias Phoenix.Tracker.Shard
@type presence :: {key :: String.t, meta :: map}
@type topic :: String.t
@callback init(Keyword.t) :: {:ok, state :: term} | {:error, reason :: term}
@callback handle_diff(%{topic => {joins :: [presence], leaves :: [presence]}}, state :: term) :: {:ok, state :: term}
defmacro __using__(_opts) do
quote location: :keep do
@behaviour Phoenix.Tracker
if Module.get_attribute(__MODULE__, :doc) == nil do
@doc """
Returns a specification to start this module under a supervisor.
See `Supervisor`.
"""
end
def child_spec(init_arg) do
default = %{
id: __MODULE__,
start: {__MODULE__, :start_link, [init_arg]},
type: :supervisor
}
end
defoverridable child_spec: 1
end
end
## Client
@doc """
Tracks a presence.
* `server_name` - The registered name of the tracker server
* `pid` - The Pid to track
* `topic` - The `Phoenix.PubSub` topic for this presence
* `key` - The key identifying this presence
* `meta` - The map of metadata to attach to this presence
A process may be tracked multiple times, provided the topic and key pair
are unique for any prior calls for the given process.
## Examples
iex> Phoenix.Tracker.track(MyTracker, self(), "lobby", u.id, %{stat: "away"})
{:ok, "1WpAofWYIAA="}
iex> Phoenix.Tracker.track(MyTracker, self(), "lobby", u.id, %{stat: "away"})
{:error, {:already_tracked, #PID<0.56.0>, "lobby", "123"}}
"""
@spec track(atom, pid, topic, term, map) :: {:ok, ref :: binary} | {:error, reason :: term}
def track(tracker_name, pid, topic, key, meta) when is_pid(pid) and is_map(meta) do
tracker_name
|> Shard.name_for_topic(topic, pool_size(tracker_name))
|> GenServer.call({:track, pid, topic, key, meta})
end
@doc """
Untracks a presence.
* `server_name` - The registered name of the tracker server
* `pid` - The Pid to untrack
* `topic` - The `Phoenix.PubSub` topic to untrack for this presence
* `key` - The key identifying this presence
All presences for a given Pid can be untracked by calling the
`Phoenix.Tracker.untrack/2` signature of this function.
## Examples
iex> Phoenix.Tracker.untrack(MyTracker, self(), "lobby", u.id)
:ok
iex> Phoenix.Tracker.untrack(MyTracker, self())
:ok
"""
@spec untrack(atom, pid, topic, term) :: :ok
def untrack(tracker_name, pid, topic, key) when is_pid(pid) do
tracker_name
|> Shard.name_for_topic(topic, pool_size(tracker_name))
|> GenServer.call({:untrack, pid, topic, key})
end
def untrack(tracker_name, pid) when is_pid(pid) do
shard_multicall(tracker_name, {:untrack, pid})
:ok
end
@doc """
Updates a presence's metadata.
* `server_name` - The registered name of the tracker server
* `pid` - The Pid being tracked
* `topic` - The `Phoenix.PubSub` topic to update for this presence
* `key` - The key identifying this presence
* `meta` - Either a new map of metadata to attach to this presence,
or a function. The function will receive the current metadata as
input and the return value will be used as the new metadata
## Examples
iex> Phoenix.Tracker.update(MyTracker, self(), "lobby", u.id, %{stat: "zzz"})
{:ok, "1WpAofWYIAA="}
iex> Phoenix.Tracker.update(MyTracker, self(), "lobby", u.id, fn meta -> Map.put(meta, :away, true) end)
{:ok, "1WpAofWYIAA="}
"""
@spec update(atom, pid, topic, term, map | (map -> map)) :: {:ok, ref :: binary} | {:error, reason :: term}
def update(tracker_name, pid, topic, key, meta) when is_pid(pid) and (is_map(meta) or is_function(meta)) do
tracker_name
|> Shard.name_for_topic(topic, pool_size(tracker_name))
|> GenServer.call({:update, pid, topic, key, meta})
end
@doc """
Lists all presences tracked under a given topic.
* `server_name` - The registered name of the tracker server
* `topic` - The `Phoenix.PubSub` topic
Returns a lists of presences in key/metadata tuple pairs.
## Examples
iex> Phoenix.Tracker.list(MyTracker, "lobby")
[{123, %{name: "user 123"}}, {456, %{name: "user 456"}}]
"""
@spec list(atom, topic) :: [presence]
def list(tracker_name, topic) do
tracker_name
|> Shard.name_for_topic(topic, pool_size(tracker_name))
|> Phoenix.Tracker.Shard.list(topic)
end
@doc """
Gets presences tracked under a given topic and key pair.
* `server_name` - The registered name of the tracker server
* `topic` - The `Phoenix.PubSub` topic
* `key` - The key of the presence
Returns a lists of presence metadata.
## Examples
iex> Phoenix.Tracker.get_by_key(MyTracker, "lobby", "user1")
[{#PID<0.88.0>, %{name: "User 1"}, {#PID<0.89.0>, %{name: "User 1"}]
"""
@spec get_by_key(atom, topic, term) :: [presence]
def get_by_key(tracker_name, topic, key) do
tracker_name
|> Shard.name_for_topic(topic, pool_size(tracker_name))
|> Phoenix.Tracker.Shard.get_by_key(topic, key)
end
@doc """
Gracefully shuts down by broadcasting permdown to all replicas.
## Examples
iex> Phoenix.Tracker.graceful_permdown(MyTracker)
:ok
"""
@spec graceful_permdown(atom) :: :ok
def graceful_permdown(tracker_name) do
shard_multicall(tracker_name, :graceful_permdown)
Supervisor.stop(tracker_name)
end
@doc """
Starts a tracker pool.
* `tracker` - The tracker module implementing the `Phoenix.Tracker` behaviour
* `tracker_arg` - The argument to pass to the tracker handler `c:init/1`
* `pool_opts` - The list of options used to construct the shard pool
## Required `pool_opts`:
* `:name` - The name of the server, such as: `MyApp.Tracker`
This will also form the common prefix for all shard names
* `:pubsub_server` - The name of the PubSub server, such as: `MyApp.PubSub`
## Optional `pool_opts`:
* `:broadcast_period` - The interval in milliseconds to send delta broadcasts
across the cluster. Default `1500`
* `:max_silent_periods` - The max integer of broadcast periods for which no
delta broadcasts have been sent. Default `10` (15s heartbeat)
* `:down_period` - The interval in milliseconds to flag a replica
as temporarily down. Default `broadcast_period * max_silent_periods * 2`
(30s down detection). Note: This must be at least 2x the `broadcast_period`.
* `:permdown_period` - The interval in milliseconds to flag a replica
as permanently down, and discard its state.
Note: This must be at least greater than the `down_period`.
Default `1_200_000` (20 minutes)
* `:clock_sample_periods` - The numbers of heartbeat windows to sample
remote clocks before collapsing and requesting transfer. Default `2`
* `:max_delta_sizes` - The list of delta generation sizes to keep before
falling back to sending entire state. Defaults `[100, 1000, 10_000]`.
* `:log_level` - The log level to log events, defaults `:debug` and can be
disabled with `false`
* `:pool_size` - The number of tracker shards to launch. Default `1`
"""
def start_link(tracker, tracker_arg, pool_opts) do
name = Keyword.fetch!(pool_opts, :name)
Supervisor.start_link(__MODULE__, [tracker, tracker_arg, pool_opts, name], name: name)
end
@impl true
def init([tracker, tracker_opts, opts, name]) do
pool_size = Keyword.get(opts, :pool_size, 1)
^name = :ets.new(name, [:set, :named_table, read_concurrency: true])
true = :ets.insert(name, {:pool_size, pool_size})
shards =
for n <- 0..(pool_size - 1) do
shard_name = Shard.name_for_number(name, n)
shard_opts = Keyword.put(opts, :shard_number, n)
%{
id: shard_name,
start: {Phoenix.Tracker.Shard, :start_link, [tracker, tracker_opts, shard_opts]}
}
end
opts = [
strategy: :one_for_one,
max_restarts: pool_size * 2,
max_seconds: 1
]
Supervisor.init(shards, opts)
end
  # Reads the pool size that init/1 stored in the tracker's named ETS table.
  # Raises a MatchError if the table or key is missing (i.e. the pool has
  # not been started under this name).
  defp pool_size(tracker_name) do
    [{:pool_size, size}] = :ets.lookup(tracker_name, :pool_size)
    size
  end
defp shard_multicall(tracker_name, message) do
for shard_number <- 0..(pool_size(tracker_name) - 1) do
tracker_name
|> Shard.name_for_number(shard_number)
|> GenServer.call(message)
end
end
end
| 35.330218 | 119 | 0.661053 |
08198f9c97049baa546031cbcb77c66960be7679 | 417 | ex | Elixir | web/views/admin/person_view.ex | soleo/changelog.com | 621c7471b23379e1cdd4a0c960b66ed98d8d1a53 | [
"MIT"
] | null | null | null | web/views/admin/person_view.ex | soleo/changelog.com | 621c7471b23379e1cdd4a0c960b66ed98d8d1a53 | [
"MIT"
] | null | null | null | web/views/admin/person_view.ex | soleo/changelog.com | 621c7471b23379e1cdd4a0c960b66ed98d8d1a53 | [
"MIT"
] | null | null | null | defmodule Changelog.Admin.PersonView do
use Changelog.Web, :view
import Changelog.Admin.SharedView
import Scrivener.HTML
def avatar_url(person), do: Changelog.PersonView.avatar_url(person)
def episode_count(person), do: Changelog.Person.episode_count(person)
def list_of_links(person), do: Changelog.PersonView.list_of_links(person)
def post_count(person), do: Changelog.Person.post_count(person)
end
| 34.75 | 75 | 0.800959 |
081990060293ec7dc2e4bdd5253bfa5faa2b2fd4 | 2,567 | ex | Elixir | test/support/mocks/api.ex | kuma/teslamate | ea175fddb49cc08070182455e0073c3dcfcb3b4c | [
"MIT"
] | 2,602 | 2019-07-24T23:19:12.000Z | 2022-03-31T15:03:48.000Z | test/support/mocks/api.ex | kuma/teslamate | ea175fddb49cc08070182455e0073c3dcfcb3b4c | [
"MIT"
] | 1,547 | 2019-07-26T22:02:09.000Z | 2022-03-31T15:39:41.000Z | test/support/mocks/api.ex | kuma/teslamate | ea175fddb49cc08070182455e0073c3dcfcb3b4c | [
"MIT"
] | 524 | 2019-07-26T17:31:33.000Z | 2022-03-29T15:16:36.000Z | defmodule ApiMock do
use GenServer
defmodule State do
defstruct [:pid, :events, :captcha]
end
# API
def start_link(opts) do
GenServer.start_link(__MODULE__, opts, name: Keyword.fetch!(opts, :name))
end
def get_vehicle(name, id), do: GenServer.call(name, {:get_vehicle, id})
def get_vehicle_with_state(name, id), do: GenServer.call(name, {:get_vehicle_with_state, id})
def stream(name, vid, receiver), do: GenServer.call(name, {:stream, vid, receiver})
def sign_in(name, tokens), do: GenServer.call(name, {:sign_in, tokens})
# Callbacks
@impl true
def init(opts) do
state = %State{
pid: Keyword.fetch!(opts, :pid),
events: Keyword.get(opts, :events, []),
captcha: Keyword.get(opts, :captcha, true)
}
{:ok, state}
end
  @impl true
  # When only one scripted event remains, reply with it but keep the state
  # unchanged — the last event is replayed on every subsequent call.
  def handle_call({action, _id}, _from, %State{events: [event | []]} = state)
      when action in [:get_vehicle, :get_vehicle_with_state] do
    {:reply, exec(event), state}
  end

  # Otherwise consume the head of the scripted event queue.
  def handle_call({action, _id}, _from, %State{events: [event | events]} = state)
      when action in [:get_vehicle, :get_vehicle_with_state] do
    {:reply, exec(event), %State{state | events: events}}
  end

  # Captcha disabled: report the credentials to the test process and succeed.
  def handle_call({:sign_in, {email, password}}, _from, %State{captcha: false} = state) do
    send(state.pid, {ApiMock, :sign_in, email, password})
    {:reply, :ok, state}
  end

  # Captcha enabled: return a {:captcha, svg, callback} challenge. The
  # callback simulates either the MFA flow ("mfa") or plain captcha entry.
  def handle_call({:sign_in, {email, password}}, _from, %State{captcha: true} = state) do
    callback = fn
      "mfa" = captcha ->
        send(state.pid, {ApiMock, :sign_in_callback, email, password, captcha})
        # NOTE(review): this `:ok` is a dead expression — the clause's
        # return value is the {:ok, {:mfa, ...}} tuple below.
        :ok

        devices = [
          %{"id" => "000", "name" => "Device #1"},
          %{"id" => "111", "name" => "Device #2"}
        ]

        # Inner `callback` shadows the outer variable; it records the MFA
        # device/passcode the caller submits.
        callback = fn device_id, passcode ->
          send(state.pid, {ApiMock, :mfa_callback, [device_id, passcode]})
          :ok
        end

        {:ok, {:mfa, devices, callback}}

      captcha ->
        send(state.pid, {ApiMock, :sign_in_callback, email, password, captcha})
        :ok
    end

    captcha = ~S(<svg xmlns="http://www.w3.org/2000/svg">)
    {:reply, {:ok, {:captcha, captcha, callback}}, state}
  end

  # Token-based sign-in (non-tuple credentials fall through to here):
  # forward the whole event to the test process.
  def handle_call({:sign_in, _tokens} = event, _from, %State{pid: pid} = state) do
    send(pid, {ApiMock, event})
    {:reply, :ok, state}
  end

  # Streaming: notify the test process and hand back its pid as the
  # "stream" handle.
  def handle_call({:stream, _vid, _receiver} = event, _from, %State{pid: pid} = state) do
    send(pid, {ApiMock, event})
    {:reply, {:ok, pid}, state}
  end
defp exec(event) when is_function(event), do: event.()
defp exec(event), do: event
end
| 28.522222 | 95 | 0.613946 |
0819b74121e754014db5f49fa1ed5e6a37872002 | 960 | exs | Elixir | apps/biz/test/account_test.exs | aforward/wuw | 8f41157aeb9fce738a402e757bf78a322a890b7a | [
"MIT"
] | null | null | null | apps/biz/test/account_test.exs | aforward/wuw | 8f41157aeb9fce738a402e757bf78a322a890b7a | [
"MIT"
] | null | null | null | apps/biz/test/account_test.exs | aforward/wuw | 8f41157aeb9fce738a402e757bf78a322a890b7a | [
"MIT"
] | null | null | null | defmodule Biz.AccountTest do
use ExUnit.Case, async: true
alias Biz.{User,Account,Repo}
doctest Biz.Account
  setup do
    # Check out a sandboxed DB connection so this test runs isolated.
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Repo)
  end

  test "latest_for" do
    user = User.create_guest
    # With no accounts yet, the snapshot is an empty map.
    assert %{} == Account.latest_for(user)
    {:ok, aaaCash} = Account.changeset(user, "AAA", "10", "Cash") |> Repo.insert
    {:ok, zzzCash} = Account.changeset(user, "ZZZ", "11", "Cash") |> Repo.insert
    {:ok, mmmCash} = Account.changeset(user, "MMM", "12", "Cash") |> Repo.insert
    {:ok, bbbDebt} = Account.changeset(user, "BBB", "-10", "Debt") |> Repo.insert
    {:ok, yyyDebt} = Account.changeset(user, "YYY", "-11", "Debt") |> Repo.insert
    {:ok, pppDebt} = Account.changeset(user, "PPP", "-12", "Debt") |> Repo.insert
    # Results are grouped by account type; the assertions show each group
    # ordered alphabetically by account name, not by insertion order.
    %{"Cash" => all_cash, "Debt" => all_debt} = Account.latest_for(user)
    assert all_cash == [aaaCash, mmmCash, zzzCash]
    assert all_debt == [bbbDebt, pppDebt, yyyDebt]
  end
end
| 30.967742 | 81 | 0.630208 |
0819d331be091702dc03f458f112b86bde2f2957 | 2,154 | exs | Elixir | config/dev.exs | patatoid/kommissar | c6c06ba29257ba7641daf03e8e4a94fa967ecfd6 | [
"MIT"
] | null | null | null | config/dev.exs | patatoid/kommissar | c6c06ba29257ba7641daf03e8e4a94fa967ecfd6 | [
"MIT"
] | null | null | null | config/dev.exs | patatoid/kommissar | c6c06ba29257ba7641daf03e8e4a94fa967ecfd6 | [
"MIT"
] | null | null | null | use Mix.Config
# Configure your database
config :kommissar, Kommissar.Repo,
username: "postgres",
password: "postgres",
database: "kommissar_dev",
hostname: "localhost",
show_sensitive_data_on_connection_error: true,
pool_size: 10
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :kommissar, KommissarWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/webpack/bin/webpack.js",
"--mode",
"development",
"--watch-stdin",
cd: Path.expand("../assets", __DIR__)
]
]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :kommissar, KommissarWeb.Endpoint,
live_reload: [
patterns: [
~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
~r"priv/gettext/.*(po)$",
~r"lib/kommissar_web/{live,views}/.*(ex)$",
~r"lib/kommissar_web/templates/.*(eex)$"
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 27.974026 | 68 | 0.693129 |
0819d8cb99f5c2f9f4c0eaf4e4c6abfa9bb5d5a0 | 241 | exs | Elixir | exercises/connect/connect.exs | jerith/elixir | 9a3f2a2fbee26a7b6a6b3ad74a9e6d1ff2495ed4 | [
"Apache-2.0"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/connect/connect.exs | jerith/elixir | 9a3f2a2fbee26a7b6a6b3ad74a9e6d1ff2495ed4 | [
"Apache-2.0"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/connect/connect.exs | jerith/elixir | 9a3f2a2fbee26a7b6a6b3ad74a9e6d1ff2495ed4 | [
"Apache-2.0"
] | 228 | 2017-07-05T07:09:32.000Z | 2022-03-27T08:59:08.000Z | defmodule Connect do
@doc """
Calculates the winner (if any) of a board
using "O" as the white player
and "X" as the black player
"""
@spec result_for([String.t()]) :: :none | :black | :white
def result_for(board) do
end
end
| 21.909091 | 59 | 0.647303 |
0819e6e7d653593d1c5978a363fc02ea45b0d568 | 1,139 | exs | Elixir | gen_fsm_playground/config/config.exs | enilsen16/elixir | b4d1d45858a25e4beb39e07de8685f3d93d6a520 | [
"MIT"
] | null | null | null | gen_fsm_playground/config/config.exs | enilsen16/elixir | b4d1d45858a25e4beb39e07de8685f3d93d6a520 | [
"MIT"
] | null | null | null | gen_fsm_playground/config/config.exs | enilsen16/elixir | b4d1d45858a25e4beb39e07de8685f3d93d6a520 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :gen_fsm_playground, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:gen_fsm_playground, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.741935 | 73 | 0.755926 |
0819ff706aae9ed4c84321ddfdd80376030c1894 | 25,037 | ex | Elixir | lib/militerm/machines/script.ex | jgsmith/militerm | c4252d0a93f5620b90750ac2b61baf282e9ef7eb | [
"Apache-2.0"
] | 6 | 2017-06-16T10:26:35.000Z | 2021-04-07T15:01:00.000Z | lib/militerm/machines/script.ex | jgsmith/militerm | c4252d0a93f5620b90750ac2b61baf282e9ef7eb | [
"Apache-2.0"
] | 2 | 2020-04-14T02:17:46.000Z | 2021-03-10T11:09:05.000Z | lib/militerm/machines/script.ex | jgsmith/militerm | c4252d0a93f5620b90750ac2b61baf282e9ef7eb | [
"Apache-2.0"
] | null | null | null | defmodule Militerm.Machines.Script do
alias Militerm.Systems.Entity
alias Militerm.Systems
require Logger
@boolean_ops [:and, :or]
@numeric_ops [:sum, :product]
@boolean_false_values [false, "", 0, 0.0, nil, []]
@moduledoc false
# A simple stack machine used for executing event handlers.
# The code is a sequence of atoms, strings, and numbers provided by
# `Militerm.System.Compilers.Script`:
#
# source_code
# |> Militerm.System.Parsers.Script.parse
# |> Militerm.System.Compilers.Script.compile
# |> Militerm.System.Machines.Script.run(objects)
#
# Where `objects` is a mapping of atoms to object identifiers useful with
# `Militerm.Entity`.
#
# The machine consists of the code, an instruction pointer, a stack, a
# stack of stack marks (for rolling back a stack after a subroutine call),
# a set of objects in the narrative context, and a scratch pad for temporary
# values such as variables referenced in code.
#
# The machine runs in the same thread as the caller.
  defmodule Machine do
    @moduledoc false
    # code    - tuple of opcodes/operands produced by the script compiler
    # stack   - value stack; head of the list is the top of the stack
    # ip      - instruction pointer, an index into `code`
    # marks   - saved stack depths used by :mark/:clear to roll the stack back
    # objects - narrative-context entities keyed by slot name ("this", "actor", ...)
    # pad     - scratch pad for script variables
    defstruct code: {}, stack: [], ip: 0, marks: [], objects: %{}, pad: %{}
  end
@doc """
Run the script from the current position.
## Examples
### Get passed-in arguments/objects
iex> Script.run({"coord", :get_obj}, %{"coord" => "default"})
"default"
### Boolean AND
iex> Script.run({
...> true, 0, true,
...> 3, :and
...> })
false
### Boolean OR
iex> Script.run({
...> true, false, true,
...> 3, :or
...> })
true
### Make a list from the top _n_ items
iex> Script.run({
...> 1, 2, 3,
...> 3, :make_list
...> })
[3, 2, 1]
### Jump unconditionally
iex> Script.run({
...> :jump, 1, 3, 7
...> })
7
### Comparisons
iex> {:ok, ast} = Militerm.Parsers.Script.parse_expression(Militerm.Util.Scanner.new("123 > 456"))
...> Script.run(Militerm.Compilers.Script.compile(ast))
false
iex> {:ok, ast} = Militerm.Parsers.Script.parse_expression(Militerm.Util.Scanner.new("123 < 456"))
...> Script.run(Militerm.Compilers.Script.compile(ast))
true
"""
def run(code, objects \\ %{}, pad \\ %{}) do
# IO.inspect({:running, code}, limit: :infinity)
machine = %Machine{
code: code,
objects: objects,
pad: pad
}
case step_until_done(machine) do
%{stack: [return_value | _]} ->
return_value
_ ->
nil
end
end
defp step_until_done(state) do
case step(state) do
:done -> state
new_state -> step_until_done(new_state)
end
end
  # Execution is complete once the instruction pointer runs past the end
  # of the code tuple.
  defp step(%{code: code, ip: ip}) when ip >= tuple_size(code), do: :done

  # Fetch the opcode/operand at the current position and advance the
  # instruction pointer before dispatching.
  defp step(%{code: code, ip: ip} = state) when ip >= 0 do
    execute_step(elem(code, ip), %{state | ip: ip + 1})
  end
  # Clause order is load-bearing throughout execute_step/2: the first
  # matching clause wins, so literal pushes must precede opcode dispatch.

  # Any non-atom value is a literal: push it onto the stack.
  defp execute_step(v, %{stack: stack} = state) when not is_atom(v) do
    %{state | stack: [v | stack]}
  end

  # Booleans are atoms, so they need their own push clause ahead of the
  # opcode clauses below.
  defp execute_step(bool, %{stack: stack} = state) when bool in [true, false] do
    %{state | stack: [bool | stack]}
  end

  # Explicit halt opcode.
  defp execute_step(:done, _), do: :done

  # Duplicate the top of the stack.
  defp execute_step(:dup, %{stack: [h | stack]} = state) do
    %{state | stack: [h, h | stack]}
  end

  # Discard the top of the stack.
  defp execute_step(:drop, %{stack: [_ | stack]} = state) do
    %{state | stack: stack}
  end

  # Remember the current stack depth so :clear can roll back to it.
  defp execute_step(:mark, %{stack: stack, marks: marks} = state) do
    %{state | marks: [Enum.count(stack) | marks]}
  end

  # With no saved marks, :clear empties the stack entirely.
  defp execute_step(:clear, %{marks: []} = state) do
    %{state | stack: []}
  end

  # Roll the stack back to the most recent mark's depth.
  defp execute_step(:clear, %{stack: stack, marks: [h | marks]} = state) do
    stack_size = Enum.count(stack)
    if h <= stack_size do
      %{state | stack: Enum.drop(stack, stack_size - h), marks: marks}
    else
      %{state | stack: [], marks: marks}
    end
  end

  # Unconditional relative jump; the operand follows the opcode in `code`.
  defp execute_step(:jump, %{code: code, ip: ip} = state) do
    %{state | ip: ip + 1 + elem(code, ip)}
  end

  # :jump_unless pops a value and jumps when it is falsy. Maps/tuples are
  # falsy when empty; everything else uses @boolean_false_values.
  defp execute_step(:jump_unless, %{stack: [v | stack]} = state) when is_map(v) do
    execute_jump_unless(map_size(v) == 0, %{state | stack: stack})
  end

  defp execute_step(:jump_unless, %{stack: [v | stack]} = state) when is_tuple(v) do
    execute_jump_unless(tuple_size(v) == 0, %{state | stack: stack})
  end

  defp execute_step(:jump_unless, %{stack: [v | stack]} = state) do
    execute_jump_unless(v in @boolean_false_values, %{state | stack: stack})
  end

  # Logical negation with the same truthiness rules as :jump_unless.
  defp execute_step(:not, %{stack: [v | stack]} = state) when is_map(v) do
    %{state | stack: [map_size(v) == 0 | stack]}
  end

  defp execute_step(:not, %{stack: [v | stack]} = state) when is_tuple(v) do
    %{state | stack: [tuple_size(v) == 0 | stack]}
  end

  defp execute_step(:not, %{stack: [v | stack]} = state) do
    %{state | stack: [v in @boolean_false_values | stack]}
  end
  # :this_can pops [ability, pov] and asks whether the "this" slot's
  # entity (or any of them, when the slot holds a list) can perform the
  # ability. Missing "this" yields false.
  defp execute_step(:this_can, %{stack: [ability, pov | stack], objects: objects} = state) do
    case Map.get(objects, "this") do
      nil ->
        %{state | stack: [false | stack]}
      [_ | _] = these ->
        %{
          state
          | stack: [
              Enum.any?(these, &Entity.can?(&1, ability, pov, objects))
              | stack
            ]
        }
      this ->
        %{
          state
          | stack: [
              Entity.can?(this, ability, pov, objects)
              | stack
            ]
        }
    end
  end

  # :can pops [ability, pov, base]; base may be a single entity tuple, a
  # list of entities, or anything else (nil result).
  defp execute_step(:can, %{stack: [ability, pov, base | stack], objects: objects} = state)
       when is_tuple(base) do
    %{
      state
      | stack: [
          Entity.can?(base, ability, pov, objects)
          | stack
        ]
    }
  end

  defp execute_step(:can, %{stack: [ability, pov, base | stack], objects: objects} = state)
       when is_list(base) do
    %{
      state
      | stack: [
          base |> Enum.map(&Entity.can?(&1, ability, pov, objects))
          | stack
        ]
    }
  end

  defp execute_step(:can, %{stack: [_, _, _ | stack]} = state) do
    %{state | stack: [nil | stack]}
  end

  # :this_is pops [trait] and tests the "this" slot against it.
  defp execute_step(:this_is, %{stack: [trait | stack], objects: objects} = state) do
    case Map.get(objects, "this") do
      nil ->
        %{state | stack: [false | stack]}
      [_ | _] = these ->
        %{
          state
          | stack: [
              Enum.any?(these, &Entity.is?(&1, trait, objects))
              | stack
            ]
        }
      this ->
        %{
          state
          | stack: [
              Entity.is?(this, trait, objects)
              | stack
            ]
        }
    end
  end

  # NOTE(review): this clause matches ANY stack with at least two entries
  # (no guard), so the three :is clauses below it can never match. It also
  # pops operands as [thing, trait] while the later clauses expect
  # [trait, base] — one of the two orderings is likely wrong; confirm
  # against the script compiler's operand order before changing.
  defp execute_step(:is, %{stack: [thing, trait | stack], objects: objects} = state) do
    case thing do
      nil ->
        %{state | stack: [false | stack]}
      [_ | _] = these ->
        %{
          state
          | stack: [
              Enum.any?(these, &Entity.is?(&1, trait, Map.put(objects, "this", &1)))
              | stack
            ]
        }
      this ->
        %{
          state
          | stack: [
              Entity.is?(this, trait, Map.put(objects, "this", this))
              | stack
            ]
        }
    end
  end

  # NOTE(review): unreachable — shadowed by the :is clause above.
  defp execute_step(:is, %{stack: [trait, base | stack], objects: objects} = state)
       when is_tuple(base) do
    %{
      state
      | stack: [
          Entity.is?(base, trait, objects)
          | stack
        ]
    }
  end

  # NOTE(review): unreachable — shadowed by the :is clause above.
  defp execute_step(:is, %{stack: [trait, base | stack], objects: objects} = state)
       when is_list(base) do
    %{
      state
      | stack: [
          base |> Enum.map(&Entity.is?(&1, trait, objects))
          | stack
        ]
    }
  end

  # NOTE(review): unreachable — shadowed by the :is clause above.
  defp execute_step(:is, %{stack: [_, _, _ | stack]} = state) do
    %{state | stack: [false | stack]}
  end
  # :uhoh pops a message, binds it as MML against the narrative objects,
  # and pushes a {:halt, message} marker; on any parse/bind failure a
  # generic halt message is pushed instead.
  defp execute_step(:uhoh, %{stack: [message | stack], objects: objects} = state) do
    with {:ok, parse} <- Militerm.Parsers.MML.parse(message),
         {:ok, bound_message} <- Militerm.Systems.MML.bind(parse, objects) do
      %{
        state
        | stack: [
            {:halt, bound_message} | stack
          ]
      }
    else
      _ ->
        %{
          state
          | stack: [
              {:halt, "Something went wrong."} | stack
            ]
        }
    end
  end

  # :index pops [index, base] and pushes the looked-up value. Dispatch by
  # base type: list+integer -> positional; list+other -> per-entity
  # property; map -> key; binary -> grapheme; tuple -> entity property.
  defp execute_step(:index, %{stack: [idx | [base | stack]]} = state)
       when is_list(base) and is_integer(idx) do
    %{state | stack: [Enum.at(base, idx, nil) | stack]}
  end

  defp execute_step(:index, %{stack: [prop | [base | stack]], objects: objects} = state)
       when is_list(base) do
    %{
      state
      | stack: [
          base
          |> Enum.map(fn obj ->
            Entity.property(obj, prop, objects)
          end)
          | stack
        ]
    }
  end

  defp execute_step(:index, %{stack: [idx | [base | stack]]} = state) when is_map(base) do
    %{state | stack: [Map.get(base, idx, nil) | stack]}
  end

  # NOTE(review): String.at/2 raises if idx is not an integer.
  defp execute_step(:index, %{stack: [idx | [base | stack]]} = state) when is_binary(base) do
    %{state | stack: [String.at(base, idx) | stack]}
  end

  defp execute_step(:index, %{stack: [prop | [base | stack]], objects: objects} = state)
       when is_tuple(base) do
    %{state | stack: [Entity.property(base, prop, objects) | stack]}
  end

  # Empty stack pushes nil. NOTE(review): a single-element stack matches
  # neither this clause nor the fallback below and would raise a
  # FunctionClauseError.
  defp execute_step(:index, %{stack: []} = state), do: %{state | stack: [nil]}

  # Fallback: unhandled base/index combination collapses to nil.
  defp execute_step(:index, %{stack: [_ | [_ | stack]]} = state) do
    %{state | stack: [nil | stack]}
  end
defp execute_step(:call, %{code: code, ip: ip, stack: stack, objects: objects} = state) do
function = elem(code, ip)
arity = elem(code, ip + 1)
{args, rest} = Enum.split(stack, arity)
%{
state
| ip: ip + 2,
stack: [
Systems.Script.call_function(
function,
args,
objects
)
| rest
]
}
end
  # :select reads a variable name and a code body as operands, then keeps
  # the list items for which running the body (with the item bound in the
  # pad) is truthy.
  defp execute_step(
         :select,
         %{code: code, ip: ip, stack: [list | stack], objects: objects, pad: pad} = state
       ) do
    var = elem(code, ip)
    body = elem(code, ip + 1)
    new_list =
      list
      |> Enum.filter(fn item ->
        run(body, objects, Map.put(pad, var, item))
      end)
    %{state | ip: ip + 2, stack: [new_list | stack]}
  end

  # :map runs the body for each item and flattens the results; a nil list
  # maps to an empty result.
  defp execute_step(
         :map,
         %{code: code, ip: ip, stack: [list | stack], objects: objects, pad: pad} = state
       ) do
    var = elem(code, ip)
    body = elem(code, ip + 1)
    new_list =
      case list do
        nil ->
          []
        _ ->
          list
          |> to_list()
          |> Enum.flat_map(fn item ->
            body
            |> run(objects, Map.put(pad, var, item))
            |> to_list()
          end)
      end
    %{state | ip: ip + 2, stack: [new_list | stack]}
  end

  # :loop runs the body once per item, threading the pad through, then
  # restores the loop variable's prior value (or removes it) and pushes true.
  defp execute_step(
         :loop,
         %{code: code, ip: ip, stack: [list | stack], objects: objects, pad: pad} = state
       ) do
    var = elem(code, ip)
    prior_var_value = Map.get(pad, var)
    body = elem(code, ip + 1)
    %{pad: new_pad} =
      list
      |> Enum.reduce(state, fn item, state ->
        # NOTE(review): `Map.put(pad, var, item)` closes over the OUTER
        # pad, so pad changes made by one iteration are discarded before
        # the next (only the final iteration's pad survives). Confirm
        # whether accumulation across iterations was intended.
        %{pad: new_pad} =
          step_until_done(%{state | ip: 0, code: body, pad: Map.put(pad, var, item)})
        %{state | pad: new_pad}
      end)
    new_pad =
      if is_nil(prior_var_value) do
        Map.drop(new_pad, [var])
      else
        Map.put(new_pad, var, prior_var_value)
      end
    %{state | ip: ip + 2, pad: new_pad, stack: [true | stack]}
  end
  # :narrate — broadcasts a sensory message. Pops the sense, the
  # volume/intensity, and the message; binds the message through MML with
  # "this" as the default actor; then raises a "msg:<sense>" event for each
  # interested entity (the standard slots plus nearby/containing observers).
  defp execute_step(
         :narrate,
         %{
           stack: [sense, volume, message | stack],
           pad: pad,
           objects: %{"this" => this} = objects
         } = state
       ) do
    # send the message out to everyone - it's a "msg:#{sense}" event that's tailored to them
    # so the target object decides if it gets displayed or not
    observers =
      this
      |> Militerm.Services.Location.find_near()
      |> Enum.map(fn item -> {item, "observer"} end)
    # The containing location (when known) also observes.
    observers =
      case Militerm.Services.Location.where(this) do
        {_, loc} -> [{loc, "observer"} | observers]
        _ -> observers
      end
    {:ok, bound_message} =
      message
      |> Militerm.Systems.MML.bind(
        objects
        |> Map.put_new("actor", to_list(this))
        |> Map.put("this", to_list(this))
        |> Map.merge(pad)
      )
    event = "msg:#{sense}"
    # One {entity_id, slot} pair per entity filling a narration slot.
    entities =
      for slot <- ~w[this actor direct indirect instrument],
          entities <- to_list(Map.get(objects, slot, [])),
          entity_id <- to_list(entities),
          do: {entity_id, slot}
    # TODO: add observant entities in the environment
    # Slot roles win over "observer" when an entity appears in both, since
    # uniq_by keeps the first occurrence.
    entities = Enum.uniq_by(entities ++ observers, &elem(&1, 0))
    for {entity_id, role} <- entities do
      Militerm.Systems.Entity.event(entity_id, event, to_string(role), %{
        "this" => entity_id,
        "text" => bound_message,
        "intensity" => volume
      })
    end
    %{state | stack: stack}
  end
defp execute_step(:make_list, %{stack: [n | stack]} = state) do
with {list, new_stack} <- Enum.split(stack, n) do
%{state | stack: [list | new_stack]}
end
end
defp execute_step(:make_dict, %{stack: [n | stack]} = state) do
with {list, new_stack} <- Enum.split(stack, 2 * n) do
map =
list
|> Enum.chunk_every(2)
|> Enum.map(&List.to_tuple/1)
|> Enum.into(%{})
%{state | stack: [map | new_stack]}
end
end
  # :trigger_event — pops target(s), event name, point of view, and an args
  # map. Each target may veto delivery via pre_event (`:halt` or
  # `{:halt, _}`); surviving targets receive the event and then post_event,
  # each with "this" rebound to the receiving target. Pushes whether at
  # least one target accepted the event.
  defp execute_step(:trigger_event, %{stack: [target, event, pov, args | stack]} = state) do
    targets = if is_list(target), do: target, else: [target]
    targets =
      targets
      |> Enum.filter(fn thing ->
        case Militerm.Systems.Entity.pre_event(thing, event, pov, Map.put(args, "this", thing)) do
          {:halt, _} -> false
          :halt -> false
          _ -> true
        end
      end)
    for thing <- targets do
      Militerm.Systems.Entity.event(thing, event, pov, Map.put(args, "this", thing))
    end
    for thing <- targets do
      Militerm.Systems.Entity.post_event(thing, event, pov, Map.put(args, "this", thing))
    end
    %{state | stack: [Enum.any?(targets) | stack]}
  end
  # Variadic arithmetic/boolean folds — each pops a count plus that many
  # operands (see do_series_op/4).
  defp execute_step(:sum, state) do
    do_series_op(0, &series_sum/2, :numeric, state)
  end
  defp execute_step(:concat, state) do
    do_series_op("", &<>/2, :string, state)
  end
  defp execute_step(:product, state) do
    do_series_op(1, &*/2, :numeric, state)
  end
  defp execute_step(:and, state) do
    do_series_op(true, &series_and/2, :boolean, state)
  end
  defp execute_step(:or, state) do
    do_series_op(false, &series_or/2, :boolean, state)
  end
  # Variadic set operations (see do_mapset_op/2).
  defp execute_step(:set_union, state) do
    do_mapset_op(:union, state)
  end
  defp execute_step(:set_intersection, state) do
    do_mapset_op(:intersection, state)
  end
  defp execute_step(:set_diff, state) do
    do_mapset_op(:difference, state)
  end
  # Variadic ordered comparisons — true when the relation holds between
  # every adjacent pair of popped operands (see do_ordered_op/2).
  defp execute_step(:lt, state), do: do_ordered_op(&Kernel.</2, state)
  defp execute_step(:le, state), do: do_ordered_op(&Kernel.<=/2, state)
  defp execute_step(:gt, state), do: do_ordered_op(&Kernel.>/2, state)
  defp execute_step(:ge, state), do: do_ordered_op(&Kernel.>=/2, state)
  defp execute_step(:eq, state), do: do_ordered_op(&equal?/2, state)
  # :ne — pops a count `n` and then `n` values; pushes true only when all
  # `n` values are pairwise distinct (an n-ary "all different", not a
  # simple binary inequality).
  defp execute_step(:ne, %{stack: [n | stack]} = state) do
    with {list, new_stack} <- Enum.split(stack, n) do
      %{state | stack: [list |> Enum.uniq() |> Enum.count() == n | new_stack]}
    end
  end
  # :difference — pops r then l and pushes l - r.
  defp execute_step(:difference, %{stack: [r, l | stack]} = state) do
    %{state | stack: [l - r | stack]}
  end
  # :div — pops d then n and pushes n / d. Kernel.//2 always yields a
  # float and raises ArithmeticError when d is zero.
  defp execute_step(:div, %{stack: [d, n | stack]} = state) do
    %{state | stack: [n / d | stack]}
  end
defp execute_step(:mod, %{stack: [r, l | stack]} = state) do
mod =
if l < 0 do
l + rem(l, r)
else
rem(l, r)
end
%{state | stack: [mod | stack]}
end
  # :set_var — binds `name` in the pad to the value beneath it. Only the
  # name is popped; the value stays on top of the stack as the
  # assignment expression's result.
  defp execute_step(:set_var, %{pad: pad, stack: [name | [value | _] = stack]} = state) do
    %{state | stack: stack, pad: Map.put(pad, name, value)}
  end
  # :set_this_prop — pops a property path name and writes the value beneath
  # it onto "this". The colon-separated path has $var segments expanded
  # from the pad; a nil name means the root ("") path. The value stays on
  # the stack as the expression's result.
  # NOTE(review): the same path-resolution snippet is repeated in the three
  # clauses below — worth extracting into a shared helper.
  defp execute_step(
         :set_this_prop,
         %{pad: pad, stack: [name | [value | _] = stack], objects: %{"this" => this} = objects} =
           state
       ) do
    path =
      case name do
        nil ->
          ""

        _ ->
          name
          |> String.split(":", trim: true)
          |> resolve_var_references(pad)
      end
    Entity.set_property(this, path, value, objects)
    %{state | stack: stack}
  end

  # :reset_this_prop — pops the path name, resets the property on "this",
  # and pushes nil as the result.
  defp execute_step(
         :reset_this_prop,
         %{pad: pad, stack: [name | stack], objects: %{"this" => this} = objects} = state
       ) do
    path =
      case name do
        nil ->
          ""

        _ ->
          name
          |> String.split(":", trim: true)
          |> resolve_var_references(pad)
      end
    Entity.reset_property(this, path, objects)
    %{state | stack: [nil | stack]}
  end

  # :get_this_prop — pops the path name and pushes the property value read
  # from "this"; when "this" is a list, only the first entry is consulted.
  defp execute_step(
         :get_this_prop,
         %{pad: pad, stack: [name | stack], objects: %{"this" => this} = objects} = state
       ) do
    path =
      case name do
        nil ->
          ""

        _ ->
          name
          |> String.split(":", trim: true)
          |> resolve_var_references(pad)
      end
    this =
      case this do
        [thing | _] -> thing
        thing -> thing
      end
    %{state | stack: [Entity.property(this, path, objects) | stack]}
  end

  # :remove_prop — pops the path name and deletes the property from "this";
  # nothing is pushed in its place.
  defp execute_step(
         :remove_prop,
         %{pad: pad, stack: [name | stack], objects: %{"this" => this}} = state
       ) do
    path =
      case name do
        nil ->
          ""

        _ ->
          name
          |> String.split(":", trim: true)
          |> resolve_var_references(pad)
      end
    Entity.remove_property(this, path)
    %{state | stack: stack}
  end
  # :get_obj — pops a name and pushes the matching entry from the call's
  # `objects` context map (nil when absent).
  defp execute_step(:get_obj, %{stack: [name | stack], objects: objects} = state) do
    %{state | stack: [Map.get(objects, name, nil) | stack]}
  end
  # :get_context_var — currently identical to :get_obj; both read from the
  # objects context map.
  defp execute_step(:get_context_var, %{stack: [name | stack], objects: objects} = state) do
    %{state | stack: [Map.get(objects, name, nil) | stack]}
  end
  # :get_var — pops a name and pushes its binding from the scratch pad.
  defp execute_step(:get_var, %{stack: [name | stack], pad: pad} = state) do
    %{state | stack: [Map.get(pad, name, nil) | stack]}
  end
  # :get_prop with a list of base entities: pops the bases and a count `n`,
  # then `n` path names. Each name is split on ":" ($var segments expanded
  # from the pad) and applied to every surviving base in turn, flat-mapping
  # the property values and dropping nils, so the result is a flat list.
  # NOTE(review): unlike the *_this_prop clauses, the split here does not
  # pass `trim: true` — confirm the inconsistency is intentional.
  defp execute_step(
         :get_prop,
         %{stack: [bases, n | stack], objects: objects, pad: pad} = state
       )
       when is_list(bases) do
    {paths, new_stack} = Enum.split(stack, n)
    values =
      paths
      |> Enum.map(fn name ->
        name
        |> String.split(":")
        |> resolve_var_references(pad)
      end)
      |> Enum.reduce(bases, fn path, bases ->
        bases
        |> Enum.flat_map(fn base ->
          to_list(Entity.property(base, path, objects))
        end)
        |> Enum.reject(&is_nil/1)
      end)
    %{state | stack: [values | new_stack]}
  end
  # :get_prop with a single entity tuple: as above, but starts from a
  # one-element base list and maps (rather than flat-maps) the lookups.
  defp execute_step(:get_prop, %{stack: [base, n | stack], objects: objects, pad: pad} = state)
       when is_tuple(base) do
    {paths, new_stack} = Enum.split(stack, n)
    Logger.debug(fn ->
      ["Machine: get_prop ", inspect(base), " : ", inspect(paths)]
    end)
    paths =
      paths
      |> Enum.map(fn name ->
        name
        |> String.split(":")
        |> resolve_var_references(pad)
      end)
    values =
      paths
      |> Enum.reduce([base], fn path, bases ->
        bases
        |> Enum.map(fn base ->
          Entity.property(base, path, objects)
        end)
        |> Enum.reject(&is_nil/1)
      end)
    %{state | stack: [values | new_stack]}
  end
  # :get_prop fallback for non-entity bases: consume the operands, push nil.
  defp execute_step(:get_prop, %{stack: [_base, n | stack]} = state) do
    {_paths, new_stack} = Enum.split(stack, n)
    %{state | stack: [nil | new_stack]}
  end
defp execute_step(opcode, state) do
Logger.debug("Unknown instruction! #{opcode}")
Logger.debug(inspect(state))
end
  # Wraps a scalar in a single-element list; lists pass through unchanged.
  # NOTE(review): to_list/1 is re-defined near the bottom of this chunk
  # with an extra nil clause; Elixir dispatches to these earlier clauses
  # first, so `to_list(nil)` returns `[nil]`, not `[]`. Consolidate.
  defp to_list(list) when is_list(list), do: list
  defp to_list(scalar), do: [scalar]
  # Converts a binary to an existing atom; atoms pass through.
  # NOTE(review): String.to_existing_atom/1 raises ArgumentError for
  # unknown atoms, and the commented-out try/catch suggests a nil return
  # was once intended (it would need `rescue`, not `catch`, to trap the
  # ArgumentError). Confirm callers can tolerate the raise.
  defp maybe_atom(atom) when is_atom(atom), do: atom
  defp maybe_atom(string) when is_binary(string) do
    # try do
    String.to_existing_atom(string)
    # catch
    #   _ -> nil
    # end
  end
defp resolve_var_references(bits, pad) do
Enum.flat_map(bits, fn bit ->
case bit do
<<"$", _::binary>> = var ->
val = Map.get(pad, var, "")
case val do
v when is_binary(v) -> String.split(v, ":", trim: true)
[v | _] when is_binary(v) -> String.split(v, ":", trim: true)
_ -> []
end
_ ->
[bit]
end
end)
end
  # Pops a count `n` and then `n` operands; pushes whether `op` holds for
  # every adjacent pair in push order (the stack yields operands
  # newest-first, hence the reverse).
  defp do_ordered_op(op, %{stack: [n | stack]} = state) do
    with {list, new_stack} <- Enum.split(stack, n) do
      %{state | stack: [ordering_satisfied?(op, Enum.reverse(list)) | new_stack]}
    end
  end
  # Vacuously true for zero or one operand.
  defp ordering_satisfied?(_, []), do: true
  defp ordering_satisfied?(_, [_]), do: true
  defp ordering_satisfied?(op, [l | [r | _] = rest]) do
    # A single-element list compares as its sole element, so `[x]` and `x`
    # are interchangeable. Note the unwrapping is per-pair: `r` stays
    # wrapped inside `rest` for the next comparison.
    {real_l, real_r} =
      case {l, r} do
        {[l], r} -> {l, r}
        {l, [r]} -> {l, r}
        {x, y} -> {x, y}
      end
    if op.(real_l, real_r) do
      ordering_satisfied?(op, rest)
    else
      false
    end
  end
  # Pops a count `n` and then `n` operands, coerces each to a MapSet
  # (scalars become singleton sets), and reduces them with the given MapSet
  # operation; the result is pushed back as a list. The first popped
  # operand (most recently pushed) seeds the accumulator.
  # NOTE(review): `apply(MapSet, op, [x, acc])` puts each later operand on
  # the left, which matters for :difference — confirm the intended operand
  # direction of :set_diff.
  defp do_mapset_op(op, %{stack: [n | stack]} = state) when n > 0 do
    {list, new_stack} = Enum.split(stack, n)
    [prime | sets] =
      list
      |> Enum.map(fn x ->
        if is_list(x), do: MapSet.new(x), else: MapSet.new([x])
      end)
    %{
      state
      | stack: [
          sets
          |> Enum.reduce(prime, fn x, acc ->
            apply(MapSet, op, [x, acc])
          end)
          |> MapSet.to_list()
          | new_stack
        ]
    }
  end
  # Zero operands: consume the count and push the empty set (as a list).
  defp do_mapset_op(_, %{stack: [_ | rest]} = state) do
    %{state | stack: [[] | rest]}
  end
  # Pops a count `n` and then `n` operands, converts them to `type`, and
  # left-folds them with `op` starting from `init`.
  #
  # NOTE(review): the `[a, b] -> [b, a]` swap restores push order for
  # exactly two operands (convert_for/3 reverses again by prepending);
  # with three or more operands the conversion reversal is uncompensated,
  # so non-commutative folds such as :concat see operands in reverse push
  # order — verify against the compiler's output.
  defp do_series_op(init, op, type, %{stack: [n | rest]} = state) when n > 0 do
    {values, new_stack} = Enum.split(rest, n)
    values =
      case values do
        [a, b] -> [b, a]
        _ -> values
      end
    Logger.debug(fn ->
      [
        "series op ",
        inspect(type),
        " ",
        inspect(values),
        " -> ",
        values
        |> convert_for(type)
        |> inspect,
        " -> ",
        values |> convert_for(type) |> List.foldl(init, op) |> inspect
      ]
    end)
    %{
      state
      | stack: [
          values
          |> convert_for(type)
          |> List.foldl(init, op)
          | new_stack
        ]
    }
  end
  # Zero operands: push the operation's identity element.
  defp do_series_op(init, _, _, %{stack: [_ | rest]} = state) do
    %{state | stack: [init | rest]}
  end
  # Converts each element of `list` for the given series type, dropping
  # nils. Accumulation prepends, so the returned list is reversed relative
  # to the input (see the ordering note on do_series_op/4).
  defp convert_for(list, type, acc \\ [])
  defp convert_for([], _, acc), do: acc
  defp convert_for([nil | rest], type, acc), do: convert_for(rest, type, acc)
  # Nested lists are converted recursively for numeric ops; series_sum/2
  # flattens them when folding.
  defp convert_for([list | rest], :numeric, acc) when is_list(list) do
    convert_for(rest, :numeric, [convert_for(list, :numeric) | acc])
  end
  defp convert_for([item | rest], type, acc) do
    convert_for(rest, type, [convert_item_for(type, item) | acc])
  end
  # Boolean coercion uses the script language's falsy set
  # (@boolean_false_values, a module attribute defined earlier in this
  # module); everything else is truthy.
  defp convert_item_for(:boolean, item) when item in @boolean_false_values, do: false
  defp convert_item_for(:boolean, _), do: true
  # String coercion: booleans become "True"/"", numbers are stringified,
  # entity references render as opaque <Thing#id@location> tags, lists are
  # rendered via Militerm.English.item_list/1, and anything else is "".
  defp convert_item_for(:string, true), do: "True"
  defp convert_item_for(:string, false), do: ""
  defp convert_item_for(:string, item) when is_binary(item), do: item
  defp convert_item_for(:string, item) when is_integer(item), do: Integer.to_string(item)
  defp convert_item_for(:string, item) when is_float(item), do: Float.to_string(item)
  defp convert_item_for(:string, {:thing, id}), do: "<Thing##{id}@default>"
  defp convert_item_for(:string, {:thing, id, {x, y}}), do: "<Thing##{id}@#{x},#{y}>"
  defp convert_item_for(:string, {:thing, id, {t}}), do: "<Thing##{id}@#{t}>"
  defp convert_item_for(:string, {:thing, id, d}), do: "<Thing##{id}@#{d}>"
  defp convert_item_for(:string, list) when is_list(list),
    do: Militerm.English.item_list(list)
  defp convert_item_for(:string, _), do: ""
  # Numeric coercion: numbers pass through; strings parse as Float when
  # they contain a dot and as Integer otherwise; anything unparseable
  # (including all other types) becomes 0.
  defp convert_item_for(:numeric, item) when is_float(item) or is_integer(item), do: item
  defp convert_item_for(:numeric, item) when is_binary(item) do
    module =
      if String.contains?(item, ".") do
        Float
      else
        Integer
      end
    case module.parse(item) do
      {numeric, _} -> numeric
      _ -> 0
    end
  end
  defp convert_item_for(:numeric, _), do: 0
  # Script-truth AND: false if either operand is in the falsy set.
  defp series_and(_, y) when y in @boolean_false_values, do: false
  defp series_and(x, _) when x in @boolean_false_values, do: false
  defp series_and(_, _), do: true
  # Script-truth OR: false only when both operands are falsy.
  defp series_or(x, y) when x in @boolean_false_values and y in @boolean_false_values, do: false
  defp series_or(_, _), do: true
  # Sum fold that tolerates nested list operands by flattening both sides
  # before summing.
  defp series_sum(x, y) do
    [x, y]
    |> List.flatten()
    |> Enum.sum()
  end
  # Conditional branch helper: when the condition is `true`, skip forward
  # by the relative offset stored at `ip` (plus one to step over the offset
  # operand itself); when `false`, fall through past the offset operand.
  # NOTE(review): only literal booleans match — any other condition value
  # raises FunctionClauseError — and the jump fires on `true`, which reads
  # opposite to the "unless" in the name. Verify against the compiler.
  defp execute_jump_unless(true, %{code: code, ip: ip} = state) do
    %{state | ip: ip + elem(code, ip) + 1}
  end
  defp execute_jump_unless(false, %{ip: ip} = state) do
    %{state | ip: ip + 1}
  end
  # NOTE(review): duplicate definition — to_list/1 already has clauses near
  # the top of this chunk, and dispatch follows source order, so the
  # earlier `to_list(scalar)` clause matches nil first and the nil clause
  # below is unreachable (`to_list(nil)` actually returns `[nil]`, not
  # `[]`). Consolidate into one definition with the nil clause first.
  defp to_list(list) when is_list(list), do: list
  defp to_list(nil), do: []
  defp to_list(scalar), do: [scalar]
def equal?({:thing, id}, {:thing, id, "default"}), do: true
def equal?({:thing, id, "default"}, {:thing, id}), do: true
def equal?(l, r), do: l == r
end
| 25.626407 | 104 | 0.549347 |
081a1e9c7aacd5f578322940e49cb33b7ca6f46e | 7,803 | ex | Elixir | lib/elixir/lib/kernel/utils.ex | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | 1 | 2019-08-13T23:22:33.000Z | 2019-08-13T23:22:33.000Z | lib/elixir/lib/kernel/utils.ex | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/kernel/utils.ex | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | null | null | null | import Kernel, except: [destructure: 2, defdelegate: 2, defstruct: 2]
defmodule Kernel.Utils do
  @moduledoc false

  @doc """
  Callback for destructure.
  """
  def destructure(list, count)
      when is_list(list) and is_integer(count) and count >= 0,
      do: destructure_list(list, count)

  def destructure(nil, count)
      when is_integer(count) and count >= 0,
      do: destructure_nil(count)

  # Takes up to `count` elements, padding with nil once the list runs out.
  defp destructure_list(_, 0), do: []
  defp destructure_list([], count), do: destructure_nil(count)
  defp destructure_list([h | t], count), do: [h | destructure_list(t, count - 1)]

  # Produces `count` nils (the right-hand side was nil or too short).
  defp destructure_nil(0), do: []
  defp destructure_nil(count), do: [nil | destructure_nil(count - 1)]

  @doc """
  Callback for defdelegate.
  """
  def defdelegate(fun, opts) when is_list(opts) do
    # TODO: Remove on v2.0
    append_first? = Keyword.get(opts, :append_first, false)

    {name, args} =
      case Macro.decompose_call(fun) do
        {_, _} = pair -> pair
        _ -> raise ArgumentError, "invalid syntax in defdelegate #{Macro.to_string(fun)}"
      end

    as = Keyword.get(opts, :as, name)
    as_args = build_as_args(args, append_first?)
    {name, args, as, as_args}
  end

  # With the deprecated :append_first option, the first parameter is moved
  # to the end of the delegated call's argument list.
  defp build_as_args(args, append_first?) do
    as_args = :lists.map(&build_as_arg/1, args)

    case append_first? do
      true -> tl(as_args) ++ [hd(as_args)]
      false -> as_args
    end
  end

  # Default values are accepted in the delegate head but not forwarded to
  # the target call.
  defp build_as_arg({:\\, _, [arg, _default_arg]}), do: validate_arg(arg)
  defp build_as_arg(arg), do: validate_arg(arg)

  # Only plain variables are valid delegate parameters.
  defp validate_arg({name, _, mod} = arg) when is_atom(name) and is_atom(mod) do
    arg
  end

  defp validate_arg(ast) do
    raise ArgumentError,
          "defdelegate/2 only accepts function parameters, got: #{Macro.to_string(ast)}"
  end

  @doc """
  Callback for defstruct.
  """
  def defstruct(module, fields) do
    case fields do
      fs when is_list(fs) ->
        :ok

      other ->
        raise ArgumentError, "struct fields definition must be list, got: #{inspect(other)}"
    end

    # Normalizes each field into a {key, default} pair, validating that the
    # default can be escaped (i.e. is representable at compile time).
    mapper = fn
      {key, val} when is_atom(key) ->
        try do
          Macro.escape(val)
        rescue
          e in [ArgumentError] ->
            raise ArgumentError, "invalid value for struct field #{key}, " <> Exception.message(e)
        else
          _ -> {key, val}
        end

      key when is_atom(key) ->
        {key, nil}

      other ->
        raise ArgumentError, "struct field names must be atoms, got: #{inspect(other)}"
    end

    fields = :lists.map(mapper, fields)
    enforce_keys = List.wrap(Module.get_attribute(module, :enforce_keys))

    foreach = fn
      key when is_atom(key) ->
        :ok

      key ->
        raise ArgumentError, "keys given to @enforce_keys must be atoms, got: #{inspect(key)}"
    end

    :lists.foreach(foreach, enforce_keys)
    struct = :maps.put(:__struct__, module, :maps.from_list(fields))
    {struct, enforce_keys, Module.get_attribute(module, :derive)}
  end

  @doc """
  Announcing callback for defstruct.
  """
  def announce_struct(module) do
    # Notifies the parallel compiler (when present) that the struct is now
    # available, unblocking modules that wait on it.
    case :erlang.get(:elixir_compiler_pid) do
      :undefined -> :ok
      pid -> send(pid, {:struct_available, module})
    end
  end

  @doc """
  Callback for raise.
  """
  def raise(msg) when is_binary(msg) do
    RuntimeError.exception(msg)
  end

  def raise(module) when is_atom(module) do
    module.exception([])
  end

  def raise(%_{__exception__: true} = exception) do
    exception
  end

  def raise(other) do
    ArgumentError.exception(
      "raise/1 and reraise/2 expect a module name, string or exception " <>
        "as the first argument, got: #{inspect(other)}"
    )
  end

  @doc """
  Callback for defguard.

  Rewrites an expression so it can be used both inside and outside a guard.

  Take, for example, the expression:

      is_integer(value) and rem(value, 2) == 0

  If we wanted to create a macro, `is_even`, from this expression, that could be
  used in guards, we'd have to take several things into account.

  First, if this expression is being used inside a guard, `value` needs to be
  unquoted each place it occurs, since it has not yet been at that point in our
  macro.

  Secondly, if the expression is being used outside of a guard, we want to unquote
  `value`, but only once, and then re-use the unquoted form throughout the expression.

  This helper does exactly that: takes the AST for an expression and a list of
  variable references it should be aware of, and rewrites it into a new expression
  that checks for its presence in a guard, then unquotes the variable references as
  appropriate.

  The following code

      expression = quote do: is_integer(value) and rem(value, 2) == 0
      variable_references = [value: Elixir]
      Kernel.Utils.defguard(expression, variable_references) |> Macro.to_string() |> IO.puts()

  would print a code similar to:

      case Macro.Env.in_guard?(__CALLER__) do
        true ->
          quote do
            is_integer(unquote(value)) and rem(unquote(value), 2) == 0
          end

        false ->
          quote do
            value = unquote(value)
            is_integer(value) and rem(value, 2) == 0
          end
      end
  """
  defmacro defguard(args, expr) do
    defguard(args, expr, __CALLER__)
  end

  @spec defguard([Macro.t()], Macro.t(), Macro.Env.t()) :: Macro.t()
  def defguard(args, expr, env) do
    # The pin asserts that collecting refs leaves the args AST unchanged.
    {^args, vars} = extract_refs_from_args(args)
    env = :elixir_env.with_vars(%{env | context: :guard}, vars)
    {expr, _scope} = :elixir_expand.expand(expr, env)

    quote do
      case Macro.Env.in_guard?(__CALLER__) do
        true -> unquote(literal_quote(unquote_every_ref(expr, vars)))
        false -> unquote(literal_quote(unquote_refs_once(expr, vars)))
      end
    end
  end

  # Collects every variable reference in the args AST as {name, context}
  # pairs, leaving the AST itself untouched.
  defp extract_refs_from_args(args) do
    Macro.postwalk(args, [], fn
      {ref, meta, context} = var, acc when is_atom(ref) and is_atom(context) ->
        {var, [{ref, var_context(meta, context)} | acc]}

      node, acc ->
        {node, acc}
    end)
  end

  # Finds every reference to `refs` in `guard` and wraps them in an unquote.
  defp unquote_every_ref(guard, refs) do
    Macro.postwalk(guard, fn
      {ref, meta, context} = var when is_atom(ref) and is_atom(context) ->
        case {ref, var_context(meta, context)} in refs do
          true -> literal_unquote(var)
          false -> var
        end

      node ->
        node
    end)
  end

  # Prefaces `guard` with unquoted versions of `refs`.
  defp unquote_refs_once(guard, refs) do
    {guard, used_refs} =
      Macro.postwalk(guard, %{}, fn
        {ref, meta, context} = var, acc when is_atom(ref) and is_atom(context) ->
          pair = {ref, var_context(meta, context)}

          case pair in refs do
            true ->
              # Each distinct ref gets exactly one generated variable
              # (arg0, arg1, ...) which is re-used for later occurrences.
              case acc do
                %{^pair => {new_var, _}} ->
                  {new_var, acc}

                %{} ->
                  generated = String.to_atom("arg" <> Integer.to_string(map_size(acc)))
                  new_var = Macro.var(generated, Elixir)
                  {new_var, Map.put(acc, pair, {new_var, var})}
              end

            false ->
              {var, acc}
          end

        node, acc ->
          {node, acc}
      end)

    all_used = for ref <- :lists.reverse(refs), used = :maps.get(ref, used_refs, nil), do: used
    {vars, exprs} = :lists.unzip(all_used)

    quote do
      {unquote_splicing(vars)} = {unquote_splicing(Enum.map(exprs, &literal_unquote/1))}
      unquote(guard)
    end
  end

  # Builds the AST for `quote do: ast` without evaluating it here.
  defp literal_quote(ast) do
    {:quote, [], [[do: ast]]}
  end

  # Builds the AST for `unquote(ast)` without evaluating it here.
  defp literal_unquote(ast) do
    {:unquote, [], List.wrap(ast)}
  end

  # Vars generated by macros carry a :counter in their metadata that
  # identifies the hygienic context; plain vars fall back to `kind`.
  defp var_context(meta, kind) do
    case :lists.keyfind(:counter, 1, meta) do
      {:counter, counter} -> counter
      false -> kind
    end
  end
end
| 27.670213 | 98 | 0.620531 |
081a474d2cd1bac62c66dd7861795c7e0f5a184c | 4,594 | ex | Elixir | lib/membrane_aac_format/aac.ex | membraneframework/membrane_aac_format | 9240395ee27a6469283eaee166eadf47d9a1f8b5 | [
"Apache-2.0"
] | null | null | null | lib/membrane_aac_format/aac.ex | membraneframework/membrane_aac_format | 9240395ee27a6469283eaee166eadf47d9a1f8b5 | [
"Apache-2.0"
] | null | null | null | lib/membrane_aac_format/aac.ex | membraneframework/membrane_aac_format | 9240395ee27a6469283eaee166eadf47d9a1f8b5 | [
"Apache-2.0"
] | null | null | null | defmodule Membrane.AAC do
@moduledoc """
Capabilities for [Advanced Audio Codec](https://wiki.multimedia.cx/index.php/Understanding_AAC).
"""
@type profile_t :: :main | :LC | :SSR | :LTP | :HE | :HEv2
@type mpeg_version_t :: 2 | 4
@type samples_per_frame_t :: 1024 | 960
@typedoc """
Indicates whether stream contains AAC frames only or are they encapsulated
in [ADTS](https://wiki.multimedia.cx/index.php/ADTS)
"""
@type encapsulation_t :: :none | :ADTS
@typedoc """
Identifiers of [MPEG Audio Object Types](https://wiki.multimedia.cx/index.php/MPEG-4_Audio#Audio_Object_Types)
"""
@type audio_object_type_id_t :: 1..5 | 29
@typedoc """
Identifiers of [MPEG Audio sampling frequencies](https://wiki.multimedia.cx/index.php/MPEG-4_Audio#Sampling_Frequencies)
"""
@type sampling_frequency_id_t :: 0..12 | 15
@typedoc """
Identifiers of [MPEG Audio channel configurations](https://wiki.multimedia.cx/index.php/MPEG-4_Audio#Channel_Configurations)
"""
@type channel_config_id_t :: 0..7
@typedoc """
AAC frame length identifiers.
`0` indicates 1024 samples/frame and `1` - 960 samples/frame.
"""
@type frame_length_id_t :: 0 | 1
@type t :: %__MODULE__{
profile: profile_t,
mpeg_version: mpeg_version_t,
sample_rate: pos_integer,
channels: pos_integer,
samples_per_frame: 1024 | 960,
frames_per_buffer: pos_integer,
encapsulation: encapsulation_t
}
@enforce_keys [
:profile,
:sample_rate,
:channels
]
defstruct @enforce_keys ++
[
mpeg_version: 2,
samples_per_frame: 1024,
frames_per_buffer: 1,
encapsulation: :none
]
@audio_object_type BiMap.new(%{
1 => :main,
2 => :LC,
3 => :SSR,
4 => :LTP,
5 => :HE,
29 => :HEv2
})
@sampling_frequency BiMap.new(%{
0 => 96000,
1 => 88200,
2 => 64000,
3 => 48000,
4 => 44100,
5 => 32000,
6 => 24000,
7 => 22050,
8 => 16000,
9 => 12000,
10 => 11025,
11 => 8000,
12 => 7350,
15 => :explicit
})
@channel_config BiMap.new(%{
0 => :AOT_specific,
1 => 1,
2 => 2,
3 => 3,
4 => 4,
5 => 5,
6 => 6,
7 => 8
})
@frame_length BiMap.new(%{
0 => 1024,
1 => 960
})
@spec aot_id_to_profile(audio_object_type_id_t) :: profile_t
def aot_id_to_profile(audio_object_type_id),
do: BiMap.fetch!(@audio_object_type, audio_object_type_id)
@spec profile_to_aot_id(profile_t) :: audio_object_type_id_t
def profile_to_aot_id(profile), do: BiMap.fetch_key!(@audio_object_type, profile)
@spec sampling_frequency_id_to_sample_rate(sampling_frequency_id_t) :: pos_integer
def sampling_frequency_id_to_sample_rate(sampling_frequency_id),
do: BiMap.fetch!(@sampling_frequency, sampling_frequency_id)
@spec sample_rate_to_sampling_frequency_id(sample_rate :: pos_integer | :explicit) ::
sampling_frequency_id_t
def sample_rate_to_sampling_frequency_id(sample_rate),
do: BiMap.fetch_key!(@sampling_frequency, sample_rate)
@spec channel_config_id_to_channels(channel_config_id_t) :: pos_integer | :AOT_specific
def channel_config_id_to_channels(channel_config_id),
do: BiMap.fetch!(@channel_config, channel_config_id)
@spec channels_to_channel_config_id(channels :: pos_integer | :AOT_specific) ::
channel_config_id_t
def channels_to_channel_config_id(channels), do: BiMap.fetch_key!(@channel_config, channels)
@spec frame_length_id_to_samples_per_frame(frame_length_id_t) :: samples_per_frame_t
def frame_length_id_to_samples_per_frame(frame_length_id),
do: BiMap.fetch!(@frame_length, frame_length_id)
@spec samples_per_frame_to_frame_length_id(samples_per_frame_t) :: pos_integer
def samples_per_frame_to_frame_length_id(samples_per_frame),
do: BiMap.fetch_key!(@frame_length, samples_per_frame)
end
| 34.02963 | 126 | 0.584676 |
081a5e0775a994f0dab2bb1f29a758c8f4a0dd8b | 1,639 | ex | Elixir | apps/massa_proxy/lib/supervisor.ex | drowzy/massa | 624cb02e0039b0624c534636f96fd157b1e34a95 | [
"Apache-2.0"
] | 20 | 2021-06-28T12:03:49.000Z | 2022-03-28T22:35:56.000Z | apps/massa_proxy/lib/supervisor.ex | drowzy/massa | 624cb02e0039b0624c534636f96fd157b1e34a95 | [
"Apache-2.0"
] | 48 | 2021-03-19T12:01:38.000Z | 2022-03-29T21:19:26.000Z | apps/massa_proxy/lib/supervisor.ex | drowzy/massa | 624cb02e0039b0624c534636f96fd157b1e34a95 | [
"Apache-2.0"
] | 5 | 2021-03-18T21:46:10.000Z | 2021-10-01T17:39:05.000Z | defmodule MassaProxy.Children do
  @moduledoc false
  use Supervisor
  require Logger
  def start_link(config) do
    Supervisor.start_link(__MODULE__, config, name: __MODULE__)
  end
  @impl true
  def init(config) do
    # Static children started in order: HTTP endpoint, task supervisor,
    # local registry/dynamic supervisor, caches, then the one-shot
    # orchestrator task and entity registry.
    children = [
      http_server(config),
      {Task.Supervisor, name: MassaProxy.TaskSupervisor},
      {Registry, [name: MassaProxy.LocalRegistry, keys: :unique]},
      {DynamicSupervisor, [name: MassaProxy.LocalSupervisor, strategy: :one_for_one]},
      {MassaProxy.Infra.Cache.Distributed, []},
      local_node(),
      {MassaProxy.Entity.EntityRegistry.Supervisor, [%{}]},
      %{
        id: CachedServers,
        start: {MassaProxy.Infra.Cache, :start_link, [[cache_name: :cached_servers]]}
      },
      %{
        id: ReflectionCache,
        start: {MassaProxy.Infra.Cache, :start_link, [[cache_name: :reflection_cache]]}
      }
    ]
    Supervisor.init(children, strategy: :one_for_one)
  end
  # Builds the Cowboy child spec serving Http.Endpoint on the configured port.
  defp http_server(config) do
    port = get_http_port(config)
    plug_spec =
      Plug.Cowboy.child_spec(
        scheme: :http,
        plug: Http.Endpoint,
        options: [port: port]
      )
    # NOTE(review): logged when the child spec is built, before the
    # supervisor actually starts the listener.
    Logger.info("HTTP Server started on port #{port}")
    plug_spec
  end
  defp get_http_port(config), do: config.proxy_http_port
  # One-shot (:transient) task that hands the orchestrator to the dynamic
  # local supervisor once the static children above are running.
  defp local_node() do
    %{
      id: MassaProxy.Local.Orchestrator,
      restart: :transient,
      start: {
        Task,
        :start_link,
        [
          fn ->
            DynamicSupervisor.start_child(
              MassaProxy.LocalSupervisor,
              {MassaProxy.Orchestrator, []}
            )
          end
        ]
      }
    }
  end
end
| 23.753623 | 87 | 0.608298 |
081a68976dfdad69f640b8c47b326e3ff3e9bee8 | 1,937 | exs | Elixir | test/bsv_rpc/sighash_test.exs | slashrsm/bsv_rpc | 6ac717f4aefcd26045ad15796e4129fdc6a01434 | [
"MIT"
] | 4 | 2019-09-25T05:52:02.000Z | 2020-08-13T21:09:42.000Z | test/bsv_rpc/sighash_test.exs | slashrsm/bsv_rpc | 6ac717f4aefcd26045ad15796e4129fdc6a01434 | [
"MIT"
] | null | null | null | test/bsv_rpc/sighash_test.exs | slashrsm/bsv_rpc | 6ac717f4aefcd26045ad15796e4129fdc6a01434 | [
"MIT"
] | null | null | null | defmodule BsvRpc.SighashTest do
  use ExUnit.Case
  doctest BsvRpc.Sighash
  # {sighash flag list, expected suffix byte}. From the table: the base
  # types are ALL=0x01 / NONE=0x02 / SINGLE=0x03; :sighash_forkid adds
  # 0x40 and :sighash_anyonecanpay adds 0x80.
  @cases [
    {[:sighash_all, :sighash_forkid], 0x41},
    {[:sighash_none, :sighash_forkid], 0x42},
    {[:sighash_single, :sighash_forkid], 0x43},
    {[:sighash_all, :sighash_forkid, :sighash_anyonecanpay], 0xC1},
    {[:sighash_none, :sighash_forkid, :sighash_anyonecanpay], 0xC2},
    {[:sighash_single, :sighash_forkid, :sighash_anyonecanpay], 0xC3},
    {[:sighash_all], 0x01},
    {[:sighash_none], 0x02},
    {[:sighash_single], 0x03},
    {[:sighash_all, :sighash_anyonecanpay], 0x81},
    {[:sighash_none, :sighash_anyonecanpay], 0x82},
    {[:sighash_single, :sighash_anyonecanpay], 0x83}
  ]
  test "test get_sighash_suffix/1" do
    # Every case in the table maps to its expected suffix byte.
    Enum.each(
      @cases,
      fn {sighash_type, expected_suffix} ->
        assert expected_suffix == BsvRpc.Sighash.get_sighash_suffix(sighash_type)
      end
    )
  end
  test "currently unsupported sighash types" do
    # Fixture key and transaction built from hardcoded test vectors.
    {:ok, k} =
      ExtendedKey.from_string(
        "xprv9s21ZrQH143K42Wyfo4GvDT1QBNSgq5sCBPXr4zaftZr2WKCrgEzdtniz5TvRgXA6V8hi2QrUMG3QTQnqovLp2UBAqsDcaxDUP3YCA61rJV"
      )
      |> BsvRpc.PrivateKey.create()
    tx =
      BsvRpc.Transaction.create_from_hex!(
        "0100000001040800A41008F4C353626694DAC1EE5553FBD36B11AC5647528E29C7D6C89BE20000000000FFFFFFFF0200F90295000000001976A9141D7C7B4894BE23A6495B004157F3A1BBA173C52988AC0CF70295000000001976A9141D7C7B4894BE23A6495B004157F3A1BBA173C52988AC00000000"
      )
    [tx_in | _] = tx.inputs
    utxo = %BsvRpc.UTXO{
      script_pubkey: Base.decode16!("76A9141D7C7B4894BE23A6495B004157F3A1BBA173C52988AC"),
      value: 5_000_000_000,
      transaction: <<>>,
      output: 0
    }
    # Only the first case in @cases is supported by sighash/5; all the
    # remaining combinations are expected to raise.
    [_supported | unsupported] = @cases
    Enum.each(
      unsupported,
      fn sighash_type ->
        assert_raise RuntimeError, fn ->
          BsvRpc.Sighash.sighash(tx_in, tx, k, utxo, sighash_type)
        end
      end
    )
  end
end
| 31.241935 | 248 | 0.699535 |
081a6bf679db319d1a9aec364b62788fdbd2eaaa | 2,546 | exs | Elixir | test/postgres/process_pg_crud_test.exs | elixir-garage/ex_tenant | 2336ab92d4cf0e826fdd6bf6ce5fb0206095f007 | [
"Apache-2.0"
] | 1 | 2022-02-09T23:17:09.000Z | 2022-02-09T23:17:09.000Z | test/postgres/process_pg_crud_test.exs | elixir-garage/ex_tenant | 2336ab92d4cf0e826fdd6bf6ce5fb0206095f007 | [
"Apache-2.0"
] | null | null | null | test/postgres/process_pg_crud_test.exs | elixir-garage/ex_tenant | 2336ab92d4cf0e826fdd6bf6ce5fb0206095f007 | [
"Apache-2.0"
] | null | null | null | defmodule ExTenantRepoProcessPgCrudTest do
  use ExTenant.Postgres.Cases.DataCase
  alias ExTenant.Support.PgExTenantRepository, as: TenantRepo
  alias ExTenant.Support.PgTestTenantRepository, as: TenantRepoWithoutTenantId
  alias ExTenant.Test.Support.Repos.PgTestRepo, as: Repo
  setup do
    # Two tenants so cross-tenant behavior can be exercised; the "foo"
    # tenant is made current for every test via the process dictionary.
    {:ok, tenant} = TenantRepo.create_tenant("foo", Repo)
    {:ok, bar_tenant} = TenantRepo.create_tenant("bar", Repo)
    #
    # this normally would be handled by a plug in PHX..
    #
    Repo.put_tenant_id(tenant.tenant_id)
    {:ok, tenant: tenant, bar_tenant: bar_tenant}
  end
  describe "test the schema - comment crud" do
    test "creates a comment on a post" do
      post = TenantRepo.create_post("test-p-name", "test-p-body", Repo)
      created_comment = TenantRepo.create_comment("test-c-name", "test-c-body", post.id, Repo)
      retrieved_commment = TenantRepo.get_comment(created_comment.id, Repo)
      assert retrieved_commment.id == created_comment.id
      assert retrieved_commment.name == created_comment.name
      assert retrieved_commment.post_id == created_comment.post_id
      assert retrieved_commment.tenant_id == created_comment.tenant_id
    end
    # NOTE(review): despite the name, this test asserts that the insert
    # raises Postgrex.Error — presumably the database rejects the row when
    # the tenant_id is not cast into the changeset; confirm intent and
    # consider renaming.
    test "creates a post when the tenant_id is in the process dictionary" do
      assert_raise Postgrex.Error, fn ->
        TenantRepo.create_post_without_cast("test-p-name", "test-p-body", Repo)
      end
    end
    # Inserting through a repository that never supplies a tenant_id is
    # rejected at the database level.
    test "fails to create a post - when the tenant_id is passed in" do
      assert_raise Postgrex.Error, fn ->
        TenantRepoWithoutTenantId.create_post_without_tenant_id(
          "test-p-name",
          "test-p-body"
        )
      end
    end
  end
  describe "test the schema - post crud" do
    test "creates a post in the tenant" do
      created_post = TenantRepo.create_post("test-post-name", "test-post-body", Repo)
      retrieved_post = TenantRepo.get_post(created_post.id, Repo)
      assert retrieved_post.id == created_post.id
      assert retrieved_post.name == created_post.name
      assert retrieved_post.tenant_id == created_post.tenant_id
    end
  end
  describe "test the schema - post crud with using cast_tenanted macro" do
    test "creates a post in the tenant" do
      created_post = TenantRepo.create_post_with_cast("test-post-name", "test-post-body", Repo)
      retrieved_post = TenantRepo.get_post(created_post.id, Repo)
      assert retrieved_post.id == created_post.id
      assert retrieved_post.name == created_post.name
      assert retrieved_post.tenant_id == created_post.tenant_id
    end
  end
end
| 36.898551 | 95 | 0.715632 |
081a70be979cc95de67beb841d39e0e8682197b0 | 264 | exs | Elixir | priv/repo/migrations/20190811132542_tie_title.exs | TylerTemp/jollacn-api | 4e0d887ea9bf4c5fd94dd1880682de1f6384a078 | [
"MIT"
] | null | null | null | priv/repo/migrations/20190811132542_tie_title.exs | TylerTemp/jollacn-api | 4e0d887ea9bf4c5fd94dd1880682de1f6384a078 | [
"MIT"
] | null | null | null | priv/repo/migrations/20190811132542_tie_title.exs | TylerTemp/jollacn-api | 4e0d887ea9bf4c5fd94dd1880682de1f6384a078 | [
"MIT"
] | null | null | null | defmodule JollaCNAPI.DB.Repo.Migrations.TieTitle do
use Ecto.Migration
  # Adds a nullable `title` column (DB comment "标题", Chinese for "title")
  # to the `tie` table and a unique index over it.
  def change do
    alter table("tie") do
      add(:title, :string, null: true, comment: "标题")
    end
    # NOTE(review): "Index Comment" reads like a leftover placeholder —
    # consider a descriptive comment for this unique index.
    create(index("tie", [:title], unique: true, comment: "Index Comment"))
  end
end
| 22 | 74 | 0.662879 |
081a739564b06eb3ba72a4bb64dd232735359d85 | 418 | ex | Elixir | server/lib/realtime/database_replication_supervisor.ex | gustavoarmoa/realtime | e8075779ed19bfb8c22541dc1e5e8ea032d5b823 | [
"Apache-2.0"
] | 4,609 | 2019-10-25T12:28:35.000Z | 2022-03-31T23:05:06.000Z | server/lib/realtime/database_replication_supervisor.ex | gustavoarmoa/realtime | e8075779ed19bfb8c22541dc1e5e8ea032d5b823 | [
"Apache-2.0"
] | 223 | 2019-09-27T03:21:45.000Z | 2022-03-29T23:04:03.000Z | server/lib/realtime/database_replication_supervisor.ex | gustavoarmoa/realtime | e8075779ed19bfb8c22541dc1e5e8ea032d5b823 | [
"Apache-2.0"
] | 187 | 2019-10-27T07:44:15.000Z | 2022-03-29T19:34:52.000Z | defmodule Realtime.DatabaseReplicationSupervisor do
use Supervisor
alias Realtime.Adapters.Postgres.EpgsqlServer
alias Realtime.Replication
def start_link(config) do
Supervisor.start_link(__MODULE__, config, name: __MODULE__)
end
@impl true
def init(config) do
children = [
Replication,
{EpgsqlServer, config}
]
Supervisor.init(children, strategy: :one_for_all)
end
end
| 19.904762 | 63 | 0.73445 |
081a7caa6cbd3c6f3d0fda140c66451a25f6c9c4 | 1,253 | ex | Elixir | hw2/Part 1/bulls_and_cows.ex | alpdenizz/ElixirPlayground | 58b5a16c489058e0067e4811042d96fe6a5f8d59 | [
"MIT"
] | null | null | null | hw2/Part 1/bulls_and_cows.ex | alpdenizz/ElixirPlayground | 58b5a16c489058e0067e4811042d96fe6a5f8d59 | [
"MIT"
] | null | null | null | hw2/Part 1/bulls_and_cows.ex | alpdenizz/ElixirPlayground | 58b5a16c489058e0067e4811042d96fe6a5f8d59 | [
"MIT"
] | null | null | null | defmodule BullsAndCows do
def score_guess(str1, str2) do
if str1 == str2 do "You win"
else
list1 = Enum.with_index(String.codepoints(str1))
list2 = Enum.with_index(String.codepoints(str2))
resultList = Enum.uniq(for tuple1 <- list1, tuple2 <- list2, do: checkTuple(tuple1,tuple2))
bullsList = Enum.filter(resultList, fn{x,_} -> x == "Bulls" end)
findExtraCows = Enum.map(bullsList, fn{_,index} -> Enum.filter(resultList, fn{x,y} -> x=="Cows" and y==index end) end)
findExtraCows = List.flatten(findExtraCows)
resultList = resultList--findExtraCows
bullsNumber = length (Enum.filter(resultList, fn{x,_} -> x=="Bulls" end))
cowsNumber = length (Enum.filter(resultList, fn{x,_} -> x=="Cows" end))
Integer.to_string(bullsNumber)<>" Bulls, "<>Integer.to_string(cowsNumber)<>" Cows"
end
end
  # Compares two {codepoint, index} tuples (secret side, guess side):
  # returns {"Bulls", index} for an exact position+value match,
  # {"Cows", min_index} when only the codepoints match, and
  # {"", guess_index} otherwise. Used by the cross-product scoring in
  # score_guess/2.
  def checkTuple(t1,t2) do
    if t1==t2 do {"Bulls",elem(t1,1)}
    else
      ind1 = elem(t1,1)
      ind2 = elem(t2,1)
      ind = Enum.min([ind1,ind2])
      if elem(t1,0) == elem(t2,0) do {"Cows",ind}
      else {"",elem(t2,1)}
      end
    end
  end
end | 41.766667 | 130 | 0.561852 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.