hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0865ed4db37f6e94882a18c9be5d58c793051bd6 | 2,458 | ex | Elixir | clients/drive/lib/google_api/drive/v3/model/file_list.ex | CertifiedrLi/elixir-google-api | 4e0e261dd06ee7753c356cca413783f3facd5f03 | [
"Apache-2.0"
] | null | null | null | clients/drive/lib/google_api/drive/v3/model/file_list.ex | CertifiedrLi/elixir-google-api | 4e0e261dd06ee7753c356cca413783f3facd5f03 | [
"Apache-2.0"
] | null | null | null | clients/drive/lib/google_api/drive/v3/model/file_list.ex | CertifiedrLi/elixir-google-api | 4e0e261dd06ee7753c356cca413783f3facd5f03 | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Drive.V3.Model.FileList do
  @moduledoc """
  A list of files.

  ## Attributes

  - files ([File]): The list of files. If nextPageToken is populated, then this list may be incomplete and an additional page of results should be fetched. Defaults to: `null`.
  - incompleteSearch (boolean()): Whether the search process was incomplete. If true, then some search results may be missing, since all documents were not searched. This may occur when searching multiple Team Drives with the \"user,allTeamDrives\" corpora, but all corpora could not be searched. When this happens, it is suggested that clients narrow their query by choosing a different corpus such as \"user\" or \"teamDrive\". Defaults to: `null`.
  - kind (String.t): Identifies what kind of resource this is. Value: the fixed string \"drive#fileList\". Defaults to: `null`.
  - nextPageToken (String.t): The page token for the next page of files. This will be absent if the end of the files list has been reached. If the token is rejected for any reason, it should be discarded, and pagination should be restarted from the first page of results. Defaults to: `null`.
  """

  # All fields default to nil; keyword form is equivalent to the bare
  # atom-list form of defstruct.
  defstruct files: nil,
            incompleteSearch: nil,
            kind: nil,
            nextPageToken: nil
end
defimpl Poison.Decoder, for: GoogleApi.Drive.V3.Model.FileList do
  alias GoogleApi.Drive.V3.Deserializer
  alias GoogleApi.Drive.V3.Model.File, as: FileModel

  # Post-process the decoded map: the :files key is deserialized as a
  # list of File models via the shared Deserializer helper.
  def decode(value, options) do
    Deserializer.deserialize(value, :files, :list, FileModel, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Drive.V3.Model.FileList do
  alias GoogleApi.Drive.V3.Deserializer

  # Delegate encoding to the shared serializer helper.
  def encode(value, options), do: Deserializer.serialize_non_nil(value, options)
end
| 46.377358 | 480 | 0.751424 |
086633d9cf141c521abb92b7e25a2a94f7813f03 | 1,593 | ex | Elixir | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p4beta1_gcs_source.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p4beta1_gcs_source.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p4beta1_gcs_source.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1GcsSource do
  @moduledoc """
  The Google Cloud Storage location where the input will be read from.

  ## Attributes

  * `uri` (*type:* `String.t`, *default:* `nil`) - Google Cloud Storage URI for the input file. This must only be a
    Google Cloud Storage object. Wildcards are not currently supported.
  """

  # Generated-model plumbing: `use GoogleApi.Gax.ModelBase` supplies the
  # struct definition, the `field/1` macro, and decode/encode helpers
  # shared by all generated API models.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :uri => String.t()
        }

  field(:uri)
end
defimpl Poison.Decoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1GcsSource do
  alias GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1GcsSource, as: GcsSource

  # Delegate to the model's generated decode/2.
  def decode(value, options), do: GcsSource.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1GcsSource do
  alias GoogleApi.Gax.ModelBase

  # Delegate to the shared generated-model encoder.
  def encode(value, options), do: ModelBase.encode(value, options)
end
| 33.1875 | 117 | 0.748274 |
0866868935f595f2933e60618ddbc5e3b2d261c4 | 606 | ex | Elixir | lib/helpers/patch_startup_scripts.ex | corka149/burrito | fa7d9af37b83e59a6429c19c6645ca8da170a86f | [
"MIT"
] | 1 | 2021-12-07T18:11:14.000Z | 2021-12-07T18:11:14.000Z | lib/helpers/patch_startup_scripts.ex | wojtekmach/burrito | 2a3baf280a69b9fc2692d55386caca2c86e2c358 | [
"MIT"
] | null | null | null | lib/helpers/patch_startup_scripts.ex | wojtekmach/burrito | 2a3baf280a69b9fc2692d55386caca2c86e2c358 | [
"MIT"
] | null | null | null | defmodule Burrito.Helpers.PatchStartupScripts do
require Logger
def run(self_path, release_path, release_name) do
Logger.info("Patching shell script for release...")
shell_patch_path = Path.join(self_path, ["/src", "/pass_args_patch_posix.diff"])
bat_patch_path = Path.join(self_path, ["/src", "/pass_args_patch_win.diff"])
shell_path = Path.join(release_path, ["/bin", "/#{release_name}"])
bat_path = Path.join(release_path, ["/bin", "/#{release_name}.bat"])
System.cmd("patch", [shell_path, shell_patch_path])
System.cmd("patch", [bat_path, bat_patch_path])
end
end
| 35.647059 | 84 | 0.706271 |
086689196a0f69499fa64f5b82476d29f0f2ef3c | 3,365 | exs | Elixir | test/absinthe/phoenix/controller_test.exs | matrinox/absinthe_phoenix | e0ae14c04a55550f9cd7a6845583ad0bcce79305 | [
"MIT"
] | 263 | 2016-05-12T21:26:08.000Z | 2022-03-29T04:06:46.000Z | test/absinthe/phoenix/controller_test.exs | matrinox/absinthe_phoenix | e0ae14c04a55550f9cd7a6845583ad0bcce79305 | [
"MIT"
] | 79 | 2017-06-25T08:18:46.000Z | 2021-12-14T15:13:06.000Z | test/absinthe/phoenix/controller_test.exs | matrinox/absinthe_phoenix | e0ae14c04a55550f9cd7a6845583ad0bcce79305 | [
"MIT"
] | 86 | 2016-05-19T09:52:32.000Z | 2022-03-26T13:46:52.000Z | defmodule Absinthe.Phoenix.ControllerTest do
use ExUnit.Case, async: true
use Absinthe.Phoenix.ConnCase
defmodule Schema do
use Absinthe.Schema
query do
field :string, :string do
arg :echo, :string
resolve &resolve_echo/3
end
field :integer, :integer do
arg :echo, :integer
resolve &resolve_echo/3
end
field :list_of_integers, list_of(:integer) do
arg :echo, list_of(:integer)
resolve &resolve_echo/3
end
field :input_object_with_integers, :deep_integers do
arg :echo, :deep_integers_input
resolve &resolve_echo/3
end
end
object :deep_integers do
field :foo, :integer
field :bar, :integer
field :baz, :integer
end
input_object :deep_integers_input do
field :foo, :integer
field :bar, :integer
field :baz, :integer
end
def resolve_echo(_, %{echo: echo}, _) do
{:ok, echo}
end
end
defmodule ReverseSchema do
use Absinthe.Schema
query do
field :string, :string do
arg :echo, :string
resolve &resolve_echo/3
end
end
def resolve_echo(_, %{echo: echo}, _) do
{:ok, echo |> String.reverse()}
end
end
defmodule Controller do
use Phoenix.Controller
use Absinthe.Phoenix.Controller,
schema: Absinthe.Phoenix.ControllerTest.Schema,
action: [mode: :internal]
@graphql """
query ($echo: String) { string(echo: $echo) }
"""
def string(conn, %{data: data}), do: json(conn, data)
@graphql {"""
query ($echo: String) { string(echo: $echo) }
""", ReverseSchema}
def reverse_string(conn, %{data: data}), do: json(conn, data)
@graphql """
query ($echo: Int) { integer(echo: $echo) }
"""
def integer(conn, %{data: data}), do: json(conn, data)
@graphql """
query ($echo: [Int]) { list_of_integers(echo: $echo) }
"""
def list_of_integers(conn, %{data: data}), do: json(conn, data)
@graphql """
query ($echo: DeepIntegersInput) { input_object_with_integers(echo: $echo) }
"""
def input_object_with_integers(conn, %{data: data}), do: json(conn, data)
end
describe "input" do
test "string" do
assert %{"string" => "one"} == result(Controller, :string, %{"echo" => "one"})
end
test "integer" do
assert %{"integer" => 1} == result(Controller, :integer, %{"echo" => "1"})
end
test "list of integers" do
assert %{"list_of_integers" => [1, 2, 3]} ==
result(Controller, :list_of_integers, %{"echo" => ~w(1 2 3)})
end
test "input object with integers" do
assert %{"input_object_with_integers" => %{"foo" => 1, "bar" => 2, "baz" => 3}} ==
result(Controller, :input_object_with_integers, %{
"echo" => %{"foo" => "1", "bar" => "2", "baz" => "3"}
})
end
end
describe "using an alternate schema" do
test "can be defined using a @graphql tuple" do
assert %{"string" => "eno"} == result(Controller, :reverse_string, %{"echo" => "one"})
end
end
def result(controller, name, params, verb \\ :get) do
conn = build_conn(verb, "/", params) |> Plug.Conn.fetch_query_params()
controller.call(conn, controller.init(name))
|> json_response(200)
end
end
| 25.884615 | 92 | 0.585736 |
0866992f2713d75bfe5f358811d7e41c8510197d | 2,287 | ex | Elixir | clients/proximity_beacon/lib/google_api/proximity_beacon/v1beta1/model/beacon_info.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/proximity_beacon/lib/google_api/proximity_beacon/v1beta1/model/beacon_info.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/proximity_beacon/lib/google_api/proximity_beacon/v1beta1/model/beacon_info.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.ProximityBeacon.V1beta1.Model.BeaconInfo do
  @moduledoc """
  A subset of beacon information served via the `beaconinfo.getforobserved`
  method, which you call when users of your app encounter your beacons.

  ## Attributes

  * `advertisedId` (*type:* `GoogleApi.ProximityBeacon.V1beta1.Model.AdvertisedId.t`, *default:* `nil`) - The ID advertised by the beacon.
  * `attachments` (*type:* `list(GoogleApi.ProximityBeacon.V1beta1.Model.AttachmentInfo.t)`, *default:* `nil`) - Attachments matching the type(s) requested.
    May be empty if no attachment types were requested.
  * `beaconName` (*type:* `String.t`, *default:* `nil`) - The name under which the beacon is registered.
  """

  # Generated-model plumbing: `use GoogleApi.Gax.ModelBase` supplies the
  # struct definition, the `field` macro, and decode/encode helpers
  # shared by all generated API models.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :advertisedId => GoogleApi.ProximityBeacon.V1beta1.Model.AdvertisedId.t(),
          :attachments => list(GoogleApi.ProximityBeacon.V1beta1.Model.AttachmentInfo.t()),
          :beaconName => String.t()
        }

  field(:advertisedId, as: GoogleApi.ProximityBeacon.V1beta1.Model.AdvertisedId)
  # type: :list marks this field as a list of AttachmentInfo models.
  field(:attachments, as: GoogleApi.ProximityBeacon.V1beta1.Model.AttachmentInfo, type: :list)
  field(:beaconName)
end
defimpl Poison.Decoder, for: GoogleApi.ProximityBeacon.V1beta1.Model.BeaconInfo do
  alias GoogleApi.ProximityBeacon.V1beta1.Model.BeaconInfo

  # Delegate to the model's generated decode/2.
  def decode(value, options), do: BeaconInfo.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.ProximityBeacon.V1beta1.Model.BeaconInfo do
  alias GoogleApi.Gax.ModelBase

  # Delegate to the shared generated-model encoder.
  def encode(value, options), do: ModelBase.encode(value, options)
end
| 41.581818 | 158 | 0.746393 |
0866a395dd72b131ce31219cc3bd17cf8d998a0b | 2,243 | ex | Elixir | lib/zam_web.ex | praxis-of-nines/zam-search | d58261f35a22b7fd84df83674e74d59c925461f5 | [
"MIT"
] | 11 | 2019-05-27T23:43:46.000Z | 2021-08-24T06:37:23.000Z | lib/zam_web.ex | praxis-of-nines/zam-search | d58261f35a22b7fd84df83674e74d59c925461f5 | [
"MIT"
] | 1 | 2020-05-17T04:51:52.000Z | 2021-03-25T15:38:33.000Z | lib/zam_web.ex | praxis-of-nines/zam-search | d58261f35a22b7fd84df83674e74d59c925461f5 | [
"MIT"
] | 2 | 2019-05-27T23:43:52.000Z | 2020-10-15T09:09:58.000Z | defmodule ZamWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use ZamWeb, :controller
use ZamWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: ZamWeb
import Plug.Conn
import ZamWeb.Gettext
alias ZamWeb.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/zam_web/templates",
namespace: ZamWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 1, get_flash: 2, view_module: 1]
# Include shared imports and aliases for views
unquote(view_helpers())
end
end
def live_view do
quote do
use Phoenix.LiveView,
layout: {ZamWeb.LayoutView, "live.html"}
unquote(view_helpers())
end
end
def live_component do
quote do
use Phoenix.LiveComponent
unquote(view_helpers())
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
import Phoenix.LiveView.Router
end
end
def channel do
quote do
use Phoenix.Channel
import ZamWeb.Gettext
end
end
defp view_helpers do
quote do
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
# Import LiveView helpers (live_render, live_component, live_patch, etc)
import Phoenix.LiveView.Helpers
# Import basic rendering functionality (render, render_layout, etc)
import Phoenix.View
import ZamWeb.ErrorHelpers
import ZamWeb.Gettext
alias ZamWeb.Router.Helpers, as: Routes
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 21.990196 | 83 | 0.676326 |
0866b60b5c6f9b755cad1b615364272531740ff2 | 363 | ex | Elixir | lib/otp_demo/accounts/otp_helpers.ex | moroz/phoenix-totp-demo | b07b1925a1116ecbade15cb37764e4aebf442b5b | [
"MIT"
] | null | null | null | lib/otp_demo/accounts/otp_helpers.ex | moroz/phoenix-totp-demo | b07b1925a1116ecbade15cb37764e4aebf442b5b | [
"MIT"
] | null | null | null | lib/otp_demo/accounts/otp_helpers.ex | moroz/phoenix-totp-demo | b07b1925a1116ecbade15cb37764e4aebf442b5b | [
"MIT"
] | null | null | null | defmodule OtpDemo.OtpHelpers do
alias OtpDemo.Accounts.User
def subject_for_otpauth(%User{} = user) do
"OtpDemo:#{user.email}"
end
def otpauth_uri_for_user(%User{} = user) do
secret = NimbleTOTP.secret()
uri =
user
|> subject_for_otpauth()
|> NimbleTOTP.otpauth_uri(secret, issuer: "OtpDemo")
{secret, uri}
end
end
| 19.105263 | 58 | 0.658402 |
0866b6c5a0bef403e849752753b439fd3c99259b | 702 | ex | Elixir | lib/mango_web/gettext.ex | jacruzca/mango | 9978dc609ed1fc86aa40386e35fae06cb95f80fc | [
"MIT"
] | 121 | 2017-06-24T10:33:38.000Z | 2022-03-17T16:02:16.000Z | lib/mango_web/gettext.ex | jacruzca/mango | 9978dc609ed1fc86aa40386e35fae06cb95f80fc | [
"MIT"
] | 6 | 2017-06-24T22:48:21.000Z | 2018-03-17T01:20:32.000Z | lib/mango_web/gettext.ex | jacruzca/mango | 9978dc609ed1fc86aa40386e35fae06cb95f80fc | [
"MIT"
] | 29 | 2017-07-03T04:09:15.000Z | 2022-03-02T15:45:49.000Z | defmodule MangoWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import MangoWeb.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :mango
end
| 28.08 | 72 | 0.678063 |
0866cfcdb538dcd046c6600b00c28c2e45948c49 | 3,288 | ex | Elixir | web/channels/document_channel.ex | Microflow/how-can-we-fix | 62f84e1c1d69a84d81cfbfcb08e2733165aa4ec6 | [
"MIT"
] | 93 | 2017-08-17T13:52:53.000Z | 2022-02-06T17:28:35.000Z | web/channels/document_channel.ex | rudi-c/alchemy-book | 979e1f08711eb3449913be47cddf1969beff518c | [
"MIT"
] | 6 | 2017-10-07T22:55:46.000Z | 2019-04-03T20:05:30.000Z | web/channels/document_channel.ex | Microflow/how-can-we-fix | 62f84e1c1d69a84d81cfbfcb08e2733165aa4ec6 | [
"MIT"
] | 11 | 2017-08-17T13:54:54.000Z | 2022-01-14T09:53:58.000Z | defmodule AlchemyBook.DocumentChannel do
require Logger
use AlchemyBook.Web, :channel
alias AlchemyBook.Presence
alias AlchemyBook.User
alias AlchemyBook.Document
alias AlchemyBook.DocumentRegistry
alias AlchemyBook.DocumentSession
intercept ["change"]
# Joins the channel for the document identified by `slug`: resolves the
# document id, requests a CRDT site id for this user from the session,
# stashes both in socket assigns, and schedules :after_join work.
def join("documents:" <> slug, _params, socket) do
  {:ok, [document_id]} = Document.id_from_slug(slug)

  session = DocumentRegistry.lookup(document_id)
  site_id = DocumentSession.request_site_for_user(session, socket.assigns.user_id)

  socket =
    socket
    |> assign(:document_id, document_id)
    |> assign(:site_id, site_id)

  # Defer initial-state push until after the join reply has been sent.
  send(self(), :after_join)
  {:ok, socket}
end
# Applies an incoming CRDT change to the shared document session, then
# rebroadcasts it tagged with the sender's site id and lamport timestamp.
# The originator is filtered out again in handle_out/3.
def handle_in("change", params, socket) do
  change = parse_change(params["change"])

  socket.assigns.document_id
  |> DocumentRegistry.lookup()
  |> DocumentSession.update(change)

  broadcast!(socket, "change", %{
    siteId: socket.assigns.site_id,
    change: params["change"],
    lamport: params["lamport"]
  })

  {:reply, :ok, socket}
end
# Records this user's new cursor position in the Presence metadata so
# other clients can render remote cursors.
def handle_in("cursor", params, socket) do
  new_cursor = %{line: params["line"], ch: params["ch"]}

  Presence.update(socket, socket.assigns.site_id, fn meta ->
    %{meta | cursor: new_cursor}
  end)

  {:reply, :ok, socket}
end
# Outgoing "change" filter: every subscriber except the site that
# originated the change receives the broadcast.
def handle_out("change", payload, socket) do
  originator? = payload[:siteId] == socket.assigns.site_id

  if not originator?, do: push(socket, "change", payload)

  {:noreply, socket}
end
# Invoked when this member's channel process shuts down. If this was the
# last tracked connection on the document, release the in-memory session.
#
# Fixes: `Dict.size/1` (the Dict module is deprecated) replaced with
# `map_size/1` — `Presence.list/1` was already being treated as a map by
# the original code; the unused `reason` is underscored to silence the
# compiler warning.
def terminate(_reason, socket) do
  Logger.info "User #{socket.assigns.user_id} leaving document #{socket.assigns.document_id}"
  if map_size(Presence.list(socket)) <= 1 do
    # Last connection
    DocumentRegistry.close(socket.assigns.document_id)
  end
end
# Post-join work: sends the current CRDT state (JSON-ready) plus this
# user's site id to the client, then registers the user in Presence.
def handle_info(:after_join, socket) do
  session = DocumentRegistry.lookup(socket.assigns.document_id)

  init_value =
    session
    |> DocumentSession.get()
    |> Document.crdt_to_json_ready()

  push(socket, "init", %{state: init_value, site: socket.assigns.site_id})
  handle_presence(socket)
  {:noreply, socket}
end
# Registers this connection in Phoenix Presence (with an assigned color,
# empty cursor, join timestamp and user info) and pushes the full
# presence state to the joining client.
#
# Fixes: deprecated `Dict.size/1` replaced with `map_size/1` (the value
# was already used as a map), and the log message's dangling `", "`
# trailer is removed.
def handle_presence(socket) do
  document_id = socket.assigns.document_id
  user_id = socket.assigns.user_id

  color =
    DocumentRegistry.lookup(document_id)
    |> DocumentSession.request_color_for_user(user_id)

  Presence.track(socket, socket.assigns.site_id, %{
    color: color,
    cursor: nil,
    online_at: :os.system_time(:milli_seconds),
    user_id: user_id,
    username: Repo.get!(User, user_id).username
  })

  Logger.info "Document #{document_id} now has " <>
              "#{map_size(Presence.list(socket))} users"

  push socket, "presence_state", Presence.list(socket)
end
# Converts a JSON change payload ([type, char-map]) into the internal
# representation: each position entry becomes a {digit, site} tuple and
# the character value is keyed by its {position, lamport} identifier.
defp parse_change([type, char]) do
  digits =
    Enum.map(char["position"], fn %{"digit" => d, "site" => s} -> {d, s} end)

  [type, {{digits, char["lamport"]}, char["value"]}]
end
end | 30.165138 | 99 | 0.599148 |
0866d817d3c3ef4eba5d15e829c85bf289ab946b | 2,757 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/target_vpn_gateway_aggregated_list.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/target_vpn_gateway_aggregated_list.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/target_vpn_gateway_aggregated_list.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.TargetVpnGatewayAggregatedList do
  @moduledoc """
  An aggregated list of TargetVpnGateway resources, grouped by scope.

  ## Attributes

  - id (String.t): [Output Only] Unique identifier for the resource; defined by the server. Defaults to: `null`.
  - items (%{optional(String.t) => TargetVpnGatewaysScopedList}): A list of TargetVpnGateway resources. Defaults to: `null`.
  - kind (String.t): [Output Only] Type of resource. Always compute#targetVpnGateway for target VPN gateways. Defaults to: `null`.
  - nextPageToken (String.t): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. Defaults to: `null`.
  - selfLink (String.t): [Output Only] Server-defined URL for this resource. Defaults to: `null`.
  - warning (AcceleratorTypeAggregatedListWarning): Defaults to: `null`.
  """

  # Generated-model plumbing: `use GoogleApi.Gax.ModelBase` supplies the
  # struct definition, the `field` macro, and decode/encode helpers
  # shared by all generated API models.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :id => any(),
          :items => map(),
          :kind => any(),
          :nextPageToken => any(),
          :selfLink => any(),
          :warning => GoogleApi.Compute.V1.Model.AcceleratorTypeAggregatedListWarning.t()
        }

  field(:id)
  # type: :map — values of the items map are TargetVpnGatewaysScopedList models.
  field(:items, as: GoogleApi.Compute.V1.Model.TargetVpnGatewaysScopedList, type: :map)
  field(:kind)
  field(:nextPageToken)
  field(:selfLink)
  field(:warning, as: GoogleApi.Compute.V1.Model.AcceleratorTypeAggregatedListWarning)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.TargetVpnGatewayAggregatedList do
  alias GoogleApi.Compute.V1.Model.TargetVpnGatewayAggregatedList

  # Delegate to the model's generated decode/2.
  def decode(value, options), do: TargetVpnGatewayAggregatedList.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.TargetVpnGatewayAggregatedList do
  alias GoogleApi.Gax.ModelBase

  # Delegate to the shared generated-model encoder.
  def encode(value, options), do: ModelBase.encode(value, options)
end
| 43.761905 | 381 | 0.739935 |
0866dad715e679d81f4bcda1e141cdaff719cbd0 | 142 | ex | Elixir | aula2/lib/file_reader.ex | cenezaraujo/unifor_prog_funcional | 08b5d75c535b10b7dca8ac26efc1dba14fe2a4f1 | [
"Apache-2.0"
] | null | null | null | aula2/lib/file_reader.ex | cenezaraujo/unifor_prog_funcional | 08b5d75c535b10b7dca8ac26efc1dba14fe2a4f1 | [
"Apache-2.0"
] | null | null | null | aula2/lib/file_reader.ex | cenezaraujo/unifor_prog_funcional | 08b5d75c535b10b7dca8ac26efc1dba14fe2a4f1 | [
"Apache-2.0"
] | null | null | null | defmodule FileReader do
def parse_data(file_name) do
file_name
|> File.read!
|> String.split(~r{\s}, trim: true)
end
end
| 15.777778 | 41 | 0.626761 |
0866e2c92e7ad659c58468aeb339f8e1086873b2 | 1,099 | ex | Elixir | web/controllers/user_controller.ex | Microflow/how-can-we-fix | 62f84e1c1d69a84d81cfbfcb08e2733165aa4ec6 | [
"MIT"
] | 93 | 2017-08-17T13:52:53.000Z | 2022-02-06T17:28:35.000Z | web/controllers/user_controller.ex | rudi-c/alchemy-book | 979e1f08711eb3449913be47cddf1969beff518c | [
"MIT"
] | 6 | 2017-10-07T22:55:46.000Z | 2019-04-03T20:05:30.000Z | web/controllers/user_controller.ex | Microflow/how-can-we-fix | 62f84e1c1d69a84d81cfbfcb08e2733165aa4ec6 | [
"MIT"
] | 11 | 2017-08-17T13:54:54.000Z | 2022-01-14T09:53:58.000Z | defmodule AlchemyBook.UserController do
use AlchemyBook.Web, :controller
plug :authenticate_user when action in [:index, :show]
alias AlchemyBook.User
def new(conn, _params) do
changeset = User.changeset(%User{})
render conn, "new.html", changeset: changeset
end
def create(conn, %{"user" => user_params}) do
changeset = User.registration_changeset(%User{}, user_params)
case Repo.insert(changeset) do
{:ok, user} ->
conn
|> AlchemyBook.Auth.login(user)
|> put_flash(:info, "#{user.name} created!")
|> redirect(to: document_path(conn, :index))
{:error, changeset} ->
render(conn, "new.html", changeset: changeset)
end
end
def create_anonymous() do
unique_user = "Guest" <> to_string(:rand.uniform(99999));
if Repo.get_by(User, username: unique_user) == nil do
anonymous = %AlchemyBook.User{name: unique_user, username: unique_user, anonymous: true}
Repo.insert!(anonymous)
else
# Generate random usernames until we find an unique one
create_anonymous()
end
end
end
| 30.527778 | 94 | 0.66242 |
086711811131d73bd958ea89b0a3f438456c774a | 132 | exs | Elixir | filas/test/filas_test.exs | henriquecido/ChallengesRocketseat | 9b85fefc38692dfcb9b49d07873b61555a91400d | [
"MIT"
] | null | null | null | filas/test/filas_test.exs | henriquecido/ChallengesRocketseat | 9b85fefc38692dfcb9b49d07873b61555a91400d | [
"MIT"
] | null | null | null | filas/test/filas_test.exs | henriquecido/ChallengesRocketseat | 9b85fefc38692dfcb9b49d07873b61555a91400d | [
"MIT"
] | null | null | null | defmodule FilasTest do
use ExUnit.Case
doctest Filas
test "greets the world" do
assert Filas.hello() == :world
end
end
| 14.666667 | 34 | 0.69697 |
086711a3098b88da0120b31c9baf2e853e873382 | 4,325 | ex | Elixir | lib/images.ex | alexanderttalvarez/igdb | b5b3fa661ff48a31b97e10b1c78b3c3f38d0c342 | [
"Apache-2.0"
] | null | null | null | lib/images.ex | alexanderttalvarez/igdb | b5b3fa661ff48a31b97e10b1c78b3c3f38d0c342 | [
"Apache-2.0"
] | null | null | null | lib/images.ex | alexanderttalvarez/igdb | b5b3fa661ff48a31b97e10b1c78b3c3f38d0c342 | [
"Apache-2.0"
] | null | null | null | defmodule Igdb.Images do
@moduledoc """
Modules to retrieve the images from the API
"""
@images_url "http://images.igdb.com/igdb/image/upload/t_"
@images_sizes ["cover_small",
"screenshot_med",
"cover_big",
"logo_med",
"screenshot_big",
"screenshot_huge",
"thumb",
"micro",
"720p",
"1080p"]
@doc """
Retrieves the images from a single game
## Examples
iex> images = Igdb.Images.get_game_images(113113)
iex> images |> Enum.member?(%{
...> "alpha_channel" => false,
...> "animated" => false,
...> "game" => 113113,
...> "height" => 902,
...> "id" => 268575,
...> "image_id" => "sc5r8f",
...> "url" => "//images.igdb.com/igdb/image/upload/t_thumb/sc5r8f.jpg",
...> "urls" => %{
...> "1080p" => "http://images.igdb.com/igdb/image/upload/t_1080p/sc5r8f.jpg",
...> "720p" => "http://images.igdb.com/igdb/image/upload/t_720p/sc5r8f.jpg",
...> "cover_big" => "http://images.igdb.com/igdb/image/upload/t_cover_big/sc5r8f.jpg",
...> "cover_small" => "http://images.igdb.com/igdb/image/upload/t_cover_small/sc5r8f.jpg",
...> "logo_med" => "http://images.igdb.com/igdb/image/upload/t_logo_med/sc5r8f.jpg",
...> "micro" => "http://images.igdb.com/igdb/image/upload/t_micro/sc5r8f.jpg",
...> "screenshot_big" => "http://images.igdb.com/igdb/image/upload/t_screenshot_big/sc5r8f.jpg",
...> "screenshot_huge" => "http://images.igdb.com/igdb/image/upload/t_screenshot_huge/sc5r8f.jpg",
...> "screenshot_med" => "http://images.igdb.com/igdb/image/upload/t_screenshot_med/sc5r8f.jpg",
...> "thumb" => "http://images.igdb.com/igdb/image/upload/t_thumb/sc5r8f.jpg"
...> },
...> "width" => 1600
...> })
true
"""
@spec get_game_images(Number, list) :: {:error, String.t} | list
def get_game_images(game_id, sizes \\ @images_sizes),
do: get_game_images(game_id, sizes, Igdb.Games.get(%{"id" => game_id}, ["screenshots.*"]))
defp get_game_images(_game_id, _sizes, {:error, error}),
do: {:error, error}
defp get_game_images(_game_id, _sizes, []),
do: {:error, "No game found"}
defp get_game_images(_game_id, sizes, game),
do: get_game_images(nil, sizes, game, List.first(game) |> Map.get("screenshots"))
defp get_game_images(_game_id, _sizes, _game, screenshots) when screenshots in ["", [""], [], nil],
do: []
defp get_game_images(_game_id, sizes, _game, screenshots),
do: screenshots
|> Enum.map(fn image ->
[_basename, ext] = "#{image["url"] |> Path.basename}" |> String.split(".")
image
|> Map.put("urls",
Enum.reduce(sizes, %{}, fn size, acc ->
Map.put(acc, size, "#{@images_url}#{size}/#{image["image_id"]}.#{ext}")
end)
)
end)
@doc """
Retrieves and downloads the images of a particular game using the game ID
"""
def download_game_images(game_id, path \\ File.cwd! <> "/images"),
do: download_game_images(game_id, path, get_game_images(game_id))
defp download_game_images(_game_id, _path, error) when is_tuple(error),
do: error
defp download_game_images(_game_id, path, images) do
# Checking if the path exists, to create it beforehand
if !File.exists?(path) do
File.mkdir!(path)
end
# Downloading every image in the path folder, using its original name
Enum.map(images, fn image ->
download_game_images(nil, path, nil, image)
end)
end
defp download_game_images(_game_id, path, _images, image) do
{image["image_id"],
image["urls"]
|> Enum.map(fn {size, url} ->
%HTTPoison.Response{body: body, status_code: status_code} = HTTPoison.get!(url)
download_game_images(nil, path, nil, nil, {size, url, body, status_code})
end)}
end
defp download_game_images(_game_id, path, _images, _image, {size, url, body, status_code}) when status_code in [200] do
[basename, ext] = "#{url |> Path.basename}" |> String.split(".")
{size, File.write!("#{path}/#{basename}_#{size}.#{ext}", body)}
end
defp download_game_images(_game_id, _path, _images, _image, {size, _url, _body, status_code}),
do: {size, {:error, status_code}}
end
| 36.344538 | 121 | 0.609711 |
0867149e93431b96b29bd846b1e25f5fabcd3cfd | 1,601 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2beta1/model/google_privacy_dlp_v2beta1_projection.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2beta1/model/google_privacy_dlp_v2beta1_projection.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2beta1/model/google_privacy_dlp_v2beta1_projection.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.DLP.V2beta1.Model.GooglePrivacyDlpV2beta1Projection do
  @moduledoc """
  A representation of a Datastore property in a projection.

  Auto-generated by the Swagger code generator — do not edit manually.

  ## Attributes

  - property (GooglePrivacyDlpV2beta1PropertyReference): The property to project. Defaults to: `null`.
  """

  # Quoted-atom key exactly as emitted by the generator; equivalent to :property.
  defstruct [
    :"property"
  ]
end
defimpl Poison.Decoder, for: GoogleApi.DLP.V2beta1.Model.GooglePrivacyDlpV2beta1Projection do
  import GoogleApi.DLP.V2beta1.Deserializer

  # Rebuilds the nested PropertyReference struct from the decoded
  # `"property"` JSON value via the shared deserialize/5 helper.
  def decode(value, options) do
    value
    |> deserialize(:"property", :struct, GoogleApi.DLP.V2beta1.Model.GooglePrivacyDlpV2beta1PropertyReference, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.DLP.V2beta1.Model.GooglePrivacyDlpV2beta1Projection do
  # Serialises the struct for Poison; `serialize_non_nil` presumably
  # omits nil fields (it lives in the shared Deserializer module —
  # confirm there).
  def encode(value, options) do
    GoogleApi.DLP.V2beta1.Deserializer.serialize_non_nil(value, options)
  end
end
| 33.354167 | 119 | 0.772642 |
08677a7f914b5818b5325b61bc84380f24c61fac | 836 | ex | Elixir | cowboy_rest_example/lib/resource/single_todo_resource.ex | ErikMejerHansen/REST-service-examples | 88358f74b490f718b7c6882fc1265a2b360b8ef2 | [
"Unlicense"
] | 1 | 2019-04-22T13:39:19.000Z | 2019-04-22T13:39:19.000Z | cowboy_rest_example/lib/resource/single_todo_resource.ex | ErikMejerHansen/REST-service-examples | 88358f74b490f718b7c6882fc1265a2b360b8ef2 | [
"Unlicense"
] | 1 | 2017-10-06T11:54:24.000Z | 2017-10-09T09:29:04.000Z | cowboy_rest_example/lib/resource/single_todo_resource.ex | ErikMejerHansen/REST-service-examples | 88358f74b490f718b7c6882fc1265a2b360b8ef2 | [
"Unlicense"
defmodule CowboyRestExample.SingleTodoResource do
  @moduledoc """
  Cowboy REST handler for a single todo item, addressed by the `:id`
  path binding. Shared callback defaults come from
  `CowboyRestExample.Defaults`.
  """

  use CowboyRestExample.Defaults

  # Read-only resource: no POST/PUT/DELETE.
  def allowed_methods(request, state) do
    {["GET", "HEAD", "OPTIONS"], request, state}
  end

  # Looks up the todo referenced by the `:id` binding. A non-numeric or
  # unknown id yields `false` (a 404 from cowboy_rest) instead of the
  # crash that `String.to_integer/1` caused; the leftover `IO.inspect`
  # debug output is gone as well.
  def resource_exists(request, state) do
    {id, request} = :cowboy_req.binding(:id, request)

    with {int_id, ""} <- Integer.parse(id),
         %{} = todo <- Repo.get(CowboyRestExample.Todo, int_id) do
      {true, request, Map.put(state, :todo, todo)}
    else
      _ -> {false, request, state}
    end
  end

  # Renders the todo found by `resource_exists/2` as JSON.
  def to_json(request, state) do
    todo =
      Poison.encode!(%{
        todo: state.todo.subject,
        created: state.todo.inserted_at
      })

    {todo, request, state}
  end

  # Only callers whose state grants the "Everything" right may access
  # the resource; everyone else receives 403.
  def forbidden(request, %{rights: "Everything"} = state), do: {false, request, state}
  def forbidden(request, state), do: {true, request, state}
end
| 26.967742 | 86 | 0.639952 |
08677e999c43adeefd942f68de73d87bae3efb0e | 297 | exs | Elixir | server/priv/repo/migrations/20220506230516_users.exs | thooton/secure-notepad | e780c660a9ea6e425a0e06ff1f9469f527251815 | [
"MIT"
] | null | null | null | server/priv/repo/migrations/20220506230516_users.exs | thooton/secure-notepad | e780c660a9ea6e425a0e06ff1f9469f527251815 | [
"MIT"
] | null | null | null | server/priv/repo/migrations/20220506230516_users.exs | thooton/secure-notepad | e780c660a9ea6e425a0e06ff1f9469f527251815 | [
"MIT"
defmodule SecureNotepadServer.Database.Repo.Migrations.Users do
  @moduledoc false
  use Ecto.Migration

  # Idempotently creates the users table: username is the primary key,
  # with the password, the notes blob, and the salt used for the notes.
  def change do
    create_if_not_exists table("users") do
      add(:username, :string, primary_key: true)
      add(:password, :string)
      add(:notes, :string)
      add(:note_salt, :binary)
    end
  end
end
| 22.846154 | 63 | 0.693603 |
0867904c80e8c1df658eb5f00a1b8f9e2126c57f | 912 | ex | Elixir | test/support/schemas/people_struct_with_prepare_option.ex | wmorris92/algoliax | b05d5861e694008f51289d0af7a9a4475367d1e0 | [
"BSD-2-Clause"
] | null | null | null | test/support/schemas/people_struct_with_prepare_option.ex | wmorris92/algoliax | b05d5861e694008f51289d0af7a9a4475367d1e0 | [
"BSD-2-Clause"
] | null | null | null | test/support/schemas/people_struct_with_prepare_option.ex | wmorris92/algoliax | b05d5861e694008f51289d0af7a9a4475367d1e0 | [
"BSD-2-Clause"
defmodule Algoliax.Schemas.PeopleStructWithPrepareObject do
  @moduledoc """
  Test-support schema exercising the indexer's `:prepare_object` option:
  a plain struct (no Ecto) indexed via `Algoliax.Indexer`, with
  `prepare/2` applied to every object before it is sent.
  """

  use Algoliax.Indexer,
    index_name: :algoliax_people_with_prepare_object_struct,
    object_id: :reference,
    prepare_object: &__MODULE__.prepare/2,
    algolia: [
      attributes_for_faceting: ["age"],
      searchable_attributes: ["full_name"],
      # NOTE(review): ranks on "update_at" while the attribute below is
      # named :updated_at — likely a typo; confirm intended behavior.
      custom_ranking: ["desc(update_at)"]
    ]

  defstruct reference: nil, last_name: nil, first_name: nil, age: nil

  # Plain attributes copied straight off the struct.
  attributes([:first_name, :last_name, :age])

  # Fixed value: the given instant as a Unix timestamp.
  attribute(:updated_at, ~U[2019-01-01 00:00:00Z] |> DateTime.to_unix())

  # Computed attributes; `model` is presumably bound by the `attribute`
  # macro (it is not defined locally) — confirm in Algoliax.
  attribute :full_name do
    Map.get(model, :first_name, "") <> " " <> Map.get(model, :last_name, "")
  end

  attribute :nickname do
    Map.get(model, :first_name, "") |> String.downcase()
  end

  # Applied to every object before indexing (see :prepare_object above).
  def prepare(object, _) do
    object
    |> Map.put(:prepared, true)
  end

  # Only people strictly older than 50 are indexed.
  def to_be_indexed?(model) do
    model.age > 50
  end
end
| 24.648649 | 76 | 0.678728 |
08679d62a9d7741ed9d73a401c869b29e08ec873 | 763 | exs | Elixir | integration_test/sojourn/protector_test.exs | ericentin/db_connection | 5a11383dd250e77b8475fddba92e26e2bf9efad8 | [
"Apache-2.0"
] | null | null | null | integration_test/sojourn/protector_test.exs | ericentin/db_connection | 5a11383dd250e77b8475fddba92e26e2bf9efad8 | [
"Apache-2.0"
] | null | null | null | integration_test/sojourn/protector_test.exs | ericentin/db_connection | 5a11383dd250e77b8475fddba92e26e2bf9efad8 | [
"Apache-2.0"
defmodule TestProtector do
  use ExUnit.Case, async: true

  alias TestPool, as: P
  alias TestAgent, as: A

  test "protector drops requests" do
    # Scripted agent: a single successful connect and nothing else.
    stack = [{:ok, :state}]
    {:ok, agent} = A.start_link(stack)

    # Aggressive protector settings: 5ms queue timeout and a 0ms
    # protector target, re-evaluated every millisecond.
    opts = [agent: agent, parent: self(), queue_timeout: 5, protector: true,
      protector_update: 1, protector_interval: 1, protector_target: 0]
    {:ok, pool} = P.start_link(opts)

    P.run(pool, fn(_) ->
      # While the outer run holds a connection, a nested checkout must
      # fail (presumably the pool has a single connection — see TestPool).
      assert_raise DBConnection.ConnectionError,
        fn() -> P.run(pool, fn(_) -> :ok end) end

      # Give the protector time to observe the sustained queue delay...
      :timer.sleep(200)

      # ...after which it sheds requests immediately ("after 0ms").
      assert_raise DBConnection.ConnectionError, ~r"after 0ms",
        fn() -> P.run(pool, fn(_) -> :ok end, [protector: true]) end
    end)

    # Exactly one connect call reached the scripted agent.
    assert [connect: [_]] = A.record(agent)
  end
end
| 27.25 | 76 | 0.613368 |
086820b5d41fcedc33f41396eeb2fa92cff21305 | 482 | ex | Elixir | lib/battle_box/installed_games.ex | GrantJamesPowell/battle_box | 301091955b68cd4672f6513d645eca4e3c4e17d0 | [
"Apache-2.0"
] | 2 | 2020-10-17T05:48:49.000Z | 2020-11-11T02:34:15.000Z | lib/battle_box/installed_games.ex | FlyingDutchmanGames/battle_box | 301091955b68cd4672f6513d645eca4e3c4e17d0 | [
"Apache-2.0"
] | 3 | 2020-05-18T05:52:21.000Z | 2020-06-09T07:24:14.000Z | lib/battle_box/installed_games.ex | FlyingDutchmanGames/battle_box | 301091955b68cd4672f6513d645eca4e3c4e17d0 | [
"Apache-2.0"
defmodule BattleBox.InstalledGames do
  @moduledoc """
  Compile-time registry of the game modules installed on this node,
  taken from the `:battle_box, BattleBox.GameEngine, :games` config.
  """

  import BattleBox.Utilities.Humanize, only: [kebabify: 1]

  # Read at compile time; missing config is a hard compile error.
  @games Application.get_env(:battle_box, BattleBox.GameEngine)[:games] ||
           raise("Must set the :battle_box, BattleBox.GameEngine, :games config value")

  def installed_games, do: @games

  # Generates one game_type_name_to_module/1 clause per identifier a
  # game may be referenced by: the module itself, its name atom, the
  # name as a string, and the kebab-cased title (duplicates removed).
  for game <- @games,
      id <- Enum.uniq([game, game.name, to_string(game.name), kebabify(game.title)]) do
    def game_type_name_to_module(unquote(id)), do: unquote(game)
  end
end
| 34.428571 | 87 | 0.717842 |
08684330f0880d1b1f926acf31649d0b291200b2 | 2,799 | exs | Elixir | test/jumubase/accounts/user_test.exs | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
] | 2 | 2019-01-20T07:03:30.000Z | 2019-04-11T10:20:14.000Z | test/jumubase/accounts/user_test.exs | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
] | 6 | 2018-09-20T05:52:14.000Z | 2019-04-23T19:27:39.000Z | test/jumubase/accounts/user_test.exs | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
defmodule Jumubase.UserTest do
  use Jumubase.DataCase

  alias Jumubase.Accounts.User

  describe "create_changeset/2" do
    test "is valid with valid attributes" do
      params = valid_user_params()
      changeset = User.create_changeset(%User{}, params)
      assert changeset.valid?
    end

    test "is invalid without a given name" do
      params = %{valid_user_params() | given_name: nil}
      changeset = User.create_changeset(%User{}, params)
      refute changeset.valid?
    end

    test "is invalid without a family name" do
      params = %{valid_user_params() | family_name: nil}
      changeset = User.create_changeset(%User{}, params)
      refute changeset.valid?
    end

    test "is invalid with an invalid email" do
      # 157 + "@b.c" = 161 characters — presumably just past the schema's
      # length limit; confirm against the User changeset validations.
      with_spaces = "a @b.c"
      no_at_sign = "ab.c"
      too_long = String.duplicate("a", 157) <> "@b.c"
      for email <- [nil, with_spaces, no_at_sign, too_long] do
        params = %{valid_user_params() | email: email}
        changeset = User.create_changeset(%User{}, params)
        refute changeset.valid?
      end
    end

    test "validates email uniqueness" do
      # Persist a user first, then attempt to reuse the same email.
      %{email: email} = insert(:user)
      params = %{valid_user_params() | email: email}
      changeset = User.create_changeset(%User{}, params)
      assert "has already been taken" in errors_on(changeset).email
    end

    test "is invalid with an invalid password" do
      # 7 and 73 characters — presumably one below/above the allowed
      # range; confirm against the User changeset validations.
      too_short = String.duplicate("x", 7)
      too_long = String.duplicate("x", 73)
      for pwd <- [nil, too_short, too_long] do
        params = %{valid_user_params() | password: pwd}
        changeset = User.create_changeset(%User{}, params)
        refute changeset.valid?
      end
    end

    test "hashes and clears the password for valid attributes" do
      params = valid_user_params()
      changeset = User.create_changeset(%User{}, params)
      # The plaintext password must never survive in the changeset.
      assert is_binary(changeset.changes.hashed_password)
      refute Map.has_key?(changeset.changes, :password)
    end

    test "does not hash and clear the password for invalid attributes" do
      # Hashing is skipped when validation fails, so the (invalid)
      # changeset still carries the raw password change.
      params = %{valid_user_params() | given_name: nil}
      changeset = User.create_changeset(%User{}, params)
      refute Map.has_key?(changeset.changes, :hashed_password)
      assert is_binary(changeset.changes.password)
    end

    test "is invalid with an invalid role" do
      for role <- [nil, "unknown"] do
        params = %{valid_user_params() | role: role}
        changeset = User.create_changeset(%User{}, params)
        refute changeset.valid?
      end
    end
  end

  describe "inspect/2" do
    test "does not include password" do
      # The User struct's Inspect output must redact the password so it
      # cannot leak into logs.
      refute inspect(%User{password: "123456"}) =~ "password: \"123456\""
    end
  end

  # Private helpers

  defp valid_user_params do
    params_for(:user, password: "password")
  end
end
| 31.449438 | 73 | 0.656306 |
086845966d11600b8da30d0e78fa5b9b53cbf05a | 1,796 | ex | Elixir | lib/slack_autolinker.ex | wojtekmach/slack_autolinker | a3623e406439f387a19456055644263993c974ff | [
"MIT"
] | 3 | 2017-07-28T14:19:20.000Z | 2021-02-09T15:01:25.000Z | lib/slack_autolinker.ex | socialpaymentsbv/slack_autolinker | a3623e406439f387a19456055644263993c974ff | [
"MIT"
] | 7 | 2017-02-27T08:54:41.000Z | 2020-03-18T10:18:49.000Z | lib/slack_autolinker.ex | socialpaymentsbv/slack_autolinker | a3623e406439f387a19456055644263993c974ff | [
"MIT"
defmodule SlackAutolinker do
  @moduledoc """
  Expands shorthand issue references such as `elixir#123` found in text
  into Slack-formatted GitHub links annotated with the issue title.
  """

  # Aliases that are always available; caller-supplied aliases win on
  # conflict (see Map.merge/2 in extract/2).
  @default_repo_aliases %{
    "elixir" => "elixir-lang/elixir",
    "ecto" => "elixir-ecto/ecto",
    "phoenix" => "phoenixframework/phoenix",
  }

  # Adapter module resolved at compile time; must implement get_issue/3.
  @github Application.get_env(:slack_autolinker, :github_adapter)

  alias SlackAutolinker.GitHub.Issue

  require Logger

  @doc """
  Builds the reply for `text`, or `nil` when it contains no known issue
  references. Each reference becomes a link followed by the issue title
  (fetched concurrently from GitHub).
  """
  def reply(text, repo_aliases) do
    case extract(text, repo_aliases) do
      [] ->
        nil
      links ->
        links_with_titles(links)
    end
  end

  # Fetches every issue title concurrently and renders one line per link.
  defp links_with_titles(links) do
    issues =
      for {_, repo, number} <- links do
        [owner, repo] = String.split(repo, "/", trim: true)
        {owner, repo, number}
      end

    # Task.async/1 returns %Task{} structs (not pids), which is what
    # Task.await/1 expects below.
    tasks =
      for {owner, repo, number} <- issues do
        Task.async(fn -> @github.get_issue(owner, repo, number) end)
      end

    links
    |> Enum.zip(tasks)
    |> Enum.map(fn {link, task} ->
      case Task.await(task) do
        {:ok, %Issue{title: title}} ->
          github_link(link) <> " - " <> title
        {:error, reason} ->
          Logger.warn("error: #{inspect reason}")
          github_link(link) <> " (error: couldn't fetch title)"
      end
    end)
    |> Enum.join("\n")
  end

  @doc false
  # Extracts `{original_text, "owner/repo", issue_number}` triples for
  # every `alias#number` occurrence whose alias is known. Matching is
  # case-insensitive (input is downcased) and duplicates are removed.
  def extract(text, repo_aliases \\ %{}) do
    repo_aliases = Map.merge(@default_repo_aliases, repo_aliases)

    # The hyphen now sits last in the character class so every regex
    # engine reads it as a literal rather than a range delimiter (the
    # old `[a-z0-9-_\.]` placement was ambiguous).
    repo_pattern = "([a-z]+[a-z0-9_.-]*[a-z0-9]*)"

    Regex.scan(~r/#{repo_pattern}#(\d+)/i, String.downcase(text))
    |> Enum.flat_map(fn [orig, repo_alias, number] ->
      case Map.fetch(repo_aliases, repo_alias) do
        {:ok, repo} -> [{orig, repo, String.to_integer(number)}]
        :error -> []
      end
    end)
    |> Enum.uniq()
  end

  defp github_link({orig, repo, number}),
    do: "<#{github_url(repo, number)}|#{orig}>"

  defp github_url(repo, number),
    do: "https://github.com/#{repo}/issues/#{number}"
end
| 27.212121 | 68 | 0.589644 |
086871df2319a804c340b011869cdf79326d56ca | 93 | ex | Elixir | lib/bus_car_dsl/prefix.ex | elbow-jason/bus_car_dsl | a83a16b9c5c1c954b7b2284dd6a5636dbb7f4157 | [
"MIT"
] | null | null | null | lib/bus_car_dsl/prefix.ex | elbow-jason/bus_car_dsl | a83a16b9c5c1c954b7b2284dd6a5636dbb7f4157 | [
"MIT"
] | 2 | 2017-04-13T22:02:38.000Z | 2017-04-13T22:17:26.000Z | lib/bus_car_dsl/prefix.ex | elbow-jason/bus_car_dsl | a83a16b9c5c1c954b7b2284dd6a5636dbb7f4157 | [
"MIT"
defmodule BusCarDsl.Prefix do
  use BusCarDsl.Element

  # Registers a leaf rule named :prefix whose payload lives under
  # :value. `TreeLeaf.rule/2` is presumably made meaningful by
  # `use BusCarDsl.Element` — confirm in that module.
  TreeLeaf.rule(:prefix, :value)
end
| 13.285714 | 32 | 0.763441 |
0868ce5c088deed1b6767032a0d834a69d168464 | 3,097 | exs | Elixir | test/real_world_web/controllers/user_controller_test.exs | mihkelal/mihkels-elixir-phoenix-realworld-example-app | f241093522b394c816e7bab8ccf5f18fdd854ac6 | [
"MIT"
] | null | null | null | test/real_world_web/controllers/user_controller_test.exs | mihkelal/mihkels-elixir-phoenix-realworld-example-app | f241093522b394c816e7bab8ccf5f18fdd854ac6 | [
"MIT"
] | 4 | 2021-03-23T19:01:21.000Z | 2021-03-30T18:27:24.000Z | test/real_world_web/controllers/user_controller_test.exs | mihkelal/mihkels-elixir-phoenix-realworld-example-app | f241093522b394c816e7bab8ccf5f18fdd854ac6 | [
"MIT"
defmodule RealWorldWeb.UserControllerTest do
  use RealWorldWeb.ConnCase

  alias RealWorld.Account
  alias RealWorld.Account.User

  @create_attrs %{
    bio: "some bio",
    email: "some email",
    image: "some image",
    password: "some password",
    username: "some username"
  }
  @update_attrs %{
    bio: "some updated bio",
    email: "some updated email",
    image: "some updated image",
    password: "some updated password",
    username: "some updated username"
  }
  @invalid_attrs %{bio: nil, email: nil, image: nil, password: nil, username: nil}

  # Creates a persisted user with the canonical create attributes.
  def fixture(:user) do
    {:ok, user} = Account.create_user(@create_attrs)
    user
  end

  setup %{conn: conn} do
    {:ok, conn: put_req_header(conn, "accept", "application/json")}
  end

  describe "index" do
    test "lists all users", %{conn: conn} do
      conn = get(conn, Routes.user_path(conn, :index))
      assert json_response(conn, 200)["data"] == []
    end
  end

  describe "create user" do
    test "renders user when data is valid", %{conn: conn} do
      conn = post(conn, Routes.user_path(conn, :create), user: @create_attrs)
      assert %{"id" => id} = json_response(conn, 201)["data"]

      conn = get(conn, Routes.user_path(conn, :show, id))

      # `^id` pins the id returned by the create request; the previous
      # unpinned match merely re-bound the variable and asserted nothing
      # (while also emitting an unused-variable warning).
      assert %{
               "id" => ^id,
               "bio" => "some bio",
               "email" => "some email",
               "image" => "some image",
               "password" => "some password",
               "username" => "some username"
             } = json_response(conn, 200)["data"]
    end

    test "renders errors when data is invalid", %{conn: conn} do
      conn = post(conn, Routes.user_path(conn, :create), user: @invalid_attrs)
      assert json_response(conn, 422)["errors"] != %{}
    end
  end

  describe "update user" do
    setup [:create_user]

    test "renders user when data is valid", %{conn: conn, user: %User{id: id} = user} do
      conn = put(conn, Routes.user_path(conn, :update, user), user: @update_attrs)
      assert %{"id" => ^id} = json_response(conn, 200)["data"]

      conn = get(conn, Routes.user_path(conn, :show, id))

      # Pinned here as well, for the same reason as in the create test.
      assert %{
               "id" => ^id,
               "bio" => "some updated bio",
               "email" => "some updated email",
               "image" => "some updated image",
               "password" => "some updated password",
               "username" => "some updated username"
             } = json_response(conn, 200)["data"]
    end

    test "renders errors when data is invalid", %{conn: conn, user: user} do
      conn = put(conn, Routes.user_path(conn, :update, user), user: @invalid_attrs)
      assert json_response(conn, 422)["errors"] != %{}
    end
  end

  describe "delete user" do
    setup [:create_user]

    test "deletes chosen user", %{conn: conn, user: user} do
      conn = delete(conn, Routes.user_path(conn, :delete, user))
      assert response(conn, 204)

      # The deleted user must no longer be retrievable.
      assert_error_sent 404, fn ->
        get(conn, Routes.user_path(conn, :show, user))
      end
    end
  end

  # Setup helper: merges a persisted user into the test context.
  defp create_user(_) do
    user = fixture(:user)
    %{user: user}
  end
end
| 29.495238 | 88 | 0.586697 |
0868e49878b0ce608e5536c96d0a76bb700e77f4 | 834 | ex | Elixir | lib/chameleon/util.ex | joydrive/chameleon | e314f57129a9d5db7c7526dcfeb15d82493e16a0 | [
"Apache-2.0"
] | 20 | 2018-05-21T17:06:06.000Z | 2022-02-02T18:25:44.000Z | lib/chameleon/util.ex | joydrive/chameleon | e314f57129a9d5db7c7526dcfeb15d82493e16a0 | [
"Apache-2.0"
] | 12 | 2018-05-24T19:04:11.000Z | 2022-01-24T22:28:30.000Z | lib/chameleon/util.ex | joydrive/chameleon | e314f57129a9d5db7c7526dcfeb15d82493e16a0 | [
"Apache-2.0"
defmodule Chameleon.Util do
  @moduledoc false

  @six_digit_hex ~r/^#?(?<val>[0-9A-Fa-f]{6})$/
  @three_digit_hex ~r/^#?(?<val>[0-9A-Fa-f]{3})$/

  def keyword_to_rgb_map, do: Chameleon.KeywordToRGB.load()

  def keyword_to_hex_map, do: Chameleon.KeywordToHex.load()

  def pantone_to_hex_map, do: Chameleon.PantoneToHex.load()

  # Guesses the colour model of a free-form string and wraps it in the
  # matching struct, trying in order: 6-digit hex, 3-digit hex (either
  # optionally "#"-prefixed), CSS keyword, then Pantone name.
  def derive_input_struct(string) do
    cond do
      captures = Regex.named_captures(@six_digit_hex, string) ->
        {:ok, Chameleon.Hex.new(captures["val"])}

      captures = Regex.named_captures(@three_digit_hex, string) ->
        {:ok, Chameleon.Hex.new(captures["val"])}

      Map.has_key?(keyword_to_hex_map(), string) ->
        {:ok, Chameleon.Keyword.new(string)}

      Map.has_key?(pantone_to_hex_map(), string) ->
        {:ok, Chameleon.Pantone.new(string)}

      true ->
        {:error, "The input could not be translated"}
    end
  end
end
| 23.828571 | 77 | 0.629496 |
086941f1b8547c29d1c01bf263987234c43a32fd | 1,725 | ex | Elixir | apps/site/lib/excluded_stops.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 42 | 2019-05-29T16:05:30.000Z | 2021-08-09T16:03:37.000Z | apps/site/lib/excluded_stops.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 872 | 2019-05-29T17:55:50.000Z | 2022-03-30T09:28:43.000Z | apps/site/lib/excluded_stops.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
defmodule ExcludedStops do
  @moduledoc """
  Determines which stops, if any, should be filtered out of a list of
  stops based on origin, route ID, and direction ID.
  """

  @braintree_stops [
    "place-brntn",
    "place-qamnl",
    "place-qnctr",
    "place-wlsta",
    "place-nqncy"
  ]

  @ashmont_stops [
    "place-asmnl",
    "place-smmnl",
    "place-fldcr",
    "place-shmnl"
  ]

  # Southbound Red Line: both branch termini are excluded as origins.
  def excluded_origin_stops(0, "Red", _all_stops), do: ["place-brntn", "place-asmnl"]

  # Westbound Green Line: all four branch termini are excluded.
  def excluded_origin_stops(0, "Green", _all_stops) do
    ["place-lake", "place-clmnl", "place-river", "place-hsmnl"]
  end

  def excluded_origin_stops(_direction_id, _route_id, []), do: []

  # Generic case: a trip cannot start at the route's final stop.
  def excluded_origin_stops(_direction_id, _route_id, all_stops) do
    last_stop = List.last(all_stops)
    [last_stop.id]
  end

  # Red Line: starting on one branch rules out the other branch's stops.
  def excluded_destination_stops("Red", origin_id) when origin_id in @braintree_stops,
    do: @ashmont_stops

  def excluded_destination_stops("Red", origin_id) when origin_id in @ashmont_stops,
    do: @braintree_stops

  def excluded_destination_stops("Green", origin_id) do
    stops_on_routes = GreenLine.stops_on_routes(0)

    # Stops reachable from the origin: the union of the stop sets of
    # every branch the origin lies on.
    reachable_ids =
      GreenLine.branch_ids()
      |> Enum.filter(&GreenLine.stop_on_route?(origin_id, &1, stops_on_routes))
      |> Enum.reduce(MapSet.new(), fn branch, acc ->
        MapSet.union(GreenLine.route_stops(branch, stops_on_routes), acc)
      end)

    all_ids =
      stops_on_routes
      |> GreenLine.all_stops()
      |> MapSet.new(& &1.id)

    # Everything on the Green Line that is not reachable is excluded.
    MapSet.difference(all_ids, reachable_ids)
  end

  def excluded_destination_stops(_route_id, _origin_id), do: []
end
| 26.538462 | 97 | 0.693913 |
08694a0fce9189bbd54c3d7d01c45fe553e90bb7 | 4,901 | ex | Elixir | lib/ex_admin/view_helpers.ex | IvanIvanoff/ex_admin | 550f748c3b8e18f02ea86b63c67e4e674b2edb41 | [
"MIT"
] | 1 | 2020-08-07T19:51:47.000Z | 2020-08-07T19:51:47.000Z | lib/ex_admin/view_helpers.ex | IvanIvanoff/ex_admin | 550f748c3b8e18f02ea86b63c67e4e674b2edb41 | [
"MIT"
] | null | null | null | lib/ex_admin/view_helpers.ex | IvanIvanoff/ex_admin | 550f748c3b8e18f02ea86b63c67e4e674b2edb41 | [
"MIT"
defmodule ExAdmin.ViewHelpers do
  @moduledoc false
  use Xain
  import ExAdmin.Utils
  import ExAdmin.Gettext
  require Logger

  # Endpoint module read from config at compile time.
  @endpoint Application.get_env(:ex_admin, :endpoint)

  def endpoint, do: @endpoint

  # defmacro __using__(_opts) do
  #   import unquote(__MODULE__)
  #   import UcxNotifier.Admin.ViewHelpers.Table
  # end

  @doc """
  Build an action item link.
  """
  def action_item_link(name, opts) do
    {:custom, [{name, opts}]}
  end

  # Renders the :notice and :error flash messages (if any) as Xain markup.
  def flashes(conn) do
    markup safe: true do
      messages =
        Enum.reduce([:notice, :error], [], fn which, acc ->
          acc ++ get_flash(conn, which)
        end)

      if messages != [] do
        div ".flashes" do
          Enum.map(messages, fn {which, flash} ->
            div(".flash.flash_#{which} #{flash}")
          end)
        end
      end
    end
  end

  # Wraps Phoenix's get_flash so missing flashes become an empty list,
  # which makes the result safe to concatenate above.
  def get_flash(conn, which) do
    case Phoenix.Controller.get_flash(conn, which) do
      nil ->
        []

      flash ->
        [{which, flash}]
    end
  end

  # Title shown in the admin layout, derived from the controller action:
  # index -> plural resource name, show -> the record's display name,
  # edit/new -> "Edit"/"New" plus the singular name, otherwise "".
  def page_title(conn, resource) do
    plural = displayable_name_plural(conn)
    singular = Inflex.singularize(plural)

    case ExAdmin.Utils.action_name(conn) do
      :index ->
        plural

      :show ->
        # Prefer display_name/1 on the registered admin module, then on
        # the resource's own schema module, then fall back to the
        # generic resource identity.
        cond do
          function_exported?(
            ExAdmin.get_registered(resource.__struct__).__struct__,
            :display_name,
            1
          ) ->
            apply(ExAdmin.get_registered(resource.__struct__).__struct__, :display_name, [
              resource
            ])

          function_exported?(resource.__struct__, :display_name, 1) ->
            apply(resource.__struct__, :display_name, [resource])

          true ->
            ExAdmin.Helpers.resource_identity(resource)
        end

      action when action in [:edit, :update] ->
        gettext("Edit") <> " #{singular}"

      action when action in [:new, :create] ->
        gettext("New") <> " #{singular}"

      _ ->
        ""
    end
  end

  # Renders a boolean-ish status as a styled span; nil counts as false.
  def status_tag(nil), do: status_tag(false)

  def status_tag(status) do
    span(".status_tag.#{status} #{status}")
  end

  def build_link(action, opts, html_opts \\ [])
  def build_link(_action, opts, _) when opts in [nil, []], do: ""

  # Builds a raw <a> tag from {name, attrs} plus extra HTML attributes.
  # NOTE(review): attribute values are interpolated without escaping, so
  # callers must not pass untrusted input here.
  def build_link(_action, [{name, opts} | _], html_opts) do
    attrs =
      Enum.reduce(opts ++ html_opts, "", fn {k, v}, acc ->
        acc <> "#{k}='#{v}' "
      end)

    Phoenix.HTML.raw("<a #{attrs}>#{name}</a>")
  end

  # Replacement table for _escape_javascript/1 below.
  @js_escape_map Enum.into(
                   [
                     {"^", ""},
                     {~S(\\), ~S(\\\\)},
                     {~S(</), ~S(<\/)},
                     {"\r\n", ~S(\n)},
                     {"\n", ~S(\n)},
                     {"\r", ~S(\n)},
                     {~S("), ~S(\")},
                     {"'", "\\'"}
                   ],
                   %{}
                 )

  # {~S(\"), ~S(\\")},

  # Escapes a Phoenix safe value for embedding in a JavaScript string.
  def escape_javascript(unescaped) do
    # Phoenix.HTML.safe _escape_javascript(unescaped)
    # IO.puts "escape_javascript: unescaped: #{inspect unescaped}`"
    res =
      Phoenix.HTML.safe_to_string(unescaped)
      |> String.replace("\n", "")
      |> _escape_javascript

    # IO.puts "escape_javascript: #{inspect res}"
    res
  end

  # Recursively escapes iodata; binaries are rewritten via the regex and
  # @js_escape_map. \342\200\250 / \342\200\251 are the UTF-8 bytes of
  # the U+2028/U+2029 line separators.
  # NOTE(review): U+2028, U+2029 and the "|</" alternative have no entry
  # in @js_escape_map, so the replacement fun would return nil for those
  # matches — verify this path is never hit.
  def _escape_javascript({:safe, list}) do
    _escape_javascript(list)
  end

  def _escape_javascript([h | t]) do
    [_escape_javascript(h) | _escape_javascript(t)]
  end

  def _escape_javascript([]), do: []

  def _escape_javascript(javascript) when is_binary(javascript) do
    Regex.replace(~r/(\|<\/|\r\n|\342\200\250|\342\200\251|[\n\r"'^])/u, javascript, fn match ->
      @js_escape_map[match]
    end)
  end

  # Formats a Decimal as currency, e.g. "$1.50". Options: :delimiter
  # (currency symbol), :seperator (decimal separator — note the
  # historical spelling), :round (decimal places), :negative (prefix or
  # {prefix, suffix} wrapped around negative amounts; defaults to "-").
  def decimal_to_currency(%Decimal{} = num, opts \\ []) do
    del = opts[:delimiter] || "$"
    sep = opts[:seperator] || "."
    rnd = opts[:round] || 2

    neg_opts =
      case opts[:negative] do
        nil -> {"-", ""}
        {pre, post} -> {"#{pre}", "#{post}"}
        pre -> {"#{pre}", ""}
      end

    case Decimal.round(num, rnd) |> Decimal.to_string() |> String.split(".") do
      [int, dec] ->
        del <> wrap_negative(int <> sep <> String.pad_trailing(dec, 2, "0"), neg_opts)

      [int] ->
        del <> wrap_negative(int <> sep <> "00", neg_opts)
    end
  end

  # Moves a leading "-" into the configured negative prefix/suffix.
  defp wrap_negative("-" <> num, {neg_pre, neg_post}) do
    "#{neg_pre}#{num}#{neg_post}"
  end

  defp wrap_negative(num, _), do: num

  # Truncates a string to :length (default 30) graphemes, appending
  # :omission (default "..."). NOTE(review): a string of exactly
  # `length` characters still gets the omission appended — `<=` was
  # probably intended; confirm before changing.
  def truncate(string, opts \\ []) when is_binary(string) do
    length = Keyword.get(opts, :length, 30)
    omission = Keyword.get(opts, :omission, "...")

    if String.length(string) < length do
      string
    else
      String.slice(string, 0, length) <> omission
    end
  end

  # Links a schema record to its admin :show page, using the second
  # schema field's value as the link text.
  def auto_link(resource) do
    case resource.__struct__.__schema__(:fields) do
      [_, field | _] ->
        name = Map.get(resource, field, "Unknown")
        a(name, href: admin_resource_path(resource, :show))

      _ ->
        ""
    end
  end
end
| 24.628141 | 96 | 0.537849 |
086986e470db2b309fb544cce01391dd58b1b5f4 | 1,574 | ex | Elixir | lib/remote_ip/headers/generic.ex | almirsarajcic/remote_ip | ccaa8b93aadcaae356f149e6253fe9671bc455d9 | [
"MIT"
] | null | null | null | lib/remote_ip/headers/generic.ex | almirsarajcic/remote_ip | ccaa8b93aadcaae356f149e6253fe9671bc455d9 | [
"MIT"
] | null | null | null | lib/remote_ip/headers/generic.ex | almirsarajcic/remote_ip | ccaa8b93aadcaae356f149e6253fe9671bc455d9 | [
"MIT"
defmodule RemoteIp.Headers.Generic do
  @moduledoc """
  Generic parser for forwarding headers.

  When there is no other special `RemoteIp.Headers.*` parser submodule,
  `RemoteIp.Headers.parse/2` will use this module to parse the header value.
  So, `RemoteIp.Headers.Generic` is used to parse `X-Forwarded-For`,
  `X-Real-IP`, `X-Client-IP`, and generally unrecognized headers.
  """

  @doc """
  Parses a comma-separated list of IPs.

  Any amount of whitespace is allowed before and after the commas, as well as
  at the beginning/end of the input. Unparseable entries are dropped.

  ## Examples

      iex> RemoteIp.Headers.Generic.parse("1.2.3.4, 5.6.7.8")
      [{1, 2, 3, 4}, {5, 6, 7, 8}]

      iex> RemoteIp.Headers.Generic.parse(" ::1 ")
      [{0, 0, 0, 0, 0, 0, 0, 1}]

      iex> RemoteIp.Headers.Generic.parse("invalid")
      []
  """

  @type header :: String.t
  @type ip :: :inet.ip_address

  @spec parse(header) :: [ip]
  def parse(header) when is_binary(header) do
    header
    |> String.trim()
    |> String.split(~r/\s*,\s*/)
    |> Enum.flat_map(&parse_one/1)
  end

  # Yields a singleton list for a valid address, an empty list otherwise,
  # so flat_map above both filters and preserves order in one pass.
  defp parse_one(candidate) do
    case to_ip(candidate) do
      {:ok, ip} -> [ip]
      {:error, _reason} -> []
    end
  end

  # Strict parsing via :inet; invalid unicode (to_charlist failure) is
  # normalised into an error tuple instead of raising.
  defp to_ip(candidate) do
    candidate |> to_charlist() |> :inet.parse_strict_address()
  rescue
    UnicodeConversionError -> {:error, :invalid_unicode}
  end
end
| 25.803279 | 77 | 0.617535 |
0869a5301da810f0e184ccec922a22c402f0155b | 984 | exs | Elixir | test/bitcoin/node/storage/engine/postgres_test.exs | JohnSmall/bitcoin-elixir | db82ced4c7d0ae7638fcbe67d2312e44905c5f35 | [
"Apache-2.0"
] | null | null | null | test/bitcoin/node/storage/engine/postgres_test.exs | JohnSmall/bitcoin-elixir | db82ced4c7d0ae7638fcbe67d2312e44905c5f35 | [
"Apache-2.0"
] | null | null | null | test/bitcoin/node/storage/engine/postgres_test.exs | JohnSmall/bitcoin-elixir | db82ced4c7d0ae7638fcbe67d2312e44905c5f35 | [
"Apache-2.0"
defmodule Bitcoin.Node.Storage.Engine.PostgesTest do
  # NOTE: the module name keeps the historical "Postges" typo so any
  # external references keep working; consider renaming in its own commit.
  use ExUnit.Case

  alias Bitcoin.Node.Storage.Engine.Postgres
  alias Bitcoin.Protocol.Messages

  # These tests require a running PostgreSQL instance.
  @moduletag :postgres

  setup do
    # Explicitly get a connection before each test
    {:ok, _pid} = Postgres.start_link(%{})
    Ecto.Adapters.SQL.Sandbox.mode(Postgres.Repo, :manual)
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Postgres.Repo)
  end

  test "store a block" do
    # Nothing stored yet.
    assert Postgres.max_height() == nil

    # Fixture: a serialized block checked into the repository.
    block = File.read!("test/data/blk_100000.dat") |> Messages.Block.parse()
    block_hash = Bitcoin.Block.hash(block)

    # Leftover `|> IO.inspect()` debug output removed from this pipeline.
    :ok = Postgres.store_block(block, %{height: 100_000})

    # Round-trip: the stored transaction, block, and height all come back.
    tx1 = block.transactions |> Enum.at(1)
    assert Postgres.get_tx(tx1 |> Bitcoin.Tx.hash()) == tx1
    assert Postgres.get_block(block_hash) == block
    assert Postgres.get_block_height(block_hash) == 100_000
    assert Postgres.max_height() == 100_000
  end
end
| 28.941176 | 76 | 0.698171 |
0869d4ee5ce95b3aaebbca2a749ea130eaa0f903 | 764 | exs | Elixir | integration_test/pool/pool.exs | ashneyderman/ecto | 16f27f64c5ca2480568fad10e40c26522ffbf793 | [
"Apache-2.0"
] | null | null | null | integration_test/pool/pool.exs | ashneyderman/ecto | 16f27f64c5ca2480568fad10e40c26522ffbf793 | [
"Apache-2.0"
] | null | null | null | integration_test/pool/pool.exs | ashneyderman/ecto | 16f27f64c5ca2480568fad10e40c26522ffbf793 | [
"Apache-2.0"
defmodule Ecto.Integration.Pool do
  @moduledoc false

  alias Ecto.Adapters.Pool

  defmodule Connection do
    @behaviour Ecto.Adapters.Connection

    # A fake connection: just an Agent holding a list, so pool behaviour
    # can be exercised without a real database.
    def connect(_opts) do
      Agent.start_link(fn -> [] end)
    end

    def disconnect(conn) do
      Agent.stop(conn)
    end
  end

  # `use Ecto.Integration.Pool, PoolMod` injects a thin test wrapper
  # around the pool module given as the `use` argument: a start_link
  # using the fake Connection above plus transaction/run helpers that
  # delegate to Ecto.Adapters.Pool.
  defmacro __using__(pool_mod) do
    quote do
      def start_link(opts) do
        unquote(pool_mod).start_link(Connection, [size: 1] ++ opts)
      end

      def transaction(pool, timeout, fun) do
        Pool.transaction(unquote(pool_mod), pool, timeout, fun)
      end

      def run(pool, timeout, fun) do
        Pool.run(unquote(pool_mod), pool, timeout, fun)
      end

      defdelegate stop(pool), to: unquote(pool_mod)

      # Callers may override how the pool starts/stops.
      defoverridable [start_link: 1, stop: 1]
    end
  end
end
| 21.828571 | 67 | 0.646597 |
0869dd03918c0a1561b3834cce74d6cfe3de63b6 | 814 | ex | Elixir | test/support/noop_runtime.ex | doyobi/livebook | 136d5039c42b406dd0b31aea188deb4fce3b1328 | [
"Apache-2.0"
] | 1 | 2022-02-16T09:13:27.000Z | 2022-02-16T09:13:27.000Z | test/support/noop_runtime.ex | doyobi/livebook | 136d5039c42b406dd0b31aea188deb4fce3b1328 | [
"Apache-2.0"
] | null | null | null | test/support/noop_runtime.ex | doyobi/livebook | 136d5039c42b406dd0b31aea188deb4fce3b1328 | [
"Apache-2.0"
defmodule Livebook.Runtime.NoopRuntime do
  @moduledoc false

  # A runtime that doesn't do any actual evaluation,
  # thus not requiring any underlying resources.

  defstruct []

  def new(), do: %__MODULE__{}

  defimpl Livebook.Runtime do
    # Every runtime operation succeeds without doing any work.
    def connect(_, _), do: make_ref()
    def disconnect(_), do: :ok
    def evaluate_code(_, _, _, _, _ \\ []), do: :ok
    def forget_evaluation(_, _), do: :ok
    def drop_container(_, _), do: :ok
    def handle_intellisense(_, _, _, _, _), do: :ok
    def duplicate(_), do: {:ok, Livebook.Runtime.NoopRuntime.new()}
    def standalone?(_), do: false

    # The one real capability: reads a local file, normalising the posix
    # error atom into a human-readable string.
    def read_file(_, path) do
      case File.read(path) do
        {:ok, content} -> {:ok, content}
        {:error, posix} -> {:error, posix |> :file.format_error() |> List.to_string()}
      end
    end
  end
end
| 28.068966 | 86 | 0.62285 |
0869df60e7186ad0f60043706342441349dda348 | 1,975 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/update_protected_range_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/sheets/lib/google_api/sheets/v4/model/update_protected_range_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/update_protected_range_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Sheets.V4.Model.UpdateProtectedRangeRequest do
  @moduledoc """
  Updates an existing protected range with the specified
  protectedRangeId.

  ## Attributes

  *   `fields` (*type:* `String.t`, *default:* `nil`) - The fields that should be updated. At least one field must be specified.
      The root `protectedRange` is implied and should not be specified.
      A single `"*"` can be used as short-hand for listing every field.
  *   `protectedRange` (*type:* `GoogleApi.Sheets.V4.Model.ProtectedRange.t`, *default:* `nil`) - The protected range to update with the new properties.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :fields => String.t(),
          :protectedRange => GoogleApi.Sheets.V4.Model.ProtectedRange.t()
        }

  # Fields registered with Gax.ModelBase; :protectedRange decodes into a
  # nested ProtectedRange struct.
  field(:fields)
  field(:protectedRange, as: GoogleApi.Sheets.V4.Model.ProtectedRange)
end
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.UpdateProtectedRangeRequest do
  # Delegates to the model's decode/2 — presumably injected by
  # `use GoogleApi.Gax.ModelBase` (it is not defined explicitly there).
  def decode(value, options) do
    GoogleApi.Sheets.V4.Model.UpdateProtectedRangeRequest.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.UpdateProtectedRangeRequest do
  # Encodes via the shared Gax model encoder.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 37.264151 | 152 | 0.739747 |
086a16a8d358610eeee80e9970959ec11af2e5b5 | 70,423 | ex | Elixir | lib/ecto/repo.ex | sthagen/elixir-ecto-ecto | a71dc13ba376663279f0c607ebec510018802b30 | [
"Apache-2.0"
] | null | null | null | lib/ecto/repo.ex | sthagen/elixir-ecto-ecto | a71dc13ba376663279f0c607ebec510018802b30 | [
"Apache-2.0"
] | null | null | null | lib/ecto/repo.ex | sthagen/elixir-ecto-ecto | a71dc13ba376663279f0c607ebec510018802b30 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Repo do
@moduledoc """
Defines a repository.
A repository maps to an underlying data store, controlled by the
adapter. For example, Ecto ships with a Postgres adapter that
stores data into a PostgreSQL database.
When used, the repository expects the `:otp_app` and `:adapter` as
option. The `:otp_app` should point to an OTP application that has
the repository configuration. For example, the repository:
defmodule Repo do
use Ecto.Repo,
otp_app: :my_app,
adapter: Ecto.Adapters.Postgres
end
Could be configured with:
config :my_app, Repo,
database: "ecto_simple",
username: "postgres",
password: "postgres",
hostname: "localhost"
Most of the configuration that goes into the `config` is specific
to the adapter. For this particular example, you can check
[`Ecto.Adapters.Postgres`](https://hexdocs.pm/ecto_sql/Ecto.Adapters.Postgres.html)
for more information. In spite of this, the following configuration values
are shared across all adapters:
  * `:name` - the name of the Repo supervisor process
* `:priv` - the directory where to keep repository data, like
migrations, schema and more. Defaults to "priv/YOUR_REPO".
It must always point to a subdirectory inside the priv directory
  * `:url` - a URL that specifies storage information. Read below
for more information
* `:log` - the log level used when logging the query with Elixir's
Logger. If false, disables logging for that repository.
Defaults to `:debug`
* `:pool_size` - the size of the pool used by the connection module.
Defaults to `10`
* `:telemetry_prefix` - we recommend adapters to publish events
using the `Telemetry` library. By default, the telemetry prefix
is based on the module name, so if your module is called
`MyApp.Repo`, the prefix will be `[:my_app, :repo]`. See the
"Telemetry Events" section to see which events we recommend
adapters to publish. Note that if you have multiple databases, you
should keep the `:telemetry_prefix` consistent for each repo and
use the `:repo` property in the event metadata for distinguishing
between repos.
* `:stacktrace`- when true, publishes the stacktrace in telemetry events
and allows more advanced logging.
## URLs
Repositories by default support URLs. For example, the configuration
above could be rewritten to:
config :my_app, Repo,
url: "ecto://postgres:postgres@localhost/ecto_simple"
  The URL scheme can be of any value. The path represents the database name
while options are simply merged in.
URL can include query parameters to override shared and adapter-specific
options, like `ssl`, `timeout` and `pool_size`. The following example
shows how to pass these configuration values:
config :my_app, Repo,
url: "ecto://postgres:postgres@localhost/ecto_simple?ssl=true&pool_size=10"
In case the URL needs to be dynamically configured, for example by
reading a system environment variable, such can be done via the
`c:init/2` repository callback:
def init(_type, config) do
{:ok, Keyword.put(config, :url, System.get_env("DATABASE_URL"))}
end
## Shared options
Almost all of the repository functions outlined in this module accept the following
options:
* `:timeout` - The time in milliseconds (as an integer) to wait for the query call to
finish. `:infinity` will wait indefinitely (default: `15_000`)
* `:log` - When false, does not log the query
* `:telemetry_event` - The telemetry event name to dispatch the event under.
See the next section for more information
* `:telemetry_options` - Extra options to attach to telemetry event name.
See the next section for more information
## Telemetry events
There are two types of telemetry events. The ones emitted by Ecto and the
ones that are adapter specific.
### Ecto telemetry events
The following events are emitted by all Ecto repositories:
* `[:ecto, :repo, :init]` - it is invoked whenever a repository starts.
The measurement is a single `system_time` entry in native unit. The
metadata is the `:repo` and all initialization options under `:opts`.
### Adapter-specific events
We recommend adapters to publish certain `Telemetry` events listed below.
Those events will use the `:telemetry_prefix` outlined above which defaults
to `[:my_app, :repo]`.
For instance, to receive all query events published by a repository called
`MyApp.Repo`, one would define a module:
defmodule MyApp.Telemetry do
def handle_event([:my_app, :repo, :query], measurements, metadata, config) do
IO.inspect binding()
end
end
Then, in the `Application.start/2` callback, attach the handler to this event using
a unique handler id:
:ok = :telemetry.attach("my-app-handler-id", [:my_app, :repo, :query], &MyApp.Telemetry.handle_event/4, %{})
For details, see [the telemetry documentation](https://hexdocs.pm/telemetry/).
Below we list all events developers should expect from Ecto. All examples
below consider a repository named `MyApp.Repo`:
#### `[:my_app, :repo, :query]`
This event should be invoked on every query sent to the adapter, including
queries that are related to the transaction management.
The `:measurements` map will include the following, all given in the
`:native` time unit:
* `:idle_time` - the time the connection spent waiting before being checked out for the query
* `:queue_time` - the time spent waiting to check out a database connection
* `:query_time` - the time spent executing the query
* `:decode_time` - the time spent decoding the data received from the database
* `:total_time` - the sum of the other measurements
All measurements are given in the `:native` time unit. You can read more
about it in the docs for `System.convert_time_unit/3`.
A telemetry `:metadata` map including the following fields. Each database
adapter may emit different information here. For Ecto.SQL databases, it
will look like this:
* `:type` - the type of the Ecto query. For example, for Ecto.SQL
databases, it would be `:ecto_sql_query`
* `:repo` - the Ecto repository
* `:result` - the query result
* `:params` - the query parameters
* `:query` - the query sent to the database as a string
* `:source` - the source the query was made on (may be nil)
* `:options` - extra options given to the repo operation under
`:telemetry_options`
## Read-only repositories
You can mark a repository as read-only by passing the `:read_only`
flag on `use`:
use Ecto.Repo, otp_app: ..., adapter: ..., read_only: true
By passing the `:read_only` option, none of the functions that perform
write operations, such as `c:insert/2`, `c:insert_all/3`, `c:update_all/3`,
and friends will be defined.
"""
@type t :: module
@doc """
Returns all running Ecto repositories.
The list is returned in no particular order. The list
contains either atoms, for named Ecto repositories, or
PIDs.
"""
@spec all_running() :: [atom() | pid()]
defdelegate all_running(), to: Ecto.Repo.Registry
@doc false
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
@behaviour Ecto.Repo
{otp_app, adapter, behaviours} =
Ecto.Repo.Supervisor.compile_config(__MODULE__, opts)
@otp_app otp_app
@adapter adapter
@default_dynamic_repo opts[:default_dynamic_repo] || __MODULE__
@read_only opts[:read_only] || false
@before_compile adapter
@aggregates [:count, :avg, :max, :min, :sum]
def config do
{:ok, config} = Ecto.Repo.Supervisor.runtime_config(:runtime, __MODULE__, @otp_app, [])
config
end
def __adapter__ do
@adapter
end
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
type: :supervisor
}
end
def start_link(opts \\ []) do
Ecto.Repo.Supervisor.start_link(__MODULE__, @otp_app, @adapter, opts)
end
def stop(timeout \\ 5000) do
Supervisor.stop(get_dynamic_repo(), :normal, timeout)
end
def load(schema_or_types, data) do
Ecto.Repo.Schema.load(@adapter, schema_or_types, data)
end
def checkout(fun, opts \\ []) when is_function(fun) do
%{adapter: adapter} = meta = Ecto.Repo.Registry.lookup(get_dynamic_repo())
adapter.checkout(meta, opts, fun)
end
def checked_out? do
%{adapter: adapter} = meta = Ecto.Repo.Registry.lookup(get_dynamic_repo())
adapter.checked_out?(meta)
end
@compile {:inline, get_dynamic_repo: 0, prepare_opts: 2}
def get_dynamic_repo() do
Process.get({__MODULE__, :dynamic_repo}, @default_dynamic_repo)
end
def put_dynamic_repo(dynamic) when is_atom(dynamic) or is_pid(dynamic) do
Process.put({__MODULE__, :dynamic_repo}, dynamic) || @default_dynamic_repo
end
def default_options(_operation), do: []
defoverridable default_options: 1
defp prepare_opts(operation_name, opts) do
operation_name
|> default_options()
|> Keyword.merge(opts)
end
## Transactions
if Ecto.Adapter.Transaction in behaviours do
def transaction(fun_or_multi, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Transaction.transaction(__MODULE__, repo, fun_or_multi, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:transaction, opts)))
end
def in_transaction? do
Ecto.Repo.Transaction.in_transaction?(get_dynamic_repo())
end
@spec rollback(term) :: no_return
def rollback(value) do
Ecto.Repo.Transaction.rollback(get_dynamic_repo(), value)
end
end
## Schemas
if Ecto.Adapter.Schema in behaviours and not @read_only do
def insert(struct, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Schema.insert(__MODULE__, repo, struct, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:insert, opts)))
end
def update(struct, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Schema.update(__MODULE__, get_dynamic_repo(), struct, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:update, opts)))
end
def insert_or_update(changeset, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Schema.insert_or_update(__MODULE__, get_dynamic_repo(), changeset, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:insert_or_update, opts)))
end
def delete(struct, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Schema.delete(__MODULE__, get_dynamic_repo(), struct, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:delete, opts)))
end
def insert!(struct, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Schema.insert!(__MODULE__, get_dynamic_repo(), struct, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:insert, opts)))
end
def update!(struct, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Schema.update!(__MODULE__, get_dynamic_repo(), struct, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:update, opts)))
end
def insert_or_update!(changeset, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Schema.insert_or_update!(__MODULE__, get_dynamic_repo(), changeset, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:insert_or_update, opts)))
end
def delete!(struct, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Schema.delete!(__MODULE__, get_dynamic_repo(), struct, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:delete, opts)))
end
def insert_all(schema_or_source, entries, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Schema.insert_all(__MODULE__, get_dynamic_repo(), schema_or_source, entries, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:insert_all, opts)))
end
end
## Queryable
if Ecto.Adapter.Queryable in behaviours do
if not @read_only do
def update_all(queryable, updates, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Queryable.update_all(get_dynamic_repo(), queryable, updates, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:update_all, opts)))
end
def delete_all(queryable, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Queryable.delete_all(get_dynamic_repo(), queryable, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:delete_all, opts)))
end
end
def all(queryable, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Queryable.all(get_dynamic_repo(), queryable, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:all, opts)))
end
def stream(queryable, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Queryable.stream(get_dynamic_repo(), queryable, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:stream, opts)))
end
def get(queryable, id, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Queryable.get(get_dynamic_repo(), queryable, id, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:all, opts)))
end
def get!(queryable, id, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Queryable.get!(get_dynamic_repo(), queryable, id, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:all, opts)))
end
def get_by(queryable, clauses, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Queryable.get_by(get_dynamic_repo(), queryable, clauses, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:all, opts)))
end
def get_by!(queryable, clauses, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Queryable.get_by!(get_dynamic_repo(), queryable, clauses, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:all, opts)))
end
def reload(queryable, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Queryable.reload(get_dynamic_repo(), queryable, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:reload, opts)))
end
def reload!(queryable, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Queryable.reload!(get_dynamic_repo(), queryable, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:reload, opts)))
end
def one(queryable, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Queryable.one(get_dynamic_repo(), queryable, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:all, opts)))
end
def one!(queryable, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Queryable.one!(get_dynamic_repo(), queryable, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:all, opts)))
end
def aggregate(queryable, aggregate, opts \\ [])
def aggregate(queryable, aggregate, opts)
when aggregate in [:count] and is_list(opts) do
repo = get_dynamic_repo()
Ecto.Repo.Queryable.aggregate(get_dynamic_repo(), queryable, aggregate, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:all, opts)))
end
def aggregate(queryable, aggregate, field)
when aggregate in @aggregates and is_atom(field) do
repo = get_dynamic_repo()
Ecto.Repo.Queryable.aggregate(get_dynamic_repo(), queryable, aggregate, field, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:all, [])))
end
def aggregate(queryable, aggregate, field, opts)
when aggregate in @aggregates and is_atom(field) and is_list(opts) do
repo = get_dynamic_repo()
Ecto.Repo.Queryable.aggregate(get_dynamic_repo(), queryable, aggregate, field, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:all, opts)))
end
def exists?(queryable, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Queryable.exists?(get_dynamic_repo(), queryable, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:all, opts)))
end
def preload(struct_or_structs_or_nil, preloads, opts \\ []) do
repo = get_dynamic_repo()
Ecto.Repo.Preloader.preload(struct_or_structs_or_nil, get_dynamic_repo(), preloads, Ecto.Repo.Supervisor.tuplet(repo, prepare_opts(:preload, opts)))
end
def prepare_query(operation, query, opts), do: {query, opts}
defoverridable prepare_query: 3
end
end
end
## User callbacks
@optional_callbacks init: 2
@doc """
A callback executed when the repo starts or when configuration is read.
The first argument is the context the callback is being invoked. If it
is called because the Repo supervisor is starting, it will be `:supervisor`.
It will be `:runtime` if it is called for reading configuration without
actually starting a process.
The second argument is the repository configuration as stored in the
application environment. It must return `{:ok, keyword}` with the updated
list of configuration or `:ignore` (only in the `:supervisor` case).
"""
@doc group: "User callbacks"
@callback init(context :: :supervisor | :runtime, config :: Keyword.t()) ::
{:ok, Keyword.t()} | :ignore
## Ecto.Adapter
@doc """
Returns the adapter tied to the repository.
"""
@doc group: "Runtime API"
@callback __adapter__ :: Ecto.Adapter.t()
@doc """
Returns the adapter configuration stored in the `:otp_app` environment.
If the `c:init/2` callback is implemented in the repository,
it will be invoked with the first argument set to `:runtime`.
"""
@doc group: "Runtime API"
@callback config() :: Keyword.t()
@doc """
Starts any connection pooling or supervision and return `{:ok, pid}`
or just `:ok` if nothing needs to be done.
Returns `{:error, {:already_started, pid}}` if the repo is already
started or `{:error, term}` in case anything else goes wrong.
## Options
See the configuration in the moduledoc for options shared between adapters,
for adapter-specific configuration see the adapter's documentation.
"""
@doc group: "Runtime API"
@callback start_link(opts :: Keyword.t()) ::
{:ok, pid}
| {:error, {:already_started, pid}}
| {:error, term}
@doc """
Shuts down the repository.
"""
@doc group: "Runtime API"
@callback stop(timeout) :: :ok
@doc """
Checks out a connection for the duration of the function.
It returns the result of the function. This is useful when
you need to perform multiple operations against the repository
in a row and you want to avoid checking out the connection
multiple times.
`checkout/2` and `transaction/2` can be combined and nested
multiple times. If `checkout/2` is called inside the function
of another `checkout/2` call, the function is simply executed,
without checking out a new connection.
## Options
See the ["Shared options"](#module-shared-options) section at the module
documentation for more options.
"""
@doc group: "Transaction API"
@callback checkout((() -> result), opts :: Keyword.t()) :: result when result: var
@doc """
Returns true if a connection has been checked out.
This is true if inside a `c:Ecto.Repo.checkout/2` or
`c:Ecto.Repo.transaction/2`.
## Examples
MyRepo.checked_out?
#=> false
MyRepo.transaction(fn ->
MyRepo.checked_out? #=> true
end)
MyRepo.checkout(fn ->
MyRepo.checked_out? #=> true
end)
"""
@doc group: "Transaction API"
@callback checked_out?() :: boolean
@doc """
Loads `data` into a schema or a map.
  The first argument can be a schema module or a map (of types).
The first argument determines the return value: a struct or a map,
respectively.
The second argument `data` specifies fields and values that are to be loaded.
It can be a map, a keyword list, or a `{fields, values}` tuple.
Fields can be atoms or strings.
Fields that are not present in the schema (or `types` map) are ignored.
If any of the values has invalid type, an error is raised.
To load data from non-database sources, use `Ecto.embedded_load/3`.
## Examples
iex> MyRepo.load(User, %{name: "Alice", age: 25})
%User{name: "Alice", age: 25}
iex> MyRepo.load(User, [name: "Alice", age: 25])
%User{name: "Alice", age: 25}
`data` can also take form of `{fields, values}`:
iex> MyRepo.load(User, {[:name, :age], ["Alice", 25]})
%User{name: "Alice", age: 25, ...}
The first argument can also be a `types` map:
iex> types = %{name: :string, age: :integer}
iex> MyRepo.load(types, %{name: "Alice", age: 25})
%{name: "Alice", age: 25}
This function is especially useful when parsing raw query results:
iex> result = Ecto.Adapters.SQL.query!(MyRepo, "SELECT * FROM users", [])
iex> Enum.map(result.rows, &MyRepo.load(User, {result.columns, &1}))
[%User{...}, ...]
"""
@doc group: "Schema API"
@callback load(
schema_or_map :: module | map(),
data :: map() | Keyword.t() | {list, list}
) :: Ecto.Schema.t() | map()
@doc """
Returns the atom name or pid of the current repository.
See `c:put_dynamic_repo/1` for more information.
"""
@doc group: "Runtime API"
@callback get_dynamic_repo() :: atom() | pid()
@doc """
Sets the dynamic repository to be used in further interactions.
Sometimes you may want a single Ecto repository to talk to
many different database instances. By default, when you call
`MyApp.Repo.start_link/1`, it will start a repository with
name `MyApp.Repo`. But if you want to start multiple repositories,
you can give each of them a different name:
MyApp.Repo.start_link(name: :tenant_foo, hostname: "foo.example.com")
MyApp.Repo.start_link(name: :tenant_bar, hostname: "bar.example.com")
You can also start repositories without names by explicitly
setting the name to nil:
MyApp.Repo.start_link(name: nil, hostname: "temp.example.com")
However, once the repository is started, you can't directly interact with
it, since all operations in `MyApp.Repo` are sent by default to the repository
named `MyApp.Repo`. You can change the default repo at compile time with:
use Ecto.Repo, default_dynamic_repo: :name_of_repo
Or you can change it anytime at runtime by calling `put_dynamic_repo/1`:
MyApp.Repo.put_dynamic_repo(:tenant_foo)
From this moment on, all future queries done by the current process will
run on `:tenant_foo`.
"""
@doc group: "Runtime API"
@callback put_dynamic_repo(name_or_pid :: atom() | pid()) :: atom() | pid()
## Ecto.Adapter.Queryable
@optional_callbacks get: 3, get!: 3, get_by: 3, get_by!: 3, reload: 2, reload!: 2, aggregate: 3,
aggregate: 4, exists?: 2, one: 2, one!: 2, preload: 3, all: 2, stream: 2,
update_all: 3, delete_all: 2
@doc """
Fetches a single struct from the data store where the primary key matches the
given id.
Returns `nil` if no result was found. If the struct in the queryable
has no or more than one primary key, it will raise an argument error.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This will be applied to all `from`
and `join`s in the query that did not have a prefix previously given
either via the `:prefix` option on `join`/`from` or via `@schema_prefix`
in the schema. For more information see the "Query Prefix" section of the
`Ecto.Query` documentation.
See the ["Shared options"](#module-shared-options) section at the module
documentation for more options.
## Example
MyRepo.get(Post, 42)
MyRepo.get(Post, 42, prefix: "public")
"""
@doc group: "Query API"
@callback get(queryable :: Ecto.Queryable.t(), id :: term, opts :: Keyword.t()) ::
Ecto.Schema.t() | nil
@doc """
Similar to `c:get/3` but raises `Ecto.NoResultsError` if no record was found.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This will be applied to all `from`
and `join`s in the query that did not have a prefix previously given
either via the `:prefix` option on `join`/`from` or via `@schema_prefix`
in the schema. For more information see the "Query Prefix" section of the
`Ecto.Query` documentation.
See the ["Shared options"](#module-shared-options) section at the module
documentation for more options.
## Example
MyRepo.get!(Post, 42)
MyRepo.get!(Post, 42, prefix: "public")
"""
@doc group: "Query API"
@callback get!(queryable :: Ecto.Queryable.t(), id :: term, opts :: Keyword.t()) ::
Ecto.Schema.t()
@doc """
Fetches a single result from the query.
Returns `nil` if no result was found. Raises if more than one entry.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This will be applied to all `from`
and `join`s in the query that did not have a prefix previously given
either via the `:prefix` option on `join`/`from` or via `@schema_prefix`
in the schema. For more information see the "Query Prefix" section of the
`Ecto.Query` documentation.
See the ["Shared options"](#module-shared-options) section at the module
documentation for more options.
## Example
MyRepo.get_by(Post, title: "My post")
MyRepo.get_by(Post, [title: "My post"], prefix: "public")
"""
@doc group: "Query API"
@callback get_by(
queryable :: Ecto.Queryable.t(),
clauses :: Keyword.t() | map,
opts :: Keyword.t()
) :: Ecto.Schema.t() | nil
@doc """
Similar to `c:get_by/3` but raises `Ecto.NoResultsError` if no record was found.
Raises if more than one entry.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This will be applied to all `from`
and `join`s in the query that did not have a prefix previously given
either via the `:prefix` option on `join`/`from` or via `@schema_prefix`
in the schema. For more information see the "Query Prefix" section of the
`Ecto.Query` documentation.
See the ["Shared options"](#module-shared-options) section at the module
documentation for more options.
## Example
MyRepo.get_by!(Post, title: "My post")
MyRepo.get_by!(Post, [title: "My post"], prefix: "public")
"""
@doc group: "Query API"
@callback get_by!(
queryable :: Ecto.Queryable.t(),
clauses :: Keyword.t() | map,
opts :: Keyword.t()
) :: Ecto.Schema.t()
@doc """
Reloads a given schema or schema list from the database.
When using with lists, it is expected that all of the structs in the list belong
to the same schema. Ordering is guaranteed to be kept. Results not found in
the database will be returned as `nil`.
## Example
MyRepo.reload(post)
%Post{}
MyRepo.reload([post1, post2])
[%Post{}, %Post{}]
MyRepo.reload([deleted_post, post1])
[nil, %Post{}]
"""
@doc group: "Schema API"
@callback reload(
struct_or_structs :: Ecto.Schema.t() | [Ecto.Schema.t()],
opts :: Keyword.t()
) :: Ecto.Schema.t() | [Ecto.Schema.t() | nil] | nil
@doc """
Similar to `c:reload/2`, but raises when something is not found.
When using with lists, ordering is guaranteed to be kept.
## Example
MyRepo.reload!(post)
%Post{}
MyRepo.reload!([post1, post2])
[%Post{}, %Post{}]
"""
@doc group: "Schema API"
@callback reload!(struct_or_structs, opts :: Keyword.t()) :: struct_or_structs
when struct_or_structs: Ecto.Schema.t() | [Ecto.Schema.t()]
@doc """
Calculate the given `aggregate`.
If the query has a limit, offset, distinct or combination set, it will be
automatically wrapped in a subquery in order to return the
proper result.
Any preload or select in the query will be ignored in favor of
the column being aggregated.
The aggregation will fail if any `group_by` field is set.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This will be applied to all `from`
and `join`s in the query that did not have a prefix previously given
either via the `:prefix` option on `join`/`from` or via `@schema_prefix`
in the schema. For more information see the "Query Prefix" section of the
`Ecto.Query` documentation.
See the ["Shared options"](#module-shared-options) section at the module
documentation for more options.
## Examples
# Returns the number of blog posts
Repo.aggregate(Post, :count)
# Returns the number of blog posts in the "private" schema path
# (in Postgres) or database (in MySQL)
Repo.aggregate(Post, :count, prefix: "private")
"""
@doc group: "Query API"
@callback aggregate(
queryable :: Ecto.Queryable.t(),
aggregate :: :count,
opts :: Keyword.t()
) :: term | nil
@doc """
Calculate the given `aggregate` over the given `field`.
See `c:aggregate/3` for general considerations and options.
## Examples
# Returns the number of visits per blog post
Repo.aggregate(Post, :count, :visits)
# Returns the number of visits per blog post in the "private" schema path
# (in Postgres) or database (in MySQL)
Repo.aggregate(Post, :count, :visits, prefix: "private")
# Returns the average number of visits for the top 10
query = from Post, limit: 10
Repo.aggregate(query, :avg, :visits)
"""
@doc group: "Query API"
@callback aggregate(
queryable :: Ecto.Queryable.t(),
aggregate :: :avg | :count | :max | :min | :sum,
field :: atom,
opts :: Keyword.t()
) :: term | nil
@doc """
Checks if there exists an entry that matches the given query.
Returns a boolean.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This will be applied to all `from`
and `join`s in the query that did not have a prefix previously given
either via the `:prefix` option on `join`/`from` or via `@schema_prefix`
in the schema. For more information see the "Query Prefix" section of the
`Ecto.Query` documentation.
See the ["Shared options"](#module-shared-options) section at the module
documentation for more options.
## Examples
# checks if any posts exist
Repo.exists?(Post)
# checks if any posts exist in the "private" schema path (in Postgres) or
# database (in MySQL)
      Repo.exists?(Post, prefix: "private")
# checks if any post with a like count greater than 10 exists
query = from p in Post, where: p.like_count > 10
Repo.exists?(query)
"""
@doc group: "Query API"
@callback exists?(queryable :: Ecto.Queryable.t(), opts :: Keyword.t()) :: boolean()
@doc """
Fetches a single result from the query.
Returns `nil` if no result was found. Raises if more than one entry.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This will be applied to all `from`
and `join`s in the query that did not have a prefix previously given
either via the `:prefix` option on `join`/`from` or via `@schema_prefix`
in the schema. For more information see the "Query Prefix" section of the
`Ecto.Query` documentation.
See the ["Shared options"](#module-shared-options) section at the module
documentation for more options.
## Examples
Repo.one(from p in Post, join: c in assoc(p, :comments), where: p.id == ^post_id)
query = from p in Post, join: c in assoc(p, :comments), where: p.id == ^post_id
Repo.one(query, prefix: "private")
"""
@doc group: "Query API"
@callback one(queryable :: Ecto.Queryable.t(), opts :: Keyword.t()) ::
Ecto.Schema.t() | nil
@doc """
Similar to `c:one/2` but raises `Ecto.NoResultsError` if no record was found.
Raises if more than one entry.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This will be applied to all `from`
and `join`s in the query that did not have a prefix previously given
either via the `:prefix` option on `join`/`from` or via `@schema_prefix`
in the schema. For more information see the "Query Prefix" section of the
`Ecto.Query` documentation.
See the ["Shared options"](#module-shared-options) section at the module
documentation for more options.
"""
@doc group: "Query API"
@callback one!(queryable :: Ecto.Queryable.t(), opts :: Keyword.t()) ::
Ecto.Schema.t()
@doc """
Preloads all associations on the given struct or structs.
This is similar to `Ecto.Query.preload/3` except it allows
you to preload structs after they have been fetched from the
database.
In case the association was already loaded, preload won't attempt
to reload it.
## Options
* `:force` - By default, Ecto won't preload associations that
are already loaded. By setting this option to true, any existing
association will be discarded and reloaded.
* `:in_parallel` - If the preloads must be done in parallel. It can
only be performed when we have more than one preload and the
repository is not in a transaction. Defaults to `true`.
* `:prefix` - the prefix to fetch preloads from. By default, queries
will use the same prefix as the first struct in the given collection.
This option allows the prefix to be changed.
See the ["Shared options"](#module-shared-options) section at the module
documentation for more options.
## Examples
# Use a single atom to preload an association
posts = Repo.preload posts, :comments
# Use a list of atoms to preload multiple associations
posts = Repo.preload posts, [:comments, :authors]
# Use a keyword list to preload nested associations as well
posts = Repo.preload posts, [comments: [:replies, :likes], authors: []]
# You can mix atoms and keywords, but the atoms must come first
posts = Repo.preload posts, [:authors, comments: [:likes, replies: [:reactions]]]
# Use a keyword list to customize how associations are queried
posts = Repo.preload posts, [comments: from(c in Comment, order_by: c.published_at)]
# Use a two-element tuple for a custom query and nested association definition
query = from c in Comment, order_by: c.published_at
posts = Repo.preload posts, [comments: {query, [:replies, :likes]}]
The query given to preload may also preload its own associations.
"""
@doc group: "Schema API"
@callback preload(structs_or_struct_or_nil, preloads :: term, opts :: Keyword.t()) ::
structs_or_struct_or_nil
when structs_or_struct_or_nil: [Ecto.Schema.t()] | Ecto.Schema.t() | nil
@doc """
A user customizable callback invoked for query-based operations.
This callback can be used to further modify the query and options
before it is transformed and sent to the database.
This callback is invoked for all query APIs, including the `stream`
functions. It is also invoked for `insert_all` if a source query is
given. It is not invoked for any of the other schema functions.
## Examples
Let's say you want to filter out records that were "soft-deleted"
(have `deleted_at` column set) from all operations unless an admin
is running the query; you can define the callback like this:
@impl true
def prepare_query(_operation, query, opts) do
if opts[:admin] do
{query, opts}
else
query = from(x in query, where: is_nil(x.deleted_at))
{query, opts}
end
end
And then execute the query:
Repo.all(query) # only non-deleted records are returned
Repo.all(query, admin: true) # all records are returned
The callback will be invoked for all queries, including queries
made from associations and preloads. It is not invoked for each
individual join inside a query.
"""
@doc group: "User callbacks"
@callback prepare_query(operation, query :: Ecto.Query.t(), opts :: Keyword.t()) ::
{Ecto.Query.t(), Keyword.t()}
when operation: :all | :update_all | :delete_all | :stream | :insert_all
@doc """
A user customizable callback invoked to retrieve default options
for operations.
This can be used to provide default values per operation that
have higher precedence than the values given on configuration
or when starting the repository. It can also be used to set
query specific options, such as `:prefix`.
This callback is invoked as the entry point for all repository
operations. For example, if you are executing a query with preloads,
this callback will be invoked once at the beginning, but the
options returned here will be passed to all following operations.
"""
@doc group: "User callbacks"
@callback default_options(operation) :: Keyword.t()
when operation: :all | :insert_all | :update_all | :delete_all | :stream |
:transaction | :insert | :update | :delete | :insert_or_update
@doc """
Fetches all entries from the data store matching the given query.
May raise `Ecto.QueryError` if query validation fails.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This will be applied to all `from`
and `join`s in the query that did not have a prefix previously given
either via the `:prefix` option on `join`/`from` or via `@schema_prefix`
in the schema. For more information see the "Query Prefix" section of the
`Ecto.Query` documentation.
See the ["Shared options"](#module-shared-options) section at the module
documentation for more options.
## Example
# Fetch all post titles
query = from p in Post,
select: p.title
MyRepo.all(query)
"""
@doc group: "Query API"
@callback all(queryable :: Ecto.Queryable.t(), opts :: Keyword.t()) :: [Ecto.Schema.t()]
@doc """
Returns a lazy enumerable that emits all entries from the data store
matching the given query.
SQL adapters, such as Postgres and MySQL, can only enumerate a stream
inside a transaction.
May raise `Ecto.QueryError` if query validation fails.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This will be applied to all `from`
and `join`s in the query that did not have a prefix previously given
either via the `:prefix` option on `join`/`from` or via `@schema_prefix`
in the schema. For more information see the "Query Prefix" section of the
`Ecto.Query` documentation.
* `:max_rows` - The number of rows to load from the database as we stream.
It is supported at least by Postgres and MySQL and defaults to 500.
See the ["Shared options"](#module-shared-options) section at the module
documentation for more options.
## Example
# Fetch all post titles
query = from p in Post,
select: p.title
stream = MyRepo.stream(query)
MyRepo.transaction(fn ->
Enum.to_list(stream)
end)
"""
@doc group: "Query API"
@callback stream(queryable :: Ecto.Queryable.t(), opts :: Keyword.t()) :: Enum.t()
@doc """
Updates all entries matching the given query with the given values.
It returns a tuple containing the number of entries and any returned
result as second element. The second element is `nil` by default
unless a `select` is supplied in the update query. Note, however,
not all databases support returning data from UPDATEs.
Keep in mind this `update_all` will not update autogenerated
fields like the `updated_at` columns.
See `Ecto.Query.update/3` for update operations that can be
performed on fields.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query and any `@schema_prefix` set in the schema.
See the ["Shared options"](#module-shared-options) section at the module
documentation for remaining options.
## Examples
MyRepo.update_all(Post, set: [title: "New title"])
MyRepo.update_all(Post, inc: [visits: 1])
from(p in Post, where: p.id < 10, select: p.visits)
|> MyRepo.update_all(set: [title: "New title"])
from(p in Post, where: p.id < 10, update: [set: [title: "New title"]])
|> MyRepo.update_all([])
from(p in Post, where: p.id < 10, update: [set: [title: ^new_title]])
|> MyRepo.update_all([])
from(p in Post, where: p.id < 10, update: [set: [title: fragment("upper(?)", ^new_title)]])
|> MyRepo.update_all([])
"""
@doc group: "Query API"
@callback update_all(
queryable :: Ecto.Queryable.t(),
updates :: Keyword.t(),
opts :: Keyword.t()
) :: {non_neg_integer, nil | [term]}
@doc """
Deletes all entries matching the given query.
It returns a tuple containing the number of entries and any returned
result as second element. The second element is `nil` by default
unless a `select` is supplied in the delete query. Note, however,
not all databases support returning data from DELETEs.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query and any `@schema_prefix` set in the schema.
See the ["Shared options"](#module-shared-options) section at the module
documentation for remaining options.
## Examples
MyRepo.delete_all(Post)
from(p in Post, where: p.id < 10) |> MyRepo.delete_all
"""
@doc group: "Query API"
@callback delete_all(queryable :: Ecto.Queryable.t(), opts :: Keyword.t()) ::
{non_neg_integer, nil | [term]}
  ## Ecto.Adapter.Schema

  # Schema-based callbacks are optional: an adapter only provides them
  # when it implements the `Ecto.Adapter.Schema` behaviour.
  @optional_callbacks insert_all: 3, insert: 2, insert!: 2, update: 2, update!: 2,
                      delete: 2, delete!: 2, insert_or_update: 2, insert_or_update!: 2,
                      prepare_query: 3
@doc """
Inserts all entries into the repository.
It expects a schema module (`MyApp.User`) or a source (`"users"`) or
both (`{"users", MyApp.User}`) as the first argument. The second
argument is a list of entries to be inserted, either as keyword
lists or as maps. The keys of the entries are the field names as
atoms and the value should be the respective value for the field
type or, optionally, an `Ecto.Query` that returns a single entry
with a single value.
It returns a tuple containing the number of entries
and any returned result as second element. If the database
does not support RETURNING in INSERT statements or no
return result was selected, the second element will be `nil`.
When a schema module is given, the entries given will be properly dumped
before being sent to the database. If the schema primary key has type
`:id` or `:binary_id`, it will be handled either at the adapter
or the storage layer. However any other primary key type or autogenerated
value, like `Ecto.UUID` and timestamps, won't be autogenerated when
using `c:insert_all/3`. You must set those fields explicitly. This is by
design as this function aims to be a more direct way to insert data into
the database without the conveniences of `c:insert/2`. This is also
consistent with `c:update_all/3` that does not handle auto generated
values as well.
It is also not possible to use `insert_all` to insert across multiple
tables, therefore associations are not supported.
If a source is given, without a schema module, the given fields are passed
as is to the adapter.
## Options
* `:returning` - selects which fields to return. When `true`,
returns all fields in the given schema. May be a list of
fields, where a struct is still returned but only with the
given fields. Or `false`, where nothing is returned (the default).
This option is not supported by all databases.
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query and any `@schema_prefix` set in the schema.
* `:on_conflict` - It may be one of `:raise` (the default), `:nothing`,
`:replace_all`, `{:replace_all_except, fields}`, `{:replace, fields}`,
a keyword list of update instructions or an `Ecto.Query`
query for updates. See the "Upserts" section for more information.
* `:conflict_target` - A list of column names to verify for conflicts.
It is expected those columns to have unique indexes on them that may conflict.
If none is specified, the conflict target is left up to the database.
It may also be `{:unsafe_fragment, binary_fragment}` to pass any
expression to the database without any sanitization, this is useful
for partial index or index with expressions, such as
`{:unsafe_fragment, "(coalesce(firstname, ""), coalesce(lastname, "")) WHERE middlename IS NULL"}` for
`ON CONFLICT (coalesce(firstname, ""), coalesce(lastname, "")) WHERE middlename IS NULL` SQL query.
* `:placeholders` - A map with placeholders. This feature is not supported
by all databases. See the "Placeholders" section for more information.
See the ["Shared options"](#module-shared-options) section at the module
documentation for remaining options.
## Source query
A query can be given instead of a list with entries. This query needs to select
into a map containing only keys that are available as writeable columns in the
schema.
## Examples
MyRepo.insert_all(Post, [[title: "My first post"], [title: "My second post"]])
MyRepo.insert_all(Post, [%{title: "My first post"}, %{title: "My second post"}])
query = from p in Post,
join: c in assoc(p, :comments),
select: %{
author_id: p.author_id,
posts: count(p.id, :distinct),
interactions: sum(p.likes) + count(c.id)
},
group_by: p.author_id
MyRepo.insert_all(AuthorStats, query)
## Upserts
`c:insert_all/3` provides upserts (update or inserts) via the `:on_conflict`
option. The `:on_conflict` option supports the following values:
* `:raise` - raises if there is a conflicting primary key or unique index
* `:nothing` - ignores the error in case of conflicts
* `:replace_all` - replace **all** values on the existing row with the values
in the schema/changeset, including fields not explicitly set in the changeset,
such as IDs and autogenerated timestamps (`inserted_at` and `updated_at`).
Do not use this option if you have auto-incrementing primary keys, as they
will also be replaced. You most likely want to use `{:replace_all_except, [:id]}`
or `{:replace, fields}` explicitly instead. This option requires a schema
* `{:replace_all_except, fields}` - same as above except the given fields
are not replaced. This option requires a schema
* `{:replace, fields}` - replace only specific columns. This option requires
`:conflict_target`
* a keyword list of update instructions - such as the one given to
`c:update_all/3`, for example: `[set: [title: "new title"]]`
* an `Ecto.Query` that will act as an `UPDATE` statement, such as the
one given to `c:update_all/3`
Upserts map to "ON CONFLICT" on databases like Postgres and "ON DUPLICATE KEY"
on databases such as MySQL.
## Return values
By default, both Postgres and MySQL will return the number of entries
inserted on `c:insert_all/3`. However, when the `:on_conflict` option
is specified, Postgres and MySQL will return different results.
Postgres will only count a row if it was affected and will
return 0 if no new entry was added.
MySQL will return, at a minimum, the number of entries attempted. For example,
if `:on_conflict` is set to `:nothing`, MySQL will return
the number of entries attempted to be inserted, even when no entry
was added.
Also note that if `:on_conflict` is a query, MySQL will return
the number of attempted entries plus the number of entries modified
by the UPDATE query.
## Placeholders
Passing in a map for the `:placeholders` allows you to send less
data over the wire when you have many entries with the same value
for a field. To use a placeholder, replace its value in each of your
entries with `{:placeholder, key}`, where `key` is the key you
are using in the `:placeholders` option map. For example:
placeholders = %{blob: large_blob_of_text(...)}
entries = [
%{title: "v1", body: {:placeholder, :blob}},
%{title: "v2", body: {:placeholder, :blob}}
]
Repo.insert_all(Post, entries, placeholders: placeholders)
Keep in mind that:
* placeholders cannot be nested in other values. For example, you
cannot put a placeholder inside an array. Instead, the whole
array has to be the placeholder
* a placeholder key can only be used with columns of the same type
* placeholders require a database that supports index parameters,
so they are not currently compatible with MySQL
"""
@doc group: "Schema API"
@callback insert_all(
schema_or_source :: binary | {binary, module} | module,
entries_or_query :: [%{atom => value} | Keyword.t(value)] | Ecto.Query.t,
opts :: Keyword.t()
) :: {non_neg_integer, nil | [term]} when value: term | Ecto.Query.t()
@doc """
Inserts a struct defined via `Ecto.Schema` or a changeset.
In case a struct is given, the struct is converted into a changeset
with all non-nil fields as part of the changeset.
In case a changeset is given, the changes in the changeset are
merged with the struct fields, and all of them are sent to the
database. If more than one database operation is required, they're
automatically wrapped in a transaction.
It returns `{:ok, struct}` if the struct has been successfully
inserted or `{:error, changeset}` if there was a validation
or a known constraint error.
## Options
* `:returning` - selects which fields to return. It accepts a list
of fields to be returned from the database. When `true`, returns
all fields. When `false`, no extra fields are returned. It will
always include all fields in `read_after_writes` as well as any
autogenerated id. Not all databases support this option and it
may not be available during upserts. See the "Upserts" section
for more information.
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query and any `@schema_prefix` set any schemas. Also, the
`@schema_prefix` for the parent record will override all default
`@schema_prefix`s set in any child schemas for associations.
* `:on_conflict` - It may be one of `:raise` (the default), `:nothing`,
`:replace_all`, `{:replace_all_except, fields}`, `{:replace, fields}`,
a keyword list of update instructions or an `Ecto.Query` query for updates.
See the "Upserts" section for more information.
* `:conflict_target` - A list of column names to verify for conflicts.
It is expected those columns to have unique indexes on them that may conflict.
If none is specified, the conflict target is left up to the database.
It may also be `{:unsafe_fragment, binary_fragment}` to pass any
expression to the database without any sanitization, this is useful
for partial index or index with expressions, such as
`{:unsafe_fragment, "(coalesce(firstname, ""), coalesce(lastname, "")) WHERE middlename IS NULL"}` for
`ON CONFLICT (coalesce(firstname, ""), coalesce(lastname, "")) WHERE middlename IS NULL` SQL query.
* `:stale_error_field` - The field where stale errors will be added in
the returning changeset. This option can be used to avoid raising
`Ecto.StaleEntryError`.
* `:stale_error_message` - The message to add to the configured
`:stale_error_field` when stale errors happen, defaults to "is stale".
See the ["Shared options"](#module-shared-options) section at the module
documentation for more options.
## Examples
A typical example is calling `MyRepo.insert/1` with a struct
and acting on the return value:
case MyRepo.insert %Post{title: "Ecto is great"} do
{:ok, struct} -> # Inserted with success
{:error, changeset} -> # Something went wrong
end
## Upserts
`c:insert/2` provides upserts (update or inserts) via the `:on_conflict`
option. The `:on_conflict` option supports the following values:
* `:raise` - raises if there is a conflicting primary key or unique index
* `:nothing` - ignores the error in case of conflicts
* `:replace_all` - replace **all** values on the existing row with the values
in the schema/changeset, including fields not explicitly set in the changeset,
such as IDs and autogenerated timestamps (`inserted_at` and `updated_at`).
Do not use this option if you have auto-incrementing primary keys, as they
will also be replaced. You most likely want to use `{:replace_all_except, [:id]}`
or `{:replace, fields}` explicitly instead. This option requires a schema
* `{:replace_all_except, fields}` - same as above except the given fields are
not replaced. This option requires a schema
* `{:replace, fields}` - replace only specific columns. This option requires
`:conflict_target`
* a keyword list of update instructions - such as the one given to
`c:update_all/3`, for example: `[set: [title: "new title"]]`
* an `Ecto.Query` that will act as an `UPDATE` statement, such as the
one given to `c:update_all/3`. Similarly to `c:update_all/3`, auto
generated values, such as timestamps are not automatically updated.
If the struct cannot be found, `Ecto.StaleEntryError` will be raised.
Upserts map to "ON CONFLICT" on databases like Postgres and "ON DUPLICATE KEY"
on databases such as MySQL.
As an example, imagine `:title` is marked as a unique column in
the database:
{:ok, inserted} = MyRepo.insert(%Post{title: "this is unique"})
Now we can insert with the same title but do nothing on conflicts:
{:ok, ignored} = MyRepo.insert(%Post{title: "this is unique"}, on_conflict: :nothing)
assert ignored.id == nil
Because we used `on_conflict: :nothing`, instead of getting an error,
we got `{:ok, struct}`. However the returned struct does not reflect
the data in the database. One possible mechanism to detect if an
insert or nothing happened in case of `on_conflict: :nothing` is by
checking the `id` field. `id` will be nil if the field is autogenerated
by the database and no insert happened.
For actual upserts, where an insert or update may happen, the situation
is slightly more complex, as the database does not actually inform us
if an insert or update happened. Let's insert a post with the same title
but use a query to update the body column in case of conflicts:
# In Postgres (it requires the conflict target for updates):
on_conflict = [set: [body: "updated"]]
{:ok, updated} = MyRepo.insert(%Post{title: "this is unique"},
on_conflict: on_conflict, conflict_target: :title)
# In MySQL (conflict target is not supported):
on_conflict = [set: [title: "updated"]]
{:ok, updated} = MyRepo.insert(%Post{id: inserted.id, title: "updated"},
on_conflict: on_conflict)
In the examples above, even though it returned `:ok`, we do not know
if we inserted new data or if we updated only the `:on_conflict` fields.
In case an update happened, the data in the struct most likely does
not match the data in the database. For example, autogenerated fields
such as `inserted_at` will point to now rather than the time the
struct was actually inserted.
If you need to guarantee the data in the returned struct mirrors the
database, you have three options:
* Use `on_conflict: :replace_all`, although that will replace all
fields in the database with the ones in the struct/changeset,
including autogenerated fields such as `inserted_at` and `updated_at`:
MyRepo.insert(%Post{title: "this is unique"},
on_conflict: :replace_all, conflict_target: :title)
* Specify `read_after_writes: true` in your schema for choosing
fields that are read from the database after every operation.
Or pass `returning: true` to `insert` to read all fields back:
MyRepo.insert(%Post{title: "this is unique"}, returning: true,
on_conflict: on_conflict, conflict_target: :title)
* Alternatively, read the data again from the database in a separate
query. This option requires the primary key to be generated by the
database:
{:ok, updated} = MyRepo.insert(%Post{title: "this is unique"}, on_conflict: on_conflict)
Repo.get(Post, updated.id)
Because of the inability to know if the struct is up to date or not,
inserting a struct with associations and using the `:on_conflict` option
at the same time is not recommended, as Ecto will be unable to actually
track the proper status of the association.
"""
@doc group: "Schema API"
@callback insert(
struct_or_changeset :: Ecto.Schema.t() | Ecto.Changeset.t(),
opts :: Keyword.t()
) :: {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
@doc """
Updates a changeset using its primary key.
A changeset is required as it is the only mechanism for
tracking dirty changes. Only the fields present in the `changes` part
of the changeset are sent to the database. Any other, in-memory
changes done to the schema are ignored. If more than one database
operation is required, they're automatically wrapped in a transaction.
If the struct has no primary key, `Ecto.NoPrimaryKeyFieldError`
will be raised.
If the struct cannot be found, `Ecto.StaleEntryError` will be raised.
It returns `{:ok, struct}` if the struct has been successfully
updated or `{:error, changeset}` if there was a validation
or a known constraint error.
## Options
* `:returning` - selects which fields to return. It accepts a list
of fields to be returned from the database. When `true`, returns
all fields. When `false`, no extra fields are returned. It will
always include all fields in `read_after_writes`. Not all
databases support this option.
* `:force` - By default, if there are no changes in the changeset,
`c:update/2` is a no-op. By setting this option to true, update
callbacks will always be executed, even if there are no changes
(including timestamps).
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query and any `@schema_prefix` set any schemas. Also, the
`@schema_prefix` for the parent record will override all default
`@schema_prefix`s set in any child schemas for associations.
* `:stale_error_field` - The field where stale errors will be added in
the returning changeset. This option can be used to avoid raising
`Ecto.StaleEntryError`.
* `:stale_error_message` - The message to add to the configured
`:stale_error_field` when stale errors happen, defaults to "is stale".
See the ["Shared options"](#module-shared-options) section at the module
documentation for more options.
## Example
post = MyRepo.get!(Post, 42)
post = Ecto.Changeset.change post, title: "New title"
case MyRepo.update post do
{:ok, struct} -> # Updated with success
{:error, changeset} -> # Something went wrong
end
"""
@doc group: "Schema API"
@callback update(changeset :: Ecto.Changeset.t(), opts :: Keyword.t()) ::
{:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
@doc """
Inserts or updates a changeset depending on whether the struct is persisted
or not.
The distinction whether to insert or update will be made on the
`Ecto.Schema.Metadata` field `:state`. The `:state` is automatically set by
Ecto when loading or building a schema.
Please note that for this to work, you will have to load existing structs from
the database. So even if the struct exists, this won't work:
struct = %Post{id: "existing_id", ...}
MyRepo.insert_or_update changeset
# => {:error, changeset} # id already exists
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query and any `@schema_prefix` set any schemas. Also, the
`@schema_prefix` for the parent record will override all default
`@schema_prefix`s set in any child schemas for associations.
* `:stale_error_field` - The field where stale errors will be added in
the returning changeset. This option can be used to avoid raising
`Ecto.StaleEntryError`. Only applies to updates.
* `:stale_error_message` - The message to add to the configured
`:stale_error_field` when stale errors happen, defaults to "is stale".
Only applies to updates.
See the ["Shared options"](#module-shared-options) section at the module
documentation for more options.
## Example
result =
case MyRepo.get(Post, id) do
nil -> %Post{id: id} # Post not found, we build one
post -> post # Post exists, let's use it
end
|> Post.changeset(changes)
|> MyRepo.insert_or_update
case result do
{:ok, struct} -> # Inserted or updated with success
{:error, changeset} -> # Something went wrong
end
"""
@doc group: "Schema API"
@callback insert_or_update(changeset :: Ecto.Changeset.t(), opts :: Keyword.t()) ::
{:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
@doc """
Deletes a struct using its primary key.
If the struct has no primary key, `Ecto.NoPrimaryKeyFieldError`
will be raised. If the struct has been removed prior to the call,
`Ecto.StaleEntryError` will be raised. If more than one database
operation is required, they're automatically wrapped in a transaction.
It returns `{:ok, struct}` if the struct has been successfully
deleted or `{:error, changeset}` if there was a validation
or a known constraint error. By default, constraint errors will
raise the `Ecto.ConstraintError` exception, unless a changeset is
given as the first argument with the relevant constraints declared
in it (see `Ecto.Changeset`).
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query and any `@schema_prefix` set in the schema.
* `:stale_error_field` - The field where stale errors will be added in
the returning changeset. This option can be used to avoid raising
`Ecto.StaleEntryError`.
* `:stale_error_message` - The message to add to the configured
`:stale_error_field` when stale errors happen, defaults to "is stale".
See the ["Shared options"](#module-shared-options) section at the module
documentation for more options.
## Example
post = MyRepo.get!(Post, 42)
case MyRepo.delete post do
{:ok, struct} -> # Deleted with success
{:error, changeset} -> # Something went wrong
end
"""
@doc group: "Schema API"
@callback delete(
struct_or_changeset :: Ecto.Schema.t() | Ecto.Changeset.t(),
opts :: Keyword.t()
) :: {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
@doc """
Same as `c:insert/2` but returns the struct or raises if the changeset is invalid.
"""
@doc group: "Schema API"
@callback insert!(
struct_or_changeset :: Ecto.Schema.t() | Ecto.Changeset.t(),
opts :: Keyword.t()
) :: Ecto.Schema.t()
@doc """
Same as `c:update/2` but returns the struct or raises if the changeset is invalid.
"""
@doc group: "Schema API"
@callback update!(changeset :: Ecto.Changeset.t(), opts :: Keyword.t()) ::
Ecto.Schema.t()
@doc """
Same as `c:insert_or_update/2` but returns the struct or raises if the changeset
is invalid.
"""
@doc group: "Schema API"
@callback insert_or_update!(changeset :: Ecto.Changeset.t(), opts :: Keyword.t()) ::
Ecto.Schema.t()
@doc """
Same as `c:delete/2` but returns the struct or raises if the changeset is invalid.
"""
@doc group: "Schema API"
@callback delete!(
struct_or_changeset :: Ecto.Schema.t() | Ecto.Changeset.t(),
opts :: Keyword.t()
) :: Ecto.Schema.t()
  ## Ecto.Adapter.Transaction

  # Transaction callbacks are optional: an adapter only provides them
  # when it implements the `Ecto.Adapter.Transaction` behaviour.
  @optional_callbacks transaction: 2, in_transaction?: 0, rollback: 1
@doc """
Runs the given function or `Ecto.Multi` inside a transaction.
## Use with function
`c:transaction/2` can be called with both a function of arity
zero or one. The arity zero function will just be executed as is,
while the arity one function will receive the repo of the transaction
as its first argument, similar to `Ecto.Multi.run/3`.
If an unhandled error occurs the transaction will be rolled back
and the error will bubble up from the transaction function.
If no error occurred the transaction will be committed when the
function returns. A transaction can be explicitly rolled back
by calling `c:rollback/1`, this will immediately leave the function
and return the value given to `rollback` as `{:error, value}`.
A successful transaction returns the value returned by the function
wrapped in a tuple as `{:ok, value}`.
If `c:transaction/2` is called inside another transaction, the function
is simply executed, without wrapping the new transaction call in any
way. If there is an error in the inner transaction and the error is
rescued, or the inner transaction is rolled back, the whole outer
transaction is marked as tainted, guaranteeing nothing will be committed.
## Use with Ecto.Multi
Besides functions, transactions can be used with an `Ecto.Multi` struct.
A transaction will be started, all operations applied and in case of
success committed returning `{:ok, changes}`. In case of any errors
the transaction will be rolled back and
`{:error, failed_operation, failed_value, changes_so_far}` will be
returned.
You can read more about using transactions with `Ecto.Multi` as well as
see some examples in the `Ecto.Multi` documentation.
## Options
See the ["Shared options"](#module-shared-options) section at the module
documentation for more options.
## Examples
import Ecto.Changeset, only: [change: 2]
MyRepo.transaction(fn ->
MyRepo.update!(change(alice, balance: alice.balance - 10))
MyRepo.update!(change(bob, balance: bob.balance + 10))
end)
# When passing a function of arity 1, it receives the repository itself
MyRepo.transaction(fn repo ->
repo.insert!(%Post{})
end)
# Roll back a transaction explicitly
MyRepo.transaction(fn ->
p = MyRepo.insert!(%Post{})
if not Editor.post_allowed?(p) do
MyRepo.rollback(:posting_not_allowed)
end
end)
# With Ecto.Multi
Ecto.Multi.new()
|> Ecto.Multi.insert(:post, %Post{})
|> MyRepo.transaction
"""
@doc group: "Transaction API"
@callback transaction(fun_or_multi :: fun | Ecto.Multi.t(), opts :: Keyword.t()) ::
{:ok, any}
| {:error, any}
| {:error, Ecto.Multi.name(), any, %{Ecto.Multi.name() => any}}
@doc """
Returns true if the current process is inside a transaction.
If you are using the `Ecto.Adapters.SQL.Sandbox` in tests, note that even
though each test is inside a transaction, `in_transaction?/0` will only
return true inside transactions explicitly created with `transaction/2`. This
is done so the test environment mimics dev and prod.
If you are trying to debug transaction-related code while using
`Ecto.Adapters.SQL.Sandbox`, it may be more helpful to configure the database
to log all statements and consult those logs.
## Examples
MyRepo.in_transaction?
#=> false
MyRepo.transaction(fn ->
MyRepo.in_transaction? #=> true
end)
"""
@doc group: "Transaction API"
@callback in_transaction?() :: boolean
@doc """
Rolls back the current transaction.
The transaction will return the value given as `{:error, value}`.
Note that calling `rollback` causes the code in the transaction to stop executing.
"""
@doc group: "Transaction API"
@callback rollback(value :: any) :: no_return
end
| 38.045921 | 164 | 0.673388 |
086a17a29f3c8007d14403e0e3d6a2ed79d66c44 | 490 | ex | Elixir | lib/providers/internal_token.ex | Betterez/btrz_ex_auth_api | cf4fc4826a4b41d527b2cf50d1cbfbe34973c1ed | [
"MIT"
] | 1 | 2018-10-16T08:13:12.000Z | 2018-10-16T08:13:12.000Z | lib/providers/internal_token.ex | Betterez/btrz_ex_auth_api | cf4fc4826a4b41d527b2cf50d1cbfbe34973c1ed | [
"MIT"
] | null | null | null | lib/providers/internal_token.ex | Betterez/btrz_ex_auth_api | cf4fc4826a4b41d527b2cf50d1cbfbe34973c1ed | [
"MIT"
] | null | null | null | defmodule BtrzAuth.Providers.InternalToken do
@moduledoc """
InternalTokenToken will generate tokens for internal services authentication
"""
@doc """
Gets a token with the internal options
"""
@spec get_token(Keyword.t()) ::
{:ok, Guardian.Token.token(), Guardian.Token.claims()} | {:error, any}
def get_token(opts) do
secret = Keyword.get(opts, :main_secret, "")
BtrzAuth.Guardian.encode_and_sign(%{}, %{}, secret: secret, ttl: {2, :minutes})
end
end
| 30.625 | 83 | 0.67551 |
086a5d5b402699e32e8682f376e97b93e0f24529 | 5,713 | ex | Elixir | lib/plug/session/cookie.ex | gjaldon/plug | bfe88530b429c7b9b29b69b737772ef7c6aa2f6b | [
"Apache-2.0"
] | null | null | null | lib/plug/session/cookie.ex | gjaldon/plug | bfe88530b429c7b9b29b69b737772ef7c6aa2f6b | [
"Apache-2.0"
] | null | null | null | lib/plug/session/cookie.ex | gjaldon/plug | bfe88530b429c7b9b29b69b737772ef7c6aa2f6b | [
"Apache-2.0"
] | 1 | 2019-11-23T12:09:14.000Z | 2019-11-23T12:09:14.000Z | defmodule Plug.Session.COOKIE do
@moduledoc """
Stores the session in a cookie.
This cookie store is based on `Plug.Crypto.MessageVerifier`
and `Plug.Crypto.Message.Encryptor` which encrypts and signs
each cookie to ensure they can't be read nor tampered with.
Since this store uses crypto features, it requires you to
set the `:secret_key_base` field in your connection. This
can be easily achieved with a plug:
plug :put_secret_key_base
def put_secret_key_base(conn, _) do
put_in conn.secret_key_base, "-- LONG STRING WITH AT LEAST 64 BYTES --"
end
## Options
* `:encryption_salt` - a salt used with `conn.secret_key_base` to generate
a key for encrypting/decrypting a cookie.
* `:signing_salt` - a salt used with `conn.secret_key_base` to generate a
key for signing/verifying a cookie;
* `:key_iterations` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to 1000;
* `:key_length` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to 32;
* `:key_digest` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to `:sha256`;
* `:serializer` - cookie serializer module that defines `encode/1` and
`decode/1` returning an `{:ok, value}` tuple. Defaults to
`:external_term_format`.
* `:log` - Log level to use when the cookie cannot be decoded.
Defaults to `:debug`, can be set to false to disable it.
## Examples
# Use the session plug with the table name
plug Plug.Session, store: :cookie,
key: "_my_app_session",
encryption_salt: "cookie store encryption salt",
signing_salt: "cookie store signing salt",
key_length: 64,
log: :debug
"""
require Logger
@behaviour Plug.Session.Store
alias Plug.Crypto.KeyGenerator
alias Plug.Crypto.MessageVerifier
alias Plug.Crypto.MessageEncryptor
# Builds the store state from the plug options: derivation options for the
# keys, the salts, the serializer and the log level. Raises if the required
# :signing_salt is missing or the serializer is not a module/atom.
def init(opts) do
  key_opts = [
    iterations: Keyword.get(opts, :key_iterations, 1000),
    length: Keyword.get(opts, :key_length, 32),
    digest: Keyword.get(opts, :key_digest, :sha256),
    # Derived keys are memoized under Plug.Keys.
    cache: Plug.Keys
  ]

  %{
    encryption_salt: opts[:encryption_salt],
    signing_salt: check_signing_salt(opts),
    key_opts: key_opts,
    serializer: check_serializer(opts[:serializer] || :external_term_format),
    log: Keyword.get(opts, :log, :debug)
  }
end
# Verifies (and, when an encryption salt is configured, decrypts) the cookie
# and decodes it into the session map. Returns {tag, session} via decode/3.
def get(conn, cookie, opts) do
  %{key_opts: key_opts, signing_salt: signing_salt, log: log, serializer: serializer} = opts

  verified =
    case opts do
      %{encryption_salt: nil} ->
        # Signed-only cookies: verify the signature.
        MessageVerifier.verify(cookie, derive(conn, signing_salt, key_opts))

      %{encryption_salt: encryption_salt} ->
        # TODO: Change to verify/3 after backwards compatibility period.
        MessageEncryptor.verify_and_decrypt(
          cookie,
          derive(conn, encryption_salt, key_opts),
          derive(conn, signing_salt, key_opts)
        )
    end

  decode(verified, serializer, log)
end
# Serializes the session term and signs it — or encrypts it when an
# encryption salt is configured — producing the cookie value to send.
def put(conn, _sid, term, opts) do
  %{serializer: serializer, key_opts: key_opts, signing_salt: signing_salt} = opts

  payload = encode(term, serializer)
  signing_key = derive(conn, signing_salt, key_opts)

  case opts do
    %{encryption_salt: nil} ->
      MessageVerifier.sign(payload, signing_key)

    %{encryption_salt: encryption_salt} ->
      MessageEncryptor.encrypt(payload, derive(conn, encryption_salt, key_opts), signing_key)
  end
end
# The session lives entirely in the cookie, so there is no server-side state
# to remove; deletion is a no-op here.
def delete(_conn, _sid, _opts), do: :ok
# Serializes the session term for storage in the cookie. The built-in
# :external_term_format uses Erlang term serialization; any other serializer
# must be a module returning {:ok, binary} (a mismatch raises MatchError).
defp encode(term, :external_term_format), do: :erlang.term_to_binary(term)

defp encode(term, serializer) do
  {:ok, encoded} = serializer.encode(term)
  encoded
end
# Decodes a verified cookie payload.
#
# Returns `{:term, map}` for the built-in external term format,
# `{:custom, map}` for user-provided serializers and `{nil, %{}}` when
# verification/decryption failed (`:error`). Any decoding failure falls back
# to an empty session rather than raising.
defp decode({:ok, binary}, :external_term_format, _log) do
  {:term,
   try do
     # Security hardening: the :safe option prevents decoding from creating
     # new atoms (or other unsafe terms) in this VM. Payloads referencing
     # unknown atoms raise ArgumentError and are treated as an empty session —
     # a deliberate trade-off versus the previous unrestricted decode.
     :erlang.binary_to_term(binary, [:safe])
   rescue
     _ -> %{}
   end}
end

defp decode({:ok, binary}, serializer, _log) do
  case serializer.decode(binary) do
    {:ok, term} -> {:custom, term}
    _ -> {:custom, %{}}
  end
end

defp decode(:error, _serializer, false) do
  # Logging disabled (log: false): silently reset the session.
  {nil, %{}}
end

defp decode(:error, _serializer, log) do
  Logger.log log, "Plug.Session could not decode incoming session cookie. " <>
                  "This may happen when the session settings change or a stale cookie is sent."
  {nil, %{}}
end
# Derives a salted key from the connection's secret_key_base (validating it
# first), using the key generator options built in init/1.
defp derive(conn, salt, key_opts) do
  secret = validate_secret_key_base(conn.secret_key_base)
  KeyGenerator.generate(secret, salt, key_opts)
end
# Ensures the secret key base is present and at least 64 bytes before any
# key derivation takes place; raises ArgumentError otherwise.
defp validate_secret_key_base(nil) do
  raise ArgumentError, "cookie store expects conn.secret_key_base to be set"
end

defp validate_secret_key_base(secret_key_base) when byte_size(secret_key_base) < 64 do
  raise ArgumentError, "cookie store expects conn.secret_key_base to be at least 64 bytes"
end

defp validate_secret_key_base(secret_key_base) do
  secret_key_base
end
# Fetches the mandatory :signing_salt option, raising when it is absent.
defp check_signing_salt(opts) do
  salt = opts[:signing_salt]

  if is_nil(salt) do
    raise ArgumentError, "cookie store expects :signing_salt as option"
  else
    salt
  end
end
# The serializer must be a module (an atom); anything else is rejected early.
defp check_serializer(serializer) when is_atom(serializer), do: serializer

defp check_serializer(_) do
  raise ArgumentError, "cookie store expects :serializer option to be a module"
end
end
| 32.833333 | 97 | 0.653422 |
086a77dd5c782b5fa9a09fa39fe314a519a401d8 | 4,300 | ex | Elixir | lib/faker/industry/hy.ex | joshillian/faker | eeede9d7c35c543dcf6abe72dc476e755c80415b | [
"MIT"
] | 540 | 2015-01-05T16:31:49.000Z | 2019-09-25T00:40:27.000Z | lib/faker/industry/hy.ex | joshillian/faker | eeede9d7c35c543dcf6abe72dc476e755c80415b | [
"MIT"
] | 172 | 2015-01-06T03:55:17.000Z | 2019-10-03T12:58:02.000Z | lib/faker/industry/hy.ex | joshillian/faker | eeede9d7c35c543dcf6abe72dc476e755c80415b | [
"MIT"
] | 163 | 2015-01-05T21:24:54.000Z | 2019-10-03T07:59:42.000Z | defmodule Faker.Industry.Hy do
import Faker, only: [sampler: 2]

@moduledoc """
Functions for generating industry related data in Armenian
"""

@doc """
Returns an industry name.

## Examples

    iex> Faker.Industry.Hy.industry
    "Հյուրընկալություն"
    iex> Faker.Industry.Hy.industry
    "Բժշկական Գործունեություն"
    iex> Faker.Industry.Hy.industry
    "Վենչուրային և Մասնավոր Կապիտալ"
    iex> Faker.Industry.Hy.industry
    "Էներգետիկա"
"""
@spec industry() :: String.t()
# `sampler/2` (a macro imported from Faker) defines `industry/0`, which
# returns a pseudo-random element of the list below — hence the differing
# outputs for the identical doctest calls above.
sampler(:industry, [
  "Պաշտպանություն",
  "Համակարգչային Տեխնիկա",
  "Համակարգչային Ծրագրեր",
  "Համակարգչային Ցանցեր",
  "Համացանց",
  "Կիսահաղորդիչներ",
  "Հեռահաղորդակցություն",
  "Իրավական Գործառույթներ",
  "Իրավաբանական Ծառայություններ",
  "Կառավարման Խորհրդատվություն",
  "Կենսատեխնոլոգիա",
  "Բժշկական Գործունեություն",
  "Հիվանդանոց և Առողջապահություն",
  "Դեղագործություն",
  "Անասնաբուժություն",
  "Բժշկական Սարքավորումներ",
  "Կոսմետիկա",
  "Հագուստ և Նորաձևություն",
  "Սպորտային Ապրանքներ",
  "Ծխախոտագործություն",
  "Սուպերմարկետներ",
  "Սննդի Արտադրություն",
  "Սպառողական Էլեկտրոնիկա",
  "Սպառողական Ապրանքներ",
  "Կահույք",
  "Մանրածախ Առևտուր",
  "Զվարճանք",
  "Դրամախաղ և Խաղատներ",
  "Ժամանց",
  "Ճանապարհորդություն և Տուրիզմ",
  "Հյուրընկալություն",
  "Ռեստորաններ",
  "Սպորտաձևեր",
  "Սննդամթերք և Ըմպելիքներ",
  "Կինոնկարներ",
  "Հեռարձակվող Մեդիա",
  "Թանգարաններ",
  "Կատարողական Արվեստ",
  "Ժամանցի Հարմարություններ և Ծառայություններ",
  "Բանկային Գործունեություն",
  "Ապահովագրություն",
  "Ֆինանսական Ծառայություններ",
  "Անշարժ Գույք",
  "Ներդրումային Բանկային Ծառայություններ",
  "Ներդրումային Կառավարում",
  "Հաշվապահություն",
  "Շինարարություն",
  "Շինանյութեր",
  "Ճարտարապետություն և Պլանավորում",
  "Քաղաքացիական Շինարարություն",
  "Քիմիական Նյութեր",
  "Մեքենաներ",
  "Հանքարդյունաբերություն և Մետաղներ",
  "Նավթարդյունաբերություն",
  "Էներգետիկա",
  "Նավաշինություն",
  "Կոմունալ Ծառայություններ",
  "Տեքստիլ Արդյունաբերություն",
  "Թուղթ և Անտառային Ապրանքներ",
  "Երկաթուղու Արտադրություն",
  "Հողագործություն",
  "Անասնապահություն",
  "Կաթնամթերք",
  "Ձկնորսություն",
  "Միջնակարգ Կրթություն",
  "Բարձրագույն Կրթություն",
  "Կրթության Կառավարում",
  "Հետազոտություն",
  "Ռազմական Գործ",
  "Օրենսդրական Գրասենյակ",
  "Դատարան",
  "Միջազգային Հարաբերություններ",
  "Կառավարության Ղեկավարում",
  "Իրավապահ Համակարգ",
  "Հանրային Անվտանգություն",
  "Հանրային Քաղաքականություն",
  "Մարկետինգ և Գովազդ",
  "Թերթեր",
  "Հրատարակչություն",
  "Տպագրություն",
  "Տեղեկատվական Ծառայություններ",
  "Գրադարաններ",
  "Բեռնափոխադրումներ",
  "Անհատական և Ընտանեկան Ծառայություններ",
  "Կրոնական Հաստատություններ",
  "Քաղաքացիական և Սոցիալական Կազմակերպություն",
  "Սպառողական Ծառայություններ",
  "Երկաթուղի",
  "Պահեստավորում",
  "Ավիացիա",
  "Տեղեկատվական Տեխնոլոգիաներ և Ծառայություններ",
  "Շուկայի ՈՒսումնասիրություն",
  "Հասարակայնության Հետ Կապեր և Հաղորդակցություն",
  "Դիզայն",
  "Մասնագիտական Վերապատրաստում",
  "Վենչուրային և Մասնավոր Կապիտալ",
  "Թարգմանություն և Տեղայնացում",
  "Համակարգչային Խաղեր",
  "Իրադարձությունների Կազմակերպում",
  "Արվեստ և Արհեստ",
  "Էլեկտրական և Էլեկտրոնային Արտադրություն",
  "Առցանց Լրատվամիջոցներ",
  "Նանոտեխնոլոգիա",
  "Երաժշտություն",
  "Լոգիստիկա և Մատակարարում",
  "Համակարգչային և Ցանցային Անվտանգություն",
  "Անլար Տեխնոլոգիաներ",
  "Անվտանգություն և Հետաքննություն",
  "Ծառայությունների Մատուցում",
  "Աութսորսինգ և Օֆշորային Ծառայություններ",
  "Այլընտրանքային Բժշկություն",
  "Մեդիա Արտադրանք",
  "Կապիտալի Շուկաներ",
  "Բարեգործություն",
  "Մեծածախ Առևտուր",
  "Ներմուծում և Արտահանում",
  "Մեխանիկական կամ Արդյունաբերական Ճարտարագիտություն",
  "Լուսանկարչություն",
  "Մարդկային Ռեսուրսներ",
  "Բիզնես Սարքավորումներ",
  "Հոգեկան Առողջության Խնամք",
  "Գրաֆիկական Դիզայն",
  "Միջազգային Առևտուր և Զարգացում",
  "Ալկոհոլային Խմիչքներ",
  "Պերճանքի Առարկաներ և Ոսկերչական Իրեր",
  "Շրջակա Միջավայրի Պահպանություն",
  "Ապակի և Կերամիկա",
  "Փաթեթավորում և Բեռնարկղեր",
  "Արդյունաբերական Ավտոմատացում",
  "Կառավարական Հարաբերություններ"
])
end
| 27.564103 | 60 | 0.684186 |
086ab1916ce68492ae2e8af71b20259a96c69461 | 282 | exs | Elixir | priv/repo/migrations/20171223111152_add_cms_author_id_to_posts.exs | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | 1 | 2019-05-07T15:03:54.000Z | 2019-05-07T15:03:54.000Z | priv/repo/migrations/20171223111152_add_cms_author_id_to_posts.exs | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null | priv/repo/migrations/20171223111152_add_cms_author_id_to_posts.exs | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null | defmodule MastaniServer.Repo.Migrations.AddCmsAuthorIdToPosts do
use Ecto.Migration
def change do
  # Link each post to its author; deleting an author cascades and removes
  # that author's posts (on_delete: :delete_all). The column is NOT NULL,
  # so every post must have an author.
  alter table(:cms_posts) do
    add(:author_id, references(:cms_authors, on_delete: :delete_all), null: false)
  end

  # Index the new foreign key for author-based lookups.
  create(index(:cms_posts, [:author_id]))
end
end
| 23.5 | 84 | 0.730496 |
086ab6a8988a0c587b231760c38b2663cde08c5b | 1,641 | ex | Elixir | clients/container/lib/google_api/container/v1/model/legacy_abac.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/container/lib/google_api/container/v1/model/legacy_abac.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/container/lib/google_api/container/v1/model/legacy_abac.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Container.V1.Model.LegacyAbac do
  @moduledoc """
  Configuration for the legacy Attribute Based Access Control authorization mode.

  ## Attributes

  * `enabled` (*type:* `boolean()`, *default:* `nil`) - Whether the ABAC authorizer is enabled for this cluster. When enabled, identities in the system, including service accounts, nodes, and controllers, will have statically granted permissions beyond those provided by the RBAC configuration or IAM.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :enabled => boolean() | nil
        }

  # NOTE(review): generated file (the header says "Do not edit this file
  # manually"); `field/1` is presumably injected by GoogleApi.Gax.ModelBase
  # to declare the JSON field — confirm against the Gax library.
  field(:enabled)
end
# Delegates Poison decoding of LegacyAbac JSON to the model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.Container.V1.Model.LegacyAbac do
  def decode(value, options) do
    GoogleApi.Container.V1.Model.LegacyAbac.decode(value, options)
  end
end
# Delegates Poison encoding of LegacyAbac to the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Container.V1.Model.LegacyAbac do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 34.914894 | 303 | 0.748324 |
086ac49fd2c8f9ac83396235e5dfa9783116c5cc | 212 | ex | Elixir | testData/org/elixir_lang/formatting/map_update_arguments_align_keywords.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/formatting/map_update_arguments_align_keywords.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/formatting/map_update_arguments_align_keywords.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | %{
state |
request_by_correlation_id: Map.put(
state.request_by_correlation_id,
correlation_id,
%Retort.Client.Request{
from: from,
method: String.to_existing_atom(method)
}
)
}
| 17.666667 | 45 | 0.665094 |
086b00049d4e2a6a4031a1baec23c4238946c8d0 | 1,306 | exs | Elixir | test/mango_web/acceptance/registration_test.exs | sophiabrandt/mango | c42c2e463c7bdeb7f18400a3001a8cc6e326159d | [
"MIT"
] | 4 | 2019-07-17T04:46:37.000Z | 2020-09-20T14:15:23.000Z | test/mango_web/acceptance/registration_test.exs | sophiabrandt/mango | c42c2e463c7bdeb7f18400a3001a8cc6e326159d | [
"MIT"
] | null | null | null | test/mango_web/acceptance/registration_test.exs | sophiabrandt/mango | c42c2e463c7bdeb7f18400a3001a8cc6e326159d | [
"MIT"
] | 1 | 2019-07-17T04:46:39.000Z | 2019-07-17T04:46:39.000Z | defmodule MangoWeb.Acceptance.RegistrationTest do
use Mango.DataCase
use Hound.Helpers

# Starts a browser session (Hound) for each test in this module.
hound_session()

# Drives the registration form end-to-end in a real browser: fills every
# field, submits, and expects a redirect to "/" with a success flash.
test "registers an account with valid data" do
  navigate_to("/register")

  form = find_element(:id, "registration-form")

  find_within_element(form, :name, "registration[name]")
  |> fill_field("John")

  find_within_element(form, :name, "registration[email]")
  |> fill_field("john@example.com")

  find_within_element(form, :name, "registration[phone]")
  |> fill_field("1111")

  # NOTE(review): this CSS selector string spans two lines; the embedded
  # newline acts as whitespace in CSS, selecting the "Area 1" option inside
  # #registration_residence_area — confirm this matches the original source.
  find_element(:css, "#registration_residence_area
option[value='Area 1']")
  |> click

  find_within_element(form, :name, "registration[password]")
  |> fill_field("password")

  find_within_element(form, :tag, "button")
  |> click

  assert current_path() == "/"

  message =
    find_element(:class, "alert")
    |> visible_text()

  assert message == "Registration successful"
end

# Submitting the empty form must keep the user on /register and show the
# form-level error banner.
test "shows error messages on invalid data" do
  navigate_to("/register")

  form = find_element(:id, "registration-form")
  find_within_element(form, :tag, "button") |> click

  assert current_path() == "/register"

  message = find_element(:id, "form-error") |> visible_text()
  assert message == "Oops, something went wrong! Please check the errors below."
end
end
| 25.607843 | 82 | 0.677642 |
086b1c8a4874502c98ddcbda70e98011eae1ca78 | 409 | exs | Elixir | test/ctrlv_web/views/error_view_test.exs | ryanwinchester/ctrlv | eee44962dda062ba2154cc8bb57d86a6d814c71f | [
"Apache-2.0"
] | 1 | 2022-03-31T17:55:16.000Z | 2022-03-31T17:55:16.000Z | test/ctrlv_web/views/error_view_test.exs | ryanwinchester/ctrlv | eee44962dda062ba2154cc8bb57d86a6d814c71f | [
"Apache-2.0"
] | 7 | 2022-03-30T02:52:54.000Z | 2022-03-30T23:11:01.000Z | test/ctrlv_web/views/error_view_test.exs | ryanwinchester/ctrlv | eee44962dda062ba2154cc8bb57d86a6d814c71f | [
"Apache-2.0"
] | 1 | 2022-03-31T03:37:16.000Z | 2022-03-31T03:37:16.000Z | defmodule CtrlvWeb.ErrorViewTest do
use CtrlvWeb.ConnCase, async: true

# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View

# Each test renders an error template to a string and pins the exact
# user-facing copy.
test "renders 404.html" do
  assert render_to_string(CtrlvWeb.ErrorView, "404.html", []) == "Not Found"
end

test "renders 500.html" do
  assert render_to_string(CtrlvWeb.ErrorView, "500.html", []) == "Internal Server Error"
end
end
| 27.266667 | 90 | 0.728606 |
086b2c82ec399f214dea8a67cedd8aea5f9af3f2 | 3,575 | exs | Elixir | test/user/refute_called_once_test.exs | kianmeng/patch | d7d8d70a0285129ec67a43473db587011524fe0c | [
"MIT"
] | 57 | 2020-04-22T00:19:04.000Z | 2022-03-20T11:57:00.000Z | test/user/refute_called_once_test.exs | kianmeng/patch | d7d8d70a0285129ec67a43473db587011524fe0c | [
"MIT"
] | 9 | 2021-10-29T20:54:56.000Z | 2022-02-19T03:41:01.000Z | test/user/refute_called_once_test.exs | kianmeng/patch | d7d8d70a0285129ec67a43473db587011524fe0c | [
"MIT"
] | 2 | 2021-07-02T14:41:48.000Z | 2022-01-12T11:47:26.000Z | defmodule Patch.Test.User.RefuteCalledOnecTest do
use ExUnit.Case
use Patch
alias Patch.Test.Support.User.RefuteCalledOnce
# Every test patches RefuteCalledOnce.example/2 and exercises
# refute_called_once: it passes when the given call pattern matched zero
# times or more than once, and raises Patch.UnexpectedCall when the pattern
# matched exactly once (demonstrated by the raising tests below).
describe "refute_called_once/1" do
  test "exact call can be refuted" do
    patch(RefuteCalledOnce, :example, :patched)

    assert RefuteCalledOnce.example(1, 2) == :patched

    refute_called_once RefuteCalledOnce.example(3, 4)
  end

  test "exact call that have happened raise UnexpectedCall" do
    patch(RefuteCalledOnce, :example, :patched)

    assert RefuteCalledOnce.example(1, 2) == :patched

    assert_raise Patch.UnexpectedCall, fn ->
      refute_called_once RefuteCalledOnce.example(1, 2)
    end
  end

  test "exact call after multiple calls can be refuted" do
    patch(RefuteCalledOnce, :example, :patched)

    # Two identical calls: "once" no longer holds, so the refutation passes.
    assert RefuteCalledOnce.example(1, 2) == :patched
    assert RefuteCalledOnce.example(1, 2) == :patched

    refute_called_once RefuteCalledOnce.example(1, 2)
  end

  test "partial call can be refuted" do
    patch(RefuteCalledOnce, :example, :patched)

    assert RefuteCalledOnce.example(1, 2) == :patched

    # `_` wildcards match any argument in that position.
    refute_called_once RefuteCalledOnce.example(3, _)
    refute_called_once RefuteCalledOnce.example(_, 4)
  end

  test "partial call that match raises UnexpectedCall" do
    patch(RefuteCalledOnce, :example, :patched)

    assert RefuteCalledOnce.example(1, 2) == :patched

    assert_raise Patch.UnexpectedCall, fn ->
      refute_called_once RefuteCalledOnce.example(1, _)
    end

    assert_raise Patch.UnexpectedCall, fn ->
      refute_called_once RefuteCalledOnce.example(_, 2)
    end
  end

  test "partial call after multiple calls can be refuted" do
    patch(RefuteCalledOnce, :example, :patched)

    assert RefuteCalledOnce.example(1, 2) == :patched
    assert RefuteCalledOnce.example(1, 3) == :patched
    assert RefuteCalledOnce.example(3, 2) == :patched

    refute_called_once RefuteCalledOnce.example(1, _)
    refute_called_once RefuteCalledOnce.example(_, 2)
  end

  test "an uncalled function can be wildcard refuted" do
    patch(RefuteCalledOnce, :example, :patched)

    refute_called_once RefuteCalledOnce.example(_, _)
  end

  test "any call causes a wildcard to raise UnexpectedCall" do
    patch(RefuteCalledOnce, :example, :patched)

    assert RefuteCalledOnce.example(1, 2) == :patched

    assert_raise Patch.UnexpectedCall, fn ->
      refute_called_once RefuteCalledOnce.example(_, _)
    end
  end

  test "wildcard call with multiple calls can be refuted" do
    patch(RefuteCalledOnce, :example, :patched)

    assert RefuteCalledOnce.example(1, 2) == :patched
    assert RefuteCalledOnce.example(3, 4) == :patched

    refute_called_once RefuteCalledOnce.example(_, _)
  end

  test "exception formatting" do
    patch(RefuteCalledOnce, :example, :patched)

    assert RefuteCalledOnce.example(1, 2) == :patched
    assert RefuteCalledOnce.example(3, 4) == :patched

    # Pins the exact wording/layout of the UnexpectedCall message, including
    # the leading escaped newline and the call history listing.
    expected_message = """
    \n
    Expected the following call to occur any number of times but once, but it occurred once:
    Patch.Test.Support.User.RefuteCalledOnce.example(1, 2)
    Calls which were received (matching calls are marked with *):
    * 1. Patch.Test.Support.User.RefuteCalledOnce.example(1, 2)
    2. Patch.Test.Support.User.RefuteCalledOnce.example(3, 4)
    """

    assert_raise Patch.UnexpectedCall, expected_message, fn ->
      refute_called_once RefuteCalledOnce.example(1, 2)
    end
  end
end
end
| 30.29661 | 94 | 0.696503 |
086b3579f599638a49f78de76324104174a5195a | 1,018 | exs | Elixir | test/ranking_test.exs | jannikbecher/schocken | ea45d27dbbbbfa540aebeba55f82f420c7ddda8f | [
"MIT"
] | null | null | null | test/ranking_test.exs | jannikbecher/schocken | ea45d27dbbbbfa540aebeba55f82f420c7ddda8f | [
"MIT"
] | null | null | null | test/ranking_test.exs | jannikbecher/schocken | ea45d27dbbbbfa540aebeba55f82f420c7ddda8f | [
"MIT"
] | null | null | null | defmodule Schocken.Game.RankingTest do
use ExUnit.Case
alias Schocken.Game.Ranking
# Shared fixture: a fresh single-roll toss with no score yet; merged into
# each test's context as context.toss.
setup do
  toss = %{
    one_toss: true,
    score: nil,
    tries: 1
  }

  {:ok, toss: toss}
end

# NOTE(review): scores appear to be {category, value, tries} tuples where a
# higher category beats a lower one — confirm exact semantics against
# Schocken.Game.Ranking.

test "ranking of house number", context do
  assert {1, 331, 1} == get_eval([1, 3, 3], context.toss)
end

test "ranking of street", context do
  assert {2, 3, 1} == get_eval([1, 2, 3], context.toss)
end

test "ranking not one toss street", context do
  # A street rolled in more than one toss only counts as a house number.
  toss = Map.put(context.toss, :one_toss, false)
  assert {1, 321, 1} == get_eval([1, 2, 3], toss)
end

test "ranking of general", context do
  assert {3, 5, 1} == get_eval([5, 5, 5], context.toss)
end

test "ranking of shock", context do
  assert {4, 4, 1} == get_eval([1, 4, 1], context.toss)
end

test "ranking of shockout", context do
  assert {5, 0, 1} == get_eval([1, 1, 1], context.toss)
end
# Puts `dices` into the toss, evaluates it via Ranking, and returns the score.
# Idiom fix: the pipeline now starts with the bare value (`toss`) instead of
# a function call, per Elixir/Credo pipe conventions.
defp get_eval(dices, toss) do
  toss
  |> Map.put(:dices, dices)
  |> Ranking.evaluate()
  |> Map.get(:score)
end
end
| 21.659574 | 59 | 0.603143 |
086b4f914b55d1d969170684deb8e1183a011fec | 920 | ex | Elixir | elixir/advent_of_code/lib/day2/part1.ex | childss/aoc-2016 | 8570cf4bcf42e1ea85cfdf5e3444baf71c5c5312 | [
"MIT"
] | null | null | null | elixir/advent_of_code/lib/day2/part1.ex | childss/aoc-2016 | 8570cf4bcf42e1ea85cfdf5e3444baf71c5c5312 | [
"MIT"
] | null | null | null | elixir/advent_of_code/lib/day2/part1.ex | childss/aoc-2016 | 8570cf4bcf42e1ea85cfdf5e3444baf71c5c5312 | [
"MIT"
] | null | null | null | defmodule Day2.Part1 do
# Keypad transition table: current digit => {U, D, L, R} destination digit.
# Moves that would leave the 3x3 pad map back to the same digit (e.g. "U"
# from 1 stays at 1).
@states %{
  # U, D, L, R
  1 => {1, 4, 1, 2},
  2 => {2, 5, 1, 3},
  3 => {3, 6, 2, 3},
  4 => {1, 7, 4, 5},
  5 => {2, 8, 4, 6},
  6 => {3, 9, 5, 6},
  7 => {4, 7, 7, 8},
  8 => {5, 8, 7, 9},
  9 => {6, 9, 8, 9}
}
# Reads the instruction file at path `input` and returns the keypad code as
# a string. Each line of moves starts from the digit the previous line
# ended on; the very first line starts from 5 (the pad's center).
def run(input) do
  input
  |> File.read!
  |> process_input
  |> compute_digits(5)
  |> Enum.join("")
end
# Splits the raw puzzle text into one list of single-character moves per
# non-empty line, e.g. "UD\nLR" => [["U", "D"], ["L", "R"]].
defp process_input(input) do
  for line <- String.split(input, "\n", trim: true) do
    String.graphemes(line)
  end
end
# Walks each line of moves starting from the previous line's final digit and
# collects every line's resulting digit. Enum.scan/3 threads the digit as the
# accumulator while emitting each intermediate result, which is exactly the
# recursion the original spelled out by hand.
defp compute_digits(lines, start_digit) do
  Enum.scan(lines, start_digit, fn moves, digit ->
    Enum.reduce(moves, digit, &next(&1, @states[&2]))
  end)
end
# Picks the destination digit for one move out of the {U, D, L, R} tuple.
defp next(direction, {up, down, left, right}) do
  case direction do
    "U" -> up
    "D" -> down
    "L" -> left
    "R" -> right
  end
end
end
| 23 | 65 | 0.488043 |
086b56bdc982492ef5a25b7780a1a29f52274c6d | 884 | ex | Elixir | clients/workflows/lib/google_api/workflows/v1/metadata.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/workflows/lib/google_api/workflows/v1/metadata.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/workflows/lib/google_api/workflows/v1/metadata.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Workflows.V1 do
  @moduledoc """
  API client metadata for GoogleApi.Workflows.V1.
  """

  # Revision string baked in by the code generator (the file header notes it
  # is auto generated and must not be edited manually).
  @discovery_revision "20211117"

  # Exposes the baked-in discovery revision to callers.
  def discovery_revision(), do: @discovery_revision
end
| 32.740741 | 74 | 0.75905 |
086b63cb3327b60dd25d8a78d2494a9f1db84751 | 490 | ex | Elixir | lib/koans/04_atoms.ex | tootoobeepbeep/elixir-koans-ans | 01b0962e1db0c51a72e33820a360c216ce466f31 | [
"MIT"
] | null | null | null | lib/koans/04_atoms.ex | tootoobeepbeep/elixir-koans-ans | 01b0962e1db0c51a72e33820a360c216ce466f31 | [
"MIT"
] | null | null | null | lib/koans/04_atoms.ex | tootoobeepbeep/elixir-koans-ans | 01b0962e1db0c51a72e33820a360c216ce466f31 | [
"MIT"
] | null | null | null | defmodule Atoms do
use Koans

@intro "Atoms"

# NOTE(review): these koans already have their answers filled in; each
# `koan` block asserts a property of atoms.
koan "Atoms are constants where their name is their own value" do
  adam = :human
  assert adam == :human
end

koan "It is surprising to find out that booleans are atoms" do
  assert is_atom(true) == true
  assert is_boolean(false) == true
  # `true` and `:true` are literally the same atom.
  assert true == :true
  assert false == :false
end

koan "Like booleans, the nil value is also an atom" do
  assert is_atom(nil) == true
  assert nil == :nil
end
end
| 21.304348 | 67 | 0.653061 |
086b77fa4cba9ce964557712a7c6400df7e4e333 | 1,561 | ex | Elixir | farmbot_ext/lib/farmbot_ext/api/eager_loader/supervisor.ex | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | 1 | 2021-08-23T13:36:14.000Z | 2021-08-23T13:36:14.000Z | farmbot_ext/lib/farmbot_ext/api/eager_loader/supervisor.ex | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | null | null | null | farmbot_ext/lib/farmbot_ext/api/eager_loader/supervisor.ex | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | null | null | null | defmodule FarmbotExt.API.EagerLoader.Supervisor do
@moduledoc """
Responsible for supervising all assets that need to be
eagerloaded
"""
use Supervisor
alias FarmbotExt.API.EagerLoader
alias FarmbotCore.Asset.{
Device,
FarmEvent,
FarmwareEnv,
FirstPartyFarmware,
FarmwareInstallation,
FbosConfig,
FirmwareConfig,
Peripheral,
PinBinding,
Point,
PointGroup,
Regimen,
SensorReading,
Sensor,
Sequence,
Tool
}
@doc false
def start_link(args) do
  # Registered under the module name so other code can address this
  # supervisor without holding its pid.
  Supervisor.start_link(__MODULE__, args, name: __MODULE__)
end
@doc "Drop all cached assets"
def drop_all_cache() do
  # Casts :drop to every EagerLoader child of this supervisor. Uses
  # __MODULE__ instead of repeating the full module name (as the original
  # did) so a module rename cannot silently break the lookup.
  for {_, pid, _, _} <- Supervisor.which_children(__MODULE__),
      do: GenServer.cast(pid, :drop)
end
@impl Supervisor
def init(_args) do
  # Each EagerLoader child is independent, so a crash restarts only that
  # child (:one_for_one).
  Supervisor.init(children(), strategy: :one_for_one)
end
# Returns the child specs to supervise. The list can be overridden through
# the :children key of this module's application config; otherwise one
# EagerLoader per asset type below is started.
def children do
  config = Application.get_env(:farmbot_ext, __MODULE__) || []

  Keyword.get(config, :children, [
    {EagerLoader, Device},
    {EagerLoader, FarmEvent},
    {EagerLoader, FarmwareEnv},
    {EagerLoader, FirstPartyFarmware},
    {EagerLoader, FarmwareInstallation},
    {EagerLoader, FbosConfig},
    {EagerLoader, FirmwareConfig},
    {EagerLoader, Peripheral},
    {EagerLoader, PinBinding},
    {EagerLoader, Point},
    {EagerLoader, PointGroup},
    {EagerLoader, Regimen},
    {EagerLoader, SensorReading},
    {EagerLoader, Sensor},
    {EagerLoader, Sequence},
    {EagerLoader, Tool}
  ])
end
end
| 22.955882 | 91 | 0.668161 |
086b79d3183818604c3ff6ce38ad479832dedd0e | 1,745 | exs | Elixir | mix.exs | Dania02525/widget_saas | 17b853f07be08a851c3e355863c18e15755cb7cb | [
"MIT"
] | 12 | 2016-01-27T01:30:42.000Z | 2019-12-07T20:31:01.000Z | mix.exs | Dania02525/widget_saas | 17b853f07be08a851c3e355863c18e15755cb7cb | [
"MIT"
] | 3 | 2016-11-22T12:22:59.000Z | 2017-08-01T17:26:40.000Z | mix.exs | Dania02525/widget_saas | 17b853f07be08a851c3e355863c18e15755cb7cb | [
"MIT"
] | 5 | 2016-07-11T18:39:02.000Z | 2019-10-23T03:22:49.000Z | defmodule WidgetSaas.Mixfile do
use Mix.Project
def project do
[
app: :widget_saas,
version: "0.0.2",
elixir: "~> 1.4",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix, :gettext] ++ Mix.compilers,
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {WidgetSaas, []},
extra_applications: [:phoenix, :phoenix_html, :cowboy, :logger,
:phoenix_ecto, :postgrex]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.3.0-rc"},
{:phoenix_pubsub, "~> 1.0"},
{:phoenix_ecto, "~> 3.2"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 2.10"},
{:phoenix_live_reload, "~> 1.0", only: :dev},
{:gettext, "~> 0.11"},
{:cowboy, "~> 1.0"},
{:apartmentex, "~> 0.2.3"}
]
end
# Aliases are shortcut or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
"test": ["ecto.create --quiet", "ecto.migrate", "test"]
]
end
end
| 27.265625 | 79 | 0.574212 |
086b8e969e22235600dcb7b7c7dc1c16208041bc | 1,810 | ex | Elixir | clients/games/lib/google_api/games/v1/model/profile_settings.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/games/lib/google_api/games/v1/model/profile_settings.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/games/lib/google_api/games/v1/model/profile_settings.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Games.V1.Model.ProfileSettings do
@moduledoc """
This is a JSON template for profile settings
## Attributes
* `friendsListVisibility` (*type:* `String.t`, *default:* `nil`) - Whether the player's friends list is visible to the game.
* `kind` (*type:* `String.t`, *default:* `games#profileSettings`) - Uniquely identifies the type of this resource. Value is always the fixed string games#profileSettings.
* `profileVisible` (*type:* `boolean()`, *default:* `nil`) -
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:friendsListVisibility => String.t(),
:kind => String.t(),
:profileVisible => boolean()
}
field(:friendsListVisibility)
field(:kind)
field(:profileVisible)
end
# Poison decode support: defer to the decode/2 that GoogleApi.Gax.ModelBase
# generates on the model module, which knows the declared fields.
defimpl Poison.Decoder, for: GoogleApi.Games.V1.Model.ProfileSettings do
  def decode(value, options),
    do: GoogleApi.Games.V1.Model.ProfileSettings.decode(value, options)
end
# Poison encode support: all generated models share one encoder implementation,
# provided by GoogleApi.Gax.ModelBase.
defimpl Poison.Encoder, for: GoogleApi.Games.V1.Model.ProfileSettings do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 34.150943 | 174 | 0.720442 |
086b918bf30dae04fe9f6e0bd44745ab4aab9c32 | 1,662 | exs | Elixir | config/dev.exs | tairone-livinalli/rocketpay | f58be9330b678a486d717613790e9e3d9f0fe9d1 | [
"MIT"
] | null | null | null | config/dev.exs | tairone-livinalli/rocketpay | f58be9330b678a486d717613790e9e3d9f0fe9d1 | [
"MIT"
] | null | null | null | config/dev.exs | tairone-livinalli/rocketpay | f58be9330b678a486d717613790e9e3d9f0fe9d1 | [
"MIT"
] | null | null | null | use Mix.Config
# Configure your database
# NOTE(review): the credentials below are development-only defaults; production
# configuration should read them from the environment instead of source.
config :rocketpay, Rocketpay.Repo,
username: "postgres",
password: "rocketpay",
database: "rocketpay_dev",
hostname: "localhost",
show_sensitive_data_on_connection_error: true,
pool_size: 10
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :rocketpay, RocketpayWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: []
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 28.655172 | 68 | 0.728039 |
086b95d530d8b5f9a11a3a7246e5ea0e3725e9aa | 2,902 | exs | Elixir | test/tracer/pid_handler_test.exs | gabiz/tracer | 4659bf3cc9e0cc86e653794b800e9eb5bbe9f0a2 | [
"MIT"
] | 125 | 2017-09-08T06:33:28.000Z | 2022-03-09T10:48:04.000Z | test/tracer/pid_handler_test.exs | Gazler/tracer | 4659bf3cc9e0cc86e653794b800e9eb5bbe9f0a2 | [
"MIT"
] | 3 | 2017-09-21T01:56:24.000Z | 2020-10-29T13:27:44.000Z | test/tracer/pid_handler_test.exs | Gazler/tracer | 4659bf3cc9e0cc86e653794b800e9eb5bbe9f0a2 | [
"MIT"
] | 6 | 2017-09-08T21:41:32.000Z | 2019-11-16T10:39:16.000Z | defmodule Tracer.PidHandler.Test do
use ExUnit.Case
alias Tracer.PidHandler
# Exercises the PidHandler trace-event process: option validation, lifecycle,
# overflow limits and callback dispatch. Tests that watch for crashes set
# :trap_exit so the handler's exit arrives as an {:EXIT, pid, reason} message
# instead of killing the test process.
test "start raises an error when no callback is passed" do
assert_raise ArgumentError, "missing event_callback configuration", fn ->
PidHandler.start(max_message_count: 1)
end
end
test "start spawn a process and returns its pid" do
pid = PidHandler.start(event_callback: fn _event -> :ok end)
assert is_pid(pid)
assert Process.alive?(pid)
end
test "stop causes the process to end with a normal status" do
Process.flag(:trap_exit, true)
pid = PidHandler.start(event_callback: fn _event -> :ok end)
assert Process.alive?(pid)
PidHandler.stop(pid)
# A clean stop must exit with :normal, not a crash reason.
assert_receive({:EXIT, ^pid, :normal})
refute Process.alive?(pid)
end
test "max_message_count triggers when too many events are received" do
Process.flag(:trap_exit, true)
pid = PidHandler.start(max_message_count: 2,
event_callback: fn _event -> :ok end)
assert Process.alive?(pid)
# Both :trace and :trace_ts tuples count toward the message limit.
send pid, {:trace, :foo}
send pid, {:trace_ts, :bar}
assert_receive({:EXIT, ^pid, {:max_message_count, 2}})
refute Process.alive?(pid)
end
test "unrecognized messages are discarded to avoid queue from filling up" do
Process.flag(:trap_exit, true)
pid = PidHandler.start(max_message_count: 1,
event_callback: fn _event -> :ok end)
assert Process.alive?(pid)
for i <- 1..100, do: send pid, {:not_expeted_message, i}
# NOTE(review): this inspects the TEST process's own mailbox (self()), which
# presumably verifies the handler neither echoed nor forwarded anything back;
# confirm it was not meant to inspect the handler's queue instead.
case Process.info(self(), :message_queue_len) do
{:message_queue_len, len} -> assert len == 0
error -> assert error
end
end
test "callback is invoked when a trace event is received" do
Process.flag(:trap_exit, true)
test_pid = self()
# The callback relays every event back to the test so delivery and ordering
# can be asserted with assert_receive.
pid = PidHandler.start(event_callback: fn event ->
send test_pid, event
:ok
end)
assert Process.alive?(pid)
for i <- 1..100, do: send pid, {:trace, i}
for i <- 1..100, do: assert_receive {:trace, ^i}
end
test "process exits if callback does not return :ok" do
Process.flag(:trap_exit, true)
pid = PidHandler.start(event_callback: fn _event -> :not_ok end)
assert Process.alive?(pid)
send pid, {:trace, :foo}
# The non-:ok return value becomes part of the exit reason.
assert_receive({:EXIT, ^pid, {:not_ok, []}})
refute Process.alive?(pid)
end
test "process exits if too many messages wait in mailbox" do
Process.flag(:trap_exit, true)
# A slow callback (20ms per event) lets the handler's mailbox back up past
# max_queue_size while 100 events are pumped in.
pid = PidHandler.start(max_queue_size: 10,
event_callback: fn _event ->
:timer.sleep(20);
:ok end)
assert Process.alive?(pid)
for i <- 1..100, do: send pid, {:trace, i}
case Process.info(pid, :message_queue_len) do
{:message_queue_len, len} -> assert len >= 10
_ -> :ok
end
assert_receive({:EXIT, ^pid, {:message_queue_size, _}})
refute Process.alive?(pid)
end
end
| 33.356322 | 78 | 0.639904 |
086ba231546fda220797479b09b81b6d9608b959 | 133 | ex | Elixir | 03-chapter/04_checkout.ex | herminiotorres/learn-funcional-programming-with-elixir | 01b0696e0859da20f389a6e53c51b5ab6c4f6e24 | [
"MIT"
] | 1 | 2022-03-01T13:54:43.000Z | 2022-03-01T13:54:43.000Z | 03-chapter/04_checkout.ex | herminiotorres/learn-funcional-programming-with-elixir | 01b0696e0859da20f389a6e53c51b5ab6c4f6e24 | [
"MIT"
] | null | null | null | 03-chapter/04_checkout.ex | herminiotorres/learn-funcional-programming-with-elixir | 01b0696e0859da20f389a6e53c51b5ab6c4f6e24 | [
"MIT"
] | null | null | null | defmodule Checkout do
# Computes the total for a line item: unit price times quantity.
# The former pair of clauses (total_cost/1 delegating to total_cost/2 with 10)
# is collapsed into one definition with an idiomatic default argument; both
# arities total_cost/1 and total_cost/2 still exist, with identical results.
def total_cost(price, quantity \\ 10), do: price * quantity
end
| 26.6 | 55 | 0.75188 |
086bb4972d3bfae39eb6c46663e80afa21d68b99 | 68 | exs | Elixir | test/test_helper.exs | yoossaland/yoossa | 1e1ab968d12c7690a76fc670c47c91c29efb2979 | [
"BSD-2-Clause"
] | null | null | null | test/test_helper.exs | yoossaland/yoossa | 1e1ab968d12c7690a76fc670c47c91c29efb2979 | [
"BSD-2-Clause"
] | null | null | null | test/test_helper.exs | yoossaland/yoossa | 1e1ab968d12c7690a76fc670c47c91c29efb2979 | [
"BSD-2-Clause"
] | null | null | null | ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(Madari.Repo, :manual)
| 22.666667 | 52 | 0.779412 |
086bd5c3aa2f2178f92916382a928ea5ff5203ad | 4,127 | ex | Elixir | lib/repo/queryable.ex | scripbox/ex_audit | 6478d72816e9357c9ba4eb5eeafd0244010133b4 | [
"MIT"
] | null | null | null | lib/repo/queryable.ex | scripbox/ex_audit | 6478d72816e9357c9ba4eb5eeafd0244010133b4 | [
"MIT"
] | null | null | null | lib/repo/queryable.ex | scripbox/ex_audit | 6478d72816e9357c9ba4eb5eeafd0244010133b4 | [
"MIT"
] | null | null | null | defmodule ExAudit.Queryable do
@version_schema Application.get_env(:ex_audit, :version_schema)
require Logger
# Straight pass-through to Ecto's bulk update: this block performs no version
# bookkeeping of its own, it only mirrors Ecto.Repo.Queryable's signature.
def update_all(module, adapter, queryable, updates, opts),
  do: Ecto.Repo.Queryable.update_all(module, adapter, queryable, updates, opts)
# Straight pass-through to Ecto's bulk delete — no audit-specific behavior here.
def delete_all(module, adapter, queryable, opts),
  do: Ecto.Repo.Queryable.delete_all(module, adapter, queryable, opts)
@doc """
Returns the recorded versions for `struct`, newest first.

With `render_struct: true` in `opts`, each version is additionally annotated
with `:original` (the struct as it looked *after* that version was applied)
and a synthetic oldest entry is appended; ids are shifted between entries
(see inline notes). Without the option, the raw version rows are returned.
"""
def history(module, adapter, struct, opts) do
import Ecto.Query
query = from v in @version_schema,
order_by: [desc: :recorded_at]
# TODO what do when we get a query
query = case struct do
# %Ecto.Query{from: struct} ->
# from v in query,
# where: v.entity_id == subquery(from q in struct, select: q.id),
# where: v.entity_schema == ^struct
%{__struct__: struct, id: id} when nil not in [struct, id] ->
from v in query,
where: v.entity_id == ^id,
where: v.entity_schema == ^struct
end
versions = Ecto.Repo.Queryable.all(module, adapter, query, opts)
if Keyword.get(opts, :render_struct, false) do
# Walk newest -> oldest, undoing each patch to reconstruct older states.
# Each version keeps the state it produced (new_struct) as :original.
{versions, oldest_struct} =
versions
|> Enum.map_reduce(struct, fn version, new_struct ->
old_struct = _revert(version, new_struct)
version =
version
|> Map.put(:original, empty_map_to_nil(new_struct))
|> Map.put(:first, false)
{version, old_struct}
end)
# Shift ids: each entry takes the id of the next-newer version (the newest
# gets nil) — presumably so a rendered row pairs an id with the state
# *before* that change. TODO(review): confirm against the consuming UI.
{versions, oldest_id} =
versions
|> Enum.map_reduce(nil, fn version, id ->
{%{version | id: id}, version.id}
end)
# Append a synthetic entry that carries the fully-reverted (oldest) state.
versions ++ [struct(@version_schema, %{
id: oldest_id,
}) |> Map.put(:original, empty_map_to_nil(oldest_struct))]
else
versions
end
end
@drop_fields [:__meta__, :__struct__]
@doc """
Rolls the entity referenced by `version` back to the state it had at that
version, by replaying every recorded patch since then in reverse.

Depending on the current row vs. the reconstructed state this performs an
insert (row was deleted since), delete (row did not exist at that version),
or update. Returns `{:ok, nil}` for deletes and for no-ops, otherwise the
repo operation's result. The repo call is tagged with `rollback: true` in
`:ex_audit_custom` so the revert itself is distinguishable in the audit log.
"""
def revert(module, _adapter, version, opts) do
import Ecto.Query
# get the history of the entity after this version
query = from v in @version_schema,
where: v.entity_id == ^version.entity_id,
where: v.entity_schema == ^version.entity_schema,
where: v.recorded_at >= ^version.recorded_at,
order_by: [desc: :recorded_at]
versions = module.all(query)
# get the referenced struct as it exists now
struct = module.one(from s in version.entity_schema, where: s.id == ^version.entity_id)
# Undo the patches newest-first; nil / empty map means "did not exist".
result = Enum.reduce(versions, struct, &_revert/2)
result = empty_map_to_nil(result)
schema = version.entity_schema
# Associations can't be cast from the reconstructed attribute map.
drop_from_params = @drop_fields ++ schema.__schema__(:associations)
# {current row, reconstructed state} decides the repo action:
#   absent now + had attributes then -> re-insert
#   present now + absent then        -> delete
#   absent both                      -> nothing to do
#   present both                     -> update (optionally preloading assocs)
{action, changeset} = case {struct, result} do
{nil, %{}} -> {:insert, schema.changeset(struct(schema, %{}), Map.drop(result, drop_from_params))}
{%{}, nil} -> {:delete, struct}
{nil, nil} -> {nil, nil}
_ ->
struct = case Keyword.get(opts, :preload) do
nil -> struct
[] -> struct
preloads when is_list(preloads) -> module.preload(struct, preloads)
end
{:update, schema.changeset(struct, Map.drop(result, drop_from_params))}
end
opts = Keyword.update(opts, :ex_audit_custom, [rollback: true], fn custom -> [{:rollback, true} | custom] end)
if action do
res = apply(module, action, [changeset, opts])
case action do
:delete -> {:ok, nil}
_ -> res
end
else
Logger.warn(["Can't revert ", inspect(version), " because the entity would still be deleted"])
{:ok, nil}
end
end
# Normalizes "no attributes survived the replay" to nil: reverting past a
# creation leaves an empty map, which callers treat as "record does not exist
# at this point in history". Non-empty maps pass through untouched.
# map_size/1 is O(1); the previous Map.keys/1 |> length/1 built and counted a
# full key list just to test emptiness.
defp empty_map_to_nil(map) do
  if map_size(map) == 0, do: nil, else: map
end
# Applies one version backwards: invert the recorded action and the recorded
# patch, then apply the reversed patch to the newer state to obtain the older one.
defp _revert(version, struct) do
apply_change(reverse_action(version.action), ExAudit.Diff.reverse(version.patch), struct)
end
# Applies an (already reversed) patch according to the (already reversed)
# action. :updated patches the existing state in place.
defp apply_change(:updated, patch, to) do
ExAudit.Patch.patch(to, patch)
end
# :deleted yields an empty map — the record does not exist in this state
# (empty_map_to_nil/1 later turns this into nil).
defp apply_change(:deleted, _patch, _to) do
%{}
end
# :created builds the state from scratch by patching an empty map; the
# previous state is irrelevant.
defp apply_change(:created, patch, _to) do
ExAudit.Patch.patch(%{}, patch)
end
# Inverts a recorded action so its patch can be applied backwards: undoing a
# creation deletes, undoing a deletion re-creates, and an update stays an
# update (its patch is reversed separately in _revert/2).
defp reverse_action(action) do
  case action do
    :updated -> :updated
    :created -> :deleted
    :deleted -> :created
  end
end
end | 29.269504 | 114 | 0.62709 |
086be317b7382ebeb459fc3281cb3b6e690e8217 | 1,084 | ex | Elixir | lib/bootstrap_form/email_input.ex | feliperenan/bootstrap_form | a12d0665973687bfefeee499f8581398a25ccf75 | [
"MIT"
] | 6 | 2019-02-07T00:37:24.000Z | 2021-05-29T23:37:32.000Z | lib/bootstrap_form/email_input.ex | feliperenan/bootstrap_form | a12d0665973687bfefeee499f8581398a25ccf75 | [
"MIT"
] | 7 | 2019-02-07T00:19:39.000Z | 2019-11-04T17:01:50.000Z | lib/bootstrap_form/email_input.ex | feliperenan/bootstrap_form | a12d0665973687bfefeee499f8581398a25ccf75 | [
"MIT"
] | 3 | 2019-10-30T13:49:44.000Z | 2021-09-26T23:45:02.000Z | defmodule BootstrapForm.EmailInput do
@moduledoc false
import Phoenix.HTML.Form, only: [email_input: 3, label: 3]
alias BootstrapForm.{Input, InputBuilder, Wrapper}
@label_class "control-label"
@default_classes [input_class: "form-control", wrapper_class: "form-group"]
@behaviour InputBuilder
@doc """
Generate a bootstrap text input according to the given options.
## Custom options
wrapper_html: HTML attributes that will be used in the wrapper.
## Examples
build(:user, :email, class: "my-custom-class")
# => <div class="form-group">
<label class="control-label" for="user_email">Email</label>
<input class="form-control my-custom-class" id="user_email" name="user[email]" type="text">
</div>
"""
@impl true
def build(form, field_name, options \\ []) do
input = Input.new(form, field_name, options, @default_classes)
Wrapper.build_tag input do
[
label(form, field_name, class: @label_class),
email_input(form, field_name, input.options)
]
end
end
end
| 27.1 | 104 | 0.665129 |
086bebca0a04c7e27e7738d59fb254690e04d177 | 544 | ex | Elixir | lib/quiz_site/page/card.ex | rbgraham/quiz_site | 57ded7991e355e2cf9f5edecc823b67833ee0ceb | [
"MIT"
] | 2 | 2017-10-30T00:42:01.000Z | 2017-10-30T21:29:15.000Z | lib/quiz_site/page/card.ex | rbgraham/quiz_site | 57ded7991e355e2cf9f5edecc823b67833ee0ceb | [
"MIT"
] | null | null | null | lib/quiz_site/page/card.ex | rbgraham/quiz_site | 57ded7991e355e2cf9f5edecc823b67833ee0ceb | [
"MIT"
] | null | null | null | defmodule QuizSite.Page.Card do
use Ecto.Schema
import Ecto.Changeset
alias QuizSite.Page.Card
# A card is one page/step of a quiz: ordered by :sequence within a :site,
# owning its questions and sections.
schema "cards" do
field :navigation, :string
field :title, :string
field :sequence, :integer
field :site, :string
has_many :questions, QuizSite.Cards.Question
has_many :sections, QuizSite.Cards.Section
timestamps()
end
@doc false
# Casts the four scalar fields; :navigation is accepted but optional —
# only :title, :sequence and :site are required. Associations are not
# managed through this changeset.
def changeset(%Card{} = card, attrs) do
card
|> cast(attrs, [:navigation, :title, :sequence, :site])
|> validate_required([:title, :sequence, :site])
end
end
| 20.923077 | 59 | 0.674632 |
086bec0d185af35477663158b7f776b89e9d3ea0 | 2,757 | ex | Elixir | clients/app_engine/lib/google_api/app_engine/v1/model/firewall_rule.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/app_engine/lib/google_api/app_engine/v1/model/firewall_rule.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/app_engine/lib/google_api/app_engine/v1/model/firewall_rule.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# NOTE: auto-generated API model (see header) — prefer regenerating over hand-editing.
defmodule GoogleApi.AppEngine.V1.Model.FirewallRule do
@moduledoc """
A single firewall rule that is evaluated against incoming traffic and provides an action to take on matched requests.
## Attributes
* `action` (*type:* `String.t`, *default:* `nil`) - The action to take on matched requests.
* `description` (*type:* `String.t`, *default:* `nil`) - An optional string description of this rule. This field has a maximum length of 100 characters.
* `priority` (*type:* `integer()`, *default:* `nil`) - A positive integer between 1, Int32.MaxValue-1 that defines the order of rule evaluation. Rules with the lowest priority are evaluated first.A default rule at priority Int32.MaxValue matches all IPv4 and IPv6 traffic when no previous rule matches. Only the action of this rule can be modified by the user.
* `sourceRange` (*type:* `String.t`, *default:* `nil`) - IP address or range, defined using CIDR notation, of requests that this rule applies to. You can use the wildcard character "*" to match all IPs equivalent to "0/0" and "::/0" together. Examples: 192.168.1.1 or 192.168.0.0/16 or 2001:db8::/32 or 2001:0db8:0000:0042:0000:8a2e:0370:7334. Truncation will be silently performed on addresses which are not properly truncated. For example, 1.2.3.4/24 is accepted as the same address as 1.2.3.0/24. Similarly, for IPv6, 2001:db8::1/32 is accepted as the same address as 2001:db8::/32.
"""
# Brings in the field/1 macro plus the shared decode/encode plumbing used by
# the Poison protocol implementations below this module.
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:action => String.t(),
:description => String.t(),
:priority => integer(),
:sourceRange => String.t()
}
# One field/1 declaration per JSON attribute; names mirror the API's camelCase keys.
field(:action)
field(:description)
field(:priority)
field(:sourceRange)
end
# Poison decode support: defer to the decode/2 that GoogleApi.Gax.ModelBase
# generates on the model module, which knows the declared fields.
defimpl Poison.Decoder, for: GoogleApi.AppEngine.V1.Model.FirewallRule do
  def decode(value, options),
    do: GoogleApi.AppEngine.V1.Model.FirewallRule.decode(value, options)
end
# Poison encode support: all generated models share one encoder implementation,
# provided by GoogleApi.Gax.ModelBase.
defimpl Poison.Encoder, for: GoogleApi.AppEngine.V1.Model.FirewallRule do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 49.232143 | 589 | 0.725426 |
086c041722b7fffd20c3d7f47892345b42ffe673 | 1,292 | exs | Elixir | mix.exs | zacck/plaid-elixir | 5467e5fcd5bd16f0d7a738da22e17c9be4e447b3 | [
"MIT"
] | null | null | null | mix.exs | zacck/plaid-elixir | 5467e5fcd5bd16f0d7a738da22e17c9be4e447b3 | [
"MIT"
] | null | null | null | mix.exs | zacck/plaid-elixir | 5467e5fcd5bd16f0d7a738da22e17c9be4e447b3 | [
"MIT"
] | null | null | null | defmodule Plaid.Mixfile do
use Mix.Project
@description """
An Elixir Library for Plaid's V2 API
"""
# Mix project definition: package metadata, coverage tooling and docs config.
def project do
[app: :plaid,
version: "1.1.3",
description: @description,
elixir: "~> 1.5",
# NOTE(review): `Mix.env` without parens is deprecated in newer Elixir;
# prefer `Mix.env()` when the minimum Elixir version is raised.
elixirc_paths: elixirc_paths(Mix.env),
package: package(),
deps: deps(),
docs: docs(),
source_url: "https://github.com/wfgilman/plaid-elixir",
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [coveralls: :test, "coveralls.detail": :test]
]
end
def application do
[extra_applications: [:logger]]
end
# Test builds also compile the support helpers under test/support.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Runtime deps (httpoison, poison) plus dev/test-only tooling.
defp deps do
[
{:httpoison, "~> 0.13"},
{:poison, "~> 3.0"},
{:bypass, "~> 0.8", only: [:test]},
{:credo, "~> 0.5", only: [:dev], runtime: false},
{:excoveralls, "~> 0.6", only: [:test]},
{:ex_doc, "~> 0.19", only: [:dev], runtime: false}
]
end
# Hex package metadata; the package name differs from the OTP app name.
defp package do
[
name: :plaid_elixir,
files: ["lib", "mix.exs", "README*", "LICENSE*"],
licenses: ["MIT"],
maintainers: ["Will Gilman"],
links: %{"Github" => "https://github.com/wfgilman/plaid-elixir"}
]
end
defp docs do
[
extras: ["parameters.md"]
]
end
end
| 22.666667 | 70 | 0.554954 |
086c09dd563ad9ff2ddf02cbf01fd299318b4fc9 | 1,256 | ex | Elixir | lib/ergo/telemetry_server.ex | mmower/ergo | f0cfa8debd6697c56509e7856578dc49666f3ff2 | [
"MIT"
] | 5 | 2021-07-11T13:01:56.000Z | 2021-12-29T17:02:00.000Z | lib/ergo/telemetry_server.ex | mmower/ergo | f0cfa8debd6697c56509e7856578dc49666f3ff2 | [
"MIT"
] | null | null | null | lib/ergo/telemetry_server.ex | mmower/ergo | f0cfa8debd6697c56509e7856578dc49666f3ff2 | [
"MIT"
] | null | null | null | defmodule Ergo.TelemetryServer do
use GenServer
@name :ergo_telemetry_server
defmodule State do
# runs: %{trace_run_id => [event_metadata]} — events are prepended as they
# arrive (newest first) and reversed when read back out.
defstruct runs: %{}
end
# GenServer callback: whatever start_link passes in becomes the initial
# server state, unchanged.
def init(initial_state), do: {:ok, initial_state}
# Synchronously fetch the events recorded for trace run `id` from the
# named server (oldest-first; see the :get_events handler).
def get_events(id), do: GenServer.call(@name, {:get_events, id})
# Ask the named server to drop all recorded runs (synchronous round-trip).
def reset, do: GenServer.call(@name, :reset)
# Attaches this module as a :telemetry handler for every Ergo event and
# starts the registered server that accumulates those events per run.
def start_link(_) do
  events = [
    [:ergo, :enter],
    [:ergo, :leave],
    [:ergo, :match],
    [:ergo, :error],
    [:ergo, :event]
  ]

  :telemetry.attach_many(
    to_string(@name),
    events,
    &Ergo.TelemetryServer.handle_event/4,
    nil
  )

  # Fixed: this function is named start_link/1 but previously called
  # GenServer.start/3, so the server was never linked — a supervisor using
  # the conventional child spec would not be notified if it died.
  GenServer.start_link(__MODULE__, %State{}, name: @name)
end
# :telemetry handler — runs in the process emitting the event, so hand the
# metadata off to the server with a fire-and-forget cast and return at once.
def handle_event(evt, _measurements, metadata, _config),
  do: GenServer.cast(@name, {:log, evt, metadata})
# Records one event under its run id. Events are kept newest-first; readers
# reverse the list (see the :get_events handler).
def handle_cast({:log, _evt, %{id: id} = metadata}, %State{runs: runs} = state) do
  # Fixed idiom: [head | tail] is the O(1) list prepend; the previous
  # List.insert_at(0, ...) produced the same ordering less directly.
  events = [metadata | Map.get(runs, id, [])]
  {:noreply, %{state | runs: Map.put(runs, id, events)}}
end
# Fixed: reset/0 issues a GenServer.call(@name, :reset), but this was
# implemented as a handle_cast clause, so every reset() crashed the server
# with a FunctionClauseError (no matching handle_call). Handle the message
# as a call, wipe the state, and reply :ok to unblock the caller.
# (Clause ordering note: grouped-clause compiler warnings aside, this clause
# is independent of the :get_events handler below.)
def handle_call(:reset, _from, _state) do
  {:reply, :ok, %State{}}
end
# Events are stored newest-first; reverse so callers receive them in
# chronological order. Unknown run ids yield an empty list.
def handle_call({:get_events, id}, _from, %State{runs: runs} = state) do
  events = runs |> Map.get(id, []) |> Enum.reverse()
  {:reply, events, state}
end
end
| 19.030303 | 84 | 0.590764 |
086c4ac79a6980f791b6ef4a54f6b8dd5ddab72c | 1,612 | exs | Elixir | examples/chuck_ex/test/chuck_norris_api_test.exs | igor-i/bookish_spork | bcefdc6d911044d6438a1efa74d69e9d0921db4f | [
"Apache-2.0"
] | null | null | null | examples/chuck_ex/test/chuck_norris_api_test.exs | igor-i/bookish_spork | bcefdc6d911044d6438a1efa74d69e9d0921db4f | [
"Apache-2.0"
] | null | null | null | examples/chuck_ex/test/chuck_norris_api_test.exs | igor-i/bookish_spork | bcefdc6d911044d6438a1efa74d69e9d0921db4f | [
"Apache-2.0"
] | null | null | null | defmodule ChuckNorrisApiTest do
use ExUnit.Case
doctest ChuckNorrisApi
# Boots the bookish_spork stub HTTP server once for the whole module; each
# test enqueues canned responses and then inspects the captured requests.
setup_all do
{:ok, _} = :bookish_spork.start_server
{:ok, %{}}
end
test "retrieves random joke" do
:bookish_spork.stub_request([200, %{}, "{
\"value\": \"Chuck norris tried to crank that soulja boy but it wouldn't crank up\"
}"])
assert ChuckNorrisApi.random == "Chuck norris tried to crank that soulja boy but it wouldn't crank up"
# The stub records the outgoing request so the path can be asserted.
{:ok, request} = :bookish_spork.capture_request
assert request.uri == '/jokes/random'
end
test "retrieves a random joke from a particular category" do
:bookish_spork.stub_request([200, %{}, "{
\"value\": \"Chuck Norris doesn't go on the internet, he has every internet site stored in his memory. He refreshes webpages by blinking.\"
}"])
assert ChuckNorrisApi.random("dev") == "Chuck Norris doesn't go on the internet, he has every internet site stored in his memory. He refreshes webpages by blinking."
{:ok, request} = :bookish_spork.capture_request
assert request.uri == '/jokes/random?category=dev'
end
test "retrieves two random jokes in different processes" do
# Two responses queued, two concurrent tasks consume them; the sleep is a
# crude synchronization point — NOTE(review): consider awaiting the tasks
# instead of sleeping a fixed 500ms.
:bookish_spork.stub_request([200, %{}, "{\"value\": \"First joke.\"}"])
:bookish_spork.stub_request([200, %{}, "{\"value\": \"Second joke.\"}"])
Task.start(fn -> ChuckNorrisApi.random() end)
Task.start(fn -> ChuckNorrisApi.random() end)
Process.sleep(500)
# Exactly two requests were made: a third capture must fail.
{:ok, _request} = :bookish_spork.capture_request()
{:ok, _request} = :bookish_spork.capture_request()
{:error, _request} = :bookish_spork.capture_request()
end
end
| 36.636364 | 169 | 0.680521 |
086c8820c96aa713eb243b5e6508c1986e12119c | 1,555 | ex | Elixir | assistant/lib/assistant_web/views/error_helpers.ex | dainst/bibliography-assistant | 6495055bf852964d416a02d92729406a6a1a33e9 | [
"Apache-2.0"
] | null | null | null | assistant/lib/assistant_web/views/error_helpers.ex | dainst/bibliography-assistant | 6495055bf852964d416a02d92729406a6a1a33e9 | [
"Apache-2.0"
] | null | null | null | assistant/lib/assistant_web/views/error_helpers.ex | dainst/bibliography-assistant | 6495055bf852964d416a02d92729406a6a1a33e9 | [
"Apache-2.0"
] | null | null | null | defmodule AssistantWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
Enum.map(Keyword.get_values(form.errors, field), fn error ->
content_tag(:span, translate_error(error),
class: "invalid-feedback",
phx_feedback_for: input_name(form, field)
)
end)
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# When using gettext, we typically pass the strings we want
# to translate as a static argument:
#
# # Translate "is invalid" in the "errors" domain
# dgettext("errors", "is invalid")
#
# # Translate the number of files with plural rules
# dngettext("errors", "1 file", "%{count} files", count)
#
# Because the error messages we show in our forms and APIs
# are defined inside Ecto, we need to translate them dynamically.
# This requires us to call the Gettext module passing our gettext
# backend as first argument.
#
# Note we use the "errors" domain, which means translations
# should be written to the errors.po file. The :count option is
# set by Ecto and indicates we should also apply plural rules.
if count = opts[:count] do
Gettext.dngettext(AssistantWeb.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(AssistantWeb.Gettext, "errors", msg, opts)
end
end
end
| 32.395833 | 78 | 0.666881 |
086cae28ff9a73dc9959e8afb715155fa2f2ba67 | 9,989 | ex | Elixir | lib/aws/code_build.ex | hosh/aws-elixir | 3ef18a94ed3b628b09ba1aaf297161f16c48867e | [
"Apache-2.0"
] | 2 | 2019-11-17T02:31:18.000Z | 2019-11-20T22:00:29.000Z | lib/aws/code_build.ex | hosh/aws-elixir | 3ef18a94ed3b628b09ba1aaf297161f16c48867e | [
"Apache-2.0"
] | null | null | null | lib/aws/code_build.ex | hosh/aws-elixir | 3ef18a94ed3b628b09ba1aaf297161f16c48867e | [
"Apache-2.0"
] | 1 | 2019-11-16T18:08:18.000Z | 2019-11-16T18:08:18.000Z | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/jkakar/aws-codegen for more details.
defmodule AWS.CodeBuild do
@moduledoc """
AWS CodeBuild
AWS CodeBuild is a fully managed build service in the cloud. AWS CodeBuild
compiles your source code, runs unit tests, and produces artifacts that are
ready to deploy. AWS CodeBuild eliminates the need to provision, manage,
and scale your own build servers. It provides prepackaged build
environments for the most popular programming languages and build tools,
such as Apache Maven, Gradle, and more. You can also fully customize build
environments in AWS CodeBuild to use your own build tools. AWS CodeBuild
scales automatically to meet peak build requests. You pay only for the
build time you consume. For more information about AWS CodeBuild, see the
*AWS CodeBuild User Guide*.
AWS CodeBuild supports these operations:
<ul> <li> `BatchDeleteBuilds`: Deletes one or more builds.
</li> <li> `BatchGetProjects`: Gets information about one or more build
projects. A *build project* defines how AWS CodeBuild runs a build. This
includes information such as where to get the source code to build, the
build environment to use, the build commands to run, and where to store the
build output. A *build environment* is a representation of operating
system, programming language runtime, and tools that AWS CodeBuild uses to
run a build. You can add tags to build projects to help manage your
resources and costs.
</li> <li> `CreateProject`: Creates a build project.
</li> <li> `CreateWebhook`: For an existing AWS CodeBuild build project
that has its source code stored in a GitHub or Bitbucket repository,
enables AWS CodeBuild to start rebuilding the source code every time a code
change is pushed to the repository.
</li> <li> `UpdateWebhook`: Changes the settings of an existing webhook.
</li> <li> `DeleteProject`: Deletes a build project.
</li> <li> `DeleteWebhook`: For an existing AWS CodeBuild build project
that has its source code stored in a GitHub or Bitbucket repository, stops
AWS CodeBuild from rebuilding the source code every time a code change is
pushed to the repository.
</li> <li> `ListProjects`: Gets a list of build project names, with each
build project name representing a single build project.
</li> <li> `UpdateProject`: Changes the settings of an existing build
project.
</li> <li> `BatchGetBuilds`: Gets information about one or more builds.
</li> <li> `ListBuilds`: Gets a list of build IDs, with each build ID
representing a single build.
</li> <li> `ListBuildsForProject`: Gets a list of build IDs for the
specified build project, with each build ID representing a single build.
</li> <li> `StartBuild`: Starts running a build.
</li> <li> `StopBuild`: Attempts to stop running a build.
</li> <li> `ListCuratedEnvironmentImages`: Gets information about Docker
images that are managed by AWS CodeBuild.
</li> <li> `DeleteSourceCredentials`: Deletes a set of GitHub, GitHub
Enterprise, or Bitbucket source credentials.
</li> <li> `ImportSourceCredentials`: Imports the source repository
credentials for an AWS CodeBuild project that has its source code stored in
a GitHub, GitHub Enterprise, or Bitbucket repository.
</li> <li> `ListSourceCredentials`: Returns a list of
`SourceCredentialsInfo` objects. Each `SourceCredentialsInfo` object
includes the authentication type, token ARN, and type of source provider
for one set of credentials.
</li> </ul>
"""
@doc """
Deletes one or more builds.
"""
def batch_delete_builds(client, input, options \\ []) do
request(client, "BatchDeleteBuilds", input, options)
end
@doc """
Gets information about builds.
"""
def batch_get_builds(client, input, options \\ []) do
request(client, "BatchGetBuilds", input, options)
end
@doc """
Gets information about build projects.
"""
def batch_get_projects(client, input, options \\ []) do
request(client, "BatchGetProjects", input, options)
end
@doc """
Creates a build project.
"""
def create_project(client, input, options \\ []) do
request(client, "CreateProject", input, options)
end
@doc """
For an existing AWS CodeBuild build project that has its source code stored
in a GitHub or Bitbucket repository, enables AWS CodeBuild to start
rebuilding the source code every time a code change is pushed to the
repository.
<important> If you enable webhooks for an AWS CodeBuild project, and the
project is used as a build step in AWS CodePipeline, then two identical
builds are created for each commit. One build is triggered through
webhooks, and one through AWS CodePipeline. Because billing is on a
per-build basis, you are billed for both builds. Therefore, if you are
using AWS CodePipeline, we recommend that you disable webhooks in AWS
CodeBuild. In the AWS CodeBuild console, clear the Webhook box. For more
information, see step 5 in [Change a Build Project's
Settings](https://docs.aws.amazon.com/codebuild/latest/userguide/change-project.html#change-project-console).
</important>
"""
def create_webhook(client, input, options \\ []) do
request(client, "CreateWebhook", input, options)
end
@doc """
Deletes a build project.
"""
def delete_project(client, input, options \\ []) do
request(client, "DeleteProject", input, options)
end
@doc """
Deletes a set of GitHub, GitHub Enterprise, or Bitbucket source
credentials.
"""
def delete_source_credentials(client, input, options \\ []) do
request(client, "DeleteSourceCredentials", input, options)
end
@doc """
For an existing AWS CodeBuild build project that has its source code stored
in a GitHub or Bitbucket repository, stops AWS CodeBuild from rebuilding
the source code every time a code change is pushed to the repository.
"""
def delete_webhook(client, input, options \\ []) do
request(client, "DeleteWebhook", input, options)
end
@doc """
Imports the source repository credentials for an AWS CodeBuild project that
has its source code stored in a GitHub, GitHub Enterprise, or Bitbucket
repository.
"""
def import_source_credentials(client, input, options \\ []),
  do: request(client, "ImportSourceCredentials", input, options)

@doc """
Resets the cache for a project.
"""
def invalidate_project_cache(client, input, options \\ []),
  do: request(client, "InvalidateProjectCache", input, options)

@doc """
Gets a list of build IDs, with each build ID representing a single build.
"""
def list_builds(client, input, options \\ []),
  do: request(client, "ListBuilds", input, options)

@doc """
Gets a list of build IDs for the specified build project, with each build
ID representing a single build.
"""
def list_builds_for_project(client, input, options \\ []),
  do: request(client, "ListBuildsForProject", input, options)

@doc """
Gets information about Docker images that are managed by AWS CodeBuild.
"""
def list_curated_environment_images(client, input, options \\ []),
  do: request(client, "ListCuratedEnvironmentImages", input, options)

@doc """
Gets a list of build project names, with each build project name
representing a single build project.
"""
def list_projects(client, input, options \\ []),
  do: request(client, "ListProjects", input, options)

@doc """
Returns a list of `SourceCredentialsInfo` objects.
"""
def list_source_credentials(client, input, options \\ []),
  do: request(client, "ListSourceCredentials", input, options)

@doc """
Starts running a build.
"""
def start_build(client, input, options \\ []),
  do: request(client, "StartBuild", input, options)

@doc """
Attempts to stop running a build.
"""
def stop_build(client, input, options \\ []),
  do: request(client, "StopBuild", input, options)

@doc """
Changes the settings of a build project.
"""
def update_project(client, input, options \\ []),
  do: request(client, "UpdateProject", input, options)

@doc """
Updates the webhook associated with an AWS CodeBuild build project.

<note> If you use Bitbucket for your repository, `rotateSecret` is ignored.
</note>
"""
def update_webhook(client, input, options \\ []),
  do: request(client, "UpdateWebhook", input, options)
@spec request(map(), binary(), map(), list()) ::
        {:ok, Poison.Parser.t | nil, Poison.Response.t} |
        {:error, Poison.Parser.t} |
        {:error, HTTPoison.Error.t}
# Signs and performs one CodeBuild JSON-1.1 action, then normalizes the
# HTTP outcome into the tagged tuples declared in the spec above.
defp request(client, action, input, options) do
  client = %{client | service: "codebuild"}
  host = get_host("codebuild", client)
  url = get_url(host, client)

  headers = [
    {"Host", host},
    {"Content-Type", "application/x-amz-json-1.1"},
    {"X-Amz-Target", "CodeBuild_20161006.#{action}"}
  ]

  payload = Poison.Encoder.encode(input, [])
  signed_headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)

  url
  |> HTTPoison.post(payload, signed_headers, options)
  |> handle_response()
end

# 200 with empty body: success without a decodable payload.
defp handle_response({:ok, %HTTPoison.Response{status_code: 200, body: ""} = response}),
  do: {:ok, nil, response}

# 200 with a body: decode the JSON payload.
defp handle_response({:ok, %HTTPoison.Response{status_code: 200, body: body} = response}),
  do: {:ok, Poison.Parser.parse!(body), response}

# Non-200: the body carries an AWS error document with __type/message.
defp handle_response({:ok, %HTTPoison.Response{body: body}}) do
  error = Poison.Parser.parse!(body)
  {:error, {error["__type"], error["message"]}}
end

# Transport-level failure from HTTPoison.
defp handle_response({:error, %HTTPoison.Error{reason: reason}}),
  do: {:error, %HTTPoison.Error{reason: reason}}
# Resolves the service host: "localhost" for the special "local" region,
# otherwise "<prefix>.<region>.<endpoint>".
defp get_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

defp get_host(endpoint_prefix, client),
  do: "#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
# Builds the endpoint URL from the client's protocol and port settings.
defp get_url(host, %{proto: proto, port: port}) do
  "#{proto}://#{host}:#{port}/"
end
end
| 35.172535 | 111 | 0.704675 |
086cdee929a7893fa0a14cf41192a16608566394 | 412 | exs | Elixir | test/ex_algo/sort/distribution_test.exs | code-shoily/ex_algo | 7837c222fd2844a151b6b92038f94ea088bec0a2 | [
"MIT"
] | 21 | 2021-11-21T08:07:38.000Z | 2022-03-13T06:19:35.000Z | test/ex_algo/sort/distribution_test.exs | code-shoily/ex_algo | 7837c222fd2844a151b6b92038f94ea088bec0a2 | [
"MIT"
] | 3 | 2021-11-26T22:54:09.000Z | 2022-03-06T21:16:12.000Z | test/ex_algo/sort/distribution_test.exs | code-shoily/ex_algo | 7837c222fd2844a151b6b92038f94ea088bec0a2 | [
"MIT"
] | null | null | null | defmodule ExAlgo.Sort.DistributionTest do
use ExUnit.Case
use ExUnitProperties
@moduletag :distribution_sort
alias ExAlgo.Sort.Distribution
doctest ExAlgo.Sort.Distribution
describe "pigeonhole_sort/1" do
  # Property-based check: for any generated list of integers, pigeonhole sort
  # must agree with the stdlib Enum.sort/1 oracle.
  property "pigeonhole_sort sorts properly" do
    check all list <- list_of(integer()) do
      assert Distribution.pigeonhole_sort(list) == Enum.sort(list)
    end
  end
end
end
| 22.888889 | 68 | 0.737864 |
086d2906e35e4ab1380cb5b71ad02d2d1a802229 | 28,405 | ex | Elixir | apps/core/lib/core/declaration_requests/api/v1/creator.ex | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 8 | 2019-06-14T11:34:49.000Z | 2021-08-05T19:14:24.000Z | apps/core/lib/core/declaration_requests/api/v1/creator.ex | edenlabllc/ehealth.api.public | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 1 | 2019-07-08T15:20:22.000Z | 2019-07-08T15:20:22.000Z | apps/core/lib/core/declaration_requests/api/v1/creator.ex | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 6 | 2018-05-11T13:59:32.000Z | 2022-01-19T20:15:22.000Z | defmodule Core.DeclarationRequests.API.V1.Creator do
@moduledoc false
use Confex, otp_app: :core
use Timex
import Ecto.Changeset
import Ecto.Query
import Core.Utils.TypesConverter, only: [string_to_integer: 1]
alias Core.DeclarationRequests
alias Core.DeclarationRequests.API.Documents
alias Core.DeclarationRequests.API.V1.MpiSearch
alias Core.DeclarationRequests.DeclarationRequest
alias Core.DeclarationRequests.Urgent
alias Core.Employees.Employee
alias Core.GlobalParameters
alias Core.Man.Templates.DeclarationRequestPrintoutForm
alias Core.PartyUsers
alias Core.Persons.V1.Validator, as: PersonsValidator
alias Core.Repo
alias Core.Utils.NumberGenerator
alias Core.ValidationError
alias Core.Validators.BirthDate
alias Core.Validators.Error
alias Core.Validators.TaxID
alias Ecto.Adapters.SQL
alias Ecto.Changeset
alias Ecto.UUID
require Logger
@rpc_worker Application.get_env(:core, :rpc_worker)
@declaration_request_creator Application.get_env(:core, :api_resolvers)[:declaration_request_creator]
@auth_na DeclarationRequest.authentication_method(:na)
@auth_otp DeclarationRequest.authentication_method(:otp)
@auth_offline DeclarationRequest.authentication_method(:offline)
@mithril_api Application.get_env(:core, :api_resolvers)[:mithril]
@channel_cabinet DeclarationRequest.channel(:cabinet)
@status_new DeclarationRequest.status(:new)
@status_approved DeclarationRequest.status(:approved)
@pediatrician "PEDIATRICIAN"
@therapist "THERAPIST"
@family_doctor "FAMILY_DOCTOR"
@allowed_employee_specialities [@pediatrician, @therapist, @family_doctor]
@mithril_api Application.get_env(:core, :api_resolvers)[:mithril]
@read_repo Application.get_env(:core, :repos)[:read_repo]
# Entry point for creating a declaration request for an already-known person.
# Cancels the person's previous pending requests and inserts the new request
# inside one DB transaction, so either everything is applied or nothing is.
def create(params, user_id, person, employee, division, legal_entity) do
  global_parameters = GlobalParameters.get_values()

  # Context passed through the changeset/auth-method/printout pipeline.
  auxiliary_entities = %{
    employee: employee,
    global_parameters: global_parameters,
    division: division,
    legal_entity: legal_entity,
    person_id: person["id"]
  }

  pending_declaration_requests = pending_declaration_requests(person, employee.id, legal_entity.id)

  Repo.transaction(fn ->
    cancel_declaration_requests(user_id, pending_declaration_requests)

    with {:ok, declaration_request} <- insert_declaration_request(params, user_id, auxiliary_entities),
         {:ok, declaration_request} <- finalize(declaration_request),
         {:ok, urgent_data} <- prepare_urgent_data(declaration_request) do
      %{urgent_data: urgent_data, finalize: declaration_request}
    else
      # Any failed step aborts the transaction (the cancellations roll back too).
      {:error, reason} -> Repo.rollback(reason)
    end
  end)
end
# Bulk-marks every request matched by the `pending_declaration_requests`
# query as CANCELLED on behalf of `user_id`. Ids are read through the read
# replica, then updated on the primary repo.
def cancel_declaration_requests(user_id, pending_declaration_requests) do
  previous_request_ids =
    pending_declaration_requests
    |> @read_repo.all()
    |> Enum.map(&Map.get(&1, :id))

  DeclarationRequest
  |> where([dr], dr.id in ^previous_request_ids)
  |> Repo.update_all(
    set: [
      status: DeclarationRequest.status(:cancelled),
      updated_at: DateTime.utc_now(),
      updated_by: user_id
    ]
  )
end
# Builds the full changeset (business validations, MPI auth method detection,
# printout rendering) and inserts it. Each step may turn the changeset
# invalid, which then surfaces as {:error, changeset} from the insert.
defp insert_declaration_request(params, user_id, auxiliary_entities) do
  params
  |> changeset(user_id, auxiliary_entities)
  |> determine_auth_method_for_mpi(params["channel"], auxiliary_entities)
  |> generate_printout_form(auxiliary_entities[:employee])
  |> do_insert_declaration_request()
end
# Inserts the prepared changeset.
#
# `Repo.insert/1` already returns `{:ok, record} | {:error, changeset}`, which
# is exactly this function's contract — the previous identity `case` that
# re-wrapped both clauses into the same tuples was a no-op and is removed.
def do_insert_declaration_request(changeset) do
  Repo.insert(changeset)
end
# Checks that at least one of the employee's specialities is a doctor
# speciality (PEDIATRICIAN, THERAPIST or FAMILY_DOCTOR); returns :ok or a
# dumped validation error.
def validate_employee_speciality(%Employee{additional_info: additional_info}) do
  specialities = Map.get(additional_info, "specialities", [])

  doctor? =
    Enum.any?(specialities, fn entry ->
      Map.get(entry, "speciality") in @allowed_employee_specialities
    end)

  if doctor? do
    :ok
  else
    allowed_types = Enum.join(@allowed_employee_specialities, ", ")

    Error.dump(%ValidationError{
      description: "Employee's speciality does not belong to a doctor: #{allowed_types}",
      params: [allowed_types: allowed_types],
      rule: "speciality_inclusion",
      path: "$.data"
    })
  end
end
# prepare_auth_method_current/1 — used when NO person was matched in MPI:
# OFFLINE is kept, any other submitted method collapses to NA.
defp prepare_auth_method_current(%{type: @auth_offline}), do: %{"type" => @auth_offline}
defp prepare_auth_method_current(_), do: %{"type" => @auth_na}

# prepare_auth_method_current/3 — used when an MPI person WAS found; derives
# the effective method from the person's stored method (type + struct) and
# the method submitted in this request.
defp prepare_auth_method_current(@auth_otp, %{phone_number: phone_number}, _) do
  %{
    "type" => @auth_otp,
    "number" => phone_number
  }
end

defp prepare_auth_method_current(@auth_na, _, req_auth_method) do
  auth_method = Map.take(req_auth_method, ["type"])

  # Carry the submitted phone number along when one was provided.
  case Map.has_key?(req_auth_method, "phone_number") do
    true -> Map.put(auth_method, "number", req_auth_method["phone_number"])
    _ -> auth_method
  end
end

defp prepare_auth_method_current(type, _authentication_method, _), do: %{"type" => type}
# Extracts the user ids from a list of party-user records.
defp fetch_users(result) do
  {:ok, Enum.map(result, &Map.get(&1, :user_id))}
end

# Resolves a Mithril role name to its id; fails unless exactly one role
# with that name exists.
defp get_role_id(name) do
  with {:ok, results} <- @mithril_api.get_roles_by_name(name, []) do
    roles = Map.get(results, "data")

    case length(roles) do
      1 -> {:ok, roles |> List.first() |> Map.get("id")}
      _ -> {:error, "Role #{name} does not exist"}
    end
  end
end

# Keeps only the users that hold the given role; bails out with the first
# Mithril error encountered, if any.
defp filter_users_by_role(role_id, users) do
  user_roles_results = Enum.map(users, &@mithril_api.get_user_roles(&1, %{}, []))
  error = Enum.find(user_roles_results, fn {k, _} -> k == :error end)

  case error do
    nil -> {:ok, Enum.filter(user_roles_results, fn {:ok, result} -> check_role(result, role_id) end)}
    err -> err
  end
end
# Picks the user id out of the LAST user-roles entry of a non-empty list.
# Matching on `[_ | _]` asserts non-emptiness in O(1) instead of the previous
# O(n) `length(user_roles) > 0` guard; dispatch behaviour is unchanged
# (empty lists and non-lists still fall through to the error clause).
defp get_user_id([_ | _] = user_roles) do
  {:ok, user_role} = List.last(user_roles)

  user_id =
    user_role
    |> Map.get("data")
    |> List.first()
    |> Map.get("user_id")

  {:ok, user_id}
end

defp get_user_id(_), do: {:error, "Current user is not a doctor"}
# True when any of the user's role entries references `role_id`.
defp check_role(user, role_id) do
  Enum.any?(Map.get(user, "data"), fn user_role -> Map.get(user_role, "role_id") == role_id end)
end

# Fetches the user's e-mail from Mithril.
defp get_user_email(user_id) do
  with {:ok, user} <- @mithril_api.get_user_by_id(user_id, []), do: {:ok, get_in(user, ["data", "email"])}
end
# Finds the e-mail of the DOCTOR user linked to the given party:
# party users -> user ids -> users holding the DOCTOR role -> last one's e-mail.
defp get_party_email(party_id) do
  with result <- PartyUsers.list!(%{party_id: party_id}),
       {:ok, users} <- fetch_users(result),
       {:ok, role_id} <- get_role_id("DOCTOR"),
       {:ok, user_roles} <- filter_users_by_role(role_id, users),
       {:ok, user_id} <- get_user_id(user_roles),
       do: get_user_email(user_id)
end

# Writes `value` at `keys` (a get_in/put_in path) inside the changeset's
# embedded :data map.
defp put_in_data(changeset, keys, value) do
  new_data =
    changeset
    |> get_field(:data)
    |> put_in(keys, value)

  put_change(changeset, :data, new_data)
end

# Human-readable message for failures coming from an external service.
defp format_error_response(microservice, result) do
  "Error during #{microservice} interaction. Result from #{microservice}: #{inspect(result)}"
end
# Post-insert step: depending on the chosen authentication method and the
# person's no_tax_id flag, initializes an OTP and/or generates upload links
# for documents the patient must provide offline.
def finalize(%DeclarationRequest{data: %{"person" => person}} = declaration_request) do
  authorization = declaration_request.authentication_method_current
  no_tax_id = person["no_tax_id"]
  do_finalize(declaration_request, authorization, no_tax_id)
end

# NA + no_tax_id: only the signed "no tax id" document needs uploading.
defp do_finalize(declaration_request, %{"type" => @auth_na}, true),
  do: generate_links(declaration_request, ["PUT"], true)

# NA with a tax id: nothing further to do.
defp do_finalize(declaration_request, %{"type" => @auth_na}, _), do: {:ok, declaration_request}

# OTP + no_tax_id: send the verification code AND generate the upload link.
defp do_finalize(declaration_request, %{"type" => @auth_otp, "number" => auth_number}, true) do
  case @rpc_worker.run("otp_verification_api", OtpVerification.Rpc, :initialize, [auth_number]) do
    {:ok, _} ->
      generate_links(declaration_request, ["PUT"], true)

    {:error, error} ->
      {:error, error}
  end
end

# OTP with a tax id: only the verification code is needed.
defp do_finalize(declaration_request, %{"type" => @auth_otp, "number" => auth_number}, _) do
  case @rpc_worker.run("otp_verification_api", OtpVerification.Rpc, :initialize, [auth_number]) do
    {:ok, _} ->
      {:ok, declaration_request}

    {:error, error} ->
      {:error, error}
  end
end

# OFFLINE: the person identifies via uploaded documents only.
defp do_finalize(declaration_request, %{"type" => @auth_offline}, _),
  do: generate_links(declaration_request, ["PUT"], false)

# Generates signed media-storage upload links and persists them on the request.
defp generate_links(declaration_request, http_verbs, no_tax_id_only) do
  case Documents.generate_links(declaration_request, http_verbs, no_tax_id_only) do
    {:ok, documents} ->
      update_documents(declaration_request, documents)

    {:error, _} = bad_result ->
      bad_result
  end
end
# Persists the generated document upload links on the declaration request.
defp update_documents(%DeclarationRequest{} = declaration_request, documents) do
  declaration_request
  |> DeclarationRequests.changeset(%{documents: documents})
  |> Repo.update()
end

# Shapes the "urgent" part of the API response: the sanitised current
# authentication method plus only the PUT upload links, stripped of the verb.
def prepare_urgent_data(declaration_request) do
  filtered_authentication_method_current =
    Urgent.filter_authentication_method(declaration_request.authentication_method_current)

  filter_document_links = fn documents ->
    filter_fun = fn document -> document["verb"] == "PUT" end
    map_fun = fn document -> Map.drop(document, ["verb"]) end

    documents
    |> Enum.filter(filter_fun)
    |> Enum.map(map_fun)
  end

  # `documents` is nil unless finalize/1 generated upload links.
  urgent_data =
    if declaration_request.documents do
      %{
        authentication_method_current: filtered_authentication_method_current,
        documents: filter_document_links.(declaration_request.documents)
      }
    else
      %{
        authentication_method_current: filtered_authentication_method_current
      }
    end

  {:ok, urgent_data}
end
# Query for the person's still-active (NEW/APPROVED) declaration requests
# with the same employee and legal entity. Matched by tax_id when present,
# otherwise by full name + birth date (see the duplicated data_* columns).
def pending_declaration_requests(%{"tax_id" => tax_id}, employee_id, legal_entity_id) when not is_nil(tax_id) do
  DeclarationRequest
  |> where([p], p.status in [@status_new, @status_approved])
  |> where([p], p.data_person_tax_id == ^tax_id)
  |> where([p], p.data_employee_id == ^employee_id)
  |> where([p], p.data_legal_entity_id == ^legal_entity_id)
end

def pending_declaration_requests(person, employee_id, legal_entity_id) do
  first_name = Map.get(person, "first_name")
  last_name = Map.get(person, "last_name")

  # The column stores a Date, so the incoming ISO string is parsed first.
  birth_date =
    person
    |> Map.get("birth_date")
    |> case do
      value when is_binary(value) -> Date.from_iso8601!(value)
      _ -> nil
    end

  DeclarationRequest
  |> where([p], p.status in [@status_new, @status_approved])
  |> where([p], p.data_person_first_name == ^first_name)
  |> where([p], p.data_person_last_name == ^last_name)
  |> where([p], p.data_person_birth_date == ^birth_date)
  |> where([p], p.data_employee_id == ^employee_id)
  |> where([p], p.data_legal_entity_id == ^legal_entity_id)
end
# Builds the insert changeset: embeds denormalised employee/division/legal
# entity snapshots into `data`, runs all business validations, computes the
# declaration validity window and pre-generates ids / declaration number.
def changeset(attrs, user_id, auxiliary_entities) do
  %{
    employee: employee,
    global_parameters: global_parameters,
    division: division,
    legal_entity: legal_entity
  } = auxiliary_entities

  employee_speciality_officio = employee.speciality["speciality"]

  overlimit = Map.get(attrs, "overlimit", false)
  channel = attrs["channel"]
  attrs = Map.drop(attrs, ~w(person_id employee_id division_id overlimit))

  # Both ids are generated up-front so the declaration id can be embedded in
  # the JSON data as well as stored in its own column.
  id = UUID.generate()
  declaration_id = UUID.generate()

  %DeclarationRequest{id: id}
  |> cast(%{data: attrs, overlimit: overlimit, channel: channel}, ~w(data overlimit channel)a)
  |> validate_legal_entity_employee(legal_entity, employee)
  |> validate_legal_entity_division(legal_entity, division)
  |> validate_employee_type(employee)
  |> validate_patient_birth_date()
  |> validate_patient_age(employee_speciality_officio, global_parameters["adult_age"])
  |> validate_authentication_method_phone_number()
  |> validate_tax_id()
  |> validate_person_addresses()
  |> validate_confidant_persons_tax_id()
  |> validate_confidant_person_rel_type()
  |> validate_authentication_methods()
  |> put_start_end_dates(employee_speciality_officio, global_parameters)
  |> put_in_data(["employee"], prepare_employee_struct(employee))
  |> put_in_data(["division"], prepare_division_struct(division))
  |> put_in_data(["legal_entity"], prepare_legal_entity_struct(legal_entity))
  |> put_in_data(["declaration_id"], declaration_id)
  |> put_change(:id, id)
  |> put_change(:declaration_id, declaration_id)
  |> put_change(:status, @status_new)
  |> put_change(:inserted_by, user_id)
  |> put_change(:updated_by, user_id)
  |> put_declaration_number()
  |> unique_constraint(:declaration_number, name: :declaration_requests_declaration_number_index)
  |> put_party_email()
  |> duplicate_data_fields()
end
# The employee must work for the legal entity the request is created under.
defp validate_legal_entity_employee(changeset, legal_entity, employee) do
  validate_change(changeset, :data, fn :data, _data ->
    case employee.legal_entity_id == legal_entity.id do
      true -> []
      false -> [data: {"Employee does not belong to legal entity.", validation: "employee_unemployed"}]
    end
  end)
end

# The division must belong to the same legal entity as well.
defp validate_legal_entity_division(changeset, legal_entity, division) do
  validate_change(changeset, :data, fn :data, _data ->
    case division.legal_entity_id == legal_entity.id do
      true -> []
      false -> [data: "Division does not belong to legal entity."]
    end
  end)
end

# Standalone check (not part of the changeset pipeline): only APPROVED
# employees may be referenced.
def validate_employee_status(%Employee{status: status}) do
  if status == Employee.status(:approved) do
    :ok
  else
    Error.dump(%ValidationError{description: "Invalid employee status", path: "$.employee_id"})
  end
end

# Declarations may only reference employees of type DOCTOR.
def validate_employee_type(changeset, employee) do
  if Employee.type(:doctor) == employee.employee_type do
    changeset
  else
    add_error(changeset, :"data.person.employee_id", "Employee ID must reference a doctor.")
  end
end
# Person's birth date must pass the BirthDate validator (parseable and in a
# plausible range).
def validate_patient_birth_date(changeset) do
  validate_change(changeset, :data, fn :data, data ->
    data
    |> get_in(["person", "birth_date"])
    |> BirthDate.validate()
    |> case do
      true -> []
      false -> [data: "Invalid birth date."]
    end
  end)
end

# The doctor's speciality must be compatible with the patient's age
# (therapists treat adults, pediatricians treat children, family doctors both).
def validate_patient_age(changeset, speciality, adult_age) do
  validate_change(changeset, :data, fn :data, data ->
    patient_birth_date =
      data
      |> get_in(["person", "birth_date"])
      |> Date.from_iso8601!()

    patient_age = Timex.diff(Timex.now(), patient_birth_date, :years)

    case belongs_to(patient_age, adult_age, speciality) do
      true -> []
      false -> [data: {"Doctor speciality doesn't match patient's age", validation: "invalid_age"}]
    end
  end)
end

# Age/speciality compatibility table. `adult_age` arrives as a string
# (global parameter) and is converted on each call.
def belongs_to(age, adult_age, @therapist), do: age >= string_to_integer(adult_age)
def belongs_to(age, adult_age, @pediatrician), do: age < string_to_integer(adult_age)
def belongs_to(_age, _adult_age, @family_doctor), do: true
# Delegates per-method phone number validation to the persons validator.
defp validate_authentication_method_phone_number(changeset) do
  validate_change(changeset, :data, fn :data, data ->
    result =
      data
      |> get_in(["person", "authentication_methods"])
      |> PersonsValidator.validate_authentication_method_phone_number()

    case result do
      :ok -> []
      {:error, message} -> [data: message]
    end
  end)
end

# A missing tax id is allowed; a present one must pass checksum validation.
def validate_tax_id(changeset) do
  tax_id =
    changeset
    |> get_field(:data)
    |> get_in(["person", "tax_id"])

  if is_nil(tax_id) || TaxID.validate(tax_id, nil) == :ok do
    changeset
  else
    add_error(changeset, :"data.person.tax_id", "Person's tax ID in not valid.")
  end
end
# Exactly one address of type RESIDENCE is required.
def validate_person_addresses(changeset) do
  addresses =
    changeset
    |> get_field(:data)
    |> get_in(["person", "addresses"])

  with :ok <- assert_address_count(addresses, "RESIDENCE", 1) do
    changeset
  else
    {:error, "RESIDENCE"} ->
      add_error(changeset, :"data.person.addresses", "one and only one residence address is required")
  end
end
# Returns :ok when exactly `expected_count` addresses of `address_type` are
# present, otherwise {:error, address_type}.
defp assert_address_count(addresses, address_type, expected_count) do
  actual = Enum.count(addresses, fn %{"type" => type} -> type == address_type end)

  if actual == expected_count do
    :ok
  else
    {:error, address_type}
  end
end
# Validates every confidant person's tax id (when any are present); errors
# are reported per list index.
def validate_confidant_persons_tax_id(changeset) do
  confidant_persons =
    changeset
    |> get_field(:data)
    |> get_in(["person", "confidant_person"])

  if is_list(confidant_persons) && !Enum.empty?(confidant_persons) do
    validation = fn {person, index}, changeset ->
      tax_id = person["tax_id"]

      if is_nil(tax_id) || TaxID.validate(tax_id, nil) == :ok do
        changeset
      else
        add_error(changeset, :"data.person.confidant_person.[#{index}].tax_id", "Person's tax ID in not valid.")
      end
    end

    confidant_persons
    |> Enum.with_index()
    |> Enum.reduce(changeset, validation)
  else
    changeset
  end
end

# When confidant persons are given, exactly one must have relation type
# PRIMARY.
def validate_confidant_person_rel_type(changeset) do
  confidant_persons =
    changeset
    |> get_field(:data)
    |> get_in(["person", "confidant_person"])

  if is_list(confidant_persons) && !Enum.empty?(confidant_persons) do
    if 1 == Enum.count(confidant_persons, fn %{"relation_type" => type} -> type == "PRIMARY" end) do
      changeset
    else
      message = "one and only one confidant person with type PRIMARY is required"
      add_error(changeset, :"data.person.confidant_persons.[0].relation_type", message)
    end
  else
    changeset
  end
end
# Ensures every OTP authentication method carries a phone number.
def validate_authentication_methods(changeset) do
  authentication_methods =
    changeset
    |> get_field(:data)
    |> get_in(["person", "authentication_methods"])

  if is_list(authentication_methods) && !Enum.empty?(authentication_methods) do
    authentication_methods
    # The accumulator threads the list index along for error paths.
    |> Enum.reduce({0, changeset}, &validate_auth_method/2)
    |> elem(1)
  else
    changeset
  end
end

defp validate_auth_method(%{"type" => @auth_otp} = method, {i, changeset}) do
  case Map.has_key?(method, "phone_number") do
    true ->
      {i + 1, changeset}

    false ->
      message = "required property phone_number was not present"
      {i + 1, add_error(changeset, :"data.person.authentication_methods.[#{i}].phone_number", message)}
  end
end

# Non-OTP methods need no phone number; just advance the index.
defp validate_auth_method(_, {i, changeset}) do
  {i + 1, changeset}
end
# Computes the declaration validity window from global parameters: start is
# today; end depends on the declaration term and, for pediatricians, on the
# date the child becomes an adult (see request_end_date/5).
defp put_start_end_dates(changeset, employee_speciality_officio, global_parameters) do
  %{
    "declaration_term" => term,
    "declaration_term_unit" => unit,
    "adult_age" => adult_age
  } = global_parameters

  # Global parameters are stored as strings.
  adult_age = String.to_integer(adult_age)
  term = String.to_integer(term)

  # e.g. "YEARS" -> :years, the shift unit Timex.shift/2 expects.
  normalized_unit =
    unit
    |> String.downcase()
    |> String.to_atom()

  data = get_field(changeset, :data)
  birth_date = get_in(data, ["person", "birth_date"])

  start_date = Date.utc_today()

  end_date =
    request_end_date(employee_speciality_officio, start_date, [{normalized_unit, term}], birth_date, adult_age)

  new_data =
    data
    |> put_in(["end_date"], end_date)
    |> put_in(["start_date"], start_date)

  put_change(changeset, :data, new_data)
end
# Denormalised snapshot of the employee embedded into the request data.
defp prepare_employee_struct(employee) do
  %{
    "id" => employee.id,
    "position" => employee.position,
    "party" => %{
      "id" => employee.party.id,
      "first_name" => employee.party.first_name,
      "second_name" => employee.party.second_name,
      "last_name" => employee.party.last_name,
      "phones" => employee.party.phones,
      "tax_id" => employee.party.tax_id
    }
  }
end

# Denormalised snapshot of the division embedded into the request data.
defp prepare_division_struct(division) do
  %{
    "id" => division.id,
    "type" => division.type,
    "phones" => division.phones,
    "name" => division.name,
    "legal_entity_id" => division.legal_entity_id,
    "external_id" => division.external_id,
    "email" => division.email,
    "addresses" => prepare_addresses(division.addresses)
  }
end

# Converts address structs to plain string-keyed maps (via a JSON
# round-trip) and strips the DB-only keys.
defp prepare_addresses(addresses) do
  Enum.map(addresses, fn address ->
    address
    |> Jason.encode!()
    |> Jason.decode!()
    |> Map.drop(~w(id division_id))
  end)
end

# Denormalised snapshot of the legal entity embedded into the request data.
defp prepare_legal_entity_struct(legal_entity) do
  %{
    "id" => legal_entity.id,
    "name" => legal_entity.name,
    "short_name" => legal_entity.short_name,
    "phones" => legal_entity.phones,
    "legal_form" => legal_entity.legal_form,
    "edrpou" => legal_entity.edrpou,
    "public_name" => legal_entity.public_name,
    "email" => legal_entity.email,
    "addresses" => legal_entity.addresses,
    "accreditation" => legal_entity.accreditation,
    "licenses" => [legal_entity.license]
  }
end
# Draws the next value from the DB sequence and turns it into the public
# declaration number.
defp put_declaration_number(changeset) do
  with {:ok, sequence} <- get_sequence_number() do
    put_change(changeset, :declaration_number, NumberGenerator.generate_from_sequence(1, sequence))
  else
    _ ->
      add_error(changeset, :sequence, "declaration_request sequence doesn't return a number")
  end
end

# Skip the remote lookup when the changeset is already invalid.
def put_party_email(%Changeset{valid?: false} = changeset), do: changeset

# Looks up the doctor's e-mail for the employee's party and stores it in the
# embedded data; turns the changeset invalid when the lookup fails.
def put_party_email(changeset) do
  party_id =
    changeset
    |> get_field(:data)
    |> get_in(["employee", "party", "id"])

  case get_party_email(party_id) do
    {:ok, email} ->
      put_in_data(changeset, ["employee", "party", "email"], email)

    {:error, error} when is_binary(error) ->
      add_error(changeset, :email, error)

    {:error, error_response} ->
      add_error(changeset, :email, format_error_response("microservice", error_response))
  end
end
# When the feature flag is on, enforces the global limit on how many persons
# may share one auth phone number. Runs on the MPI search result, before the
# authentication method is chosen.
def check_phone_number_auth_limit({:ok, _} = search_result, changeset, auxiliary_entities) do
  if config()[:use_phone_number_auth_limit] do
    # First submitted method that carries a phone number, if any.
    phone_number =
      changeset
      |> get_field(:data)
      |> get_in(["person", "authentication_methods"])
      |> Enum.find(fn authentication_method -> Map.has_key?(authentication_method, "phone_number") end)
      |> Kernel.||(%{})
      |> Map.get("phone_number")

    check_search_result(search_result, phone_number, auxiliary_entities)
  else
    search_result
  end
end

# Errors pass through untouched.
def check_phone_number_auth_limit(error, _, _), do: error

# No phone number submitted - nothing to check.
defp check_search_result(search_result, nil, _), do: search_result

# No person matched - the number still counts against the global limit.
defp check_search_result({:ok, nil}, phone_number, auxiliary_entities),
  do: run_phone_number_auth_limit_check(nil, phone_number, auxiliary_entities)

# A person was matched: the limit only applies when the submitted number is
# new for that person.
defp check_search_result({:ok, person}, phone_number, auxiliary_entities) do
  new_phone_number? =
    person
    |> Map.get(:authentication_methods)
    |> Enum.filter(fn authentication_method -> Map.get(authentication_method, :phone_number) == phone_number end)
    |> Enum.empty?()

  if new_phone_number? do
    run_phone_number_auth_limit_check(person, phone_number, auxiliary_entities)
  else
    {:ok, person}
  end
end

# Counts the persons already using this number in MPI and rejects the
# request when the configured limit is reached.
defp run_phone_number_auth_limit_check(search_params, phone_number, auxiliary_entities) do
  phone_number_auth_limit =
    auxiliary_entities
    |> get_in([:global_parameters, "phone_number_auth_limit"])
    |> String.to_integer()

  with {:ok, persons} <- mpi_search(%{"auth_phone_number" => phone_number}) do
    if Enum.count(persons) < phone_number_auth_limit do
      {:ok, search_params}
    else
      {:error, :authentication_methods,
       "This phone number is present more than #{phone_number_auth_limit} times in the system"}
    end
  end
end
# Skip the MPI round-trip for changesets that are already invalid.
def determine_auth_method_for_mpi(%Changeset{valid?: false} = changeset, _, _), do: changeset

# Cabinet channel: the person is already authenticated, no MPI lookup needed.
def determine_auth_method_for_mpi(changeset, @channel_cabinet, auxiliary_entities) do
  changeset
  |> put_change(:authentication_method_current, %{"type" => @auth_na})
  |> put_change(:mpi_id, auxiliary_entities[:person_id])
end

# Other channels: search the person in MPI, apply the phone-number limit and
# derive the effective authentication method from the result.
def determine_auth_method_for_mpi(changeset, _, auxiliary_entities) do
  changeset
  |> get_field(:data)
  |> get_in(["person"])
  |> mpi_search()
  |> check_phone_number_auth_limit(changeset, auxiliary_entities)
  |> do_determine_auth_method_for_mpi(changeset)
end

def mpi_search(person) do
  MpiSearch.search(person)
end

# No MPI match: fall back to the first auth method submitted in the request.
def do_determine_auth_method_for_mpi({:ok, nil}, changeset) do
  data = get_field(changeset, :data)
  authentication_method = hd(data["person"]["authentication_methods"])
  put_change(changeset, :authentication_method_current, prepare_auth_method_current(authentication_method))
end

# MPI match: combine the person's stored auth method with the submitted one
# and remember the matched person's id.
def do_determine_auth_method_for_mpi({:ok, person}, changeset) do
  authentication_method = List.first(person.authentication_methods || [])
  authenticated_methods = changeset |> get_field(:data) |> get_in(~w(person authentication_methods)) |> hd

  authentication_method_current =
    prepare_auth_method_current(
      authentication_method.type,
      authentication_method,
      authenticated_methods
    )

  changeset
  |> put_change(:authentication_method_current, authentication_method_current)
  |> put_change(:mpi_id, person.id)
end

# Three-element errors come from the phone-number limit check and already
# name the errored field.
def do_determine_auth_method_for_mpi({:error, field, reason}, changeset),
  do: add_error(changeset, field, reason)

def do_determine_auth_method_for_mpi({:error, reason}, changeset),
  do: add_error(changeset, :authentication_method_current, format_error_response("MPI", reason))
# Skip rendering for changesets that are already invalid.
def generate_printout_form(%Changeset{valid?: false} = changeset, _), do: changeset

# Renders the printable declaration form (via the MAN template service) and
# caches the resulting content on the changeset.
def generate_printout_form(changeset, employee) do
  form_data = get_field(changeset, :data)
  employee = Map.put(Map.get(form_data, "employee") || %{}, "speciality", Map.get(employee, :speciality))
  form_data = Map.put(form_data, "employee", employee)
  declaration_number = get_field(changeset, :declaration_number)

  # Prefer an explicitly-set NA default method over the current one.
  authentication_method_current =
    case get_change(changeset, :authentication_method_default) do
      %{"type" => @auth_na} = default -> default
      _ -> get_change(changeset, :authentication_method_current)
    end

  case DeclarationRequestPrintoutForm.render(form_data, declaration_number, authentication_method_current) do
    {:ok, printout_content} ->
      put_change(changeset, :printout_content, printout_content)

    {:error, _} ->
      add_error(changeset, :printout_content, format_error_response("MAN", "Remote server internal error"))
  end
end
# Declaration end date: normally start + term, but for a pediatrician
# treating a child the declaration may not outlive the day before the
# patient's adulthood - whichever of the two dates comes first wins.
def request_end_date(employee_speciality_officio, today, expiration, birth_date, adult_age) do
  birth_date = Date.from_iso8601!(birth_date)

  normal_expiration_date = Timex.shift(today, expiration)
  # Day before the patient turns `adult_age`.
  adjusted_expiration_date = Timex.shift(birth_date, years: adult_age, days: -1)

  case {employee_speciality_officio, Timex.diff(today, birth_date, :years) >= adult_age} do
    {@pediatrician, false} ->
      # Timex.compare/2 returns 1 when the first date is later.
      case Timex.compare(normal_expiration_date, adjusted_expiration_date) do
        1 -> adjusted_expiration_date
        _ -> normal_expiration_date
      end

    _ ->
      normal_expiration_date
  end
end
# Raw nextval on the declaration_request DB sequence; overridable in tests
# via the api_resolvers config (see @declaration_request_creator).
def sql_get_sequence_number do
  SQL.query(Repo, "SELECT nextval('declaration_request');", [])
end

# Wraps the raw sequence query into {:ok, integer} | {:error, map}.
def get_sequence_number do
  case @declaration_request_creator.sql_get_sequence_number() do
    {:ok, %Postgrex.Result{rows: [[sequence]]}} ->
      {:ok, sequence}

    _ ->
      Logger.error("Can't get declaration_request sequence")
      {:error, %{"type" => "internal_error"}}
  end
end
# Copies frequently-queried fields out of the JSON `data` blob into their
# dedicated `data_*` columns, so pending_declaration_requests/3 can filter on
# them with plain WHERE clauses.
defp duplicate_data_fields(changeset) do
  data = get_field(changeset, :data)

  changeset
  |> put_change(:data_legal_entity_id, get_in(data, ~w(legal_entity id)))
  |> put_change(:data_employee_id, get_in(data, ~w(employee id)))
  |> put_change(:data_start_date_year, start_date_year(Map.get(data, "start_date")))
  |> put_change(:data_person_tax_id, get_in(data, ~w(person tax_id)))
  |> put_change(:data_person_first_name, get_in(data, ~w(person first_name)))
  |> put_change(:data_person_last_name, get_in(data, ~w(person last_name)))
  # BUG FIX: this previously read `Map.get(data, "birth_date")`, but the birth
  # date lives under data["person"]["birth_date"] (as every other read in this
  # module shows) and nothing writes a top-level "birth_date" key — so the
  # column was always nil and pending_declaration_requests/3 could never match
  # a person by name + birth date.
  |> put_change(:data_person_birth_date, parse_iso_date(get_in(data, ~w(person birth_date))))
end

# Year component of an ISO-8601 date string; nil when absent or not a binary
# (e.g. when the value is still a Date struct before JSON round-tripping).
defp start_date_year(value) when is_binary(value) do
  value |> Date.from_iso8601!() |> Map.get(:year)
end

defp start_date_year(_), do: nil

# Date struct from an ISO-8601 string; nil when absent or not a binary.
defp parse_iso_date(value) when is_binary(value), do: Date.from_iso8601!(value)
defp parse_iso_date(_), do: nil
end
| 32.762399 | 115 | 0.682274 |
086d3e69f66b85395040a78ba5880b6a5deffe87 | 761 | ex | Elixir | lib/builder/car.ex | Fulnir/Elixir_Design_Pattern | 77e77541ac604968bfbfe9ebbd6b51f1c3442c1a | [
"MIT"
] | 3 | 2018-03-06T13:45:42.000Z | 2019-07-22T00:16:54.000Z | lib/builder/car.ex | Fulnir/Elixir_Design_Pattern | 77e77541ac604968bfbfe9ebbd6b51f1c3442c1a | [
"MIT"
] | null | null | null | lib/builder/car.ex | Fulnir/Elixir_Design_Pattern | 77e77541ac604968bfbfe9ebbd6b51f1c3442c1a | [
"MIT"
] | null | null | null | defmodule Car do
@moduledoc """
Copyright © 2018 Edwin Buehler. All rights reserved.
"""
@doc """
A simple struct.
iex> %Car{name: "Blue Ocean", color: :blue, color_doors: :cyan}
%Car{name: "Blue Ocean", color: :blue, color_doors: :cyan}
"""
# NOTE(review): `name` defaults to the `String` module atom, not a string —
# presumably a placeholder meaning "expected to be a String"; confirm intent.
defstruct name: String,
          color: :white,
          color_doors: :white
@doc """
Creates a new car without the builder.
"""
def new(name), do: %__MODULE__{name: name}
@doc """
A car or a transformer? Not builder related.
"""
def is_transformer?(%{__struct__: struct_module}) do
  struct_module == TransformerCar
end
end
| 20.567568 | 71 | 0.521682 |
086d4f7bdc3f4f75d4e70ea776d980253e0bf7f3 | 772 | ex | Elixir | test/support/fixtures/accounts_fixtures.ex | jahio/lifelog | a3660e65acb3abdaac388b494736a645d825df1f | [
"MIT"
] | null | null | null | test/support/fixtures/accounts_fixtures.ex | jahio/lifelog | a3660e65acb3abdaac388b494736a645d825df1f | [
"MIT"
] | null | null | null | test/support/fixtures/accounts_fixtures.ex | jahio/lifelog | a3660e65acb3abdaac388b494736a645d825df1f | [
"MIT"
] | null | null | null | defmodule Lifelog.AccountsFixtures do
@moduledoc """
This module defines test helpers for creating
entities via the `Lifelog.Accounts` context.
"""
# Returns a fresh, unique e-mail address for registering test users.
def unique_user_email do
  "user#{System.unique_integer()}@example.com"
end

# A password accepted by the registration changeset.
def valid_user_password, do: "hello world!"
# Merges the caller-supplied attributes (map or keyword list) over the
# default e-mail/password pair; caller values win.
def valid_user_attributes(attrs \\ %{}) do
  defaults = %{
    email: unique_user_email(),
    password: valid_user_password()
  }

  Map.merge(defaults, Map.new(attrs))
end
# Registers and returns a user; crashes (MatchError) if registration fails.
def user_fixture(attrs \\ %{}) do
  {:ok, user} = Lifelog.Accounts.register_user(valid_user_attributes(attrs))
  user
end
# Runs `build_email_fun` with a wrapper that brackets the token between
# [TOKEN] markers, then pulls the token back out of the delivered e-mail body.
def extract_user_token(build_email_fun) do
  {:ok, captured} = build_email_fun.(fn token -> "[TOKEN]#{token}[TOKEN]" end)
  [_before, token | _rest] = String.split(captured.text_body, "[TOKEN]")
  token
end
end
| 24.125 | 73 | 0.65544 |
086d66876f6da83f3192b0670592df1a9eb998d7 | 237 | ex | Elixir | lib/absinthe/blueprint/input/raw_value.ex | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 4,101 | 2016-03-02T03:49:20.000Z | 2022-03-31T05:46:01.000Z | lib/absinthe/blueprint/input/raw_value.ex | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 889 | 2016-03-02T16:06:59.000Z | 2022-03-31T20:24:12.000Z | lib/absinthe/blueprint/input/raw_value.ex | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 564 | 2016-03-02T07:49:59.000Z | 2022-03-06T14:40:59.000Z | defmodule Absinthe.Blueprint.Input.RawValue do
@moduledoc false
alias Absinthe.Blueprint.Input.Object
@enforce_keys [:content]
defstruct [
:content
]
@type t :: %__MODULE__{
content: Object.t()
}
end
| 15.8 | 46 | 0.654008 |
086d75be5276d65a5fcf767f57f8340ca03359b0 | 286 | exs | Elixir | priv/repo/migrations/20161114001005_create_region.exs | rob05c/tox | f54847ca058ad24b909341ad65d595a4069d2471 | [
"Apache-2.0"
] | 2 | 2016-11-16T17:24:21.000Z | 2019-02-15T05:38:27.000Z | priv/repo/migrations/20161114001005_create_region.exs | rob05c/tox | f54847ca058ad24b909341ad65d595a4069d2471 | [
"Apache-2.0"
] | null | null | null | priv/repo/migrations/20161114001005_create_region.exs | rob05c/tox | f54847ca058ad24b909341ad65d595a4069d2471 | [
"Apache-2.0"
] | null | null | null | defmodule Tox.Repo.Migrations.CreateRegion do
use Ecto.Migration
def change do
  # The table uses :name as a natural (text) primary key, so the
  # default serial id column is disabled.
  create table(:regions, primary_key: false) do
    add :name, :text, primary_key: true
    # Foreign key into divisions by its text :name column.
    add :division, references(:divisions, column: :name, type: :text)
    timestamps()
  end
end
end
| 22 | 71 | 0.685315 |
086d7ca67599db86acb502ac8da51db28efdcd29 | 1,362 | exs | Elixir | test/depot_s3_test.exs | mindreframer/depot_s3 | a984ff8747a21e7aa1c74f5049f4c909d7c6454d | [
"Apache-2.0"
] | null | null | null | test/depot_s3_test.exs | mindreframer/depot_s3 | a984ff8747a21e7aa1c74f5049f4c909d7c6454d | [
"Apache-2.0"
] | null | null | null | test/depot_s3_test.exs | mindreframer/depot_s3 | a984ff8747a21e7aa1c74f5049f4c909d7c6454d | [
"Apache-2.0"
] | null | null | null | defmodule DepotS3Test do
use ExUnit.Case
import DepotS3.AdapterTest
setup do
  # Each test gets a fresh Minio config and an empty "default" bucket.
  config = DepotS3.Minio.config()
  DepotS3.Minio.clean_bucket("default")
  DepotS3.Minio.recreate_bucket("default")

  # Leave no objects behind for the next test run.
  on_exit(fn ->
    DepotS3.Minio.clean_bucket("default")
  end)

  {:ok, config: config, bucket: "default"}
end
# Runs the shared adapter test suite (DepotS3.AdapterTest) against a
# filesystem configured for the "default" bucket.
adapter_test %{config: config} do
  filesystem = DepotS3.configure(config: config, bucket: "default")
  {:ok, filesystem: filesystem}
end
describe "cross bucket" do
  setup %{config: config} do
    # Second, independently configured bucket so the copy has to cross
    # filesystem boundaries.
    config_b = DepotS3.Minio.config()
    DepotS3.Minio.clean_bucket("secondary")
    DepotS3.Minio.recreate_bucket("secondary")

    on_exit(fn ->
      DepotS3.Minio.clean_bucket("secondary")
    end)

    {:ok, config_a: config, config_b: config_b}
  end

  test "copy", %{config_a: config_a, config_b: config_b} do
    filesystem_a = DepotS3.configure(config: config_a, bucket: "default")
    filesystem_b = DepotS3.configure(config: config_b, bucket: "secondary")

    :ok = Depot.write(filesystem_a, "test.txt", "Hello World")

    # Copying between two distinct filesystems must succeed and the
    # destination must contain the source's bytes.
    assert :ok =
             Depot.copy_between_filesystem(
               {filesystem_a, "test.txt"},
               {filesystem_b, "other.txt"}
             )

    assert {:ok, "Hello World"} = Depot.read(filesystem_b, "other.txt")
  end
end
end
| 26.705882 | 77 | 0.639501 |
086d93ceda21e169abf9b7b0fc31065b42327911 | 532 | exs | Elixir | config/runtime.exs | hanam1ni/codewar-web | 0d7c46ac32d85b1d76c604226e0f3d6f2b76b0ad | [
"MIT"
] | null | null | null | config/runtime.exs | hanam1ni/codewar-web | 0d7c46ac32d85b1d76c604226e0f3d6f2b76b0ad | [
"MIT"
] | null | null | null | config/runtime.exs | hanam1ni/codewar-web | 0d7c46ac32d85b1d76c604226e0f3d6f2b76b0ad | [
"MIT"
] | null | null | null | import Config
if config_env() == :prod do
config :codewar, Codewar.Repo,
ssl: true,
url: System.fetch_env!("DATABASE_URL"),
pool_size: String.to_integer(System.get_env("DATABASE_POOL_SIZE") || "10")
config :codewar, CodewarWeb.Endpoint,
http: [
port: String.to_integer(System.get_env("PORT") || "4000")
],
url: [
host: System.fetch_env!("HOST"),
port: String.to_integer(System.fetch_env!("PORT"))
],
secret_key_base: System.fetch_env!("SECRET_KEY_BASE"),
server: true
end
| 26.6 | 78 | 0.654135 |
086d94bd181456e54598c396fb986a281a13b0c4 | 55 | exs | Elixir | .formatter.exs | thbar/ex-portmidi | ad4abde02af60686f9dee8b066b92b5ffb79019b | [
"MIT"
] | 8 | 2021-05-01T19:01:07.000Z | 2021-05-18T12:19:25.000Z | .formatter.exs | thbar/ex-portmidi | ad4abde02af60686f9dee8b066b92b5ffb79019b | [
"MIT"
] | 1 | 2021-10-13T13:55:47.000Z | 2021-10-13T13:55:47.000Z | .formatter.exs | thbar/ex-portmidi | ad4abde02af60686f9dee8b066b92b5ffb79019b | [
"MIT"
] | null | null | null | [
inputs: ["mix.exs", "{lib,test}/**/*.{ex, exs}"]
]
| 13.75 | 50 | 0.436364 |
086dcb19ba3daa489b7894be8b84e40581df3e3f | 1,214 | exs | Elixir | config/config.exs | cschneid/approximate_histogram | ee48b3003a0a1543043ed353da4fa6b7a4ee11ee | [
"MIT"
] | 2 | 2017-03-30T17:00:04.000Z | 2020-03-29T15:54:03.000Z | config/config.exs | cschneid/approximate_histogram | ee48b3003a0a1543043ed353da4fa6b7a4ee11ee | [
"MIT"
] | null | null | null | config/config.exs | cschneid/approximate_histogram | ee48b3003a0a1543043ed353da4fa6b7a4ee11ee | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :approximate_histogram, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:approximate_histogram, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
if Mix.env == :dev do
config :mix_test_watch,
clear: true
end
| 33.722222 | 73 | 0.751236 |
086dd76e43e3317e0fc4997f785df5328f181ade | 2,161 | ex | Elixir | test/support/test_cluster.ex | pggalaviz/exnowflake | efd273a7f9c143e03130dcde960b90428cf460d0 | [
"MIT"
] | 11 | 2020-01-29T03:18:26.000Z | 2021-03-31T22:00:11.000Z | test/support/test_cluster.ex | pggalaviz/exnowflake | efd273a7f9c143e03130dcde960b90428cf460d0 | [
"MIT"
] | null | null | null | test/support/test_cluster.ex | pggalaviz/exnowflake | efd273a7f9c143e03130dcde960b90428cf460d0 | [
"MIT"
] | 1 | 2021-02-12T02:46:24.000Z | 2021-02-12T02:46:24.000Z | defmodule Exnowflake.TestCluster do
@moduledoc """
This module starts 3 slave nodes to run tests in a cluster environment.
Example:
mix test --include cluster
"""
require Logger
@node_names [:"exnow-1", :"exnow-2", :"exnow-3"]
# Boots all slave nodes and returns `{:ok, results}` where each result is
# the `{:ok, node}` tuple produced by `_spawn_node/1`.
#
# Fix: the previous version's map fun ended with `:timer.sleep(50)`, so
# `results` was a list of `:ok` sleep returns rather than the spawn results.
def start do
  IO.puts("==> Starting tests in cluster mode...")
  # Allow spawned nodes to fetch all code from this node
  :erl_boot_server.start([{127, 0, 0, 1}])

  results =
    Enum.map(@node_names, fn node ->
      result = _spawn_node(node)
      # Small stagger between spawns.
      :timer.sleep(50)
      result
    end)

  IO.puts("==> Warming up...")
  Process.sleep(1_000) # Warm up
  IO.puts("==> Test cluster running...")
  {:ok, results}
end
# Boots one extra slave node, identified by its short name atom.
def start_node(node) when is_atom(node), do: _spawn_node(node)
# Stops every connected node concurrently, waiting up to 10s for each.
def stop do
  connected = Node.list(:connected)
  tasks = Enum.map(connected, fn node -> Task.async(fn -> stop_node(node) end) end)
  Enum.map(tasks, fn task -> Task.await(task, 10_000) end)
end
# Stops a single slave node; raises via match if the stop fails.
def stop_node(node), do: :ok = :slave.stop(node)
# =================
# Private Functions
# =================

# Starts one slave on the loopback interface, then prepares it
# (code paths, application env, started applications).
defp _spawn_node(node_host) do
  {:ok, node} = :slave.start('127.0.0.1', node_host, _slave_args())
  _set_up_node(node)
  {:ok, node}
end

# VM flags for the slave: load code over inet from this host, share the
# distribution cookie, and mirror the current Logger level.
defp _slave_args do
  '-loader inet -hosts 127.0.0.1 -setcookie #{:erlang.get_cookie()} -logger level #{Logger.level()}'
end

# Synchronous RPC into a remote node.
defp _rpc(node, module, fun, args) do
  :rpc.block_call(node, module, fun, args)
end

# Full node preparation: code paths first, then config, then app start.
defp _set_up_node(node) do
  _add_code_paths(node)
  _transfer_configuration(node)
  _ensure_applications_started(node)
end

# Makes this node's compiled code visible on the slave.
defp _add_code_paths(node) do
  _rpc(node, :code, :add_paths, [:code.get_path()])
end

# Copies every loaded application's environment onto the slave.
defp _transfer_configuration(node) do
  for {app_name, _, _} <- Application.loaded_applications() do
    for {key, val} <- Application.get_all_env(app_name) do
      _rpc(node, Application, :put_env, [app_name, key, val])
    end
  end
end

# Starts Mix (with the local Mix.env) and every application that is
# loaded locally, so the slave mirrors this node.
defp _ensure_applications_started(node) do
  _rpc(node, Application, :ensure_all_started, [:mix])
  _rpc(node, Mix, :env, [Mix.env()])
  for {app_name, _, _} <- Application.loaded_applications() do
    _rpc(node, Application, :ensure_all_started, [app_name])
  end
end
end
| 25.127907 | 102 | 0.633966 |
086de1503d4dfd6beffb5835c927f37e04a1a3c2 | 1,513 | exs | Elixir | test/yuri_template_test.exs | sirikid/yuri_template | 66846ed0f5a8407749408a4ebf526f9c27cb9366 | [
"Apache-2.0"
] | 3 | 2020-05-03T15:30:21.000Z | 2021-05-09T01:35:41.000Z | test/yuri_template_test.exs | sirikid/yuri_template | 66846ed0f5a8407749408a4ebf526f9c27cb9366 | [
"Apache-2.0"
] | 1 | 2021-04-15T20:08:14.000Z | 2021-04-18T20:48:17.000Z | test/yuri_template_test.exs | sirikid/yuri_template | 66846ed0f5a8407749408a4ebf526f9c27cb9366 | [
"Apache-2.0"
] | null | null | null | defmodule YuriTemplateTest do
use ExUnit.Case
test "parse/1 #1" do
  # Plain text with no expressions is still a valid template.
  assert match?({:ok, _}, YuriTemplate.parse("a string"))
end

test "parse/1 #2" do
  assert match?({:ok, _}, YuriTemplate.parse("correct {template}"))
end

test "parse/1 #3" do
  # An unterminated expression must be rejected.
  assert match?({:error, _}, YuriTemplate.parse("incorrect template {"))
end

test "expand/2 #1" do
  # expand/2 accepts raw template strings, not only parsed templates.
  assert match?({:ok, _}, YuriTemplate.expand("a string", []))
end

test "expand/2 #2" do
  assert match?({:ok, _}, YuriTemplate.expand("correct {template}", []))
end

test "expand/2 #3" do
  assert match?({:error, _}, YuriTemplate.expand("incorrect template {", []))
end
test "expand!/2 #1" do
  # `match?(_, ...)` only verifies the call does not raise.
  assert match?(_, YuriTemplate.expand!("a string", []))
end

test "expand!/2 #2" do
  assert match?(_, YuriTemplate.expand!("correct {template}", []))
end

test "expand!/2 #3" do
  # The bang variant raises on a malformed template instead of
  # returning an error tuple.
  assert_raise YuriTemplate.ParseError, fn ->
    YuriTemplate.expand!("incorrect template {", [])
  end
end

test "expand!/2 #4" do
  # Pre-parsed template with no substitution values supplied.
  {:ok, template} = YuriTemplate.RFC6570.parse("{.x,y,z}")
  assert match?(_, YuriTemplate.expand!(template, []))
end
test "parameters/1 #1" do
  # Modifiers (explode `*`, prefix `:10`) are stripped from the
  # reported parameter names.
  {:ok, template} = YuriTemplate.parse("{foo,bar*,baz:10}")
  assert [:foo, :bar, :baz] == YuriTemplate.parameters(template)
end

test "parameters/1 #2" do
  # Parameter names are returned in order of appearance.
  {:ok, template} = YuriTemplate.parse("http://example.com/people/{first_name}-{last_name}")
  assert [:first_name, :last_name] == YuriTemplate.parameters(template)
end
end
| 26.54386 | 94 | 0.634501 |
086e29fda4e2727ad2774ae30692c7f8a026090f | 2,796 | ex | Elixir | lib/chisel/font/bdf/lexer.ex | fstiewitz/chisel | 98d0351e337a83ba21c4bedfd56eb340e0606711 | [
"Apache-2.0"
] | 34 | 2019-12-05T02:15:21.000Z | 2022-03-11T10:07:28.000Z | lib/chisel/font/bdf/lexer.ex | fstiewitz/chisel | 98d0351e337a83ba21c4bedfd56eb340e0606711 | [
"Apache-2.0"
] | 5 | 2019-12-03T04:20:39.000Z | 2021-05-24T23:38:09.000Z | lib/chisel/font/bdf/lexer.ex | fstiewitz/chisel | 98d0351e337a83ba21c4bedfd56eb340e0606711 | [
"Apache-2.0"
] | 2 | 2019-12-22T10:41:00.000Z | 2021-05-24T19:53:54.000Z | defmodule Chisel.Font.BDF.Lexer do
@moduledoc false

# Scanner state:
#   line/col     - current 1-based source position
#   buffer       - codepoints of the lexeme being built, stored reversed
#   tokens       - emitted-but-not-yet-flushed tokens, stored reversed
#   in_string?   - true while inside a double-quoted string literal
#   in_comment?  - declared but never updated anywhere in this module
defstruct line: nil, col: nil, buffer: [], tokens: [], in_string?: false, in_comment?: false

# Characters that terminate a lexeme (outside of string literals).
@token_whitespace [" ", "\t", "\r", "\n"]

# BDF keywords; any other bare word is lexed as a :value.
@keywords [
  "STARTFONT",
  "COMMENT",
  "FONT",
  "SIZE",
  "FONTBOUNDINGBOX",
  "STARTPROPERTIES",
  "ENDPROPERTIES",
  "CHARS",
  "STARTCHAR",
  "ENCODING",
  "SWIDTH",
  "DWIDTH",
  "BBX",
  "BITMAP",
  "ENDCHAR",
  "ENDFONT"
]
# Lazily tokenizes a stream of strings into `{type, value, {line, col}}`
# tuples. Input chunks are split into individual codepoints, then each
# codepoint is fed through `parse_char/2`. Position is advanced *after*
# parsing, so emitted tokens carry the position of the character that
# terminated them.
def scan!(stream) do
  context = %__MODULE__{line: 1, col: 1}

  stream
  |> Stream.flat_map(&String.codepoints/1)
  |> Stream.transform(context, fn ch, context1 ->
    {tokens, context1} = parse_char(ch, context1)
    context1 = handle_position(context1, ch)
    {tokens, context1}
  end)
end
# Opening double quote (not in a string, nothing buffered): enter string
# mode without emitting anything yet.
defp parse_char("\"", %{in_string?: false, buffer: []} = context) do
  context
  |> string_start()
  |> continue()
end

# Closing double quote: leave string mode and emit the buffered content
# as a :value token (the quotes themselves are dropped).
defp parse_char("\"", %{in_string?: true} = context) do
  context
  |> string_stop()
  |> buffer_clean()
  |> emit_token(:value, get_buffer(context))
  |> continue()
end

# Whitespace with an empty buffer: nothing to emit, but a newline still
# produces an :eol token via maybe_eol/2.
defp parse_char(ch, %{in_string?: false, buffer: []} = context) when ch in @token_whitespace do
  context
  |> buffer_clean()
  |> maybe_eol(ch)
  |> continue()
end

# Whitespace terminating a lexeme: classify the buffered text as a
# keyword or plain value, emit it, then possibly emit :eol.
defp parse_char(ch, %{in_string?: false} = context)
     when ch in @token_whitespace do
  {type, value} =
    context
    |> get_buffer()
    |> detect_token()

  context
  |> emit_token(type, value)
  |> buffer_clean()
  |> maybe_eol(ch)
  |> continue()
end

# Any other character (including whitespace while inside a string) is
# accumulated into the current buffer.
defp parse_char(ch, context) do
  context
  |> append_char(ch)
  |> continue()
end
# Classifies a completed lexeme as a reserved keyword or a plain value.
defp detect_token(content) do
  if content in @keywords do
    {:keyword, content}
  else
    {:value, content}
  end
end
# Toggle the in-string flag when a double quote opens or closes a literal.
defp string_start(context), do: %{context | in_string?: true}

defp string_stop(context), do: %{context | in_string?: false}
# Newlines emit an :eol token; every other whitespace emits nothing.
defp maybe_eol(context, "\n"), do: emit_token(context, :eol, "\n")
defp maybe_eol(context, _ch), do: context
# Pushes a codepoint onto the (reversed) lexeme buffer.
defp append_char(%{buffer: buffer} = context, ch) do
  %{context | buffer: [ch | buffer]}
end

# Discards any partially accumulated lexeme.
defp buffer_clean(context), do: %{context | buffer: []}

# Materializes the buffered codepoints (stored reversed) as a string.
defp get_buffer(%{buffer: buffer}) do
  buffer
  |> Enum.reverse()
  |> to_string()
end

# Flushes pending tokens (restoring emission order) in the shape that
# Stream.transform/3 expects: {tokens_to_emit, new_accumulator}.
defp continue(%{tokens: []} = context), do: {[], context}

defp continue(%{tokens: tokens} = context),
  do: {Enum.reverse(tokens), %{context | tokens: []}}
# Prepends a token tagged with the current scanner position.
defp emit_token(%{tokens: tokens, line: line, col: col} = context, type, value) do
  %{context | tokens: [{type, value, {line, col}} | tokens]}
end
# Advances the 1-based source position: a newline moves to column 1 of
# the next line; any other character moves one column to the right.
#
# Fix: removed the trailing `handle_position(ctx, _ch)` fallback clause.
# It could only match a map without a :col key, which never occurs here —
# every caller passes a %__MODULE__{} context, which always has :col, so
# the second clause already matches all real inputs.
defp handle_position(%{line: line} = context, "\n"),
  do: %{context | line: line + 1, col: 1}

defp handle_position(%{col: col} = context, _ch),
  do: %{context | col: col + 1}
end
| 21.022556 | 97 | 0.578326 |
086e52882254f2257dc112a44e3bd13ed01448c9 | 3,598 | ex | Elixir | lib/trademark_free_strategic_land_warfare/players/semi_random_another.ex | WizardOfOgz/trademark_free_strategic_land_warfare | a14287eab1f60c13d43f70ac2309391c291a6704 | [
"MIT"
] | 1 | 2020-06-30T16:37:50.000Z | 2020-06-30T16:37:50.000Z | lib/trademark_free_strategic_land_warfare/players/semi_random_another.ex | WizardOfOgz/trademark_free_strategic_land_warfare | a14287eab1f60c13d43f70ac2309391c291a6704 | [
"MIT"
] | null | null | null | lib/trademark_free_strategic_land_warfare/players/semi_random_another.ex | WizardOfOgz/trademark_free_strategic_land_warfare | a14287eab1f60c13d43f70ac2309391c291a6704 | [
"MIT"
] | 13 | 2020-06-04T23:10:19.000Z | 2020-06-05T01:25:26.000Z | defmodule TrademarkFreeStrategicLandWarfare.Players.SemiRandomAnother do
alias TrademarkFreeStrategicLandWarfare.{Board, Player, Piece}
@behaviour Player
@type direction() :: :north | :west | :east | :south
@type count() :: Integer.t()
@type state() :: any()
@spec name() :: binary()
# Display name shown for this player implementation.
def name(), do: "Semi-Random-Another"
# should return a list with 4 lists of 10 piece-name atoms (:miner, :colonel, etc) per list
@spec initial_pieces_placement() :: nonempty_list([Atom.t(), ...])
# Expands the piece-count map into a flat list of piece-name atoms,
# shuffles it, and splits it into four rows of ten.
def initial_pieces_placement() do
  piece_names =
    Enum.flat_map(Board.piece_name_counts(), fn {type, count} ->
      for _ <- 1..count, do: type
    end)

  piece_names
  |> Enum.shuffle()
  |> Enum.chunk_every(10)
end
@spec turn(
%TrademarkFreeStrategicLandWarfare.Board{},
%TrademarkFreeStrategicLandWarfare.Player{},
state()
) :: {binary(), direction(), count(), state()}
# Chooses this player's next move: collect every movable piece (bombs and
# flags are excluded), bucket the legal single-step moves by outcome or
# direction, then pick a random move from the first non-empty bucket in
# preference order. The opaque `state` is threaded back unchanged.
def turn(%Board{rows: rows} = board, %Player{number: number}, state) do
  # find all eligible pieces
  move_partitioned_pieces =
    rows
    |> List.flatten()
    |> Enum.flat_map(fn
      %Piece{player: ^number, name: name} = piece when name not in [:bomb, :flag] -> [piece]
      _ -> []
    end)
    |> partition_by_move(board)

  # select from them, biasing towards pieces that can win, then those that can advance,
  # then west/east, then move backward
  # (north/south are flipped for the second player via maybe_invert_player_direction)
  eligible_moves =
    Enum.find(
      [
        Map.get(move_partitioned_pieces, :win, []),
        Map.get(
          move_partitioned_pieces,
          Board.maybe_invert_player_direction(:north, number),
          []
        ),
        Map.get(move_partitioned_pieces, :west, []) ++
          Map.get(move_partitioned_pieces, :east, []),
        Map.get(
          move_partitioned_pieces,
          Board.maybe_invert_player_direction(:south, number),
          []
        )
      ],
      fn list -> length(list) > 0 end
    )

  # randomly select one from the list returned
  case eligible_moves do
    nil ->
      # No bucket had any moves — the player is completely blocked.
      raise "no move possible"

    moves ->
      moves
      |> Enum.random()
      |> Tuple.append(state)
  end
end
# Buckets every legal single-square move for `pieces` on `board`.
# Keys are :win (flag capture) or the direction moved; values are lists
# of `{piece_uuid, direction, 1}` tuples ready to be returned as a turn.
defp partition_by_move(pieces, board) do
  # TODO: reduce_while and halt when preferred one found (win, progressing forward)
  Enum.reduce(pieces, %{}, fn piece, acc ->
    Enum.reduce([:north, :west, :east, :south], acc, fn direction, dir_acc ->
      # Probe a 1-square move; the returned tag tells us whether the move
      # is legal and what it would do.
      case Board.move(board, piece.player, piece.uuid, direction, 1) do
        {:ok, :win, _} ->
          # this shouldn't ever get hit, because we'll never know as a player
          # where the opponent's flag is without trying to capture it. putting
          # this here for that note, and just in case.
          Map.update(
            dir_acc,
            :win,
            [{piece.uuid, direction, 1}],
            &[{piece.uuid, direction, 1} | &1]
          )

        {:error, :unknown_result} ->
          # allowed move, but masked piece. include in the possibles.
          Map.update(
            dir_acc,
            direction,
            [{piece.uuid, direction, 1}],
            &[{piece.uuid, direction, 1} | &1]
          )

        {:ok, %Board{}} ->
          # allowed move -- no differentiation on whether attack happened
          Map.update(
            dir_acc,
            direction,
            [{piece.uuid, direction, 1}],
            &[{piece.uuid, direction, 1} | &1]
          )

        _ ->
          # Illegal move (off-board, blocked, etc.) — skip this direction.
          dir_acc
      end
    end)
  end)
end
end
| 31.017241 | 94 | 0.555586 |
086f2aee3918c6f864c3ccb0ddbf7bf4a0fd3497 | 286 | exs | Elixir | priv/repo/migrations/20180111202738_create_users.exs | djordje/budget_app.backend | 3febe64892e700f3174b8eddbc4b96260c444308 | [
"MIT"
] | null | null | null | priv/repo/migrations/20180111202738_create_users.exs | djordje/budget_app.backend | 3febe64892e700f3174b8eddbc4b96260c444308 | [
"MIT"
] | null | null | null | priv/repo/migrations/20180111202738_create_users.exs | djordje/budget_app.backend | 3febe64892e700f3174b8eddbc4b96260c444308 | [
"MIT"
] | null | null | null | defmodule BudgetApp.Repo.Migrations.CreateUsers do
use Ecto.Migration
def change do
create table(:users) do
add :email, :string, size: 50, unique: true
add :secret, :string, size: 128
timestamps()
end
create unique_index(:users, [:email])
end
end
| 19.066667 | 50 | 0.660839 |
086f2f7d058963fe4ab5ffc4a59529a1715ef941 | 233 | ex | Elixir | lib/zygalski.ex | lucidstack/zygalski | 6c39f8f6cd47852f15e8d2ea800b407059235874 | [
"MIT"
] | null | null | null | lib/zygalski.ex | lucidstack/zygalski | 6c39f8f6cd47852f15e8d2ea800b407059235874 | [
"MIT"
] | null | null | null | lib/zygalski.ex | lucidstack/zygalski | 6c39f8f6cd47852f15e8d2ea800b407059235874 | [
"MIT"
] | null | null | null | defmodule Zygalski do
use Application
alias Zygalski.Router
def start(_type, _args) do
Plug.Adapters.Cowboy.http Router, [], port: port
{:ok, self()}
end
defp port,
do: Application.get_env(:zygalski, :port)
end
| 17.923077 | 52 | 0.686695 |
086f30c6d8d0d79e34d88a3c2c47ab5d734d43ee | 3,490 | ex | Elixir | lib/splitwise/expenses.ex | nathanbegbie/ex_splitwise | 6de8b9f59db9834b342b86dfcd5c41354f349e5d | [
"MIT"
] | 3 | 2019-09-29T04:15:29.000Z | 2021-04-02T14:52:04.000Z | lib/splitwise/expenses.ex | nathanbegbie/ex_splitwise | 6de8b9f59db9834b342b86dfcd5c41354f349e5d | [
"MIT"
] | null | null | null | lib/splitwise/expenses.ex | nathanbegbie/ex_splitwise | 6de8b9f59db9834b342b86dfcd5c41354f349e5d | [
"MIT"
] | 1 | 2022-02-22T15:32:16.000Z | 2022-02-22T15:32:16.000Z | defmodule ExSplitwise.Expenses do
@moduledoc """
This module defines the functions to manage Splitwise expenses.
"""
alias ExSplitwise.Client
@doc """
Get user's expenses.
## Example
iex> ExSplitwise.Expenses.all()
%ExSplitwise.Client.Response{
body: %{
"expenses" => [
%{ ... },
%{ ... }
]
},
headers: [
...
],
status: 200
}
"""
# GET /get_expenses — expenses visible to the authenticated user.
def all(), do: Client.get!("/api/v3.0/get_expenses")
@doc """
Get expense by id.
## Example
iex> ExSplitwise.Expenses.get(28091891)
%ExSplitwise.Client.Response{
body: %{
"expense" => %{
...
}
},
headers: [
...
],
status: 200
}
"""
# GET /get_expense/:id — a single expense by id.
def get(id), do: Client.get!("/api/v3.0/get_expense/#{id}")
@doc """
Create new expense.
In the response body will include a map with the errors, or a list with the created expense.
To pay debt, create a new expense with the value `payment: true`.
## Example
iex> expense = %ExSplitwise.Models.Expense{
cost: 500,
currency_code: "UYU",
group_id: 987675,
users: [
%{id: 12345, paid_share: 0, owed_share: 500},
%{id: 23456, paid_share: 500, owed_share: 0},
],
category_id: 18,
description: "Sample expense",
creation_method: "equal",
payment: false
}
iex> ExSplitwise.Expenses.create(expense)
%ExSplitwise.Client.Response{
body: %{
"errors" => %{},
"expenses" => [
%{ ... }
]
},
headers: [
{"Content-Type", "application/json; charset=utf-8"},
],
status: 200
}
## Example: paying debt
iex> expense = %ExSplitwise.Models.Expense{
cost: 100,
currency_code: "UYU",
group_id: 987675,
users: [
%{id: 12345, paid_share: 100, owed_share: 0},
%{id: 23456, paid_share: 0, owed_share: 100},
],
category_id: 18,
description: "Payment",
payment: true
}
iex> ExSplitwise.Expenses.create(expense)
%ExSplitwise.Client.Response{
body: %{
"errors" => %{},
"expenses" => [
%{ ... }
]
},
headers: [
...
],
status: 200
}
## Example: error response
iex> ExSplitwise.Expenses.create(expense)
%ExSplitwise.Client.Response{
body: %{
"errors" => %{
"base" => ["Splitwise cannot sync this expense. For further help, contact ..."]
},
"expenses" => []
},
headers: [
...
],
status: 200
}
"""
# POST /create_expense with the expense struct converted to request params.
def create(data) do
  payload = ExSplitwise.Models.Expense.convert(data)
  Client.post!("/api/v3.0/create_expense", payload)
end
@doc """
Update expense by id.
"""
# POST /update_expense/:id with the expense struct converted to request params.
def update(id, data) do
  payload = ExSplitwise.Models.Expense.convert(data)
  Client.post!("/api/v3.0/update_expense/#{id}", payload)
end
@doc """
Delete expense by id.
## Example
iex> ExSplitwise.Expenses.delete(id)
%ExSplitwise.Client.Response{
body: %{"errors" => %{}, "success" => true},
headers: [
...
],
status: 200
}
"""
# POST /delete_expense/:id — no request body needed.
def delete(id), do: Client.post!("/api/v3.0/delete_expense/#{id}")
end
| 22.371795 | 94 | 0.495415 |
086f3322dad6f2d7eba00e94715acfa882bb17de | 2,004 | exs | Elixir | test/hipchat_test.exs | ymtszw/hipchat_elixir | 36d8e329f4f24d3cf1f3aacbeff1eaf603287904 | [
"MIT"
] | 6 | 2016-12-19T21:11:05.000Z | 2018-06-15T03:36:47.000Z | test/hipchat_test.exs | ymtszw/hipchat_elixir | 36d8e329f4f24d3cf1f3aacbeff1eaf603287904 | [
"MIT"
] | 2 | 2018-06-06T14:51:59.000Z | 2018-06-12T04:32:34.000Z | test/hipchat_test.exs | ymtszw/hipchat_elixir | 36d8e329f4f24d3cf1f3aacbeff1eaf603287904 | [
"MIT"
] | 4 | 2017-01-19T12:23:54.000Z | 2018-06-06T15:00:07.000Z | defmodule HipchatTest do
use ExUnit.Case
use ExUnitProperties
alias Hipchat.{Typespec, Httpc.Response}
all_v2_modules =
:code.lib_dir(:hipchat_elixir, :ebin)
|> File.ls!()
|> Enum.filter(&String.starts_with?(&1, "Elixir.Hipchat.V2"))
|> Enum.map(fn beam_file_name ->
beam_file_name
|> String.replace_suffix(".beam", "")
|> String.to_existing_atom()
end)
@token_from_env System.get_env("HIPCHAT_ACCESS_TOKEN")
@dummy_client Hipchat.ApiClient.new(@token_from_env || "dummy_access_token", true)
setup do
on_exit(fn ->
Application.put_env(:hipchat_elixir, :serializer, Poison)
end)
end
# Here we are using stream_data just for generating a single random parameter set per API,
# since overloading HipChat Cloud server with too much requests are not desirable.
# When `@dummy_client` uses a dummy token, all requests should fail with 401 Unauthorized
# (provided all APIs require authentication).
for mod <- all_v2_modules, {{fun, arity}, {[[Hipchat.Client, :t] | other_input_types], _output_type}} <- Typespec.extract(mod) do
property "#{inspect(mod)}.#{fun}/#{arity} should properly send request" do
args_generator = unquote(other_input_types) |> Enum.map(fn type -> Typespec.generator(type) end) |> StreamData.fixed_list()
check all args <- args_generator, not "" in args, max_runs: 1 do
assert {:ok, %Response{} = res} = apply(unquote(mod), unquote(fun), [@dummy_client | args])
assert_status(res)
Application.put_env(:hipchat_elixir, :serializer, :form)
assert {:ok, %Response{} = res} = apply(unquote(mod), unquote(fun), [@dummy_client | args])
assert_status(res)
end
end
end
if @token_from_env do
defp assert_status(%Response{status: status}) when status in [202, 400, 403, 404, 429], do: :ok
else
defp assert_status(%Response{status: 401}), do: :ok
end
defp assert_status(res), do: flunk("Unexpected response: #{inspect(res)}")
end
| 40.897959 | 131 | 0.687625 |
086f391cb87893e149c2d7b84a971f79be5e4930 | 547 | exs | Elixir | apps/management/mix.exs | msomji/smartcitiesdata | fc96abc1ef1306f7af6bd42bbcb4ed041a6d922c | [
"Apache-2.0"
] | null | null | null | apps/management/mix.exs | msomji/smartcitiesdata | fc96abc1ef1306f7af6bd42bbcb4ed041a6d922c | [
"Apache-2.0"
] | null | null | null | apps/management/mix.exs | msomji/smartcitiesdata | fc96abc1ef1306f7af6bd42bbcb4ed041a6d922c | [
"Apache-2.0"
] | null | null | null | defmodule Management.MixProject do
use Mix.Project
def project do
[
app: :management,
version: "0.1.0",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.8",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
def application do
[
extra_applications: [:logger]
]
end
defp deps do
[
{:brook, "~> 0.4"},
{:credo, "~> 1.3", only: [:dev]}
]
end
end
| 17.645161 | 45 | 0.499086 |
086f4343edea9d8fa8e40af06db2a76b4070945b | 113 | exs | Elixir | exercises/02-higher-order-functions/02-goodbye-loops/10-all/student.exs | engelenv/distributed-applications | e78693a4ffa22b653e2e840427bcc1704040af4e | [
"BSD-3-Clause"
] | null | null | null | exercises/02-higher-order-functions/02-goodbye-loops/10-all/student.exs | engelenv/distributed-applications | e78693a4ffa22b653e2e840427bcc1704040af4e | [
"BSD-3-Clause"
] | null | null | null | exercises/02-higher-order-functions/02-goodbye-loops/10-all/student.exs | engelenv/distributed-applications | e78693a4ffa22b653e2e840427bcc1704040af4e | [
"BSD-3-Clause"
] | null | null | null | defmodule Grades do
def all_passed?(grades) do
Enum.all?(grades, fn x -> x == :na or x >= 10 end)
end
end | 22.6 | 54 | 0.628319 |
086f6e00b4956b4be40e86d078f4c00c37e364ee | 2,687 | ex | Elixir | lib/flamelex/fluxus/reducers/radix_reducer.ex | JediLuke/franklin | 8eb77a342547de3eb43d28dcf9f835ff443ad489 | [
"Apache-2.0"
] | 1 | 2020-02-09T23:04:33.000Z | 2020-02-09T23:04:33.000Z | lib/flamelex/fluxus/reducers/radix_reducer.ex | JediLuke/franklin | 8eb77a342547de3eb43d28dcf9f835ff443ad489 | [
"Apache-2.0"
] | null | null | null | lib/flamelex/fluxus/reducers/radix_reducer.ex | JediLuke/franklin | 8eb77a342547de3eb43d28dcf9f835ff443ad489 | [
"Apache-2.0"
] | null | null | null | defmodule Flamelex.Fluxus.RadixReducer do
@moduledoc """
The RootReducer for all flamelex actions.
These pure-functions are called by ActionListener, to handle specific
actions within the application. Every action that gets processed, is
routed down to the sub-reducers, through this module. Every possible
action, must also be declared inside this file.
A reducer is a function that determines changes to an application's state.
All the reducers in Flamelex.Fluxus (and this includes both action
handlers, and user-input handlers) work the same way - they take in
the application state, & an action, & return an updated state. They
may also fire off side-effects along the way, including further actions.
```
A reducer is a function that determines changes to an application's state.
It uses the action it receives to determine this change. We have tools,
like Redux, that help manage an application's state changes in a single
store so that they behave consistently.
```
https://css-tricks.com/understanding-how-reducers-are-used-in-redux/
Here we have the function which `reduces` a radix_state and an action.
Our main way of handling actions is simply to broadcast them on to the
`:actions` broker, which will forward it to all the main Manager processes
in turn (GUiManager, BufferManager, AgentManager, etc.)
The reason for this is, what's going to happen is, say I send a command
like `open_buffer` to open my journal. We spin up this action handler
task - say that takes 2 seconds to run for some reason. If I send the
same action again, another process will spin up. Eventually, they're
both going to finish, and whoever is getting the results (FluxusRadix)
is going to get 2 messages, and then have to handle the situation of
dealing with double-processes of actions (yuck!)
what we want to do instead is, the reducer broadcasts the message to
the "actions" channel - all the managers are able to react to this event.
"""
require Logger
def process(radix_state, {reducer, action}) when is_atom(reducer) do
reducer.process(radix_state, action)
end
@memex_actions [
:open_memex, :close_memex
]
def process(%{memex: %{active?: false}}, action) when action in @memex_actions do
Logger.warn "#{__MODULE__} ignoring a memex action, because the memex is set to `inactive`"
:ignore
end
def process(%{memex: %{active?: true}} = radix_state, action) when action in @memex_actions do
Flamelex.Fluxus.Reducers.Memex.process(radix_state, action)
end
def process(radix_state, action) do
{:error, "RootReducer bottomed-out! No match was found."}
end
end
| 33.5875 | 96 | 0.743952 |
086f76497387d38e9a1a4330a42aa42354280a2f | 13,610 | exs | Elixir | test/mix/tasks/hex.build_test.exs | hexpm/hex | e732ccb3daf13a7c271b3c8442b5db6832a7f298 | [
"Apache-2.0"
] | 824 | 2015-01-05T09:12:36.000Z | 2022-03-28T12:02:29.000Z | test/mix/tasks/hex.build_test.exs | hexpm/hex | e732ccb3daf13a7c271b3c8442b5db6832a7f298 | [
"Apache-2.0"
] | 737 | 2015-01-01T05:48:46.000Z | 2022-03-29T12:56:12.000Z | test/mix/tasks/hex.build_test.exs | hexpm/hex | e732ccb3daf13a7c271b3c8442b5db6832a7f298 | [
"Apache-2.0"
] | 220 | 2015-03-14T17:55:11.000Z | 2022-03-23T22:17:07.000Z | defmodule Mix.Tasks.Hex.BuildTest do
  use HexTest.IntegrationCase

  # True when `mix hex.build` produced "<name>.tar" in the current directory.
  defp package_created?(name) do
    File.exists?("#{name}.tar")
  end

  # Unpacks a built package: reads the outer tarball into memory, then
  # extracts the inner contents.tar.gz into `path`.
  defp extract(name, path) do
    {:ok, files} = :mix_hex_erl_tar.extract(name, [:memory])
    files = Enum.into(files, %{})
    :ok = :mix_hex_erl_tar.extract({:binary, files['contents.tar.gz']}, [:compressed, cwd: path])
  end

  test "create" do
    Process.put(:hex_test_app_name, :build_app_name)
    Mix.Project.push(ReleaseSimple.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())
      File.write!("myfile.txt", "hello")
      File.chmod!("myfile.txt", 0o100644)

      Mix.Tasks.Hex.Build.run([])
      assert package_created?("build_app_name-0.0.1")
    end)
  after
    purge([ReleaseSimple.MixProject])
  end

  test "create with missing licenses" do
    Process.put(:hex_test_app_name, :release_missing_licenses)
    Mix.Project.push(ReleaseMissingLicenses.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())
      File.write!("myfile.txt", "hello")

      Mix.Tasks.Hex.Build.run([])
      # Build succeeds but warns (yellow ANSI output) about missing licenses.
      assert_received {:mix_shell, :info, ["\e[33m\nYou have not included any licenses\n\e[0m"]}
      assert package_created?("release_missing_licenses-0.0.1")
    end)
  after
    purge([ReleaseMissingLicenses.MixProject])
  end

  test "create with invalid licenses" do
    Process.put(:hex_test_app_name, :release_invalid_licenses)
    Mix.Project.push(ReleaseInvalidLicenses.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())
      File.write!("myfile.txt", "hello")

      Mix.Tasks.Hex.Build.run([])
      # Non-SPDX license names warn but do not fail the build.
      assert_received {:mix_shell, :info,
                       [
                         "\e[33mThe following licenses are not recognized by SPDX:\n * CustomLicense\n\nConsider using licenses from https://spdx.org/licenses\e[0m"
                       ]}

      assert package_created?("release_invalid_licenses-0.0.1")
    end)
  after
    purge([ReleaseInvalidLicenses.MixProject])
  end

  test "create private package with invalid licenses" do
    Process.put(:hex_test_app_name, :release_repo_invalid_licenses)
    Mix.Project.push(ReleaseRepoInvalidLicenses.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())
      File.write!("myfile.txt", "hello")

      Mix.Tasks.Hex.Build.run([])
      # Private (organization) packages skip the SPDX license warning.
      refute_received {:mix_shell, :info,
                       [
                         "\e[33m\nYou have chosen 1 or more licenses that are not recognized by SPDX\nConsider using a license from https://spdx.org/licenses/\n\e[0m"
                       ]}

      assert package_created?("release_repo_invalid_licenses-0.0.1")
    end)
  after
    purge([ReleaseRepoInvalidLicenses.MixProject])
  end
  test "create with package name" do
    Process.put(:hex_test_package_name, :build_package_name)
    Mix.Project.push(ReleaseName.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())
      File.write!("myfile.txt", "hello")
      File.chmod!("myfile.txt", 0o100644)

      Mix.Tasks.Hex.Build.run([])
      assert package_created?("build_package_name-0.0.1")
    end)
  after
    purge([ReleaseName.MixProject])
  end

  test "create with files" do
    Process.put(:hex_test_app_name, :build_with_files)
    Mix.Project.push(ReleaseFiles.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())
      File.mkdir!("dir")
      File.mkdir!("empty_dir")
      File.write!("dir/.dotfile", "")
      File.ln_s("dir2", "dir/a_link_to_dir2")
      File.mkdir!("dir/dir2")
      File.ln_s("empty_dir", "link_dir")
      # mtime_dir = File.stat!("dir").mtime
      mtime_empty_dir = File.stat!("empty_dir").mtime
      mtime_file = File.stat!("dir/.dotfile").mtime
      mtime_link = File.stat!("link_dir").mtime
      File.write!("myfile.txt", "hello")
      File.write!("executable.sh", "world")
      File.write!("dir/dir2/test.txt", "and")
      File.chmod!("myfile.txt", 0o100644)
      File.chmod!("executable.sh", 0o100755)
      File.chmod!("dir/dir2/test.txt", 0o100644)

      Mix.Tasks.Hex.Build.run([])
      extract("build_with_files-0.0.1.tar", "unzip")

      # Check that mtimes are not retained for files, directories and symlinks.
      # erl_tar does not set mtime from tar if a directory contains files.
      # assert File.stat!("unzip/dir").mtime != mtime_dir
      assert File.stat!("unzip/empty_dir").mtime != mtime_empty_dir
      assert File.stat!("unzip/dir/.dotfile").mtime != mtime_file
      assert File.stat!("unzip/link_dir").mtime != mtime_link

      # Symlinks and directories survive the pack/unpack round trip as-is.
      assert Hex.Stdlib.file_lstat!("unzip/link_dir").type == :symlink
      assert Hex.Stdlib.file_lstat!("unzip/dir/a_link_to_dir2").type == :symlink
      assert Hex.Stdlib.file_lstat!("unzip/empty_dir").type == :directory
      assert File.read!("unzip/myfile.txt") == "hello"
      assert File.read!("unzip/dir/.dotfile") == ""
      assert File.read!("unzip/dir/dir2/test.txt") == "and"

      # File permission bits are preserved.
      assert File.stat!("unzip/myfile.txt").mode == 0o100644
      assert File.stat!("unzip/executable.sh").mode == 0o100755
    end)
  after
    purge([ReleaseFiles.MixProject])
  end

  test "create with excluded files" do
    Process.put(:hex_test_app_name, :build_with_excluded_files)
    Mix.Project.push(ReleaseExcludePatterns.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())
      File.write!("myfile.txt", "hello")
      File.write!("exclude.txt", "world")
      File.chmod!("myfile.txt", 0o100644)
      File.chmod!("exclude.txt", 0o100644)

      Mix.Tasks.Hex.Build.run([])
      extract("build_with_excluded_files-0.0.1.tar", "unzip")

      # Only the non-excluded file should make it into the package.
      assert File.ls!("unzip/") == ["myfile.txt"]
      assert File.read!("unzip/myfile.txt") == "hello"
      assert File.stat!("unzip/myfile.txt").mode == 0o100644
    end)
  after
    purge([ReleaseExcludePatterns.MixProject])
  end

  test "create with custom output path" do
    Process.put(:hex_test_app_name, :build_custom_output_path)
    Mix.Project.push(Sample.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())
      File.write!("mix.exs", "mix.exs")
      File.chmod!("mix.exs", 0o100644)
      File.write!("myfile.txt", "hello")
      File.chmod!("myfile.txt", 0o100644)

      # `-o` overrides the default "<name>-<version>.tar" output location.
      Mix.Tasks.Hex.Build.run(["-o", "custom.tar"])
      assert File.exists?("custom.tar")
    end)
  after
    purge([Sample.MixProject])
  end

  test "create with deps" do
    Process.put(:hex_test_app_name, :build_with_deps)
    Mix.Project.push(ReleaseDeps.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())
      Mix.Tasks.Deps.Get.run([])

      error_msg = "Stopping package build due to errors.\nMissing metadata fields: links"

      # NOTE(review): `Mix.Tasks.Hex.Build.run/1` is expected to raise here, so
      # the assertions after it inside this anonymous function never execute.
      assert_raise Mix.Error, error_msg, fn ->
        Mix.Tasks.Hex.Build.run([])
        assert_received {:mix_shell, :error, ["No files"]}
        refute package_created?("release_b-0.0.2")
      end
    end)
  after
    purge([ReleaseDeps.MixProject])
  end
  # TODO: convert to integration test
  test "create with custom repo deps" do
    Process.put(:hex_test_app_name, :build_with_custom_repo_deps)
    Mix.Project.push(ReleaseCustomRepoDeps.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())
      build = Mix.Tasks.Hex.Build.prepare_package()

      # Pattern match: requirements carry the repository each dep came from.
      assert [
               %{name: "ex_doc", repository: "hexpm"},
               %{name: "ecto", repository: "my_repo"}
             ] = build.meta.requirements
    end)
  after
    purge([ReleaseCustomRepoDeps.MixProject])
  end

  test "errors when there is a git dependency" do
    Process.put(:hex_test_app_name, :build_git_dependency)
    Mix.Project.push(ReleaseGitDeps.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())

      error_msg =
        "Stopping package build due to errors.\n" <>
          "Dependencies excluded from the package (only Hex packages can be dependencies): ecto, gettext"

      assert_raise Mix.Error, error_msg, fn ->
        Mix.Tasks.Hex.Build.run([])
      end
    end)
  after
    purge([ReleaseGitDeps.MixProject])
  end

  test "errors with app false dependency" do
    Process.put(:hex_test_app_name, :build_app_false_dependency)
    Mix.Project.push(ReleaseAppFalseDep.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())
      error_msg = "Can't build package when :app is set for dependency ex_doc, remove `app: ...`"

      assert_raise Mix.Error, error_msg, fn ->
        Mix.Tasks.Hex.Build.run([])
      end
    end)
  after
    purge([ReleaseAppFalseDep.MixProject])
  end

  test "create with meta" do
    Process.put(:hex_test_app_name, :build_with_meta)
    Mix.Project.push(ReleaseMeta.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())

      error_msg =
        "Stopping package build due to errors.\n" <> "Missing files: missing.txt, missing/*"

      # NOTE(review): the build raises before the assert_received calls below
      # run; they are effectively dead code inside this callback.
      assert_raise Mix.Error, error_msg, fn ->
        File.write!("myfile.txt", "hello")
        Mix.Tasks.Hex.Build.run([])
        assert_received {:mix_shell, :info, ["Building release_c 0.0.3"]}
        assert_received {:mix_shell, :info, [" Files:"]}
        assert_received {:mix_shell, :info, [" myfile.txt"]}
      end
    end)
  after
    purge([ReleaseMeta.MixProject])
  end

  test "reject package if description is missing" do
    Process.put(:hex_test_app_name, :build_no_description)
    Mix.Project.push(ReleaseNoDescription.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())

      error_msg =
        "Stopping package build due to errors.\n" <>
          "Missing metadata fields: description, licenses, links"

      assert_raise Mix.Error, error_msg, fn ->
        Mix.Tasks.Hex.Build.run([])
        assert_received {:mix_shell, :info, ["Building release_e 0.0.1"]}
        refute package_created?("release_e-0.0.1")
      end
    end)
  after
    purge([ReleaseNoDescription.MixProject])
  end

  test "error if description is too long" do
    Process.put(:hex_test_app_name, :build_too_long_description)
    Mix.Project.push(ReleaseTooLongDescription.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())

      error_msg =
        "Stopping package build due to errors.\n" <>
          "Missing metadata fields: licenses, links\n" <>
          "Package description is too long (exceeds 300 characters)"

      assert_raise Mix.Error, error_msg, fn ->
        Mix.Tasks.Hex.Build.run([])
      end
    end)
  after
    purge([ReleaseTooLongDescription.MixProject])
  end
  test "error if package has unstable dependencies" do
    Process.put(:hex_test_app_name, :build_unstable_deps)
    Mix.Project.push(ReleasePreDeps.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())
      error_msg = "A stable package release cannot have a pre-release dependency"

      assert_raise Mix.Error, error_msg, fn ->
        Mix.Tasks.Hex.Build.run([])
      end
    end)
  after
    purge([ReleasePreDeps.MixProject])
  end

  test "error if misspelled organization" do
    Process.put(:hex_test_app_name, :build_misspelled_organization)
    Mix.Project.push(ReleaseMisspelledOrganization.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())
      error_msg = "Invalid Hex package config :organisation, use spelling :organization"

      assert_raise Mix.Error, error_msg, fn ->
        Mix.Tasks.Hex.Build.run([])
      end
    end)
  after
    purge([ReleaseMisspelledOrganization.MixProject])
  end

  test "warn if misplaced config" do
    Process.put(:hex_test_app_name, :build_warn_config_location)
    Mix.Project.push(ReleaseOrganizationWrongLocation.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())
      File.write!("myfile.txt", "hello")
      File.chmod!("myfile.txt", 0o100644)

      # :organization outside the :package key still builds, but warns.
      Mix.Tasks.Hex.Build.run([])
      assert_received {:mix_shell, :info, ["Building build_warn_config_location 0.0.1"]}

      message =
        "\e[33mMix project configuration :organization belongs under the :package key, " <>
          "did you misplace it?\e[0m"

      assert_received {:mix_shell, :info, [^message]}
    end)
  after
    purge([ReleaseOrganizationWrongLocation.MixProject])
  end

  test "error if hex_metadata.config is included" do
    Process.put(:hex_test_app_name, :build_reserved_file)
    Mix.Project.push(ReleaseIncludeReservedFile.MixProject)

    in_tmp(fn ->
      Hex.State.put(:cache_home, tmp_path())

      error_msg =
        "Stopping package build due to errors.\n" <>
          "Do not include this file: hex_metadata.config"

      # hex_metadata.config is generated by Hex itself and must not be packaged.
      assert_raise Mix.Error, error_msg, fn ->
        File.write!("hex_metadata.config", "hello")
        Mix.Tasks.Hex.Build.run([])
      end
    end)
  after
    purge([ReleaseIncludeReservedFile.MixProject])
  end

  test "build and unpack" do
    Process.put(:hex_test_app_name, :build_and_unpack)
    Mix.Project.push(Sample.MixProject)

    in_fixture("sample", fn ->
      Hex.State.put(:cache_home, tmp_path())
      File.write!("myfile.txt", "hello")
      File.chmod!("myfile.txt", 0o100644)

      # --unpack leaves an extracted directory instead of a tarball.
      Mix.Tasks.Hex.Build.run(["--unpack"])
      assert_received({:mix_shell, :info, ["Saved to build_and_unpack-0.0.1"]})
      assert File.exists?("build_and_unpack-0.0.1/mix.exs")
      assert File.exists?("build_and_unpack-0.0.1/hex_metadata.config")

      # -o chooses the unpack destination.
      Mix.Tasks.Hex.Build.run(["--unpack", "-o", "custom"])
      assert_received({:mix_shell, :info, ["Saved to custom"]})
      assert File.exists?("custom/mix.exs")
      assert File.exists?("custom/hex_metadata.config")
    end)
  after
    purge([Sample.MixProject])
  end
end
| 30.515695 | 166 | 0.657384 |
086f79cdf79751f197c78e359756c2ef94779d81 | 2,063 | exs | Elixir | phoenix/phoenix_tailwind/config/prod.exs | toff63/elixir-sandbox | ac63f839b97ccb7a11e07fb1e505b7cd24e19cb3 | [
"Apache-2.0"
] | null | null | null | phoenix/phoenix_tailwind/config/prod.exs | toff63/elixir-sandbox | ac63f839b97ccb7a11e07fb1e505b7cd24e19cb3 | [
"Apache-2.0"
] | null | null | null | phoenix/phoenix_tailwind/config/prod.exs | toff63/elixir-sandbox | ac63f839b97ccb7a11e07fb1e505b7cd24e19cb3 | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
#
# NOTE(review): this file appears to use the legacy `use Mix.Config` header;
# newer Elixir versions recommend `import Config` instead - confirm before
# changing, as it affects every config file in the project.
config :phoenix_tailwind, PhoenixTailwindWeb.Endpoint,
  url: [host: "example.com", port: 80],
  cache_static_manifest: "priv/static/cache_manifest.json"

# Do not print debug messages in production
config :logger, level: :info

# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
#     config :phoenix_tailwind, PhoenixTailwindWeb.Endpoint,
#       ...
#       url: [host: "example.com", port: 443],
#       https: [
#         port: 443,
#         cipher_suite: :strong,
#         keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
#         certfile: System.get_env("SOME_APP_SSL_CERT_PATH"),
#         transport_options: [socket_opts: [:inet6]]
#       ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
#     config :phoenix_tailwind, PhoenixTailwindWeb.Endpoint,
#       force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.

# Finally import the config/prod.secret.exs which loads secrets
# and configuration from environment variables.
import_config "prod.secret.exs"
| 36.839286 | 66 | 0.721764 |
086f85e8c82d89a48c6bd681ac640ed8c90194f7 | 3,751 | ex | Elixir | lib/kommissar/resources.ex | patatoid/kommissar | c6c06ba29257ba7641daf03e8e4a94fa967ecfd6 | [
"MIT"
] | null | null | null | lib/kommissar/resources.ex | patatoid/kommissar | c6c06ba29257ba7641daf03e8e4a94fa967ecfd6 | [
"MIT"
] | null | null | null | lib/kommissar/resources.ex | patatoid/kommissar | c6c06ba29257ba7641daf03e8e4a94fa967ecfd6 | [
"MIT"
] | null | null | null | defmodule Kommissar.Resources do
  @moduledoc """
  The Resources context.

  Exposes CRUD operations for the `Client` and `Commission` schemas.
  Read operations preload the `:tags` association.
  """

  import Ecto.Query, warn: false
  alias Kommissar.Repo

  alias Kommissar.Resources.Client
  @doc """
  Returns the list of clients with their `:tags` preloaded.

  ## Examples

      iex> list_clients()
      [%Client{}, ...]

  """
  def list_clients do
    Client
    |> Repo.all
    |> Repo.preload(:tags)
  end

  @doc """
  Gets a single client with its `:tags` preloaded.

  Raises `Ecto.NoResultsError` if the Client does not exist.

  ## Examples

      iex> get_client!(123)
      %Client{}

      iex> get_client!(456)
      ** (Ecto.NoResultsError)

  """
  def get_client!(id) do
    Client
    |> Repo.get!(id)
    |> Repo.preload(:tags)
  end

  @doc """
  Creates a client.

  ## Examples

      iex> create_client(%{field: value})
      {:ok, %Client{}}

      iex> create_client(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_client(attrs \\ %{}) do
    %Client{}
    |> Client.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a client.

  ## Examples

      iex> update_client(client, %{field: new_value})
      {:ok, %Client{}}

      iex> update_client(client, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_client(%Client{} = client, attrs) do
    client
    |> Client.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a Client.

  ## Examples

      iex> delete_client(client)
      {:ok, %Client{}}

      iex> delete_client(client)
      {:error, %Ecto.Changeset{}}

  """
  def delete_client(%Client{} = client) do
    Repo.delete(client)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking client changes.

  ## Examples

      iex> change_client(client)
      %Ecto.Changeset{source: %Client{}}

  """
  def change_client(%Client{} = client) do
    Client.changeset(client, %{})
  end
  alias Kommissar.Resources.Commission

  @doc """
  Returns the list of commissions with their `:tags` preloaded.

  ## Examples

      iex> list_commissions()
      [%Commission{}, ...]

  """
  def list_commissions do
    Commission
    |> Repo.all
    |> Repo.preload(:tags)
  end

  @doc """
  Gets a single commission with its `:tags` preloaded.

  Raises `Ecto.NoResultsError` if the Commission does not exist.

  ## Examples

      iex> get_commission!(123)
      %Commission{}

      iex> get_commission!(456)
      ** (Ecto.NoResultsError)

  """
  def get_commission!(id) do
    Commission
    |> Repo.get!(id)
    |> Repo.preload(:tags)
  end

  @doc """
  Creates a commission.

  ## Examples

      iex> create_commission(%{field: value})
      {:ok, %Commission{}}

      iex> create_commission(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_commission(attrs \\ %{}) do
    %Commission{}
    |> Commission.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a commission.

  ## Examples

      iex> update_commission(commission, %{field: new_value})
      {:ok, %Commission{}}

      iex> update_commission(commission, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_commission(%Commission{} = commission, attrs) do
    commission
    |> Commission.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a Commission.

  ## Examples

      iex> delete_commission(commission)
      {:ok, %Commission{}}

      iex> delete_commission(commission)
      {:error, %Ecto.Changeset{}}

  """
  def delete_commission(%Commission{} = commission) do
    Repo.delete(commission)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking commission changes.

  ## Examples

      iex> change_commission(commission)
      %Ecto.Changeset{source: %Commission{}}

  """
  def change_commission(%Commission{} = commission) do
    Commission.changeset(commission, %{})
  end
end
| 17.610329 | 65 | 0.603572 |
086f965ac951e0230df052468d487c66d8acc992 | 5,800 | exs | Elixir | test/ex_oauth2_provider/device_grants/device_grants_test.exs | heroinbob/ex_oauth2_provider | 80c21a53bba0955ab3b66f1bd32cc81db0f04f49 | [
"MIT"
] | null | null | null | test/ex_oauth2_provider/device_grants/device_grants_test.exs | heroinbob/ex_oauth2_provider | 80c21a53bba0955ab3b66f1bd32cc81db0f04f49 | [
"MIT"
] | null | null | null | test/ex_oauth2_provider/device_grants/device_grants_test.exs | heroinbob/ex_oauth2_provider | 80c21a53bba0955ab3b66f1bd32cc81db0f04f49 | [
"MIT"
] | null | null | null | defmodule ExOauth2Provider.DeviceGrantsTest do
  use ExOauth2Provider.TestCase

  alias ExOauth2Provider.DeviceGrants
  alias ExOauth2Provider.Test.Fixtures
  alias ExOauth2Provider.Test.QueryHelpers
  alias Dummy.{OauthDeviceGrants.OauthDeviceGrant, Users.User}

  # Shared config keyword list passed to every DeviceGrants call under test.
  @config [otp_app: :ex_oauth2_provider]

  # Each test receives a freshly created OAuth application in its context.
  setup do
    application = Fixtures.application()
    {:ok, %{application: application}}
  end
  describe "#authorize/3" do
    test "updates the given grant and returns the result tuple", context do
      %{application: application} = context
      grant = Fixtures.device_grant(application: application)
      user = Fixtures.resource_owner()

      {:ok, updated_grant} = DeviceGrants.authorize(grant, user, @config)

      # Authorizing links the resource owner and clears the user code.
      assert updated_grant.id == grant.id
      assert updated_grant.resource_owner_id == user.id
      assert updated_grant.user_code == nil
    end

    test "returns an error tuple when the changeset is invalid", context do
      %{application: application} = context
      grant = Fixtures.device_grant(application: application)
      # Non-persisted user with a bogus id makes the changeset invalid.
      user = %User{id: "abc"}

      {:error, _changeset} = DeviceGrants.authorize(grant, user, @config)
    end
  end

  describe "#authorized?/1" do
    test "returns true when the given schema has been authorized" do
      grant = %OauthDeviceGrant{resource_owner_id: "abc", user_code: nil}
      assert DeviceGrants.authorized?(grant) == true
    end

    test "returns false when the given schema is not authorized" do
      grant = %OauthDeviceGrant{resource_owner_id: nil, user_code: "abc"}
      assert DeviceGrants.authorized?(grant) == false
    end
  end

  describe "#create_grant/3" do
    test "inserts a new record and returns the result tuple", context do
      %{application: application} = context

      attrs = %{
        "device_code" => "dc",
        "expires_in" => 10,
        "user_code" => "uc"
      }

      {:ok, grant} = DeviceGrants.create_grant(application, attrs, @config)

      refute grant.id == nil
      assert grant.device_code == "dc"
      assert grant.expires_in == 10
      assert grant.last_polled_at == nil
      # Default behavior when not specified
      assert grant.scopes == "public"
      assert grant.user_code == "uc"
    end

    test "accepts valid scopes", %{application: application} do
      attrs = %{
        "device_code" => "dc",
        "expires_in" => 10,
        "scopes" => "read",
        "user_code" => "uc"
      }

      {:ok, grant} = DeviceGrants.create_grant(application, attrs, @config)
      assert grant.scopes == "read"
    end

    test "returns an error tuple when the changeset is invalid", context do
      %{application: application} = context
      # Empty attrs omit the required fields.
      {:error, _changeset} = DeviceGrants.create_grant(application, %{}, @config)
    end
  end

  describe "#delete_expired/1" do
    test "deletes all expired grants and returns the result tuple", context do
      %{application: application} = context
      grant = Fixtures.device_grant(application: application)

      # Backdate the insert so the grant is past its expires_in window.
      inserted_at =
        QueryHelpers.timestamp(
          OauthDeviceGrant,
          :inserted_at,
          seconds: -grant.expires_in
        )

      QueryHelpers.change!(grant, inserted_at: inserted_at)

      {1, nil} = DeviceGrants.delete_expired(@config)
      assert QueryHelpers.count(OauthDeviceGrant) == 0
    end
  end

  describe "#delete!/1" do
    test "deletes the grant and returns it", %{application: application} do
      grant = Fixtures.device_grant(application: application)
      deleted_grant = DeviceGrants.delete!(grant, @config)

      assert deleted_grant.id == grant.id
      assert QueryHelpers.count(OauthDeviceGrant) == 0
    end

    test "raises an error when the changeset is invalid" do
      # Deleting a struct that was never persisted raises StaleEntryError.
      assert_raise Ecto.StaleEntryError, fn ->
        DeviceGrants.delete!(%OauthDeviceGrant{id: 123}, @config)
      end
    end
  end
  describe "#find_by_application_and_device_code/3" do
    test "returns the matching DeviceGrant", %{application: application} do
      grant = Fixtures.device_grant(application: application)

      found_grant =
        DeviceGrants.find_by_application_and_device_code(
          application,
          grant.device_code,
          @config
        )

      assert grant.id == found_grant.id
    end

    test "returns the nil when no matching grant exists", %{application: application} do
      result =
        DeviceGrants.find_by_application_and_device_code(
          application,
          "foo",
          @config
        )

      assert result == nil
    end
  end

  describe "#find_by_user_code/2" do
    test "returns the grant matching the user code", %{application: application} do
      grant = Fixtures.device_grant(application: application)
      found_grant = DeviceGrants.find_by_user_code(grant.user_code, @config)

      assert grant.id == found_grant.id
    end

    test "returns the nil when no matching grant exists" do
      result = DeviceGrants.find_by_user_code("foo", @config)
      assert result == nil
    end
  end

  describe "#update_last_polled_at!/2" do
    test "updates last polled at timestamp and returns the updated schema", context do
      %{application: application} = context
      grant = Fixtures.device_grant(application: application)

      assert grant.last_polled_at == nil
      grant = DeviceGrants.update_last_polled_at!(grant, @config)
      refute grant.last_polled_at == nil
    end

    test "raises an error when the changeset is invalid", context do
      %{application: application} = context

      # Create-then-delete yields a stale struct; updating it must raise.
      grant =
        [application: application]
        |> Fixtures.device_grant()
        |> DeviceGrants.delete!(@config)

      assert_raise Ecto.StaleEntryError, fn ->
        DeviceGrants.update_last_polled_at!(grant, @config)
      end
    end
  end
end
| 29.145729 | 88 | 0.669483 |
086f9fbac46fcb76fc322625edc66503b8dc2271 | 16,789 | ex | Elixir | lib/mix/lib/mix/utils.ex | maartenvanvliet/elixir | 62d75fd7c657b2e08bfaff4002a851efaccf5a65 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/utils.ex | maartenvanvliet/elixir | 62d75fd7c657b2e08bfaff4002a851efaccf5a65 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/utils.ex | maartenvanvliet/elixir | 62d75fd7c657b2e08bfaff4002a851efaccf5a65 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Utils do
@moduledoc false
@doc """
Gets the Mix home.
It defaults to `~/.mix` unless the `MIX_HOME`
environment variable is set.
Developers should only store entries in the
`MIX_HOME` directory which are guaranteed to
work across multiple Elixir versions, as it is
not recommended to swap the `MIX_HOME` directory
as configuration and other important data may be
stored there.
"""
def mix_home do
System.get_env("MIX_HOME") || Path.expand("~/.mix")
end
@doc """
Gets all paths defined in the MIX_PATH env variable.
`MIX_PATH` may contain multiple paths. If on Windows, those
paths should be separated by `;`, if on Unix systems, use `:`.
"""
def mix_paths do
if path = System.get_env("MIX_PATH") do
String.split(path, path_separator())
else
[]
end
end
defp path_separator do
case :os.type() do
{:win32, _} -> ";"
{:unix, _} -> ":"
end
end
@doc """
Parses a string into module, function and arity.
It returns `{:ok, mfa_list}`, where a `mfa_list` is
`[module, function, arity]`, `[module, function]` or `[module]`,
or the atom `:error`.
iex> Mix.Utils.parse_mfa("Foo.bar/1")
{:ok, [Foo, :bar, 1]}
iex> Mix.Utils.parse_mfa(":foo.bar/1")
{:ok, [:foo, :bar, 1]}
iex> Mix.Utils.parse_mfa(":foo.bar")
{:ok, [:foo, :bar]}
iex> Mix.Utils.parse_mfa(":foo")
{:ok, [:foo]}
iex> Mix.Utils.parse_mfa("Foo")
{:ok, [Foo]}
iex> Mix.Utils.parse_mfa("Foo.")
:error
iex> Mix.Utils.parse_mfa("Foo.bar.baz")
:error
iex> Mix.Utils.parse_mfa("Foo.bar/2/2")
:error
"""
def parse_mfa(mfa) do
with {:ok, quoted} <- Code.string_to_quoted(mfa),
[_ | _] = mfa_list <- quoted_to_mfa(quoted) do
{:ok, mfa_list}
else
_ -> :error
end
end
defp quoted_to_mfa({:/, _, [dispatch, arity]}) when is_integer(arity) do
quoted_to_mf(dispatch, [arity])
end
defp quoted_to_mfa(dispatch) do
quoted_to_mf(dispatch, [])
end
defp quoted_to_mf({{:., _, [module, fun]}, _, []}, acc) when is_atom(fun) do
quoted_to_m(module, [fun | acc])
end
defp quoted_to_mf(module, acc) do
quoted_to_m(module, acc)
end
defp quoted_to_m({:__aliases__, _, aliases}, acc) do
[Module.concat(aliases) | acc]
end
defp quoted_to_m(atom, acc) when is_atom(atom) do
[atom | acc]
end
defp quoted_to_m(_, _acc) do
[]
end
@doc """
Takes a `command` name and attempts to load a module
with the command name converted to a module name
in the given `at` scope.
Returns `{:module, module}` in case a module
exists and is loaded, `{:error, reason}` otherwise.
## Examples
iex> Mix.Utils.command_to_module("compile", Mix.Tasks)
{:module, Mix.Tasks.Compile}
"""
def command_to_module(command, at \\ Elixir) do
module = Module.concat(at, command_to_module_name(command))
Code.ensure_loaded(module)
end
@doc """
Returns `true` if any of the `sources` are stale
compared to the given `targets`.
"""
def stale?(sources, targets) do
Enum.any?(stale_stream(sources, targets))
end
@doc """
Extracts all stale `sources` compared to the given `targets`.
"""
def extract_stale(_sources, []), do: []
def extract_stale([], _targets), do: []
def extract_stale(sources, targets) do
stale_stream(sources, targets) |> Enum.to_list()
end
defp stale_stream(sources, targets) do
modified_target = targets |> Enum.map(&last_modified/1) |> Enum.min()
Stream.filter(sources, fn source ->
last_modified(source) > modified_target
end)
end
@doc """
Returns the date the given path was last modified.
If the path does not exist, it returns the Unix epoch
(1970-01-01 00:00:00).
"""
def last_modified(path)
def last_modified({{_, _, _}, {_, _, _}} = timestamp) do
timestamp
end
def last_modified(path) do
{mtime, _size} = last_modified_and_size(path)
mtime
end
@doc false
def last_modified_and_size(path) do
now = :calendar.universal_time()
case :elixir_utils.read_mtime_and_size(path) do
{:ok, mtime, size} when mtime > now ->
message = "warning: mtime (modified time) for \"#{path}\" was set to the future, resetting to now"
Mix.shell().error(message)
File.touch!(path, now)
{mtime, size}
{:ok, mtime, size} ->
{mtime, size}
{:error, _} ->
{{{1970, 1, 1}, {0, 0, 0}}, 0}
end
end
@doc """
Prints n files are being compiled with the given extension.
"""
def compiling_n(1, ext), do: Mix.shell().info("Compiling 1 file (.#{ext})")
def compiling_n(n, ext), do: Mix.shell().info("Compiling #{n} files (.#{ext})")
@doc """
Extracts files from a list of paths.
`exts_or_pattern` may be a list of extensions or a
`Path.wildcard/1` pattern.
If the path in `paths` is a file, it is included in
the return result. If it is a directory, it is searched
recursively for files with the given extensions or matching
the given patterns.
"""
def extract_files(paths, exts_or_pattern)
def extract_files(paths, exts) when is_list(exts) do
extract_files(paths, "*.{#{Enum.join(exts, ",")}}")
end
def extract_files(paths, pattern) do
Enum.flat_map(paths, fn path ->
case :elixir_utils.read_file_type(path) do
{:ok, :directory} -> Path.wildcard("#{path}/**/#{pattern}")
{:ok, :regular} -> [path]
_ -> []
end
end)
|> Enum.uniq()
end
  @type tree_node :: {name :: String.Chars.t(), edge_info :: String.Chars.t()}

  @doc """
  Prints the given tree according to the callback.

  The callback will be invoked for each node and it
  must return a `{printed, children}` tuple.
  """
  @spec print_tree([tree_node], (tree_node -> {tree_node, [tree_node]}), keyword) :: :ok
  def print_tree(nodes, callback, opts \\ []) do
    # :format "pretty"/"plain" forces Unicode/ASCII branches; otherwise use
    # Unicode everywhere except Windows.
    pretty? =
      case Keyword.get(opts, :format) do
        "pretty" -> true
        "plain" -> false
        _other -> elem(:os.type(), 0) != :win32
      end

    print_tree(nodes, _depth = [], _parent = nil, _seen = MapSet.new(), pretty?, callback)
    :ok
  end

  # Recursive worker. `depth` is a reversed list of "does this level have more
  # siblings?" booleans; `seen` de-duplicates {parent, name} pairs so shared
  # subtrees are printed only once.
  defp print_tree(_nodes = [], _depth, _parent, seen, _pretty, _callback) do
    seen
  end

  defp print_tree([node | nodes], depth, parent, seen, pretty?, callback) do
    {{name, info}, children} = callback.(node)
    key = {parent, name}

    if MapSet.member?(seen, key) do
      seen
    else
      info = if(info, do: " #{info}", else: "")
      Mix.shell().info("#{depth(pretty?, depth)}#{prefix(pretty?, depth, nodes)}#{name}#{info}")

      seen =
        print_tree(
          children,
          [nodes != [] | depth],
          name,
          MapSet.put(seen, key),
          pretty?,
          callback
        )

      print_tree(nodes, depth, parent, seen, pretty?, callback)
    end
  end

  # Indentation for all ancestor levels (the current level's own marker is
  # produced by prefix/3, hence the `tl`).
  defp depth(_pretty?, []), do: ""
  defp depth(pretty?, depth), do: Enum.reverse(depth) |> tl |> Enum.map(&entry(pretty?, &1))

  # Vertical continuation bars: drawn when the ancestor still has siblings.
  defp entry(false, true), do: "|   "
  defp entry(false, false), do: "    "
  defp entry(true, true), do: "│   "
  defp entry(true, false), do: "    "

  # Branch marker for the current node: none at the root, "last child" vs
  # "has more siblings" below.
  defp prefix(false, [], _), do: ""
  defp prefix(false, _, []), do: "`-- "
  defp prefix(false, _, _), do: "|-- "
  defp prefix(true, [], _), do: ""
  defp prefix(true, _, []), do: "└── "
  defp prefix(true, _, _), do: "├── "
  @doc """
  Outputs the given tree according to the callback as a DOT graph.

  The callback will be invoked for each node and it
  must return a `{printed, children}` tuple.
  """
  @spec write_dot_graph!(
          Path.t(),
          String.t(),
          [tree_node],
          (tree_node -> {tree_node, [tree_node]}),
          keyword
        ) :: :ok
  def write_dot_graph!(path, title, nodes, callback, _opts \\ []) do
    # A fresh reference marks the virtual root: build_dot_current/3 emits no
    # "parent ->" edge for it (see is_reference check below).
    {dot, _} = build_dot_graph(make_ref(), nodes, MapSet.new(), callback)
    File.write!(path, "digraph \"#{title}\" {\n#{dot}}\n")
  end

  # Accumulates the DOT body as iodata-free binary concatenation, threading a
  # `seen` set of {parent, name} pairs so each edge is emitted at most once.
  defp build_dot_graph(_parent, [], seen, _callback), do: {"", seen}

  defp build_dot_graph(parent, [node | nodes], seen, callback) do
    {{name, edge_info}, children} = callback.(node)
    key = {parent, name}

    if MapSet.member?(seen, key) do
      {"", seen}
    else
      seen = MapSet.put(seen, key)
      current = build_dot_current(parent, name, edge_info)
      {children, seen} = build_dot_graph(name, children, seen, callback)
      {siblings, seen} = build_dot_graph(parent, nodes, seen, callback)
      {current <> children <> siblings, seen}
    end
  end

  # One DOT statement: either a bare node (at the root) or a "parent -> name"
  # edge, optionally labelled with edge_info.
  defp build_dot_current(parent, name, edge_info) do
    edge_info =
      if edge_info do
        ~s( [label="#{edge_info}"])
      end

    parent =
      unless is_reference(parent) do
        ~s("#{parent}" -> )
      end

    ~s(  #{parent}"#{name}"#{edge_info}\n)
  end
  # Deprecated shim kept for backwards compatibility; use Macro.underscore/1.
  @doc false
  def underscore(value) do
    IO.warn("Mix.Utils.underscore/1 is deprecated, use Macro.underscore/1 instead")
    Macro.underscore(value)
  end

  # Deprecated shim kept for backwards compatibility; use Macro.camelize/1.
  @doc false
  def camelize(value) do
    IO.warn("Mix.Utils.camelize/1 is deprecated, use Macro.camelize/1 instead")
    Macro.camelize(value)
  end
@doc """
Takes a module and converts it to a command.
The nesting argument can be given in order to remove
the nesting of a module.
## Examples
iex> Mix.Utils.module_name_to_command(Mix.Tasks.Compile, 2)
"compile"
iex> Mix.Utils.module_name_to_command("Mix.Tasks.Compile.Elixir", 2)
"compile.elixir"
"""
def module_name_to_command(module, nesting \\ 0)
def module_name_to_command(module, nesting) when is_atom(module) do
module_name_to_command(inspect(module), nesting)
end
def module_name_to_command(module, nesting) do
module
|> to_string()
|> String.split(".")
|> Enum.drop(nesting)
|> Enum.map_join(".", &Macro.underscore/1)
end
@doc """
Takes a command and converts it to the module name format.
## Examples
iex> Mix.Utils.command_to_module_name("compile.elixir")
"Compile.Elixir"
"""
def command_to_module_name(command) do
command
|> to_string()
|> String.split(".")
|> Enum.map_join(".", &Macro.camelize/1)
end
@doc """
Symlinks directory `source` to `target` or copies it recursively
in case symlink fails.
Expects source and target to be absolute paths as it generates
a relative symlink.
Returns `:ok`, `{:ok, copied_files}` when it had to fall back to a
copy, or `{:error, :enoent}` when `source` does not exist.
"""
def symlink_or_copy(source, target) do
if File.exists?(source) do
# Relative symbolic links on Windows are broken
link =
case :os.type() do
{:win32, _} -> source
_ -> make_relative_path(source, target)
end
|> String.to_charlist()
# Inspect what is currently at `target` to avoid redoing work.
case :file.read_link(target) do
{:ok, ^link} ->
# Already points at the right place; nothing to do.
:ok
{:ok, _} ->
# A stale symlink: remove it and relink/copy.
File.rm!(target)
do_symlink_or_copy(source, target, link)
{:error, :enoent} ->
do_symlink_or_copy(source, target, link)
{:error, _} ->
# Not a symlink. Keep directories (cp_r! merges into them) but
# remove anything else before recreating.
unless File.dir?(target) do
File.rm_rf!(target)
end
do_symlink_or_copy(source, target, link)
end
else
{:error, :enoent}
end
end
# Tries the symlink first; on failure (e.g. no permission on Windows)
# falls back to a recursive copy that only overwrites older files.
defp do_symlink_or_copy(source, target, link) do
case :file.make_symlink(link, target) do
:ok ->
:ok
{:error, _} ->
file =
File.cp_r!(source, target, fn orig, dest ->
File.stat!(orig).mtime > File.stat!(dest).mtime
end)
{:ok, file}
end
end
# Builds a relative path pointing from `target`'s directory to `source`.
# Both paths must already be fully expanded (absolute).
defp make_relative_path(source, target) do
do_make_relative_path(Path.split(source), Path.split(target))
end
# Drop the shared leading components, then climb out of the remaining
# target directories with ".." before descending into the source ones.
defp do_make_relative_path([shared | source_rest], [shared | target_rest]) do
do_make_relative_path(source_rest, target_rest)
end
defp do_make_relative_path(source_parts, target_parts) do
ups = List.duplicate("..", max(length(target_parts) - 1, 0))
Path.join(ups ++ source_parts)
end
@doc """
Opens and reads content from either a URL or a local filesystem path.
Returns the contents as a `{:ok, binary}`, `:badpath` for invalid
paths or `{:local, message}` for local errors and `{:remote, message}`
for remote ones.
## Options
* `:sha512` - checks against the given SHA-512 checksum. Returns
`{:checksum, message}` in case it fails
"""
@spec read_path(String.t(), keyword) ::
{:ok, binary}
| :badpath
| {:remote, String.t()}
| {:local, String.t()}
| {:checksum, String.t()}
def read_path(path, opts \\ []) do
cond do
url?(path) ->
read_httpc(path) |> checksum(opts)
file?(path) ->
read_file(path) |> checksum(opts)
true ->
:badpath
end
end
# Hash algorithms that may be verified via `opts` (keyed by algorithm).
@checksums [:sha512]
# On a successful read, verifies each requested checksum. Returns the
# original `{:ok, binary}` when no checksum was given or all match.
defp checksum({:ok, binary} = return, opts) do
Enum.find_value(@checksums, return, fn hash ->
# The second `with` clause only matches when the computed digest
# DIFFERS from the expected one; a matching digest falls to `else`
# and yields nil, letting find_value return the default.
with expected when expected != nil <- opts[hash],
actual when actual != expected <- hexhash(binary, hash) do
message = """
Data does not match the given SHA-512 checksum.
Expected: #{expected}
Actual: #{actual}
"""
{:checksum, message}
else
_ -> nil
end
end)
end
# Errors pass through untouched (no data to verify).
defp checksum({_, _} = error, _opts) do
error
end
# Lowercase hex digest of `binary` under the given hash algorithm.
defp hexhash(binary, hash) do
Base.encode16(:crypto.hash(hash, binary), case: :lower)
end
@doc """
Prompts the user to overwrite the file if it exists. Returns
the user input.
Returns `true` without prompting when the file does not exist.
"""
def can_write?(path) do
if File.exists?(path) do
full = Path.expand(path)
Mix.shell().yes?(Path.relative_to_cwd(full) <> " already exists, overwrite?")
else
true
end
end
# Reads a local file, converting a File.Error into the {:local, message}
# shape expected by read_path/2.
defp read_file(path) do
try do
{:ok, File.read!(path)}
rescue
e in [File.Error] -> {:local, Exception.message(e)}
end
end
# Fetches `path` over HTTP(S) using Erlang's :httpc under a dedicated
# profile. Returns {:ok, body} or {:remote, message}. Note the implicit
# try: the `after` clause always stops the :mix httpc profile.
defp read_httpc(path) do
{:ok, _} = Application.ensure_all_started(:ssl)
{:ok, _} = Application.ensure_all_started(:inets)
# Starting an HTTP client profile allows us to scope
# the effects of using an HTTP proxy to this function
{:ok, _pid} = :inets.start(:httpc, [{:profile, :mix}])
# :httpc expects charlists for URLs and header pairs.
headers = [{'user-agent', 'Mix/#{System.version()}'}]
request = {:binary.bin_to_list(path), headers}
# We are using relaxed: true because some servers is returning a Location
# header with relative paths, which does not follow the spec. This would
# cause the request to fail with {:error, :no_scheme} unless :relaxed
# is given.
#
# If a proxy environment variable was supplied add a proxy to httpc.
http_options = [relaxed: true] ++ proxy_config(path)
case :httpc.request(:get, request, http_options, [body_format: :binary], :mix) do
{:ok, {{_, status, _}, _, body}} when status in 200..299 ->
{:ok, body}
{:ok, {{_, status, _}, _, _}} ->
{:remote, "httpc request failed with: {:bad_status_code, #{status}}"}
{:error, reason} ->
{:remote, "httpc request failed with: #{inspect(reason)}"}
end
after
:inets.stop(:httpc, :mix)
end
# True for regular files only (directories and missing paths are false).
defp file?(path) do
File.regular?(path)
end
# True when the path parses as an http(s) URL.
defp url?(path) do
URI.parse(path).scheme in ["http", "https"]
end
# Configures :httpc proxies from the environment (as a side effect) and
# returns the proxy auth options applicable to `url`'s scheme.
def proxy_config(url) do
{http_proxy, https_proxy} = proxy_env()
proxy_auth(URI.parse(url), http_proxy, https_proxy)
end
# Reads HTTP(S)_PROXY / NO_PROXY (upper- and lowercase variants) and
# registers any configured proxy with the :mix httpc profile.
defp proxy_env do
http_proxy = System.get_env("HTTP_PROXY") || System.get_env("http_proxy")
https_proxy = System.get_env("HTTPS_PROXY") || System.get_env("https_proxy")
no_proxy = no_proxy_env() |> no_proxy_list()
{proxy_setup(:http, http_proxy, no_proxy), proxy_setup(:https, https_proxy, no_proxy)}
end
defp no_proxy_env() do
System.get_env("NO_PROXY") || System.get_env("no_proxy")
end
defp no_proxy_list(nil) do
[]
end
# Splits the comma-separated NO_PROXY value into the charlists :httpc
# expects for its no-proxy host list.
defp no_proxy_list(no_proxy) do
no_proxy
|> String.split(",")
|> Enum.map(&String.to_charlist/1)
end
# If the proxy URL has a host and port, registers it with :httpc for the
# given scheme. Always returns the parsed proxy URI.
defp proxy_setup(scheme, proxy, no_proxy) do
uri = URI.parse(proxy || "")
if uri.host && uri.port do
host = String.to_charlist(uri.host)
:httpc.set_options([{proxy_scheme(scheme), {{host, uri.port}, no_proxy}}], :mix)
end
uri
end
# Maps our scheme atom to :httpc's option name.
defp proxy_scheme(scheme) do
case scheme do
:http -> :proxy
:https -> :https_proxy
end
end
# Picks the proxy matching the request scheme, then extracts basic-auth
# credentials from the proxy URL's userinfo, if any.
defp proxy_auth(%URI{scheme: "http"}, http_proxy, _https_proxy), do: proxy_auth(http_proxy)
defp proxy_auth(%URI{scheme: "https"}, _http_proxy, https_proxy), do: proxy_auth(https_proxy)
defp proxy_auth(nil), do: []
defp proxy_auth(%URI{userinfo: nil}), do: []
defp proxy_auth(%URI{userinfo: auth}) do
# Password is optional ("user" or "user:pass"); default it to "".
destructure [user, pass], String.split(auth, ":", parts: 2)
user = String.to_charlist(user)
pass = String.to_charlist(pass || "")
[proxy_auth: {user, pass}]
end
end
| 26.15109 | 106 | 0.614033 |
086fad5478100ce2a049407a00e892c0c299aa4a | 1,306 | ex | Elixir | apps/dockup_ui/lib/dockup_ui/application.ex | rudydydy/dockup | 0d05d1ef65cc5523800bd852178361521cd3e7d8 | [
"MIT"
] | null | null | null | apps/dockup_ui/lib/dockup_ui/application.ex | rudydydy/dockup | 0d05d1ef65cc5523800bd852178361521cd3e7d8 | [
"MIT"
] | null | null | null | apps/dockup_ui/lib/dockup_ui/application.ex | rudydydy/dockup | 0d05d1ef65cc5523800bd852178361521cd3e7d8 | [
"MIT"
] | null | null | null | defmodule DockupUi.Application do
use Application
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
# OTP application entry point: applies env-var overrides, initializes
# the configured backend, wires Google OAuth credentials into
# ueberauth, then starts the supervision tree.
def start(_type, _args) do
import Supervisor.Spec
# Overrides configurations from ENV variables
DockupUi.Config.set_configs_from_env()
backend = Application.fetch_env!(:dockup_ui, :backend_module)
backend.initialize()
# Set id and secret for google oauth config here
Application.put_env :ueberauth,
Ueberauth.Strategy.Google.OAuth,
[client_secret: Application.get_env(:dockup_ui, :google_client_secret),
client_id: Application.get_env(:dockup_ui, :google_client_id)]
children = [
# Start the endpoint when the application starts
supervisor(DockupUi.Endpoint, []),
# Start the Ecto repository
supervisor(DockupUi.Repo, []),
worker(DockupUi.Scheduler, []),
worker(DockupUi.DeploymentQueue, []),
worker(DockupUi.RetryWorker, [])
]
opts = [strategy: :one_for_one, name: DockupUi.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
DockupUi.Endpoint.config_change(changed, removed)
:ok
end
end
| 31.853659 | 77 | 0.718224 |
086fdda444ba7dcad907719155c2afcd40bc1a1b | 8,051 | ex | Elixir | lib/mix/lib/mix/deps.ex | Nicd/elixir | e62ef92a4be1b562033d35b2d822cc9d6c661077 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/deps.ex | Nicd/elixir | e62ef92a4be1b562033d35b2d822cc9d6c661077 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/deps.ex | Nicd/elixir | e62ef92a4be1b562033d35b2d822cc9d6c661077 | [
"Apache-2.0"
] | null | null | null | defrecord Mix.Dep, [ scm: nil, app: nil, requirement: nil, status: nil, opts: nil,
deps: [], source: nil, manager: nil ] do
@moduledoc """
This is a record that keeps information about your project
dependencies. It keeps:
* scm - a module representing the source code management tool (SCM) operations;
* app - the app name as an atom;
* requirements - a binary or regexp with the deps requirement;
* status - the current status of dependency, check `Mix.Deps.format_status/1` for more info;
* opts - the options given by the developer
* source - any possible configuration associated with the manager field,
rebar.config for rebar or the Mix.Project for Mix
* manager - the project management, possible values: :rebar | :mix | :make | nil
"""
end
defmodule Mix.Deps do
@moduledoc """
A module with common functions to work with dependencies.
"""
@doc """
Returns all dependencies recursively as `Mix.Dep` record.
## Exceptions
This function raises an exception in case the developer
provides a dependency in the wrong format.
"""
# NOTE: this module predates modern Elixir — it uses records
# (Mix.Dep[...]), `//` defaults and `lc … inlist` comprehensions.
def all do
{ deps, _ } = Mix.Deps.Converger.all(nil, fn(dep, acc) -> { dep, acc } end)
deps
end
@doc """
Returns all dependencies but with a custom callback and
accumulator.
"""
def all(acc, callback) do
{ _deps, acc } = Mix.Deps.Converger.all(acc, callback)
acc
end
@doc """
Returns all direct child dependencies.
"""
defdelegate children(), to: Mix.Deps.Retriever
@doc """
Returns all dependencies depending on given dependencies.
"""
def depending(deps, all_deps // all)
def depending([], _all_deps) do
[]
end
# Finds the direct parents of `deps`, then recurses so transitive
# dependents are included as well.
def depending(deps, all_deps) do
dep_names = Enum.map(deps, fn dep -> dep.app end)
parents = Enum.filter all_deps, fn dep ->
Enum.any?(dep.deps, fn child_dep -> child_dep.app in dep_names end)
end
parents ++ depending(parents, all_deps)
end
@doc """
Receives a list of deps names and returns deps records.
Logs a message if the dependency could not be found.
"""
def by_name(given, all_deps // all) do
# Ensure all apps are atoms
apps = Enum.map given, fn(app) ->
if is_binary(app), do: binary_to_atom(app), else: app
end
# We need to keep the order of all, which properly orders deps
deps = Enum.filter all_deps, fn(dep) -> dep.app in apps end
# Now we validate the given atoms
index = Mix.Dep.__index__(:app)
Enum.each apps, fn(app) ->
unless List.keyfind(deps, app, index) do
Mix.shell.info "unknown dependency #{app} for env #{Mix.env}"
end
end
deps
end
@doc """
Runs the given `fun` inside the given dependency project by
changing the current working directory and loading the given
project into the project stack.
"""
def in_dependency(dep, post_config // [], fun)
# Rebar dependencies have no Mix project of their own, so we push the
# Mix.Rebar shim onto the project stack and cd into the dep's directory.
def in_dependency(Mix.Dep[manager: :rebar, opts: opts], post_config, fun) do
# Use post_config for rebar deps
Mix.Project.post_config(post_config)
Mix.Project.push(Mix.Rebar)
try do
File.cd!(opts[:dest], fn -> fun.(nil) end)
after
Mix.Project.pop
end
end
# Mix dependencies are loaded via in_project/4; temporarily switch
# Mix.env to the dep's configured env (defaults to :prod) and restore
# the caller's env afterwards.
def in_dependency(Mix.Dep[app: app, opts: opts], post_config, fun) do
env = opts[:env] || :prod
old_env = Mix.env
try do
Mix.env(env)
Mix.Project.in_project(app, opts[:dest], post_config, fun)
after
Mix.env(old_env)
end
end
@doc """
Formats the status of a dependency.
"""
# One clause per status tag; see the Mix.Dep record moduledoc for the
# meaning of each status.
def format_status(Mix.Dep[status: { :ok, _vsn }]),
do: "ok"
def format_status(Mix.Dep[status: { :noappfile, path }]),
do: "could not find app file at #{Mix.Utils.relative_to_cwd(path)}"
def format_status(Mix.Dep[status: { :invalidapp, path }]),
do: "the app file at #{Mix.Utils.relative_to_cwd(path)} is invalid"
def format_status(Mix.Dep[status: { :invalidvsn, vsn }]),
do: "the dependency does not match the specified version, got #{vsn}"
def format_status(Mix.Dep[status: { :lockmismatch, _ }]),
do: "lock mismatch: the dependency is out of date"
def format_status(Mix.Dep[status: :nolock]),
do: "the dependency is not locked"
def format_status(Mix.Dep[status: { :diverged, other }, opts: opts]),
do: "different specs were given for this dependency, choose one in your deps:\n" <>
"$ #{inspect_kw opts}\n$ #{inspect_kw other.opts}\n"
def format_status(Mix.Dep[status: { :unavailable, _ }]),
do: "the dependency is not available, run `mix deps.get`"
# Pretty-prints a keyword list one entry per line (old-style `lc … inlist`
# list comprehension).
defp inspect_kw(list) do
middle = lc { key, value } inlist Enum.sort(list), do: "#{key}: #{inspect value, raw: true}"
"[ " <> Enum.join(middle, ",\n ") <> " ]"
end
@doc """
Checks the lock for the given dependency and update its status accordingly.
"""
def check_lock(Mix.Dep[scm: scm, app: app, opts: opts] = dep, lock) do
if available?(dep) do
rev = lock[app]
opts = Keyword.put(opts, :lock, rev)
if scm.matches_lock?(opts) do
dep
else
# No lock entry means :nolock; a stale entry means :lockmismatch.
status = if rev, do: { :lockmismatch, rev }, else: :nolock
dep.status(status)
end
else
# Diverged/unavailable deps keep their status untouched.
dep
end
end
@doc """
Updates the dependency inside the given project.
"""
defdelegate update(dep), to: Mix.Deps.Retriever
@doc """
Check if a dependency is ok.
"""
def ok?(Mix.Dep[status: { :ok, _ }]), do: true
def ok?(_), do: false
@doc """
Check if a dependency is available.
"""
def available?(Mix.Dep[status: { :diverged, _ }]), do: false
def available?(Mix.Dep[status: { :unavailable, _ }]), do: false
def available?(_), do: true
@doc """
Check if a dependency is part of an umbrella project as a top level project.
"""
def in_umbrella?(Mix.Dep[opts: opts], apps_path) do
# Top-level umbrella apps live directly under apps_path.
apps_path == Path.expand(Path.join(opts[:dest], ".."))
end
@doc """
Check if a dependency is out of date or not, considering its
lock status. Therefore, be sure to call `check_lock` before
invoking this function.
"""
def out_of_date?(Mix.Dep[status: { :lockmismatch, _ }]), do: true
def out_of_date?(Mix.Dep[status: :nolock]), do: true
def out_of_date?(dep), do: not available?(dep)
@doc """
Format the dependency for printing.
"""
def format_dep(Mix.Dep[scm: scm, app: app, status: status, opts: opts]) do
# Only show the version when the dep resolved with one.
version =
case status do
{ :ok, vsn } when vsn != nil -> "(#{vsn}) "
_ -> ""
end
"#{app} #{version}#{inspect scm.format(opts)}"
end
@doc """
Returns all compile paths for the dependency.
"""
# Mix deps report their own compile paths from within their project;
# anything else defaults to a single ebin/ directory under dest.
def compile_paths(Mix.Dep[app: app, opts: opts, manager: manager]) do
if manager == :mix do
Mix.Project.in_project app, opts[:dest], fn _ ->
Mix.Project.compile_paths
end
else
[ Path.join(opts[:dest], "ebin") ]
end
end
@doc """
Returns all load paths for the dependency.
"""
def load_paths(Mix.Dep[manager: :mix, app: app, opts: opts]) do
paths = Mix.Project.in_project app, opts[:dest], fn _ ->
Mix.Project.load_paths
end
Enum.uniq paths
end
# Rebar deps may declare sub_dirs in rebar.config; expand each (they can
# be wildcards) and keep every resulting dir that has an ebin/.
# Note the old-style partial application syntax, e.g. `Path.wildcard(&1)`.
def load_paths(Mix.Dep[manager: :rebar, opts: opts, source: source]) do
# Add root dir and all sub dirs with ebin/ directory
sub_dirs = Enum.map(source[:sub_dirs] || [], fn path ->
Path.join(opts[:dest], path)
end)
[ opts[:dest] | sub_dirs ]
|> Enum.map(Path.wildcard(&1))
|> List.concat
|> Enum.map(Path.join(&1, "ebin"))
|> Enum.filter(File.dir?(&1))
end
def load_paths(Mix.Dep[manager: manager, opts: opts]) when manager in [:make, nil] do
[ Path.join(opts[:dest], "ebin") ]
end
@doc """
Returns true if dependency is a mix project.
"""
def mix?(Mix.Dep[manager: manager]) do
manager == :mix
end
@doc """
Returns true if dependency is a rebar project.
"""
def rebar?(Mix.Dep[manager: manager]) do
manager == :rebar
end
@doc """
Returns true if dependency is a make project.
"""
def make?(Mix.Dep[manager: manager]) do
manager == :make
end
end
| 28.448763 | 96 | 0.632965 |
087005129ed8ebdf8c9e4693b5a059ffb0e3b14c | 4,465 | ex | Elixir | lib/teiserver/account/caches/user_cache.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 4 | 2021-07-29T16:23:20.000Z | 2022-02-23T05:34:36.000Z | lib/teiserver/account/caches/user_cache.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 14 | 2021-08-01T02:36:14.000Z | 2022-01-30T21:15:03.000Z | lib/teiserver/account/caches/user_cache.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 7 | 2021-05-13T12:55:28.000Z | 2022-01-14T06:39:06.000Z | defmodule Teiserver.Account.UserCache do
import Central.Helpers.NumberHelper, only: [int_parse: 1]
alias Teiserver.{Account, User}
alias Teiserver.Data.Types, as: T
alias Central.Account.Guardian
alias Teiserver.Client
require Logger
@spec get_username(T.userid()) :: String.t()
def get_username(userid) do
ConCache.get(:users_lookup_name_with_id, int_parse(userid))
end
@spec get_userid(String.t()) :: integer() | nil
def get_userid(username) do
ConCache.get(:users_lookup_id_with_name, cachename(username))
end
@spec get_user_by_name(String.t()) :: User.t() | nil
def get_user_by_name(username) do
id = get_userid(username)
ConCache.get(:users, id)
end
@spec get_user_by_email(String.t()) :: User.t() | nil
def get_user_by_email(email) do
id = ConCache.get(:users_lookup_id_with_email, cachename(email))
ConCache.get(:users, id)
end
@spec get_user_by_token(String.t()) :: User.t() | nil
def get_user_by_token(token) do
case Guardian.resource_from_token(token) do
{:error, _bad_token} ->
nil
{:ok, db_user, _claims} ->
get_user_by_id(db_user.id)
end
end
@spec get_user_by_id(T.userid()) :: User.t() | nil
def get_user_by_id(id) do
ConCache.get(:users, int_parse(id))
end
@spec list_users(list) :: list
# Fetches each id from the :users cache, dropping ids with no entry.
def list_users(id_list) do
id_list
|> Enum.map(&ConCache.get(:users, &1))
|> Enum.reject(&is_nil/1)
end
@spec recache_user(Integer.t()) :: :ok
# Reloads a user from the database into the cache, updating the existing
# entry or adding a fresh one (with name/email lookups) as needed.
def recache_user(id) do
if get_user_by_id(id) do
Account.get_user!(id)
|> convert_user
|> update_user
else
Account.get_user!(id)
|> convert_user
|> add_user
end
:ok
end
@spec pre_cache_users(:active | :remaining) :: :ok
# Warms the cache at startup by loading every user from the database.
def pre_cache_users(:active) do
user_count =
Account.list_users(limit: :infinity)
|> Parallel.map(fn user ->
user
|> convert_user
|> add_user
end)
|> Enum.count()
Logger.info("pre_cache_users:active, got #{user_count} users")
end
# def pre_cache_users(:remaining) do
# user_count =
# Account.list_users(limit: :infinity)
# |> Parallel.map(fn user ->
# user
# |> convert_user
# |> add_user
# end)
# |> Enum.count()
# Logger.info("pre_cache_users:remaining, got #{user_count} users")
# end
@spec convert_user(User.t()) :: User.t()
# Flattens a DB user into the cached shape: pulls each data key out of
# the JSON `data` map (string keys) into top-level atom keys, filling in
# defaults, and derives :spring_password from the stored hash.
def convert_user(user) do
data =
User.data_keys
|> Map.new(fn k -> {k, Map.get(user.data || %{}, to_string(k), User.default_data()[k])} end)
|> Map.put(:spring_password, (user.data["password_hash"] == user.password))
user
|> Map.take(User.keys())
|> Map.merge(User.default_data())
|> Map.merge(data)
end
@spec add_user(User.t()) :: User.t()
# Stores the user and registers the name/email -> id lookup entries.
def add_user(user) do
update_user(user)
ConCache.put(:users_lookup_name_with_id, user.id, user.name)
ConCache.put(:users_lookup_id_with_name, cachename(user.name), user.id)
ConCache.put(:users_lookup_id_with_email, cachename(user.email), user.id)
user
end
# Persists the changes into the database so they will
# be pulled out next time the user is accessed/recached
# The special case here is to prevent the benchmark and test users causing issues
@spec persist_user(User.t()) :: User.t() | nil
defp persist_user(%{name: "TEST_" <> _}), do: nil
defp persist_user(user) do
db_user = Account.get_user!(user.id)
# Convert atom keys back to the string keys used by the JSON column.
data =
User.data_keys
|> Map.new(fn k -> {to_string(k), Map.get(user, k, User.default_data()[k])} end)
Account.update_user(db_user, %{"data" => data})
end
@spec update_user(User.t(), boolean) :: User.t()
# Writes the user into the cache; when `persist` is true also writes
# the data map back to the database.
def update_user(user, persist \\ false) do
ConCache.put(:users, user.id, user)
if persist, do: persist_user(user)
user
end
@spec delete_user(T.userid()) :: :ok | :no_user
# Disconnects the user's client and removes all cache entries for them.
def delete_user(userid) do
user = get_user_by_id(userid)
if user do
Client.disconnect(userid, "User cache deletion")
# Brief pause so the disconnect can propagate before entries vanish.
:timer.sleep(100)
ConCache.delete(:users, userid)
ConCache.delete(:users_lookup_name_with_id, user.id)
ConCache.delete(:users_lookup_id_with_name, cachename(user.name))
ConCache.delete(:users_lookup_id_with_email, cachename(user.email))
:ok
else
:no_user
end
end
# Normalizes a name/email for use as a cache key: trimmed and lowercased.
defp cachename(str) do
String.downcase(String.trim(str))
end
end
| 26.577381 | 98 | 0.649496 |
08700b4cb92969c043c50d25a17e42fb36d4d449 | 1,042 | ex | Elixir | lib/mlpp.ex | ayarodionov/xtlsmlpp | bf9d3cf7b3097ed8cc01035cd9aec43379823f99 | [
"MIT"
] | null | null | null | lib/mlpp.ex | ayarodionov/xtlsmlpp | bf9d3cf7b3097ed8cc01035cd9aec43379823f99 | [
"MIT"
] | null | null | null | lib/mlpp.ex | ayarodionov/xtlsmlpp | bf9d3cf7b3097ed8cc01035cd9aec43379823f99 | [
"MIT"
] | null | null | null | # -----------------------------------------------------------------------------------------------
defmodule MLPP do
@moduledoc """
Encoder and decoder for MLLP (Minimal Lower Layer Protocol) framing
as used by HL7 transports.

A framed message is `<SB> payload <EB> <CR>`, with SB = 0x0B,
EB = 0x1C and CR = 0x0D.
See <a href="https://www.hl7.org/documentcenter/public/wg/inm/mllp_transport_specification.PDF">mlpp specification</a>
"""
# Frame delimiter bytes.
@start_block 0x0B
@end_block 0x1C
@carriage_return 0x0D
# -----------------------------------------------------------------------------------------------
@doc """
Wraps a payload in an MLLP frame.
"""
@spec encode(binary()) :: binary()
def encode(payload) do
<<@start_block, payload::binary, @end_block, @carriage_return>>
end
# -----------------------------------------------------------------------------------------------
@doc """
Extracts the payload from an MLLP frame.

Raises if the frame delimiters are missing or malformed.
"""
@spec decode(binary()) :: binary()
def decode(<<@start_block, framed::binary>>) do
payload_size = byte_size(framed) - 2
<<payload::binary-size(payload_size), @end_block, @carriage_return>> = framed
payload
end
end
# -----------------------------------------------------------------------------------------------
# msg = "qwerty"
# msg == MLPP.decode(MLPP.encode(msg))
| 28.944444 | 121 | 0.365643 |
087025bb5a40f2b3da3fb9b75b3ad9fe9192af70 | 353 | exs | Elixir | apps/astarte_realm_management/config/prod.exs | Spidey20202022/astarte | 3950855c592b34363af0cf7f8a921762ce64e512 | [
"Apache-2.0"
] | 1 | 2020-02-04T13:15:22.000Z | 2020-02-04T13:15:22.000Z | apps/astarte_realm_management/config/prod.exs | Spidey20202022/astarte | 3950855c592b34363af0cf7f8a921762ce64e512 | [
"Apache-2.0"
] | 1 | 2020-01-20T09:52:48.000Z | 2020-01-20T09:52:48.000Z | apps/astarte_realm_management/config/prod.exs | Spidey20202022/astarte | 3950855c592b34363af0cf7f8a921762ce64e512 | [
"Apache-2.0"
] | 1 | 2020-02-04T13:15:50.000Z | 2020-02-04T13:15:50.000Z | use Mix.Config
# Cassandra connection is taken from the environment at runtime.
config :cqerl,
cassandra_nodes: [{System.get_env("CASSANDRA_DB_HOST"), System.get_env("CASSANDRA_DB_PORT")}]
# Strip debug-level calls at compile time in production.
config :logger,
compile_time_purge_matching: [
[level_lower_than: :info]
]
# Logfmt console output with per-entry metadata fields.
config :logger, :console,
format: {PrettyLog.LogfmtFormatter, :format},
metadata: [
:realm,
:module,
:function,
:tag
]
087057485a357515c41ce7a08266d9281d39e259 | 178 | ex | Elixir | lib/hexpm_web/views/api/audit_log_view.ex | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 691 | 2017-03-08T09:15:45.000Z | 2022-03-23T22:04:47.000Z | lib/hexpm_web/views/api/audit_log_view.ex | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 491 | 2017-03-07T12:58:42.000Z | 2022-03-29T23:32:54.000Z | lib/hexpm_web/views/api/audit_log_view.ex | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 200 | 2017-03-12T23:03:39.000Z | 2022-03-05T17:55:52.000Z | defmodule HexpmWeb.API.AuditLogView do
use HexpmWeb, :view
# Renders the "show" JSON view by exposing only the whitelisted audit
# log fields.
def render("show", %{audit_log: audit_log}) do
Map.take(audit_log, [:action, :user_agent, :params])
end
end
| 22.25 | 56 | 0.707865 |
0870601398c29e2ce922ba3280862ea77c1c6da1 | 2,289 | exs | Elixir | test/mix/tasks/ecto.migrations_test.exs | dgvncsz0f/ecto | bae06fe650328cc1060c09fe889a2de9a10edb1b | [
"Apache-2.0"
] | null | null | null | test/mix/tasks/ecto.migrations_test.exs | dgvncsz0f/ecto | bae06fe650328cc1060c09fe889a2de9a10edb1b | [
"Apache-2.0"
] | null | null | null | test/mix/tasks/ecto.migrations_test.exs | dgvncsz0f/ecto | bae06fe650328cc1060c09fe889a2de9a10edb1b | [
"Apache-2.0"
] | 1 | 2018-09-21T16:05:29.000Z | 2018-09-21T16:05:29.000Z | defmodule Mix.Tasks.Ecto.MigrationsTest do
use ExUnit.Case
import Mix.Tasks.Ecto.Migrations, only: [run: 3]
import Support.FileHelpers
migrations_path = Path.join([tmp_path(), inspect(Ecto.Migrations), "migrations"])
# Ensure the migrations directory exists before each test; the path is
# captured from the module body via unquote.
setup do
File.mkdir_p!(unquote(migrations_path))
:ok
end
# Minimal fake repo satisfying the interface the mix task expects:
# start/stop lifecycle, an adapter and a config pointing at tmp/.
defmodule Repo do
def start_link(_) do
Process.put(:started, true)
# A long-lived task that exits cleanly when told to, standing in
# for the repo's supervision process.
Task.start_link fn ->
Process.flag(:trap_exit, true)
receive do
{:EXIT, _, :normal} -> :ok
end
end
end
def stop(_pid) do
:ok
end
def __adapter__ do
Ecto.TestAdapter
end
def config do
[priv: "tmp/#{inspect(Ecto.Migrations)}", otp_app: :ecto]
end
end
test "migrations displays the up and down status for the default repo" do
Application.put_env(:ecto, :ecto_repos, [Repo])
# Stubbed migrations listing injected into the task.
migrations = fn _ ->
[
{:up, 20160000000001, "up_migration_1"},
{:up, 20160000000002, "up_migration_2"},
{:up, 20160000000003, "up_migration_3"},
{:down, 20160000000004, "down_migration_1"},
{:down, 20160000000005, "down_migration_2"}
]
end
expected_output = """
Repo: Mix.Tasks.Ecto.MigrationsTest.Repo
Status Migration ID Migration Name
--------------------------------------------------
up 20160000000001 up_migration_1
up 20160000000002 up_migration_2
up 20160000000003 up_migration_3
down 20160000000004 down_migration_1
down 20160000000005 down_migration_2
"""
# The third argument receives the rendered table for assertion.
run [], migrations, fn i -> assert(i == expected_output) end
end
test "migrations displays the up and down status for any given repo" do
migrations = fn _ ->
[
{:up, 20160000000001, "up_migration_1"},
{:down, 20160000000002, "down_migration_1"}
]
end
expected_output = """
Repo: Mix.Tasks.Ecto.MigrationsTest.Repo
Status Migration ID Migration Name
--------------------------------------------------
up 20160000000001 up_migration_1
down 20160000000002 down_migration_1
"""
# Explicitly select the repo with -r instead of relying on :ecto_repos.
run ["-r", to_string(Repo)], migrations, fn i -> assert(i == expected_output) end
end
end
| 26.011364 | 85 | 0.584972 |
08706d3db21290d2ff994432ae403dd01cead8c8 | 1,006 | ex | Elixir | lib/mix/tasks/create_secrets.ex | fletchermoore/cip | 5dd1c6f105d6a4c8c34a9e824cbad2fcf78a0655 | [
"Unlicense"
] | null | null | null | lib/mix/tasks/create_secrets.ex | fletchermoore/cip | 5dd1c6f105d6a4c8c34a9e824cbad2fcf78a0655 | [
"Unlicense"
] | null | null | null | lib/mix/tasks/create_secrets.ex | fletchermoore/cip | 5dd1c6f105d6a4c8c34a9e824cbad2fcf78a0655 | [
"Unlicense"
] | null | null | null | defmodule Mix.Tasks.CreateSecrets do
use Mix.Task
@doc """
Generates `./config/cip.secret.exs` containing freshly generated Phoenix
and Guardian secret keys, printing both secrets to stdout.
"""
def run(_) do
# Mix.Task.run "app.start", []
IO.puts "Generating secrets file."
guardian_secret = gen_secret()
phx_secret = gen_secret()
IO.puts "Guardian: #{guardian_secret}"
IO.puts "Phx: #{phx_secret}"
# Lines of the generated secrets file.
lines = [
"use Mix.Config",
"",
"config :cip, CipWeb.Endpoint,",
" secret_key_base: \"#{phx_secret}\"",
"",
"config :cip, Cip.Guardian,",
" secret_key: \"#{guardian_secret}\""
]
filename = "./config/cip.secret.exs"
# File.write/2 opens, writes and closes in one step, so the file handle
# cannot leak if the write fails (the old open/binwrite/close sequence
# could leave it open).
case File.write(filename, Enum.join(lines, "\n")) do
:ok ->
:ok
{:error, reason} ->
# Fixed: the message previously contained a broken interpolation
# ("#(unknown)") and never showed which file failed.
IO.puts "Error trying to write file #{filename}"
IO.puts reason
end
end
# Generates a 64-character Base64 secret, using the same approach as
# `mix phx.gen.secret` and `mix guardian.gen.secret`.
defp gen_secret do
random_bytes = :crypto.strong_rand_bytes(64)
binary_part(Base.encode64(random_bytes), 0, 64)
end
end
| 25.15 | 86 | 0.594433 |
087071918bf9a77ebf673cf1a4ca5aaaf559a5ed | 133 | ex | Elixir | lib/w/role.ex | ponyatov/w | 3211fd253018f07da382871cf2d08b80574b72e6 | [
"MIT"
] | null | null | null | lib/w/role.ex | ponyatov/w | 3211fd253018f07da382871cf2d08b80574b72e6 | [
"MIT"
] | null | null | null | lib/w/role.ex | ponyatov/w | 3211fd253018f07da382871cf2d08b80574b72e6 | [
"MIT"
] | null | null | null | defmodule W.Role do
use Ecto.Schema
# Maps the "role" table; rows become %W.Role{} structs.
schema "role" do
field :name, :string
# NOTE(review): the purpose of :skr is not evident from this file —
# presumably a numeric role code/order; confirm against callers.
field :skr, :integer
end
# %W.Role{}
end
| 12.090909 | 24 | 0.616541 |
08707c3466cfda5fe0b8f54e209d85b3fa5db40e | 2,938 | ex | Elixir | lib/changeset.ex | Brate/Brcpfcnpj | be2c14e85d0a13dcd837311e18192b02a56841d4 | [
"MIT"
] | null | null | null | lib/changeset.ex | Brate/Brcpfcnpj | be2c14e85d0a13dcd837311e18192b02a56841d4 | [
"MIT"
] | null | null | null | lib/changeset.ex | Brate/Brcpfcnpj | be2c14e85d0a13dcd837311e18192b02a56841d4 | [
"MIT"
] | null | null | null | if Code.ensure_compiled(Ecto) do
defmodule Brcpfcnpj.Changeset do
@moduledoc """
Define funções para serem utilizadas em conjunto com a API de changeset do Ecto.
"""
@type changeset :: Ecto.Changeset.t()
@type error :: {atom, error_message}
@type error_message :: String.t() | {String.t(), Keyword.t()}
@doc """
Validates that the change is a valid CNPJ. Accepts one or more fields.
## Options
* `:message` - the message on failure, defaults to "Invalid Cnpj"
## Examples
validate_cnpj(changeset, :cnpj)
validate_cnpj(changeset, [:cnpj, :other_cnpj])
"""
@spec validate_cnpj(changeset(), atom() | list(), Keyword.t()) :: changeset()
def validate_cnpj(changeset, field), do: validate_cnpj(changeset, field, [])
def validate_cnpj(changeset, field, opts) when is_atom(field) do
validate(changeset, field, fn value ->
if Brcpfcnpj.cnpj_valid?(value) do
[]
else
[{field, message(opts, {"Invalid Cnpj", validation: :cnpj})}]
end
end)
end
# List form: apply the single-field validation to each field in turn.
def validate_cnpj(changeset, fields, opts) when is_list(fields) do
Enum.reduce(fields, changeset, fn field, acc_changeset ->
validate_cnpj(acc_changeset, field, opts)
end)
end
@doc """
Validates that the change is a valid CPF. Accepts one or more fields.
## Options
* `:message` - the message on failure, defaults to "Invalid Cpf"
## Examples
validate_cpf(changeset, :cpf)
validate_cpf(changeset, [:cpf, :cnpj])
"""
@spec validate_cpf(changeset(), atom() | list(), Keyword.t()) :: changeset()
def validate_cpf(changeset, field), do: validate_cpf(changeset, field, [])
def validate_cpf(changeset, field, opts) when is_atom(field) do
validate(changeset, field, fn value ->
if Brcpfcnpj.cpf_valid?(value) do
[]
else
[{field, message(opts, {"Invalid Cpf", validation: :cpf})}]
end
end)
end
# List form: apply the single-field validation to each field in turn.
def validate_cpf(changeset, fields, opts) when is_list(fields) do
Enum.reduce(fields, changeset, fn field, acc_changeset ->
validate_cpf(acc_changeset, field, opts)
end)
end
# Runs `validator` against the change for `field` (skipped when there is
# no change) and merges the resulting errors into the changeset.
# NOTE(review): on success this deletes ALL existing errors for `field`,
# including ones added by other validators — confirm this clobbering is
# intentional.
defp validate(changeset, field, validator) do
%{changes: changes, errors: errors} = changeset
value = Map.get(changes, field)
new = if is_nil(value), do: [], else: validator.(value)
case new do
[] ->
changeset = %{changeset | errors: Keyword.delete(errors, field)}
%{changeset | valid?: Enum.count(changeset.errors) == 0}
[_ | _] -> %{changeset | errors: new ++ errors, valid?: false}
end
end
# Resolves the error message (the :message option wins over `default`)
# and normalizes it to the {text, metadata} tuple shape Ecto stores.
defp message(opts, default) do
opts
|> Keyword.get(:message, default)
|> format_message()
end
defp format_message({_, _} = msg), do: msg
defp format_message(text) when is_binary(text), do: {text, []}
end
end
| 29.38 | 84 | 0.618108 |
087089a034d7d2a6014f88506e9821f7f6f318e8 | 505 | ex | Elixir | apps/web/lib/web_web/router.ex | bitwalker/distillery-umbrella-test | 8ef95eba16f769eacf7a636cabc6dcc7b3d9f7e7 | [
"Apache-2.0"
] | 31 | 2016-11-14T20:44:37.000Z | 2021-12-06T09:27:17.000Z | apps/web/lib/web_web/router.ex | bitwalker/distillery-umbrella-test | 8ef95eba16f769eacf7a636cabc6dcc7b3d9f7e7 | [
"Apache-2.0"
] | 8 | 2016-11-01T17:59:34.000Z | 2021-05-07T15:52:26.000Z | apps/web/lib/web_web/router.ex | bitwalker/distillery-umbrella-test | 8ef95eba16f769eacf7a636cabc6dcc7b3d9f7e7 | [
"Apache-2.0"
] | 11 | 2016-11-01T21:24:13.000Z | 2020-01-24T22:34:52.000Z | defmodule WebWeb.Router do
use WebWeb, :router
# Standard browser pipeline: HTML negotiation, session/flash and the
# default Phoenix security plugs (CSRF + secure headers).
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
# JSON-only pipeline for API routes (currently unused below).
pipeline :api do
plug :accepts, ["json"]
end
scope "/", WebWeb do
pipe_through :browser # Use the default browser stack
get "/", PageController, :index
end
# Other scopes may use custom stacks.
# scope "/api", WebWeb do
# pipe_through :api
# end
end
| 18.703704 | 57 | 0.669307 |
08709e892501eca23201e62ace1b04981bedfb7c | 2,630 | ex | Elixir | lib/phoenix/live_dashboard/live/ets_info_component.ex | Blond11516/phoenix_live_dashboard | 9b70670d4ad1dc49ebe2eef45f2f082ff6b03a1b | [
"MIT"
] | 1 | 2020-04-23T11:36:03.000Z | 2020-04-23T11:36:03.000Z | lib/phoenix/live_dashboard/live/ets_info_component.ex | Blond11516/phoenix_live_dashboard | 9b70670d4ad1dc49ebe2eef45f2f082ff6b03a1b | [
"MIT"
] | null | null | null | lib/phoenix/live_dashboard/live/ets_info_component.ex | Blond11516/phoenix_live_dashboard | 9b70670d4ad1dc49ebe2eef45f2f082ff6b03a1b | [
"MIT"
defmodule Phoenix.LiveDashboard.EtsInfoComponent do
  # Internal LiveComponent that renders a key/value detail table for a
  # single ETS table, or a "does not exist" notice when the table has
  # been deleted between refreshes.
  @moduledoc false

  use Phoenix.LiveDashboard.Web, :live_component

  alias Phoenix.LiveDashboard.SystemInfo

  # Keys fetched from SystemInfo for the ETS table; each one is
  # assigned on the socket and rendered as a row in the table below.
  @info_keys [
    :id,
    :name,
    :size,
    :node,
    :named_table,
    :read_concurrency,
    :write_concurrency,
    :compressed,
    :memory,
    :owner,
    :heir,
    :type,
    :keypos,
    :protection
  ]

  @impl true
  def render(assigns) do
    ~L"""
    <div class="tabular-info">
      <%= if @alive do %>
        <table class="table tabular-table-info-table">
          <tbody>
            <tr><td class="border-top-0">ID</td><td class="border-top-0"><pre><%= @id %></pre></td></tr>
            <tr><td>Name</td><td><pre><%= @name %></pre></td></tr>
            <tr><td>Size</td><td><pre><%= @size %></pre></td></tr>
            <tr><td>Node</td><td><pre><%= @node %></pre></td></tr>
            <tr><td>Named table</td><td><pre><%= @named_table %></pre></td></tr>
            <tr><td>Read concurrency</td><td><pre><%= @read_concurrency %></pre></td></tr>
            <tr><td>Write concurrency</td><td><pre><%= @write_concurrency %></pre></td></tr>
            <tr><td>Compressed</td><td><pre><%= @compressed %></pre></td></tr>
            <tr><td>Memory</td><td><pre><%= @memory %></pre></td></tr>
            <tr><td>Owner</td><td><pre><%= @owner %></pre></td></tr>
            <tr><td>Heir</td><td><pre><%= @heir %></pre></td></tr>
            <tr><td>Type</td><td><pre><%= @type %></pre></td></tr>
            <tr><td>Keypos</td><td><pre><%= @keypos %></pre></td></tr>
            <tr><td>Protection</td><td><pre><%= @protection %></pre></td></tr>
          </tbody>
        </table>
      <% else %>
        <div class="tabular-info-not-exists mt-1 mb-3">ETS does not exist.</div>
      <% end %>
    </div>
    """
  end

  @impl true
  def mount(socket) do
    # Pre-assign every info key as nil so the template can render
    # before update/2 fetches the real values.
    {:ok, Enum.reduce(@info_keys, socket, &assign(&2, &1, nil))}
  end

  @impl true
  def update(%{id: ref} = assigns, socket) do
    {:ok,
     socket
     |> assign(assigns)
     |> assign(ref: ref)
     |> assign_info()}
  end

  # Fetches the ETS table's info from the (possibly remote) node and
  # assigns each formatted value; marks the table as dead when the
  # lookup fails (e.g. the table was deleted).
  defp assign_info(%{assigns: assigns} = socket) do
    case SystemInfo.fetch_ets_info(socket.assigns.node, assigns.ref) do
      {:ok, info} ->
        Enum.reduce(info, socket, fn {key, val}, acc ->
          assign(acc, key, format_info(key, val, assigns.live_dashboard_path))
        end)
        |> assign(alive: true)

      :error ->
        assign(socket, alive: false)
    end
  end

  # :memory is reported by ERTS in words, so format it as a byte size;
  # every other value goes through the generic dashboard formatter.
  defp format_info(:memory, val, _live_dashboard_path), do: format_words(val)
  defp format_info(_key, val, live_dashboard_path), do: format_value(val, live_dashboard_path)
end
| 31.686747 | 104 | 0.538783 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.