hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
087d93aab0dc54cd833e2d72c1bfdfab9adb29ac | 1,258 | ex | Elixir | lib/vega/board_overview.ex | Fudoshiki/vega | 0577024afc734933048645976705784512fbc1f4 | [
"MIT"
] | 4 | 2020-03-22T22:12:29.000Z | 2020-07-01T22:32:01.000Z | lib/vega/board_overview.ex | Fudoshiki/vega | 0577024afc734933048645976705784512fbc1f4 | [
"MIT"
] | 3 | 2021-03-10T11:53:41.000Z | 2021-10-17T11:18:54.000Z | lib/vega/board_overview.ex | Fudoshiki/vega | 0577024afc734933048645976705784512fbc1f4 | [
"MIT"
] | 3 | 2020-03-30T19:03:23.000Z | 2022-01-17T20:21:42.000Z | defmodule Vega.BoardOverview do
@moduledoc """
The module is used to fetch the title of the boards connected to the user. It only fetches some attributes (title, background color and the id) of each board.
"""
@collection "boards"
alias Vega.User
alias Vega.Board
@doc """
Fetch all boards which are connected to the user:
* personal boards
* visited boards
* starred boards
"""
# No user, no boards: empty personal/visited/starred/closed lists.
def fetch_all_for_user(nil), do: {[], [], [], []}
# Loads every board the user is a member of (title, options and closed
# flag only) and partitions them into open ("personal") and closed
# boards. Visited and starred boards are not tracked here, hence the two
# empty lists in the middle of the result tuple.
def fetch_all_for_user(%User{_id: id}) do
  {open_boards, closed_boards} =
    :mongo
    |> Mongo.find(@collection, %{"members.id" => id},
      projection: %{title: 1, options: 1, closed: 1}
    )
    |> Enum.to_list()
    |> transform()
    |> Enum.split_with(&Board.is_open?/1)

  {open_boards, [], [], closed_boards}
end
# Returns the boards the user is a member of, fetching only the
# attributes named in `projection` (defaults to the title).
def fetch_personal_boards(%User{_id: id}, projection \\ %{title: 1}) do
  selector = %{"members.id" => id}

  :mongo
  |> Mongo.find(@collection, selector, projection: projection)
  |> Enum.to_list()
  |> transform()
end
# Applies the single-document transform to every board in the list.
defp transform(boards) when is_list(boards), do: Enum.map(boards, &transform/1)
# Mirrors the BSON `_id` as a string under the `:id` key so callers can
# use it directly (e.g. in URLs) without re-encoding the ObjectId.
defp transform(%{"_id" => id} = board),
  do: Map.put(board, :id, BSON.ObjectId.encode!(id))
end
| 27.347826 | 160 | 0.618442 |
087d993fc62c2d456c8002f6491cf2482dcb099c | 900 | ex | Elixir | clients/service_management/lib/google_api/service_management/v1/metadata.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/service_management/lib/google_api/service_management/v1/metadata.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/service_management/lib/google_api/service_management/v1/metadata.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceManagement.V1 do
  @moduledoc """
  API client metadata for GoogleApi.ServiceManagement.V1.
  """

  # Revision of the Google Discovery document this client was generated from.
  @discovery_revision "20200605"

  @doc "Returns the Discovery-document revision used to generate this client."
  def discovery_revision, do: @discovery_revision
end
| 33.333333 | 74 | 0.763333 |
087dbc24a819b6c2d978eabf7ae8a3041c62db8c | 4,413 | ex | Elixir | clients/testing/lib/google_api/testing/v1/model/android_instrumentation_test.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/testing/lib/google_api/testing/v1/model/android_instrumentation_test.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/testing/lib/google_api/testing/v1/model/android_instrumentation_test.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Testing.V1.Model.AndroidInstrumentationTest do
@moduledoc """
A test of an Android application that can control an Android component independently of its normal lifecycle. Android instrumentation tests run an application APK and test APK inside the same process on a virtual or physical AndroidDevice. They also specify a test runner class, such as com.google.GoogleTestRunner, which can vary on the specific instrumentation framework chosen. See <http://developer.android.com/tools/testing/testing_android.html> for more information on types of Android tests.
## Attributes
- appApk (FileReference): The APK for the application under test. Defaults to: `null`.
- appBundle (AppBundle): A multi-apk app bundle for the application under test. Defaults to: `null`.
- appPackageId (String.t): The java package for the application under test. The default value is determined by examining the application's manifest. Defaults to: `null`.
- orchestratorOption (String.t): The option of whether running each test within its own invocation of instrumentation with Android Test Orchestrator or not. ** Orchestrator is only compatible with AndroidJUnitRunner version 1.0 or higher! ** Orchestrator offers the following benefits: - No shared state - Crashes are isolated - Logs are scoped per test See <https://developer.android.com/training/testing/junit-runner.html#using-android-test-orchestrator> for more information about Android Test Orchestrator. If not set, the test will be run without the orchestrator. Defaults to: `null`.
- Enum - one of [ORCHESTRATOR_OPTION_UNSPECIFIED, USE_ORCHESTRATOR, DO_NOT_USE_ORCHESTRATOR]
- testApk (FileReference): Required. The APK containing the test code to be executed. Defaults to: `null`.
- testPackageId (String.t): The java package for the test to be executed. The default value is determined by examining the application's manifest. Defaults to: `null`.
- testRunnerClass (String.t): The InstrumentationTestRunner class. The default value is determined by examining the application's manifest. Defaults to: `null`.
- testTargets ([String.t]): Each target must be fully qualified with the package name or class name, in one of these formats: - \"package package_name\" - \"class package_name.class_name\" - \"class package_name.class_name#method_name\" If empty, all targets in the module will be run. Defaults to: `null`.
"""
# `use` injects the `field/2` macro used below, as well as the `decode/2`
# called by the Poison.Decoder impl further down this file (neither is
# defined in this module, so they must come from ModelBase).
use GoogleApi.Gax.ModelBase
# Struct type mirroring the JSON model; nested messages use their own
# model structs, scalar fields are left as any() by the generator.
@type t :: %__MODULE__{
:appApk => GoogleApi.Testing.V1.Model.FileReference.t(),
:appBundle => GoogleApi.Testing.V1.Model.AppBundle.t(),
:appPackageId => any(),
:orchestratorOption => any(),
:testApk => GoogleApi.Testing.V1.Model.FileReference.t(),
:testPackageId => any(),
:testRunnerClass => any(),
:testTargets => list(any())
}
# Field declarations for (de)serialization. `as:` presumably routes the
# nested JSON object through that model module -- see GoogleApi.Gax.ModelBase.
field(:appApk, as: GoogleApi.Testing.V1.Model.FileReference)
field(:appBundle, as: GoogleApi.Testing.V1.Model.AppBundle)
field(:appPackageId)
field(:orchestratorOption)
field(:testApk, as: GoogleApi.Testing.V1.Model.FileReference)
field(:testPackageId)
field(:testRunnerClass)
field(:testTargets, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Testing.V1.Model.AndroidInstrumentationTest do
  # Decoding is delegated to the generated decode/2 on the model module.
  def decode(value, options),
    do: GoogleApi.Testing.V1.Model.AndroidInstrumentationTest.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Testing.V1.Model.AndroidInstrumentationTest do
  # Encoding is shared across all generated models via ModelBase.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 63.042857 | 603 | 0.755495 |
087dd10d4d2da18d278ba790f2e243101c6c6450 | 3,582 | exs | Elixir | test/vintage_net/name_resolver_test.exs | danielspofford/vintage_net | 2fc6f251069aa0ae283524aed6714991c0137773 | [
"Apache-2.0"
] | null | null | null | test/vintage_net/name_resolver_test.exs | danielspofford/vintage_net | 2fc6f251069aa0ae283524aed6714991c0137773 | [
"Apache-2.0"
] | null | null | null | test/vintage_net/name_resolver_test.exs | danielspofford/vintage_net | 2fc6f251069aa0ae283524aed6714991c0137773 | [
"Apache-2.0"
] | null | null | null | defmodule VintageNet.NameResolverTest do
use VintageNetTest.Case
alias VintageNet.NameResolver
import ExUnit.CaptureLog
@resolvconf_path "fake_resolv.conf"
# See resolv_conf_test.exs for more involved testing of the configuration file
# The purpose of this set of tests is to exercise the GenServer and file writing
# aspects of NameResolver.
setup do
# Run the tests with the application stopped.
capture_log(fn ->
Application.stop(:vintage_net)
end)
on_exit(fn -> Application.start(:vintage_net) end)
:ok
end
test "empty resolvconf is empty", context do
in_tmp(context.test, fn ->
NameResolver.start_link(resolvconf: @resolvconf_path)
assert File.exists?(@resolvconf_path)
assert File.read!(@resolvconf_path) == ""
NameResolver.stop()
end)
end
test "adding one interface", context do
in_tmp(context.test, fn ->
NameResolver.start_link(resolvconf: @resolvconf_path)
NameResolver.setup("eth0", "example.com", ["1.1.1.1", "8.8.8.8"])
contents = File.read!(@resolvconf_path)
assert contents == """
search example.com
nameserver 1.1.1.1
nameserver 8.8.8.8
"""
NameResolver.clear("eth0")
contents = File.read!(@resolvconf_path)
assert contents == ""
NameResolver.stop()
end)
end
test "adding two interfaces", context do
in_tmp(context.test, fn ->
NameResolver.start_link(resolvconf: @resolvconf_path)
NameResolver.setup("eth0", "example.com", ["1.1.1.1", "8.8.8.8"])
NameResolver.setup("wlan0", "example2.com", ["1.1.1.2", "8.8.8.9"])
contents = File.read!(@resolvconf_path)
assert contents == """
search example.com
search example2.com
nameserver 1.1.1.1
nameserver 8.8.8.8
nameserver 1.1.1.2
nameserver 8.8.8.9
"""
NameResolver.clear("eth0")
contents = File.read!(@resolvconf_path)
assert contents == """
search example2.com
nameserver 1.1.1.2
nameserver 8.8.8.9
"""
NameResolver.stop()
end)
end
test "clearing all interfaces", context do
in_tmp(context.test, fn ->
NameResolver.start_link(resolvconf: @resolvconf_path)
NameResolver.setup("eth0", "example.com", ["1.1.1.1", "8.8.8.8"])
NameResolver.setup("wlan0", "example2.com", ["1.1.1.2", "8.8.8.9"])
NameResolver.clear_all()
assert File.read!(@resolvconf_path) == ""
NameResolver.stop()
end)
end
test "tuple IP addresses", context do
in_tmp(context.test, fn ->
NameResolver.start_link(resolvconf: @resolvconf_path)
NameResolver.setup("eth0", "example.com", [{1, 1, 1, 1}])
contents = File.read!(@resolvconf_path)
assert contents == """
search example.com
nameserver 1.1.1.1
"""
NameResolver.clear("eth0")
contents = File.read!(@resolvconf_path)
assert contents == ""
NameResolver.stop()
end)
end
test "no search domain", context do
in_tmp(context.test, fn ->
NameResolver.start_link(resolvconf: @resolvconf_path)
NameResolver.setup("eth0", nil, [{1, 1, 1, 1}])
contents = File.read!(@resolvconf_path)
assert contents == """
nameserver 1.1.1.1
"""
NameResolver.clear("eth0")
contents = File.read!(@resolvconf_path)
assert contents == ""
NameResolver.stop()
end)
end
end
| 27.136364 | 82 | 0.606924 |
087dd2ad0d5ad3069abd87739345e514ca2d31b3 | 3,865 | ex | Elixir | clients/monitoring/lib/google_api/monitoring/v3/model/service_level_objective.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/monitoring/lib/google_api/monitoring/v3/model/service_level_objective.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/monitoring/lib/google_api/monitoring/v3/model/service_level_objective.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Monitoring.V3.Model.ServiceLevelObjective do
@moduledoc """
A Service-Level Objective (SLO) describes a level of desired good service. It consists of a service-level indicator (SLI), a performance goal, and a period over which the objective is to be evaluated against that goal. The SLO can use SLIs defined in a number of different manners. Typical SLOs might include "99% of requests in each rolling week have latency below 200 milliseconds" or "99.5% of requests in each calendar month return successfully."
## Attributes
* `calendarPeriod` (*type:* `String.t`, *default:* `nil`) - A calendar period, semantically "since the start of the current ". At this time, only DAY, WEEK, FORTNIGHT, and MONTH are supported.
* `displayName` (*type:* `String.t`, *default:* `nil`) - Name used for UI elements listing this SLO.
* `goal` (*type:* `float()`, *default:* `nil`) - The fraction of service that must be good in order for this objective to be met. 0 < goal <= 0.999.
* `name` (*type:* `String.t`, *default:* `nil`) - Resource name for this ServiceLevelObjective. The format is: projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID]/serviceLevelObjectives/[SLO_NAME]
* `rollingPeriod` (*type:* `String.t`, *default:* `nil`) - A rolling time period, semantically "in the past ". Must be an integer multiple of 1 day no larger than 30 days.
* `serviceLevelIndicator` (*type:* `GoogleApi.Monitoring.V3.Model.ServiceLevelIndicator.t`, *default:* `nil`) - The definition of good service, used to measure and calculate the quality of the Service's performance with respect to a single aspect of service quality.
* `userLabels` (*type:* `map()`, *default:* `nil`) - Labels which have been used to annotate the service-level objective. Label keys must start with a letter. Label keys and values may contain lowercase letters, numbers, underscores, and dashes. Label keys and values have a maximum length of 63 characters, and must be less than 128 bytes in size. Up to 64 label entries may be stored. For labels which do not have a semantic value, the empty string may be supplied for the label value.
"""
# `use` injects the `field/2` macro used below, as well as the `decode/2`
# called by the Poison.Decoder impl further down this file (neither is
# defined in this module, so they must come from ModelBase).
use GoogleApi.Gax.ModelBase
# Struct type mirroring the JSON model; every field is optional (| nil).
@type t :: %__MODULE__{
:calendarPeriod => String.t() | nil,
:displayName => String.t() | nil,
:goal => float() | nil,
:name => String.t() | nil,
:rollingPeriod => String.t() | nil,
:serviceLevelIndicator => GoogleApi.Monitoring.V3.Model.ServiceLevelIndicator.t() | nil,
:userLabels => map() | nil
}
# Field declarations for (de)serialization. `as:` presumably routes the
# nested JSON object through that model module -- see GoogleApi.Gax.ModelBase.
field(:calendarPeriod)
field(:displayName)
field(:goal)
field(:name)
field(:rollingPeriod)
field(:serviceLevelIndicator, as: GoogleApi.Monitoring.V3.Model.ServiceLevelIndicator)
field(:userLabels, type: :map)
end
defimpl Poison.Decoder, for: GoogleApi.Monitoring.V3.Model.ServiceLevelObjective do
  # Decoding is delegated to the generated decode/2 on the model module.
  def decode(value, options),
    do: GoogleApi.Monitoring.V3.Model.ServiceLevelObjective.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Monitoring.V3.Model.ServiceLevelObjective do
  # Encoding is shared across all generated models via ModelBase.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 59.461538 | 491 | 0.727296 |
087e2f39d9725102ac63e79a75b028c530564779 | 398 | exs | Elixir | test/nabo/compilers/markdown_test.exs | bechanh5/nabo | 3e406a5875622a8f702649ced30b39a296039f71 | [
"MIT"
] | null | null | null | test/nabo/compilers/markdown_test.exs | bechanh5/nabo | 3e406a5875622a8f702649ced30b39a296039f71 | [
"MIT"
] | null | null | null | test/nabo/compilers/markdown_test.exs | bechanh5/nabo | 3e406a5875622a8f702649ced30b39a296039f71 | [
"MIT"
] | null | null | null | defmodule Nabo.Compilers.MarkdownTest do
use ExUnit.Case, async: true
test "from_string/1 with meta string" do
string =
~s(
{"title":"Hello","slug":"hello","datetime":"2017-01-01T00:00:00Z"}
---
Welcome to your first Nabo post
---
This is the content of your first Nabo post
)
Nabo.Compilers.Markdown.compile(string)
end
end
| 24.875 | 75 | 0.605528 |
087e4c7570f34728e55520819439af991b88fe28 | 4,948 | exs | Elixir | test/protox/message_test.exs | EasyMile/protox | 6b58b7aaba00b2b5faf1d7e8e2e226b9d2b72a7b | [
"MIT"
] | 18 | 2017-03-02T16:46:10.000Z | 2019-12-07T03:31:16.000Z | test/protox/message_test.exs | EasyMile/protox | 6b58b7aaba00b2b5faf1d7e8e2e226b9d2b72a7b | [
"MIT"
] | 8 | 2017-09-21T21:45:33.000Z | 2020-05-13T18:54:01.000Z | test/protox/message_test.exs | EasyMile/protox | 6b58b7aaba00b2b5faf1d7e8e2e226b9d2b72a7b | [
"MIT"
] | 2 | 2018-02-26T13:13:08.000Z | 2020-05-12T07:01:36.000Z | defmodule Protox.MessageTest do
use ExUnit.Case
test "Protobuf 2, replace only set scalar fields" do
r1 = %Protobuf2{a: 0, s: :ONE}
r2 = %Protobuf2{a: nil, s: :TWO}
r3 = %Protobuf2{a: 1, s: nil}
assert Protox.Message.merge(r1, r2) == %Protobuf2{a: 0, s: :TWO}
assert Protox.Message.merge(r1, r3) == %Protobuf2{a: 1, s: :ONE}
assert Protox.Message.merge(r2, r1) == %Protobuf2{a: 0, s: :ONE}
assert Protox.Message.merge(r3, r1) == %Protobuf2{a: 0, s: :ONE}
end
test "Replace scalar fields" do
r1 = %Required{a: 3, b: 4}
r2 = %Required{a: 5, b: 7}
assert Protox.Message.merge(r1, r2) == %Required{a: 5, b: 7}
assert Protox.Message.merge(r2, r1) == %Required{a: 3, b: 4}
end
test "Concatenate repeated fields" do
m1 = %Sub{g: [], j: [4, 5, 6]}
m2 = %Sub{g: [10, 20], j: [1, 2, 3]}
assert Protox.Message.merge(m1, m2) == %Sub{g: [10, 20], j: [4, 5, 6, 1, 2, 3]}
assert Protox.Message.merge(m2, m1) == %Sub{g: [10, 20], j: [1, 2, 3, 4, 5, 6]}
end
test "Recursively merge messages" do
m1 = %Msg{msg_e: true, msg_f: %Sub{g: [], j: [4, 5, 6]}}
m2 = %Msg{msg_e: false, msg_f: %Sub{g: [10, 20], j: [1, 2, 3]}}
assert Protox.Message.merge(m1, m2) == %Msg{
msg_e: true,
msg_f: %Sub{g: [10, 20], j: [4, 5, 6, 1, 2, 3]}
}
assert Protox.Message.merge(m2, m1) == %Msg{
msg_e: true,
msg_f: %Sub{g: [10, 20], j: [1, 2, 3, 4, 5, 6]}
}
end
test "Overwrite nil messages" do
m1 = %Msg{msg_f: nil}
m2 = %Msg{msg_f: %Sub{g: [10, 20], j: [1, 2, 3]}}
assert Protox.Message.merge(m1, m2) == %Msg{
msg_f: %Sub{g: [10, 20], j: [1, 2, 3]}
}
end
test "Don't overwrite with nil messages" do
m1 = %Msg{msg_f: nil}
m2 = %Msg{msg_f: %Sub{g: [10, 20], j: [1, 2, 3]}}
assert Protox.Message.merge(m2, m1) == %Msg{
msg_f: %Sub{g: [10, 20], j: [1, 2, 3]}
}
end
test "Don't overwrite oneof with nil" do
m1 = %Msg{msg_m: {:msg_o, %Sub{k: 2, j: [4, 5, 6]}}}
m2 = %Msg{msg_m: nil}
assert Protox.Message.merge(m1, m2) == %Msg{msg_m: {:msg_o, %Sub{k: 2, j: [4, 5, 6]}}}
end
test "Overwrite nil oneof" do
m1 = %Msg{msg_m: {:msg_o, %Sub{k: 2, j: [4, 5, 6]}}}
m2 = %Msg{msg_m: nil}
assert Protox.Message.merge(m2, m1) == %Msg{msg_m: {:msg_o, %Sub{k: 2, j: [4, 5, 6]}}}
end
test "Recursively merge messages in oneof" do
m1 = %Msg{msg_m: {:msg_o, %Sub{k: 2, j: [4, 5, 6]}}}
m2 = %Msg{msg_m: {:msg_o, %Sub{k: 3, j: [1, 2, 3]}}}
assert Protox.Message.merge(m1, m2) == %Msg{
msg_m: {:msg_o, %Sub{k: 3, j: [4, 5, 6, 1, 2, 3]}}
}
assert Protox.Message.merge(m2, m1) == %Msg{
msg_m: {:msg_o, %Sub{k: 2, j: [1, 2, 3, 4, 5, 6]}}
}
end
test "Overwrite non-messages oneof" do
m1 = %Msg{msg_m: {:msg_n, :FOO}}
m2 = %Msg{msg_m: {:msg_n, :BAR}}
assert Protox.Message.merge(m1, m2) == %Msg{
msg_m: {:msg_n, :BAR}
}
assert Protox.Message.merge(m2, m1) == %Msg{
msg_m: {:msg_n, :FOO}
}
end
test "Merge scalar maps" do
m1 = %Msg{msg_k: %{1 => "a", 2 => "b", 100 => "c"}}
m2 = %Msg{msg_k: %{1 => "x", 2 => "y", 101 => "z"}}
assert Protox.Message.merge(m1, m2) == %Msg{
msg_k: %{1 => "x", 2 => "y", 100 => "c", 101 => "z"}
}
assert Protox.Message.merge(m2, m1) == %Msg{
msg_k: %{1 => "a", 2 => "b", 100 => "c", 101 => "z"}
}
end
test "Merge messages maps" do
m1 = %Upper{
msg_map: %{
"1" => %Msg{msg_e: true, msg_f: %Sub{g: [], j: [4, 5, 6]}},
"2" => %Msg{msg_d: :FOO, msg_m: {:msg_n, "FOO"}},
"100" => %Msg{msg_a: 33}
}
}
m2 = %Upper{
msg_map: %{
"1" => %Msg{msg_e: false, msg_f: %Sub{g: [10, 20], j: [1, 2, 3]}},
"2" => %Msg{msg_d: :BAR, msg_m: {:msg_o, %Sub{}}},
"101" => %Msg{msg_a: 44}
}
}
assert Protox.Message.merge(m1, m2) == %Upper{
msg_map: %{
"1" => %Msg{msg_e: true, msg_f: %Sub{g: [10, 20], j: [4, 5, 6, 1, 2, 3]}},
"2" => %Msg{msg_d: :BAR, msg_m: {:msg_o, %Sub{}}},
"100" => %Msg{msg_a: 33},
"101" => %Msg{msg_a: 44}
}
}
assert Protox.Message.merge(m2, m1) == %Upper{
msg_map: %{
"1" => %Msg{msg_e: true, msg_f: %Sub{g: [10, 20], j: [1, 2, 3, 4, 5, 6]}},
"2" => %Msg{msg_d: :BAR, msg_m: {:msg_n, "FOO"}},
"100" => %Msg{msg_a: 33},
"101" => %Msg{msg_a: 44}
}
}
end
test "Merge with nil" do
m = %Msg{msg_k: %{1 => "a", 2 => "b", 100 => "c"}}
assert Protox.Message.merge(m, nil) == m
assert Protox.Message.merge(nil, m) == m
assert Protox.Message.merge(nil, nil) == nil
end
end
| 30.732919 | 90 | 0.473525 |
087e64b7c0fece0c53d08f6bf2d756df314c2656 | 970 | ex | Elixir | lib/accent/scopes/version.ex | samuelnygaard/accent | db753badab1d885397b48a42ac3fb43024345467 | [
"BSD-3-Clause"
] | 1 | 2020-07-01T16:08:34.000Z | 2020-07-01T16:08:34.000Z | lib/accent/scopes/version.ex | samuelnygaard/accent | db753badab1d885397b48a42ac3fb43024345467 | [
"BSD-3-Clause"
] | 2 | 2021-09-28T05:37:00.000Z | 2022-02-26T10:10:15.000Z | lib/accent/scopes/version.ex | samuelnygaard/accent | db753badab1d885397b48a42ac3fb43024345467 | [
"BSD-3-Clause"
] | null | null | null | defmodule Accent.Scopes.Version do
import Ecto.Query, only: [from: 2]
@doc """
## Examples
iex> Accent.Scopes.Version.from_project(Accent.Version, "test")
#Ecto.Query<from v0 in Accent.Version, where: v0.project_id == ^"test">
iex> Accent.Scopes.Version.from_project(Accent.Version, nil)
Accent.Version
"""
@spec from_project(Ecto.Queryable.t(), any()) :: Ecto.Queryable.t()
def from_project(query, nil), do: query
def from_project(query, project_id) do
from(query, where: [project_id: ^project_id])
end
@doc """
## Examples
iex> Accent.Scopes.Version.from_tag(Accent.Version, "test")
#Ecto.Query<from v0 in Accent.Version, where: v0.tag == ^"test">
iex> Accent.Scopes.Version.from_tag(Accent.Version, nil)
Accent.Version
"""
@spec from_tag(Ecto.Queryable.t(), any()) :: Ecto.Queryable.t()
def from_tag(query, nil), do: query
def from_tag(query, tag) do
from(query, where: [tag: ^tag])
end
end
| 28.529412 | 75 | 0.673196 |
087e9a5a2a3e7c9b9cc22ec49d502120ead8ac4d | 11,802 | ex | Elixir | hangman/b2/lib/b2_web/live/game/figure.ex | neal-bpm/TinyHangman | c2b9bcfe9b3c6d918e83271dc943f09706693b4e | [
"MIT"
] | null | null | null | hangman/b2/lib/b2_web/live/game/figure.ex | neal-bpm/TinyHangman | c2b9bcfe9b3c6d918e83271dc943f09706693b4e | [
"MIT"
] | null | null | null | hangman/b2/lib/b2_web/live/game/figure.ex | neal-bpm/TinyHangman | c2b9bcfe9b3c6d918e83271dc943f09706693b4e | [
"MIT"
] | null | null | null | defmodule B2Web.Live.Game.Figure do
use B2Web, :live_component
def render(assigns) do
~H"""
<div class="figure">
<svg id="drawing"
xmlns="http://www.w3.org/2000/svg"
x-height="155mm"
x-width="130mm"
version="1.1"
viewBox="0 0 460.9899 548.03535">
<g transform="translate(-27.2 -220)">
<!-- 1 -->
<path id="rope"
class={hide_if_left_gt(@tally.turns_left, 6)}
style="fill: #a28d5d; stroke: #a28d5d"
d="m392 243c0.0695 21.3-1.32
43.1 0.826 63.9 0.816 14.4-9.87 35.9-13.3 21.9-0.538-15.6
1.04-31.7-0.836-46.9-1.52-22.5 6.28-32.3 13.3-38.9z"/>
<!-- 2 -->
<path id="brace" style="fill: #c27207; stroke: #c27207"
d="m264 261c-8.46 10.1-15 23.2-26.4 28.4-4.63 8.07-10.4
14.4-15.8 21.4-3.39 1.66-7.4 13.8-10.7 10.1-2.03
7.56-13.3 13.4-16.3 24.1-6.12 5.6-9.68 17.5-24.6-6.21
8.21-9.9 16.4-20.6 26.2-27.3 3.13-13.5 13.3-16.4
18.2-27.4 9.05-4.29 15-16.7 23-24.6 7.56-7.78 8.4-25.1
26.4 1.5z"/>
<!-- 3 -->
<path id="topbar"
style="fill: #c27207; stroke: #c27207;"
d="m153 242c-2.85 7.47 15-4.18 19.9 1.77 16.8-0.55
33.6-0.496 50.3 0.4 17.4-4.26 35.2-5.2 53-3.1 14.7-0.0166
28.3-3.66 42.9-2.61 13.2-1.21 26.6-4.69 39.8-1.7
16.6-4.35 34-1.58 50.8-4.67 6.82 3.47 19.4-5.5 25.4-0.814
7.54-6.27 8.07-1.21 6.51 0.087 11.2-6.69 24-0.784
33.3-4.21 6.26-4.06 7.15-1.99 5.75-0.117 11.8-4.39 7.23
7.6-0.809 8.74-10 9.43-20.6 16.4-33.6 17.3-3.15 2.25-12.2
7.5-10.7-0.601-12.1 6.85-19.2-3.05-30.5 5.64-9.12
1.37-15.3-4.47-25.7-1.47-13.9 1.89-27.7 4.61-41.9
3.98-14.5 0.434-29-1.27-43.4 1.52-12.3 2.38-24.7 0.756-37
0.0478-17.9 0.437-36 1.92-53.2 6.96-5.15-13.5-22.8
2.74-32.7-2.81-14.7 4.16-31.9 2.45-45.2 2.26-5.27
5.56-16.3 2.32-5.49-2.97 10.9-7.8 21.7-15.8 32.7-23.7z"/>
<!-- 4 -->
<path id="post"
style="fill: #c27207; stroke: #c27207"
d="m192 220c2.46 52.1 9.22 104 4.99 156 3.68 33.9 4.84 68.4
7.03 103-1.16 15.9 6.7 56.1 0.805 60.4 5.24 23.4 1.75 76.8
8.97 110 0.27 34.2 8.28 72.3-24.9
76.8-2.52-50.9-5.76-103-4.18-153-9.74-39.9-4.41-83.3-10.5-124
3.21-42.1-3.39-90-4.19-136-1.11-44-16.1-83.5 22.1-93.1z"/>
<!-- 5 -->
<path id="ground"
style="fill: #a28d5d; stroke: #a28d5d"
d="m62.1 740c6.71-4.98 8.63-1.57 6.68-2.12 15.2-0.241
32.4-16.2 43.4-13.5 17.4-2.14 34.3-13.3 51.6-10.9 14.2-10.8
35.4-8.34 53.5-11 19.5-2.67 39 0.209 58.1-6.5 16.4 1.8
34.1-2.28 49.8 1.11 17.6-1.56 36.8-7.35 52.8-2.24 19.2-4.7
36.1-0.348 55-0.635 17.4 4.18 35.2 4.18 53 4.87 6.89
3.15-6.83 7.56-9.39 11l-18 13c-4.92-5.01-21.2
8.32-17.8-4.02-16.6 9.05-34.3-2.65-50.3-1.14-18.1 4.6-37.5
3.21-55.2-2.47-22.5 3.33-46.5 0.805-68
6.81-17.4-3.99-33.6-3.13-50.5 2.25-14.8-4.4-31.5 5.54-43.3
5.39-9.97-8.24-32.3 6.26-47.3 5.7-16.3 3.96-32.7 8.52-47.9
15.6 8.82-9.11-12.9 6.34-19 4.49-5.77-0.753-16.2 10.3-13.5
3.66-5.76 2.36-29.4 14.5-13.5 2.99 9.85-7.48 19.7-14.9
29.6-22.4z"/>
<!-- 6 -->
<path id="arm1"
class={hide_if_left_gt(@tally.turns_left, 0)}
style="fill: #777; stroke: #777; stroke-width:3.5;"
d="m324 512c8.62-14.1 21.6-24.6 33.8-35.3-0.693-4.06
7.34-5.86 2.11-7.06 5.35-8.79 12.7-18.7 23.7-20.1
5.52-0.197-7.1 5.22-1.45 6.29-8.4 12.7-18 25.2-30.9
33.5-5.91 10.6-15.5 19-27.2 22.6z"/>
<!-- 7 -->
<path id="arm2"
class={hide_if_left_gt(@tally.turns_left, 1)}
style="fill: #777; stroke: #777; stroke-width: 3.5;"
d="m404 441c-2.77 11 12.8 8.21 14.7 17.3 5.98 4.84 15.2 7.98
17 15.4 6.58 4 10.3 14.8 0.993 17.9-9.24
5.71-14.3-6.87-17.9-13.2-7.1-2.4-16-14.6-18.3-16.8-3.98-3.2-9.01-7.24-11.2-10.9
3.97-4.36 10.1-5.98 14.7-9.59z"/>
<!-- 8 -->
<path id="leg1"
class={hide_if_left_gt(@tally.turns_left, 2)}
style="fill: #777; stroke: #777; stroke-width: 3.5;"
d="m390 567c0.232 6.64-9.96 13-13.5 19.6-3.54 7.9-10.2
12.3-13.7 20.2-4.05 11-5.5 25-17.1 30.9-4.53 4.61-10.3
1.26-5.7-4.31 5.68-12.7 10.4-26 19.1-37 5.83-7.71 11.1-16
18.1-22.8 3.83-3 8.35-4.96 12.9-6.48z"/>
<!-- 9 -->
<path id="leg2"
class={hide_if_left_gt(@tally.turns_left, 3)}
style="fill: #777; stroke: #777; stroke-width: 3.5;"
d="m399 568c-3.45 8.95 2.33 17.4 3.11 26.1-2.56 8.71 2.93
16.9 6.7 24.5 1.69 7.91 16 17.3 4.05 23.1-6.17
3.54-17.5-1.07-13.1-9.18-6.86-2.85-9.21-12.5-11.1-18-1.1-11.3-3.95-22.5-5.49-33.7
0.64-7.97 10.3-9.48 15.9-12.9z"/>
<!-- 10 -->
<path id="body"
class={hide_if_left_gt(@tally.turns_left, 4)}
style="fill: #777; stroke: #777; stroke-width: 3.5;"
d="m397 430c-8.22 13.8 0.942 30.2-3.8 44.8 0.446 18.4 0.722
36.9-1.04 55.2-0.36 13.7 3.83 28.3-2.33 41.3-5.4
9.94-17.8 1.96-12.2-7.45-1.23-8.65 0.92-17.8
0.272-26.7-0.977-9.11 0.801-18.4
0.338-27.6-0.679-17.8-0.0439-35.6 0.649-53.4
0.58-8.26-3.27-19.6 8.51-21.4 3.28-1.35 6.8-2.39
9.58-4.68z"/>
<!-- 11 -->
<path id="head"
class={hide_if_left_gt(@tally.turns_left, 5)}
style="fill:none; stroke: #777; stroke-width: 3.5;"
d="m440 389c-0.665 10.8-6.11 21.3-15.1 29.3-7.12 6.41-16.2
11.1-26.1 13.3m13.2-15.3c-8.42 4.22-17.7 6.53-26.9
6.62-9.15 0.0947-17.9-1.98-25.6-6.07m4.87
10.2c-8.87-2.54-16.6-6.97-22.8-13-8.15-7.94-13.1-18.3-14.4-29.6m9.2
15.2c-2.44-5.82-3.68-12.2-3.54-18.7 0.273-12.8 5.89-25.3
16.2-34.3 3.72-3.24 7.98-5.95 12.7-8.02m-24.6
4.13c1.26-1.63 2.65-3.18 4.15-4.64 9.88-9.59 24.3-15
40.1-15.3 6.86-0.164 13.8 0.618 20.5 2.27m-16.8 3.21c13.2
0.844 26.6 5.3 37.3 12.8 5.5 3.87 10.1 8.41 13.5
13.4m-14.2-7.47c7.02 8.71 11.3 19.3 11.9 30.2 0.483
8.22-1.11 16.2-4.62 23.3m10.7-20.6c-1.44 10.3-6.91
20.2-15.5 28.3-5.87 5.49-13 9.97-21 13.2m9.16-3.79c-8.29
5.29-18.3 7.86-28.7
7.45-14.4-0.563-28.6-6.81-39.2-17.3-0.041-0.0409-0.082-0.0818-0.123-0.123m20.1
9.56c-6.36-2.1-12-5.37-16.7-9.54-9.79-8.76-14.7-21-13.8-33.3
0.289-3.74 1.12-7.43 2.48-11m-5.77
20c-0.259-2.41-0.344-4.86-0.248-7.32 0.504-13 6-25.6
15.8-35.1 4.1-3.96 8.86-7.27 14.1-9.78m-18 3.7c10.7-8.22
23.8-13.2 37.2-13.9 10-0.459 19.7 1.59 28.2 5.96m-18.6
1.19c10.6 2.1 20.1 7.24 27.7 14.8 9.27 9.27 15.1 21.7 17
35.4m-0.75-22.6c2.71 6.71 4.04 13.9 3.81 21.2-0.412
12.9-5.68 25.5-15.3 36.2m15.6-32.6c-1.49 10.3-7.03
20.2-15.8 27.9-7.38 6.47-16.7 11.1-27.1
13.3m14.4-2.02c-8.18 4.43-17.1 6.8-26 7.03-13.7
0.355-26.4-4.45-35.2-12.9-0.615-0.589-1.21-1.19-1.78-1.81m15.3
9.62c-5.03-2.23-9.67-5.37-13.7-9.28-9.95-9.61-15.7-23.3-15.8-37.9-0.0322-7.37
1.37-14.8 4.16-21.9m-5.57
22.6c-0.0399-0.781-0.0579-1.56-0.0542-2.34 0.0607-12.7
5.85-24.8 16.2-33.2 8.64-7.01 20-11 32.2-10.9m-23.1
0.57c8.5-4.65 18.5-7.26 28.9-7.51 12.5-0.296 24.9 2.81
35.6 8.82m-25.6 1.99c7.52 2.26 14.3 6 20.1 10.9 10.7 9.02
17.1 21.4 18.4 34 0.294 2.75 0.346 5.49 0.168
8.18m1.59-7.13c-0.154 12.4-5.82 24.2-15.7 32.7-8.78
7.49-20.3 11.9-32.7 12.1m23.4-4.39c-8.21 3.47-17.1
5.47-25.9 5.65-13.9
0.285-26.7-3.99-35.9-12.4-1.86-1.71-3.55-3.57-5.05-5.57m14.4
15.3c-4.25-2.31-8.16-5.2-11.6-8.61-10-9.89-15.7-23.6-15.7-38.5
0.006-7.72 1.52-15.6 4.47-23.1m-10.6
27.4c-0.009-0.232-0.0152-0.465-0.02-0.697-0.272-13.2
5.58-25.8 16.1-35 9.53-8.36 22.3-13.4
35.8-14.4m-21-2.65c8.29-3.63 17.3-5.36 26.4-4.81 14.1
0.859 27.2 7.19 36.8 18.4 0.326 0.381 0.647 0.767 0.963
1.16m-11.8-8.11c5.25 2.39 10 5.64 14.2 9.6 9.47 9.12 14.9
21.5 14.7 34.7m1.25-8.98c1.31 4.04 2 8.26 2.06 12.5 0.167
12.8-5.38 25.4-15.2 34.7-3.75 3.53-8.05 6.5-12.7
8.78m25.6-39.1c-1.54 11.9-7.23 22.8-16.4 30.9-5.7
5.01-12.6 8.75-20.2 11m11.4-3.98c-8.86 5.75-19.1
8.42-29.4 7.69-11.1-0.781-21.8-5.48-30.2-13.4m9.21
0.64c-8-2.75-15.1-6.8-20.8-12-9.86-8.92-15-20.6-14.4-32.7
0.208-4.31 1.15-8.59 2.78-12.7m-1.3
30.4c-0.638-2.79-0.957-5.65-0.957-8.55 0.001-12.6
6.06-25.2 16.8-34.5 4.08-3.51 8.77-6.46 13.9-8.72m-3.56
0.45l0.0612-0.0546c10.2-9.1 23.8-14.2 37.9-14.1 7.7
0.0671 15.3 1.7 22.3 4.82m-18.6-1.06c12 1.4 22.6 6.59
30.3 14.7 8.7 9.19 13.2 21.6 12.5 34.9m-4.44-26.2c3.28
6.52 5 13.7 5 21.1 0.002 13.2-5.47 26-15.4 35.6-2.87
2.79-6.08 5.27-9.55 7.39m29.4-41.5c-0.891 11-6.21
21.4-14.8 29.9-8.84 8.64-20.6 14.6-33.1 17.3m8.16
6.25c-7.38 3.41-15.3 5.15-23.2 5.17-13.9
0.0228-26.8-5.31-36-14.6-1.47-1.5-2.84-3.09-4.08-4.75m14.6-2.68c-4.22-2.39-8.03-5.15-11.4-8.24-10.3-9.51-15.7-21.7-15.4-34.4
0.0219-0.985 0.0778-1.97 0.168-2.95m3.63
22.6c-0.479-2.96-0.728-5.97-0.739-8.98-0.0498-13.3
4.52-26 12.7-35.7 5.58-6.64 12.5-11.5 20-14.2m-7.96
14.1c9.99-6.69 21.9-10.4 34.2-10.4 13.5 0.0434 26.6 4.65
37 13.1m-28-26.2c7.72 2.53 14.9 6.76 20.9 12.4 8.84 8.23
14.9 19 17.2 30.9m-2.65 2.06c2.18 5.91 3.41 12.1 3.57
18.2 0.347 12.9-4.08 24.8-12.4 33.2-2.27 2.29-4.8
4.28-7.53 5.93m-81.8-77.6 34.7-21.6m54.7 15.8 15.2
34.8m-44.6-50.6 34.7 21.6m-2.43 63.2-36.4 19.3m-63.1-36.3
2.07-37.2m41.7-30.9 41.6 7.31m-19.6-7.13 35.3 20.8m5.92
9.96 2.07 37.2m-37.2 34.5-42.3-2.76m28.9
5.07-40.3-11.8m-19.4-53.8 23.5-31.1m25
96.2-36.9-18.5m-13.2-31.1 12.3-35.7m69.6-15 28.5
27.7m-89.5-21.1 40.3-11.8m8.15 105-41.8-6.41"
transform="translate(1.43 2.86)"/>
</g>
</svg>
<p class="turns-left">
Turns left: <%= @tally.turns_left %>
</p>
</div>
"""
end
defp hide_if_left_gt(left, level) do
if left > level, do: "hide-component", else: ""
end
end
| 53.402715 | 142 | 0.458651 |
087ec051766d49e7b376aadbdcd07e1932cbf5f6 | 2,078 | exs | Elixir | mix.exs | ricardopadua/introduction_phoenix_liveview | 3e320b4ea7f80b079dfcd9ac2d5714b1204ce4ed | [
"Apache-2.0"
] | null | null | null | mix.exs | ricardopadua/introduction_phoenix_liveview | 3e320b4ea7f80b079dfcd9ac2d5714b1204ce4ed | [
"Apache-2.0"
] | null | null | null | mix.exs | ricardopadua/introduction_phoenix_liveview | 3e320b4ea7f80b079dfcd9ac2d5714b1204ce4ed | [
"Apache-2.0"
] | null | null | null | defmodule IntroductionPhoenixLiveview.MixProject do
use Mix.Project
def project do
[
app: :introduction_phoenix_liveview,
version: "0.1.0",
elixir: "~> 1.12",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {IntroductionPhoenixLiveview.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.6.6"},
{:phoenix_ecto, "~> 4.4"},
{:ecto_sql, "~> 3.6"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 3.0"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:phoenix_live_view, "~> 0.17.5"},
{:floki, ">= 0.30.0", only: :test},
{:phoenix_live_dashboard, "~> 0.6"},
{:esbuild, "~> 0.3", runtime: Mix.env() == :dev},
{:swoosh, "~> 1.3"},
{:telemetry_metrics, "~> 0.6"},
{:telemetry_poller, "~> 1.0"},
{:gettext, "~> 0.18"},
{:jason, "~> 1.2"},
{:plug_cowboy, "~> 2.5"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to install project dependencies and perform other setup tasks, run:
#
# $ mix setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
setup: ["deps.get", "ecto.setup"],
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"],
"assets.deploy": ["esbuild default --minify", "phx.digest"]
]
end
end
| 29.267606 | 84 | 0.578441 |
087ec49e5758510470b49f12dca3ca34cb0e38db | 15,549 | ex | Elixir | lib/mix/tasks/docs.ex | supersimple/ex_doc | 6368a128e3c871f25e3dc7c1f86d51cdf84e3e3d | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | lib/mix/tasks/docs.ex | supersimple/ex_doc | 6368a128e3c871f25e3dc7c1f86d51cdf84e3e3d | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | lib/mix/tasks/docs.ex | supersimple/ex_doc | 6368a128e3c871f25e3dc7c1f86d51cdf84e3e3d | [
"Apache-2.0",
"CC-BY-4.0"
] | 1 | 2020-05-11T16:54:10.000Z | 2020-05-11T16:54:10.000Z | defmodule Mix.Tasks.Docs do
use Mix.Task
@shortdoc "Generate documentation for the project"
@moduledoc ~S"""
Uses ExDoc to generate a static web page from the project documentation.
## Command line options
* `--canonical`, `-n` - Indicate the preferred URL with
rel="canonical" link element, defaults to no canonical path
* `--formatter`, `-f` - Which formatters to use, "html" or
"epub". This option can be given more than once. By default,
both html and epub are generated.
* `--output`, `-o` - Output directory for the generated
docs, default: `"doc"`
* `--language` - Specifies the language to annotate the
EPUB output in valid [BCP 47](https://tools.ietf.org/html/bcp47)
The command line options have higher precedence than the options
specified in your `mix.exs` file below.
## Configuration
ExDoc will automatically pull in information from your project,
like the application and version. However, you may want to set
`:name`, `:source_url` and `:homepage_url` to have a nicer output
from ExDoc, for example:
def project do
[app: :my_app,
version: "0.1.0-dev",
deps: deps(),
# Docs
name: "My App",
source_url: "https://github.com/USER/PROJECT",
homepage_url: "http://YOUR_PROJECT_HOMEPAGE",
docs: [main: "MyApp", # The main page in the docs
logo: "path/to/logo.png",
extras: ["README.md"]]]
end
ExDoc also allows configuration specific to the documentation to
be set. The following options should be put under the `:docs` key
in your project's main configuration. The `:docs` options should
be a keyword list or a function returning a keyword list that will
be lazily executed.
* `:api_reference` - Whether to generate `api-reference.html`; default: `true`.
If this is set to false, `:main` must also be set.
* `:assets` - Path to a directory that will be copied as is to the "assets"
directory in the output path. Its entries may be referenced in your docs
under "assets/ASSET.EXTENSION"; defaults to no assets directory.
* `:before_closing_body_tag` - a function that takes as argument an atom specifying
the formatter being used (`:html` or `:epub`) and returns a literal HTML string
to be included just before the closing body tag (`</body>`).
The atom given as argument can be used to include different content in both formats.
Useful to inject custom assets, such as Javascript.
* `:before_closing_head_tag` - a function that takes as argument an atom specifying
the formatter being used (`:html` or `:epub`) and returns a literal HTML string
to be included just before the closing head tag (`</head>`).
The atom given as argument can be used to include different content in both formats.
Useful to inject custom assets, such as CSS stylesheets.
* `:canonical` - String that defines the preferred URL with the rel="canonical"
element; defaults to no canonical path.
* `:deps` - A keyword list application names and their documentation URL.
ExDoc will by default include all dependencies and assume they are hosted on
HexDocs. This can be overridden by your own values. Example: `[plug: "https://myserver/plug/"]`
* `:extra_section` - String that defines the section title of the additional
Markdown pages; default: "PAGES". Example: "GUIDES"
* `:extras` - List of keywords, each key must indicate the path to additional
Markdown pages, the value for each keyword (optional) gives you more control
about the PATH and the title of the output files; default: `[]`. Example:
`["README.md", "CONTRIBUTING.md": [filename: "contributing", title: "Contributing"]]`
* `:filter_prefix` - Include only modules that match the given prefix in
the generated documentation. Example: "MyApp.Core"
* `:formatters` - Formatter to use; default: ["html"], options: "html", "epub".
* `:groups_for_extras`, `:groups_for_modules`, `:groups_for_functions` - See the "Groups" section
* `:javascript_config_path` - Path of an additional JavaScript file to be included on all pages
to provide up-to-date data for features like the version dropdown - See the "Additional
JavaScript config" section. Example: `"../versions.js"`
* `:nest_modules_by_prefix` - See the "Nesting" section
* `:language` - Identify the primary language of the documents, its value must be
a valid [BCP 47](https://tools.ietf.org/html/bcp47) language tag; default: "en"
* `:logo` - Path to the image logo of the project (only PNG or JPEG accepted)
The image size will be 64x64. When specified, the logo will be placed under
the "assets" directory in the output path under the name "logo" and the
appropriate extension.
* `:cover` - Path to the epub cover image (only PNG or JPEG accepted)
The image size should be around 1600x2400. When specified, the cover will be placed under
the "assets" directory in the output path under the name "cover" and the
appropriate extension. This option has no effect when using the "html" formatter.
* `:authors` - List of authors for the generated docs or epub.
* `:main` - Main page of the documentation. It may be a module or a
generated page, like "Plug" or "api-reference"; default: "api-reference".
* `:markdown_processor` - The markdown processor to use;
* `:markdown_processor_options` - Configuration options for the markdown processor;
* `:source_beam` - Path to the beam directory; default: mix's compile path.
* `:source_ref` - The branch/commit/tag used for source link inference;
default: "master".
* `:source_url_pattern` - Public URL of the project for source links. This is derived
automatically from the project's `:source_url` and `:source_ref` when using one of
the supported public hosting services (currently GitHub, GitLab, or Bitbucket). If
you are using one of those services with their default public hostname, you do not
need to set this configuration.
However, if using a different solution, or self-hosting, you will need to set this
configuration variable to a pattern for source code links. The value must be a string
of the full URI to use for links with the following variables available for interpolation:
* `%{path}`: the path of a file in the repo
* `%{line}`: the line number in the file
For GitLab/GitHub:
https://mydomain.org/user_or_team/repo_name/blob/master/%{path}#L%{line}"
For Bitbucket:
https://mydomain.org/user_or_team/repo_name/src/master/%{path}#cl-%{line}
* `:output` - Output directory for the generated docs; default: "doc".
May be overridden by command line argument.
* `:ignore_apps` - Apps to be ignored when generating documentation in an umbrella project.
Receives a list of atoms. Example: `[:first_app, :second_app]`.
* `:skip_undefined_reference_warnings_on` - ExDoc warns when it can't create a `Mod.fun/arity`
reference in the current project docs e.g. because of a typo. This list controls
which docs pages to skip the warnings on, which is useful for e.g. deprecation pages;
default: `[]`.
## Groups
ExDoc content can be organized in groups. This is done via the `:groups_for_extras`
and `:groups_for_modules`. For example, imagine you are storing extra guides in
your documentation which are organized per directory. In the extras section you
have:
extras: [
"guides/introduction/foo.md",
"guides/introduction/bar.md",
...
"guides/advanced/baz.md",
"guides/advanced/bat.md",
]
You can have those grouped as follows:
groups_for_extras: [
"Introduction": Path.wildcard("guides/introduction/*.md"),
"Advanced": Path.wildcard("guides/advanced/*.md")
]
Or via a regex:
groups_for_extras: [
"Introduction": ~r"/introduction/"
"Advanced": ~r"/advanced/"
]
Similar can be done for modules:
groups_for_modules: [
"Data types": [Atom, Regex, URI],
"Collections": [Enum, MapSet, Stream],
]
A regex or the string name of the module is also supported.
### Grouping functions
Functions inside a module can also be organized in groups. This is done via
the `:groups_for_functions` configuration which is a keyword list of group
titles and filtering functions that receive the documentation metadata of
functions as argument.
For example, imagine that you have an API client library with a large surface
area for all the API endpoints you need to support. It would be helpful to
group the functions with similar responsibilities together. In this case in
your module you might have:
defmodule APIClient do
@doc section: :auth
def refresh_token(params \\ [])
@doc subject: :object
def update_status(id, new_status)
@doc permission: :grant
def grant_privilege(resource, privilege)
end
And then in the configuration you can group these with:
groups_for_functions: [
Authentication: & &1[:section] == :auth,
Resource: & &1[:subject] == :object,
Admin: & &1[:permission] in [:grant, :write]
]
A function can belong to a single group only. If multiple group filters match,
the first will take precedence. Functions that don't have a custom group will
be listed under the default "Functions" group.
## Additional JavaScript config
Since version `0.20.0` ExDoc includes a way to enrich the documentation
with new information without having to re-generate it, through a JavaScript
file that can be shared across documentation for multiple versions of the
package. If `:javascript_config_path` is set when building the documentation,
this script will be referenced in each page's `<head>` using a `<script>` tag.
The script should define data in global JavaScript variables that will be
interpreted by `ex_doc` when viewing the documentation.
  Currently supported variables:
### `versionNodes`
This global JavaScript variable should be providing an array of objects that
define all versions of this Mix package which should appear in the package
versions dropdown in the documentation sidebar. The versions dropdown allows
for switching between package versions' documentation.
Example:
```javascript
var versionNodes = [
{
version: "v0.0.0", // version number or name (required)
url: "https://hexdocs.pm/ex_doc/0.19.3/" // documentation URL (required)
}
]
```
## Nesting
ExDoc also allows module names in the sidebar to appear nested under a given
prefix. The `:nest_modules_by_prefix` expects a list of module names, such as
`[Foo.Bar, Bar.Baz]`. In this case, a module named `Foo.Bar.Baz` will appear
nested within `Foo.Bar` and only the name `Baz` will be shown in the sidebar.
Note the `Foo.Bar` module itself is not affected.
This option is mainly intended to improve the display of long module names in
the sidebar, particularly when they are too long for the sidebar or when many
modules share a long prefix. If you mean to group modules logically or call
attention to them in the docs, you should probably use `:groups_for_modules`
  (which can be used in conjunction with `:nest_modules_by_prefix`).
## Umbrella project
ExDoc can be used in an umbrella project and generates a single documentation
for all child apps. You can use the `:ignore_apps` configuration to exclude
certain projects in the umbrella from documentation.
Generating documentation per each child app can be achieved by running:
mix cmd mix docs
See `mix help cmd` for more information.
"""
@switches [
canonical: :string,
formatter: :keep,
language: :string,
output: :string
]
@aliases [n: :canonical, f: :formatter, o: :output]
@doc false
# Entry point for `mix docs`: compiles the project, merges CLI switches over
# the project's `:docs` configuration, normalizes the options and invokes
# `generator` once per requested formatter, returning the produced indexes.
#
# `config` and `generator` are injectable (defaults: current Mix project and
# `ExDoc.generate_docs/3`).
def run(args, config \\ Mix.Project.config(), generator \\ &ExDoc.generate_docs/3) do
  # ExDoc's applications must be running before generation starts.
  {:ok, _} = Application.ensure_all_started(:ex_doc)
  Mix.Task.run("compile")

  # ExDoc may only be compiled for another Mix env (e.g. listed under :dev);
  # fail with a clear message instead of an UndefinedFunctionError later.
  unless Code.ensure_loaded?(ExDoc.Config) do
    Mix.raise(
      "Could not load ExDoc configuration. Please make sure you are running the " <>
        "docs task in the same Mix environment it is listed in your deps"
    )
  end

  {cli_opts, args, _} = OptionParser.parse(args, aliases: @aliases, switches: @switches)

  # Only switches are accepted; leftover positional args are a user error.
  if args != [] do
    Mix.raise("Extraneous arguments on the command line")
  end

  project = to_string(config[:name] || config[:app])
  version = config[:version] || "dev"

  options =
    config
    |> get_docs_opts()
    # CLI switches take precedence over mix.exs configuration.
    |> Keyword.merge(cli_opts)
    # accepted at root level config
    |> normalize_source_url(config)
    # accepted at root level config
    |> normalize_homepage_url(config)
    |> normalize_source_beam(config)
    |> normalize_main()
    |> normalize_deps()

  # Build once per formatter (e.g. "html" and "epub"), logging each index.
  for formatter <- get_formatters(options) do
    index = generator.(project, version, Keyword.put(options, :formatter, formatter))
    log(index)
    index
  end
end
# Resolves which formatters to build: `--formatter` CLI flags win; otherwise
# fall back to the `:formatters` docs option, and finally to both defaults.
defp get_formatters(options) do
  cli_formatters = Keyword.get_values(options, :formatter)

  if cli_formatters == [] do
    options[:formatters] || ["html", "epub"]
  else
    cli_formatters
  end
end
# Fetches the `:docs` project configuration, which may be a zero-arity
# function (evaluated lazily), absent (`[]`) or a plain keyword list.
defp get_docs_opts(config) do
  case config[:docs] do
    fun when is_function(fun, 0) -> fun.()
    nil -> []
    docs -> docs
  end
end
# Prints a success message plus the location of the generated index.
defp log(index) do
  shell = Mix.shell()
  shell.info([:green, "Docs successfully generated."])
  shell.info([:green, "View them at #{inspect(index)}."])
end
# Copies `:source_url` from the root project config into the docs options
# (the key is accepted at the project root rather than under `:docs`).
defp normalize_source_url(options, config) do
  put_root_option(options, config, :source_url)
end

# Same as above for `:homepage_url`.
defp normalize_homepage_url(options, config) do
  put_root_option(options, config, :homepage_url)
end

# Shared helper deduplicating the two normalizers above: when `key` is set
# (truthy) in the project config, copy it into the docs options; otherwise
# return the options untouched.
defp put_root_option(options, config, key) do
  if value = config[key] do
    Keyword.put(options, key, value)
  else
    options
  end
end
# Ensures `:source_beam` points at the compiled beam directory (or, for an
# umbrella project, the list of child-app ebin directories), unless the
# caller already supplied one (`Keyword.put_new/3` keeps existing values).
defp normalize_source_beam(options, config) do
  source_beam =
    case Mix.Project.umbrella?(config) do
      true -> umbrella_compile_paths(Keyword.get(options, :ignore_apps, []))
      false -> Mix.Project.compile_path()
    end

  Keyword.put_new(options, :source_beam, source_beam)
end
# Lists the ebin directory of every umbrella child app, skipping the ones
# named in `ignored_apps`.
defp umbrella_compile_paths(ignored_apps) do
  build = Mix.Project.build_path()

  Mix.Project.apps_paths()
  |> Map.keys()
  |> Enum.reject(&(&1 in ignored_apps))
  |> Enum.map(fn app -> Path.join([build, "lib", Atom.to_string(app), "ebin"]) end)
end
# Normalizes the `:main` page option: drop it when unset, stringify module
# atoms (e.g. `MyApp` -> "MyApp"), and keep binaries as-is.
defp normalize_main(options) do
  case options[:main] do
    nil -> Keyword.delete(options, :main)
    main when is_atom(main) -> Keyword.put(options, :main, inspect(main))
    main when is_binary(main) -> options
  end
end
# Resolves the `:deps` option into `{lib_dir, docs_url}` pairs. Dependencies
# default to HexDocs URLs (see `get_deps/0`); user-supplied entries win in
# the merge.
defp normalize_deps(options) do
  user_deps = Keyword.get(options, :deps, [])

  deps =
    # `:code.lib_dir/1` returns a charlist path when the app can be found
    # and an error tuple otherwise, so the `is_list/1` filter drops apps
    # whose directory cannot be resolved.
    for {app, doc} <- Keyword.merge(get_deps(), user_deps),
        lib_dir = :code.lib_dir(app),
        is_list(lib_dir),
        do: {List.to_string(lib_dir), doc}

  Keyword.put(options, :deps, deps)
end
# Builds the default docs URL (HexDocs, pinned to the dependency's version)
# for every dependency of the current project.
defp get_deps do
  # Comprehension notes:
  #   * `_ = Application.load(key)` runs purely for its side effect of
  #     loading the app spec; its result is truthy either way, so it never
  #     filters anything out.
  #   * `vsn = Application.spec(key, :vsn)` doubles as a filter: apps whose
  #     spec is unavailable yield `nil` and are skipped.
  for {key, _} <- Mix.Project.deps_paths(),
      _ = Application.load(key),
      vsn = Application.spec(key, :vsn) do
    {key, "https://hexdocs.pm/#{key}/#{vsn}/"}
  end
end
end
| 35.827189 | 101 | 0.68088 |
087edad091062c8666b6a2f43559871228bfcf3b | 515 | ex | Elixir | Day-2/part1.ex | alekxeyuk/ADOC-2021 | 9b9f31724b940d62dcc2de7354b0054f46810760 | [
"MIT"
] | 1 | 2021-12-03T13:34:13.000Z | 2021-12-03T13:34:13.000Z | Day-2/part1.ex | alekxeyuk/ADOC-2021 | 9b9f31724b940d62dcc2de7354b0054f46810760 | [
"MIT"
] | null | null | null | Day-2/part1.ex | alekxeyuk/ADOC-2021 | 9b9f31724b940d62dcc2de7354b0054f46810760 | [
"MIT"
-- Advent of Code 2021, day 2, part 1 — written in Phix/Euphoria syntax
-- despite the .ex extension. Follows "forward"/"down"/"up" commands from
-- input.txt and prints horizontal position times depth.
procedure main()
    -- One command per line, e.g. "forward 5"; -1 signals a missing file.
    object f = get_text("input.txt", GT_LF_STRIPPED)
    if f=-1 then
        puts(1, "Can't find input.txt file")
        abort(1)
    end if
    -- hor = horizontal position, dep = depth, x = current command amount.
    integer {hor, dep, x} @= 0
    for i=1 to length(f) do
        seq s = split(f[i])
        x = to_integer(s[2])
        switch s[1] do
            case "forward": hor += x
            case "down": dep += x
            case "up": dep -= x
        end switch
    end for
    printf(1, "Answer is: %d\n", hor * dep)
end procedure
main()
| 21.458333 | 52 | 0.485437 |
087ee2b61e7d145d14f943f18ef8567d3a99354e | 1,896 | exs | Elixir | config/dev.exs | ShaneKilkelly/teal | 494de9b9a3848ab57a5c3731c9d462bad335a3c0 | [
"MIT"
] | null | null | null | config/dev.exs | ShaneKilkelly/teal | 494de9b9a3848ab57a5c3731c9d462bad335a3c0 | [
"MIT"
] | null | null | null | config/dev.exs | ShaneKilkelly/teal | 494de9b9a3848ab57a5c3731c9d462bad335a3c0 | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :teal, Teal.Web.Endpoint,
  http: [port: 4000],
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin",
    cd: Path.expand("../assets", __DIR__)]]

# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# command from your terminal:
#
#     openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout priv/server.key -out priv/server.pem
#
# The `http:` config above can be replaced with:
#
#     https: [port: 4000, keyfile: "priv/server.key", certfile: "priv/server.pem"],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.

# Watch static assets and templates for browser live reloading.
config :teal, Teal.Web.Endpoint,
  live_reload: [
    patterns: [
      ~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
      ~r{priv/gettext/.*(po)$},
      ~r{lib/teal/web/views/.*(ex)$},
      ~r{lib/teal/web/templates/.*(eex)$}
    ]
  ]

# Do not include metadata nor timestamps in development logs.
config :logger, :console, format: "[$level] $message\n"

# Set a higher stacktrace depth during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20

# Configure the development database connection.
config :teal, Teal.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: "teal",
  password: "teal",
  database: "teal_dev",
  hostname: "localhost",
  pool_size: 10
| 32.135593 | 170 | 0.695675 |
087ee9585476513fe0027417c1d9da89c0b065d5 | 3,284 | ex | Elixir | lib/erlef/members.ex | paulo-ferraz-oliveira/website | c7a1aa02d4d402d3e0e7f3973619ba0e7b53cbd8 | [
"Apache-2.0"
] | 1 | 2021-03-13T01:34:28.000Z | 2021-03-13T01:34:28.000Z | lib/erlef/members.ex | paulo-ferraz-oliveira/website | c7a1aa02d4d402d3e0e7f3973619ba0e7b53cbd8 | [
"Apache-2.0"
] | null | null | null | lib/erlef/members.ex | paulo-ferraz-oliveira/website | c7a1aa02d4d402d3e0e7f3973619ba0e7b53cbd8 | [
"Apache-2.0"
] | null | null | null | defmodule Erlef.Members do
@moduledoc """
Members context
"""
alias Erlef.Repo
alias Erlef.Members.{EmailRequest, EmailRequestNotification, Notifications}
alias Erlef.Mailer
alias Erlef.Admins
alias Erlef.Accounts.Member
import Ecto.Query
# Returns a changeset for a brand-new email request, optionally pre-filled
# with `params`.
def new_email_request(params \\ %{}), do: EmailRequest.changeset(%EmailRequest{}, params)
# Builds the member notification email of the given `type` and hands it to
# the mailer for delivery.
def notify(type, params) do
  email = Notifications.new(type, params)
  Mailer.deliver(email)
end
# Inserts a new email request and, on success, notifies the admins that a
# request is awaiting processing. Returns the `Repo.insert/1` result either
# way.
def create_email_request(params) do
  case Repo.insert(new_email_request(params)) do
    {:ok, _} = success ->
      Admins.notify(:new_email_request)
      success

    error ->
      error
  end
end
# Fetches an email request by primary key, or `nil` when absent.
def get_email_request(id) do
  Repo.get(EmailRequest, id)
end
# Returns `true` when the member has already submitted an email request.
# Delegates to `get_email_request_by_member/1` so the lookup logic lives in
# a single place instead of duplicating the `Repo.get_by/2` query.
def has_email_request?(member) do
  not is_nil(get_email_request_by_member(member))
end
# Fetches the email request submitted by the given member, or `nil`.
def get_email_request_by_member(member),
  do: Repo.get_by(EmailRequest, submitted_by: member.id)
# Applies `params` to an existing email request. The second clause accepts
# a bare id, resolving it first and returning `{:error, :not_found}` when
# no such request exists.
def update_email_request(%EmailRequest{} = request, params) do
  changeset = EmailRequest.changeset(request, params)
  Repo.update(changeset)
end

def update_email_request(id, params) do
  if request = get_email_request(id) do
    update_email_request(request, params)
  else
    {:error, :not_found}
  end
end
# Completes an email request: updates the member's email fields, marks the
# request as completed, then sends the completion notification. This
# variant also carries a generated mailbox `password`, forwarded to the
# notification step.
#
# NOTE(review): if a `with` clause does not match (e.g. an unknown id makes
# `get_email_request/1` return `nil`), that non-matching value is returned
# as-is rather than a tagged error — callers should be prepared for it.
def complete_email_request(%{id: id, password: password}) do
  with %EmailRequest{} = req <- get_email_request(id),
       {:ok, member} <- update_member_fields(req),
       {:ok, _req} <- update_email_request(req.id, %{status: :completed}) do
    do_notify(req, %{member: member, password: password})
  end
end

# Same completion flow without a password; the notification step receives
# only the member.
def complete_email_request(%{id: id}) do
  with %EmailRequest{} = req <- get_email_request(id),
       {:ok, member} <- update_member_fields(req),
       {:ok, req} <- update_email_request(req.id, %{status: :completed}) do
    do_notify(req, %{member: member})
  end
end
# Lists every email request that has not yet been completed.
def outstanding_email_requests() do
  Repo.all(from(p in EmailRequest, where: p.status not in [:completed]))
end
# Counts the email requests that have not yet been completed.
def outstanding_email_requests_count() do
  query =
    from(p in EmailRequest,
      where: p.status not in [:completed],
      select: count(p.id)
    )

  Repo.one(query)
end
# Applies the side effects of a completed request to the member record:
# sets the member's erlef.org address and flips the box/alias flags to
# match the request type. `update_external: true` asks the accounts context
# to propagate the change externally as well.
#
# NOTE(review): `Erlef.Accounts.get_member!/1` is a bang function, so the
# `err -> err` fallback below looks unreachable — confirm whether it can
# actually return a non-struct value.
defp update_member_fields(req) do
  email = "#{req.username}@erlef.org"

  update =
    case req.type do
      :email_box ->
        %{erlef_email_address: email, has_email_box?: true, has_email_alias?: false}

      :email_alias ->
        %{erlef_email_address: email, has_email_box?: false, has_email_alias?: true}
    end

  case Erlef.Accounts.get_member!(req.submitted_by) do
    %Member{} = member ->
      Erlef.Accounts.update_member(member, update, update_external: true)

    err ->
      err
  end
end
# Sends the completion notification for an alias request. The `{:ok, _}`
# match asserts delivery succeeded; returns `:ok`.
def do_notify(%EmailRequest{type: :email_alias}, params) do
  {:ok, _} =
    params.member
    |> EmailRequestNotification.email_alias_created()
    |> Mailer.deliver()

  :ok
end

# Sends the completion notification for a mailbox request, including the
# generated password. Uses the `Mailer` alias for consistency with the
# clause above (previously called `Erlef.Mailer` directly — same module via
# the alias, so behavior is unchanged).
def do_notify(%EmailRequest{type: :email_box}, params) do
  {:ok, _} =
    params.member
    |> EmailRequestNotification.email_box_created(params.password)
    |> Mailer.deliver()

  :ok
end
end
| 22.965035 | 86 | 0.63581 |
087efe4dd9c84e737c385bc7683622917a1817d1 | 374 | ex | Elixir | lib/basic_web/controllers/shotrize/liveview_controller.ex | ysaito8015/communitex | d469447a62029d59883d95df4df3c9b09e0022e2 | [
"Apache-2.0"
] | 7 | 2021-07-14T15:45:55.000Z | 2022-01-25T11:13:01.000Z | lib/basic_web/controllers/shotrize/liveview_controller.ex | ysaito8015/communitex | d469447a62029d59883d95df4df3c9b09e0022e2 | [
"Apache-2.0"
] | 10 | 2021-08-09T15:54:05.000Z | 2022-02-17T04:18:38.000Z | lib/basic_web/controllers/shotrize/liveview_controller.ex | ysaito8015/communitex | d469447a62029d59883d95df4df3c9b09e0022e2 | [
"Apache-2.0"
defmodule BasicWeb.LiveViewController do
  @moduledoc """
  Catch-all LiveView that maps the request path onto a template rendered by
  `BasicWeb.PageLiveView`.
  """

  use Phoenix.LiveView

  # `@impl true` annotations added: `mount/3` and `render/1` are
  # Phoenix.LiveView behaviour callbacks.
  @impl true
  def mount(params, _session, socket) do
    # Keep the raw request params in assigns so render/1 can derive the
    # template name from them.
    {:ok, assign(socket, params: params)}
  end

  @impl true
  def render(assigns) do
    params = assigns.params

    # "path_" is expected to be a list of path segments (`Path.join/1` is
    # applied to it); its absence means the index page.
    template =
      case params["path_"] do
        nil -> "index.html"
        segments -> Path.join(segments) <> ".html"
      end

    BasicWeb.PageLiveView.render(template, assigns)
  end
end
| 26.714286 | 103 | 0.695187 |
087f5e35f60c56b63332df21697db83337c88cf6 | 5,839 | ex | Elixir | clients/docs/lib/google_api/docs/v1/model/document_style_suggestion_state.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/docs/lib/google_api/docs/v1/model/document_style_suggestion_state.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/docs/lib/google_api/docs/v1/model/document_style_suggestion_state.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Docs.V1.Model.DocumentStyleSuggestionState do
  @moduledoc """
  A mask that indicates which of the fields on the base DocumentStyle have been changed in this suggestion.
  For any field set to true, there is a new suggested value.

  ## Attributes

  *   `backgroundSuggestionState` (*type:* `GoogleApi.Docs.V1.Model.BackgroundSuggestionState.t`, *default:* `nil`) - A mask that indicates which of the fields in background have been changed in this
      suggestion.
  *   `defaultFooterIdSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to default_footer_id.
  *   `defaultHeaderIdSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to default_header_id.
  *   `evenPageFooterIdSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to even_page_footer_id.
  *   `evenPageHeaderIdSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to even_page_header_id.
  *   `firstPageFooterIdSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to first_page_footer_id.
  *   `firstPageHeaderIdSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to first_page_header_id.
  *   `marginBottomSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to margin_bottom.
  *   `marginFooterSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to margin_footer.
  *   `marginHeaderSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to margin_header.
  *   `marginLeftSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to margin_left.
  *   `marginRightSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to margin_right.
  *   `marginTopSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to margin_top.
  *   `pageNumberStartSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to page_number_start.
  *   `pageSizeSuggestionState` (*type:* `GoogleApi.Docs.V1.Model.SizeSuggestionState.t`, *default:* `nil`) - A mask that indicates which of the fields in size have been changed in this
      suggestion.
  *   `useCustomHeaderFooterMarginsSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to
      use_custom_header_footer_margins.
  *   `useEvenPageHeaderFooterSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to use_even_page_header_footer.
  *   `useFirstPageHeaderFooterSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to use_first_page_header_footer.
  """

  # Generated model module; `GoogleApi.Gax.ModelBase` supplies the `field`
  # macro used below — presumably also the `decode/2` referenced by the
  # Poison implementation for this module (confirm against gax docs).
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :backgroundSuggestionState => GoogleApi.Docs.V1.Model.BackgroundSuggestionState.t(),
          :defaultFooterIdSuggested => boolean(),
          :defaultHeaderIdSuggested => boolean(),
          :evenPageFooterIdSuggested => boolean(),
          :evenPageHeaderIdSuggested => boolean(),
          :firstPageFooterIdSuggested => boolean(),
          :firstPageHeaderIdSuggested => boolean(),
          :marginBottomSuggested => boolean(),
          :marginFooterSuggested => boolean(),
          :marginHeaderSuggested => boolean(),
          :marginLeftSuggested => boolean(),
          :marginRightSuggested => boolean(),
          :marginTopSuggested => boolean(),
          :pageNumberStartSuggested => boolean(),
          :pageSizeSuggestionState => GoogleApi.Docs.V1.Model.SizeSuggestionState.t(),
          :useCustomHeaderFooterMarginsSuggested => boolean(),
          :useEvenPageHeaderFooterSuggested => boolean(),
          :useFirstPageHeaderFooterSuggested => boolean()
        }

  # One `field` declaration per attribute, mirroring the typespec above;
  # nested models name their decoder module via `:as`.
  field(:backgroundSuggestionState, as: GoogleApi.Docs.V1.Model.BackgroundSuggestionState)
  field(:defaultFooterIdSuggested)
  field(:defaultHeaderIdSuggested)
  field(:evenPageFooterIdSuggested)
  field(:evenPageHeaderIdSuggested)
  field(:firstPageFooterIdSuggested)
  field(:firstPageHeaderIdSuggested)
  field(:marginBottomSuggested)
  field(:marginFooterSuggested)
  field(:marginHeaderSuggested)
  field(:marginLeftSuggested)
  field(:marginRightSuggested)
  field(:marginTopSuggested)
  field(:pageNumberStartSuggested)
  field(:pageSizeSuggestionState, as: GoogleApi.Docs.V1.Model.SizeSuggestionState)
  field(:useCustomHeaderFooterMarginsSuggested)
  field(:useEvenPageHeaderFooterSuggested)
  field(:useFirstPageHeaderFooterSuggested)
end
# Poison decoding delegates to the generated model's own `decode/2`.
defimpl Poison.Decoder, for: GoogleApi.Docs.V1.Model.DocumentStyleSuggestionState do
  def decode(value, options) do
    GoogleApi.Docs.V1.Model.DocumentStyleSuggestionState.decode(value, options)
  end
end
# Poison encoding uses the shared encoder from the gax model base.
defimpl Poison.Encoder, for: GoogleApi.Docs.V1.Model.DocumentStyleSuggestionState do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 57.245098 | 199 | 0.730947 |
087f7bff0b89e9ce951e2c8635c511a62bf21dbc | 1,329 | ex | Elixir | lib/teiserver_web/controllers/report/report_controller.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 4 | 2021-07-29T16:23:20.000Z | 2022-02-23T05:34:36.000Z | lib/teiserver_web/controllers/report/report_controller.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 14 | 2021-08-01T02:36:14.000Z | 2022-01-30T21:15:03.000Z | lib/teiserver_web/controllers/report/report_controller.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
defmodule TeiserverWeb.Report.ReportController do
  @moduledoc """
  Renders the Teiserver account reports, dispatching on the report name
  given in the URL.
  """

  use CentralWeb, :controller

  plug(AssignPlug,
    sidemenu_active: ["teiserver"]
  )

  plug Bodyguard.Plug.Authorize,
    policy: Teiserver.Moderator,
    action: {Phoenix.Controller, :action_name},
    user: {Central.Account.AuthLib, :current_user}

  plug(:add_breadcrumb, name: 'Teiserver', url: '/teiserver')
  plug(:add_breadcrumb, name: 'Reports', url: '/teiserver/reports')

  # Spec fixed: `Map.t()` is not a defined type; `map()` is the builtin.
  @spec show(Plug.Conn.t(), map()) :: Plug.Conn.t()
  def show(conn, params) do
    name = params["name"]

    # Dispatch to the report module for the whitelisted name; unknown names
    # raise, so `name` is safe to interpolate into the template name below.
    {data, assigns} =
      case name do
        "time_spent" ->
          Teiserver.Account.TimeSpentReport.run(conn, params)

        "active" ->
          Teiserver.Account.ActiveReport.run(conn, params)

        "ranks" ->
          Teiserver.Account.RanksReport.run(conn, params)

        "verified" ->
          Teiserver.Account.VerifiedReport.run(conn, params)

        "retention" ->
          Teiserver.Account.RetentionReport.run(conn, params)

        _ ->
          raise "No handler for name of '#{name}'"
      end

    # Copy every report-provided assign onto the conn (accumulator renamed
    # to avoid shadowing `conn`), attach the data, and render the template.
    assigns
    |> Enum.reduce(conn, fn {key, value}, acc ->
      assign(acc, key, value)
    end)
    |> assign(:data, data)
    |> add_breadcrumb(name: String.capitalize(name), url: conn.request_path)
    |> render("#{name}.html")
  end
end
| 26.58 | 80 | 0.62453 |
087f8686f490726a1b5a5b861cd24ed1e2d81461 | 547 | ex | Elixir | lib/opencov/core.ex | ramkrishna70/opencov | 7a3415f8eebb797ad1f7b6c832daa4f04d70af8d | [
"MIT"
] | 189 | 2018-09-25T09:02:41.000Z | 2022-03-09T13:52:06.000Z | lib/opencov/core.ex | ramkrishna70/opencov | 7a3415f8eebb797ad1f7b6c832daa4f04d70af8d | [
"MIT"
] | 29 | 2018-09-26T05:51:18.000Z | 2021-11-05T08:55:03.000Z | lib/opencov/core.ex | ramkrishna70/opencov | 7a3415f8eebb797ad1f7b6c832daa4f04d70af8d | [
"MIT"
] | 32 | 2018-10-21T12:28:11.000Z | 2022-03-28T02:20:19.000Z | defmodule Opencov.Core do
defmacro __using__(_opts) do
quote do
import Opencov.Core, only: [pipe_when: 3]
end
end
defmacro pipe_when(left, condition, fun) do
quote do
if Opencov.Core.should_pipe(left, unquote(condition)) do
unquote(left) |> unquote(fun)
else
unquote(left)
end
end
end
defmacro should_pipe(left, condition) when is_function(condition) do
quote do
unquote(left) |> unquote(condition)
end
end
defmacro should_pipe(_, condition), do: condition
end
| 21.88 | 70 | 0.665448 |
087fa0c4ce624c55962ebe4668068ba45431d4ad | 1,259 | ex | Elixir | lib/findmy_personal_web/controllers/api/teacher_controller.ex | carlosviana/find_mypersonal | 6fadea6135b074a296c6f2a501a694b5222ec6db | [
"MIT"
] | null | null | null | lib/findmy_personal_web/controllers/api/teacher_controller.ex | carlosviana/find_mypersonal | 6fadea6135b074a296c6f2a501a694b5222ec6db | [
"MIT"
] | 6 | 2021-07-06T17:41:35.000Z | 2021-07-10T10:26:30.000Z | lib/findmy_personal_web/controllers/api/teacher_controller.ex | carlosviana/find_mypersonal | 6fadea6135b074a296c6f2a501a694b5222ec6db | [
"MIT"
] | null | null | null | defmodule FindmyPersonalWeb.Api.TeacherController do
use FindmyPersonalWeb, :controller
alias FindmyPersonal.Teachers
alias FindmyPersonal.Teachers.Teacher
action_fallback FindmyPersonalWeb.FallbackController
def index(conn, _params) do
teacher = Teachers.list_teacher()
render(conn, "index.json", teacher: teacher)
end
def create(conn, %{"teacher" => teacher_params}) do
with {:ok, %Teacher{} = teacher} <- Teachers.create_teacher(teacher_params) do
conn
|> put_status(:created)
|> put_resp_header("location", Routes.teacher_path(conn, :show, teacher))
|> render("show.json", teacher: teacher)
end
end
def show(conn, %{"id" => id}) do
teacher = Teachers.get_teacher!(id)
render(conn, "show.json", teacher: teacher)
end
def update(conn, %{"id" => id, "teacher" => teacher_params}) do
teacher = Teachers.get_teacher!(id)
with {:ok, %Teacher{} = teacher} <- Teachers.update_teacher(teacher, teacher_params) do
render(conn, "show.json", teacher: teacher)
end
end
def delete(conn, %{"id" => id}) do
teacher = Teachers.get_teacher!(id)
with {:ok, %Teacher{}} <- Teachers.delete_teacher(teacher) do
send_resp(conn, :no_content, "")
end
end
end
| 28.613636 | 91 | 0.676728 |
087fcd9902655cbb93093600c02af0b1999973ad | 596 | exs | Elixir | test/pixel_font/table_source/otf_layout/feature_test.exs | Dalgona/pixel_font | 6a65bf85e5228296eb29fddbfdd690565767ff76 | [
"MIT"
] | 17 | 2020-09-14T15:25:38.000Z | 2022-03-05T17:14:24.000Z | test/pixel_font/table_source/otf_layout/feature_test.exs | Dalgona/pixel_font | 6a65bf85e5228296eb29fddbfdd690565767ff76 | [
"MIT"
] | 1 | 2021-08-19T05:05:37.000Z | 2021-08-19T05:05:37.000Z | test/pixel_font/table_source/otf_layout/feature_test.exs | Dalgona/pixel_font | 6a65bf85e5228296eb29fddbfdd690565767ff76 | [
"MIT"
] | null | null | null | defmodule PixelFont.TableSource.OTFLayout.FeatureTest do
use PixelFont.Case, async: true
alias PixelFont.TableSource.OTFLayout.Feature
describe "compile/2" do
test "compiles a OpenType feature table" do
feature = %Feature{
tag: "liga",
name: "Test Feature",
lookups: ["Lookup 1", "Lookup 2"]
}
lookup_indices = %{"Lookup 1" => 10, "Lookup 2" => 20}
compiled_feature = Feature.compile(feature, lookup_indices: lookup_indices)
expected = to_wordstring([0, 2, [10, 20]])
assert compiled_feature === expected
end
end
end
| 28.380952 | 81 | 0.654362 |
087fdc9a23a63c66d6fc7a86007893cf176ccfa2 | 136 | exs | Elixir | test/grovepi/potentiometer/default_trigger_test.exs | asummers/grovepi | 8092fd704457265929e4d9676bedd8cf2176f48d | [
"Apache-2.0"
] | 34 | 2017-08-28T22:44:59.000Z | 2022-02-15T06:37:40.000Z | test/grovepi/potentiometer/default_trigger_test.exs | schainks/grovepi | 2de21f12a2ab28f9788a2add4c6409871e098479 | [
"Apache-2.0"
] | 19 | 2017-08-14T17:27:44.000Z | 2019-05-26T02:49:39.000Z | test/grovepi/potentiometer/default_trigger_test.exs | schainks/grovepi | 2de21f12a2ab28f9788a2add4c6409871e098479 | [
"Apache-2.0"
] | 5 | 2017-09-06T02:20:28.000Z | 2020-03-29T06:05:16.000Z | defmodule GrovePi.Potentiometer.DefaultTriggerTest do
use ExUnit.Case, async: true
doctest GrovePi.Potentiometer.DefaultTrigger
end
| 27.2 | 53 | 0.845588 |
087ff0828a047d670eb3d09cde8397699fe98461 | 1,614 | exs | Elixir | secret-handshake/secret_handshake_test.exs | mauricius/exercism-elixir | c6babb343f9f024a84cfa8328c6adf7a8aa504a5 | [
"MIT"
] | null | null | null | secret-handshake/secret_handshake_test.exs | mauricius/exercism-elixir | c6babb343f9f024a84cfa8328c6adf7a8aa504a5 | [
"MIT"
] | null | null | null | secret-handshake/secret_handshake_test.exs | mauricius/exercism-elixir | c6babb343f9f024a84cfa8328c6adf7a8aa504a5 | [
"MIT"
] | null | null | null | if !System.get_env("EXERCISM_TEST_EXAMPLES") do
Code.load_file("secret_handshake.exs", __DIR__)
end
ExUnit.start()
ExUnit.configure(trace: true, exclude: :pending)
defmodule SecretHandshakeTest do
use ExUnit.Case
describe "Create a handshake for a number" do
test "wink for 1" do
assert SecretHandshake.commands(1) == ["wink"]
end
test "double blink for 10" do
assert SecretHandshake.commands(2) == ["double blink"]
end
test "close your eyes for 100" do
assert SecretHandshake.commands(4) == ["close your eyes"]
end
test "jump for 1000" do
assert SecretHandshake.commands(8) == ["jump"]
end
test "combine two actions" do
assert SecretHandshake.commands(3) == ["wink", "double blink"]
end
test "reverse two actions" do
assert SecretHandshake.commands(19) == ["double blink", "wink"]
end
test "reversing one action gives the same action" do
assert SecretHandshake.commands(24) == ["jump"]
end
test "reversing no actions still gives no actions" do
assert SecretHandshake.commands(16) == []
end
test "all possible actions" do
assert SecretHandshake.commands(15) == ["wink", "double blink", "close your eyes", "jump"]
end
test "reverse all possible actions" do
assert SecretHandshake.commands(31) == ["jump", "close your eyes", "double blink", "wink"]
end
test "do nothing for zero" do
assert SecretHandshake.commands(0) == []
end
test "do nothing if lower 5 bits not set" do
assert SecretHandshake.commands(32) == []
end
end
end | 26.9 | 96 | 0.660471 |
087ff3f544d6040e967b912bda40b3fabfd34608 | 12,348 | ex | Elixir | lib/ex_admin/index.ex | ArthurPai/ex_admin | 8894a5e2d4a8ca0c0bfeceded16df3b7058ca665 | [
"MIT"
] | null | null | null | lib/ex_admin/index.ex | ArthurPai/ex_admin | 8894a5e2d4a8ca0c0bfeceded16df3b7058ca665 | [
"MIT"
] | null | null | null | lib/ex_admin/index.ex | ArthurPai/ex_admin | 8894a5e2d4a8ca0c0bfeceded16df3b7058ca665 | [
"MIT"
] | 1 | 2021-04-14T16:18:31.000Z | 2021-04-14T16:18:31.000Z | defmodule ExAdmin.Index do
@moduledoc """
Override the default index page for an ExAdmin resource
By default, ExAdmin renders the index table without any additional
configuration. It renders each column in the model, except the id,
inserted_at, and updated_at columns.
## Default Table Type
ExAdmin displays a selection checkbox column on the left with a batch
action control that enables when a checkbox is selected.
To customize the index page, use the `index` macro.
For example, the following will show on the id an name fields, as
well place a selection column and batch actions row on the page:
defmodule MyProject.ExAdmin.MyModel do
use ExAdmin.Register
register_resource MyProject.MyModel do
index do
selectable_column()
column :id
column :name
actions # display the default actions column
end
end
end
### Image fields
For image fields, use the `image: true` option. For example:
index do
column :name
column :image, [image: true, height: 100], &(ExAdminDemo.Image.url({&1.image, &1}, :thumb))
end
### Custom columns
Columns can be customized with column/2 where the second argument is
an anonymous function called with model. Here are a couple examples:
index do
column :id
column :name, fn(category) ->
Phoenix.HTML.Tag.content_tag :span, category.name,
"data-id": category.id, class: "category"
end
column "Created", fn(category) ->
category.created_at
end
end
### Override the Actions column
The Actions column can be customized by adding `column "Actions", fn(x) -> ...`
column "Actions", fn(r) ->
safe_concat link_to("Restore", "/admin/backuprestores/restore/#\{r.id}", "data-method": :put,
"data-confirm": "You are about to restore #\{r.file_name}. Are you sure?",
class: "member_link restore-link"),
link_to("Delete", "/admin/backuprestores/#\{r.id}", "data-method": :delete,
"data-confirm": "Are you sure you want to delete this?",
class: "member_link")
end
### Associations
By default, ExAdmin will attempt to render a belongs_to association with a
select control, using name field in the association. If you would like to
render an association with another field name, or would like to use more than
one field, use the :field option.
column :account, fields: [:username]
### Change the column label
Use the :label option to override the column name:
column :name, label: "Custom Name"
## As Grid
By providing option `as: :grid` to the `index` macro, a grid index page
is rendered.
### For Example:
index as: :grid, default: true do
cell fn(p) ->
markup do
div do
a href: admin_resource_path(p, :show) do
img(src: ExAdminDemo.Image.url({p.image_file_name, p}, :thumb), height: 100)
end
end
a truncate(p.title), href: admin_resource_path(p, :show)
end
end
end
"""
require Logger
require Integer
import ExAdmin.Utils
import ExAdmin.Helpers
import ExAdmin.Gettext
import Kernel, except: [div: 2, to_string: 1]
use Xain
# alias ExAdmin.Schema
@doc false
defmacro __using__(_) do
quote do
import unquote(__MODULE__)
end
end
@default_actions [:show, :edit, :delete]
@doc """
The index macro is used to customize the index page of a resource.
"""
defmacro index(opts \\ [], do: contents) do
quote location: :keep do
import ExAdmin.CSV, only: [csv: 1, csv: 2]
import ExAdmin.Register
import ExAdmin.Index
def index_view(var!(conn), page, scope_counts) do
import ExAdmin.Form, except: [actions: 1]
import ExAdmin.Register, except: [actions: 1]
import ExAdmin.ViewHelpers
var!(columns, ExAdmin.Show) = []
var!(selectable_column, ExAdmin.Index) = nil
var!(actions, ExAdmin.Index) = nil
var!(cell, ExAdmin.Index) = nil
opts = unquote(opts)
unquote(contents)
selectable = case var!(selectable_column, ExAdmin.Index) do
nil -> false
other -> other
end
actions = ExAdmin.Index.get_index_actions(var!(conn).assigns.defn, var!(actions, ExAdmin.Index))
opts = Enum.into(opts, %{})
|> Map.put(:column_list, var!(columns, ExAdmin.Show) |> Enum.reverse)
|> Map.put(:selectable_column, selectable)
|> Map.put(:actions, actions)
markup safe: true do
ExAdmin.Index.render_index_pages(var!(conn), page, scope_counts, var!(cell, ExAdmin.Index), opts)
end
end
end
end
@doc false
def get_index_actions(defn, actions) do
actions = case actions do
[] -> @default_actions
nil -> @default_actions
false -> []
list -> list
end
actions -- (@default_actions -- defn.actions)
end
@doc """
Define which actions will be displayed in the index view.
## Examples
actions
actions [:new, :delete]
"""
defmacro actions(opts \\ []) do
if opts != nil and opts != false and (opts -- @default_actions) != [] do
raise ArgumentError, "Only #{inspect @default_actions} are allowed!"
end
quote do
var!(actions, ExAdmin.Index) = unquote(opts)
end
end
@doc """
Define a grid cell for grid view.
## Example
index as: :grid, default: true, columns: 6 do
import Kernel, except: [div: 2]
cell fn(p) ->
div ".box" do
div ".box-body" do
a href: admin_resource_path(p, :show) do
img(src: ExAdminDemo.Image.url({p.image_file_name, p}, :thumb), height: 100)
end
end
div ".box-footer" do
a truncate(p.title), href: admin_resource_path(p, :show)
end
end
end
end
"""
defmacro cell(fun) do
quote do
var!(cell, ExAdmin.Index) = unquote(fun)
end
end
@doc """
Add a column of selection check boxes
Allows users to select individual rows on the index page. Selecting
columns activates the batch actions button.
"""
defmacro selectable_column do
quote do
var!(selectable_column, ExAdmin.Index) = true
end
end
@doc false
def default_index_view(conn, page, scope_counts) do
case conn.assigns.defn do
nil ->
throw :invalid_route
%{__struct__: _} = defn ->
columns = case defn.index_filters do
[] -> []
[false] -> []
[_] ->
ExAdmin.Filter.fields(conn.assigns.defn)
|> Keyword.keys
end
|> case do
[] ->
defn.resource_model.__schema__(:fields)
|> Enum.filter(&(not &1 in [:inserted_at, :updated_at]))
other ->
other
end
|> Enum.map(&({translate_field(defn, &1), %{}}))
columns = if :id in defn.resource_model.__schema__(:fields) and Enum.any?(columns, (&(elem(&1, 0) == :id))) do
Keyword.put columns, :id, %{link: true}
else
columns
end
opts = %{}
|> Map.put(:column_list, columns)
|> Map.put(:selectable_column, true)
|> Map.put(:actions, get_index_actions(defn, []))
markup safe: true do
ExAdmin.Index.render_index_pages(var!(conn), page, scope_counts, nil, opts)
end
end
end
defp get_resource_fields([]), do: []
defp get_resource_fields([resource | _]), do: resource.__struct__.__schema__(:fields)
@doc false
def render_index_pages(conn, page, scope_counts, cell, page_opts) do
# require IEx
# IEx.pry
name = resource_model(conn) |> titleize |> Inflex.pluralize
defn = conn.assigns.defn
label = get_resource_label(conn) |> Inflex.pluralize
batch_actions = (not false in defn.batch_actions) and :delete in page_opts[:actions]
opts = %{
columns: Map.get(page_opts, :columns, 3),
column_list: Map.get(page_opts, :column_list),
count: page.total_entries,
name: name,
order: ExQueb.get_sort_order(conn.params["order"]),
href: admin_resource_path(conn, :index) <> "?order=",
defn: defn,
batch_actions: batch_actions,
scopes: defn.scopes,
label: label,
resource_model: conn.params["resource"],
page: page,
cell: cell,
scope_counts: scope_counts,
opts: page_opts,
resources: page.entries,
selectable_column: page_opts[:selectable_column],
actions: page_opts[:actions]
}
_render_index_page(conn, opts, page_opts)
end
defp _render_index_page(conn, opts, %{as: :grid}) do
Module.concat(conn.assigns.theme, Index).wrap_index_grid fn ->
Module.concat(conn.assigns.theme, Index).batch_action_form conn,
false, opts[:scopes], opts[:resource_model], opts[:scope_counts], fn ->
if opts[:count] == 0 do
Module.concat(conn.assigns.theme, Index).blank_slate_page(conn, opts)
else
Module.concat(conn.assigns.theme, Index).paginated_collection_grid(conn, opts)
end
end
end
end
defp _render_index_page(conn, opts, page_opts) do
page = opts[:page]
actions = opts[:actions]
opts = Map.put(opts, :fields, get_resource_fields page.entries)
columns = page_opts[:column_list]
custom_actions_column? = Enum.any? columns, &((elem &1, 0) == "Actions")
columns = if custom_actions_column? || Enum.empty?(actions) do
columns
else
columns ++ [{"Actions", %{fun: fn(resource) -> build_index_links(conn, resource, actions) end,
label: ExAdmin.Gettext.gettext("Actions") }}]
end
opts = Map.put opts, :column_list, columns
Module.concat(conn.assigns.theme, Index).wrap_index_grid fn ->
Module.concat(conn.assigns.theme, Index).batch_action_form conn,
opts[:batch_actions], opts[:scopes], opts[:resource_model], opts[:scope_counts], fn ->
if opts[:count] == 0 do
Module.concat(conn.assigns.theme, Index).blank_slate_page(conn, opts)
else
Module.concat(conn.assigns.theme, Index).paginated_collection_table(conn, opts)
end
end
end
end
@doc """
Build the scope link.
"""
def build_scope_href(href, nil), do: href
def build_scope_href(href, scope) do
String.replace(href, "?", "?scope=#{scope}&")
end
@doc """
Build the order link.
"""
def build_order_href(href, {name, sort}), do: href <> "#{name}_#{sort}"
def build_order_href(href, _), do: href
@doc """
Build the filter link.
"""
def build_filter_href(href, nil), do: href
def build_filter_href(href, q) do
q
|> Map.to_list
|> Enum.reduce(href, fn({name, value}, acc) ->
acc <> "&q%5B" <> name <> "%5D=" <> value
end)
end
@doc false
def download_links(conn, opts) do
div ".download_links " <> (gettext "Download:") <> " " do
a "CSV", href: build_csv_href(conn, opts)
end
end
@doc false
def build_csv_href(conn, opts) do
admin_resource_path(conn, :csv) <> "?order="
|> build_scope_href(conn.params["scope"])
|> build_order_href(opts[:order])
|> build_filter_href(conn.params["q"])
end
@doc false
def parameterize(name, seperator \\ "_")
def parameterize(atom, seperator) when is_atom(atom) do
Atom.to_string(atom)
|> parameterize(seperator)
end
def parameterize(string, seperator) do
Inflex.parameterize(string, seperator)
end
@doc false
def build_index_links(conn, resource, actions) do
resource_model = resource.__struct__
links = case actions do
nil -> []
other -> other
end
list = get_authorized_links(conn, links, resource_model) |> Enum.reverse
labels = conn.assigns.defn.action_labels
Module.concat(conn.assigns.theme, Index).handle_action_links(list, resource, labels)
end
@doc false
def get_authorized_links(conn, links, resource_model) do
Enum.reduce links, [], fn(item, acc) ->
if ExAdmin.Utils.authorized_action?(conn, item, resource_model), do: [item | acc], else: acc
end
end
end
| 29.611511 | 118 | 0.621963 |
08800e62009bb5c52bbfcfb3d9b377361d3a96d3 | 984 | ex | Elixir | lib/xgen/generator/standard_callbacks.ex | ejpcmac/ex_gen | ccd2beec8912fa6ea6ff9839454986bf3124b001 | [
"MIT"
] | 1 | 2019-10-06T07:31:06.000Z | 2019-10-06T07:31:06.000Z | lib/xgen/generator/standard_callbacks.ex | ejpcmac/xgen | ccd2beec8912fa6ea6ff9839454986bf3124b001 | [
"MIT"
] | 30 | 2018-08-29T12:52:59.000Z | 2020-05-28T04:54:33.000Z | lib/xgen/generator/standard_callbacks.ex | ejpcmac/ex_gen | ccd2beec8912fa6ea6ff9839454986bf3124b001 | [
"MIT"
] | 1 | 2020-09-25T09:39:56.000Z | 2020-09-25T09:39:56.000Z | defmodule XGen.Generator.StandardCallbacks do
@moduledoc """
Standard callbacks to use in generators.
"""
import Marcus
@doc """
Initialises a Git repository and sets correct permissions on the `.gitsetup`
script if necessary.
"""
@spec init_git(map()) :: map()
def init_git(opts) do
if opts[:git?] do
green_info("* initializing an empty Git repository")
_ = System.cmd("git", ["init"])
File.chmod!(".gitsetup", 0o755)
end
opts
end
@doc """
Prints a project created message.
"""
@spec project_created(map()) :: map()
def project_created(opts) do
success("\nYour project has been successfully created.\n")
opts
end
@doc """
Prints instructions to setup gitflow.
"""
@spec gitsetup_instructions(map()) :: map()
def gitsetup_instructions(opts) do
if opts[:git?] do
info("""
After your first commit, you can setup gitflow:
./.gitsetup
""")
end
opts
end
end
| 20.5 | 78 | 0.629065 |
0880125e54313079be83803fb53e5905b69587fd | 1,237 | exs | Elixir | all-your-base/all_your_base.exs | ravanscafi/exercism-elixir | 0f5c8c923166a0a795c323c7e2d6ccc9da572fcf | [
"MIT"
] | null | null | null | all-your-base/all_your_base.exs | ravanscafi/exercism-elixir | 0f5c8c923166a0a795c323c7e2d6ccc9da572fcf | [
"MIT"
] | null | null | null | all-your-base/all_your_base.exs | ravanscafi/exercism-elixir | 0f5c8c923166a0a795c323c7e2d6ccc9da572fcf | [
"MIT"
] | null | null | null | defmodule AllYourBase do
@doc """
Given a number in base a, represented as a sequence of digits, converts it to base b,
or returns nil if either of the bases are less than 2
"""
@spec convert(list, integer, integer) :: list
def convert([], _, _), do: nil
def convert(_, base_a, base_b) when base_a < 2 or base_b < 2, do: nil
def convert(digits, base_a, base_b) do
if valid?(digits, base_a) do
digits
|> digits_to_number(base_a)
|> number_to_digits(base_b)
end
end
defp valid?(digits, base) do
not Enum.any?(digits, &(&1 < 0 or &1 >= base))
end
defp digits_to_number(digits, base) do
digits
|> Enum.reduce(
{0, Enum.count(digits) - 1},
fn digit, {acc, power} -> {acc + digit_to_number(digit, base, power), power - 1} end
)
|> elem(0)
end
defp digit_to_number(digit, base, expoent) do
(digit * :math.pow(base, expoent))
|> round()
end
defp number_to_digits(num, base), do: number_to_digits_acc(num, base, [])
defp number_to_digits_acc(0, _, []), do: [0]
defp number_to_digits_acc(0, _, acc), do: acc
defp number_to_digits_acc(num, base, acc) do
number_to_digits_acc(div(num, base), base, [rem(num, base) | acc])
end
end
| 26.319149 | 90 | 0.640259 |
08807600addf3d6c662d8c881f82c0436fbd992c | 16,784 | exs | Elixir | test/broadway_dashboard/counters_test.exs | leductam/broadway_dashboard | 57b6b2ddcbfedeaca22a7daf96ee643580b83eb6 | [
"Apache-2.0"
] | 132 | 2021-05-27T12:18:09.000Z | 2022-02-15T09:30:56.000Z | test/broadway_dashboard/counters_test.exs | leductam/broadway_dashboard | 57b6b2ddcbfedeaca22a7daf96ee643580b83eb6 | [
"Apache-2.0"
] | 9 | 2021-05-26T23:22:02.000Z | 2021-09-08T11:43:08.000Z | test/broadway_dashboard/counters_test.exs | leductam/broadway_dashboard | 57b6b2ddcbfedeaca22a7daf96ee643580b83eb6 | [
"Apache-2.0"
] | 9 | 2021-06-28T18:20:01.000Z | 2022-02-21T14:13:28.000Z | defmodule BroadwayDashboard.CountersTest do
use ExUnit.Case, async: true
use ExUnitProperties
alias BroadwayDashboard.Counters
test "build/1 builds counters with a topology without batchers" do
topology = [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: 40}],
batchers: []
]
assert %Counters{stages: 40, counters: counters, atomics: atomics, batchers_positions: %{}} =
Counters.build(topology)
assert %{size: 2} = :counters.info(counters)
assert %{size: 120} = :atomics.info(atomics)
end
test "build/1 builds counters with a topology with batchers" do
topology = [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: 40}],
batchers: [
%{name: :default, batcher_key: :default, concurrency: 5},
%{name: :s3, batcher_key: :s3, concurrency: 3}
]
]
assert %Counters{stages: 50, counters: counters, atomics: atomics, batchers_positions: pos} =
Counters.build(topology)
assert %{size: 2} = :counters.info(counters)
assert %{size: 150} = :atomics.info(atomics)
assert %{default: 41, s3: 47} == pos
end
test "incr/3 increments successes and failures" do
ref = :counters.new(2, [:write_concurrency])
counters = %Counters{counters: ref}
assert :ok = Counters.incr(counters, 15, 1)
assert :counters.get(ref, 1) == 15
assert :counters.get(ref, 2) == 1
end
test "put_processor_start/3 sets the start time of a processor" do
topology = [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: 40}],
batchers: []
]
counters = Counters.build(topology)
start = System.monotonic_time()
assert :ok = Counters.put_processor_start(counters, 0, start)
assert :atomics.get(counters.atomics, 1) == start
assert :ok = Counters.put_processor_start(counters, 19, start)
assert :atomics.get(counters.atomics, 20) == start
assert :ok = Counters.put_processor_start(counters, 39, start)
assert :atomics.get(counters.atomics, 40) == start
end
test "put_processor_end/3 sets the end time of a processor" do
topology = [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: 40}],
batchers: []
]
counters = Counters.build(topology)
initial_pos = counters.stages
end_time = System.monotonic_time()
assert :ok = Counters.put_processor_end(counters, 0, end_time)
assert :atomics.get(counters.atomics, initial_pos + 1) == end_time
assert :ok = Counters.put_processor_end(counters, 19, end_time)
assert :atomics.get(counters.atomics, initial_pos + 20) == end_time
assert :ok = Counters.put_processor_end(counters, 39, end_time)
assert :atomics.get(counters.atomics, initial_pos + 40) == end_time
end
test "put_processor_workload/3 sets the processing workload of a processor" do
topology = [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: 40}],
batchers: []
]
counters = Counters.build(topology)
initial_pos = counters.stages * 2
workload = 80
assert :ok = Counters.put_processor_workload(counters, 0, workload)
assert :atomics.get(counters.atomics, initial_pos + 1) == workload
assert :ok = Counters.put_processor_workload(counters, 19, workload)
assert :atomics.get(counters.atomics, initial_pos + 20) == workload
assert :ok = Counters.put_processor_workload(counters, 39, workload)
assert :atomics.get(counters.atomics, initial_pos + 40) == workload
end
test "get_processor_start/2 returns the start time of a processor" do
topology = [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: 40}],
batchers: []
]
counters = Counters.build(topology)
start = System.monotonic_time()
Counters.put_processor_start(counters, 0, start)
assert {:ok, ^start} = Counters.fetch_processor_start(counters, 0)
Counters.put_processor_start(counters, 19, start)
assert {:ok, ^start} = Counters.fetch_processor_start(counters, 19)
Counters.put_processor_start(counters, 39, start)
assert {:ok, ^start} = Counters.fetch_processor_start(counters, 39)
end
test "get_processor_end/2 returns the end time of a processor" do
topology = [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: 40}],
batchers: []
]
counters = Counters.build(topology)
end_time = System.monotonic_time()
Counters.put_processor_end(counters, 0, end_time)
assert {:ok, ^end_time} = Counters.fetch_processor_end(counters, 0)
Counters.put_processor_end(counters, 19, end_time)
assert {:ok, ^end_time} = Counters.fetch_processor_end(counters, 19)
Counters.put_processor_end(counters, 39, end_time)
assert {:ok, ^end_time} = Counters.fetch_processor_end(counters, 39)
end
test "get_processor_workload/2 returns the end time of a processor" do
topology = [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: 40}],
batchers: []
]
counters = Counters.build(topology)
end_time = System.monotonic_time()
Counters.put_processor_workload(counters, 0, end_time)
assert {:ok, ^end_time} = Counters.fetch_processor_workload(counters, 0)
Counters.put_processor_workload(counters, 19, end_time)
assert {:ok, ^end_time} = Counters.fetch_processor_workload(counters, 19)
Counters.put_processor_workload(counters, 39, end_time)
assert {:ok, ^end_time} = Counters.fetch_processor_workload(counters, 39)
end
test "put_batcher_start/3 sets the start time for a batcher" do
proc_concurrency = 40
topology = [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: proc_concurrency}],
batchers: [
%{name: :default, batcher_key: :default, concurrency: 5},
%{name: :s3, batcher_key: :s3, concurrency: 3}
]
]
counters = Counters.build(topology)
start = System.monotonic_time()
assert :ok = Counters.put_batcher_start(counters, :default, start)
assert :atomics.get(counters.atomics, proc_concurrency + 1) == start
assert :ok = Counters.put_batcher_start(counters, :s3, start)
# This is 7 because it's 1 from default, + 5 batch processors from default, + 1 s3 batcher
assert :atomics.get(counters.atomics, proc_concurrency + 7) == start
assert {:error, :batcher_position_not_found} =
Counters.put_batcher_start(counters, :sqs, start)
end
test "put_batcher_end/3 sets the end time for a batcher" do
proc_concurrency = 40
topology = [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: proc_concurrency}],
batchers: [
%{name: :default, batcher_key: :default, concurrency: 5},
%{name: :s3, batcher_key: :s3, concurrency: 3}
]
]
counters = Counters.build(topology)
end_time = System.monotonic_time()
assert :ok = Counters.put_batcher_end(counters, :default, end_time)
assert :atomics.get(counters.atomics, counters.stages + proc_concurrency + 1) == end_time
assert :ok = Counters.put_batcher_end(counters, :s3, end_time)
# This is 7 because it's 1 from default, + 5 batch processors from default, + 1 s3 batcher
assert :atomics.get(counters.atomics, counters.stages + proc_concurrency + 7) == end_time
assert {:error, :batcher_position_not_found} =
Counters.put_batcher_end(counters, :sqs, end_time)
end
test "put_batcher_workload/3 sets the processing workload for a batcher" do
proc_concurrency = 40
topology = [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: proc_concurrency}],
batchers: [
%{name: :default, batcher_key: :default, concurrency: 5},
%{name: :s3, batcher_key: :s3, concurrency: 3}
]
]
counters = Counters.build(topology)
workload = System.monotonic_time()
assert :ok = Counters.put_batcher_workload(counters, :default, workload)
assert :atomics.get(counters.atomics, counters.stages * 2 + proc_concurrency + 1) == workload
assert :ok = Counters.put_batcher_workload(counters, :s3, workload)
# This is 7 because it's 1 from default, + 5 batch processors from default, + 1 s3 batcher
assert :atomics.get(counters.atomics, counters.stages * 2 + proc_concurrency + 7) == workload
assert {:error, :batcher_position_not_found} =
Counters.put_batcher_workload(counters, :sqs, workload)
end
test "get_batcher_start/2 gets the start time for a batcher" do
proc_concurrency = 40
topology = [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: proc_concurrency}],
batchers: [
%{name: :default, batcher_key: :default, concurrency: 5},
%{name: :s3, batcher_key: :s3, concurrency: 3}
]
]
counters = Counters.build(topology)
start = System.monotonic_time()
:ok = Counters.put_batcher_start(counters, :default, start)
assert {:ok, ^start} = Counters.fetch_batcher_start(counters, :default)
assert {:error, :batcher_position_not_found} = Counters.fetch_batcher_start(counters, :sqs)
end
test "get_batcher_end/2 gets the end time for a batcher" do
proc_concurrency = 40
topology = [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: proc_concurrency}],
batchers: [
%{name: :default, batcher_key: :default, concurrency: 5},
%{name: :s3, batcher_key: :s3, concurrency: 3}
]
]
counters = Counters.build(topology)
end_time = System.monotonic_time()
Counters.put_batcher_end(counters, :default, end_time)
assert {:ok, ^end_time} = Counters.fetch_batcher_end(counters, :default)
Counters.put_batcher_end(counters, :s3, end_time)
assert {:ok, ^end_time} = Counters.fetch_batcher_end(counters, :s3)
assert {:error, :batcher_position_not_found} = Counters.fetch_batcher_end(counters, :sqs)
end
test "get_batcher_workload/3 gets the processing workload for a batcher" do
proc_concurrency = 40
topology = [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: proc_concurrency}],
batchers: [
%{name: :default, batcher_key: :default, concurrency: 5},
%{name: :s3, batcher_key: :s3, concurrency: 3}
]
]
counters = Counters.build(topology)
workload = System.monotonic_time()
Counters.put_batcher_workload(counters, :default, workload)
assert {:ok, ^workload} = Counters.fetch_batcher_workload(counters, :default)
Counters.put_batcher_workload(counters, :s3, workload)
assert {:ok, ^workload} = Counters.fetch_batcher_workload(counters, :s3)
assert {:error, :batcher_position_not_found} = Counters.fetch_batcher_workload(counters, :sqs)
end
test "put_batch_processor_start/4 puts the value" do
proc_concurrency = 40
topology = [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: proc_concurrency}],
batchers: [
%{name: :default, batcher_key: :default, concurrency: 5},
%{name: :s3, batcher_key: :s3, concurrency: 3}
]
]
counters = Counters.build(topology)
start = System.monotonic_time()
Counters.put_batch_processor_start(counters, :default, 1, start)
assert {:ok, ^start} = Counters.fetch_batch_processor_start(counters, :default, 1)
Counters.put_batch_processor_start(counters, :s3, 3, start)
assert {:ok, ^start} = Counters.fetch_batch_processor_start(counters, :s3, 3)
assert {:error, :batcher_position_not_found} =
Counters.fetch_batch_processor_workload(counters, :sqs, 4)
end
property "the setters never conflict" do
topology = [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: 30}],
batchers: [
%{name: :default, batcher_key: :default, concurrency: 5},
%{name: :s3, batcher_key: :s3, concurrency: 10}
]
]
counters = Counters.build(topology)
funs =
expand_args([
{:fetch_processor_start, :put_processor_start, [0..29]},
{:fetch_processor_end, :put_processor_end, [0..29]},
{:fetch_processor_workload, :put_processor_workload, [0..29]},
# default
{:fetch_batcher_start, :put_batcher_start, [:default]},
{:fetch_batcher_end, :put_batcher_end, [:default]},
{:fetch_batcher_workload, :put_batcher_workload, [:default]},
# s3
{:fetch_batcher_start, :put_batcher_start, [:s3]},
{:fetch_batcher_end, :put_batcher_end, [:s3]},
{:fetch_batcher_workload, :put_batcher_workload, [:s3]},
# default batch processors
{:fetch_batch_processor_start, :put_batch_processor_start, [:default, 0..4]},
{:fetch_batch_processor_end, :put_batch_processor_end, [:default, 0..4]},
{:fetch_batch_processor_workload, :put_batch_processor_workload, [:default, 0..4]},
# s3 batch processors
{:fetch_batch_processor_start, :put_batch_processor_start, [:s3, 0..9]},
{:fetch_batch_processor_end, :put_batch_processor_end, [:s3, 0..9]},
{:fetch_batch_processor_workload, :put_batch_processor_workload, [:s3, 0..9]}
])
assert counters.stages * 3 == length(funs)
check all values <- uniq_list_of(integer(10..10_000), length: length(funs)) do
counters = Counters.build(topology)
for {value, {getter, setter, args}} <- Enum.zip(values, Enum.shuffle(funs)) do
args = [counters | args]
assert {:ok, 0} = apply(Counters, getter, args)
assert :ok = apply(Counters, setter, args ++ [value])
assert {:ok, ^value} = apply(Counters, getter, args)
end
end
end
defp expand_args(functions) do
Enum.flat_map(functions, fn fun ->
{getter, setter, args} = fun
[maybe_range | args] = Enum.reverse(args)
if match?(%Range{}, maybe_range) do
for i <- maybe_range, do: {getter, setter, Enum.reverse([i | args])}
else
[fun]
end
end)
end
test "topology_workload/2 builds the workload of a topology" do
topology = [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: 3}],
batchers: [
%{name: :default, batcher_key: :default, concurrency: 5},
%{name: :s3, batcher_key: :s3, concurrency: 2}
]
]
counters = Counters.build(topology)
assert [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: 3, workloads: [0, 0, 0]}],
batchers: [
%{
name: :default,
batcher_key: :default,
concurrency: 5,
batcher_workload: 0,
workloads: [0, 0, 0, 0, 0]
},
%{
name: :s3,
batcher_key: :s3,
concurrency: 2,
batcher_workload: 0,
workloads: [0, 0]
}
]
] = Counters.topology_workload(counters, topology)
:ok = Counters.put_processor_workload(counters, 1, 53)
:ok = Counters.put_processor_workload(counters, 2, 89)
:ok = Counters.put_batcher_workload(counters, :s3, 42)
:ok = Counters.put_batcher_workload(counters, :default, 13)
:ok = Counters.put_batch_processor_workload(counters, :default, 2, 5)
:ok = Counters.put_batch_processor_workload(counters, :default, 4, 8)
:ok = Counters.put_batch_processor_workload(counters, :s3, 0, 42)
assert [
producers: [%{name: :default, concurrency: 1}],
processors: [%{name: :default, concurrency: 3, workloads: [0, 53, 89]}],
batchers: [
%{
name: :default,
batcher_key: :default,
concurrency: 5,
batcher_workload: 13,
workloads: [0, 0, 5, 0, 8]
},
%{
name: :s3,
batcher_key: :s3,
concurrency: 2,
batcher_workload: 42,
workloads: [42, 0]
}
]
] = Counters.topology_workload(counters, topology)
end
end
| 35.112971 | 98 | 0.645794 |
0880a38b165c56eebe578d87d379bd8b33d29b3d | 1,086 | exs | Elixir | clients/tasks/mix.exs | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/tasks/mix.exs | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/tasks/mix.exs | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | defmodule GoogleApi.Tasks.V1.Mixfile do
use Mix.Project
@version "0.1.0"
@doc false
def project do
  # Mix project definition for the google_api_tasks client package.
  [
    app: :google_api_tasks,
    version: @version,
    elixir: "~> 1.4",
    build_embedded: Mix.env() == :prod,
    start_permanent: Mix.env() == :prod,
    description: description(),
    package: package(),
    deps: deps(),
    source_url: "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/tasks"
  ]
end
# OTP application callback configuration: no supervision tree of its own,
# only :logger as an extra application.
def application(), do: [extra_applications: [:logger]]
# Package dependencies; tooling-only entries are restricted to :dev.
defp deps() do
  runtime = [{:google_gax, "~> 0.1.0"}]
  tooling = [{:ex_doc, "~> 0.16", only: :dev}]
  runtime ++ tooling
end
# Hex package description. The original heredoc form produced a trailing
# newline, which the plain literal below preserves byte-for-byte.
defp description() do
  "Lets you manage your tasks and task lists.\n"
end
# Hex package metadata shown on the package page.
defp package() do
  links = %{
    "GitHub" => "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/tasks",
    "Homepage" => "https://developers.google.com/google-apps/tasks/firstapp"
  }

  [
    files: ["lib", "mix.exs", "README*", "LICENSE"],
    maintainers: ["Jeff Ching"],
    licenses: ["Apache 2.0"],
    links: links
  ]
end
end
| 22.625 | 105 | 0.592081 |
0880d4533f929c7a48e22b8abc5ab989d3286279 | 3,047 | exs | Elixir | test/ex_check/project_cases/config_and_scripts_test.exs | gerbal/ex_check | 1247075a64d17f69c3e6e9699bd95664cc128466 | [
"MIT"
] | 225 | 2019-07-21T14:44:17.000Z | 2022-03-31T11:08:07.000Z | test/ex_check/project_cases/config_and_scripts_test.exs | gerbal/ex_check | 1247075a64d17f69c3e6e9699bd95664cc128466 | [
"MIT"
] | 23 | 2019-07-30T03:05:42.000Z | 2022-03-06T18:11:50.000Z | test/ex_check/project_cases/config_and_scripts_test.exs | gerbal/ex_check | 1247075a64d17f69c3e6e9699bd95664cc128466 | [
"MIT"
] | 9 | 2019-11-23T23:04:39.000Z | 2022-03-29T00:54:34.000Z | defmodule ExCheck.ProjectCases.ConfigAndScriptsTest do
use ExCheck.ProjectCase, async: true
@config """
[
parallel: false,
skipped: false,
fix: true,
tools: [
{:compiler, false},
{:formatter, enabled: false},
{:ex_unit, order: 2, command: ~w[mix test --cover]},
{:my_mix_task, order: 1, command: "mix my_task a", env: %{"MIX_ENV" => "prod"}},
{:my_elixir_script, "elixir priv/scripts/script.exs a"},
{:my_shell_script, command: ["script.sh", "a b"], cd: "scripts", env: %{"SOME" => "xyz"}},
]
]
"""
@mix_task ~S"""
defmodule Mix.Tasks.MyTask do
def run(args) do
IO.puts(IO.ANSI.format([:yellow, "my mix task #{Enum.join(args)} #{Mix.env}"]))
end
end
"""
@elixir_script ~S"""
IO.puts(IO.ANSI.format([:blue, "my elixir script #{Enum.join(System.argv())}"]))
"""
@shell_script """
#!/bin/sh
echo my shell script $1 $SOME
"""
@ansi_code_regex ~r/(\x1b\[[0-9;]*m)/
# End-to-end check: drop a custom .check.exs plus three user-defined tools
# (a mix task, an elixir script, a shell script) into a generated project,
# run `mix check` in a child VM, and assert on which tools ran / were skipped.
test "config and scripts", %{project_dir: project_dir} do
# Write the tool configuration (@config) into the generated project.
config_path = Path.join(project_dir, ".check.exs")
File.write!(config_path, @config)
# Install the custom mix task referenced as :my_mix_task in @config.
mix_task_path = Path.join([project_dir, "lib", "mix", "tasks", "my_task.ex"])
File.mkdir_p!(Path.dirname(mix_task_path))
File.write!(mix_task_path, @mix_task)
# Install the plain elixir script referenced as :my_elixir_script.
elixir_script_path = Path.join([project_dir, "priv", "scripts", "script.exs"])
File.mkdir_p!(Path.dirname(elixir_script_path))
File.write!(elixir_script_path, @elixir_script)
# Install the shell script referenced as :my_shell_script and mark it executable.
shell_script_path = Path.join([project_dir, "scripts", "script.sh"])
File.mkdir_p!(Path.dirname(shell_script_path))
File.write!(shell_script_path, @shell_script)
File.chmod!(shell_script_path, 0o755)
supports_erl_config = Version.match?(System.version(), ">= 1.9.0")
# Run `mix check` with ANSI forced on (so color codes can be asserted below),
# capturing stderr into stdout, and require a zero exit status.
assert {output, 0} =
System.cmd(
"elixir",
["-e", "Application.put_env(:elixir, :ansi_enabled, true)", "-S", "mix", "check"],
cd: project_dir,
stderr_to_stdout: true
)
# Strip ANSI escape sequences for the plain-text content assertions.
plain_output = String.replace(output, @ansi_code_regex, "")
assert plain_output =~ "compiler success"
# :formatter is configured with enabled: false in @config, so no success line.
refute plain_output =~ "formatter success"
assert plain_output =~ "ex_unit success"
if Version.match?(System.version(), ">= 1.10.0") do
assert plain_output =~ "unused_deps fix success"
end
refute plain_output =~ "credo skipped due to missing package credo"
assert plain_output =~ "my_mix_task success"
assert plain_output =~ "my_elixir_script success"
assert plain_output =~ "my_shell_script success"
assert output =~ "Generated HTML coverage results"
# The tools' own colored output is expected verbatim (color + faint).
assert output =~ IO.ANSI.yellow() <> IO.ANSI.faint() <> "my mix task a prod"
if supports_erl_config do
assert output =~ IO.ANSI.blue() <> IO.ANSI.faint() <> "my elixir script a"
else
assert output =~ "my elixir script a"
end
assert output =~ "my shell script a b xyz"
# @config sets parallel: false with :my_mix_task order: 1 and :ex_unit
# order: 2, so the sequential run order must match this regex.
assert plain_output =~ ~r/running my_shell_script.*running my_mix_task.*running ex_unit/s
end
end
| 32.073684 | 97 | 0.636364 |
0880e497ecf08588549d46de3d7239e3ae7fc575 | 179 | exs | Elixir | config/dev.exs | 4eek/wax | eee7c5460267dce2cd8fe76df2e05f46e90e50b6 | [
"Apache-2.0"
] | null | null | null | config/dev.exs | 4eek/wax | eee7c5460267dce2cd8fe76df2e05f46e90e50b6 | [
"Apache-2.0"
] | null | null | null | config/dev.exs | 4eek/wax | eee7c5460267dce2cd8fe76df2e05f46e90e50b6 | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# Development-time settings for the Wax WebAuthn library.
config :wax,
# WebAuthn relying-party origin — the local dev server URL.
origin: "http://localhost:4000",
# :auto — presumably derives the relying-party id from the origin host;
# confirm against the Wax option docs.
rp_id: :auto,
# Metadata refresh interval — presumably seconds (TODO confirm units).
metadata_update_interval: 3600,
# Attestation types accepted during credential registration.
allowed_attestation_types: [:basic, :uncertain, :attca, :self]
| 22.375 | 64 | 0.72067 |
0880fb7a7db66a89acd733bc4be128bd3e833d7e | 3,289 | exs | Elixir | test/snowpack_test.exs | HGInsights/snowpack | adf1cd8521056896b2e76ad071f761c5f21fc6d5 | [
"Apache-2.0"
] | null | null | null | test/snowpack_test.exs | HGInsights/snowpack | adf1cd8521056896b2e76ad071f761c5f21fc6d5 | [
"Apache-2.0"
] | 13 | 2021-03-25T19:50:58.000Z | 2022-01-26T23:26:28.000Z | test/snowpack_test.exs | HGInsights/snowpack | adf1cd8521056896b2e76ad071f761c5f21fc6d5 | [
"Apache-2.0"
] | null | null | null | defmodule SnowpackTest do
use ExUnit.Case, async: true
import Snowpack.TestHelper
alias Snowpack.Result
# Each connection strategy must yield a working session: `SELECT 1` is
# expected to return a single-row, single-column result.
describe "connect" do
# Tagged ciskip — presumably excluded on CI (needs a local ODBC.ini setup).
@tag ciskip: true
test "using ODBC.ini" do
{:ok, pid} = Snowpack.start_link(odbc_ini_opts())
assert {:ok, %Result{columns: ["1"], num_rows: 1, rows: [[1]]}} =
Snowpack.query(pid, "SELECT 1")
end
test "using SNOWFLAKE_JWT key pair" do
{:ok, pid} = Snowpack.start_link(key_pair_opts())
assert {:ok, %Result{columns: ["1"], num_rows: 1, rows: [[1]]}} =
Snowpack.query(pid, "SELECT 1")
end
# Tagged ciskip — presumably requires Okta credentials unavailable on CI.
@tag ciskip: true
test "using Okta Authenticator" do
{:ok, pid} = Snowpack.start_link(okta_opts())
assert {:ok, %Result{columns: ["1"], num_rows: 1, rows: [[1]]}} =
Snowpack.query(pid, "SELECT 1")
end
end
describe "simple query" do
  setup [:connect]

  test "default protocol", %{pid: pid} do
    self = self()

    # Pass a log callback so the query's DBConnection.LogEntry is delivered
    # to the test process, then assert on the logged query struct.
    {:ok, _} = Snowpack.query(pid, "SELECT 42", [], log: &send(self, &1))

    assert_received %DBConnection.LogEntry{} = entry
    assert %Snowpack.Query{} = entry.query
  end

  # Fix: the setup context was bound as `context` but never used, which
  # emits an unused-variable compiler warning; underscore it instead.
  test "with params", _context do
    assert [[6]] = query("SELECT ? * ?", [2, 3])
  end
end
describe "snowflake sample db query" do
setup [:connect]
test "with params and rows", context do
rows = query("SELECT * FROM SNOWFLAKE_SAMPLE_DATA.TPCH_SF1.CUSTOMER LIMIT ?;", [5])
assert length(rows) == 5
end
test "with join, custom column, where like, and date", context do
assert [first_row, _second_row] =
query(
"""
SELECT ord.O_ORDERKEY, ord.O_ORDERSTATUS, ord.O_ORDERDATE, item.L_PARTKEY, 9 as number
FROM SNOWFLAKE_SAMPLE_DATA.TPCH_SF1.ORDERS ord
INNER JOIN SNOWFLAKE_SAMPLE_DATA.TPCH_SF1.LINEITEM item ON ord.O_ORDERKEY = item.L_ORDERKEY
WHERE ord.O_COMMENT LIKE ?
LIMIT ? OFFSET ?;
""",
["%he carefully stealthy deposits.%", 2, 0]
)
assert %Date{} = Enum.at(first_row, 2)
assert Enum.at(first_row, 4) == 9
end
end
describe "prepare & execute" do
setup [:connect]
# Preparing yields a named query struct that can then be executed with
# positional parameters; 2 * 3 must come back as the single row [6].
test "succeeds", %{pid: pid} do
{:ok, %Snowpack.Query{name: "times", statement: "SELECT ? * ?"} = query} =
Snowpack.prepare(pid, "times", "SELECT ? * ?")
{:ok, _query, %Snowpack.Result{rows: [row]}} = Snowpack.execute(pid, query, [2, 3])
assert row == [6]
end
end
# credo:disable-for-next-line Credo.Check.Design.TagTODO
# TODO: Need to figure out a way to create tables in the standard Snwoflake Set of DBs or create a test DB
# describe "create objects" do
# setup [:connect]
#
# test "can create and drop table", %{pid: pid} do
# assert {:ok, %Result{columns: nil, num_rows: 1, rows: nil}} =
# Snowpack.query(pid, "CREATE OR REPLACE TABLE test_table (amount number)")
#
# assert {:ok, %Result{columns: nil, num_rows: 1, rows: nil}} =
# Snowpack.query(pid, "DROP TABLE test_table")
# end
# end
# ExUnit setup callback: open one key-pair-authenticated connection and
# expose it to each test via the :pid context key.
defp connect(_context) do
  {:ok, conn} = Snowpack.start_link(key_pair_opts())
  {:ok, [pid: conn]}
end
end
| 30.453704 | 109 | 0.583156 |
0880fbc29d33f43b7dcfc1036dbcfc31305287e0 | 139 | ex | Elixir | lib/turret_web/controllers/page_controller.ex | rosswilson/turret-elixir | effbc34a14e95d73db2075c66fe78f8432f83977 | [
"MIT"
] | 1 | 2021-02-03T23:34:04.000Z | 2021-02-03T23:34:04.000Z | lib/turret_web/controllers/page_controller.ex | rosswilson/turret-elixir | effbc34a14e95d73db2075c66fe78f8432f83977 | [
"MIT"
] | 58 | 2021-02-16T10:16:08.000Z | 2022-03-07T10:57:32.000Z | lib/turret_web/controllers/page_controller.ex | rosswilson/turret-elixir | effbc34a14e95d73db2075c66fe78f8432f83977 | [
"MIT"
] | null | null | null | defmodule TurretWeb.PageController do
use TurretWeb, :controller
@doc """
GET / — renders the static index template; request params are ignored.
"""
def index(conn, _params), do: render(conn, "index.html")
end
| 17.375 | 37 | 0.733813 |
0881038a8cf8c279d315d20ae248d5c8f1a21385 | 2,124 | ex | Elixir | clients/service_user/lib/google_api/service_user/v1/model/backend_rule.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/service_user/lib/google_api/service_user/v1/model/backend_rule.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/service_user/lib/google_api/service_user/v1/model/backend_rule.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.ServiceUser.V1.Model.BackendRule do
@moduledoc """
A backend rule provides configuration for an individual API element.
## Attributes
- address (String.t): The address of the API backend. Defaults to: `null`.
- deadline (float()): The number of seconds to wait for a response from a request. The default deadline for gRPC is infinite (no deadline) and HTTP requests is 5 seconds. Defaults to: `null`.
- minDeadline (float()): Minimum deadline in seconds needed for this method. Calls having deadline value lower than this will be rejected. Defaults to: `null`.
- selector (String.t): Selects the methods to which this rule applies. Refer to selector for syntax details. Defaults to: `null`.
"""
# Presumably defines the struct and decode helpers plus the field/1 macro
# used below — confirm against GoogleApi.Gax.ModelBase.
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:address => any(),
:deadline => any(),
:minDeadline => any(),
:selector => any()
}
# Register each JSON attribute of the generated model.
field(:address)
field(:deadline)
field(:minDeadline)
field(:selector)
end
defimpl Poison.Decoder, for: GoogleApi.ServiceUser.V1.Model.BackendRule do
  # Forward decoding to the model's own decode/2 (presumably provided by its
  # `use GoogleApi.Gax.ModelBase`).
  def decode(value, options),
    do: GoogleApi.ServiceUser.V1.Model.BackendRule.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.ServiceUser.V1.Model.BackendRule do
  # Encoding is delegated to the shared Gax helper.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 37.263158 | 194 | 0.728814 |
08810a29d41ac906d112943c6cdecfdcd9f774c6 | 906 | ex | Elixir | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v2/metadata.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v2/metadata.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v2/metadata.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudResourceManager.V2 do
  @moduledoc """
  API client metadata for GoogleApi.CloudResourceManager.V2.
  """

  # Discovery-document revision this client was generated from.
  @discovery_revision "20210328"

  @doc "Returns the discovery-document revision the client was generated against."
  def discovery_revision, do: @discovery_revision
end
| 33.555556 | 74 | 0.764901 |
088135c33939698a90f18352ccabca09266efe55 | 890 | ex | Elixir | clients/text_to_speech/lib/google_api/text_to_speech/v1/metadata.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/text_to_speech/lib/google_api/text_to_speech/v1/metadata.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/text_to_speech/lib/google_api/text_to_speech/v1/metadata.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.TextToSpeech.V1 do
  @moduledoc """
  API client metadata for GoogleApi.TextToSpeech.V1.
  """

  # Discovery-document revision this client was generated from.
  @discovery_revision "20210226"

  @doc "Returns the discovery-document revision the client was generated against."
  def discovery_revision, do: @discovery_revision
end
| 32.962963 | 74 | 0.760674 |
088139bd8e33a04d3c2d29260d5ac72db3c28d13 | 8,294 | exs | Elixir | test/mix/tasks/phx.gen.schema_test.exs | blunckr/fenix | aeccae9658ed3d85d8af8f28ce2584d407b43d6b | [
"MIT"
] | null | null | null | test/mix/tasks/phx.gen.schema_test.exs | blunckr/fenix | aeccae9658ed3d85d8af8f28ce2584d407b43d6b | [
"MIT"
] | null | null | null | test/mix/tasks/phx.gen.schema_test.exs | blunckr/fenix | aeccae9658ed3d85d8af8f28ce2584d407b43d6b | [
"MIT"
] | null | null | null | Code.require_file "../../../installer/test/mix_helper.exs", __DIR__
# Deliberately-empty module; presumably pre-defined so generator tests can
# exercise collisions with an already-taken module name — TODO confirm where
# it is referenced.
defmodule Phoenix.DupSchema do
end
defmodule Mix.Tasks.Phx.Gen.SchemaTest do
use ExUnit.Case
import MixHelper
alias Mix.Tasks.Phx.Gen
alias Mix.Phoenix.Schema
# Clear Mix's record of already-invoked tasks before each test so generator
# tasks can be re-run as if from a fresh `mix` invocation.
setup do
Mix.Task.clear()
:ok
end
# Gen.Schema.build/2 parses CLI-style args ("Blog.Post posts title:string")
# into a fully-populated Mix.Phoenix.Schema struct.
test "build" do
in_tmp_project "build", fn ->
schema = Gen.Schema.build(~w(Blog.Post posts title:string), [])
# Every derived naming/typing field should follow from the schema name,
# the plural, and the single title:string attribute.
assert %Schema{
alias: Post,
module: Phoenix.Blog.Post,
repo: Phoenix.Repo,
migration?: true,
migration_defaults: %{title: ""},
plural: "posts",
singular: "post",
human_plural: "Posts",
human_singular: "Post",
attrs: [title: :string],
types: %{title: :string},
defaults: %{title: ""},
} = schema
assert String.ends_with?(schema.file, "lib/phoenix/blog/post.ex")
end
end
# user_id:references without a table suffix must abort with a helpful error.
test "table name missing from references", config do
in_tmp_project config.test, fn ->
assert_raise Mix.Error, ~r/expect the table to be given to user_id:references/, fn ->
Gen.Schema.run(~w(Blog.Post posts user_id:references))
end
end
end
# This malformed invocation must raise Mix.Error rather than generate files.
test "plural can't contain a colon" do
assert_raise Mix.Error, fn ->
Gen.Schema.run(~w(Blog Post title:string))
end
end
# Plural arguments containing capital letters or CamelCase are rejected.
test "plural can't have uppercased characters or camelized format" do
assert_raise Mix.Error, fn ->
Gen.Schema.run(~w(Blog Post Posts title:string))
end
assert_raise Mix.Error, fn ->
Gen.Schema.run(~w(Blog Post BlogPosts title:string))
end
end
test "table name omitted", config do
in_tmp_project config.test, fn ->
assert_raise Mix.Error, fn ->
Gen.Schema.run(~w(Blog.Post))
end
end
end
test "generates schema", config do
in_tmp_project config.test, fn ->
Gen.Schema.run(~w(Blog.Post blog_posts title:string))
assert_file "lib/phoenix/blog/post.ex"
assert [migration] = Path.wildcard("priv/repo/migrations/*_create_blog_post.exs")
assert_file migration, fn file ->
assert file =~ "create table(:blog_posts) do"
end
end
end
test "generates nested schema", config do
in_tmp_project config.test, fn ->
Gen.Schema.run(~w(Blog.Admin.User users name:string))
assert [migration] = Path.wildcard("priv/repo/migrations/*_create_blog_admin_user.exs")
assert_file migration, fn file ->
assert file =~ "defmodule Phoenix.Repo.Migrations.CreatePhoenix.Blog.Admin.User do"
assert file =~ "create table(:users) do"
end
assert_file "lib/phoenix/blog/admin/user.ex", fn file ->
assert file =~ "defmodule Phoenix.Blog.Admin.User do"
assert file =~ "schema \"users\" do"
end
end
end
test "generates custom table name", config do
in_tmp_project config.test, fn ->
Gen.Schema.run(~w(Blog.Post posts --table cms_posts))
assert [migration] = Path.wildcard("priv/repo/migrations/*_create_blog_post.exs")
assert_file migration, fn file ->
assert file =~ "create table(:cms_posts) do"
end
end
end
test "generates unique indices" , config do
in_tmp_project config.test, fn ->
Gen.Schema.run(~w(Blog.Post posts title:unique unique_int:integer:unique))
assert [migration] = Path.wildcard("priv/repo/migrations/*_create_blog_post.exs")
assert_file migration, fn file ->
assert file =~ "defmodule Phoenix.Repo.Migrations.CreatePhoenix.Blog.Post do"
assert file =~ "create table(:posts) do"
assert file =~ "add :title, :string"
assert file =~ "add :unique_int, :integer"
assert file =~ "create unique_index(:posts, [:title])"
assert file =~ "create unique_index(:posts, [:unique_int])"
end
assert_file "lib/phoenix/blog/post.ex", fn file ->
assert file =~ "defmodule Phoenix.Blog.Post do"
assert file =~ "schema \"posts\" do"
assert file =~ "field :title, :string"
assert file =~ "field :unique_int, :integer"
end
end
end
test "generates references and belongs_to associations", config do
in_tmp_project config.test, fn ->
Gen.Schema.run(~w(Blog.Post posts title user_id:references:users))
assert [migration] = Path.wildcard("priv/repo/migrations/*_create_blog_post.exs")
assert_file migration, fn file ->
assert file =~ "add :user_id, references(:users, on_delete: :nothing)"
assert file =~ "create index(:posts, [:user_id])"
end
assert_file "lib/phoenix/blog/post.ex", fn file ->
assert file =~ "field :user_id, :id"
end
end
end
test "generates references with unique indexes", config do
in_tmp_project config.test, fn ->
Gen.Schema.run(~w(Blog.Post posts title user_id:references:users unique_post_id:references:posts:unique))
assert [migration] = Path.wildcard("priv/repo/migrations/*_create_blog_post.exs")
assert_file migration, fn file ->
assert file =~ "defmodule Phoenix.Repo.Migrations.CreatePhoenix.Blog.Post do"
assert file =~ "create table(:posts) do"
assert file =~ "add :user_id, references(:users, on_delete: :nothing)"
assert file =~ "add :unique_post_id, references(:posts, on_delete: :nothing)"
assert file =~ "create index(:posts, [:user_id])"
assert file =~ "create unique_index(:posts, [:unique_post_id])"
end
assert_file "lib/phoenix/blog/post.ex", fn file ->
assert file =~ "defmodule Phoenix.Blog.Post do"
assert file =~ "field :user_id, :id"
assert file =~ "field :unique_post_id, :id"
end
end
end
test "generates schema with proper datetime types", config do
in_tmp_project config.test, fn ->
Gen.Schema.run(~w(Blog.Comment comments title:string drafted_at:datetime published_at:naive_datetime edited_at:utc_datetime))
assert_file "lib/phoenix/blog/comment.ex", fn file ->
assert file =~ "field :drafted_at, :naive_datetime"
assert file =~ "field :published_at, :naive_datetime"
assert file =~ "field :edited_at, :utc_datetime"
end
assert [path] = Path.wildcard("priv/repo/migrations/*_create_blog_comment.exs")
assert_file path, fn file ->
assert file =~ "create table(:comments)"
assert file =~ "add :drafted_at, :naive_datetime"
assert file =~ "add :published_at, :naive_datetime"
assert file =~ "add :edited_at, :utc_datetime"
end
end
end
test "generates migration with binary_id", config do
in_tmp_project config.test, fn ->
Gen.Schema.run(~w(Blog.Post posts title user_id:references:users --binary-id))
assert_file "lib/phoenix/blog/post.ex", fn file ->
assert file =~ "field :user_id, :binary_id"
end
assert [migration] = Path.wildcard("priv/repo/migrations/*_create_blog_post.exs")
assert_file migration, fn file ->
assert file =~ "create table(:posts, primary_key: false) do"
assert file =~ "add :id, :binary_id, primary_key: true"
assert file =~ "add :user_id, references(:users, on_delete: :nothing, type: :binary_id)"
end
end
end
test "skips migration with --no-migration option", config do
in_tmp_project config.test, fn ->
Gen.Schema.run(~w(Blog.Post posts --no-migration))
assert [] = Path.wildcard("priv/repo/migrations/*_create_blog_post.exs")
end
end
test "uses defaults from :generators configuration" do
in_tmp_project "uses defaults from generators configuration (migration)", fn ->
with_generator_env [migration: false], fn ->
Gen.Schema.run(~w(Blog.Post posts))
assert [] = Path.wildcard("priv/repo/migrations/*_create_blog_post.exs")
end
end
in_tmp_project "uses defaults from generators configuration (binary_id)", fn ->
with_generator_env [binary_id: true], fn ->
Gen.Schema.run(~w(Blog.Post posts))
assert [migration] = Path.wildcard("priv/repo/migrations/*_create_blog_post.exs")
assert_file migration, fn file ->
assert file =~ "create table(:posts, primary_key: false) do"
assert file =~ "add :id, :binary_id, primary_key: true"
end
end
end
end
end
| 34.558333 | 131 | 0.652399 |
088165e5c4f8ba36fc86cbfcc5870352977e63cd | 917 | exs | Elixir | apps/feedex_core/test/feedex_core/api/reg_feed_test.exs | andyl/ragged | 2baab0849e2dfc068652ecb2fe88a7c6fe5437d0 | [
"MIT"
] | null | null | null | apps/feedex_core/test/feedex_core/api/reg_feed_test.exs | andyl/ragged | 2baab0849e2dfc068652ecb2fe88a7c6fe5437d0 | [
"MIT"
] | 10 | 2021-02-08T00:01:41.000Z | 2021-05-27T12:54:28.000Z | apps/feedex_core/test/feedex_core/api/reg_feed_test.exs | andyl/ragged | 2baab0849e2dfc068652ecb2fe88a7c6fe5437d0 | [
"MIT"
] | null | null | null | defmodule FeedexCore.Api.RegFeedTest do
use ExUnit.Case, async: true
use FeedexCore.DataCase
alias FeedexCore.Api
# Integration tests for Api.RegFeed.find_or_create_regfeed/3.
# `insert/1,2` and `count/1` presumably come from helpers pulled in by
# FeedexCore.DataCase — confirm against that module.
describe "#find_or_create_regfeed" do
# Starting from only a folder, the call should create both the Feed row
# and the Register row tying it to the folder.
test "with no precursors" do
fld = insert(:folder)
assert count(Feed) == 0
assert count(Register) == 0
assert count(Folder) == 1
assert Api.RegFeed.find_or_create_regfeed(fld.id, "bong", "http://bing.com")
assert count(Feed) == 1
assert count(Register) == 1
assert count(Folder) == 1
end
# When a feed with the same URL already exists it is reused (feed count
# stays at 1) and only the register entry is created.
test "with existing feed" do
url = "http://ping.com"
fld = insert(:folder)
insert(:feed, url: url)
assert count(Feed) == 1
assert count(Register) == 0
assert count(Folder) == 1
assert Api.RegFeed.find_or_create_regfeed(fld.id, "bong", url)
assert count(Feed) == 1
assert count(Register) == 1
assert count(Folder) == 1
end
end
end
| 25.472222 | 82 | 0.619411 |
0881a2e8da8b17433bc3835878fdb867d51e63f2 | 1,000 | ex | Elixir | test/support/conn_case.ex | kupferwerk/ci_visuals | 95b6d56b074d6dbc0aad33764fb1dae38f1e3822 | [
"MIT"
] | null | null | null | test/support/conn_case.ex | kupferwerk/ci_visuals | 95b6d56b074d6dbc0aad33764fb1dae38f1e3822 | [
"MIT"
] | null | null | null | test/support/conn_case.ex | kupferwerk/ci_visuals | 95b6d56b074d6dbc0aad33764fb1dae38f1e3822 | [
"MIT"
] | null | null | null | defmodule CiVisuals.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
imports other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
alias CiVisuals.Repo
import Ecto.Model
import Ecto.Query, only: [from: 2]
import CiVisuals.Router.Helpers
# The default endpoint for testing
@endpoint CiVisuals.Endpoint
end
end
setup tags do
unless tags[:async] do
Ecto.Adapters.SQL.restart_test_transaction(CiVisuals.Repo, [])
end
:ok
end
end
| 23.809524 | 68 | 0.709 |
0881b0238d7594592a8b2b85049d058abe6d878b | 456 | ex | Elixir | lib/forge_sdk/protocol/display/protocol_state.ex | ArcBlock/forge-elixir-sdk | 61ff656a59116d3da18055d54655e8eb5ca15f63 | [
"Apache-2.0"
] | 21 | 2019-05-08T01:25:42.000Z | 2022-02-08T02:30:18.000Z | lib/forge_sdk/protocol/display/protocol_state.ex | ArcBlock/forge-elixir-sdk | 61ff656a59116d3da18055d54655e8eb5ca15f63 | [
"Apache-2.0"
] | 6 | 2019-05-27T23:07:00.000Z | 2020-03-10T04:33:10.000Z | lib/forge_sdk/protocol/display/protocol_state.ex | ArcBlock/forge-elixir-sdk | 61ff656a59116d3da18055d54655e8eb5ca15f63 | [
"Apache-2.0"
] | 2 | 2019-07-21T18:12:47.000Z | 2021-07-31T21:18:38.000Z | defimpl ForgeSdk.Display, for: ForgeAbi.ProtocolState do
@moduledoc """
Implementation of `Display` protocol for `ProtocolState`
"""
alias ForgeSdk.Display
def display(data, expand? \\ false) do
basic = Map.from_struct(data)
group = basic.itx |> Map.get(:tags) |> List.first()
Map.merge(basic, %{
group: group || "",
status: Display.display(basic.status),
itx: Display.display(basic.itx, expand?)
})
end
end
| 25.333333 | 58 | 0.651316 |
0881b55c55cf4ebb8059645d483bc4b6eaf2cc72 | 2,001 | exs | Elixir | lib/oban/crontab/parser.ex.exs | hartzell/oban | 2901cd4655d61eda95813cc1880b003f37cd8983 | [
"Apache-2.0"
] | 1 | 2021-11-20T19:16:24.000Z | 2021-11-20T19:16:24.000Z | lib/oban/crontab/parser.ex.exs | hartzell/oban | 2901cd4655d61eda95813cc1880b003f37cd8983 | [
"Apache-2.0"
] | null | null | null | lib/oban/crontab/parser.ex.exs | hartzell/oban | 2901cd4655d61eda95813cc1880b003f37cd8983 | [
"Apache-2.0"
] | null | null | null | defmodule Oban.Crontab.Parser do
@moduledoc false
# parsec:Oban.Crontab.Parser
import NimbleParsec
month_alias =
[
replace(string("JAN"), 1),
replace(string("FEB"), 2),
replace(string("MAR"), 3),
replace(string("APR"), 4),
replace(string("MAY"), 5),
replace(string("JUN"), 6),
replace(string("JUL"), 7),
replace(string("AUG"), 8),
replace(string("SEP"), 9),
replace(string("OCT"), 10),
replace(string("NOV"), 11),
replace(string("DEC"), 12)
]
|> choice()
|> unwrap_and_tag(:literal)
weekday_alias =
[
replace(string("MON"), 1),
replace(string("TUE"), 2),
replace(string("WED"), 3),
replace(string("THU"), 4),
replace(string("FRI"), 5),
replace(string("SAT"), 6),
replace(string("SUN"), 0)
]
|> choice()
|> unwrap_and_tag(:literal)
range =
integer(min: 1, max: 2)
|> ignore(string("-"))
|> integer(min: 1, max: 2)
|> tag(:range)
step =
string("*/")
|> ignore()
|> integer(min: 1, max: 2)
|> unwrap_and_tag(:step)
literal = integer(min: 1, max: 2) |> unwrap_and_tag(:literal)
wild = string("*") |> unwrap_and_tag(:wild)
separator = string(",") |> ignore()
expression = choice([range, literal, step, wild, separator])
minutes =
expression
|> times(min: 1)
|> tag(:minutes)
hours =
expression
|> times(min: 1)
|> tag(:hours)
days =
expression
|> times(min: 1)
|> tag(:days)
months =
[month_alias, expression]
|> choice()
|> times(min: 1)
|> tag(:months)
weekdays =
[weekday_alias, expression]
|> choice()
|> times(min: 1)
|> tag(:weekdays)
defparsec(
:cron,
minutes
|> ignore(string(" "))
|> concat(hours)
|> ignore(string(" "))
|> concat(days)
|> ignore(string(" "))
|> concat(months)
|> ignore(string(" "))
|> concat(weekdays)
)
# parsec:Oban.Crontab.Parser
end
| 19.811881 | 63 | 0.537231 |
0881bc34b82d57092b90fa1663457125a3fa49a0 | 894 | exs | Elixir | test/kantan_cluster/config_test.exs | mnishiguchi/kantan_cluster | 7398a64f003d38c2fd9674a17dcde0c72053ba2a | [
"MIT"
] | 2 | 2021-10-30T02:37:28.000Z | 2021-10-31T12:43:24.000Z | test/kantan_cluster/config_test.exs | mnishiguchi/kantan_cluster | 7398a64f003d38c2fd9674a17dcde0c72053ba2a | [
"MIT"
] | 1 | 2022-01-09T03:50:16.000Z | 2022-01-10T02:46:55.000Z | test/kantan_cluster/config_test.exs | mnishiguchi/kantan_cluster | 7398a64f003d38c2fd9674a17dcde0c72053ba2a | [
"MIT"
] | null | null | null | defmodule KantanCluster.ConfigTest do
use ExUnit.Case
import KantanCluster.Config
describe "get_node_option/1" do
# An explicit {:longnames, name} tuple is passed through unchanged.
test "explicit longnames mode" do
opts = [node: {:longnames, :"node@127.0.0.1"}]
assert {:longnames, :"node@127.0.0.1"} == get_node_option(opts)
end
# An explicit {:shortnames, name} tuple is passed through unchanged.
test "explicit shortnames mode" do
opts = [node: {:shortnames, :"node@my-machine"}]
assert {:shortnames, :"node@my-machine"} == get_node_option(opts)
end
# A bare name string is expanded to a longname using the local hostname
# with a ".local" suffix.
test "implicit name only" do
opts = [node: "nerves"]
{:ok, hostname} = :inet.gethostname()
assert {:longnames, :"nerves@#{hostname}.local"} == get_node_option(opts)
end
# With no :node option a longname is generated; only the host part
# ("@<hostname>.local") is asserted because the name part is generated.
test "implicit blank" do
{:ok, hostname} = :inet.gethostname()
assert {:longnames, long_name} = get_node_option([])
assert Atom.to_string(long_name) =~ Regex.compile!(".*@#{hostname}.local")
end
end
end
| 29.8 | 80 | 0.638702 |
0881d00d82a29d30326b24e3aab391a7ae1414a2 | 484 | exs | Elixir | config/config.exs | bitcrowd/carbonite | 663af92caa2afa8838cb43800da8e320f323276a | [
"Apache-2.0"
] | 26 | 2021-10-30T06:32:20.000Z | 2022-03-28T14:34:24.000Z | config/config.exs | bitcrowd/carbonite | 663af92caa2afa8838cb43800da8e320f323276a | [
"Apache-2.0"
] | 8 | 2021-10-20T08:03:34.000Z | 2022-02-25T08:48:14.000Z | config/config.exs | bitcrowd/carbonite | 663af92caa2afa8838cb43800da8e320f323276a | [
"Apache-2.0"
] | null | null | null | # SPDX-License-Identifier: Apache-2.0
import Config
# Dev/test only: point Carbonite's test repo at a local Postgres instance.
if config_env() in [:dev, :test] do
config :carbonite, Carbonite.TestRepo,
# Resolves to "carbonite_dev" / "carbonite_test" per environment.
database: "carbonite_#{config_env()}",
username: "postgres",
password: "postgres",
hostname: "localhost"
config :carbonite, ecto_repos: [Carbonite.TestRepo]
end
if config_env() == :test do
# Set to :debug to see SQL logs.
config :logger, level: :info
# Use the SQL sandbox pool so tests run in isolated transactions.
config :carbonite, Carbonite.TestRepo, pool: Ecto.Adapters.SQL.Sandbox
end
| 23.047619 | 72 | 0.702479 |
0881d678abd6b848e952bc68f9d51d7022e718ad | 1,449 | ex | Elixir | lib/mail_slurp_api/model/page_alias.ex | mailslurp/mailslurp-client-elixir | 5b98b91bb327de5216e873cd45b4fbb3c1b55c90 | [
"MIT"
] | 1 | 2021-06-17T18:07:49.000Z | 2021-06-17T18:07:49.000Z | lib/mail_slurp_api/model/page_alias.ex | mailslurp/mailslurp-client-elixir | 5b98b91bb327de5216e873cd45b4fbb3c1b55c90 | [
"MIT"
] | null | null | null | lib/mail_slurp_api/model/page_alias.ex | mailslurp/mailslurp-client-elixir | 5b98b91bb327de5216e873cd45b4fbb3c1b55c90 | [
"MIT"
] | 1 | 2021-03-16T18:55:56.000Z | 2021-03-16T18:55:56.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule MailSlurpAPI.Model.PageAlias do
@moduledoc """
Paginated email alias results. Page index starts at zero. Projection results may omit larger entity fields. For fetching a full entity use the projection ID with individual method calls.
"""
# Derive Poison JSON encoding for the struct; decoding is handled by the
# Poison.Decoder defimpl further down in this file.
@derive [Poison.Encoder]
# Keys mirror the API's JSON field names verbatim, hence the quoted
# camelCase atoms such as :"numberOfElements".
defstruct [
:"content",
:"empty",
:"first",
:"last",
:"number",
:"numberOfElements",
:"pageable",
:"size",
:"sort",
:"totalElements",
:"totalPages"
]
# Every field is nilable: per the moduledoc, projections may omit fields.
@type t :: %__MODULE__{
:"content" => [AliasProjection] | nil,
:"empty" => boolean() | nil,
:"first" => boolean() | nil,
:"last" => boolean() | nil,
:"number" => integer() | nil,
:"numberOfElements" => integer() | nil,
:"pageable" => Pageable | nil,
:"size" => integer() | nil,
:"sort" => Sort | nil,
:"totalElements" => integer() | nil,
:"totalPages" => integer() | nil
}
end
defimpl Poison.Decoder, for: MailSlurpAPI.Model.PageAlias do
  import MailSlurpAPI.Deserializer

  # Convert the nested JSON objects into their model structs after Poison's
  # top-level decode; scalar fields are kept as Poison decoded them.
  def decode(value, options) do
    value
    |> deserialize(:"content", :list, MailSlurpAPI.Model.AliasProjection, options)
    |> deserialize(:"pageable", :struct, MailSlurpAPI.Model.Pageable, options)
    |> deserialize(:"sort", :struct, MailSlurpAPI.Model.Sort, options)
  end
end
| 28.98 | 188 | 0.641132 |
0881d9bf5c18fb7b4175e4ffffda7735e3805041 | 1,524 | exs | Elixir | test/plug/adapters/translator_test.exs | DavidAlphaFox/Plug | 3a98e7667d76ba8d2eb629e518bcb7ac83a1a188 | [
"Apache-2.0"
] | 1 | 2019-05-07T15:05:52.000Z | 2019-05-07T15:05:52.000Z | test/plug/adapters/translator_test.exs | DavidAlphaFox/Plug | 3a98e7667d76ba8d2eb629e518bcb7ac83a1a188 | [
"Apache-2.0"
] | null | null | null | test/plug/adapters/translator_test.exs | DavidAlphaFox/Plug | 3a98e7667d76ba8d2eb629e518bcb7ac83a1a188 | [
"Apache-2.0"
] | null | null | null | defmodule Plug.Adapters.TranslatorTest do
  use ExUnit.Case

  # This test module doubles as a minimal Plug: init/1 and call/2 below make
  # it mountable under Cowboy so the tests can hit it over real HTTP.
  def init(opts) do
    opts
  end

  # GET /warn raises a Plug error with a 4xx plug_status — should NOT be
  # logged as a crash by the adapter's error translator.
  def call(%{path_info: ["warn"]}, _opts) do
    raise Plug.Parsers.UnsupportedMediaTypeError, media_type: "foo/bar"
  end

  # GET /error raises a plain RuntimeError — should be logged as a 500 crash.
  def call(%{path_info: ["error"]}, _opts) do
    raise "oops"
  end

  import ExUnit.CaptureIO

  test "ranch/cowboy 500 logs" do
    {:ok, _pid} = Plug.Adapters.Cowboy.http __MODULE__, [], port: 9001
    on_exit fn -> Plug.Adapters.Cowboy.shutdown(__MODULE__.HTTP) end

    output = capture_log fn ->
      :hackney.get("http://127.0.0.1:9001/error", [], "", [])
    end

    # The translator should produce a full crash report for a 500-class error.
    assert output =~ ~r"#PID<0\.\d+\.0> running Plug\.Adapters\.TranslatorTest terminated"
    assert output =~ "Server: 127.0.0.1:9001 (http)"
    assert output =~ "Request: GET /"
    assert output =~ "** (exit) an exception was raised:"
    assert output =~ "** (RuntimeError) oops"
  end

  test "ranch/cowboy non-500 skips" do
    {:ok, _pid} = Plug.Adapters.Cowboy.http __MODULE__, [], port: 9001
    on_exit fn -> Plug.Adapters.Cowboy.shutdown(__MODULE__.HTTP) end

    output = capture_log fn ->
      :hackney.get("http://127.0.0.1:9001/warn", [], "", [])
    end

    # Errors with a non-500 plug_status must not be logged as crashes.
    refute output =~ ~r"#PID<0\.\d+\.0> running Plug\.Adapters\.TranslatorTest terminated"
    refute output =~ "Server: 127.0.0.1:9001 (http)"
    refute output =~ "Request: GET /"
    refute output =~ "** (exit) an exception was raised:"
  end

  # Capture everything written to the :user IO device while `fun` runs,
  # flushing Logger first so async log messages are included in the capture.
  defp capture_log(fun) do
    capture_io(:user, fn ->
      fun.()
      Logger.flush()
    end)
  end
end
| 28.222222 | 90 | 0.632546 |
0881dec4507e594d3c58064e9842dc40dc814b3b | 1,067 | exs | Elixir | mix.exs | smeevil/pay_nl | 8b62ed5c01405aba432e56e8c2b6c5774da1470a | [
"WTFPL"
] | 3 | 2017-10-03T12:30:57.000Z | 2020-01-06T00:23:59.000Z | mix.exs | smeevil/pay_nl | 8b62ed5c01405aba432e56e8c2b6c5774da1470a | [
"WTFPL"
] | null | null | null | mix.exs | smeevil/pay_nl | 8b62ed5c01405aba432e56e8c2b6c5774da1470a | [
"WTFPL"
] | 1 | 2019-02-11T11:12:17.000Z | 2019-02-11T11:12:17.000Z | defmodule PayNL.Mixfile do
use Mix.Project
def project do
[
app: :pay_nl,
version: "0.1.0",
elixir: "~> 1.7",
start_permanent: Mix.env == :prod,
deps: deps(),
test_coverage: [
tool: ExCoveralls
],
preferred_cli_env: [
coveralls: :test
],
dialyzer: [
ignore_warnings: "dialyzer.ignore-warnings",
flags: [
:unmatched_returns,
:error_handling,
:race_conditions,
],
paths: ["_build/dev/lib/pay_nl/ebin"]
]
]
end
def application do
[
extra_applications: [:logger],
mod: {PayNL.Supervisor, []}
]
end
defp deps do
[
{:cortex, ">= 0.0.0", only: [:dev, :test]},
{:exvcr, ">= 0.0.0", only: [:dev, :test]},
{:ecto, ">= 0.0.0"},
{:poison, ">= 0.0.0"},
{:httpotion, ">= 0.0.0"},
{:excoveralls, ">= 0.0.0", only: [:test]},
{:dialyxir, "~> 1.0.0-rc.3", only: [:dev], runtime: false},
{:credo, ">= 0.0.0", only: [:test, :dev]},
]
end
end
| 21.77551 | 65 | 0.477976 |
0881fa1ccb61d705c1224e0e73e95f73ff49477f | 1,027 | ex | Elixir | lib/asteroid/object_store/authenticated_session/mnesia/purge.ex | tanguilp/asteroid | 8e03221d365da7f03f82df192c535d3ba2101f4d | [
"Apache-2.0"
] | 36 | 2019-07-23T20:01:05.000Z | 2021-08-05T00:52:34.000Z | lib/asteroid/object_store/authenticated_session/mnesia/purge.ex | tanguilp/asteroid | 8e03221d365da7f03f82df192c535d3ba2101f4d | [
"Apache-2.0"
] | 19 | 2019-08-23T19:04:50.000Z | 2021-05-07T22:12:25.000Z | lib/asteroid/object_store/authenticated_session/mnesia/purge.ex | tanguilp/asteroid | 8e03221d365da7f03f82df192c535d3ba2101f4d | [
"Apache-2.0"
] | 3 | 2019-09-06T10:47:20.000Z | 2020-09-09T03:43:31.000Z | defmodule Asteroid.ObjectStore.AuthenticatedSession.Mnesia.Purge do
  @moduledoc false

  use GenServer

  require Logger

  # Not registered under a name; the supervisor holds the pid.
  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts)
  end

  # Schedules the first purge tick unless purging is disabled via
  # `purge_interval: :no_purge`. The interval is given in seconds.
  def init(opts) do
    if opts[:purge_interval] != :no_purge do
      Process.send_after(self(), :purge, opts[:purge_interval] * 1000)
    end

    {:ok, opts}
  end

  # Runs one purge pass, then re-arms the timer for the next pass.
  def handle_info(:purge, opts) do
    purge(opts)
    Process.send_after(self(), :purge, opts[:purge_interval] * 1000)

    {:noreply, opts}
  end

  # Deletes all authenticated sessions whose "exp" timestamp is in the past.
  defp purge(opts) do
    Logger.info("#{__MODULE__}: starting authenticated session purge process on #{node()}")

    table_name = opts[:table_name] || :asteroid_authenticated_session

    # Mnesia match spec: rows are {_, id, _, %{"exp" => exp}}; select the ids
    # ($1) of records whose exp ($2) is earlier than the current unix time.
    matchspec = [
      {
        {:_, :"$1", :_, %{"exp" => :"$2"}},
        [{:<, :"$2", :os.system_time(:second)}],
        [:"$1"]
      }
    ]

    for authenticated_session_id <- :mnesia.dirty_select(table_name, matchspec) do
      Asteroid.OIDC.AuthenticatedSession.delete(authenticated_session_id)
    end
  end
end
| 22.822222 | 91 | 0.64557 |
0881fa6fdebb240afc2852f10b6781a3c411ff68 | 2,569 | ex | Elixir | astreu/deps/prometheus_ex/lib/prometheus/injector.ex | wesleimp/Astreu | 4d430733e7ecc8b3eba8e27811a152aa2c6d79c1 | [
"Apache-2.0"
] | null | null | null | astreu/deps/prometheus_ex/lib/prometheus/injector.ex | wesleimp/Astreu | 4d430733e7ecc8b3eba8e27811a152aa2c6d79c1 | [
"Apache-2.0"
] | null | null | null | astreu/deps/prometheus_ex/lib/prometheus/injector.ex | wesleimp/Astreu | 4d430733e7ecc8b3eba8e27811a152aa2c6d79c1 | [
"Apache-2.0"
] | null | null | null | defmodule Prometheus.Injector do
  # Expand macros in `ast` (except `def`s and `in` patterns, which must stay
  # intact) and then wrap the resulting block(s) via `inject_/2`.
  def inject(callback, env, ast) do
    ast
    |> Macro.prewalk(fn thing ->
      case thing do
        # Leave function definitions unexpanded so inject_to_def/2 can
        # still pattern-match on the {:def, _, _} shape.
        {:def, _, _} = defun ->
          defun

        # otherwise e in RuntimeError will be rewritten
        {:in, _, _} = arrow ->
          arrow

        _ ->
          Macro.expand(thing, env)
      end
    end)
    |> inject_(callback)
  end

  # lambda
  # A zero-arity fn is turned into an immediate call `(fn -> ... end).()`
  # and handed to the callback; fns with arguments are rejected.
  def inject_({:fn, fn_meta, [{:->, arrow_meta, [args, do_block]}]}, callback) do
    case args do
      [] ->
        callback.(
          {{:., [], [{:fn, fn_meta, [{:->, arrow_meta, [[], do_block]}]}]}, [], []}
        )

      _ ->
        names =
          args
          |> Enum.map(fn {name, _, _} -> name end)

        raise Prometheus.InvalidBlockArityError, args: names
    end
  end

  # do_blocks can be simple calls or defs
  # Either every statement in the block is a def (each gets wrapped
  # individually) or none is (the whole block is wrapped at once).
  def inject_([{:do, {:__block__, [], do_blocks}}], callback) do
    do_blocks = List.flatten(do_blocks)

    if have_defs(do_blocks) do
      Enum.map(do_blocks, &inject_to_def(&1, callback))
    else
      callback.({:__block__, [], do_blocks})
    end
  end

  # just do
  # Normalize a single-expression do-block to the __block__ form above.
  def inject_([{:do, do_block}], callback) do
    inject_([{:do, {:__block__, [], [do_block]}}], callback)
  end

  # implicit try
  # A do-block followed by rescue/catch/after/else is an unwrapped try;
  # re-wrap it in an explicit `try` before handing it to the callback.
  def inject_([{:do, _do_block} | rest] = all, callback) do
    if is_try_unwrapped(rest) do
      callback.(
        quote do
          try unquote(all)
        end
      )
    else
      raise "Unexpected do block #{inspect(rest)}"
    end
  end

  # single do, or other non-block stuff like function calls
  def inject_(thing, callback) do
    inject_([{:do, {:__block__, [], [thing]}}], callback)
  end

  defp is_try_unwrapped(block) do
    Keyword.has_key?(block, :catch) || Keyword.has_key?(block, :rescue) ||
      Keyword.has_key?(block, :after) || Keyword.has_key?(block, :else)
  end

  # true when ALL statements are defs, false when NONE is; raises on a mix.
  defp have_defs(blocks) do
    defs_count =
      Enum.count(blocks, fn
        {:def, _, _} -> true
        _ -> false
      end)

    blocks_count = Enum.count(blocks)

    case defs_count do
      0 -> false
      ^blocks_count -> true
      _ -> raise "Mixing defs and other blocks isn't allowed"
    end
  end

  # Simple def body: wrap the body with the callback, keep the head as-is.
  defp inject_to_def({:def, def_meta, [head, [do: body]]}, callback) do
    {:def, def_meta, [head, [do: callback.(body)]]}
  end

  # def with rescue/catch/after/else clauses: wrap the whole implicit try.
  defp inject_to_def({:def, def_meta, [head, [{:do, _do_block} | _rest] = all]}, callback) do
    {:def, def_meta,
     [
       head,
       [
         do:
           callback.(
             quote do
               try unquote(all)
             end
           )
       ]
     ]}
  end
end
| 23.144144 | 93 | 0.546516 |
08823695766806daee14529942e6cf4edd05ca89 | 384 | exs | Elixir | clients/people/test/test_helper.exs | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/people/test/test_helper.exs | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/people/test/test_helper.exs | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | ExUnit.start()
defmodule GoogleApi.People.V1.TestHelper do
  @moduledoc """
  Conveniences for the People API test suite: `use` this module to get
  `ExUnit.Case` plus the helpers below imported into a test module.
  """

  defmacro __using__(opts) do
    quote do
      use ExUnit.Case, unquote(opts)
      import GoogleApi.People.V1.TestHelper
    end
  end

  @doc """
  Fetch an OAuth access token for the given scope (or list of scopes).
  """
  def for_scope(scopes) when is_list(scopes) do
    scopes
    |> Enum.join(" ")
    |> for_scope()
  end

  def for_scope(scope) do
    {:ok, token} = Goth.Token.for_scope(scope)
    token.token
  end
end
| 20.210526 | 83 | 0.695313 |
0882370e3255161379626747d2875d1f2daf42b5 | 658 | ex | Elixir | lib/hello_world.ex | chrta/hello_world_elixir | 6fbcca1042d3c49b43c1650178c2de3bd80fe3f5 | [
"MIT"
] | null | null | null | lib/hello_world.ex | chrta/hello_world_elixir | 6fbcca1042d3c49b43c1650178c2de3bd80fe3f5 | [
"MIT"
] | null | null | null | lib/hello_world.ex | chrta/hello_world_elixir | 6fbcca1042d3c49b43c1650178c2de3bd80fe3f5 | [
"MIT"
] | null | null | null | defmodule HelloWorld do
  use Application

  # See http://elixir-lang.org/docs/stable/elixir/Application.html
  # for more information on OTP Applications
  def start(_type, _args) do
    # NOTE(review): Supervisor.Spec (worker/2) is deprecated in modern
    # Elixir in favor of child-spec tuples; kept as-is for compatibility.
    import Supervisor.Spec, warn: false

    children = [
      # Define workers and child supervisors to be supervised
      # worker(HelloWorld.Worker, [arg1, arg2, arg3]),
      # Single Printer worker, registered under the name :printer.
      worker(HelloWorld.Printer, [[], [name: :printer]])
    ]

    # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: HelloWorld.Supervisor]
    Supervisor.start_link(children, opts)
  end
end
| 29.909091 | 67 | 0.705167 |
08824460cc390683f7d52d8be941319a0bae9095 | 54 | exs | Elixir | test/test_helper.exs | kianmeng/spider_man | 48941f42e901d03f7c6cc1f8aaf6f8a35d576614 | [
"Apache-2.0"
] | 5 | 2021-05-08T08:50:58.000Z | 2021-11-27T09:59:56.000Z | test/test_helper.exs | kianmeng/spider_man | 48941f42e901d03f7c6cc1f8aaf6f8a35d576614 | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | kianmeng/spider_man | 48941f42e901d03f7c6cc1f8aaf6f8a35d576614 | [
"Apache-2.0"
] | 1 | 2021-08-05T15:50:49.000Z | 2021-08-05T15:50:49.000Z | System.put_env("TMPDIR", "tmp")
# Start ExUnit with a fixed seed; seed 0 disables test-order randomization
# so tests always run in declaration order.
ExUnit.start(seed: 0)
| 18 | 31 | 0.703704 |
088246f1bd420c76193e880851bd910e2dedd454 | 137 | ex | Elixir | lib/execution/frame.ex | ElixiumNetwork/WaspVM | 7a313825ed50720256dda6e61e83356621357963 | [
"MIT"
] | 112 | 2018-12-14T23:43:38.000Z | 2019-03-22T22:02:27.000Z | lib/execution/frame.ex | ElixiumNetwork/Elixium-Wasm | 7a313825ed50720256dda6e61e83356621357963 | [
"MIT"
] | 21 | 2018-12-22T23:53:14.000Z | 2019-02-21T00:18:05.000Z | lib/execution/frame.ex | ElixiumNetwork/Elixium-Wasm | 7a313825ed50720256dda6e61e83356621357963 | [
"MIT"
] | 4 | 2019-05-11T16:37:19.000Z | 2021-05-18T13:45:06.000Z | defmodule AlchemyVM.Frame do
  # Per-invocation execution state for the VM. Field roles per their names
  # (confirm against the interpreter): owning module, the frame's instruction
  # sequence, local variables, and a gas limit; `labels` and `snapshots` are
  # stacks that start out empty.
  defstruct [:module, :instructions, :locals, :gas_limit, labels: [], snapshots: []]
  @moduledoc false
end
| 27.4 | 84 | 0.722628 |
08825e03c83bccfb4e3dc96a514cb897b089188c | 276 | exs | Elixir | backend/priv/repo/migrations/20220129204713_create_logs.exs | ugbots/ggj2022 | 1c7b9f6694268951f93a11fde91fa9573c179c26 | [
"MIT"
] | null | null | null | backend/priv/repo/migrations/20220129204713_create_logs.exs | ugbots/ggj2022 | 1c7b9f6694268951f93a11fde91fa9573c179c26 | [
"MIT"
] | null | null | null | backend/priv/repo/migrations/20220129204713_create_logs.exs | ugbots/ggj2022 | 1c7b9f6694268951f93a11fde91fa9573c179c26 | [
"MIT"
] | null | null | null | defmodule Backend.Repo.Migrations.CreateLogs do
  use Ecto.Migration

  # Creates the logs table: a message plus an owning user reference.
  def change do
    create table(:logs) do
      add :message, :string
      # Deleting a user leaves its logs in place (on_delete: :nothing).
      add :user_id, references(:users, on_delete: :nothing)

      timestamps()
    end

    # Index for the common "logs by user" lookup.
    create index(:logs, [:user_id])
  end
end
| 18.4 | 59 | 0.666667 |
08826ede1175ee0fdf460b42a55b02f143713f7d | 917 | exs | Elixir | rel/config/config.exs | styx/co2_offset | ac4b2bce8142e2d33ea089322c8dade34839448b | [
"Apache-2.0"
] | 15 | 2018-12-26T10:31:16.000Z | 2020-12-01T09:27:01.000Z | rel/config/config.exs | styx/co2_offset | ac4b2bce8142e2d33ea089322c8dade34839448b | [
"Apache-2.0"
] | 267 | 2018-12-26T07:46:17.000Z | 2020-04-04T17:05:47.000Z | rel/config/config.exs | styx/co2_offset | ac4b2bce8142e2d33ea089322c8dade34839448b | [
"Apache-2.0"
] | 1 | 2019-07-12T13:53:25.000Z | 2019-07-12T13:53:25.000Z | use Mix.Config
defmodule Helpers do
  # Fetch a required environment variable, raising with a descriptive
  # message when it is missing so the release fails fast at boot.
  # (Env values are strings, so `||` only triggers on a truly unset var.)
  def get_env(name) do
    System.get_env(name) ||
      raise "Environment variable #{name} is not set!"
  end
end
# Ensure that other required env variables exist (raises at boot if missing).
Helpers.get_env("ERLANG_COOKIE")

config :co2_offset, Co2Offset.Endpoint, secret_key_base: Helpers.get_env("SECRET_KEY_BASE")

# HTTPS endpoint configuration; all TLS material comes from the environment.
config :co2_offset, Co2OffsetWeb.Endpoint,
  live_view: [signing_salt: Helpers.get_env("LIVE_VIEW_SALT")],
  https: [
    port: 443,
    cipher_suite: :strong,
    keyfile: Helpers.get_env("CO2_OFFSET_SSL_KEY_PATH"),
    cacertfile: Helpers.get_env("CO2_OFFSET_SSL_CACERT_PATH"),
    certfile: Helpers.get_env("CO2_OFFSET_SSL_CERT_PATH")
  ]

# Database credentials come from the environment; host is fixed to localhost.
config :co2_offset, Co2Offset.Repo,
  username: Helpers.get_env("DATABASE_USER"),
  password: Helpers.get_env("DATABASE_PASS"),
  database: Helpers.get_env("DATABASE_NAME"),
  hostname: "localhost",
  pool_size: 15
| 27.787879 | 91 | 0.738277 |
0882769aa573ff786ff2185c925de6d189865f7a | 2,105 | ex | Elixir | clients/game_services/lib/google_api/game_services/v1/model/list_game_server_configs_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/game_services/lib/google_api/game_services/v1/model/list_game_server_configs_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/game_services/lib/google_api/game_services/v1/model/list_game_server_configs_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.GameServices.V1.Model.ListGameServerConfigsResponse do
@moduledoc """
Response message for GameServerConfigsService.ListGameServerConfigs.
## Attributes
* `gameServerConfigs` (*type:* `list(GoogleApi.GameServices.V1.Model.GameServerConfig.t)`, *default:* `nil`) - The list of game server configs.
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Token to retrieve the next page of results, or empty if there are no more results in the list.
* `unreachable` (*type:* `list(String.t)`, *default:* `nil`) - List of locations that could not be reached.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:gameServerConfigs => list(GoogleApi.GameServices.V1.Model.GameServerConfig.t()),
:nextPageToken => String.t(),
:unreachable => list(String.t())
}
field(:gameServerConfigs, as: GoogleApi.GameServices.V1.Model.GameServerConfig, type: :list)
field(:nextPageToken)
field(:unreachable, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.GameServices.V1.Model.ListGameServerConfigsResponse do
def decode(value, options) do
GoogleApi.GameServices.V1.Model.ListGameServerConfigsResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.GameServices.V1.Model.ListGameServerConfigsResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.716981 | 157 | 0.741568 |
08827af0729e61664ec513cb81a1f8042de7d4c1 | 1,366 | ex | Elixir | address_validator/apps/address_validator_api/lib/address_validator_api.ex | aikssen/address_hackaton | 38f608d23705052776ce26f38ba67a6c1ef52ab2 | [
"MIT"
] | null | null | null | address_validator/apps/address_validator_api/lib/address_validator_api.ex | aikssen/address_hackaton | 38f608d23705052776ce26f38ba67a6c1ef52ab2 | [
"MIT"
] | null | null | null | address_validator/apps/address_validator_api/lib/address_validator_api.ex | aikssen/address_hackaton | 38f608d23705052776ce26f38ba67a6c1ef52ab2 | [
"MIT"
] | null | null | null | defmodule AddressValidatorApi do
@moduledoc """
Documentation for AddressValidatorApi.
"""
defstruct [:address, :response]
alias AddressValidatorApi.Repo
alias AddressValidatorApi.Address
@doc """
Validate a given address
"""
def validate_address(address) do
# If addres is in cache return value from cache
response = Repo.get_by(Address, address: address)
case response do
%Address{response: "ZERO_RESULTS"}
-> {:not_valid, "ZERO_RESULTS"}
%Address{response: "Server Error"}
-> {:error, "Server Error"}
%Address{response: cache_response}
-> {:ok, cache_response}
nil
-> get_response_from_api(address)
end
end
def call_api_and_save(address) do
api_response = get_response_from_api(address)
save_address(api_response, address)
api_response
end
def get_response_from_api(address) do
case address |> AddressValidatorApi.Server.address_validator do
%{"results" => [%{"formatted_address" => formatted_address} | _t]}
-> {:ok, formatted_address}
%{"results" => [], "status" => zero_results}
-> {:not_valid, zero_results}
:error
-> {:error, "Server Error"}
end
end
def save_address({_, formatted_address}, address) do
Repo.insert(%Address{address: address, response: formatted_address})
end
end
| 27.32 | 72 | 0.668375 |
0882ac25144cb50e63b7801151918c142a69c752 | 1,144 | ex | Elixir | core/ets/config_cache.ex | wses-yoshida/antikythera | e108e59d2339edd0b0fad31ad4f41f56df45be55 | [
"Apache-2.0"
] | null | null | null | core/ets/config_cache.ex | wses-yoshida/antikythera | e108e59d2339edd0b0fad31ad4f41f56df45be55 | [
"Apache-2.0"
] | null | null | null | core/ets/config_cache.ex | wses-yoshida/antikythera | e108e59d2339edd0b0fad31ad4f41f56df45be55 | [
"Apache-2.0"
] | null | null | null | # Copyright(c) 2015-2019 ACCESS CO., LTD. All rights reserved.
use Croma
defmodule AntikytheraCore.Ets.ConfigCache do
  # Creates the shared ETS table used by the Core and Gear cache modules.
  defun init() :: :ok do
    AntikytheraCore.Ets.create_read_optimized_table(table_name())
  end

  defun table_name() :: atom do
    :antikythera_config_cache
  end
end
defmodule AntikytheraCore.Ets.ConfigCache.Core do
  @table_name AntikytheraCore.Ets.ConfigCache.table_name()

  # Core config is stored as a single row under the :antikythera key;
  # lookup_element raises if the row has not been written yet.
  defun read() :: map do
    :ets.lookup_element(@table_name, :antikythera, 2)
  end

  defun write(m :: v[map]) :: :ok do
    :ets.insert(@table_name, {:antikythera, m})
    :ok
  end
end
defmodule AntikytheraCore.Ets.ConfigCache.Gear do
  @table_name AntikytheraCore.Ets.ConfigCache.table_name()

  alias Antikythera.GearName
  alias AntikytheraCore.Config.Gear, as: GearConfig

  # Per-gear config is keyed by gear name; returns nil when absent
  # (unlike Core.read/0, which raises).
  defun read(gear_name :: v[GearName.t]) :: nil | GearConfig.t do
    case :ets.lookup(@table_name, gear_name) do
      []                           -> nil
      [{_gear_name, gear_config}] -> gear_config
    end
  end

  defun write(gear_name :: v[GearName.t], conf :: v[GearConfig.t]) :: :ok do
    :ets.insert(@table_name, {gear_name, conf})
    :ok
  end
end
| 25.422222 | 76 | 0.690559 |
0882b1b11506aec5c7a58060798537743436fd0a | 794 | ex | Elixir | lib/flickrex/oauth.ex | christopheradams/flickrex | 67e3a507e128c55969aeda3802f6dacd94bb03c5 | [
"MIT"
] | 13 | 2017-04-02T10:55:10.000Z | 2022-03-02T02:55:40.000Z | lib/flickrex/oauth.ex | christopheradams/flickrex | 67e3a507e128c55969aeda3802f6dacd94bb03c5 | [
"MIT"
] | null | null | null | lib/flickrex/oauth.ex | christopheradams/flickrex | 67e3a507e128c55969aeda3802f6dacd94bb03c5 | [
"MIT"
] | 1 | 2021-01-31T17:47:27.000Z | 2021-01-31T17:47:27.000Z | defmodule Flickrex.OAuth do
@moduledoc false
@type method :: atom | String.t()
@type url :: String.t() | URI.t()
@type params :: Keyword.t()
@type consumer_key :: String.t()
@type consumer_secret :: String.t()
@type token :: nil | String.t()
@type token_secret :: nil | String.t()
@type signed :: [{String.t(), String.Chars.t()}]
@spec sign(method, url, params, consumer_key, consumer_secret, token, token_secret) :: signed
def sign(method, url, params, consumer_key, consumer_secret, token, token_secret) do
credentials =
OAuther.credentials(
consumer_key: consumer_key,
consumer_secret: consumer_secret,
token: token,
token_secret: token_secret
)
OAuther.sign(to_string(method), url, params, credentials)
end
end
| 30.538462 | 95 | 0.664987 |
08832b88a42195466059553e59a6c26f222616b1 | 1,392 | ex | Elixir | lib/ash/api/transformers/ensure_resources_compiled.ex | MrFlorius/ash | 247abbb8333d252da5440a58ddf4f1b7f184342f | [
"MIT"
] | null | null | null | lib/ash/api/transformers/ensure_resources_compiled.ex | MrFlorius/ash | 247abbb8333d252da5440a58ddf4f1b7f184342f | [
"MIT"
] | null | null | null | lib/ash/api/transformers/ensure_resources_compiled.ex | MrFlorius/ash | 247abbb8333d252da5440a58ddf4f1b7f184342f | [
"MIT"
] | null | null | null | defmodule Ash.Api.Transformers.EnsureResourcesCompiled do
@moduledoc """
Ensures that all resources for a given api are compiled.
This is required for later transformers.
"""
use Ash.Dsl.Transformer
alias Ash.Dsl.Transformer
require Logger
def transform(module, dsl, times \\ 3) do
dsl
|> Transformer.get_entities([:resources])
|> Enum.filter(& &1.warn_on_compile_failure?)
|> Enum.map(& &1.resource)
|> Enum.map(fn resource ->
try do
# This is to get the compiler to ensure that the resource is compiled
# For some very strange reason, `Code.ensure_compiled/1` isn't enough
resource.ash_dsl_config()
rescue
_ ->
:ok
end
case Code.ensure_compiled(resource) do
{:module, _module} ->
false
{:error, error} ->
# The module is being compiled but is in a deadlock that may or may not be resolved
{resource, error}
end
end)
|> Enum.filter(& &1)
|> case do
[] ->
{:ok, dsl}
rejected ->
if times == 0 do
for {resource, error} <- rejected do
Logger.error(
"Could not ensure that #{inspect(resource)} was compiled: #{inspect(error)}"
)
end
:halt
else
transform(module, dsl, times - 1)
end
end
end
end
| 24.857143 | 93 | 0.574713 |
08834d9a7f4583617504ebbc02948d88d053c58e | 1,038 | ex | Elixir | web/controllers/todo_controller.ex | tykowale/todo_api | 8e154dc2fef0cac79bd97646a338c569486a0e0a | [
"MIT"
] | null | null | null | web/controllers/todo_controller.ex | tykowale/todo_api | 8e154dc2fef0cac79bd97646a338c569486a0e0a | [
"MIT"
] | null | null | null | web/controllers/todo_controller.ex | tykowale/todo_api | 8e154dc2fef0cac79bd97646a338c569486a0e0a | [
"MIT"
] | null | null | null | defmodule Todos.TodoController do
  use Todos.Web, :controller

  alias Todos.Todo

  # Require a "todo" param map for create/update; all actions need auth.
  plug :scrub_params, "todo" when action in [:create, :update]
  plug Todos.Authentication

  # Lists only the current user's todos.
  def index(conn, _params) do
    user_id = conn.assigns.current_user.id
    query = from t in Todo, where: t.owner_id == ^user_id
    todos = Repo.all(query)

    render(conn, "index.json", todos: todos)
  end

  # def show(conn, %{"id" => id}) do
  #   todo = Repo.get!(Todo, id)
  #   render(conn, "show.json", todo: todo)
  # end

  # Creates a todo owned by the current user.
  def create(conn, %{"todo" => todo_params}) do
    changeset = Todo.changeset(
      %Todo{ owner_id: conn.assigns.current_user.id },
      todo_params
    )

    case Repo.insert(changeset) do
      {:ok , todo} ->
        conn
        |> put_status(:created)
        |> render("show.json", todo: todo)
      # NOTE(review): insert failures (typically changeset validation errors)
      # are reported as 401; 422 :unprocessable_entity with the changeset
      # errors is the Phoenix convention — confirm intent.
      _ ->
        conn
        |> send_resp(401, "Didn't work :(")
    end
  end

  # def delete(conn, %{"id" => id}) do
  #   Repo.get!(Todo, id)
  #   |> Repo.delete!

  #   conn
  #   |> send_resp(204, [])
  # end
end
| 22.085106 | 62 | 0.581888 |
08834f74656423662b24dfb840b9d4964f53b77a | 922 | ex | Elixir | test/support/channel_case.ex | kw7oe/til | 5ecaa3d199c06d535d1bbbfa05e2dc88e86ad815 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | kw7oe/til | 5ecaa3d199c06d535d1bbbfa05e2dc88e86ad815 | [
"MIT"
] | 2 | 2020-04-26T14:03:27.000Z | 2020-12-12T17:14:02.000Z | test/support/channel_case.ex | kw7oe/til | 5ecaa3d199c06d535d1bbbfa05e2dc88e86ad815 | [
"MIT"
] | null | null | null | defmodule TilWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
use Phoenix.ChannelTest
# The default endpoint for testing
@endpoint TilWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Til.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Til.Repo, {:shared, self()})
end
:ok
end
end
| 24.263158 | 65 | 0.710412 |
088369f1ef58541ca71aace06b7434db00537a80 | 3,651 | ex | Elixir | lib/recase.ex | kempt09/recase | d6bebb6e801c3d70e585ee58bbf6bc142670e3e1 | [
"MIT"
] | null | null | null | lib/recase.ex | kempt09/recase | d6bebb6e801c3d70e585ee58bbf6bc142670e3e1 | [
"MIT"
] | null | null | null | lib/recase.ex | kempt09/recase | d6bebb6e801c3d70e585ee58bbf6bc142670e3e1 | [
"MIT"
] | null | null | null | defmodule Recase do
@moduledoc """
Recase allows you to convert string from any to any case.
This module contains public interface.
"""
alias Recase.{
CamelCase,
ConstantCase,
DotCase,
HeaderCase,
KebabCase,
NameCase,
PascalCase,
PathCase,
SentenceCase,
SnakeCase,
TitleCase
}
@doc """
Converts string to PascalCase (aka UpperCase).
## Examples
iex> Recase.to_pascal("some-value")
"SomeValue"
iex> Recase.to_pascal("some value")
"SomeValue"
"""
@spec to_pascal(String.t()) :: String.t()
def to_pascal(value), do: PascalCase.convert(value)
@doc """
Converts string to camelCase.
## Examples
iex> Recase.to_camel("some-value")
"someValue"
iex> Recase.to_camel("Some Value")
"someValue"
"""
@spec to_camel(String.t()) :: String.t()
def to_camel(value), do: CamelCase.convert(value)
@doc """
Converts string to snake_case.
## Examples
iex> Recase.to_snake("some-value")
"some_value"
iex> Recase.to_snake("someValue")
"some_value"
"""
@spec to_snake(String.t()) :: String.t()
def to_snake(value), do: SnakeCase.convert(value)
defdelegate underscore(value), to: Recase, as: :to_snake
@doc """
Converts string to kebab-case.
## Examples
iex> Recase.to_kebab("SomeValue")
"some-value"
iex> Recase.to_kebab("some value")
"some-value"
"""
@spec to_kebab(String.t()) :: String.t()
def to_kebab(value), do: KebabCase.convert(value)
@doc """
Converts string to CONSTANT_CASE.
## Examples
iex> Recase.to_constant("SomeValue")
"SOME_VALUE"
iex> Recase.to_constant("some value")
"SOME_VALUE"
"""
@spec to_constant(String.t()) :: String.t()
def to_constant(value), do: ConstantCase.convert(value)
@doc ~S"""
Converts string to path/case.
## Examples
iex> Recase.to_path("SomeValue")
"Some/Value"
iex> Recase.to_path("some value", "\\")
"some\\value"
"""
@spec to_path(String.t(), String.t()) :: String.t()
def to_path(value, separator), do: PathCase.convert(value, separator)
@spec to_path(String.t()) :: String.t()
def to_path(value), do: PathCase.convert(value)
@doc """
Converts string to dot.case
## Examples
iex> Recase.to_dot("SomeValue")
"some.value"
iex> Recase.to_dot("some value")
"some.value"
"""
@spec to_dot(String.t()) :: String.t()
def to_dot(value), do: DotCase.convert(value)
@doc """
Converts string to Sentence case
## Examples
iex> Recase.to_sentence("SomeValue")
"Some value"
iex> Recase.to_sentence("some value")
"Some value"
"""
@spec to_sentence(String.t()) :: String.t()
def to_sentence(value), do: SentenceCase.convert(value)
@doc """
Converts string to Title Case
## Examples
iex> Recase.to_title("SomeValue")
"Some Value"
iex> Recase.to_title("some value")
"Some Value"
"""
@spec to_title(String.t()) :: String.t()
def to_title(value), do: TitleCase.convert(value)
@doc """
Converts string to Header-Case
## Examples
iex> Recase.to_header("SomeValue")
"Some-Value"
iex> Recase.to_header("some value")
"Some-Value"
"""
@spec to_header(String.t()) :: String.t()
def to_header(value), do: HeaderCase.convert(value)
@doc """
Converts string to Name Case
## Examples
iex> Recase.to_name("mccarthy o'donnell")
"McCarthy O'Donnell"
iex> Recase.to_name("von streit")
"von Streit"
"""
@spec to_name(String.t()) :: String.t()
def to_name(value), do: NameCase.convert(value)
end
| 20.396648 | 71 | 0.627499 |
08837a4a321bc1b9988d6088ebb49d99fe774b7c | 5,355 | ex | Elixir | test/fixtures/autobahn_client.ex | hkrutzer/mint_web_socket | 213bdc56addbc3d7b5344d14b6e194cc1ca3024f | [
"Apache-2.0"
] | null | null | null | test/fixtures/autobahn_client.ex | hkrutzer/mint_web_socket | 213bdc56addbc3d7b5344d14b6e194cc1ca3024f | [
"Apache-2.0"
] | null | null | null | test/fixtures/autobahn_client.ex | hkrutzer/mint_web_socket | 213bdc56addbc3d7b5344d14b6e194cc1ca3024f | [
"Apache-2.0"
] | null | null | null | defmodule AutobahnClient do
@moduledoc """
A client that uses Mint.WebSocket to test against the Autobahn|Testsuite
WebSocket testing suite
"""
import Kernel, except: [send: 2]
require Logger
defstruct [:conn, :websocket, :ref, messages: [], next: :cont, sent_close?: false, buffer: <<>>]
defguardp is_close_frame(frame)
when frame == :close or (is_tuple(frame) and elem(frame, 0) == :close)
def get_case_count do
%{messages: [{:text, count} | _]} = connect("/getCaseCount") |> decode_buffer()
String.to_integer(count)
end
def run_case(case_number, extensions \\ []) do
_state = connect("/runCase?case=#{case_number}&agent=Mint", extensions) |> loop()
:ok
end
def get_case_status(case_number) do
%{messages: [{:text, status} | _]} =
connect("/getCaseStatus?case=#{case_number}&agent=Mint") |> decode_buffer()
Jason.decode!(status)["behavior"]
end
def get_case_info(case_number) do
%{messages: [{:text, status} | _]} =
connect("/getCaseInfo?case=#{case_number}&agent=Mint") |> decode_buffer()
Jason.decode!(status, keys: :atoms)
end
def update_reports do
_state = connect("/updateReports?agent=Mint") |> loop()
:ok
end
def flush do
receive do
_message -> flush()
after
0 -> :ok
end
end
def connect(resource, extensions \\ []) do
:ok = flush()
host = System.get_env("FUZZINGSERVER_HOST") || "localhost"
{:ok, conn} = Mint.HTTP.connect(:http, host, 9001)
{:ok, conn, ref} = Mint.WebSocket.upgrade(:ws, conn, resource, [], extensions: extensions)
http_get_message = receive(do: (message -> message))
{:ok, conn, [{:status, ^ref, status}, {:headers, ^ref, resp_headers} | rest]} =
Mint.WebSocket.stream(conn, http_get_message)
buffer =
case rest do
[{:data, ^ref, data}, {:done, ^ref}] -> data
[{:done, ^ref}] -> <<>>
end
{:ok, conn, websocket} = Mint.WebSocket.new(conn, ref, status, resp_headers)
%__MODULE__{
next: :cont,
conn: conn,
ref: ref,
websocket: websocket,
buffer: buffer
}
end
def recv(%{ref: ref} = state) do
{:ok, conn, messages} = Mint.WebSocket.stream(state.conn, receive(do: (message -> message)))
%__MODULE__{
state
| conn: conn,
buffer: join_data_frames(messages, ref),
next: stop_if_done(messages, ref)
}
end
def decode_buffer(state) do
{:ok, websocket, messages} = Mint.WebSocket.decode(state.websocket, state.buffer)
%__MODULE__{state | messages: messages, buffer: <<>>, websocket: websocket}
end
# Decode-and-handle cycle: process everything currently buffered, then
# keep receiving until a handler flips `next` to :stop.
def loop(state) do
  state = state |> decode_buffer() |> handle_messages()

  if state.next == :cont do
    loop(recv(state))
  else
    state
  end
end
# Runs every decoded frame through handle_message/2 in order, threading
# the state, then clears the processed message list.
def handle_messages(state) do
  handled =
    Enum.reduce(state.messages, state, fn frame, acc ->
      Logger.debug("Handling #{inspect(frame, printable_limit: 30)}")
      handle_message(frame, acc)
    end)

  %{handled | messages: []}
end
# Server initiated a close: answer with a normal (1000) close of our own.
defp handle_message({:close, _code, _reason}, state), do: close(state, 1000, "")

# Answer every ping with a pong carrying the same payload.
defp handle_message({:ping, payload}, state), do: send(state, {:pong, payload})

# no-op on unsolicited pongs
defp handle_message({:pong, _payload}, state), do: state

defp handle_message({:error, reason}, state) do
  Logger.debug("Closing the connection because of a protocol error: #{inspect(reason)}")

  close_code =
    if match?({:invalid_utf8, _data}, reason) do
      # 1007: invalid payload data (bad UTF-8 in a text frame).
      1_007
    else
      # 1002: generic protocol error.
      1_002
    end

  close(state, close_code, "")
end

# Echo all remaining frames straight back, as the autobahn suite expects.
defp handle_message(frame, state), do: send(state, frame)
# Once we have sent a close frame, further close frames are swallowed so
# we never emit two closes on one connection.
def send(%__MODULE__{sent_close?: true} = state, frame) when is_close_frame(frame) do
Logger.debug("Ignoring send of close")
state
end
# Encodes `frame` and writes it to the wire. On an encoding error the
# client replies with a 1002 protocol-error close; if the transport is
# already closed, the connection is torn down and the loop is stopped.
def send(state, frame) do
Logger.debug("Sending #{inspect(frame, printable_limit: 30)}")
with {:ok, %Mint.WebSocket{} = websocket, data} <-
Mint.WebSocket.encode(state.websocket, frame),
{:ok, conn} <- Mint.WebSocket.stream_request_body(state.conn, state.ref, data) do
Logger.debug("Sent.")
# Record whether this frame was a close so the clause above can
# suppress duplicates.
%__MODULE__{state | conn: conn, websocket: websocket, sent_close?: is_close_frame(frame)}
else
{:error, %Mint.WebSocket{} = websocket, reason} ->
Logger.debug(
"Could not send frame #{inspect(frame, printable_limit: 30)} because #{inspect(reason)}, sending close..."
)
send(put_in(state.websocket, websocket), {:close, 1002, ""})
{:error, conn, %Mint.TransportError{reason: :closed}} ->
Logger.debug(
"Could not send frame #{inspect(frame, printable_limit: 30)} because the connection is closed"
)
{:ok, conn} = Mint.HTTP.close(conn)
%__MODULE__{state | conn: conn, next: :stop}
end
end
# Sends a close frame, tears down the TCP connection, and marks the
# state so the receive loop terminates.
defp close(state, code, reason) do
  state = send(state, {:close, code, reason})
  {:ok, closed_conn} = Mint.HTTP.close(state.conn)
  %__MODULE__{state | next: :stop, conn: closed_conn}
end
# Concatenates, in order, the payloads of all `:data` frames belonging
# to `ref`, ignoring frames for other refs and non-data events.
#
# The original implementation filtered and then map-joined — two passes
# over the list; a binary comprehension does the same in one pass.
defp join_data_frames(messages, ref) do
  for {:data, ^ref, data} <- messages, into: <<>>, do: data
end
# Returns :stop once the stream for `ref` has emitted its `{:done, ref}`
# event, otherwise :cont.
defp stop_if_done(messages, ref) do
  done? = Enum.any?(messages, fn message -> match?({:done, ^ref}, message) end)

  if done?, do: :stop, else: :cont
end
end
| 27.603093 | 116 | 0.621849 |
08838218c067835352cd4b4d0cc9f78d697f4bc0 | 1,709 | ex | Elixir | clients/admin/lib/google_api/admin/reports_v1/model/activity_id.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/admin/lib/google_api/admin/reports_v1/model/activity_id.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/admin/lib/google_api/admin/reports_v1/model/activity_id.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Admin.Reports_v1.Model.ActivityId do
  @moduledoc """
  Unique identifier for each activity record.
  ## Attributes
  - applicationName (String): Application name to which the event belongs. Defaults to: `null`.
  - customerId (String): Obfuscated customer ID of the source customer. Defaults to: `null`.
  - time (DateTime): Time of occurrence of the activity. Defaults to: `null`.
  - uniqueQualifier (String): Unique qualifier if multiple events have the same time. Defaults to: `null`.
  """

  # The quoted-atom syntax (`:"applicationName"`) in the generated code is
  # unnecessary: all of these names are valid bare atom literals. Every
  # field defaults to nil.
  defstruct [:applicationName, :customerId, :time, :uniqueQualifier]
end
defimpl Poison.Decoder, for: GoogleApi.Admin.Reports_v1.Model.ActivityId do
  # Identity decode: the struct's fields need no post-processing.
  def decode(value, _options), do: value
end
defimpl Poison.Encoder, for: GoogleApi.Admin.Reports_v1.Model.ActivityId do
# Delegates JSON encoding to the API client's shared serializer;
# per its name it serializes only non-nil fields — confirm in
# GoogleApi.Admin.Reports_v1.Deserializer.
def encode(value, options) do
GoogleApi.Admin.Reports_v1.Deserializer.serialize_non_nil(value, options)
end
end
| 32.865385 | 106 | 0.746635 |
088384093d3b54fa7751458d08f6f024e9717354 | 3,264 | ex | Elixir | clients/container_analysis/lib/google_api/container_analysis/v1beta1/model/in_toto.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/container_analysis/lib/google_api/container_analysis/v1beta1/model/in_toto.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/container_analysis/lib/google_api/container_analysis/v1beta1/model/in_toto.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ContainerAnalysis.V1beta1.Model.InToto do
  @moduledoc """
  This contains the fields corresponding to the definition of a software supply chain step in an in-toto layout. This information goes into a Grafeas note.
  ## Attributes
  * `expectedCommand` (*type:* `list(String.t)`, *default:* `nil`) - This field contains the expected command used to perform the step.
  * `expectedMaterials` (*type:* `list(GoogleApi.ContainerAnalysis.V1beta1.Model.ArtifactRule.t)`, *default:* `nil`) - The following fields contain in-toto artifact rules identifying the artifacts that enter this supply chain step, and exit the supply chain step, i.e. materials and products of the step.
  * `expectedProducts` (*type:* `list(GoogleApi.ContainerAnalysis.V1beta1.Model.ArtifactRule.t)`, *default:* `nil`) -
  * `signingKeys` (*type:* `list(GoogleApi.ContainerAnalysis.V1beta1.Model.SigningKey.t)`, *default:* `nil`) - This field contains the public keys that can be used to verify the signatures on the step metadata.
  * `stepName` (*type:* `String.t`, *default:* `nil`) - This field identifies the name of the step in the supply chain.
  * `threshold` (*type:* `String.t`, *default:* `nil`) - This field contains a value that indicates the minimum number of keys that need to be used to sign the step's in-toto link.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :expectedCommand => list(String.t()),
          :expectedMaterials => list(GoogleApi.ContainerAnalysis.V1beta1.Model.ArtifactRule.t()),
          :expectedProducts => list(GoogleApi.ContainerAnalysis.V1beta1.Model.ArtifactRule.t()),
          :signingKeys => list(GoogleApi.ContainerAnalysis.V1beta1.Model.SigningKey.t()),
          :stepName => String.t(),
          :threshold => String.t()
        }

  # `field/2` is a macro from GoogleApi.Gax.ModelBase; parenless macro
  # style is used here, which is semantically identical.
  field :expectedCommand, type: :list
  field :expectedMaterials, as: GoogleApi.ContainerAnalysis.V1beta1.Model.ArtifactRule, type: :list
  field :expectedProducts, as: GoogleApi.ContainerAnalysis.V1beta1.Model.ArtifactRule, type: :list
  field :signingKeys, as: GoogleApi.ContainerAnalysis.V1beta1.Model.SigningKey, type: :list
  field :stepName
  field :threshold
end
defimpl Poison.Decoder, for: GoogleApi.ContainerAnalysis.V1beta1.Model.InToto do
  # Forward decoding to the decode/2 generated on the model module.
  defdelegate decode(value, options), to: GoogleApi.ContainerAnalysis.V1beta1.Model.InToto
end
defimpl Poison.Encoder, for: GoogleApi.ContainerAnalysis.V1beta1.Model.InToto do
  # Forward encoding to the shared Gax model encoder.
  defdelegate encode(value, options), to: GoogleApi.Gax.ModelBase
end
| 48 | 306 | 0.735294 |
0883a8a997dd548d007c608a85747557bb7b5194 | 951 | exs | Elixir | services/fc_inventory/mix.exs | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | [
"BSD-3-Clause"
] | 46 | 2018-10-13T23:18:13.000Z | 2021-08-07T07:46:51.000Z | services/fc_inventory/mix.exs | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | [
"BSD-3-Clause"
] | 25 | 2018-10-14T00:56:07.000Z | 2019-12-23T19:41:02.000Z | services/fc_inventory/mix.exs | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | [
"BSD-3-Clause"
] | 5 | 2018-12-16T04:39:51.000Z | 2020-10-01T12:17:03.000Z | defmodule FCInventory.MixProject do
use Mix.Project
def project do
[
app: :fc_inventory,
name: "Freshcom Inventory",
version: "0.1.0",
elixir: "~> 1.7",
elixirc_paths: elixirc_paths(Mix.env()),
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
mod: {FCInventory.Application, []},
extra_applications: [
:logger,
:eventstore
]
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:fc_base, path: "../../base/fc_base"},
{:hackney, "~> 1.9"},
{:faker, "~> 0.11", only: [:test, :dev]},
{:ex_doc, "~> 0.19", only: :dev, runtime: false},
{:dialyxir, "~> 1.0.0-rc.3", only: [:dev], runtime: false}
]
end
end
| 23.195122 | 64 | 0.553102 |
0883b000a94e5969916ba2667956aa259d3282c8 | 205 | ex | Elixir | test/support/custom_parsers.ex | mediapress-ltd/solid | 025b49ab9d8286d4fc557085331cc7643e0beb20 | [
"MIT"
] | 1 | 2021-11-16T19:10:02.000Z | 2021-11-16T19:10:02.000Z | test/support/custom_parsers.ex | mediapress-ltd/solid | 025b49ab9d8286d4fc557085331cc7643e0beb20 | [
"MIT"
] | null | null | null | test/support/custom_parsers.ex | mediapress-ltd/solid | 025b49ab9d8286d4fc557085331cc7643e0beb20 | [
"MIT"
] | null | null | null | defmodule CustomDateParser do
use Solid.Parser.Base, custom_tags: ["current_date", "get_year_of_date"]
end
# Test-support parser: registers placeholder "foobar" custom tags for
# Solid's user-defined tag tests.
defmodule CustomFooParser do
use Solid.Parser.Base, custom_tags: ["foobar", "foobarval"]
end
| 25.625 | 74 | 0.780488 |
0883e2c61871ef05bdbbf5e9a16b731de10da577 | 17,732 | ex | Elixir | lib/aws/generated/work_mail.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/work_mail.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/work_mail.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.WorkMail do
  @moduledoc """
  Amazon WorkMail is a secure, managed business email and calendaring service with
  support for existing desktop and mobile email clients.
  You can access your email, contacts, and calendars using Microsoft Outlook, your
  browser, or other native iOS and Android email applications. You can integrate
  WorkMail with your existing corporate directory and control both the keys that
  encrypt your data and the location in which your data is stored.
  The WorkMail API is designed for the following scenarios:
    * Listing and describing organizations
    * Managing users
    * Managing groups
    * Managing resources
  All WorkMail API operations are Amazon-authenticated and certificate-signed.
  They not only require the use of the AWS SDK, but also allow for the exclusive
  use of AWS Identity and Access Management users and roles to help facilitate
  access, trust, and permission policies. By creating a role and allowing an IAM
  user to access the WorkMail site, the IAM user gains full administrative
  visibility into the entire WorkMail organization (or as set in the IAM policy).
  This includes, but is not limited to, the ability to create, update, and delete
  users, groups, and resources. This allows developers to perform the scenarios
  listed above, as well as give users the ability to grant access on a selective
  basis using the IAM model.
  """

  # NOTE(review): this file is code-generated. The per-operation bodies were
  # identical, so they now go through the shared request/4 dispatcher below;
  # if the file is regenerated, port this change into the generator template.

  alias AWS.Client
  alias AWS.Request

  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: nil,
      api_version: "2017-10-01",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "workmail",
      global?: false,
      protocol: "json",
      service_id: "WorkMail",
      signature_version: "v4",
      signing_name: "workmail",
      target_prefix: "WorkMailService"
    }
  end

  @doc """
  Adds a member (user or group) to the resource's set of delegates.
  """
  def associate_delegate_to_resource(%Client{} = client, input, options \\ []) do
    request(client, "AssociateDelegateToResource", input, options)
  end

  @doc """
  Adds a member (user or group) to the group's set.
  """
  def associate_member_to_group(%Client{} = client, input, options \\ []) do
    request(client, "AssociateMemberToGroup", input, options)
  end

  @doc """
  Cancels a mailbox export job.
  If the mailbox export job is near completion, it might not be possible to cancel
  it.
  """
  def cancel_mailbox_export_job(%Client{} = client, input, options \\ []) do
    request(client, "CancelMailboxExportJob", input, options)
  end

  @doc """
  Adds an alias to the set of a given member (user or group) of Amazon WorkMail.
  """
  def create_alias(%Client{} = client, input, options \\ []) do
    request(client, "CreateAlias", input, options)
  end

  @doc """
  Creates a group that can be used in Amazon WorkMail by calling the
  `RegisterToWorkMail` operation.
  """
  def create_group(%Client{} = client, input, options \\ []) do
    request(client, "CreateGroup", input, options)
  end

  @doc """
  Creates a new Amazon WorkMail organization.
  Optionally, you can choose to associate an existing AWS Directory Service
  directory with your organization. If an AWS Directory Service directory ID is
  specified, the organization alias must match the directory alias. If you choose
  not to associate an existing directory with your organization, then we create a
  new Amazon WorkMail directory for you. For more information, see [Adding an organization](https://docs.aws.amazon.com/workmail/latest/adminguide/add_new_organization.html)
  in the *Amazon WorkMail Administrator Guide*.
  You can associate multiple email domains with an organization, then set your
  default email domain from the Amazon WorkMail console. You can also associate a
  domain that is managed in an Amazon Route 53 public hosted zone. For more
  information, see [Adding a domain](https://docs.aws.amazon.com/workmail/latest/adminguide/add_domain.html)
  and [Choosing the default domain](https://docs.aws.amazon.com/workmail/latest/adminguide/default_domain.html)
  in the *Amazon WorkMail Administrator Guide*.
  Optionally, you can use a customer managed master key from AWS Key Management
  Service (AWS KMS) to encrypt email for your organization. If you don't associate
  an AWS KMS key, Amazon WorkMail creates a default AWS managed master key for
  you.
  """
  def create_organization(%Client{} = client, input, options \\ []) do
    request(client, "CreateOrganization", input, options)
  end

  @doc """
  Creates a new Amazon WorkMail resource.
  """
  def create_resource(%Client{} = client, input, options \\ []) do
    request(client, "CreateResource", input, options)
  end

  @doc """
  Creates a user who can be used in Amazon WorkMail by calling the
  `RegisterToWorkMail` operation.
  """
  def create_user(%Client{} = client, input, options \\ []) do
    request(client, "CreateUser", input, options)
  end

  @doc """
  Deletes an access control rule for the specified WorkMail organization.
  """
  def delete_access_control_rule(%Client{} = client, input, options \\ []) do
    request(client, "DeleteAccessControlRule", input, options)
  end

  @doc """
  Remove one or more specified aliases from a set of aliases for a given user.
  """
  def delete_alias(%Client{} = client, input, options \\ []) do
    request(client, "DeleteAlias", input, options)
  end

  @doc """
  Deletes a group from Amazon WorkMail.
  """
  def delete_group(%Client{} = client, input, options \\ []) do
    request(client, "DeleteGroup", input, options)
  end

  @doc """
  Deletes permissions granted to a member (user or group).
  """
  def delete_mailbox_permissions(%Client{} = client, input, options \\ []) do
    request(client, "DeleteMailboxPermissions", input, options)
  end

  @doc """
  Deletes an Amazon WorkMail organization and all underlying AWS resources managed
  by Amazon WorkMail as part of the organization.
  You can choose whether to delete the associated directory. For more information,
  see [Removing an organization](https://docs.aws.amazon.com/workmail/latest/adminguide/remove_organization.html)
  in the *Amazon WorkMail Administrator Guide*.
  """
  def delete_organization(%Client{} = client, input, options \\ []) do
    request(client, "DeleteOrganization", input, options)
  end

  @doc """
  Deletes the specified resource.
  """
  def delete_resource(%Client{} = client, input, options \\ []) do
    request(client, "DeleteResource", input, options)
  end

  @doc """
  Deletes the specified retention policy from the specified organization.
  """
  def delete_retention_policy(%Client{} = client, input, options \\ []) do
    request(client, "DeleteRetentionPolicy", input, options)
  end

  @doc """
  Deletes a user from Amazon WorkMail and all subsequent systems.
  Before you can delete a user, the user state must be `DISABLED`. Use the
  `DescribeUser` action to confirm the user state.
  Deleting a user is permanent and cannot be undone. WorkMail archives user
  mailboxes for 30 days before they are permanently removed.
  """
  def delete_user(%Client{} = client, input, options \\ []) do
    request(client, "DeleteUser", input, options)
  end

  @doc """
  Mark a user, group, or resource as no longer used in Amazon WorkMail.
  This action disassociates the mailbox and schedules it for clean-up. WorkMail
  keeps mailboxes for 30 days before they are permanently removed. The
  functionality in the console is *Disable*.
  """
  def deregister_from_work_mail(%Client{} = client, input, options \\ []) do
    request(client, "DeregisterFromWorkMail", input, options)
  end

  @doc """
  Returns the data available for the group.
  """
  def describe_group(%Client{} = client, input, options \\ []) do
    request(client, "DescribeGroup", input, options)
  end

  @doc """
  Describes the current status of a mailbox export job.
  """
  def describe_mailbox_export_job(%Client{} = client, input, options \\ []) do
    request(client, "DescribeMailboxExportJob", input, options)
  end

  @doc """
  Provides more information regarding a given organization based on its
  identifier.
  """
  def describe_organization(%Client{} = client, input, options \\ []) do
    request(client, "DescribeOrganization", input, options)
  end

  @doc """
  Returns the data available for the resource.
  """
  def describe_resource(%Client{} = client, input, options \\ []) do
    request(client, "DescribeResource", input, options)
  end

  @doc """
  Provides information regarding the user.
  """
  def describe_user(%Client{} = client, input, options \\ []) do
    request(client, "DescribeUser", input, options)
  end

  @doc """
  Removes a member from the resource's set of delegates.
  """
  def disassociate_delegate_from_resource(%Client{} = client, input, options \\ []) do
    request(client, "DisassociateDelegateFromResource", input, options)
  end

  @doc """
  Removes a member from a group.
  """
  def disassociate_member_from_group(%Client{} = client, input, options \\ []) do
    request(client, "DisassociateMemberFromGroup", input, options)
  end

  @doc """
  Gets the effects of an organization's access control rules as they apply to a
  specified IPv4 address, access protocol action, or user ID.
  """
  def get_access_control_effect(%Client{} = client, input, options \\ []) do
    request(client, "GetAccessControlEffect", input, options)
  end

  @doc """
  Gets the default retention policy details for the specified organization.
  """
  def get_default_retention_policy(%Client{} = client, input, options \\ []) do
    request(client, "GetDefaultRetentionPolicy", input, options)
  end

  @doc """
  Requests a user's mailbox details for a specified organization and user.
  """
  def get_mailbox_details(%Client{} = client, input, options \\ []) do
    request(client, "GetMailboxDetails", input, options)
  end

  @doc """
  Lists the access control rules for the specified organization.
  """
  def list_access_control_rules(%Client{} = client, input, options \\ []) do
    request(client, "ListAccessControlRules", input, options)
  end

  @doc """
  Creates a paginated call to list the aliases associated with a given entity.
  """
  def list_aliases(%Client{} = client, input, options \\ []) do
    request(client, "ListAliases", input, options)
  end

  @doc """
  Returns an overview of the members of a group.
  Users and groups can be members of a group.
  """
  def list_group_members(%Client{} = client, input, options \\ []) do
    request(client, "ListGroupMembers", input, options)
  end

  @doc """
  Returns summaries of the organization's groups.
  """
  def list_groups(%Client{} = client, input, options \\ []) do
    request(client, "ListGroups", input, options)
  end

  @doc """
  Lists the mailbox export jobs started for the specified organization within the
  last seven days.
  """
  def list_mailbox_export_jobs(%Client{} = client, input, options \\ []) do
    request(client, "ListMailboxExportJobs", input, options)
  end

  @doc """
  Lists the mailbox permissions associated with a user, group, or resource
  mailbox.
  """
  def list_mailbox_permissions(%Client{} = client, input, options \\ []) do
    request(client, "ListMailboxPermissions", input, options)
  end

  @doc """
  Returns summaries of the customer's organizations.
  """
  def list_organizations(%Client{} = client, input, options \\ []) do
    request(client, "ListOrganizations", input, options)
  end

  @doc """
  Lists the delegates associated with a resource.
  Users and groups can be resource delegates and answer requests on behalf of the
  resource.
  """
  def list_resource_delegates(%Client{} = client, input, options \\ []) do
    request(client, "ListResourceDelegates", input, options)
  end

  @doc """
  Returns summaries of the organization's resources.
  """
  def list_resources(%Client{} = client, input, options \\ []) do
    request(client, "ListResources", input, options)
  end

  @doc """
  Lists the tags applied to an Amazon WorkMail organization resource.
  """
  def list_tags_for_resource(%Client{} = client, input, options \\ []) do
    request(client, "ListTagsForResource", input, options)
  end

  @doc """
  Returns summaries of the organization's users.
  """
  def list_users(%Client{} = client, input, options \\ []) do
    request(client, "ListUsers", input, options)
  end

  @doc """
  Adds a new access control rule for the specified organization.
  The rule allows or denies access to the organization for the specified IPv4
  addresses, access protocol actions, and user IDs. Adding a new rule with the
  same name as an existing rule replaces the older rule.
  """
  def put_access_control_rule(%Client{} = client, input, options \\ []) do
    request(client, "PutAccessControlRule", input, options)
  end

  @doc """
  Sets permissions for a user, group, or resource.
  This replaces any pre-existing permissions.
  """
  def put_mailbox_permissions(%Client{} = client, input, options \\ []) do
    request(client, "PutMailboxPermissions", input, options)
  end

  @doc """
  Puts a retention policy to the specified organization.
  """
  def put_retention_policy(%Client{} = client, input, options \\ []) do
    request(client, "PutRetentionPolicy", input, options)
  end

  @doc """
  Registers an existing and disabled user, group, or resource for Amazon WorkMail
  use by associating a mailbox and calendaring capabilities.
  It performs no change if the user, group, or resource is enabled and fails if
  the user, group, or resource is deleted. This operation results in the
  accumulation of costs. For more information, see
  [Pricing](https://aws.amazon.com/workmail/pricing). The equivalent console
  functionality for this operation is *Enable*.
  Users can either be created by calling the `CreateUser` API operation or they
  can be synchronized from your directory. For more information, see
  `DeregisterFromWorkMail`.
  """
  def register_to_work_mail(%Client{} = client, input, options \\ []) do
    request(client, "RegisterToWorkMail", input, options)
  end

  @doc """
  Allows the administrator to reset the password for a user.
  """
  def reset_password(%Client{} = client, input, options \\ []) do
    request(client, "ResetPassword", input, options)
  end

  @doc """
  Starts a mailbox export job to export MIME-format email messages and calendar
  items from the specified mailbox to the specified Amazon Simple Storage Service
  (Amazon S3) bucket.
  For more information, see [Exporting mailbox content](https://docs.aws.amazon.com/workmail/latest/adminguide/mail-export.html)
  in the *Amazon WorkMail Administrator Guide*.
  """
  def start_mailbox_export_job(%Client{} = client, input, options \\ []) do
    request(client, "StartMailboxExportJob", input, options)
  end

  @doc """
  Applies the specified tags to the specified Amazon WorkMail organization
  resource.
  """
  def tag_resource(%Client{} = client, input, options \\ []) do
    request(client, "TagResource", input, options)
  end

  @doc """
  Untags the specified tags from the specified Amazon WorkMail organization
  resource.
  """
  def untag_resource(%Client{} = client, input, options \\ []) do
    request(client, "UntagResource", input, options)
  end

  @doc """
  Updates a user's current mailbox quota for a specified organization and user.
  """
  def update_mailbox_quota(%Client{} = client, input, options \\ []) do
    request(client, "UpdateMailboxQuota", input, options)
  end

  @doc """
  Updates the primary email for a user, group, or resource.
  The current email is moved into the list of aliases (or swapped between an
  existing alias and the current primary email), and the email provided in the
  input is promoted as the primary.
  """
  def update_primary_email_address(%Client{} = client, input, options \\ []) do
    request(client, "UpdatePrimaryEmailAddress", input, options)
  end

  @doc """
  Updates data for the resource.
  To have the latest information, it must be preceded by a `DescribeResource`
  call. The dataset in the request should be the one expected when performing
  another `DescribeResource` call.
  """
  def update_resource(%Client{} = client, input, options \\ []) do
    request(client, "UpdateResource", input, options)
  end

  # Shared dispatcher: every WorkMail operation is a signed JSON POST whose
  # target action is `action`. Centralizing the call removes the identical
  # body repeated across all public functions.
  defp request(%Client{} = client, action, input, options) do
    Request.request_post(client, metadata(), action, input, options)
  end
end
| 37.096234 | 173 | 0.718419 |
0883f6d6a7e0dec4cfa51f4117557043bc5c1f51 | 6,990 | exs | Elixir | test/phoenix/endpoint/render_errors_test.exs | coderberry/phoenix | 9abaa84ea675d3db97cfba77745e76300acb6fd2 | [
"MIT"
] | null | null | null | test/phoenix/endpoint/render_errors_test.exs | coderberry/phoenix | 9abaa84ea675d3db97cfba77745e76300acb6fd2 | [
"MIT"
] | null | null | null | test/phoenix/endpoint/render_errors_test.exs | coderberry/phoenix | 9abaa84ea675d3db97cfba77745e76300acb6fd2 | [
"MIT"
] | null | null | null | defmodule Phoenix.Endpoint.RenderErrorsTest do
use ExUnit.Case, async: true
use RouterHelper
import ExUnit.CaptureLog
view = __MODULE__
def render("app.html", %{view_template: view_template} = assigns) do
"Layout: " <> render(view_template, assigns)
end
def render("404.html", %{kind: kind, reason: _reason, stack: _stack, conn: conn}) do
"Got 404 from #{kind} with #{conn.method}"
end
def render("404.json", %{kind: kind, reason: _reason, stack: _stack, conn: conn}) do
%{error: "Got 404 from #{kind} with #{conn.method}"}
end
def render("415.html", %{kind: kind, reason: _reason, stack: _stack, conn: conn}) do
"Got 415 from #{kind} with #{conn.method}"
end
def render("500.html", %{kind: kind, reason: _reason, stack: _stack, conn: conn}) do
"Got 500 from #{kind} with #{conn.method}"
end
def render("500.text", _) do
"500 in TEXT"
end
defmodule Router do
use Plug.Router
use Phoenix.Endpoint.RenderErrors, view: view, accepts: ~w(html json)
plug :match
plug :dispatch
get "/boom" do
resp conn, 200, "oops"
raise "oops"
end
get "/send_and_boom" do
send_resp conn, 200, "oops"
raise "oops"
end
get "/send_and_wrapped" do
raise Plug.Conn.WrapperError, conn: conn,
kind: :error, stack: System.stacktrace,
reason: ArgumentError.exception("oops")
end
match _ do
raise Phoenix.Router.NoRouteError, conn: conn, router: __MODULE__
end
end
test "call/2 is overridden" do
assert_raise RuntimeError, "oops", fn ->
call(Router, :get, "/boom")
end
assert_received {:plug_conn, :sent}
end
test "call/2 is overridden but is a no-op when response is already sent" do
assert_raise RuntimeError, "oops", fn ->
call(Router, :get, "/send_and_boom")
end
assert_received {:plug_conn, :sent}
end
test "call/2 is overridden with no route match as HTML" do
assert_raise Phoenix.Router.NoRouteError,
"no route found for GET /unknown (Phoenix.Endpoint.RenderErrorsTest.Router)", fn ->
call(Router, :get, "/unknown")
end
assert_received {:plug_conn, :sent}
end
test "call/2 is overridden with no route match as JSON" do
assert_raise Phoenix.Router.NoRouteError,
"no route found for GET /unknown (Phoenix.Endpoint.RenderErrorsTest.Router)", fn ->
call(Router, :get, "/unknown?_format=json")
end
assert_received {:plug_conn, :sent}
end
@tag :capture_log
test "call/2 is overridden with no route match while malformed format" do
assert_raise Phoenix.Router.NoRouteError,
"no route found for GET /unknown (Phoenix.Endpoint.RenderErrorsTest.Router)", fn ->
call(Router, :get, "/unknown?_format=unknown")
end
assert_received {:plug_conn, :sent}
end
test "call/2 is overridden and unwraps wrapped errors" do
assert_raise ArgumentError, "oops", fn ->
conn(:get, "/send_and_wrapped") |> Router.call([])
end
assert_received {:plug_conn, :sent}
end
defp render(conn, opts, fun) do
opts =
opts
|> Keyword.put_new(:view, __MODULE__)
|> Keyword.put_new(:accepts, ~w(html))
try do
fun.()
catch
kind, error ->
Phoenix.Endpoint.RenderErrors.render(conn, kind, error, System.stacktrace, opts)
else
_ -> flunk "function should have failed"
end
end
test "exception page for throws" do
conn = render(conn(:get, "/"), [], fn ->
throw :hello
end)
assert conn.status == 500
assert conn.resp_body == "Got 500 from throw with GET"
end
test "exception page for errors" do
conn = render(conn(:get, "/"), [], fn ->
:erlang.error :badarg
end)
assert conn.status == 500
assert conn.resp_body == "Got 500 from error with GET"
end
test "exception page for exceptions" do
conn = render(conn(:get, "/"), [], fn ->
raise Plug.Parsers.UnsupportedMediaTypeError, media_type: "foo/bar"
end)
assert conn.status == 415
assert conn.resp_body == "Got 415 from error with GET"
end
test "exception page for exits" do
conn = render(conn(:get, "/"), [], fn ->
exit {:timedout, {GenServer, :call, [:foo, :bar]}}
end)
assert conn.status == 500
assert conn.resp_body == "Got 500 from exit with GET"
end
test "exception page ignores params _format" do
conn = render(conn(:get, "/", _format: "text"), [accepts: ["html", "text"]], fn ->
throw :hello
end)
assert conn.status == 500
assert conn.resp_body == "500 in TEXT"
end
test "exception page uses stored _format" do
conn = conn(:get, "/") |> put_private(:phoenix_format, "text")
conn = render(conn, [accepts: ["html", "text"]], fn -> throw :hello end)
assert conn.status == 500
assert conn.resp_body == "500 in TEXT"
end
test "exception page with custom format" do
conn = render(conn(:get, "/"), [accepts: ~w(text)], fn ->
throw :hello
end)
assert conn.status == 500
assert conn.resp_body == "500 in TEXT"
end
test "exception page with layout" do
conn =
conn(:get, "/")
|> render([layout: {__MODULE__, :app}], fn -> throw :hello end)
assert conn.status == 500
assert conn.resp_body == "Layout: Got 500 from throw with GET"
end
# An unrenderable accept header still produces an error page (falls back to html).
@tag :capture_log
test "exception page is shown even with invalid format" do
conn =
conn(:get, "/")
|> put_req_header("accept", "unknown/unknown")
|> render([], fn -> throw :hello end)
assert conn.status == 500
assert conn.resp_body == "Got 500 from throw with GET"
end
# Malformed query strings must not break error rendering itself.
test "exception page is shown even with invalid query parameters" do
conn =
conn(:get, "/?q=%{")
|> render([], fn -> throw :hello end)
assert conn.status == 500
assert conn.resp_body == "Got 500 from throw with GET"
end
# Unsupported accept headers are logged as a warning.
test "captures warning when format is not supported" do
assert capture_log(fn ->
conn(:get, "/")
|> put_req_header("accept", "unknown/unknown")
|> render([], fn -> throw :hello end)
end) =~ "Could not render errors due to no supported media type in accept header"
end
# An unknown _format value is logged as a warning.
test "captures warning when format does not match error view" do
assert capture_log(fn ->
conn(:get, "/?_format=unknown")
|> render([], fn -> throw :hello end)
end) =~ "Could not render errors due to unknown format \"unknown\""
end
# A renderable format produces no log output at all.
test "does not capture warning when format does match ErrorView" do
assert capture_log(fn ->
conn(:get, "/")
|> put_req_header("accept", "text/html")
|> render([], fn -> throw :hello end)
end) == ""
end
# NoRouteError carries plug_status: 404, which the renderer honors.
test "exception page for NoRouteError with plug_status 404" do
conn = render(conn(:get, "/"), [], fn ->
raise Phoenix.Router.NoRouteError, conn: conn(:get, "/"), router: nil, plug_status: 404
end)
assert conn.status == 404
assert conn.resp_body == "Got 404 from error with GET"
end
end
| 27.96 | 93 | 0.634335 |
0884226f9be29746ddc1cf1715bc166f59da1d09 | 1,446 | ex | Elixir | lib/dogma/rule/function_arity.ex | lpil/dogma | e0f55c27ec3053be0313ac93f3d036437ee590a1 | [
"MIT"
] | 558 | 2015-06-21T18:20:59.000Z | 2021-12-30T16:25:47.000Z | lib/dogma/rule/function_arity.ex | lpil/dogma | e0f55c27ec3053be0313ac93f3d036437ee590a1 | [
"MIT"
] | 252 | 2015-06-19T13:00:47.000Z | 2021-04-21T08:04:16.000Z | lib/dogma/rule/function_arity.ex | lpil/dogma | e0f55c27ec3053be0313ac93f3d036437ee590a1 | [
"MIT"
] | 79 | 2015-06-21T14:18:30.000Z | 2021-12-30T16:26:09.000Z | use Dogma.RuleBuilder
defrule Dogma.Rule.FunctionArity, max: 4 do
@moduledoc """
A rule that disallows functions and macros with arity greater than 4, meaning
a function may not take more than 4 arguments.
By default this function is considered invalid by this rule:
def transform(a, b, c, d, e) do
# Do something
end
The maximum allowed arity for this rule can be configured with the `max`
option in your mix config.
"""
alias Dogma.Script
alias Dogma.Error
# Entry point called by the Dogma runner: walk the script AST, collecting
# an Error for every def/defp/defmacro whose arity exceeds rule.max.
def test(rule, script) do
Script.walk(script, &check_ast(&1, &2, rule.max))
end
@defs ~w(def defp defmacro)a
# Compile-time loop: generates a pair of check_ast/3 clauses for each of
# :def, :defp and :defmacro (unquote(type) is expanded per iteration).
for type <- @defs do
# Bodiless definition head (args AST is nil) — nothing to check.
defp check_ast({unquote(type), _, nil} = ast, errors, _) do
{ast, errors}
end
# A real definition: extract name/line/args and flag it when over the limit.
defp check_ast({unquote(type), _, _} = ast, errors, max) do
{name, line, args} = get_fun_details(ast)
arity = args |> length
if arity > max do
{ast, [error(line, name, max, arity) | errors]}
else
{ast, errors}
end
end
end
# Catch-all: any other AST node passes through unchanged.
defp check_ast(ast, errors, _) do
{ast, errors}
end
# Pulls {name, line, args} out of a definition node; args may be nil for
# zero-arity definitions written without parens, hence the || [] default.
defp get_fun_details(ast) do
{_, [line: line], details} = ast
{name, _, args} = hd( details )
args = args || []
{name, line, args}
end
# Builds the Error struct reported to the user for one offending definition.
defp error(pos, name, max, arity) do
%Error{
rule: __MODULE__,
message: "Arity of `#{name}` should be less than #{max} (was #{arity}).",
line: Dogma.Script.line(pos),
}
end
end
| 23.322581 | 79 | 0.613416 |
08846b38ceccb3b6efaa848efbf56f3235114a71 | 327 | ex | Elixir | lib/repoex_web/auth/error_handler.ex | RomuloHe4rt/Repoex | 6ccf09e1f0be63d3021533c3c9bd7b8e5fd73925 | [
"MIT"
] | null | null | null | lib/repoex_web/auth/error_handler.ex | RomuloHe4rt/Repoex | 6ccf09e1f0be63d3021533c3c9bd7b8e5fd73925 | [
"MIT"
] | null | null | null | lib/repoex_web/auth/error_handler.ex | RomuloHe4rt/Repoex | 6ccf09e1f0be63d3021533c3c9bd7b8e5fd73925 | [
"MIT"
] | null | null | null | defmodule RepoexWeb.Auth.ErrorHandler do
import Plug.Conn
alias Guardian.Plug.ErrorHandler
@behaviour ErrorHandler
# Guardian error callback: every authentication failure is answered with
# a 401 and a JSON body of the form %{"message" => <error tag as string>}.
def auth_error(conn, {error, _reason}, _opts) do
  message = to_string(error)

  conn
  |> put_resp_content_type("application/json")
  |> send_resp(401, Jason.encode!(%{message: message}))
end
end
| 20.4375 | 54 | 0.715596 |
0884a63f5562c8644ac56cafac8af05c614ed96d | 400 | exs | Elixir | test/api_key_test.exs | norbertnull/pubg_api | 54b20ec91feeefc72d6dc2034cd70da62f06083a | [
"Apache-2.0"
] | null | null | null | test/api_key_test.exs | norbertnull/pubg_api | 54b20ec91feeefc72d6dc2034cd70da62f06083a | [
"Apache-2.0"
] | null | null | null | test/api_key_test.exs | norbertnull/pubg_api | 54b20ec91feeefc72d6dc2034cd70da62f06083a | [
"Apache-2.0"
] | null | null | null | defmodule PubgApiTest.ApiKey do
use ExUnit.Case
doctest PubgApi.ApiKey
# generate/1 wraps a raw key in the "Bearer <key>" scheme.
test "Generate API Key string" do
assert PubgApi.ApiKey.generate("api_key") == "Bearer api_key"
end
# add_to_headers/2 places the bearer token under the "Authorization" key.
test "Add API Key string to header map" do
api_key = "api_key"
header_map =
%{}
|> PubgApi.ApiKey.add_to_headers(api_key)
assert header_map["Authorization"] == "Bearer api_key"
end
end
| 21.052632 | 65 | 0.6875 |
0884adc401f8fa66fc92c49ec958b3996c54fe80 | 10,357 | exs | Elixir | test/expression_test.exs | turnhub/expression | 51b857bc1d9f276a2bd8750892eddebd5f35f922 | [
"Apache-2.0"
] | 4 | 2020-12-13T21:11:13.000Z | 2022-01-18T19:20:30.000Z | test/expression_test.exs | turnhub/expression | 51b857bc1d9f276a2bd8750892eddebd5f35f922 | [
"Apache-2.0"
] | 31 | 2021-02-08T07:12:49.000Z | 2022-03-04T10:54:40.000Z | test/expression_test.exs | turnhub/expression | 51b857bc1d9f276a2bd8750892eddebd5f35f922 | [
"Apache-2.0"
] | null | null | null | defmodule ExpressionTest do
use ExUnit.Case, async: true
doctest Expression
describe "types" do
test "text" do
assert {:ok, [text: "hello"]} = Expression.parse("hello")
end
test "decimal" do
value = Decimal.new("1.23")
assert {:ok, [substitution: [block: [literal: ^value]]]} = Expression.parse("@(1.23)")
end
test "datetime" do
{:ok, value, 0} = DateTime.from_iso8601("2020-11-21T20:13:51.921042Z")
assert {:ok, [substitution: [block: [literal: ^value]]]} =
Expression.parse("@(2020-11-21T20:13:51.921042Z)")
{:ok, value, 0} = DateTime.from_iso8601("2020-02-01T23:23:23Z")
assert {:ok, [substitution: [block: [literal: ^value]]]} =
Expression.parse("@(01-02-2020 23:23:23)")
full_minute = %{value | second: 0}
assert {:ok, [substitution: [block: [literal: ^full_minute]]]} =
Expression.parse("@(01-02-2020 23:23)")
end
test "boolean" do
assert {:ok, [substitution: [block: [literal: true]]]} = Expression.parse("@(true)")
assert {:ok, [substitution: [block: [literal: true]]]} = Expression.parse("@(True)")
assert {:ok, [substitution: [block: [literal: false]]]} = Expression.parse("@(false)")
assert {:ok, [substitution: [block: [literal: false]]]} = Expression.parse("@(False)")
end
end
describe "case insensitive" do
test "variables" do
assert {:ok, [substitution: [variable: ["contact", "name"]]]} =
Expression.parse("@CONTACT.Name")
end
test "functions" do
assert {:ok, [substitution: [function: ["hour"]]]} = Expression.parse("@hour()")
assert {:ok, [substitution: [function: ["hour", {:arguments, [function: ["now"]]}]]]} =
Expression.parse("@hour(Now())")
end
end
describe "templating" do
test "substitution" do
assert {:ok, [substitution: [variable: ["contact"]]]} = Expression.parse("@contact")
assert {:ok, [substitution: [variable: ["contact", "name"]]]} =
Expression.parse("@contact.name")
end
end
describe "blocks" do
test "block" do
assert {:ok, [substitution: [block: [variable: ["contact", "name"]]]]} =
Expression.parse("@(contact.name)")
end
end
describe "functions" do
test "without arguments" do
assert {:ok, [substitution: [function: ["hour"]]]} = Expression.parse("@HOUR()")
end
test "with a single argument" do
assert {:ok,
[
substitution: [
function: ["hour", {:arguments, [variable: ["contact", "timestamp"]]}]
]
]} = Expression.parse("@HOUR(contact.timestamp)")
end
test "with a multiple argument" do
assert {:ok,
[
substitution: [
function: [
"edate",
{:arguments,
[
{
:variable,
["date", "today"]
},
{
:literal,
1
}
]}
]
]
]} = Expression.parse("@EDATE(date.today, 1)")
end
test "with functions" do
assert {:ok,
[
substitution: [function: ["hour", {:arguments, [{:function, ["now"]}]}]]
]} = Expression.parse("@HOUR(NOW())")
end
end
describe "logic" do
test "lte" do
assert {
:ok,
[
{:substitution, [block: [<=: [variable: ["block", "value"], literal: 30]]]}
]
} == Expression.parse("@(block.value <= 30)")
end
test "add" do
assert {:ok,
[
substitution: [
block: [+: [literal: 1, variable: ["a"]]]
]
]} = Expression.parse("@(1 + a)")
assert {:ok,
[
substitution: [
block: [+: [{:variable, ["contact", "age"]}, {:literal, 1}]]
]
]} = Expression.parse("@(contact.age+1)")
end
test "join" do
assert {:ok,
[
substitution: [
block: [
&: [
{:&, [variable: ["contact", "first_name"], literal: " "]},
{:variable, ["contact", "last_name"]}
]
]
]
]} = Expression.parse("@(contact.first_name & \" \" & contact.last_name)")
end
end
describe "evaluate" do
test "calculation with explicit precedence" do
assert {:ok, 8} = Expression.evaluate("@(2 + (2 * 3))")
end
test "calculation with default precedence" do
assert {:ok, 8} = Expression.evaluate("@(2 + 2 * 3)")
end
test "exponent precendence over addition" do
assert {:ok, 10.0} = Expression.evaluate("@(2 + 2 ^ 3)")
end
test "exponent precendence over multiplication" do
assert {:ok, 16.0} = Expression.evaluate("@(2 * 2 ^ 3)")
end
test "example calculation from floip expression docs" do
assert {:ok, 0.999744} = Expression.evaluate("@(1 + (2 - 3) * 4 / 5 ^ 6)")
end
test "example logical comparison" do
assert {:ok, true} ==
Expression.evaluate("@(contact.age > 18)", %{"contact" => %{"age" => 20}})
assert {:ok, true} ==
Expression.evaluate("@(contact.age >= 20)", %{"contact" => %{"age" => 20}})
assert {:ok, false} ==
Expression.evaluate("@(contact.age < 18)", %{"contact" => %{"age" => 20}})
assert {:ok, true} ==
Expression.evaluate("@(contact.age <= 20)", %{"contact" => %{"age" => 20}})
assert {:ok, true} ==
Expression.evaluate("@(contact.age <= 30)", %{"contact" => %{"age" => 20}})
assert {:ok, false} ==
Expression.evaluate("@(contact.age == 18)", %{"contact" => %{"age" => 20}})
assert {:ok, false} ==
Expression.evaluate("@(contact.age = 18)", %{"contact" => %{"age" => 20}})
assert {:ok, true} ==
Expression.evaluate("@(contact.age != 18)", %{"contact" => %{"age" => 20}})
assert {:ok, true} ==
Expression.evaluate("@(contact.age <> 18)", %{"contact" => %{"age" => 20}})
assert {:ok, true} ==
Expression.evaluate("@(contact.age == 18)", %{"contact" => %{"age" => 18}})
end
test "escaping @s" do
assert {:ok, "user@example.org"} = Expression.evaluate("user@@example.org")
assert {:ok, "user@example.org"} = Expression.evaluate("@('user' & '@example.org')")
end
test "substitution" do
assert {:ok, "hello name"} =
Expression.evaluate("hello @(contact.name)", %{
"contact" => %{
"name" => "name"
}
})
end
test "addition" do
assert {:ok, "next year you are 41 years old"} =
Expression.evaluate("next year you are @(contact.age + 1) years old", %{
"contact" => %{
"age" => 40
}
})
end
test "function name case insensitivity" do
assert {:ok, dt} = Expression.evaluate("@(NOW())")
assert dt.year == DateTime.utc_now().year
assert {:ok, dt} = Expression.evaluate("@(noW())")
assert dt.year == DateTime.utc_now().year
end
test "function calls with zero arguments" do
assert {:ok, dt} = Expression.evaluate("@(NOW())")
assert dt.year == DateTime.utc_now().year
end
test "function calls with one or more arguments" do
assert {:ok, dt} = Expression.evaluate("@(DATE(2020, 12, 30))")
assert dt.year == 2020
assert dt.month == 12
assert dt.day == 30
end
test "function calls default arguments" do
expected = Timex.format!(DateTime.utc_now(), "%Y-%m-%d %H:%M:%S", :strftime)
assert {:ok, expected} == Expression.evaluate("@(DATEVALUE(NOW()))")
expected = Timex.format!(DateTime.utc_now(), "%Y-%m-%d", :strftime)
assert {:ok, expected} == Expression.evaluate("@(DATEVALUE(NOW(), \"%Y-%m-%d\"))")
end
test "function calls with expressions" do
assert {:ok,
[
text: "Dear ",
substitution: [
function: [
"if",
{:arguments,
[
{
:==,
[variable: ["contact", "gender"], literal: "M"]
},
{:literal, "Sir"},
{:literal, "lovely client"}
]}
]
]
]} = Expression.parse("Dear @IF(contact.gender = 'M', 'Sir', 'lovely client')")
assert {:ok, "Dear lovely client"} =
Expression.evaluate("Dear @IF(contact.gender = 'M', 'Sir', 'lovely client')", %{
"contact" => %{"gender" => "O"}
})
end
test "evaluate_block" do
assert {:ok, true} ==
Expression.evaluate_block("contact.age > 10", %{contact: %{age: 21}})
assert {:ok, 2} == Expression.evaluate_block("1 + 1")
end
test "return an error tuple" do
assert {:error, "expression is not a number: `\"not a number\"`"} =
Expression.evaluate_block("block.value > 0", %{block: %{value: "not a number"}})
end
test "return an error tuple when variables are not defined" do
assert {:error, "variable \"block.value\" is undefined or null"} =
Expression.evaluate_block("block.value > 0", %{block: %{}})
end
test "throw an error when variables are not defined" do
assert_raise RuntimeError, "variable \"block.value\" is undefined or null", fn ->
Expression.evaluate_block!("block.value > 0", %{block: %{}})
end
end
test "throw an error" do
assert_raise RuntimeError, "expression is not a number: `\"not a number\"`", fn ->
Expression.evaluate_block!("block.value > 0", %{block: %{value: "not a number"}})
end
end
end
end
| 32.569182 | 95 | 0.487883 |
0884b908392fef5c4386ecb8d687a80a4e9021f1 | 517 | ex | Elixir | lib/webpipe/session_store.ex | hyperngn/webpipe | 67393cdc6f5fc26fccc24d1081341239f2d7d733 | [
"MIT"
] | 36 | 2020-05-25T07:55:10.000Z | 2021-11-09T11:18:47.000Z | lib/webpipe/session_store.ex | hyperngn/webpipe | 67393cdc6f5fc26fccc24d1081341239f2d7d733 | [
"MIT"
] | null | null | null | lib/webpipe/session_store.ex | hyperngn/webpipe | 67393cdc6f5fc26fccc24d1081341239f2d7d733 | [
"MIT"
] | 1 | 2020-05-25T15:04:24.000Z | 2020-05-25T15:04:24.000Z | defmodule Webpipe.SessionStore do
use GenServer
# Starts the store; the GenServer holds only a placeholder state (:state) —
# the actual data lives in the :sessions ETS table created in init/1.
def start_link(_opts) do
GenServer.start_link(__MODULE__, :state)
end
@impl GenServer
def init(_opts) do
# Bag table: one session id can map to many listener pids, conceptually
# %{"foo" => [pid1, pid2], "bar" => ...}. :public + :named_table means
# callers read/write it directly without messaging this process.
:ets.new(:sessions, [:named_table, :public, :bag])
{:ok, :state}
end
# Returns the pids of every process registered as a listener for the
# given session id (empty list when nothing is registered).
def get_listeners(session_id) do
  for {_session_id, pid} <- :ets.lookup(:sessions, session_id), do: pid
end
# Registers the calling process as a listener for the given session id.
# Returns true (the :ets.insert result).
def register_listener(session_id) do
  listener = self()
  :ets.insert(:sessions, {session_id, listener})
end
end
| 21.541667 | 54 | 0.642166 |
0884d532e8c2f7fc1842928e0090a64ca20d93c9 | 482 | exs | Elixir | priv/repo/migrations/20190118235356_add_transactions_table.exs | pakorn186c/blockchain-api | 3c9fbc892e645f9bb144414f3da36749603f37bc | [
"Apache-2.0"
] | 17 | 2019-11-03T03:02:41.000Z | 2022-01-13T17:03:32.000Z | priv/repo/migrations/20190118235356_add_transactions_table.exs | AddressXception/blockchain-api | eea98fa78af2887cc84762f84532c602c3b8b666 | [
"Apache-2.0"
] | 5 | 2019-11-07T23:26:53.000Z | 2020-11-24T21:45:35.000Z | priv/repo/migrations/20190118235356_add_transactions_table.exs | AddressXception/blockchain-api | eea98fa78af2887cc84762f84532c602c3b8b666 | [
"Apache-2.0"
] | 11 | 2019-12-04T07:03:16.000Z | 2022-01-13T17:03:50.000Z | defmodule BlockchainAPI.Repo.Migrations.AddTransactionsTable do
use Ecto.Migration
def change do
create table(:transactions) do
# Transactions are identified by their binary hash (unique index below).
add :hash, :binary, null: false
add :type, :string, null: false
# New rows default to "cleared" unless a status is supplied.
add :status, :string, null: false, default: "cleared"
# Foreign key to blocks by its height column (not the usual id).
add :block_height, references(:blocks, on_delete: :nothing, column: :height), null: false
timestamps()
end
# Enforce at most one row per transaction hash.
create unique_index(:transactions, [:hash], name: :unique_txn_hash)
end
end
| 25.368421 | 95 | 0.688797 |
0884daaeb4e2494b60a8486a7a534e5b1dfc808d | 7,382 | exs | Elixir | test/brando/blueprints/blueprint_test.exs | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
] | 1 | 2020-04-26T09:53:02.000Z | 2020-04-26T09:53:02.000Z | test/brando/blueprints/blueprint_test.exs | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
] | 198 | 2019-08-20T16:16:07.000Z | 2020-07-03T15:42:07.000Z | test/brando/blueprints/blueprint_test.exs | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
] | null | null | null | defmodule Brando.Blueprint.BlueprintTest do
use ExUnit.Case
alias Brando.Blueprint.Asset
alias Brando.Blueprint.Attribute
alias Brando.Blueprint.Relation
# __naming__/0 exposes the blueprint's derived naming metadata.
test "naming" do
assert Brando.BlueprintTest.Project.__naming__().application == "Brando"
assert Brando.BlueprintTest.Project.__naming__().domain == "Projects"
assert Brando.BlueprintTest.Project.__naming__().schema == "Project"
assert Brando.BlueprintTest.Project.__naming__().singular == "project"
assert Brando.BlueprintTest.Project.__naming__().plural == "projects"
end
# __modules__/1 resolves the application, context and schema modules.
test "modules" do
assert Brando.BlueprintTest.Project.__modules__(:application) == Brando
assert Brando.BlueprintTest.Project.__modules__(:context) == Brando.Projects
assert Brando.BlueprintTest.Project.__modules__(:schema) == Brando.Projects.Project
end
# __traits__/0 lists the attached traits (with their options) in order.
test "traits" do
assert Brando.BlueprintTest.Project.__traits__() == [
{Brando.Trait.Creator, []},
{Brando.Trait.SoftDelete, []},
{Brando.Trait.Sequenced, []},
{Brando.Trait.Timestamped, []}
]
end
test "changeset mutators" do
mutated_cs =
Brando.BlueprintTest.Project.changeset(
%Brando.BlueprintTest.Project{},
%{title: "my title", slug: "my-title"},
%{id: 1}
)
assert mutated_cs.changes.creator_id == 1
assert mutated_cs.changes.title == "my title"
end
test "__required_attrs__" do
required_attrs = Brando.BlueprintTest.Project.__required_attrs__()
assert required_attrs == [:slug, :creator_id]
end
test "__optional_attrs__" do
optional_attrs = Brando.BlueprintTest.Project.__optional_attrs__()
assert optional_attrs == [:title, :deleted_at, :sequence, :updated_at, :inserted_at]
end
test "attributes" do
attrs = Brando.BlueprintTest.Project.__attributes__()
assert attrs == [
%Attribute{name: :title, opts: %{}, type: :string},
%Attribute{
name: :slug,
opts: %{required: true},
type: :slug
},
%Attribute{name: :deleted_at, opts: %{}, type: :datetime},
%Attribute{
name: :sequence,
opts: %{default: 0},
type: :integer
},
%Brando.Blueprint.Attribute{name: :updated_at, opts: %{}, type: :datetime},
%Brando.Blueprint.Attribute{name: :inserted_at, opts: %{}, type: :datetime}
]
end
test "attribute_opts" do
attr_opts = Brando.BlueprintTest.Project.__attribute_opts__(:slug)
assert attr_opts == %{required: true}
end
test "assets" do
assets = Brando.BlueprintTest.Project.__assets__()
assert assets == [
%Asset{
name: :cover,
opts: %{
cfg: %Brando.Type.ImageConfig{
allowed_mimetypes: ["image/jpeg", "image/png", "image/gif"],
default_size: "medium",
formats: [:original],
overwrite: false,
random_filename: true,
size_limit: 10_240_000,
sizes: %{
"large" => %{"crop" => true, "quality" => 65, "size" => "700x700"},
"medium" => %{"crop" => true, "quality" => 65, "size" => "500x500"},
"micro" => %{"crop" => false, "quality" => 10, "size" => "25"},
"small" => %{"crop" => true, "quality" => 65, "size" => "300x300"},
"thumb" => %{"crop" => true, "quality" => 65, "size" => "150x150"},
"xlarge" => %{"crop" => true, "quality" => 65, "size" => "900x900"},
"crop_medium" => %{"crop" => true, "quality" => 65, "size" => "500x500"},
"crop_small" => %{"crop" => true, "quality" => 65, "size" => "300x300"}
},
srcset: %{
cropped: [{"crop_small", "300w"}, {"crop_medium", "500w"}],
default: [
{"small", "300w"},
{"medium", "500w"},
{"large", "700w"},
{"xlarge", "900w"}
]
},
upload_path: "images/avatars"
},
module: Brando.Images.Image
},
type: :image
},
%Brando.Blueprint.Asset{
name: :pdf,
opts: %{
cfg: %Brando.Type.FileConfig{
accept: :any,
allowed_mimetypes: ["application/pdf"],
overwrite: false,
random_filename: false,
size_limit: 10_240_000,
upload_path: "files/projects"
},
module: Brando.Files.File
},
type: :file
}
]
end
test "asset_opts" do
%{cfg: cfg} = Brando.BlueprintTest.Project.__asset_opts__(:cover)
assert cfg == %Brando.Type.ImageConfig{
allowed_mimetypes: ["image/jpeg", "image/png", "image/gif"],
default_size: "medium",
formats: [:original],
overwrite: false,
random_filename: true,
size_limit: 10_240_000,
sizes: %{
"large" => %{"crop" => true, "quality" => 65, "size" => "700x700"},
"medium" => %{"crop" => true, "quality" => 65, "size" => "500x500"},
"micro" => %{"crop" => false, "quality" => 10, "size" => "25"},
"small" => %{"crop" => true, "quality" => 65, "size" => "300x300"},
"thumb" => %{"crop" => true, "quality" => 65, "size" => "150x150"},
"xlarge" => %{"crop" => true, "quality" => 65, "size" => "900x900"},
"crop_medium" => %{"crop" => true, "quality" => 65, "size" => "500x500"},
"crop_small" => %{"crop" => true, "quality" => 65, "size" => "300x300"}
},
srcset: %{
cropped: [
{"crop_small", "300w"},
{"crop_medium", "500w"}
],
default: [
{"small", "300w"},
{"medium", "500w"},
{"large", "700w"},
{"xlarge", "900w"}
]
},
upload_path: "images/avatars"
}
end
test "relations" do
relations = Brando.BlueprintTest.Project.__relations__()
assert relations == [
%Brando.Blueprint.Relation{
name: :properties,
opts: %{module: Brando.BlueprintTest.Property},
type: :embeds_many
},
%Relation{
name: :creator,
opts: %{module: Brando.Users.User, required: true},
type: :belongs_to
}
]
end
test "ecto schema" do
schema = Brando.BlueprintTest.Project.__schema__(:fields)
assert schema == [
:id,
:title,
:slug,
:deleted_at,
:sequence,
:inserted_at,
:updated_at,
:cover_id,
:pdf_id,
:properties,
:creator_id
]
end
end
| 35.834951 | 94 | 0.483744 |
088501a93aef8f88b00c6700e9174753fddced8c | 1,274 | exs | Elixir | mix.exs | henb/phoenix_swoosh | 9815955a92a7550379f56e40cea4212b04fde9c9 | [
"MIT"
] | null | null | null | mix.exs | henb/phoenix_swoosh | 9815955a92a7550379f56e40cea4212b04fde9c9 | [
"MIT"
] | null | null | null | mix.exs | henb/phoenix_swoosh | 9815955a92a7550379f56e40cea4212b04fde9c9 | [
"MIT"
] | null | null | null | defmodule PhoenixSwoosh.Mixfile do
use Mix.Project
@version "0.2.0"
def project do
[app: :phoenix_swoosh,
version: @version,
elixir: "~> 1.2",
compilers: compilers(Mix.env),
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps(),
# Hex
description: description(),
package: package(),
# Docs
name: "Phoenix.Swoosh",
docs: [source_ref: "v#{@version}", main: "Phoenix.Swoosh",
canonical: "http://hexdocs.pm/phoenix_swoosh",
source_url: "https://github.com/swoosh/phoenix_swoosh"]]
end
defp compilers(:test), do: [:phoenix] ++ Mix.compilers
defp compilers(_), do: Mix.compilers
def application do
[applications: [:logger, :swoosh]]
end
defp deps do
[{:swoosh, "~> 0.1"},
{:phoenix, "~> 1.0"},
{:phoenix_html, "~> 2.2"},
{:credo, "~> 0.8", only: [:dev, :test]},
{:ex_doc, "~> 0.16", only: :docs},
{:inch_ex, ">= 0.0.0", only: :docs}]
end
defp description do
"""
Use Swoosh to easily send emails in your Phoenix project.
"""
end
defp package do
[maintainers: ["Steve Domin"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/swoosh/phoenix_swoosh"}]
end
end
| 23.592593 | 70 | 0.578493 |
0885119450b62c730cda2474fbdfcddb84080219 | 3,139 | ex | Elixir | clients/sql_admin/lib/google_api/sql_admin/v1beta4/api/flags.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/sql_admin/lib/google_api/sql_admin/v1beta4/api/flags.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/sql_admin/lib/google_api/sql_admin/v1beta4/api/flags.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.SQLAdmin.V1beta4.Api.Flags do
@moduledoc """
API calls for all endpoints tagged `Flags`.
"""
alias GoogleApi.SQLAdmin.V1beta4.Connection
alias GoogleApi.Gax.{Request, Response}
@doc """
List all available database flags for Cloud SQL instances.
## Parameters
* `connection` (*type:* `GoogleApi.SQLAdmin.V1beta4.Connection.t`) - Connection to server
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:databaseVersion` (*type:* `String.t`) - Database type and version you want to retrieve flags for. By default, this method returns flags for all database types and versions.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SQLAdmin.V1beta4.Model.FlagsListResponse{}}` on success
* `{:error, info}` on failure
"""
@spec sql_flags_list(Tesla.Env.client(), keyword(), keyword()) ::
{:ok, GoogleApi.SQLAdmin.V1beta4.Model.FlagsListResponse.t()} | {:error, Tesla.Env.t()}
def sql_flags_list(connection, optional_params \\ [], opts \\ []) do
# Maps each supported option to where it goes in the request (all query params here).
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:databaseVersion => :query
}
# Build a GET /flags request, attaching only the optional params the caller provided.
request =
Request.new()
|> Request.method(:get)
|> Request.url("/flags", %{})
|> Request.add_optional_params(optional_params_config, optional_params)
# Execute and decode the JSON body into a FlagsListResponse struct.
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.SQLAdmin.V1beta4.Model.FlagsListResponse{}])
end
end
| 43 | 187 | 0.675056 |
08852a4714d27bd393aa7fa303a5914a406f3564 | 61 | exs | Elixir | apps/kv_server/config/prod.exs | WhiteRookPL/production-debugging-workshop.ex | 26e81d14ba39c33764ddaee5d6d65a6061f4e823 | [
"MIT"
] | 5 | 2017-05-03T08:05:54.000Z | 2022-03-11T04:11:00.000Z | apps/kv_server/config/prod.exs | WhiteRookPL/production-debugging-workshop.ex | 26e81d14ba39c33764ddaee5d6d65a6061f4e823 | [
"MIT"
] | null | null | null | apps/kv_server/config/prod.exs | WhiteRookPL/production-debugging-workshop.ex | 26e81d14ba39c33764ddaee5d6d65a6061f4e823 | [
"MIT"
] | null | null | null | use Mix.Config
config :kv_server, :persistence_enabled, true | 20.333333 | 45 | 0.819672 |
08852ed571ef51399b3e0b9a52d5c6e81cb3fe20 | 560 | exs | Elixir | mix.exs | robinhilliard/lightplane | 8b73829ada29b31574df48aa437fd8087e9d1e60 | [
"MIT-0"
] | 3 | 2020-07-15T10:08:09.000Z | 2020-07-15T10:31:07.000Z | mix.exs | robinhilliard/lightplane | 8b73829ada29b31574df48aa437fd8087e9d1e60 | [
"MIT-0"
] | null | null | null | mix.exs | robinhilliard/lightplane | 8b73829ada29b31574df48aa437fd8087e9d1e60 | [
"MIT-0"
] | null | null | null | defmodule Aero.MixProject do
use Mix.Project
def project do
[
app: :lightplane,
version: "0.1.0",
elixir: "~> 1.9",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:dialyzex, "~> 1.2.0", only: :dev, runtime: false},
{:ex_doc, "~> 0.20.2", only: :dev, runtime: false}
]
end
end
| 19.310345 | 59 | 0.566071 |
088547611552a8d05a63fa5e0ed33203f191d4c6 | 116 | exs | Elixir | exercises/02-higher-order-functions/02-goodbye-loops/13-max_by/solution.exs | DennisWinnepenninckx/distributed-applications | 06743e4e2a09dc52ff52be831e486bb073916173 | [
"BSD-3-Clause"
] | 1 | 2021-09-22T09:52:11.000Z | 2021-09-22T09:52:11.000Z | exercises/02-higher-order-functions/02-goodbye-loops/13-max_by/solution.exs | DennisWinnepenninckx/distributed-applications | 06743e4e2a09dc52ff52be831e486bb073916173 | [
"BSD-3-Clause"
] | 22 | 2019-06-19T18:58:13.000Z | 2020-03-16T14:43:06.000Z | exercises/02-higher-order-functions/02-goodbye-loops/13-max_by/solution.exs | DennisWinnepenninckx/distributed-applications | 06743e4e2a09dc52ff52be831e486bb073916173 | [
"BSD-3-Clause"
] | 32 | 2019-09-19T03:25:11.000Z | 2020-10-06T15:01:47.000Z | defmodule Grades do
def best_student(grades) do
elem(Enum.max_by(grades, fn {_, _, g} -> g end), 1)
end
end
| 19.333333 | 55 | 0.655172 |
088567efe349e9b69b7e900d37f832c04e271ba0 | 779 | ex | Elixir | lib/anchore_engine_api_server/model/image_selection_rule.ex | michaeljguarino/anchore-elixir-client | 156a44f429ecb62433729a2b4c52de5dc0ef44d2 | [
"MIT"
] | null | null | null | lib/anchore_engine_api_server/model/image_selection_rule.ex | michaeljguarino/anchore-elixir-client | 156a44f429ecb62433729a2b4c52de5dc0ef44d2 | [
"MIT"
] | null | null | null | lib/anchore_engine_api_server/model/image_selection_rule.ex | michaeljguarino/anchore-elixir-client | 156a44f429ecb62433729a2b4c52de5dc0ef44d2 | [
"MIT"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule AnchoreEngineAPIServer.Model.ImageSelectionRule do
@moduledoc """
Auto-generated struct for an image selection rule in the Anchore Engine API:
a named rule selecting images by registry, repository and image reference.
"""
@derive [Poison.Encoder]
defstruct [
:id,
:image,
:name,
:registry,
:repository
]
# NOTE(review): `ImageRef` below is unqualified; the decoder impl deserializes
# :image into AnchoreEngineAPIServer.Model.ImageRef, so that is presumably the
# intended type — confirm before relying on this spec.
@type t :: %__MODULE__{
:id => String.t,
:image => ImageRef,
:name => String.t,
:registry => String.t,
:repository => String.t
}
end
# Custom Poison decoding: after the flat fields are decoded, convert the
# nested :image map into an ImageRef struct via the shared Deserializer.
defimpl Poison.Decoder, for: AnchoreEngineAPIServer.Model.ImageSelectionRule do
import AnchoreEngineAPIServer.Deserializer
def decode(value, options) do
value
|> deserialize(:image, :struct, AnchoreEngineAPIServer.Model.ImageRef, options)
end
end
| 21.638889 | 83 | 0.691913 |
088572f2cfaa4dd8c20f2c6c4f58d315c15ac185 | 39,225 | ex | Elixir | lib/livebook/session.ex | alaadahmed/livebook | 24668c6edb6ee638a3f5291b27bd42a3dfc0c18d | [
"Apache-2.0"
] | 1,846 | 2021-04-13T14:46:36.000Z | 2021-07-14T20:37:40.000Z | lib/livebook/session.ex | alaadahmed/livebook | 24668c6edb6ee638a3f5291b27bd42a3dfc0c18d | [
"Apache-2.0"
] | 411 | 2021-07-15T07:41:54.000Z | 2022-03-31T21:34:22.000Z | lib/livebook/session.ex | alaadahmed/livebook | 24668c6edb6ee638a3f5291b27bd42a3dfc0c18d | [
"Apache-2.0"
] | 130 | 2021-04-13T15:43:55.000Z | 2021-07-12T16:57:46.000Z | defmodule Livebook.Session do
@moduledoc false
# Server corresponding to a single notebook session.
#
# The process keeps the current notebook state and serves
# as a source of truth that multiple clients talk to.
# Receives update requests from the clients and notifies
# them of any changes applied to the notebook.
#
# ## Collaborative state
#
# The core concept is the `Livebook.Session.Data` structure
# to which we can apply reproducible operations.
# See `Livebook.Session.Data` for more information.
#
# ## Evaluation
#
# All regular sections are evaluated in the same process
# (the :main_flow evaluation container). On the other hand,
# each branching section is evaluated in its own process
# and thus runs concurrently.
#
# ### Implementation considerations
#
# In practice, every evaluation container is a `Livebook.Evaluator`
# process, so we have one such process for the main flow and one
# for each branching section. Since a branching section inherits
# the evaluation context from the parent section, the last context
# needs to be copied from the main flow evaluator to the branching
# section evaluator. The latter synchronously asks the former for
# that context using `Livebook.Evaluator.fetch_evaluation_context/3`.
# Consequently, in order to evaluate the first cell in a branching
# section, the main flow needs to be free of work, otherwise we wait.
# This assumptions are mirrored in by `Livebook.Session.Data` when
# determining cells for evaluation.
#
# Note: the context could be copied asynchronously if evaluator
# kept the contexts in its process dictionary, however the other
# evaluator could only read the whole process dictionary, thus
# allocating a lot of memory unnecessarily, which would be unacceptable
# for large data. By making a synchronous request to the evalutor
# for a single specific evaluation context we make sure to copy
# as little memory as necessary.
# The struct holds the basic session information that we track
# and pass around. The notebook and evaluation state is kept
# within the process state.
defstruct [:id, :pid, :origin, :notebook_name, :file, :images_dir, :created_at]
use GenServer, restart: :temporary
alias Livebook.Session.{Data, FileGuard}
alias Livebook.{Utils, Notebook, Delta, Runtime, LiveMarkdown, FileSystem}
alias Livebook.Users.User
alias Livebook.Notebook.{Cell, Section}
# Public session info struct, mirroring the `defstruct` fields.
@type t :: %__MODULE__{
        id: id(),
        pid: pid(),
        origin: Livebook.ContentLoader.location() | nil,
        notebook_name: String.t(),
        file: FileSystem.File.t() | nil,
        images_dir: FileSystem.File.t(),
        created_at: DateTime.t()
      }
# Internal GenServer state. Note: `autosave_path` is also part of
# the state map (set from start options in `init_state/1`); it was
# previously missing from this typespec.
@type state :: %{
        session_id: id(),
        data: Data.t(),
        created_at: DateTime.t(),
        runtime_monitor_ref: reference() | nil,
        autosave_timer_ref: reference() | nil,
        autosave_path: String.t() | nil,
        save_task_pid: pid() | nil,
        saved_default_file: FileSystem.File.t() | nil
      }
@typedoc """
An id assigned to every running session process.
"""
# Supplied by the caller via the required `:id` start option.
@type id :: Utils.id()
## API
@doc """
Starts a session server process.

## Options

  * `:id` (**required**) - a unique session identifier

  * `:notebook` - the initial `Notebook` structure (e.g. imported from a file)

  * `:origin` - location from where the notebook was obtained, can be either
    `{:file, file}`, a remote `{:url, url}`, or `nil`

  * `:file` - the file to which the notebook should be saved

  * `:copy_images_from` - a directory file to copy notebook images from

  * `:images` - a map from image name to its binary content, an alternative
    to `:copy_images_from` when the images are in memory

  * `:autosave_path` - a local directory to save notebooks without a file into.
    Defaults to `Livebook.Config.autosave_path/0`
"""
@spec start_link(keyword()) :: {:ok, pid} | {:error, any()}
def start_link(opts) do
  GenServer.start_link(__MODULE__, opts)
end
@doc """
Fetches session information from the session server.
"""
# Fix: the spec previously referenced `Session.t()`, but there is no
# `Session` alias in this module - the intended type is the local `t()`.
@spec get_by_pid(pid()) :: t()
def get_by_pid(pid) do
  GenServer.call(pid, :describe_self)
end
@doc """
Registers a session client, so that the session is aware of it.

The client process is automatically unregistered when it terminates.

Returns the current session data, which the client can then
keep in sync with the server by subscribing to the `sessions:id` topic
and receiving operations to apply.
"""
@spec register_client(pid(), pid(), User.t()) :: Data.t()
def register_client(pid, client_pid, user) do
  GenServer.call(pid, {:register_client, client_pid, user})
end

@doc """
Returns data of the given session.
"""
@spec get_data(pid()) :: Data.t()
def get_data(pid) do
  GenServer.call(pid, :get_data)
end

@doc """
Returns the current notebook structure.
"""
@spec get_notebook(pid()) :: Notebook.t()
def get_notebook(pid) do
  GenServer.call(pid, :get_notebook)
end
@doc """
Fetches assets matching the given hash.

The assets are cached locally and fetched from the runtime
only once.

See `local_asset_path/2` for locating a specific asset.
"""
@spec fetch_assets(pid(), String.t()) :: :ok | {:error, String.t()}
def fetch_assets(pid, hash) do
  local_assets_path = local_assets_path(hash)

  if File.exists?(local_assets_path) do
    # Already cached on disk, nothing to do
    :ok
  else
    with {:ok, runtime, archive_path} <-
           GenServer.call(pid, {:get_runtime_and_archive_path, hash}) do
      fun = fn ->
        # Make sure the file hasn't been fetched by this point
        unless File.exists?(local_assets_path) do
          {:ok, archive_binary} = Runtime.read_file(runtime, archive_path)
          extract_archive!(archive_binary, local_assets_path)
        end
      end

      # Fetch assets in a separate process and avoid several
      # simultaneous fetches of the same assets
      case Livebook.UniqueTask.run(hash, fun) do
        :ok -> :ok
        :error -> {:error, "failed to fetch assets"}
      end
    end
  end
end
@doc """
Sends notebook attributes update to the server.
"""
@spec set_notebook_attributes(pid(), map()) :: :ok
def set_notebook_attributes(pid, attrs) do
  GenServer.cast(pid, {:set_notebook_attributes, self(), attrs})
end

@doc """
Sends section insertion request to the server.
"""
@spec insert_section(pid(), non_neg_integer()) :: :ok
def insert_section(pid, index) do
  GenServer.cast(pid, {:insert_section, self(), index})
end

@doc """
Sends section insertion request to the server.
"""
# Insertion is relative to the section identified by `section_id`;
# see the corresponding `Data` operation for exact semantics.
@spec insert_section_into(pid(), Section.id(), non_neg_integer()) :: :ok
def insert_section_into(pid, section_id, index) do
  GenServer.cast(pid, {:insert_section_into, self(), section_id, index})
end

@doc """
Sends parent update request to the server.
"""
@spec set_section_parent(pid(), Section.id(), Section.id()) :: :ok
def set_section_parent(pid, section_id, parent_id) do
  GenServer.cast(pid, {:set_section_parent, self(), section_id, parent_id})
end

@doc """
Sends parent update request to the server.
"""
@spec unset_section_parent(pid(), Section.id()) :: :ok
def unset_section_parent(pid, section_id) do
  GenServer.cast(pid, {:unset_section_parent, self(), section_id})
end

@doc """
Sends cell insertion request to the server.
"""
# The new cell id is generated server-side, so the operation
# broadcast to clients is reproducible.
@spec insert_cell(pid(), Section.id(), non_neg_integer(), Cell.type()) :: :ok
def insert_cell(pid, section_id, index, type) do
  GenServer.cast(pid, {:insert_cell, self(), section_id, index, type})
end

@doc """
Sends section deletion request to the server.
"""
@spec delete_section(pid(), Section.id(), boolean()) :: :ok
def delete_section(pid, section_id, delete_cells) do
  GenServer.cast(pid, {:delete_section, self(), section_id, delete_cells})
end

@doc """
Sends cell deletion request to the server.
"""
@spec delete_cell(pid(), Cell.id()) :: :ok
def delete_cell(pid, cell_id) do
  GenServer.cast(pid, {:delete_cell, self(), cell_id})
end

@doc """
Sends cell restoration request to the server.
"""
@spec restore_cell(pid(), Cell.id()) :: :ok
def restore_cell(pid, cell_id) do
  GenServer.cast(pid, {:restore_cell, self(), cell_id})
end

@doc """
Sends cell move request to the server.
"""
@spec move_cell(pid(), Cell.id(), integer()) :: :ok
def move_cell(pid, cell_id, offset) do
  GenServer.cast(pid, {:move_cell, self(), cell_id, offset})
end

@doc """
Sends section move request to the server.
"""
@spec move_section(pid(), Section.id(), integer()) :: :ok
def move_section(pid, section_id, offset) do
  GenServer.cast(pid, {:move_section, self(), section_id, offset})
end
@doc """
Sends cell evaluation request to the server.
"""
@spec queue_cell_evaluation(pid(), Cell.id()) :: :ok
def queue_cell_evaluation(pid, cell_id) do
  GenServer.cast(pid, {:queue_cell_evaluation, self(), cell_id})
end

@doc """
Sends section evaluation request to the server.
"""
@spec queue_section_evaluation(pid(), Section.id()) :: :ok
def queue_section_evaluation(pid, section_id) do
  GenServer.cast(pid, {:queue_section_evaluation, self(), section_id})
end

@doc """
Sends input bound cells evaluation request to the server.

Queues evaluation of all cells bound to the given input.
"""
@spec queue_bound_cells_evaluation(pid(), Data.input_id()) :: :ok
def queue_bound_cells_evaluation(pid, input_id) do
  GenServer.cast(pid, {:queue_bound_cells_evaluation, self(), input_id})
end

@doc """
Sends full evaluation request to the server.

All outdated (new/stale/changed) cells, as well as cells given
as `forced_cell_ids` are scheduled for evaluation.
"""
@spec queue_full_evaluation(pid(), list(Cell.id())) :: :ok
def queue_full_evaluation(pid, forced_cell_ids) do
  GenServer.cast(pid, {:queue_full_evaluation, self(), forced_cell_ids})
end

@doc """
Sends cell evaluation cancellation request to the server.
"""
@spec cancel_cell_evaluation(pid(), Cell.id()) :: :ok
def cancel_cell_evaluation(pid, cell_id) do
  GenServer.cast(pid, {:cancel_cell_evaluation, self(), cell_id})
end

@doc """
Sends erase outputs request to the server.
"""
@spec erase_outputs(pid()) :: :ok
def erase_outputs(pid) do
  GenServer.cast(pid, {:erase_outputs, self()})
end
@doc """
Sends notebook name update request to the server.
"""
@spec set_notebook_name(pid(), String.t()) :: :ok
def set_notebook_name(pid, name) do
  GenServer.cast(pid, {:set_notebook_name, self(), name})
end

@doc """
Sends section name update request to the server.
"""
@spec set_section_name(pid(), Section.id(), String.t()) :: :ok
def set_section_name(pid, section_id, name) do
  GenServer.cast(pid, {:set_section_name, self(), section_id, name})
end

@doc """
Sends a cell delta to apply to the server.
"""
@spec apply_cell_delta(pid(), Cell.id(), Delta.t(), Data.cell_revision()) :: :ok
def apply_cell_delta(pid, cell_id, delta, revision) do
  GenServer.cast(pid, {:apply_cell_delta, self(), cell_id, delta, revision})
end

@doc """
Informs at what revision the given client is.

This helps to remove old deltas that are no longer necessary.
"""
@spec report_cell_revision(pid(), Cell.id(), Data.cell_revision()) :: :ok
def report_cell_revision(pid, cell_id, revision) do
  GenServer.cast(pid, {:report_cell_revision, self(), cell_id, revision})
end

@doc """
Sends a cell attributes update to the server.
"""
@spec set_cell_attributes(pid(), Cell.id(), map()) :: :ok
def set_cell_attributes(pid, cell_id, attrs) do
  GenServer.cast(pid, {:set_cell_attributes, self(), cell_id, attrs})
end

@doc """
Sends an input value update to the server.
"""
@spec set_input_value(pid(), Data.input_id(), term()) :: :ok
def set_input_value(pid, input_id, value) do
  GenServer.cast(pid, {:set_input_value, self(), input_id, value})
end
@doc """
Connects to the given runtime.

Note that this results in initializing the corresponding remote node
with modules and processes required for evaluation.
"""
@spec connect_runtime(pid(), Runtime.t()) :: :ok
def connect_runtime(pid, runtime) do
  GenServer.cast(pid, {:connect_runtime, self(), runtime})
end

@doc """
Disconnects from the current runtime.

Note that this results in clearing the evaluation state.
"""
@spec disconnect_runtime(pid()) :: :ok
def disconnect_runtime(pid) do
  GenServer.cast(pid, {:disconnect_runtime, self()})
end

@doc """
Sends file location update request to the server.
"""
@spec set_file(pid(), FileSystem.File.t() | nil) :: :ok
def set_file(pid, file) do
  GenServer.cast(pid, {:set_file, self(), file})
end

@doc """
Sends save request to the server.

If there's a file set and the notebook changed since the last save,
it will be persisted to said file.

Note that notebooks are also persisted periodically, according to
the notebook's autosave interval setting.
"""
@spec save(pid()) :: :ok
def save(pid) do
  GenServer.cast(pid, :save)
end

@doc """
Synchronous version of `save/1`.
"""
@spec save_sync(pid()) :: :ok
def save_sync(pid) do
  GenServer.call(pid, :save_sync)
end

@doc """
Sends a close request to the server.

This results in saving the file and broadcasting
a :closed message to the session topic.
"""
@spec close(pid()) :: :ok
def close(pid) do
  GenServer.cast(pid, :close)
end
## Callbacks
@impl true
# Builds the initial state, optionally populates the images
# directory, then schedules the first autosave tick.
def init(opts) do
  images_source = opts[:copy_images_from]
  images = opts[:images]

  with {:ok, state} <- init_state(opts),
       :ok <- if(images_source, do: copy_images(state, images_source), else: :ok),
       :ok <- if(images, do: dump_images(state, images), else: :ok) do
    {:ok, schedule_autosave(state)}
  else
    {:error, error} ->
      {:stop, error}
  end
end
# Assembles the process state map from the start options.
defp init_state(opts) do
  id = Keyword.fetch!(opts, :id)

  case init_data(opts) do
    {:ok, data} ->
      {:ok,
       %{
         session_id: id,
         data: data,
         created_at: DateTime.utc_now(),
         runtime_monitor_ref: nil,
         autosave_timer_ref: nil,
         autosave_path: opts[:autosave_path],
         save_task_pid: nil,
         saved_default_file: nil
       }}

    {:error, _message} = error ->
      error
  end
end
# Builds the initial `Data` structure from the start options.
defp init_data(opts) do
  notebook = Keyword.get_lazy(opts, :notebook, &default_notebook/0)
  file = opts[:file]
  origin = opts[:origin]

  data = Data.new(notebook)
  data = %{data | origin: origin}

  if file do
    # Lock the file via FileGuard, so no other session can use
    # the same save location at the same time
    case FileGuard.lock(file, self()) do
      :ok ->
        {:ok, %{data | file: file}}

      {:error, :already_in_use} ->
        {:error, "the given file is already in use"}
    end
  else
    {:ok, data}
  end
end
# A fresh notebook with a single section containing one empty Elixir cell.
defp default_notebook() do
  cell = Cell.new(:elixir)
  section = %{Section.new() | cells: [cell]}
  %{Notebook.new() | sections: [section]}
end
# Schedules the next `:autosave` message according to the notebook's
# autosave interval, storing the timer ref (or `nil` when disabled).
defp schedule_autosave(state) do
  interval_s = state.data.notebook.autosave_interval_s
  timer_ref = if interval_s, do: Process.send_after(self(), :autosave, interval_s * 1000)
  %{state | autosave_timer_ref: timer_ref}
end
@impl true
def handle_call(:describe_self, _from, state) do
  {:reply, self_from_state(state), state}
end

def handle_call({:register_client, client_pid, user}, _from, state) do
  # Monitor the client so we get a DOWN message when it terminates
  Process.monitor(client_pid)

  state = handle_operation(state, {:client_join, client_pid, user})

  {:reply, state.data, state}
end

def handle_call(:get_data, _from, state) do
  {:reply, state.data, state}
end

def handle_call({:get_runtime_and_archive_path, hash}, _from, state) do
  assets_info = Notebook.find_asset_info(state.data.notebook, hash)
  runtime = state.data.runtime

  reply =
    cond do
      assets_info == nil ->
        {:error, "unknown hash"}

      runtime == nil ->
        {:error, "no runtime"}

      true ->
        {:ok, runtime, assets_info.archive_path}
    end

  {:reply, reply, state}
end

def handle_call(:get_notebook, _from, state) do
  {:reply, state.data.notebook, state}
end

def handle_call(:save_sync, _from, state) do
  {:reply, :ok, maybe_save_notebook_sync(state)}
end
@impl true
def handle_cast({:set_notebook_attributes, client_pid, attrs}, state) do
  operation = {:set_notebook_attributes, client_pid, attrs}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:insert_section, client_pid, index}, state) do
  # Include new id in the operation, so it's reproducible
  operation = {:insert_section, client_pid, index, Utils.random_id()}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:insert_section_into, client_pid, section_id, index}, state) do
  # Include new id in the operation, so it's reproducible
  operation = {:insert_section_into, client_pid, section_id, index, Utils.random_id()}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:set_section_parent, client_pid, section_id, parent_id}, state) do
  operation = {:set_section_parent, client_pid, section_id, parent_id}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:unset_section_parent, client_pid, section_id}, state) do
  operation = {:unset_section_parent, client_pid, section_id}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:insert_cell, client_pid, section_id, index, type}, state) do
  # Include new id in the operation, so it's reproducible
  operation = {:insert_cell, client_pid, section_id, index, type, Utils.random_id()}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:delete_section, client_pid, section_id, delete_cells}, state) do
  operation = {:delete_section, client_pid, section_id, delete_cells}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:delete_cell, client_pid, cell_id}, state) do
  operation = {:delete_cell, client_pid, cell_id}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:restore_cell, client_pid, cell_id}, state) do
  operation = {:restore_cell, client_pid, cell_id}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:move_cell, client_pid, cell_id, offset}, state) do
  operation = {:move_cell, client_pid, cell_id, offset}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:move_section, client_pid, section_id, offset}, state) do
  operation = {:move_section, client_pid, section_id, offset}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:queue_cell_evaluation, client_pid, cell_id}, state) do
  operation = {:queue_cells_evaluation, client_pid, [cell_id]}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:queue_section_evaluation, client_pid, section_id}, state) do
  case Notebook.fetch_section(state.data.notebook, section_id) do
    {:ok, section} ->
      # Only Elixir cells are evaluable
      cell_ids = for cell <- section.cells, is_struct(cell, Cell.Elixir), do: cell.id
      operation = {:queue_cells_evaluation, client_pid, cell_ids}
      {:noreply, handle_operation(state, operation)}

    :error ->
      # Unknown section, ignore the request
      {:noreply, state}
  end
end

def handle_cast({:queue_bound_cells_evaluation, client_pid, input_id}, state) do
  cell_ids =
    for {bound_cell, _} <- Data.bound_cells_with_section(state.data, input_id),
        do: bound_cell.id

  operation = {:queue_cells_evaluation, client_pid, cell_ids}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:queue_full_evaluation, client_pid, forced_cell_ids}, state) do
  cell_ids = Data.cell_ids_for_full_evaluation(state.data, forced_cell_ids)
  operation = {:queue_cells_evaluation, client_pid, cell_ids}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:cancel_cell_evaluation, client_pid, cell_id}, state) do
  operation = {:cancel_cell_evaluation, client_pid, cell_id}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:erase_outputs, client_pid}, state) do
  operation = {:erase_outputs, client_pid}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:set_notebook_name, client_pid, name}, state) do
  operation = {:set_notebook_name, client_pid, name}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:set_section_name, client_pid, section_id, name}, state) do
  operation = {:set_section_name, client_pid, section_id, name}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:apply_cell_delta, client_pid, cell_id, delta, revision}, state) do
  operation = {:apply_cell_delta, client_pid, cell_id, delta, revision}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:report_cell_revision, client_pid, cell_id, revision}, state) do
  operation = {:report_cell_revision, client_pid, cell_id, revision}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:set_cell_attributes, client_pid, cell_id, attrs}, state) do
  operation = {:set_cell_attributes, client_pid, cell_id, attrs}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:set_input_value, client_pid, input_id, value}, state) do
  operation = {:set_input_value, client_pid, input_id, value}
  {:noreply, handle_operation(state, operation)}
end

def handle_cast({:connect_runtime, client_pid, runtime}, state) do
  # Disconnect the previous runtime, if any, before connecting
  if state.data.runtime do
    Runtime.disconnect(state.data.runtime)
  end

  runtime_monitor_ref = Runtime.connect(runtime)

  {:noreply,
   %{state | runtime_monitor_ref: runtime_monitor_ref}
   |> handle_operation({:set_runtime, client_pid, runtime})}
end

def handle_cast({:disconnect_runtime, client_pid}, state) do
  # NOTE(review): `state.data.runtime` is not checked for nil here -
  # confirm that callers only send this when a runtime is connected
  Runtime.disconnect(state.data.runtime)

  {:noreply,
   %{state | runtime_monitor_ref: nil}
   |> handle_operation({:set_runtime, client_pid, nil})}
end

def handle_cast({:set_file, client_pid, file}, state) do
  # Lock the new file first; only on success release the old one
  if file do
    FileGuard.lock(file, self())
  else
    :ok
  end
  |> case do
    :ok ->
      if state.data.file do
        FileGuard.unlock(state.data.file)
      end

      {:noreply, handle_operation(state, {:set_file, client_pid, file})}

    {:error, :already_in_use} ->
      broadcast_error(state.session_id, "failed to set new file because it is already in use")
      {:noreply, state}
  end
end

def handle_cast(:save, state) do
  {:noreply, maybe_save_notebook_async(state)}
end

def handle_cast(:close, state) do
  # Persist synchronously before stopping, then tell subscribers
  maybe_save_notebook_sync(state)
  broadcast_message(state.session_id, :session_closed)

  {:stop, :normal, state}
end
@impl true
def handle_info({:DOWN, ref, :process, _, _}, %{runtime_monitor_ref: ref} = state) do
  # The monitored runtime node went down
  broadcast_info(state.session_id, "runtime node terminated unexpectedly")

  {:noreply,
   %{state | runtime_monitor_ref: nil}
   |> handle_operation({:set_runtime, self(), nil})}
end

def handle_info({:DOWN, _, :process, pid, _}, state) do
  # Other DOWN messages may come from registered clients
  state =
    if Map.has_key?(state.data.clients_map, pid) do
      handle_operation(state, {:client_leave, pid})
    else
      state
    end

  {:noreply, state}
end

def handle_info({:evaluation_output, cell_id, output}, state) do
  operation = {:add_cell_evaluation_output, self(), cell_id, output}
  {:noreply, handle_operation(state, operation)}
end

def handle_info({:evaluation_response, cell_id, response, metadata}, state) do
  operation = {:add_cell_evaluation_response, self(), cell_id, response, metadata}
  {:noreply, handle_operation(state, operation)}
end

def handle_info({:evaluation_input, cell_id, reply_to, input_id}, state) do
  # The runtime asks for an input value; look it up and record the
  # cell-input binding, replying `:error` if either lookup fails
  {reply, state} =
    with {:ok, cell, _section} <- Notebook.fetch_cell_and_section(state.data.notebook, cell_id),
         {:ok, value} <- Map.fetch(state.data.input_values, input_id) do
      state = handle_operation(state, {:bind_input, self(), cell.id, input_id})
      {{:ok, value}, state}
    else
      _ -> {:error, state}
    end

  send(reply_to, {:evaluation_input_reply, reply})

  {:noreply, state}
end

def handle_info({:runtime_broadcast, topic, subtopic, message}, state) do
  # Relay runtime broadcasts onto this session's PubSub topic
  full_topic = runtime_messages_topic(state.session_id, topic, subtopic)
  Phoenix.PubSub.broadcast(Livebook.PubSub, full_topic, message)
  {:noreply, state}
end

def handle_info({:container_down, container_ref, message}, state) do
  broadcast_error(state.session_id, "evaluation process terminated - #{message}")

  operation =
    case container_ref do
      :main_flow -> {:reflect_main_evaluation_failure, self()}
      section_id -> {:reflect_evaluation_failure, self(), section_id}
    end

  {:noreply, handle_operation(state, operation)}
end

def handle_info(:autosave, state) do
  # Save (if needed) and schedule the next autosave tick
  {:noreply, state |> maybe_save_notebook_async() |> schedule_autosave()}
end

def handle_info({:user_change, user}, state) do
  operation = {:update_user, self(), user}
  {:noreply, handle_operation(state, operation)}
end

def handle_info({:save_finished, pid, result, file, default?}, %{save_task_pid: pid} = state) do
  # Only matches the currently running save task (pinned via pattern)
  state = %{state | save_task_pid: nil}
  {:noreply, handle_save_finished(state, result, file, default?)}
end

def handle_info(_message, state), do: {:noreply, state}
@impl true
def terminate(_reason, state) do
  # Remove the session's temporary directory (holds images for
  # notebooks without a file)
  cleanup_tmp_dir(state.session_id)
  :ok
end
# ---
# Builds the public session info struct from the process state.
defp self_from_state(state) do
  data = state.data

  %__MODULE__{
    id: state.session_id,
    pid: self(),
    origin: data.origin,
    notebook_name: data.notebook.name,
    file: data.file,
    images_dir: images_dir_from_state(state),
    created_at: state.created_at
  }
end
# Resolves the images directory for the current state. When the
# notebook has no file, images live in the session's temporary dir.
defp images_dir_from_state(%{data: %{file: nil}, session_id: id}) do
  tmp_dir = session_tmp_dir(id)
  FileSystem.File.resolve(tmp_dir, "images/")
end

defp images_dir_from_state(%{data: %{file: file}}) do
  images_dir_for_notebook(file)
end
@doc """
Returns images directory corresponding to the given notebook file.
"""
@spec images_dir_for_notebook(FileSystem.File.t()) :: FileSystem.File.t()
def images_dir_for_notebook(file) do
  # "images/" sits next to the notebook file
  containing_dir = FileSystem.File.containing_dir(file)
  FileSystem.File.resolve(containing_dir, "images/")
end
# Temporary directory dedicated to this session.
defp session_tmp_dir(session_id) do
  path =
    livebook_tmp_path()
    |> Path.join("sessions/#{session_id}")
    |> FileSystem.Utils.ensure_dir_path()

  FileSystem.File.local(path)
end
# Removes the session's temporary directory, if any.
defp cleanup_tmp_dir(session_id) do
  session_id
  |> session_tmp_dir()
  |> FileSystem.File.remove()
end
# Local cache directory for assets with the given hash.
defp local_assets_path(hash) do
  Path.join([livebook_tmp_path(), "assets", encode_path_component(hash)])
end
@doc """
Returns a local path to asset matching the given
hash and path.

The file is not guaranteed to exist. See `fetch_assets/2`
for fetching assets through a particular session.

The path is expected to be a simple relative path
within the assets directory, otherwise an error is
returned.
"""
@spec local_asset_path(String.t(), String.t()) :: {:ok, String.t()} | :error
def local_asset_path(hash, asset_path) do
  assets_path = local_assets_path(hash)
  local_asset_path = Path.expand(asset_path, assets_path)

  # Expanding and then checking the prefix guards against path
  # traversal (e.g. "../..") escaping the assets directory
  if String.starts_with?(local_asset_path, assets_path <> "/") do
    {:ok, local_asset_path}
  else
    :error
  end
end
# Replaces path-meaningful characters, so the value is safe
# to use as a single directory or file name.
defp encode_path_component(component) do
  component
  |> String.replace(".", "_")
  |> String.replace("/", "_")
  |> String.replace("\\", "_")
  |> String.replace(":", "_")
end
# Root of all Livebook-managed temporary files.
defp livebook_tmp_path() do
  System.tmp_dir!()
  |> Path.expand()
  |> Path.join("livebook")
end
# Copies images from the source directory into the session images
# directory. Succeeds trivially when the source does not exist.
defp copy_images(state, source) do
  images_dir = images_dir_from_state(state)

  with {:ok, source_exists?} <- FileSystem.File.exists?(source) do
    if source_exists? do
      FileSystem.File.copy(source, images_dir)
    else
      :ok
    end
  end
end
# Moves images from the source directory into the session images
# directory, preferring a cheap rename when the destination is empty.
defp move_images(state, source) do
  images_dir = images_dir_from_state(state)

  with {:ok, source_exists?} <- FileSystem.File.exists?(source) do
    if source_exists? do
      with {:ok, destination_exists?} <- FileSystem.File.exists?(images_dir) do
        if not destination_exists? do
          # If the directory doesn't exist, we can just change
          # the directory name, which is more efficient if
          # available in the given file system
          FileSystem.File.rename(source, images_dir)
        else
          # If the directory exists, we use copy to place
          # the images there
          with :ok <- FileSystem.File.copy(source, images_dir) do
            FileSystem.File.remove(source)
          end
        end
      end
    else
      :ok
    end
  end
end
# Writes in-memory images (name => binary) into the session images
# directory, stopping at the first write error.
defp dump_images(state, images) do
  images_dir = images_dir_from_state(state)

  Enum.reduce_while(images, :ok, fn {filename, content}, :ok ->
    file = FileSystem.File.resolve(images_dir, filename)

    case FileSystem.File.write(file, content) do
      :ok -> {:cont, :ok}
      error -> {:halt, error}
    end
  end)
end
# Given any operation on `Livebook.Session.Data`, the process
# does the following:
#
#   * broadcasts the operation to all clients immediately,
#     so that they can update their local `Livebook.Session.Data`
#
#   * applies the operation to own local `Livebook.Session.Data`
#
#   * if necessary, performs the relevant actions (e.g. starts cell evaluation),
#     to reflect the new `Livebook.Session.Data`
#
# Note: the operation is broadcast even if it turns out invalid,
# in which case the local state is simply left unchanged.
defp handle_operation(state, operation) do
  broadcast_operation(state.session_id, operation)

  case Data.apply_operation(state.data, operation) do
    {:ok, new_data, actions} ->
      %{state | data: new_data}
      |> after_operation(state, operation)
      |> handle_actions(actions)

    :error ->
      state
  end
end
# Hooks that run right after an operation was successfully applied,
# with access to both the new and the previous state.

defp after_operation(state, _prev_state, {:set_notebook_name, _pid, _name}) do
  notify_update(state)
  state
end

defp after_operation(state, prev_state, {:set_file, _pid, _file}) do
  prev_images_dir = images_dir_from_state(prev_state)

  # If the previous location was a proper file, leave its images in
  # place and copy them; otherwise (temporary dir) move them over
  if prev_state.data.file do
    copy_images(state, prev_images_dir)
  else
    move_images(state, prev_images_dir)
  end
  |> case do
    :ok ->
      :ok

    {:error, message} ->
      broadcast_error(state.session_id, "failed to copy images - #{message}")
  end

  notify_update(state)

  state
end

defp after_operation(
       state,
       _prev_state,
       {:set_notebook_attributes, _client_pid, %{autosave_interval_s: _}}
     ) do
  # The autosave interval changed, so reset the pending timer
  # and schedule a new one
  if ref = state.autosave_timer_ref do
    Process.cancel_timer(ref)
  end

  schedule_autosave(state)
end

defp after_operation(state, prev_state, {:client_join, _client_pid, user}) do
  # Subscribe to user updates when their first client joins
  unless Map.has_key?(prev_state.data.users_map, user.id) do
    Phoenix.PubSub.subscribe(Livebook.PubSub, "users:#{user.id}")
  end

  state
end

defp after_operation(state, prev_state, {:client_leave, client_pid}) do
  # Unsubscribe from user updates once their last client leaves
  user_id = prev_state.data.clients_map[client_pid]

  unless Map.has_key?(state.data.users_map, user_id) do
    Phoenix.PubSub.unsubscribe(Livebook.PubSub, "users:#{user_id}")
  end

  state
end

defp after_operation(state, _prev_state, {:delete_cell, _client_pid, cell_id}) do
  entry = Enum.find(state.data.bin_entries, fn entry -> entry.cell.id == cell_id end)
  # The session LV drops cell's source, so we send them
  # the complete bin entry to override
  broadcast_message(state.session_id, {:hydrate_bin_entries, [entry]})

  state
end

defp after_operation(state, prev_state, {:delete_section, _client_pid, section_id, true}) do
  # When cells are deleted together with the section, hydrate all of
  # their bin entries for the clients (see the :delete_cell clause)
  {:ok, section} = Notebook.fetch_section(prev_state.data.notebook, section_id)
  cell_ids = Enum.map(section.cells, & &1.id)
  entries = Enum.filter(state.data.bin_entries, fn entry -> entry.cell.id in cell_ids end)
  broadcast_message(state.session_id, {:hydrate_bin_entries, entries})

  state
end

defp after_operation(state, _prev_state, _operation), do: state
# Applies each action returned by `Data.apply_operation/2` in order.
defp handle_actions(state, actions) do
  Enum.reduce(actions, state, fn action, acc -> handle_action(acc, action) end)
end
# Performs a single action requested by the data module.

defp handle_action(state, :start_runtime) do
  # Start and connect the configured default runtime
  {runtime_module, args} = Livebook.Config.default_runtime()

  case apply(runtime_module, :init, args) do
    {:ok, runtime} ->
      runtime_monitor_ref = Runtime.connect(runtime)

      %{state | runtime_monitor_ref: runtime_monitor_ref}
      |> handle_operation({:set_runtime, self(), runtime})

    {:error, error} ->
      broadcast_error(state.session_id, "failed to setup runtime - #{error}")
      handle_operation(state, {:set_runtime, self(), nil})
  end
end

defp handle_action(state, {:start_evaluation, cell, section}) do
  # Pass the notebook file path (suffixed with "#cell") as the
  # evaluation's :file option
  path =
    case state.data.file do
      nil -> ""
      file -> file.path
    end

  file = path <> "#cell"
  opts = [file: file]

  locator = {container_ref_for_section(section), cell.id}
  prev_locator = find_prev_locator(state.data.notebook, cell, section)
  Runtime.evaluate_code(state.data.runtime, cell.source, locator, prev_locator, opts)

  # md5 digest of the evaluated source (presumably used by `Data`
  # to detect source changes - see the :evaluation_started operation)
  evaluation_digest = :erlang.md5(cell.source)
  handle_operation(state, {:evaluation_started, self(), cell.id, evaluation_digest})
end

defp handle_action(state, {:stop_evaluation, section}) do
  if state.data.runtime do
    Runtime.drop_container(state.data.runtime, container_ref_for_section(section))
  end

  state
end

defp handle_action(state, {:forget_evaluation, cell, section}) do
  if state.data.runtime do
    Runtime.forget_evaluation(state.data.runtime, {container_ref_for_section(section), cell.id})
  end

  state
end

defp handle_action(state, _action), do: state
# Convenience wrappers for publishing on the session's PubSub topic.

defp broadcast_operation(session_id, operation) do
  broadcast_message(session_id, {:operation, operation})
end

defp broadcast_error(session_id, error) do
  broadcast_message(session_id, {:error, error})
end

defp broadcast_info(session_id, info) do
  broadcast_message(session_id, {:info, info})
end

defp broadcast_message(session_id, message) do
  Phoenix.PubSub.broadcast(Livebook.PubSub, "sessions:#{session_id}", message)
end
# Pushes the latest session info to the sessions tracker and to all
# subscribers of this session's topic.
defp notify_update(state) do
  session = self_from_state(state)
  Livebook.Sessions.update_session(session)
  broadcast_message(state.session_id, {:session_updated, session})
end
# Exports and writes the notebook in a separate task process, unless
# there is nothing to save or a save is already in progress. The task
# reports back with a `:save_finished` message (see `handle_info/2`).
defp maybe_save_notebook_async(state) do
  {file, default?} = notebook_autosave_file(state)

  if file && should_save_notebook?(state) do
    # Bind exactly what the task closure needs, so it doesn't hold `state`
    session_pid = self()
    notebook = state.data.notebook

    # Fix: the original rebound `pid` (session pid, then task pid),
    # which was easy to misread; use distinct names instead
    {:ok, task_pid} =
      Task.start(fn ->
        content = LiveMarkdown.Export.notebook_to_markdown(notebook)
        result = FileSystem.File.write(file, content)
        send(session_pid, {:save_finished, self(), result, file, default?})
      end)

    %{state | save_task_pid: task_pid}
  else
    state
  end
end
# Synchronous counterpart of `maybe_save_notebook_async/1`: exports
# and writes the notebook in the current process.
defp maybe_save_notebook_sync(state) do
  {file, default?} = notebook_autosave_file(state)

  if file && should_save_notebook?(state) do
    content = LiveMarkdown.Export.notebook_to_markdown(state.data.notebook)
    result = FileSystem.File.write(file, content)
    handle_save_finished(state, result, file, default?)
  else
    state
  end
end
# Saves only when there are unsaved changes and no save task is running.
defp should_save_notebook?(state) do
  state.data.dirty and is_nil(state.save_task_pid)
end
# Returns `{file, default?}` - the save target and whether it is the
# default (autosave) location rather than a user-chosen file.
defp notebook_autosave_file(state) do
  case state.data.file do
    nil -> {default_notebook_file(state), true}
    file -> {file, false}
  end
end
# Resolves the autosave file used when the notebook has no file of
# its own. Returns `nil` when no autosave path is configured.
defp default_notebook_file(state) do
  if path = state.autosave_path || Livebook.Config.autosave_path() do
    dir = path |> FileSystem.Utils.ensure_dir_path() |> FileSystem.File.local()
    notebook_rel_path = default_notebook_path(state)
    FileSystem.File.resolve(dir, notebook_rel_path)
  end
end
# Builds a relative autosave path of the form
# "{date}/{time}_{notebook_title}_{suffix}.livemd".
defp default_notebook_path(state) do
  notebook_title =
    state.data.notebook.name
    |> String.downcase()
    |> String.replace(~r/\s+/, "_")
    |> String.replace(~r/[^\w]/, "")

  # We want a random, but deterministic part; the trailing session id
  # characters are random already, so we reuse them
  suffix = String.slice(state.session_id, -4..-1)

  [date_part, time_part, _fraction] =
    state.created_at
    |> DateTime.to_iso8601()
    |> String.replace(["-", ":"], "_")
    |> String.split(["T", "."])

  "#{date_part}/#{time_part}_#{notebook_title}_#{suffix}.livemd"
end
# Updates state after a save attempt. For default (autosave) files we
# remember the last saved file and remove the previous one when the
# target path changed (the default path depends on the notebook name).
defp handle_save_finished(state, result, file, default?) do
  state =
    if default? do
      if state.saved_default_file && state.saved_default_file != file do
        FileSystem.File.remove(state.saved_default_file)
      end

      %{state | saved_default_file: file}
    else
      state
    end

  case result do
    :ok ->
      handle_operation(state, {:mark_as_not_dirty, self()})

    {:error, message} ->
      broadcast_error(state.session_id, "failed to save notebook - #{message}")
      state
  end
end
# Extracts a gzip-compressed tar archive (given as a binary) into
# the given directory. Raises (via the match) on extraction failure.
defp extract_archive!(binary, path) do
  :ok = :erl_tar.extract({:binary, binary}, [:compressed, {:cwd, path}])
end
@doc """
Subscribes the caller to runtime messages under the given topic.

Messages are delivered via `Phoenix.PubSub`.
"""
@spec subscribe_to_runtime_events(id(), String.t(), String.t()) :: :ok | {:error, term()}
def subscribe_to_runtime_events(session_id, topic, subtopic) do
  Phoenix.PubSub.subscribe(Livebook.PubSub, runtime_messages_topic(session_id, topic, subtopic))
end

@doc """
Unsubscribes the caller from runtime messages subscribed earlier
with `subscribe_to_runtime_events/3`.
"""
@spec unsubscribe_from_runtime_events(id(), String.t(), String.t()) :: :ok | {:error, term()}
def unsubscribe_from_runtime_events(session_id, topic, subtopic) do
  Phoenix.PubSub.unsubscribe(
    Livebook.PubSub,
    runtime_messages_topic(session_id, topic, subtopic)
  )
end
defp runtime_messages_topic(session_id, topic, subtopic) do
"sessions:#{session_id}:runtime_messages:#{topic}:#{subtopic}"
end
  @doc """
  Determines locator of the evaluation that the given
  cell depends on.
  """
  @spec find_prev_locator(Notebook.t(), Cell.t(), Section.t()) :: Runtime.locator()
  def find_prev_locator(notebook, cell, section) do
    # Fallback when no parent Elixir cell exists: the section's
    # container with a nil cell id.
    default = {container_ref_for_section(section), nil}
    notebook
    |> Notebook.parent_cells_with_section(cell.id)
    |> Enum.find_value(default, fn {cell, section} ->
      # `is_struct/2` yields false for non-Elixir cells, which
      # `Enum.find_value/3` treats as "keep searching"; for the nearest
      # Elixir cell the `&&` short-circuit produces the locator tuple.
      is_struct(cell, Cell.Elixir) && {container_ref_for_section(section), cell.id}
    end)
  end
  # Sections without a parent evaluate in the main flow container;
  # branching sections use their own id as the container reference.
  defp container_ref_for_section(%{parent_id: nil}), do: :main_flow
  defp container_ref_for_section(section), do: section.id
end
| 30.837264 | 98 | 0.678751 |
088573657dd31515766036900b587638a2156e96 | 974 | exs | Elixir | apps/hard_hat/mix.exs | gullintanni/gullintanni | 63c58b7ea438a4c6885a13842d8e33d3b1273ced | [
"0BSD"
] | 15 | 2016-08-09T21:27:54.000Z | 2020-12-03T11:21:10.000Z | apps/hard_hat/mix.exs | gullintanni/gullintanni | 63c58b7ea438a4c6885a13842d8e33d3b1273ced | [
"0BSD"
] | 15 | 2016-08-04T21:11:05.000Z | 2017-08-02T17:46:17.000Z | apps/hard_hat/mix.exs | gullintanni/gullintanni | 63c58b7ea438a4c6885a13842d8e33d3b1273ced | [
"0BSD"
] | null | null | null | defmodule HardHat.Mixfile do
use Mix.Project
def project() do
[
app: :hard_hat,
version: "0.1.0",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.3",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps(),
# Docs
name: "Hard Hat",
docs: [
main: "readme",
extras: ["README.md": [title: "README"]],
],
# Tests
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [coveralls: :test],
]
end
def application() do
[applications: [:httpoison, :logger]]
end
defp deps() do
[
{:httpoison, "~> 0.10"},
{:poison, "~> 3.0"},
{:credo, "~> 0.5", only: :dev},
{:dialyxir, "~> 0.4", only: :dev},
{:ex_doc, "~> 0.14", only: :dev},
{:excoveralls, "~> 0.5", only: :test},
]
end
end
| 20.723404 | 49 | 0.488706 |
0885bf40629c9e5c9a0c63a9ac25d810d9a88592 | 1,881 | exs | Elixir | mix.exs | membraneframework/membrane-element-lame | fb8db6170ed8c4cb95432e941d707a1e87175780 | [
"Apache-2.0"
] | 2 | 2018-07-27T14:08:58.000Z | 2019-03-01T10:19:37.000Z | mix.exs | membraneframework/membrane-element-lame | fb8db6170ed8c4cb95432e941d707a1e87175780 | [
"Apache-2.0"
] | 9 | 2018-10-15T07:58:53.000Z | 2020-10-22T13:51:43.000Z | mix.exs | membraneframework/membrane-element-lame | fb8db6170ed8c4cb95432e941d707a1e87175780 | [
"Apache-2.0"
] | 1 | 2019-02-06T10:28:42.000Z | 2019-02-06T10:28:42.000Z | defmodule Membrane.MP3.Lame.Mixfile do
use Mix.Project
@version "0.14.0"
@github_url "https://github.com/membraneframework/membrane_mp3_lame_plugin"
def project do
[
app: :membrane_mp3_lame_plugin,
version: @version,
elixir: "~> 1.9",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:unifex, :bundlex] ++ Mix.compilers(),
deps: deps(),
description: "Membrane MP3 encoder based on Lame",
package: package(),
name: "Membrane MP3 Lame Plugin",
source_url: @github_url,
homepage_url: "https://membraneframework.org",
docs: docs(),
preferred_cli_env: [espec: :test, format: :test]
]
end
def application do
[
extra_applications: []
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_env), do: ["lib"]
defp docs do
[
main: "readme",
extras: ["README.md", "LICENSE"],
source_ref: "v#{@version}",
nest_modules_by_prefix: [Membrane.MP3.Lame]
]
end
defp package do
[
maintainers: ["Membrane Team"],
licenses: ["Apache 2.0"],
files: ["c_src", "lib", "mix.exs", "README*", "LICENSE*", ".formatter.exs", "bundlex.exs"],
links: %{
"GitHub" => @github_url,
"Membrane Framework Homepage" => "https://membraneframework.org"
}
]
end
defp deps do
[
{:membrane_core, "~> 0.10.0"},
{:membrane_raw_audio_format, "~> 0.9.0"},
{:membrane_caps_audio_mpeg, "~> 0.2.0"},
{:membrane_common_c, "~> 0.13.0"},
{:bunch, "~> 1.0"},
{:unifex, "~> 1.0"},
{:ex_doc, "~> 0.28", only: :dev, runtime: false},
{:espec, "~> 1.7", only: [:dev, :test]},
{:membrane_file_plugin, "~> 0.12.0", only: :test},
{:credo, "~> 1.6.0", runtime: false},
{:dialyxir, ">= 0.0.0", runtime: false}
]
end
end
| 26.492958 | 97 | 0.565657 |
0885c9f284681ba6667e352d50f4814184e48c7b | 499 | exs | Elixir | test/rfx_cli/arg_test.exs | andyl/rfxi | 9007c75693d643555c45a20e9634dd4b3867deba | [
"MIT"
] | 1 | 2021-08-10T14:46:10.000Z | 2021-08-10T14:46:10.000Z | test/rfx_cli/arg_test.exs | andyl/rfxi | 9007c75693d643555c45a20e9634dd4b3867deba | [
"MIT"
] | 2 | 2021-06-22T14:12:37.000Z | 2021-06-28T05:06:23.000Z | test/rfx_cli/arg_test.exs | andyl/rfxi | 9007c75693d643555c45a20e9634dd4b3867deba | [
"MIT"
] | null | null | null | defmodule RfxCli.ArgTest do
use ExUnit.Case
alias RfxCli.Arg
alias RfxCli.State
describe "#gen_token" do
test "generates output" do
assert Arg.gen_token(:ok)
end
end
describe "#build" do
test "basic output" do
result = %{argv: "--help"} |> State.new() |> Arg.build()
assert result
assert result[:name]
assert result[:flags]
assert result[:version]
assert result[:description]
assert result[:subcommands]
end
end
end
| 19.192308 | 62 | 0.629259 |
0885e413084c15af0fe1fdfbfbc7c6ce9e468fe0 | 6,097 | exs | Elixir | lib/elixir/test/elixir/record_test.exs | Nicd/elixir | e62ef92a4be1b562033d35b2d822cc9d6c661077 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/record_test.exs | Nicd/elixir | e62ef92a4be1b562033d35b2d822cc9d6c661077 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/record_test.exs | Nicd/elixir | e62ef92a4be1b562033d35b2d822cc9d6c661077 | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
defrecord RecordTest.FileInfo,
Record.extract(:file_info, from_lib: "kernel/include/file.hrl")
defrecord RecordTest.SomeRecord, a: 0, b: 1
defrecord RecordTest.WithNoField, []
## Record import
defmodule RecordTest.FileInfo.Helper do
Record.import RecordTest.FileInfo, as: :file_info
def new do
file_info
end
def size(file_info(size: size)), do: size
end
## Dynamic names and overridable
name = RecordTest.DynamicName
defrecord name, a: 0, b: 1 do
def get_a(RecordTest.DynamicName[a: a]) do
a
end
defoverridable [update_b: 2]
def update_b(_, _) do
:not_optimizable
end
end
defmodule RecordTest.DynamicOpts do
@a [foo: 1..30]
defrecord State, (lc {name, _interval} inlist @a, do: {name, nil})
end
## With types
defrecord RecordTest.WithTypeOverriden, a: 0, b: 1 do
@type t :: __MODULE__[a: integer, b: any]
end
defrecord RecordTest.WithRecordType, a: 0, b: 1 do
record_type a: non_pos_integer
record_type a: integer
end
defmodule RecordTest.Macros do
  # Private record with default field values; usable only inside this module.
  defrecordp :_user, name: "José", age: 25

  # Macro that injects functions referencing a nested record both by its
  # full name and by alias, to check alias resolution inside quoted code.
  defmacro gen do
    quote do
      alias RecordTest.Macros.Nested

      def this_works, do: RecordTest.Macros.Nested[]
      def this_should_too, do: Nested[]
    end
  end

  defrecord Nested do
    def nested_record_alias?(Nested[]) do
      true
    end

    defrecord NestedInNested, it_compiles: true
  end

  # Ensure there is no conflict in a nested module
  # named record.
  defrecord Record, [a: 1, b: 2]

  def new() do
    _user()
  end

  def new(name, age) do
    _user(name: name, age: age)
  end

  # Extracts :name by pattern matching on the private record.
  def name(_user(name: name)) do
    name
  end

  # Returns a copy of the record with " bar" appended to the name.
  def add_bar_to_name(_user(name: name) = user) do
    _user(user, name: name <> " bar")
  end

  def age(user) do
    _user(user, :age)
  end

  # With no extra arguments the record macro converts to a keyword list.
  def to_keywords(user) do
    _user(user)
  end

  # Passing a list of keys returns the corresponding values, in order.
  def name_and_age(user) do
    _user(user, [:name, :age])
  end

  def age_and_name(user) do
    _user(user, [:age, :name])
  end
end
defmodule RecordTest do
  use ExUnit.Case, async: true

  # Check the access from the generated macro works
  # as expected. If it compiles, we are good to go.
  require RecordTest.Macros
  RecordTest.Macros.gen

  test :record_access_with_nil_keyword do
    record = RecordTest.DynamicName.new(a: nil)
    record_access = RecordTest.DynamicName[a: nil]
    assert record == record_access
  end

  test :record_constructor_with_dict do
    record = RecordTest.FileInfo.new(type: :regular)
    assert record.type == :regular
    # Fields not given to the constructor keep Record.extract's default.
    assert record.access == :undefined
  end

  test :record_accessors do
    record = RecordTest.FileInfo.new(file_info)
    assert record.type == :regular
    assert record.access == :read_write

    # Setters return an updated copy; the original is unchanged.
    new_record = record.access :read
    assert new_record.access == :read
  end

  test :dynamic_record_name do
    record = RecordTest.DynamicName.new
    assert record.a == 0
    assert record.b == 1
  end

  test :dynamic_update do
    record = RecordTest.DynamicName.new
    # Old-style implicit fn: `10 + &1` is a partial application.
    assert record.update_a(10 + &1).a == 10
  end

  test :is_record do
    assert is_record(RecordTest.FileInfo.new, RecordTest.FileInfo)
    assert is_record(RecordTest.WithNoField.new)
    refute is_record(empty_tuple)
    refute is_record(a_list)
    refute is_record(empty_tuple, RecordTest.FileInfo)
    refute is_record(a_tuple, RecordTest.FileInfo)
    refute is_record(a_list, RecordTest.FileInfo)
    refute is_record(RecordTest.FileInfo.new, List)
  end

  test :__index__ do
    # __index__/1 maps a field name to its tuple position (nil if absent).
    record = RecordTest.DynamicName.new(a: "a", b: "b")
    assert elem(record, record.__index__(:a)) == "a"
    assert elem(record, record.__index__(:b)) == "b"
    assert record.__index__(:c) == nil

    record = RecordTest.FileInfo.new
    assert RecordTest.FileInfo.__index__(:atime) == record.__index__(:atime)
  end

  test :to_keywords do
    record = RecordTest.DynamicName.new(a: "a", b: "b")
    assert record.to_keywords[:a] == "a"
    assert record.to_keywords[:b] == "b"
  end

  test :underscore_record_syntax do
    # `_:` sets/matches every field at once.
    record = RecordTest.DynamicName[_: "a"]
    assert RecordTest.DynamicName[a: "a", b: "a"] == record
    assert RecordTest.DynamicName[_: _] = RecordTest.DynamicName[_: "x"]
    assert { :badmatch, RecordTest.DynamicName[a: "y", b: "y"] } =
           catch_error(RecordTest.DynamicName[_: "x"] = RecordTest.DynamicName[_: "y"])
  end

  test :access_protocol_on_being_defined_record do
    assert RecordTest.DynamicName.new(a: "a").get_a == "a"
  end

  test :record_macros do
    record = RecordTest.Macros.new
    assert record.name == "José"

    record = RecordTest.Macros.new("Foo", 25)
    assert record.name == "Foo"

    record = record.add_bar_to_name
    assert record.name == "Foo bar"
    assert record.age == 25

    assert record.to_keywords == [name: record.name, age: record.age]

    assert record.name_and_age == [record.name, record.age]
    assert record.age_and_name == [record.age, record.name]
  end

  test :record_update do
    record = RecordTest.SomeRecord.new
    assert RecordTest.SomeRecord.a(record.update(a: 2, b: 3)) == 2
    assert RecordTest.SomeRecord.b(record.update(a: 2, b: 3)) == 3
    assert RecordTest.SomeRecord.a(record.update(a: 2)) == 2
    assert RecordTest.SomeRecord.b(record.update(b: 2)) == 2
  end

  test :optimizable do
    # Accessors still generated by the compiler are listed as optimizable;
    # update_b/2 was overridden in DynamicName, so it must not be.
    assert { :b, 1 } in RecordTest.SomeRecord.__record__(:optimizable)
    assert { :b, 2 } in RecordTest.SomeRecord.__record__(:optimizable)
    assert { :update_b, 2 } in RecordTest.SomeRecord.__record__(:optimizable)
    refute { :update_b, 2 } in RecordTest.DynamicName.__record__(:optimizable)
  end

  test :result do
    assert { :module, _, _, "result"} = (defrecord WithResult, foo: :bar do
      "result"
    end)
  end

  test :import do
    assert RecordTest.FileInfo.Helper.new == RecordTest.FileInfo.new
    assert RecordTest.FileInfo.Helper.size(RecordTest.FileInfo.new(size: 100)) == 100
  end

  # Reads this file's own file_info record as a test fixture.
  defp file_info do
    { :ok, file_info } = :file.read_file_info(__FILE__)
    file_info
  end

  defp empty_tuple, do: {}
  defp a_tuple, do: { :foo, :bar, :baz }
  defp a_list,  do: [ :foo, :bar, :baz ]
end
| 25.51046 | 85 | 0.689683 |
0885ffe3e99b883b5ed500d6d1b340b84a9c1183 | 717 | exs | Elixir | day_03/exercise1.exs | dams/adventofcode | dac9638c6fe3c99b726de8899e9baedf2efbb15a | [
"Artistic-2.0"
] | null | null | null | day_03/exercise1.exs | dams/adventofcode | dac9638c6fe3c99b726de8899e9baedf2efbb15a | [
"Artistic-2.0"
] | null | null | null | day_03/exercise1.exs | dams/adventofcode | dac9638c6fe3c99b726de8899e9baedf2efbb15a | [
"Artistic-2.0"
] | null | null | null | defmodule Exercise1 do
def parse(input) do
parse(input, 0, 0, %{ "0x0" => 1 } )
end
def parse(">" <> rest, x, y, dict) do
parse(rest, x+1, y, Dict.put_new(dict, gen_key(x+1, y), 1))
end
def parse("<" <> rest, x, y, dict) do
parse(rest, x-1, y, Dict.put_new(dict, gen_key(x-1, y), 1))
end
def parse("^" <> rest, x, y, dict) do
parse(rest, x, y-1, Dict.put_new(dict, gen_key(x, y-1), 1))
end
def parse("v" <> rest, x, y, dict) do
parse(rest, x, y+1, Dict.put_new(dict, gen_key(x, y+1), 1))
end
def parse("", _x, _y, dict) do
dict |> Dict.keys |> Enum.count
end
def gen_key(x,y) do
to_string(x) <> "x" <> to_string(y)
end
end
# Script entry point: read the puzzle input from the "input" file in the
# current directory and print the number of distinct houses visited.
input = File.read! "input"
IO.puts Exercise1.parse(input)
| 21.727273 | 61 | 0.592748 |
08860c566d755ab729bca657f3f1de33afead099 | 1,464 | ex | Elixir | architect/lib/architect_web/context.ex | VJftw/velocity | 8335c39c510dbde1446e6cde03eebb450339d212 | [
"Apache-2.0"
] | 3 | 2017-12-09T21:05:54.000Z | 2019-08-06T08:13:34.000Z | architect/lib/architect_web/context.ex | VJftw/velocity | 8335c39c510dbde1446e6cde03eebb450339d212 | [
"Apache-2.0"
] | 63 | 2017-09-09T15:44:24.000Z | 2022-03-03T22:16:24.000Z | architect/lib/architect_web/context.ex | VJftw/velocity | 8335c39c510dbde1446e6cde03eebb450339d212 | [
"Apache-2.0"
] | 5 | 2017-09-14T00:17:22.000Z | 2019-11-27T14:43:45.000Z | defmodule ArchitectWeb.Context do
@behaviour Plug
alias Architect.Accounts
alias Architect.Accounts.User
alias Architect.Repo
import Plug.Conn
def init(opts), do: opts
def call(conn, _) do
context = build_context(conn)
put_private(conn, :absinthe, %{context: context})
end
defp build_context(conn) do
%{}
|> add_remote_ip_to_context(conn)
|> add_user_to_context(conn)
end
defp add_remote_ip_to_context(%{} = context, conn) do
case conn.remote_ip do
remote_ip when is_tuple(remote_ip) -> Map.put(context, :remote_ip, get_string_ip(remote_ip))
_ -> context
end
end
defp add_user_to_context(%{} = context, conn) do
with ["Bearer " <> token] <- get_req_header(conn, "authorization"),
true <- present?(token),
{:ok, user} <- get_user(token) do
Map.put(context, :current_user, user)
else
_ -> context
end
end
@spec get_user(String.t()) :: {:ok, User}
defp get_user(token) do
with {:ok, claims} <- Accounts.decode_and_verify(token, %{"typ" => "access"}),
{:ok, user} <- Accounts.resource_from_claims(claims) do
{:ok, user}
end
end
defp get_string_ip(address) when is_tuple(address) do
address
|> :inet_parse.ntoa()
|> IO.iodata_to_binary()
end
@spec present?(String.t()) :: boolean()
defp present?(nil), do: false
defp present?(string) do
string |> String.trim() |> String.length() > 0
end
end
| 24.4 | 98 | 0.645492 |
088639f8246803aca9b0ba7e742b4711e1ebc540 | 1,891 | ex | Elixir | clients/books/lib/google_api/books/v1/model/series_series_series_subscription_release_info_next_release_info.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/books/lib/google_api/books/v1/model/series_series_series_subscription_release_info_next_release_info.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/books/lib/google_api/books/v1/model/series_series_series_subscription_release_info_next_release_info.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Books.V1.Model.SeriesSeriesSeriesSubscriptionReleaseInfoNextReleaseInfo do
  @moduledoc """
  Generated model for the Books API `NextReleaseInfo` sub-resource of a
  series subscription. Do not edit by hand — this module is produced by
  the elixir code generator and would be clobbered on regeneration.

  ## Attributes

  *   `amountInMicros` (*type:* `float()`, *default:* `nil`) -
  *   `currencyCode` (*type:* `String.t`, *default:* `nil`) -
  *   `releaseNumber` (*type:* `String.t`, *default:* `nil`) -
  *   `releaseTime` (*type:* `String.t`, *default:* `nil`) -
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :amountInMicros => float(),
          :currencyCode => String.t(),
          :releaseNumber => String.t(),
          :releaseTime => String.t()
        }

  field(:amountInMicros)
  field(:currencyCode)
  field(:releaseNumber)
  field(:releaseTime)
end
# Generated Poison protocol implementations: JSON decoding delegates to
# the model's own decode/2 (field coercion), encoding to the shared
# ModelBase encoder.
defimpl Poison.Decoder,
  for: GoogleApi.Books.V1.Model.SeriesSeriesSeriesSubscriptionReleaseInfoNextReleaseInfo do
  def decode(value, options) do
    GoogleApi.Books.V1.Model.SeriesSeriesSeriesSubscriptionReleaseInfoNextReleaseInfo.decode(
      value,
      options
    )
  end
end

defimpl Poison.Encoder,
  for: GoogleApi.Books.V1.Model.SeriesSeriesSeriesSubscriptionReleaseInfoNextReleaseInfo do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 31 | 94 | 0.710735 |
08865eb81a884f93c4a8872074617053d8f92e0a | 13,329 | ex | Elixir | lib/ex_unit/lib/ex_unit/assertions.ex | jeregrine/elixir | 080201477955bcd2d755fb4162966dc6882b1521 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit/assertions.ex | jeregrine/elixir | 080201477955bcd2d755fb4162966dc6882b1521 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit/assertions.ex | jeregrine/elixir | 080201477955bcd2d755fb4162966dc6882b1521 | [
"Apache-2.0"
] | null | null | null | defexception ExUnit.AssertionError, message: "assertion failed"
defexception ExUnit.ExpectationError, expected: nil, actual: nil, assertion: "",
negation: false, prelude: "Expected", expr: nil do
def message(exception) do
if desc = exception.expr do
"#{exception.prelude} #{desc} #{exception.full_assertion} " <>
"#{exception.expected}. Instead got #{exception.actual}"
else
"#{exception.prelude} #{exception.expected} " <>
"#{exception.full_assertion} #{exception.actual}"
end
end
def full_assertion(exception) do
"to" <> if(exception.negation, do: " not ", else: " ") <> exception.assertion
end
end
defmodule ExUnit.Assertions do
  @moduledoc """
  This module contains a set of assertion functions that are
  imported by default into your test cases.

  In general, a developer will want to use the general
  `assert` macro in tests. This macro tries to be smart
  and provide good reporting whenever there is a failure.
  For example, `assert some_fun() == 10` will fail (assuming
  `some_fun()` returns 13):

      Expected 10 to be equal to 13

  This module also provides other convenience functions
  like `assert_in_delta` and `assert_raise` to easily handle other
  common cases such as checking a floating point number or handling exceptions.
  """

  # NOTE(review): this is pre-1.0 Elixir source. `//` default arguments,
  # `is_regex/1` and record-based exceptions were all replaced in later
  # releases; the file cannot compile on a modern toolchain.

  @doc """
  Asserts the `expected` value is true.

  `assert` in general tries to be smart and provide good
  reporting whenever there is a failure. For example,
  `assert 10 > 15` is going to fail with the message:

      Expected 10 to be more than 15

  ## Examples

      assert true

  """
  defmacro assert(expected) do
    # translate_assertion/1 rewrites recognized expression shapes
    # (==, <, =~, in, ...) into richer checks at compile time.
    case translate_assertion(expected) do
      nil ->
        # Default message in case no transform was performed
        quote do
          value = unquote(expected)

          unless value do
            raise ExUnit.ExpectationError,
              expr: unquote(Macro.to_string(expected)),
              assertion: "be",
              expected: "true",
              actual: inspect(value)
          end

          value
        end

      value -> value
    end
  end

  @doc """
  Refutes the `expected` value is true.

  `refute` in general tries to be smart and provide good
  reporting whenever there is a failure.

  ## Examples

      refute false

  """
  defmacro refute(expected) do
    # Reuses the assert translations by wrapping the expression in `!`.
    contents = case translate_assertion({ :!, [], [expected] }) do
      nil ->
        # Default message in case no transform was performed
        quote do
          value = unquote(expected)

          if value do
            raise ExUnit.ExpectationError,
              expr: unquote(Macro.to_string(expected)),
              assertion: "be",
              expected: "false",
              actual: inspect(value)
          end

          true
        end

      value -> value
    end

    { :!, [], [contents] }
  end

  ## START HELPERS

  defp translate_assertion({ :=, _, [left, right] }) do
    { :case, meta, args } =
      quote do
        case right do
          unquote(left) ->
            right
          _ ->
            raise ExUnit.ExpectationError,
              expected: inspect(right),
              actual: unquote(Macro.to_string(left)),
              assertion: "match pattern (=)"
        end
      end

    quote do
      right = unquote(right)
      # NOTE(review): the :export_all metadata presumably lets variables
      # bound inside the case leak to the surrounding test scope, as a
      # plain `=` match would — TODO confirm against the old compiler.
      unquote({ :case, [{:export_all,true}|meta], args })
    end
  end

  defp translate_assertion({ :==, _, [left, right] }) do
    assert_operator :==, left, right, "be equal to (==)"
  end

  defp translate_assertion({ :<, _, [left, right] }) do
    assert_operator :<, left, right, "be less than"
  end

  defp translate_assertion({ :>, _, [left, right] }) do
    assert_operator :>, left, right, "be more than"
  end

  defp translate_assertion({ :<=, _, [left, right] }) do
    assert_operator :<=, left, right, "be less than or equal to"
  end

  defp translate_assertion({ :>=, _, [left, right] }) do
    assert_operator :>=, left, right, "be more than or equal to"
  end

  defp translate_assertion({ :===, _, [left, right] }) do
    assert_operator :===, left, right, "be equal to (===)"
  end

  defp translate_assertion({ :!==, _, [left, right] }) do
    assert_operator :!==, left, right, "be not equal to (!==)"
  end

  defp translate_assertion({ :!=, _, [left, right] }) do
    assert_operator :!=, left, right, "be not equal to (!=)"
  end

  defp translate_assertion({ :=~, _, [left, right] }) do
    assert_operator :=~, left, right, "match (=~)"
  end

  defp translate_assertion({ :in, _, [left, right] }) do
    quote do
      left = unquote(left)
      right = unquote(right)
      assert Enum.member?(right, left), left, right, assertion: "be in"
    end
  end

  ## Negative versions

  defp translate_assertion({ :!, _, [{ :=, _, [left, right] }] }) do
    quote do
      right = unquote(right)
      case right do
        unquote(left) ->
          raise ExUnit.ExpectationError,
            expected: inspect(right),
            actual: unquote(Macro.to_string(left)),
            assertion: "match pattern (=)",
            negation: true
        _ ->
          nil
      end
    end
  end

  defp translate_assertion({ :!, _, [{ :=~, _, [left, right] }] }) do
    quote do
      left = unquote(left)
      right = unquote(right)
      assert !(left =~ right), left, right, assertion: "match (=~)", negation: true
    end
  end

  defp translate_assertion({ negation, _, [{ :in, _, [left, right] }] }) when negation in [:!, :not] do
    quote do
      left = unquote(left)
      right = unquote(right)
      assert !Enum.member?(right, left), left, right, assertion: "be in", negation: true
    end
  end

  ## Fallback

  # Unrecognized expression shape: fall back to the generic truthiness check.
  defp translate_assertion(_expected) do
    nil
  end

  # Shared template for binary-operator assertions; `text` is the human
  # wording used in the failure message.
  defp assert_operator(operator, expected, actual, text) do
    quote do
      left = unquote(expected)
      right = unquote(actual)
      assert unquote(operator)(left, right), left, right, unquote(text)
    end
  end

  ## END HELPERS

  @doc """
  Asserts the `expected` value is true.
  If it fails, raises the given `message`.

  ## Examples

      assert false, "it will never be true"

  """
  def assert(expected, message) when is_binary(message) do
    unless expected, do: raise(ExUnit.AssertionError, message: message)
    true
  end

  @doc """
  Asserts the `expected` value is true.
  If it fails, it raises an expectation error
  using the given `expected` and `actual` values.

  ## Examples

      assert this > that, this, that, "more than"

  """
  def assert(value, expected, actual, content) when is_binary(content) do
    assert(value, expected, actual, assertion: content)
  end

  def assert(value, expected, actual, opts) do
    unless value do
      raise ExUnit.ExpectationError,
        Keyword.merge([expected: inspect(expected), actual: inspect(actual)], opts)
    end
    true
  end

  @doc """
  Asserts a message was or is going to be received. Unlike
  `assert_received`, it has a default timeout of 100 milliseconds.

  The given `expected` argument has to be a pattern.

  ## Examples

      assert_receive :hello

  Asserts against a larger timeout:

      assert_receive :hello, 20_000

  You can also match against specific patterns:

      assert_receive { :hello, _ }

      x = 5
      assert_receive { :count, ^x }

  """
  defmacro assert_receive(expected, timeout // 100, message // nil) do
    do_assert_receive(expected, timeout, message)
  end

  @doc """
  Asserts a message was received and is in the current process' mailbox.
  Timeout is set to 0, so there is no waiting time.

  The given `expected` argument has to be a pattern.

  ## Examples

      send self, :hello
      assert_received :hello

  You can also match against specific patterns:

      send self, { :hello, "world" }
      assert_received { :hello, _ }

  """
  defmacro assert_received(expected, message // nil) do
    do_assert_receive(expected, 0, message)
  end

  defp do_assert_receive(expected, timeout, message) do
    # Render the pattern at compile time for the failure message.
    binary = Macro.to_string(expected)

    quote do
      receive do
        unquote(expected) = received -> received
      after
        unquote(timeout) ->
          flunk unquote(message) || "Expected to have received message matching #{unquote binary}"
      end
    end
  end

  @doc """
  Asserts the `exception` is raised during `function` execution with
  the `expected_message`. Returns the rescued exception, fails otherwise.

  ## Examples

      assert_raise ArithmeticError, "bad argument in arithmetic expression", fn ->
        1 + "test"
      end

  """
  def assert_raise(exception, message, function) when is_function(function) do
    error = assert_raise(exception, function)

    # `message` may be a literal string (exact match) or a regex.
    is_match = case message do
      re when is_regex(re) -> error.message =~ re
      bin when is_binary(bin) -> error.message == bin
    end

    assert is_match, message, error.message,
      prelude: "Expected error message", assertion: "match"

    error
  end

  @doc """
  Asserts the `exception` is raised during `function` execution.
  Returns the rescued exception, fails otherwise.

  ## Examples

      assert_raise ArithmeticError, fn ->
        1 + "test"
      end

  """
  def assert_raise(exception, function) when is_function(function) do
    try do
      function.()
      flunk "Expected #{inspect exception} exception but nothing was raised"
    rescue
      error in [exception] -> error
      error ->
        # Exceptions are records in this Elixir era; __record__(:name)
        # yields the exception module.
        name = error.__record__(:name)

        # Re-raise ExUnit's own failures so they are not swallowed here.
        if name in [ExUnit.AssertionError, ExUnit.ExpectationError] do
          raise(error)
        else
          flunk "Expected exception #{inspect exception}, got #{inspect name} (#{error.message})"
        end
    end
  end

  @doc """
  Asserts the `expected` and `received` are within `delta`.

  ## Examples

      assert_in_delta 1.1, 1.5, 0.2
      assert_in_delta 10, 15, 4

  """
  def assert_in_delta(expected, received, delta, message // nil) do
    diff = abs(expected - received)
    message = message ||
      "Expected |#{inspect expected} - #{inspect received}| (#{inspect diff}) to be < #{inspect delta}"
    assert diff < delta, message
  end

  @doc """
  Asserts the given `expression` will throw a value.
  Returns the thrown value or fails otherwise.

  ## Examples

      assert catch_throw(throw 1) == 1

  """
  defmacro catch_throw(expression) do
    do_catch(:throw, expression)
  end

  @doc """
  Asserts the given `expression` will exit.
  Returns the exit status/message or fails otherwise.

  ## Examples

      assert catch_exit(exit 1) == 1

  """
  defmacro catch_exit(expression) do
    do_catch(:exit, expression)
  end

  @doc """
  Asserts the given `expression` will cause an error.
  Returns the error or fails otherwise.

  ## Examples

      assert catch_error(error 1) == 1

  """
  defmacro catch_error(expression) do
    do_catch(:error, expression)
  end

  # Shared template for the three catch_* macros: run the expression,
  # flunk if nothing is thrown/exited/errored, otherwise return what was
  # caught. ExUnit's own failures are re-raised untouched.
  defp do_catch(kind, expr) do
    quote do
      try do
        unquote(expr)
        flunk "Expected to catch #{unquote(kind)}, got nothing"
      rescue
        e in [ExUnit.AssertionError, ExUnit.ExpectationError] -> raise(e)
      catch
        unquote(kind), what_we_got -> what_we_got
      end
    end
  end

  @doc """
  Asserts the `not_expected` value is `nil` or `false`.
  In case it is a truthy value, raises the given message.

  ## Examples

      refute true, "This will obviously fail"

  """
  def refute(not_expected, message) do
    not assert(!not_expected, message)
  end

  @doc """
  Asserts a message was not received and won't be within
  the `timeout` period.

  The `not_expected` argument must be a match pattern.

  ## Examples

      refute_receive :bye

  Refute received with a explicit timeout:

      refute_receive :bye, 1000

  """
  defmacro refute_receive(not_expected, timeout // 100, message // nil) do
    do_refute_receive(not_expected, timeout, message)
  end

  @doc """
  Asserts a message was not received (i.e. it is not in the current process mailbox).
  The `not_expected` argument must be a match pattern.

  Timeout is set to 0, so there is no waiting time.

  ## Examples

      send self, :hello
      refute_received :bye

  """
  defmacro refute_received(not_expected, message // nil) do
    do_refute_receive(not_expected, 0, message)
  end

  defp do_refute_receive(not_expected, timeout, message) do
    binary = Macro.to_string(not_expected)

    quote do
      receive do
        unquote(not_expected) = actual ->
          flunk unquote(message) || "Expected to not have received message matching #{unquote binary}, got #{inspect actual}"
      after
        unquote(timeout) -> false
      end
    end
  end

  @doc """
  Asserts the `expected` and `received` are not within `delta`.

  ## Examples

      refute_in_delta 1.1, 1.2, 0.2
      refute_in_delta 10, 11, 2

  """
  def refute_in_delta(expected, received, delta, message // nil) do
    diff = abs(expected - received)
    message = message ||
      "Expected |#{inspect expected} - #{inspect received}| (#{inspect diff}) to not be < #{inspect delta}"
    refute diff < delta, message
  end

  @doc """
  Fails with a message.

  ## Examples

      flunk "This should raise an error"

  """
  @spec flunk :: no_return
  @spec flunk(String.t) :: no_return
  def flunk(message // "Flunked!") do
    raise ExUnit.AssertionError, message: message
  end
end
| 25.054511 | 125 | 0.633281 |
0886742dfe2a1a4e6299ba43c68e99e07161e962 | 3,495 | exs | Elixir | test/req_test.exs | walkr/exns | 40379d38ce363b6f9208182dc3309529b1939fe3 | [
"MIT"
] | 6 | 2016-06-12T12:59:21.000Z | 2018-09-23T06:57:42.000Z | test/req_test.exs | walkr/exns | 40379d38ce363b6f9208182dc3309529b1939fe3 | [
"MIT"
] | null | null | null | test/req_test.exs | walkr/exns | 40379d38ce363b6f9208182dc3309529b1939fe3 | [
"MIT"
] | null | null | null | defmodule Exns.RequestWorkerTest do
use ExUnit.Case, async: true
setup_all do
# Start math service
pid1 = spawn fn ->
path = Path.join(System.cwd, "priv/math_service.py")
System.cmd "python", [path]
end
# Start string service
pid2 = spawn fn ->
path = Path.join(System.cwd, "priv/string_service.py")
System.cmd "python", [path]
end
# Kill processes
on_exit fn ->
:erlang.exit pid1, :kill
:erlang.exit pid2, :kill
end
end
setup do
Logger.configure(level: :error)
Application.put_env(:exns, :nanoservices,
[[name: :math_service,
address: "ipc:///tmp/math-test-service.sock",
timeout: 1000,
workers: 10,
encoder: "msgpack"],
[name: :unreal_service,
address: "ipc:///tmp/no-service-here.sock",
timeout: 1,
workers: 10,
encoder: "msgpack"],
[name: :string_service,
address: "ipc:///tmp/string-test-service.sock",
timeout: 1000,
workers: 10,
encoder: "json"]]
)
Application.stop(:exns)
:ok = Application.start(:exns)
end
# *********************
# PONG COLLECTOR
# *********************
def collector(parent, total) do
collector(parent, total, 0)
end
def collector(parent, total, total) do
send parent, {:done, total}
end
def collector(parent, total, acc) do
receive do
:pong -> collector(parent, total, acc + 1)
end
end
# *********************
# TESTS
# *********************
test "concurrent pings to match service and msgpack encoding" do
max = 2000
parent_pid = self()
collector_pid = spawn fn-> collector(parent_pid, max) end
started = :erlang.timestamp()
# Launch `max` pings then collect pongs
for _ <- 1..max, do: spawn(fn ->
assert {:ok, "pong"} == Exns.call(:math_service, "ping")
send collector_pid, :pong
end)
# Wait until all pongs are collected
receive do
{:done, ^max} -> :ok
end
IO.puts "\nStats for simple pings to math service:\n---"
show_stats(started, max)
end
test "service method with args" do
assert {:ok, 3} == Exns.call(:math_service, "add", [1, 2])
assert {:ok, "HELLO"} == Exns.call(:string_service, "uppercase", ["hello"])
end
test "call!" do
assert 3 == Exns.call!(:math_service, "add", [1,2])
end
test "service unknown method" do
{:error, error} = Exns.call(:math_service, "some-inexisting-method")
assert error != nil
end
test "service timeout" do
assert {:error, :timeout} == Exns.call(:unreal_service, "add", [1,2])
end
# Prints throughput statistics for a benchmark run.
#
# `started` is an `:erlang.timestamp/0` tuple captured before the run and
# `max` is the number of requests issued. Returns `:ok` (from the final
# `IO.puts`). Fixes: `String.ljust/2` was deprecated in Elixir 1.3 and later
# removed — `String.pad_trailing/2` is the replacement; the "Throughput"
# label now matches the ": " style of the other two lines; elapsed time is
# floored at 1 µs so a sub-microsecond run cannot divide by zero.
def show_stats(started, max) do
  elapsed_us = Kernel.max(:timer.now_diff(:erlang.timestamp(), started), 1)
  duration = elapsed_us / 1_000_000
  throughput = max / duration
  avg_req_time = Float.round(duration / max * 1000, 2)
  IO.puts String.pad_trailing("Concurrency: ", 32) <> "#{max} clients"
  IO.puts String.pad_trailing("Throughput: ", 32) <> "#{round throughput} req/sec"
  IO.puts String.pad_trailing("Avg. Request Time: ", 32) <> "#{avg_req_time} ms"
end
end | 26.278195 | 83 | 0.526466 |
08868099ab317c4c3f57f8847fbc8a2ce66c3f45 | 620 | exs | Elixir | test/configuration_test.exs | schultyy/localci | 0105ff01de1fd4a05dfa9d1e60f8ba79dc120d25 | [
"MIT"
] | null | null | null | test/configuration_test.exs | schultyy/localci | 0105ff01de1fd4a05dfa9d1e60f8ba79dc120d25 | [
"MIT"
] | null | null | null | test/configuration_test.exs | schultyy/localci | 0105ff01de1fd4a05dfa9d1e60f8ba79dc120d25 | [
"MIT"
] | null | null | null | defmodule ConfigurationTest do
use ExUnit.Case
# Reads the example configuration fixture used across these tests.
def read_config, do: File.read!("test/example_config.json")
# Parses the JSON fixture once per test and exposes it as `meta[:config]`.
setup do
  parsed = LocalCi.Configuration.parse(read_config())
  {:ok, config: parsed}
end
# The parser should yield a non-nil structure for the example fixture.
test "returns object from json", meta do
  assert meta[:config] != nil
end

# The accessors below assume parse/1 returns string-keyed maps
# (see the fixture in test/example_config.json).
test "has repository property", meta do
  assert meta[:config]["repository"] == "git@github.com:schultyy/pulp.git"
end

test "has name property", meta do
  assert meta[:config]["name"] == "pulp"
end

test "has command property", meta do
  assert meta[:config]["command"] == "bundle exec rake"
end
end
| 21.37931 | 76 | 0.675806 |
088688a0627c4e9264a251e1218f159fdbf754de | 1,735 | ex | Elixir | clients/script/lib/google_api/script/v1/model/create_project_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/script/lib/google_api/script/v1/model/create_project_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/script/lib/google_api/script/v1/model/create_project_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Script.V1.Model.CreateProjectRequest do
  @moduledoc """
  Request to create a script project.

  ## Attributes

  *   `parentId` (*type:* `String.t`, *default:* `nil`) - The Drive ID of a parent file that the created script project is bound to.
      This is usually the ID of a Google Doc, Google Sheet, Google Form, or
      Google Slides file. If not set, a standalone script project is created.
  *   `title` (*type:* `String.t`, *default:* `nil`) - The title for the project.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :parentId => String.t(),
          :title => String.t()
        }

  # `field/1` is supplied by GoogleApi.Gax.ModelBase and wires the named
  # attribute into the generated encode/decode machinery.
  field(:parentId)
  field(:title)
end
defimpl Poison.Decoder, for: GoogleApi.Script.V1.Model.CreateProjectRequest do
  # Delegates straight to the decoder generated on the model module.
  def decode(value, options),
    do: GoogleApi.Script.V1.Model.CreateProjectRequest.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Script.V1.Model.CreateProjectRequest do
  # All generated models share the ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 33.365385 | 132 | 0.720461 |
088690bb8318273e1a2bb97a7351f3ed447a7307 | 59 | ex | Elixir | lib/bitpal_web/views/home_view.ex | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 5 | 2021-05-04T21:28:00.000Z | 2021-12-01T11:19:48.000Z | lib/bitpal_web/views/home_view.ex | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 71 | 2021-04-21T05:48:49.000Z | 2022-03-23T06:30:37.000Z | lib/bitpal_web/views/home_view.ex | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 1 | 2021-04-25T10:35:41.000Z | 2021-04-25T10:35:41.000Z | defmodule BitPalWeb.HomeView do
use BitPalWeb, :view
end
| 14.75 | 31 | 0.79661 |
088699518c8c416b1aa7dec9ef78229da88b1311 | 4,233 | exs | Elixir | test/review_scraper_test.exs | andrhevictor/review_scraper | 7628b814d7d35b185bd9bfbe83400c68486bd19a | [
"MIT"
] | null | null | null | test/review_scraper_test.exs | andrhevictor/review_scraper | 7628b814d7d35b185bd9bfbe83400c68486bd19a | [
"MIT"
] | null | null | null | test/review_scraper_test.exs | andrhevictor/review_scraper | 7628b814d7d35b185bd9bfbe83400c68486bd19a | [
"MIT"
] | null | null | null | defmodule ReviewScraperTest do
use ExUnit.Case
import ReviewScraper.DealerRaterMock
alias ReviewScraper.DealerRater.Review
# Stub the DealerRater HTTP endpoint with a 200 response before each test;
# the not-found test below re-stubs it with a 500.
# NOTE(review): assumes mock_dealer_rater_endpoint/1 returns a value ExUnit
# accepts from setup (:ok, a map, or a keyword list) — confirm in the mock.
setup do
  mock_dealer_rater_endpoint(200)
end
describe "get_overly_positive_reviews/2" do
  # Default review count is 3 when no options are given.
  test "loads the three most positive reviews from a dealership" do
    assert {:ok, reviews} = ReviewScraper.get_overly_positive_reviews("Dealership Name")
    assert length(reviews) == 3
  end

  # :reviews_count overrides the default amount.
  test "loads the specified amount of reviews from a dealership" do
    options = [reviews_count: 5]

    assert {:ok, reviews} =
             ReviewScraper.get_overly_positive_reviews("Dealership Name", options)

    assert length(reviews) == 5
  end

  # An upstream 500 is mapped to {:error, :dealership_not_found}.
  test "returns error if a dealership is not found by its name" do
    mock_dealer_rater_endpoint(500)

    assert {:error, :dealership_not_found} =
             ReviewScraper.get_overly_positive_reviews("Dealership Name")
  end
end
# sort_reviews_by_positiveness/1 first drops non-recommending reviews, then
# orders by average rating, breaking ties by positive-word usage.
describe "sort_reviews_by_positiveness/1" do
  test "filters reviews that haven't recommended the dealer" do
    not_recommended = List.duplicate(%Review{recommend_dealer?: false}, 3)
    recommended = %Review{description: "I recommend this dealer.", recommend_dealer?: true}

    result = ReviewScraper.sort_reviews_by_positiveness(not_recommended ++ [recommended])

    # Only the recommending review survives the filter.
    assert [%Review{description: "I recommend this dealer.", recommend_dealer?: true}] = result
  end

  test "sorts reviews by average rating" do
    # Builds a review whose five rating fields all carry the same value.
    uniformly_rated = fn rating ->
      %Review{
        recommend_dealer?: true,
        dealership_rating: rating,
        customer_service_rating: rating,
        friendliness_rating: rating,
        pricing_rating: rating,
        overall_experience_rating: rating
      }
    end

    ascending = Enum.map(1..5, uniformly_rated)

    # Highest-rated review must come out first.
    assert ReviewScraper.sort_reviews_by_positiveness(ascending) == Enum.reverse(ascending)
  end

  test "sorts by usage of positive words" do
    # All three reviews carry maximal ratings, so only the wording differs.
    top_rated = fn description ->
      %Review{
        description: description,
        recommend_dealer?: true,
        dealership_rating: 5,
        customer_service_rating: 5,
        friendliness_rating: 5,
        pricing_rating: 5,
        overall_experience_rating: 5
      }
    end

    mild = top_rated.("The most amazing dealer")
    stronger = top_rated.("Super excellent and perfect dealer")

    strongest =
      top_rated.(
        "Extremely fantastic. An wonderful experience. " <>
          "I'm sure it's the most amazing dealer in the world."
      )

    assert ReviewScraper.sort_reviews_by_positiveness([mild, stronger, strongest]) ==
             [strongest, stronger, mild]
  end
end
end
| 27.666667 | 90 | 0.616112 |
08869d8dfca9e97aaa00ab64b8b7274f5bb82579 | 543 | exs | Elixir | priv/repo/migrations/20180606004418_create_book_instance.exs | zephraph/readtome | 64a5f773bdc3c19d9c5ac50a04aa14e446e36c55 | [
"MIT"
] | 1 | 2021-09-05T20:54:57.000Z | 2021-09-05T20:54:57.000Z | priv/repo/migrations/20180606004418_create_book_instance.exs | zephraph/readtome | 64a5f773bdc3c19d9c5ac50a04aa14e446e36c55 | [
"MIT"
] | 17 | 2019-07-06T17:31:56.000Z | 2021-06-22T15:31:06.000Z | priv/repo/migrations/20180606004418_create_book_instance.exs | zephraph/readtome | 64a5f773bdc3c19d9c5ac50a04aa14e446e36c55 | [
"MIT"
] | 1 | 2021-03-15T20:50:27.000Z | 2021-03-15T20:50:27.000Z | defmodule Readtome.Repo.Migrations.CreateBookInstances do
use Ecto.Migration
def change do
execute "CREATE EXTENSION IF NOT EXISTS postgis"
create table(:book_instances) do
add :condition, :string
add :medium, :string
add :offerings, :string
add :location, :geometry
add :availability, :string
add :book_id, references(:books, on_delete: :nothing)
add :user_id, references(:users, on_delete: :nothing)
timestamps()
end
create index(:book_instances, [:book_id])
end
end
| 24.681818 | 59 | 0.6814 |
0886c398a28dd79b8e8ae1174b591acc1232b664 | 2,624 | ex | Elixir | clients/tool_results/lib/google_api/tool_results/v1/model/non_sdk_api_insight.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/tool_results/lib/google_api/tool_results/v1/model/non_sdk_api_insight.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/tool_results/lib/google_api/tool_results/v1/model/non_sdk_api_insight.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ToolResults.V1.Model.NonSdkApiInsight do
  @moduledoc """
  Non-SDK API insights (to address debugging solutions).

  ## Attributes

  *   `exampleTraceMessages` (*type:* `list(String.t)`, *default:* `nil`) - Optional sample stack traces, for which this insight applies (there
      should be at least one).
  *   `matcherId` (*type:* `String.t`, *default:* `nil`) - A unique ID, to be used for determining the effectiveness of this
      particular insight in the context of a matcher. (required)
  *   `pendingGoogleUpdateInsight` (*type:* `GoogleApi.ToolResults.V1.Model.PendingGoogleUpdateInsight.t`, *default:* `nil`) - An insight indicating that the hidden API usage originates from a
      Google-provided library.
  *   `upgradeInsight` (*type:* `GoogleApi.ToolResults.V1.Model.UpgradeInsight.t`, *default:* `nil`) - An insight indicating that the hidden API usage originates from the
      use of a library that needs to be upgraded.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :exampleTraceMessages => list(String.t()),
          :matcherId => String.t(),
          :pendingGoogleUpdateInsight =>
            GoogleApi.ToolResults.V1.Model.PendingGoogleUpdateInsight.t(),
          :upgradeInsight => GoogleApi.ToolResults.V1.Model.UpgradeInsight.t()
        }

  # `field/1,2` come from GoogleApi.Gax.ModelBase; nested models name their
  # decoder module via :as.
  field(:exampleTraceMessages, type: :list)
  field(:matcherId)
  field(:pendingGoogleUpdateInsight, as: GoogleApi.ToolResults.V1.Model.PendingGoogleUpdateInsight)
  field(:upgradeInsight, as: GoogleApi.ToolResults.V1.Model.UpgradeInsight)
end
defimpl Poison.Decoder, for: GoogleApi.ToolResults.V1.Model.NonSdkApiInsight do
  # Delegates straight to the decoder generated on the model module.
  def decode(value, options),
    do: GoogleApi.ToolResults.V1.Model.NonSdkApiInsight.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.ToolResults.V1.Model.NonSdkApiInsight do
  # All generated models share the ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 41.650794 | 192 | 0.73628 |
0886fe6535085e41f1410f82508ae9adc983ee6f | 6,156 | ex | Elixir | lib/oban/repo.ex | kianmeng/oban | 2ee44412f8ed3b5940017131eb29bf5296a7cf55 | [
"Apache-2.0"
] | null | null | null | lib/oban/repo.ex | kianmeng/oban | 2ee44412f8ed3b5940017131eb29bf5296a7cf55 | [
"Apache-2.0"
] | null | null | null | lib/oban/repo.ex | kianmeng/oban | 2ee44412f8ed3b5940017131eb29bf5296a7cf55 | [
"Apache-2.0"
] | null | null | null | defmodule Oban.Repo do
@moduledoc """
Wrappers around `Ecto.Repo` callbacks.
These functions should be used when working with an Ecto repo inside a plugin. These functions
will resolve the correct repo instance, and set the schema prefix and the log level, according
to the Oban configuration.
"""
@type config :: %{
:repo => module,
optional(:get_dynamic_repo) => (() -> pid | atom),
optional(:log) => false | Logger.level(),
optional(:prefix) => binary(),
optional(any) => any
}
@doc "Wraps `c:Ecto.Repo.transaction/2`, resolving the dynamic repo and log level from `conf`."
@doc since: "2.2.0"
@spec transaction(config(), (... -> any()) | Ecto.Multi.t(), opts :: Keyword.t()) ::
        {:ok, any()}
        | {:error, any()}
        | {:error, Ecto.Multi.name(), any(), %{required(Ecto.Multi.name()) => any()}}
def transaction(conf, fun_or_multi, opts \\ []) do
  run = fn -> conf.repo.transaction(fun_or_multi, with_default_opts(opts, conf)) end
  with_dynamic_repo(conf, run)
end
@doc "Wraps `c:Ecto.Repo.update/2`, resolving the dynamic repo, prefix and log level from `conf`."
@doc since: "2.2.0"
@spec update(config(), Ecto.Changeset.t(), Keyword.t()) ::
        {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
def update(conf, changeset, opts \\ []) do
  run = fn -> conf.repo.update(changeset, query_opts(opts, conf)) end
  with_dynamic_repo(conf, run)
end

@doc "Wraps `c:Ecto.Repo.update_all/3`, resolving the dynamic repo, prefix and log level from `conf`."
@doc since: "2.2.0"
@spec update_all(config(), Ecto.Queryable.t(), Keyword.t(), Keyword.t()) ::
        {integer(), nil | [term()]}
def update_all(conf, queryable, updates, opts \\ []) do
  run = fn -> conf.repo.update_all(queryable, updates, query_opts(opts, conf)) end
  with_dynamic_repo(conf, run)
end
@doc "Wraps `Ecto.Adapters.SQL.Repo.query/4`, resolving the dynamic repo, prefix and log level from `conf`."
@doc since: "2.2.0"
@spec query(config(), String.t(), [term()], Keyword.t()) ::
        {:ok,
         %{
           :rows => nil | [[term()] | binary()],
           :num_rows => non_neg_integer(),
           optional(atom()) => any()
         }}
        | {:error, Exception.t()}
def query(conf, sql, params \\ [], opts \\ []) do
  run = fn -> conf.repo.query(sql, params, query_opts(opts, conf)) end
  with_dynamic_repo(conf, run)
end
@doc "Wraps `c:Ecto.Repo.all/2`, resolving the dynamic repo, prefix and log level from `conf`."
@doc since: "2.2.0"
@spec all(config(), Ecto.Queryable.t(), Keyword.t()) :: [Ecto.Schema.t()]
def all(conf, queryable, opts \\ []) do
  run = fn -> conf.repo.all(queryable, query_opts(opts, conf)) end
  with_dynamic_repo(conf, run)
end

@doc "Wraps `c:Ecto.Repo.one/2`, resolving the dynamic repo, prefix and log level from `conf`."
@doc since: "2.2.0"
@spec one(config(), Ecto.Queryable.t(), Keyword.t()) :: Ecto.Schema.t() | nil
def one(conf, queryable, opts \\ []) do
  run = fn -> conf.repo.one(queryable, query_opts(opts, conf)) end
  with_dynamic_repo(conf, run)
end
@doc "Wraps `c:Ecto.Repo.delete/2`, resolving the dynamic repo, prefix and log level from `conf`."
@doc since: "2.4.0"
@spec delete(
        config(),
        struct_or_changeset :: Ecto.Schema.t() | Ecto.Changeset.t(),
        opts :: Keyword.t()
      ) :: {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
def delete(conf, struct_or_changeset, opts \\ []) do
  run = fn -> conf.repo.delete(struct_or_changeset, query_opts(opts, conf)) end
  with_dynamic_repo(conf, run)
end

@doc "Wraps `c:Ecto.Repo.insert/2`, resolving the dynamic repo, prefix and log level from `conf`."
@doc since: "2.2.0"
@spec insert(config(), Ecto.Schema.t() | Ecto.Changeset.t(), Keyword.t()) ::
        {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
def insert(conf, struct_or_changeset, opts \\ []) do
  run = fn -> conf.repo.insert(struct_or_changeset, query_opts(opts, conf)) end
  with_dynamic_repo(conf, run)
end
@doc "Wraps `c:Ecto.Repo.insert_all/3`, resolving the dynamic repo, prefix and log level from `conf`."
@doc since: "2.2.0"
@spec insert_all(
        config(),
        binary() | {binary(), module()} | module(),
        [map() | [{atom(), term() | Ecto.Query.t()}]],
        Keyword.t()
      ) :: {integer(), nil | [term()]}
def insert_all(conf, schema_or_source, entries, opts \\ []) do
  run = fn -> conf.repo.insert_all(schema_or_source, entries, query_opts(opts, conf)) end
  with_dynamic_repo(conf, run)
end

@doc "Wraps `c:Ecto.Repo.delete_all/2`, resolving the dynamic repo, prefix and log level from `conf`."
@doc since: "2.2.0"
@spec delete_all(config(), Ecto.Queryable.t(), Keyword.t()) :: {integer(), nil | [term()]}
def delete_all(conf, queryable, opts \\ []) do
  run = fn -> conf.repo.delete_all(queryable, query_opts(opts, conf)) end
  with_dynamic_repo(conf, run)
end
@doc "Wraps `c:Ecto.Repo.checkout/2`, resolving the dynamic repo, prefix and log level from `conf`."
@doc since: "2.2.0"
@spec checkout(config(), (() -> result), Keyword.t()) :: result when result: var
def checkout(conf, function, opts \\ []) do
  run = fn -> conf.repo.checkout(function, query_opts(opts, conf)) end
  with_dynamic_repo(conf, run)
end
@doc "Wraps `Ecto.Adapters.SQL.Repo.to_sql/2`, applying the configured schema prefix when present."
@doc since: "2.2.0"
@spec to_sql(config(), :all | :update_all | :delete_all, Ecto.Queryable.t()) ::
        {String.t(), [term()]}
def to_sql(conf, kind, queryable) do
  # When the config carries a :prefix, bake it into the query before rendering.
  queryable =
    case conf do
      %{prefix: prefix} -> queryable |> Ecto.Queryable.to_query() |> Map.put(:prefix, prefix)
      _ -> queryable
    end

  conf.repo.to_sql(kind, queryable)
end
@doc "Wraps `c:Ecto.Repo.config/0`, resolving the dynamic repo from `conf`."
@doc since: "2.2.0"
@spec config(config()) :: Keyword.t()
def config(conf) do
  with_dynamic_repo(conf, fn -> conf.repo.config() end)
end
# Runs `fun` with the configured dynamic repo instance selected, restoring
# the previously selected instance afterwards (even if `fun` raises).
defp with_dynamic_repo(conf, fun) do
  instance = get_dynamic_repo(conf)

  if is_nil(instance) do
    fun.()
  else
    previous = conf.repo.get_dynamic_repo()

    try do
      conf.repo.put_dynamic_repo(instance)
      fun.()
    after
      conf.repo.put_dynamic_repo(previous)
    end
  end
end
# Resolves the dynamic repo instance from the config, if one is configured.
defp get_dynamic_repo(conf) do
  case conf do
    %{get_dynamic_repo: fetch} when is_function(fetch, 0) -> fetch.()
    _ -> nil
  end
end

# Query options: the defaults (log level) plus the configured schema prefix.
defp query_opts(opts, conf) do
  prefix_opts = conf |> Map.take([:prefix]) |> Map.to_list()

  opts
  |> with_default_opts(conf)
  |> Keyword.merge(prefix_opts)
end

# Overrides caller opts with the configured :log level, when present.
defp with_default_opts(opts, conf) do
  defaults = conf |> Map.take([:log]) |> Map.to_list()
  Keyword.merge(opts, defaults)
end
end
| 31.090909 | 96 | 0.585932 |
08874816fb743e6af1f670a7ec5a6fcc9504b6ec | 1,238 | exs | Elixir | config/test.exs | MatthieuSegret/yummy-phoenix-graphql | f0b258293697b0b120ef8e8a3b3905043c998617 | [
"MIT"
] | 122 | 2017-11-24T11:28:17.000Z | 2022-02-25T17:05:20.000Z | config/test.exs | MatthieuSegret/yummy-phoenix-graphql | f0b258293697b0b120ef8e8a3b3905043c998617 | [
"MIT"
] | 6 | 2018-01-11T22:07:44.000Z | 2021-11-21T15:41:42.000Z | config/test.exs | MatthieuSegret/yummy-phoenix-graphql | f0b258293697b0b120ef8e8a3b3905043c998617 | [
"MIT"
] | 25 | 2018-04-01T02:43:21.000Z | 2022-02-15T03:22:54.000Z | use Mix.Config
# The endpoint serves real HTTP during tests (server: true) so that Wallaby
# browser tests can drive the running application.
config :yummy, YummyWeb.Endpoint,
  http: [port: System.get_env("PORT") || 4000],
  server: true

# Allow browser-driven requests to share the Ecto SQL sandbox connection.
config :yummy, :sql_sandbox, true

config :wallaby,
  driver: Wallaby.Experimental.Chrome,
  # screenshot_on_failure: true,
  screenshot_dir: "test/screenshots"

# Print only warnings and errors during test
config :logger, level: :warn

# Configure the database; credentials fall back to local defaults when the
# POSTGRES_* environment variables are unset (e.g. outside CI).
config :yummy, Yummy.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: System.get_env("POSTGRES_USER") || "postgres",
  password: System.get_env("POSTGRES_PASSWORD") || "postgres",
  database: System.get_env("POSTGRES_DB") || "yummy_graphql_test",
  hostname: System.get_env("POSTGRES_HOST") || "localhost",
  pool: Ecto.Adapters.SQL.Sandbox

# Configures Bamboo to capture mail in memory for test assertions.
config :yummy, Yummy.Mailer, adapter: Bamboo.TestAdapter

# Fake AWS credentials — S3 traffic is pointed at a local fakes3 server below.
config :ex_aws,
  access_key_id: ["fake", :instance_role],
  secret_access_key: ["fake", :instance_role],
  region: "fakes3"

config :ex_aws, :s3,
  scheme: "http://",
  host: "localhost",
  port: 4567

config :arc,
  storage: Arc.Storage.S3,
  asset_host: "http://localhost:4567/yummy-phoenix-graphql",
  bucket: "yummy-phoenix-graphql"
| 27.511111 | 66 | 0.727787 |
08874b8480c8a438b8173acaa799b603a95cfe16 | 1,555 | ex | Elixir | lib/store_card_web/views/error_helpers.ex | manojsamanta/stripe-store-card | 5acc474240fe0eb85cfa8a3ef1d696d12694ad14 | [
"MIT"
] | null | null | null | lib/store_card_web/views/error_helpers.ex | manojsamanta/stripe-store-card | 5acc474240fe0eb85cfa8a3ef1d696d12694ad14 | [
"MIT"
] | null | null | null | lib/store_card_web/views/error_helpers.ex | manojsamanta/stripe-store-card | 5acc474240fe0eb85cfa8a3ef1d696d12694ad14 | [
"MIT"
] | null | null | null | defmodule StoreCardWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.

Builds one `<span class="invalid-feedback">` per error registered for
`field` on `form`, carrying `phx-feedback-for` so LiveView can associate
the feedback with its input.
"""
def error_tag(form, field) do
  for error <- Keyword.get_values(form.errors, field) do
    content_tag(:span, translate_error(error),
      class: "invalid-feedback",
      phx_feedback_for: input_name(form, field)
    )
  end
end
@doc """
Translates an error message using gettext.

Ecto error tuples arrive as `{message, opts}`. When `opts` carries a
`:count`, translation goes through `dngettext/6` so plural rules apply;
otherwise plain `dgettext/4` is used. Messages live in the "errors"
gettext domain (errors.po).
"""
def translate_error({msg, opts}) do
  # :count is set by Ecto for pluralizable messages; its truthiness
  # selects the plural-aware translation path.
  count = opts[:count]

  if count do
    Gettext.dngettext(StoreCardWeb.Gettext, "errors", msg, msg, count, opts)
  else
    Gettext.dgettext(StoreCardWeb.Gettext, "errors", msg, opts)
  end
end
end
| 32.395833 | 78 | 0.666881 |
088759230c3a9d47df8263f9d848215e4a15774c | 1,900 | ex | Elixir | lib/new_relic/transaction.ex | runtastic/newrelic-elixir | 6e9b1337277962402e4f910e39168bbed3ff4991 | [
"MIT"
] | 7 | 2017-03-14T14:29:55.000Z | 2018-03-27T20:27:27.000Z | lib/new_relic/transaction.ex | runtastic/newrelic-elixir | 6e9b1337277962402e4f910e39168bbed3ff4991 | [
"MIT"
] | 1 | 2017-05-15T11:45:53.000Z | 2017-05-16T17:56:44.000Z | lib/new_relic/transaction.ex | runtastic/newrelic-elixir | 6e9b1337277962402e4f910e39168bbed3ff4991 | [
"MIT"
] | 1 | 2017-05-15T10:33:56.000Z | 2017-05-15T10:33:56.000Z | defmodule NewRelic.Transaction do
@moduledoc """
Records information about an instrumented web transaction.
"""
defstruct [:name, :start_time]
@typedoc "A New Relixir transaction context."
@opaque t :: %__MODULE__{name: String.t, start_time: :erlang.timestamp}
@typedoc "The name of a query."
@type query :: String.t
@typedoc "Elapsed time in microseconds."
@type interval :: non_neg_integer
@typedoc "Event types that can be recorded"
@type event_type :: :event | :db | :error | :ext
@doc """
Creates a new web transaction.

Call just before processing a web transaction begins; the returned struct
records the transaction name and the wall-clock start timestamp.
"""
@spec start(String.t) :: t
def start(name) when is_binary(name) do
  %__MODULE__{start_time: :os.timestamp(), name: name}
end
@doc """
Replaces the transaction's name.

Useful when the final transaction name only becomes known after the
transaction has already been started.
"""
@spec update_name(t, String.t) :: t
def update_name(transaction, new_name), do: %{transaction | name: new_name}
@doc """
Finishes a web transaction.

Call just after processing completes: computes the elapsed time since
`start/1` and records it under the transaction's name.
"""
@spec finish(t) :: :ok
def finish(%__MODULE__{name: name, start_time: started_at}) do
  elapsed = :timer.now_diff(:os.timestamp(), started_at)
  NewRelic.Collector.record_value(name, :total, elapsed)
end
@doc """
Records an event against the transaction.

For `:event`, `:db` and `:ext` events the `payload`/`elapsed` pair is
recorded as a timed value; for `:error` the third argument is the error
type and the fourth the error term itself.
"""
@spec record(t, event_type, String.t, interval | String.t) :: any
def record(%__MODULE__{name: name}, type, payload, elapsed) when type in [:event, :db, :ext] do
  NewRelic.Collector.record_value(name, {type, payload}, elapsed)
end

def record(%__MODULE__{name: name}, :error, type, error) do
  NewRelic.Collector.record_error(name, {type, error})
end
end
| 30.15873 | 98 | 0.705263 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.