| column | dtype | values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 3 to 1.03M |
| ext | string | 10 distinct values |
| lang | string | 1 distinct value |
| max_stars_repo_path | string | length 3 to 972 |
| max_stars_repo_name | string | length 6 to 130 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64 | 1 to 191k, nullable |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable |
| max_issues_repo_path | string | length 3 to 972 |
| max_issues_repo_name | string | length 6 to 130 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64 | 1 to 116k, nullable |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable |
| max_forks_repo_path | string | length 3 to 972 |
| max_forks_repo_name | string | length 6 to 130 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64 | 1 to 105k, nullable |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable |
| content | string | length 3 to 1.03M |
| avg_line_length | float64 | 1.13 to 941k |
| max_line_length | int64 | 2 to 941k |
| alphanum_fraction | float64 | 0 to 1 |
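The rows below follow this schema. As a hedged sketch of how rows with these columns could be loaded and filtered with the Hugging Face `datasets` library (the dataset name is a placeholder, not taken from this dump):

```python
# Hypothetical sketch: "user/some-code-dataset" is a placeholder name.
from datasets import load_dataset

ds = load_dataset("user/some-code-dataset", split="train", streaming=True)

# Keep small, permissively licensed Python files, using the columns above.
small_py = ds.filter(
    lambda row: row["ext"] == "py"
    and row["size"] < 50_000
    and "Apache-2.0" in row["max_stars_repo_licenses"]
)

for row in small_py.take(3):
    print(row["max_stars_repo_path"], row["size"], row["alphanum_fraction"])
```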
hexsha: ce968e2059002cf3e3be84eff2c67f45da775689 | size: 4,590 | ext: py | lang: Python
max_stars: neuro_evolutional_net/main.py @ DominikSpiljak/Fuzzy-Evolutionary-and-Neuro-computing (fb0d54c35966ed0516c15519a6abcbffeb170f9b), licenses ["Apache-2.0"], count null, events null to null
max_issues: neuro_evolutional_net/main.py @ DominikSpiljak/Fuzzy-Evolutionary-and-Neuro-computing (fb0d54c35966ed0516c15519a6abcbffeb170f9b), licenses ["Apache-2.0"], count null, events null to null
max_forks: neuro_evolutional_net/main.py @ DominikSpiljak/Fuzzy-Evolutionary-and-Neuro-computing (fb0d54c35966ed0516c15519a6abcbffeb170f9b), licenses ["Apache-2.0"], count 1, events 2022-03-02T03:23:34.000Z to 2022-03-02T03:23:34.000Z
content:
from neural_net import NeuralNet
from dataset import Dataset
import numpy as np
import matplotlib.pyplot as plt
import genetics
from genetic_algorithm import GeneticAlgorithm
import time
def plot_data(dataset, save_file=None, neural_net=None, params=None, model_specs=None):
fig, axes = plt.subplots(1)
fig.set_size_inches(20, 15)
    fig.suptitle('Vizualizacija podataka')  # Croatian: "Data visualization"
if model_specs is not None:
fig.suptitle('Vizualizacija podataka, model: ({})'.format(
", ".join([k + ": " + str(v) for k, v in model_specs.items()])))
markers = [".", ",", "o"]
colors = ['r', 'g', 'b']
if params is None:
for i in range(dataset.size()):
x, y = dataset.get_sample(i)
label = np.argmax(y)
axes.scatter(x[0], x[1], marker=markers[label], c=colors[label])
else:
preds = neural_net.predict(neural_net.decode_params(params), dataset.X)
for i in range(dataset.size()):
x = dataset.X[i]
if preds[i] == np.argmax(dataset.y[i]):
axes.scatter(x[0], x[1], marker=markers[2], c=colors[2])
else:
axes.scatter(x[0], x[1], marker=markers[0], c=colors[0])
w_type_1, s_type_1, _, _ = neural_net.decode_params(params)
for i in range(int(model_specs['layers'].split(',')[1])):
axes.scatter(w_type_1[:, 0], w_type_1[:, 1],
marker="*", c=colors[1])
print('s = {}'.format(s_type_1))
if save_file is not None:
plt.savefig(save_file)
plt.show()
def test_neuron(w, s, save_file=None):
fig, axes = plt.subplots(1)
fig.set_size_inches(18, 10)
    fig.suptitle('Ovisnost izlaza neurona o parametru s')  # Croatian: "Neuron output as a function of the parameter s"
for s_ in sorted(s):
x = np.linspace(-8, 10, num=100)
y = 1 / (1 + np.abs(x - w) / s_)
axes.plot(x, y, label='s = {}'.format(s_))
axes.legend()
if save_file is not None:
plt.savefig(save_file)
plt.show()
def main():
dataset = Dataset('dataset.txt')
layers = [2, 8, 3]
neural_net = NeuralNet(layers, dataset)
#test_neuron(2, [1, 0.25, 4], save_file='test_neuron.png')
#plot_data(dataset, save_file='data_visualisation.png')
population_size = 40
num_iter = 5000000
k = 3
mutation_chooser_probs = [10, 3, 5]
mutation_prob = 0.05
genetic_algorithm = GeneticAlgorithm(population_generation=genetics.generate_population(neural_net.get_num_params(), population_size, neural_net),
num_iter=num_iter,
selection=genetics.tournament_selection(
population_size, k=k),
combination=genetics.cross_chooser(
[genetics.weight_recombination(neural_net),
genetics.simulated_binary_recombination(
neural_net),
genetics.whole_arithmetic_recombination(neural_net)]),
mutation=genetics.mutation_chooser([genetics.mutation_1(mutation_prob, 0.05),
genetics.mutation_1(
mutation_prob, 0.3),
genetics.mutation_2(mutation_prob, 1)],
probs=mutation_chooser_probs, neural_net=neural_net),
solution=genetics.solution(),
goal_error=1e-7)
start_time = time.time()
best = genetic_algorithm.evolution(neural_net)
print("--- {} seconds ---".format(time.time() - start_time))
best.save_individual("best_individual_{}_1.pickle".format(
''.join([str(layer) for layer in layers])))
print(neural_net.calculate_error(
[best.w_type_1, best.s_type_1, best.w, best.b]))
plot_data(dataset, neural_net=neural_net, model_specs={
"pop_size": population_size,
"num_iter": num_iter,
"k": k,
"mutation_chooser_probs": ', '.join([str(prob) for prob in mutation_chooser_probs]),
"mutation_prob": mutation_prob,
"layers": ', '.join([str(layer) for layer in layers])
},
params=best.value,
save_file="data_visualisation_with_neuron_weights_{}_1.png".format(
''.join([str(layer) for layer in layers])))
if __name__ == "__main__":
main()
"""import pickle
dataset = Dataset('dataset.txt')
layers = [2, 8, 4, 3]
neural_net = NeuralNet(layers, dataset)
with open('best_individual_2643_1.pickle', 'rb') as inp:
best = pickle.load(inp)
# neural_net.show(best.value, save_file='neural_net_283.png')
w_type_1, s_type_1, w, b = neural_net.decode_params(best.value)
print(w_type_1, s_type_1, w, b.T, sep="\n")
"""
avg_line_length: 35.859375 | max_line_length: 150 | alphanum_fraction: 0.613508
hexsha: dfe6d2f8f4edefcaf63e0ccf3067f482a126f067 | size: 4,295 | ext: py | lang: Python
max_stars: webapi_active_query_builder/models/query_column.py @ ActiveDbSoft/webapi-active-query-builder-python (81d65f454617d913d8d3b707283a42830c08192d), licenses ["Apache-2.0"], count 2, events 2018-11-01T18:03:04.000Z to 2020-03-21T17:34:51.000Z
max_issues: webapi_active_query_builder/models/query_column.py @ ActiveDbSoft/webapi-active-query-builder-python (81d65f454617d913d8d3b707283a42830c08192d), licenses ["Apache-2.0"], count null, events null to null
max_forks: webapi_active_query_builder/models/query_column.py @ ActiveDbSoft/webapi-active-query-builder-python (81d65f454617d913d8d3b707283a42830c08192d), licenses ["Apache-2.0"], count null, events null to null
content:
# coding: utf-8
"""
QueryBuilderApi
    Active Query Builder Web API lets you create, analyze and modify SQL queries for different database servers using RESTful HTTP requests to a cloud-based service. It requires SQL execution context (information about database schema and used database server) to be stored under the registered account at https://webapi.activequerybuilder.com/.
OpenAPI spec version: 1.1.8
Contact: support@activedbsoft.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pprint import pformat
from six import iteritems
import re
class QueryColumn(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, name=None, data_type=None):
"""
QueryColumn - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'name': 'str',
'data_type': 'str'
}
self.attribute_map = {
'name': 'name',
'data_type': 'dataType'
}
self._name = name
self._data_type = data_type
@property
def name(self):
"""
Gets the name of this QueryColumn.
Column name.
:return: The name of this QueryColumn.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this QueryColumn.
Column name.
:param name: The name of this QueryColumn.
:type: str
"""
self._name = name
@property
def data_type(self):
"""
Gets the data_type of this QueryColumn.
Data type.
:return: The data_type of this QueryColumn.
:rtype: str
"""
return self._data_type
@data_type.setter
def data_type(self, data_type):
"""
Sets the data_type of this QueryColumn.
Data type.
:param data_type: The data_type of this QueryColumn.
:type: str
"""
self._data_type = data_type
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
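A brief, hedged usage sketch for the generated model above (the import path mirrors the repository path shown for this row and may differ in a packaged install):

```python
# Assumes the module is importable under the repo path shown above.
from webapi_active_query_builder.models.query_column import QueryColumn

col = QueryColumn(name="customer_id", data_type="int")
print(col.to_dict())  # {'name': 'customer_id', 'data_type': 'int'}
print(col)            # same dict pretty-printed via __repr__ -> to_str()
```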
avg_line_length: 28.256579 | max_line_length: 341 | alphanum_fraction: 0.573458
hexsha: a0c24de2e3d0ebad15e015cd26ff42271cd50b89 | size: 4,125 | ext: py | lang: Python
max_stars: python-interface/test/vector/test_vector_slp_consecutive.py @ lukasmweber/spn-compiler (acab827e8b8df69d1a4e83a209e14f62bd8d967e), licenses ["Apache-2.0"], count 8, events 2021-07-07T17:19:16.000Z to 2022-03-30T06:08:44.000Z
max_issues: python-interface/test/vector/test_vector_slp_consecutive.py @ lukasmweber/spn-compiler (acab827e8b8df69d1a4e83a209e14f62bd8d967e), licenses ["Apache-2.0"], count 9, events 2021-06-01T15:03:19.000Z to 2021-11-19T02:48:35.000Z
max_forks: python-interface/test/vector/test_vector_slp_consecutive.py @ lukasmweber/spn-compiler (acab827e8b8df69d1a4e83a209e14f62bd8d967e), licenses ["Apache-2.0"], count 2, events 2021-07-07T17:19:36.000Z to 2022-02-28T15:08:36.000Z
content:
import numpy as np
import pytest
import time
from spn.algorithms.Inference import log_likelihood
from spn.structure.Base import Product, Sum
from spn.structure.leaves.parametric.Parametric import Gaussian
from spnc.cpu import CPUCompiler
@pytest.mark.skipif(not CPUCompiler.isVectorizationSupported(), reason="CPU vectorization not supported")
def test_vector_slp_consecutive():
g0 = Gaussian(mean=0.11, stdev=1, scope=0)
g1 = Gaussian(mean=0.12, stdev=0.75, scope=1)
g2 = Gaussian(mean=0.13, stdev=0.5, scope=2)
g3 = Gaussian(mean=0.14, stdev=0.25, scope=3)
g4 = Gaussian(mean=0.21, stdev=1, scope=4)
g5 = Gaussian(mean=0.22, stdev=0.75, scope=5)
g6 = Gaussian(mean=0.23, stdev=0.5, scope=6)
g7 = Gaussian(mean=0.24, stdev=0.25, scope=7)
g8 = Gaussian(mean=0.31, stdev=1, scope=8)
g9 = Gaussian(mean=0.32, stdev=0.75, scope=9)
g10 = Gaussian(mean=0.33, stdev=0.5, scope=10)
g11 = Gaussian(mean=0.34, stdev=0.25, scope=11)
g12 = Gaussian(mean=0.41, stdev=1, scope=12)
g13 = Gaussian(mean=0.42, stdev=0.75, scope=13)
g14 = Gaussian(mean=0.43, stdev=0.5, scope=14)
g15 = Gaussian(mean=0.44, stdev=0.25, scope=15)
p1 = Product(children=[g0, g4, g8, g12])
p2 = Product(children=[g1, g5, g6, g7])
p3 = Product(children=[g10, g9, g2, g11])
p4 = Product(children=[g15, g3, g14, g13])
spn = Sum(children=[p1, p2, p3, p4], weights=[0.25, 0.25, 0.25, 0.25])
# Randomly sample input values from Gaussian (normal) distributions.
num_samples = 100
inputs = np.column_stack((np.random.normal(loc=0.5, scale=1, size=num_samples),
np.random.normal(loc=0.125, scale=0.25, size=num_samples),
np.random.normal(loc=0.345, scale=0.24, size=num_samples),
np.random.normal(loc=0.456, scale=0.1, size=num_samples),
np.random.normal(loc=0.94, scale=0.48, size=num_samples),
np.random.normal(loc=0.56, scale=0.42, size=num_samples),
np.random.normal(loc=0.76, scale=0.14, size=num_samples),
np.random.normal(loc=0.32, scale=0.58, size=num_samples),
np.random.normal(loc=0.58, scale=0.219, size=num_samples),
np.random.normal(loc=0.14, scale=0.52, size=num_samples),
np.random.normal(loc=0.24, scale=0.42, size=num_samples),
np.random.normal(loc=0.34, scale=0.1, size=num_samples),
np.random.normal(loc=0.44, scale=0.9, size=num_samples),
np.random.normal(loc=0.54, scale=0.7, size=num_samples),
np.random.normal(loc=0.64, scale=0.5, size=num_samples),
np.random.normal(loc=0.74, scale=0.4, size=num_samples))).astype("float64")
# Compute the reference results using the inference from SPFlow.
reference = log_likelihood(spn, inputs)
reference = reference.reshape(num_samples)
# Compile the kernel with batch size 1 to enable SLP vectorization.
compiler = CPUCompiler(vectorize=True, computeInLogSpace=True, vectorLibrary="LIBMVEC")
kernel = compiler.compile_ll(spn=spn, batchSize=1, supportMarginal=False)
# Execute the compiled Kernel.
time_sum = 0
for i in range(len(reference)):
# Check the computation results against the reference
start = time.time()
result = compiler.execute(kernel, inputs=np.array([inputs[i]]))
time_sum = time_sum + time.time() - start
print(f"evaluation #{i}: result: {result[0]:16.8f}, reference: {reference[i]:16.8f}", end='\r')
if not np.isclose(result, reference[i]):
print(f"\nevaluation #{i} failed: result: {result[0]:16.8f}, reference: {reference[i]:16.8f}")
raise AssertionError()
print(f"\nExecution of {len(reference)} samples took {time_sum} seconds.")
if __name__ == "__main__":
test_vector_slp_consecutive()
print("COMPUTATION OK")
avg_line_length: 50.304878 | max_line_length: 106 | alphanum_fraction: 0.616
hexsha: 97574810905ea0c61eee2a539b4801b4a97a5b9b | size: 613 | ext: py | lang: Python
max_stars: revgraph/core/functions/operations/math/add.py @ shhoalex/revgraph (7060945aa46fbd9584861715f15b6fc8037ba53f), licenses ["MIT"], count 9, events 2020-06-27T07:01:00.000Z to 2020-10-23T13:50:04.000Z
max_issues: revgraph/core/functions/operations/math/add.py @ shinghinho/revgraph (7060945aa46fbd9584861715f15b6fc8037ba53f), licenses ["MIT"], count null, events null to null
max_forks: revgraph/core/functions/operations/math/add.py @ shinghinho/revgraph (7060945aa46fbd9584861715f15b6fc8037ba53f), licenses ["MIT"], count null, events null to null
content:
import numpy as np
from revgraph.core.functions.base.binary_function import BinaryFunction
class Add(BinaryFunction):
def apply(self,
a: np.ndarray,
b: np.ndarray) -> np.ndarray:
return a+b
def gradient_wrt_a(self,
gradient: np.ndarray,
a: np.ndarray,
b: np.ndarray) -> np.ndarray:
return gradient
def gradient_wrt_b(self,
gradient: np.ndarray,
a: np.ndarray,
b: np.ndarray) -> np.ndarray:
return gradient
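Worth spelling out why both gradient methods simply return `gradient`: for f(a, b) = a + b the partial derivatives with respect to a and b are both 1, so the chain rule passes the upstream gradient through unchanged. This is exact when a and b share a shape; with broadcasting the gradient would also need to be summed over the broadcast axes, which, if needed, would have to happen in the `BinaryFunction` base class not shown here. A tiny numeric sanity check of the pass-through case, independent of revgraph:

```python
# Finite-difference check that d(a + b)/da == d(a + b)/db == 1 elementwise,
# so the upstream gradient passes through Add unchanged.
import numpy as np

a = np.array([1.0, 2.0, 3.0])
b = np.array([10.0, 20.0, 30.0])
upstream = np.array([0.1, 0.2, 0.3])  # dL/d(a + b) arriving from downstream

eps = 1e-6
jac_diag_a = ((a + eps) + b - (a + b)) / eps  # approximately all ones
jac_diag_b = (a + (b + eps) - (a + b)) / eps  # approximately all ones

assert np.allclose(upstream * jac_diag_a, upstream, atol=1e-4)
assert np.allclose(upstream * jac_diag_b, upstream, atol=1e-4)
```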
avg_line_length: 26.652174 | max_line_length: 71 | alphanum_fraction: 0.508972
hexsha: 367ea52b3a2bcd919d35ce3230b07faf6507b5e9 | size: 39,878 | ext: py | lang: Python
max_stars: tests/components/generic_thermostat/test_climate.py @ alemuro/home-assistant (9b1315d8e55f0ca906c4c8a1b2ae8c2ea511dc90), licenses ["Apache-2.0"], count 3, events 2020-01-21T18:09:09.000Z to 2022-01-17T08:06:03.000Z
max_issues: tests/components/generic_thermostat/test_climate.py @ alemuro/home-assistant (9b1315d8e55f0ca906c4c8a1b2ae8c2ea511dc90), licenses ["Apache-2.0"], count 39, events 2016-12-16T12:40:34.000Z to 2017-02-13T17:53:42.000Z
max_forks: tests/components/generic_thermostat/test_climate.py @ alemuro/home-assistant (9b1315d8e55f0ca906c4c8a1b2ae8c2ea511dc90), licenses ["Apache-2.0"], count 3, events 2020-01-11T15:44:13.000Z to 2022-01-17T08:06:09.000Z
content:
"""The tests for the generic_thermostat."""
import datetime
from asynctest import mock
import pytest
import pytz
import voluptuous as vol
from homeassistant.components import input_boolean, switch
from homeassistant.components.climate.const import (
ATTR_PRESET_MODE,
DOMAIN,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_NONE,
)
from homeassistant.const import (
ATTR_TEMPERATURE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
import homeassistant.core as ha
from homeassistant.core import DOMAIN as HASS_DOMAIN, CoreState, State, callback
from homeassistant.setup import async_setup_component
from homeassistant.util.unit_system import METRIC_SYSTEM
from tests.common import assert_setup_component, mock_restore_cache
from tests.components.climate import common
ENTITY = "climate.test"
ENT_SENSOR = "sensor.test"
ENT_SWITCH = "switch.test"
HEAT_ENTITY = "climate.test_heat"
COOL_ENTITY = "climate.test_cool"
ATTR_AWAY_MODE = "away_mode"
MIN_TEMP = 3.0
MAX_TEMP = 65.0
TARGET_TEMP = 42.0
COLD_TOLERANCE = 0.5
HOT_TOLERANCE = 0.5
async def test_setup_missing_conf(hass):
"""Test set up heat_control with missing config values."""
config = {
"platform": "generic_thermostat",
"name": "test",
"target_sensor": ENT_SENSOR,
}
with assert_setup_component(0):
await async_setup_component(hass, "climate", {"climate": config})
async def test_valid_conf(hass):
"""Test set up generic_thermostat with valid config values."""
assert await async_setup_component(
hass,
"climate",
{
"climate": {
"platform": "generic_thermostat",
"name": "test",
"heater": ENT_SWITCH,
"target_sensor": ENT_SENSOR,
}
},
)
@pytest.fixture
def setup_comp_1(hass):
"""Initialize components."""
hass.config.units = METRIC_SYSTEM
assert hass.loop.run_until_complete(
async_setup_component(hass, "homeassistant", {})
)
async def test_heater_input_boolean(hass, setup_comp_1):
"""Test heater switching input_boolean."""
heater_switch = "input_boolean.test"
assert await async_setup_component(
hass, input_boolean.DOMAIN, {"input_boolean": {"test": None}}
)
assert await async_setup_component(
hass,
DOMAIN,
{
"climate": {
"platform": "generic_thermostat",
"name": "test",
"heater": heater_switch,
"target_sensor": ENT_SENSOR,
"initial_hvac_mode": HVAC_MODE_HEAT,
}
},
)
assert STATE_OFF == hass.states.get(heater_switch).state
_setup_sensor(hass, 18)
await hass.async_block_till_done()
await common.async_set_temperature(hass, 23)
assert STATE_ON == hass.states.get(heater_switch).state
async def test_heater_switch(hass, setup_comp_1):
"""Test heater switching test switch."""
platform = getattr(hass.components, "test.switch")
platform.init()
switch_1 = platform.DEVICES[1]
assert await async_setup_component(
hass, switch.DOMAIN, {"switch": {"platform": "test"}}
)
heater_switch = switch_1.entity_id
assert await async_setup_component(
hass,
DOMAIN,
{
"climate": {
"platform": "generic_thermostat",
"name": "test",
"heater": heater_switch,
"target_sensor": ENT_SENSOR,
"initial_hvac_mode": HVAC_MODE_HEAT,
}
},
)
await hass.async_block_till_done()
assert STATE_OFF == hass.states.get(heater_switch).state
_setup_sensor(hass, 18)
await common.async_set_temperature(hass, 23)
await hass.async_block_till_done()
assert STATE_ON == hass.states.get(heater_switch).state
def _setup_sensor(hass, temp):
"""Set up the test sensor."""
hass.states.async_set(ENT_SENSOR, temp)
@pytest.fixture
def setup_comp_2(hass):
"""Initialize components."""
hass.config.units = METRIC_SYSTEM
assert hass.loop.run_until_complete(
async_setup_component(
hass,
DOMAIN,
{
"climate": {
"platform": "generic_thermostat",
"name": "test",
"cold_tolerance": 2,
"hot_tolerance": 4,
"heater": ENT_SWITCH,
"target_sensor": ENT_SENSOR,
"away_temp": 16,
"initial_hvac_mode": HVAC_MODE_HEAT,
}
},
)
)
async def test_setup_defaults_to_unknown(hass):
"""Test the setting of defaults to unknown."""
hass.config.units = METRIC_SYSTEM
await async_setup_component(
hass,
DOMAIN,
{
"climate": {
"platform": "generic_thermostat",
"name": "test",
"cold_tolerance": 2,
"hot_tolerance": 4,
"heater": ENT_SWITCH,
"target_sensor": ENT_SENSOR,
"away_temp": 16,
}
},
)
assert HVAC_MODE_OFF == hass.states.get(ENTITY).state
async def test_default_setup_params(hass, setup_comp_2):
"""Test the setup with default parameters."""
state = hass.states.get(ENTITY)
assert 7 == state.attributes.get("min_temp")
assert 35 == state.attributes.get("max_temp")
assert 7 == state.attributes.get("temperature")
async def test_get_hvac_modes(hass, setup_comp_2):
"""Test that the operation list returns the correct modes."""
state = hass.states.get(ENTITY)
modes = state.attributes.get("hvac_modes")
assert [HVAC_MODE_HEAT, HVAC_MODE_OFF] == modes
async def test_set_target_temp(hass, setup_comp_2):
"""Test the setting of the target temperature."""
await common.async_set_temperature(hass, 30)
state = hass.states.get(ENTITY)
assert 30.0 == state.attributes.get("temperature")
with pytest.raises(vol.Invalid):
await common.async_set_temperature(hass, None)
state = hass.states.get(ENTITY)
assert 30.0 == state.attributes.get("temperature")
async def test_set_away_mode(hass, setup_comp_2):
"""Test the setting away mode."""
await common.async_set_temperature(hass, 23)
await common.async_set_preset_mode(hass, PRESET_AWAY)
state = hass.states.get(ENTITY)
assert 16 == state.attributes.get("temperature")
async def test_set_away_mode_and_restore_prev_temp(hass, setup_comp_2):
"""Test the setting and removing away mode.
Verify original temperature is restored.
"""
await common.async_set_temperature(hass, 23)
await common.async_set_preset_mode(hass, PRESET_AWAY)
state = hass.states.get(ENTITY)
assert 16 == state.attributes.get("temperature")
await common.async_set_preset_mode(hass, PRESET_NONE)
state = hass.states.get(ENTITY)
assert 23 == state.attributes.get("temperature")
async def test_set_away_mode_twice_and_restore_prev_temp(hass, setup_comp_2):
"""Test the setting away mode twice in a row.
Verify original temperature is restored.
"""
await common.async_set_temperature(hass, 23)
await common.async_set_preset_mode(hass, PRESET_AWAY)
await common.async_set_preset_mode(hass, PRESET_AWAY)
state = hass.states.get(ENTITY)
assert 16 == state.attributes.get("temperature")
await common.async_set_preset_mode(hass, PRESET_NONE)
state = hass.states.get(ENTITY)
assert 23 == state.attributes.get("temperature")
async def test_sensor_bad_value(hass, setup_comp_2):
"""Test sensor that have None as state."""
state = hass.states.get(ENTITY)
temp = state.attributes.get("current_temperature")
_setup_sensor(hass, None)
await hass.async_block_till_done()
state = hass.states.get(ENTITY)
assert temp == state.attributes.get("current_temperature")
async def test_set_target_temp_heater_on(hass, setup_comp_2):
"""Test if target temperature turn heater on."""
calls = _setup_switch(hass, False)
_setup_sensor(hass, 25)
await hass.async_block_till_done()
await common.async_set_temperature(hass, 30)
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_ON == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_set_target_temp_heater_off(hass, setup_comp_2):
"""Test if target temperature turn heater off."""
calls = _setup_switch(hass, True)
_setup_sensor(hass, 30)
await hass.async_block_till_done()
await common.async_set_temperature(hass, 25)
assert 2 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_OFF == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_temp_change_heater_on_within_tolerance(hass, setup_comp_2):
"""Test if temperature change doesn't turn on within tolerance."""
calls = _setup_switch(hass, False)
await common.async_set_temperature(hass, 30)
_setup_sensor(hass, 29)
await hass.async_block_till_done()
assert 0 == len(calls)
async def test_temp_change_heater_on_outside_tolerance(hass, setup_comp_2):
"""Test if temperature change turn heater on outside cold tolerance."""
calls = _setup_switch(hass, False)
await common.async_set_temperature(hass, 30)
_setup_sensor(hass, 27)
await hass.async_block_till_done()
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_ON == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_temp_change_heater_off_within_tolerance(hass, setup_comp_2):
"""Test if temperature change doesn't turn off within tolerance."""
calls = _setup_switch(hass, True)
await common.async_set_temperature(hass, 30)
_setup_sensor(hass, 33)
await hass.async_block_till_done()
assert 0 == len(calls)
async def test_temp_change_heater_off_outside_tolerance(hass, setup_comp_2):
"""Test if temperature change turn heater off outside hot tolerance."""
calls = _setup_switch(hass, True)
await common.async_set_temperature(hass, 30)
_setup_sensor(hass, 35)
await hass.async_block_till_done()
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_OFF == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_running_when_hvac_mode_is_off(hass, setup_comp_2):
"""Test that the switch turns off when enabled is set False."""
calls = _setup_switch(hass, True)
await common.async_set_temperature(hass, 30)
await common.async_set_hvac_mode(hass, HVAC_MODE_OFF)
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_OFF == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_no_state_change_when_hvac_mode_off(hass, setup_comp_2):
"""Test that the switch doesn't turn on when enabled is False."""
calls = _setup_switch(hass, False)
await common.async_set_temperature(hass, 30)
await common.async_set_hvac_mode(hass, HVAC_MODE_OFF)
_setup_sensor(hass, 25)
await hass.async_block_till_done()
assert 0 == len(calls)
async def test_hvac_mode_heat(hass, setup_comp_2):
"""Test change mode from OFF to HEAT.
Switch turns on when temp below setpoint and mode changes.
"""
await common.async_set_hvac_mode(hass, HVAC_MODE_OFF)
await common.async_set_temperature(hass, 30)
_setup_sensor(hass, 25)
await hass.async_block_till_done()
calls = _setup_switch(hass, False)
await common.async_set_hvac_mode(hass, HVAC_MODE_HEAT)
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_ON == call.service
assert ENT_SWITCH == call.data["entity_id"]
def _setup_switch(hass, is_on):
"""Set up the test switch."""
hass.states.async_set(ENT_SWITCH, STATE_ON if is_on else STATE_OFF)
calls = []
@callback
def log_call(call):
"""Log service calls."""
calls.append(call)
hass.services.async_register(ha.DOMAIN, SERVICE_TURN_ON, log_call)
hass.services.async_register(ha.DOMAIN, SERVICE_TURN_OFF, log_call)
return calls
@pytest.fixture
def setup_comp_3(hass):
"""Initialize components."""
hass.config.temperature_unit = TEMP_CELSIUS
assert hass.loop.run_until_complete(
async_setup_component(
hass,
DOMAIN,
{
"climate": {
"platform": "generic_thermostat",
"name": "test",
"cold_tolerance": 2,
"hot_tolerance": 4,
"away_temp": 30,
"heater": ENT_SWITCH,
"target_sensor": ENT_SENSOR,
"ac_mode": True,
"initial_hvac_mode": HVAC_MODE_COOL,
}
},
)
)
async def test_set_target_temp_ac_off(hass, setup_comp_3):
"""Test if target temperature turn ac off."""
calls = _setup_switch(hass, True)
_setup_sensor(hass, 25)
await hass.async_block_till_done()
await common.async_set_temperature(hass, 30)
assert 2 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_OFF == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_turn_away_mode_on_cooling(hass, setup_comp_3):
"""Test the setting away mode when cooling."""
_setup_switch(hass, True)
_setup_sensor(hass, 25)
await hass.async_block_till_done()
await common.async_set_temperature(hass, 19)
await common.async_set_preset_mode(hass, PRESET_AWAY)
state = hass.states.get(ENTITY)
assert 30 == state.attributes.get("temperature")
async def test_hvac_mode_cool(hass, setup_comp_3):
"""Test change mode from OFF to COOL.
Switch turns on when temp below setpoint and mode changes.
"""
await common.async_set_hvac_mode(hass, HVAC_MODE_OFF)
await common.async_set_temperature(hass, 25)
_setup_sensor(hass, 30)
await hass.async_block_till_done()
calls = _setup_switch(hass, False)
await common.async_set_hvac_mode(hass, HVAC_MODE_COOL)
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_ON == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_set_target_temp_ac_on(hass, setup_comp_3):
"""Test if target temperature turn ac on."""
calls = _setup_switch(hass, False)
_setup_sensor(hass, 30)
await hass.async_block_till_done()
await common.async_set_temperature(hass, 25)
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_ON == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_temp_change_ac_off_within_tolerance(hass, setup_comp_3):
"""Test if temperature change doesn't turn ac off within tolerance."""
calls = _setup_switch(hass, True)
await common.async_set_temperature(hass, 30)
_setup_sensor(hass, 29.8)
await hass.async_block_till_done()
assert 0 == len(calls)
async def test_set_temp_change_ac_off_outside_tolerance(hass, setup_comp_3):
"""Test if temperature change turn ac off."""
calls = _setup_switch(hass, True)
await common.async_set_temperature(hass, 30)
_setup_sensor(hass, 27)
await hass.async_block_till_done()
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_OFF == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_temp_change_ac_on_within_tolerance(hass, setup_comp_3):
"""Test if temperature change doesn't turn ac on within tolerance."""
calls = _setup_switch(hass, False)
await common.async_set_temperature(hass, 25)
_setup_sensor(hass, 25.2)
await hass.async_block_till_done()
assert 0 == len(calls)
async def test_temp_change_ac_on_outside_tolerance(hass, setup_comp_3):
"""Test if temperature change turn ac on."""
calls = _setup_switch(hass, False)
await common.async_set_temperature(hass, 25)
_setup_sensor(hass, 30)
await hass.async_block_till_done()
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_ON == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_running_when_operating_mode_is_off_2(hass, setup_comp_3):
"""Test that the switch turns off when enabled is set False."""
calls = _setup_switch(hass, True)
await common.async_set_temperature(hass, 30)
await common.async_set_hvac_mode(hass, HVAC_MODE_OFF)
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_OFF == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_no_state_change_when_operation_mode_off_2(hass, setup_comp_3):
"""Test that the switch doesn't turn on when enabled is False."""
calls = _setup_switch(hass, False)
await common.async_set_temperature(hass, 30)
await common.async_set_hvac_mode(hass, HVAC_MODE_OFF)
_setup_sensor(hass, 35)
await hass.async_block_till_done()
assert 0 == len(calls)
@pytest.fixture
def setup_comp_4(hass):
"""Initialize components."""
hass.config.temperature_unit = TEMP_CELSIUS
assert hass.loop.run_until_complete(
async_setup_component(
hass,
DOMAIN,
{
"climate": {
"platform": "generic_thermostat",
"name": "test",
"cold_tolerance": 0.3,
"hot_tolerance": 0.3,
"heater": ENT_SWITCH,
"target_sensor": ENT_SENSOR,
"ac_mode": True,
"min_cycle_duration": datetime.timedelta(minutes=10),
"initial_hvac_mode": HVAC_MODE_COOL,
}
},
)
)
async def test_temp_change_ac_trigger_on_not_long_enough(hass, setup_comp_4):
"""Test if temperature change turn ac on."""
calls = _setup_switch(hass, False)
await common.async_set_temperature(hass, 25)
_setup_sensor(hass, 30)
await hass.async_block_till_done()
assert 0 == len(calls)
async def test_temp_change_ac_trigger_on_long_enough(hass, setup_comp_4):
"""Test if temperature change turn ac on."""
fake_changed = datetime.datetime(
1918, 11, 11, 11, 11, 11, tzinfo=datetime.timezone.utc
)
with mock.patch(
"homeassistant.helpers.condition.dt_util.utcnow", return_value=fake_changed
):
calls = _setup_switch(hass, False)
await common.async_set_temperature(hass, 25)
_setup_sensor(hass, 30)
await hass.async_block_till_done()
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_ON == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_temp_change_ac_trigger_off_not_long_enough(hass, setup_comp_4):
"""Test if temperature change turn ac on."""
calls = _setup_switch(hass, True)
await common.async_set_temperature(hass, 30)
_setup_sensor(hass, 25)
await hass.async_block_till_done()
assert 0 == len(calls)
async def test_temp_change_ac_trigger_off_long_enough(hass, setup_comp_4):
"""Test if temperature change turn ac on."""
fake_changed = datetime.datetime(
1918, 11, 11, 11, 11, 11, tzinfo=datetime.timezone.utc
)
with mock.patch(
"homeassistant.helpers.condition.dt_util.utcnow", return_value=fake_changed
):
calls = _setup_switch(hass, True)
await common.async_set_temperature(hass, 30)
_setup_sensor(hass, 25)
await hass.async_block_till_done()
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_OFF == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_mode_change_ac_trigger_off_not_long_enough(hass, setup_comp_4):
"""Test if mode change turns ac off despite minimum cycle."""
calls = _setup_switch(hass, True)
await common.async_set_temperature(hass, 30)
_setup_sensor(hass, 25)
await hass.async_block_till_done()
assert 0 == len(calls)
await common.async_set_hvac_mode(hass, HVAC_MODE_OFF)
assert 1 == len(calls)
call = calls[0]
assert "homeassistant" == call.domain
assert SERVICE_TURN_OFF == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_mode_change_ac_trigger_on_not_long_enough(hass, setup_comp_4):
"""Test if mode change turns ac on despite minimum cycle."""
calls = _setup_switch(hass, False)
await common.async_set_temperature(hass, 25)
_setup_sensor(hass, 30)
await hass.async_block_till_done()
assert 0 == len(calls)
await common.async_set_hvac_mode(hass, HVAC_MODE_HEAT)
assert 1 == len(calls)
call = calls[0]
assert "homeassistant" == call.domain
assert SERVICE_TURN_ON == call.service
assert ENT_SWITCH == call.data["entity_id"]
@pytest.fixture
def setup_comp_5(hass):
"""Initialize components."""
hass.config.temperature_unit = TEMP_CELSIUS
assert hass.loop.run_until_complete(
async_setup_component(
hass,
DOMAIN,
{
"climate": {
"platform": "generic_thermostat",
"name": "test",
"cold_tolerance": 0.3,
"hot_tolerance": 0.3,
"heater": ENT_SWITCH,
"target_sensor": ENT_SENSOR,
"ac_mode": True,
"min_cycle_duration": datetime.timedelta(minutes=10),
"initial_hvac_mode": HVAC_MODE_COOL,
}
},
)
)
async def test_temp_change_ac_trigger_on_not_long_enough_2(hass, setup_comp_5):
"""Test if temperature change turn ac on."""
calls = _setup_switch(hass, False)
await common.async_set_temperature(hass, 25)
_setup_sensor(hass, 30)
await hass.async_block_till_done()
assert 0 == len(calls)
async def test_temp_change_ac_trigger_on_long_enough_2(hass, setup_comp_5):
"""Test if temperature change turn ac on."""
fake_changed = datetime.datetime(
1918, 11, 11, 11, 11, 11, tzinfo=datetime.timezone.utc
)
with mock.patch(
"homeassistant.helpers.condition.dt_util.utcnow", return_value=fake_changed
):
calls = _setup_switch(hass, False)
await common.async_set_temperature(hass, 25)
_setup_sensor(hass, 30)
await hass.async_block_till_done()
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_ON == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_temp_change_ac_trigger_off_not_long_enough_2(hass, setup_comp_5):
"""Test if temperature change turn ac on."""
calls = _setup_switch(hass, True)
await common.async_set_temperature(hass, 30)
_setup_sensor(hass, 25)
await hass.async_block_till_done()
assert 0 == len(calls)
async def test_temp_change_ac_trigger_off_long_enough_2(hass, setup_comp_5):
"""Test if temperature change turn ac on."""
fake_changed = datetime.datetime(
1918, 11, 11, 11, 11, 11, tzinfo=datetime.timezone.utc
)
with mock.patch(
"homeassistant.helpers.condition.dt_util.utcnow", return_value=fake_changed
):
calls = _setup_switch(hass, True)
await common.async_set_temperature(hass, 30)
_setup_sensor(hass, 25)
await hass.async_block_till_done()
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_OFF == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_mode_change_ac_trigger_off_not_long_enough_2(hass, setup_comp_5):
"""Test if mode change turns ac off despite minimum cycle."""
calls = _setup_switch(hass, True)
await common.async_set_temperature(hass, 30)
_setup_sensor(hass, 25)
await hass.async_block_till_done()
assert 0 == len(calls)
await common.async_set_hvac_mode(hass, HVAC_MODE_OFF)
assert 1 == len(calls)
call = calls[0]
assert "homeassistant" == call.domain
assert SERVICE_TURN_OFF == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_mode_change_ac_trigger_on_not_long_enough_2(hass, setup_comp_5):
"""Test if mode change turns ac on despite minimum cycle."""
calls = _setup_switch(hass, False)
await common.async_set_temperature(hass, 25)
_setup_sensor(hass, 30)
await hass.async_block_till_done()
assert 0 == len(calls)
await common.async_set_hvac_mode(hass, HVAC_MODE_HEAT)
assert 1 == len(calls)
call = calls[0]
assert "homeassistant" == call.domain
assert SERVICE_TURN_ON == call.service
assert ENT_SWITCH == call.data["entity_id"]
@pytest.fixture
def setup_comp_6(hass):
"""Initialize components."""
hass.config.temperature_unit = TEMP_CELSIUS
assert hass.loop.run_until_complete(
async_setup_component(
hass,
DOMAIN,
{
"climate": {
"platform": "generic_thermostat",
"name": "test",
"cold_tolerance": 0.3,
"hot_tolerance": 0.3,
"heater": ENT_SWITCH,
"target_sensor": ENT_SENSOR,
"min_cycle_duration": datetime.timedelta(minutes=10),
"initial_hvac_mode": HVAC_MODE_HEAT,
}
},
)
)
async def test_temp_change_heater_trigger_off_not_long_enough(hass, setup_comp_6):
"""Test if temp change doesn't turn heater off because of time."""
calls = _setup_switch(hass, True)
await common.async_set_temperature(hass, 25)
_setup_sensor(hass, 30)
await hass.async_block_till_done()
assert 0 == len(calls)
async def test_temp_change_heater_trigger_on_not_long_enough(hass, setup_comp_6):
"""Test if temp change doesn't turn heater on because of time."""
calls = _setup_switch(hass, False)
await common.async_set_temperature(hass, 30)
_setup_sensor(hass, 25)
await hass.async_block_till_done()
assert 0 == len(calls)
async def test_temp_change_heater_trigger_on_long_enough(hass, setup_comp_6):
"""Test if temperature change turn heater on after min cycle."""
fake_changed = datetime.datetime(
1918, 11, 11, 11, 11, 11, tzinfo=datetime.timezone.utc
)
with mock.patch(
"homeassistant.helpers.condition.dt_util.utcnow", return_value=fake_changed
):
calls = _setup_switch(hass, False)
await common.async_set_temperature(hass, 30)
_setup_sensor(hass, 25)
await hass.async_block_till_done()
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_ON == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_temp_change_heater_trigger_off_long_enough(hass, setup_comp_6):
"""Test if temperature change turn heater off after min cycle."""
fake_changed = datetime.datetime(
1918, 11, 11, 11, 11, 11, tzinfo=datetime.timezone.utc
)
with mock.patch(
"homeassistant.helpers.condition.dt_util.utcnow", return_value=fake_changed
):
calls = _setup_switch(hass, True)
await common.async_set_temperature(hass, 25)
_setup_sensor(hass, 30)
await hass.async_block_till_done()
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_OFF == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_mode_change_heater_trigger_off_not_long_enough(hass, setup_comp_6):
"""Test if mode change turns heater off despite minimum cycle."""
calls = _setup_switch(hass, True)
await common.async_set_temperature(hass, 25)
_setup_sensor(hass, 30)
await hass.async_block_till_done()
assert 0 == len(calls)
await common.async_set_hvac_mode(hass, HVAC_MODE_OFF)
assert 1 == len(calls)
call = calls[0]
assert "homeassistant" == call.domain
assert SERVICE_TURN_OFF == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_mode_change_heater_trigger_on_not_long_enough(hass, setup_comp_6):
"""Test if mode change turns heater on despite minimum cycle."""
calls = _setup_switch(hass, False)
await common.async_set_temperature(hass, 30)
_setup_sensor(hass, 25)
await hass.async_block_till_done()
assert 0 == len(calls)
await common.async_set_hvac_mode(hass, HVAC_MODE_HEAT)
assert 1 == len(calls)
call = calls[0]
assert "homeassistant" == call.domain
assert SERVICE_TURN_ON == call.service
assert ENT_SWITCH == call.data["entity_id"]
@pytest.fixture
def setup_comp_7(hass):
"""Initialize components."""
hass.config.temperature_unit = TEMP_CELSIUS
assert hass.loop.run_until_complete(
async_setup_component(
hass,
DOMAIN,
{
"climate": {
"platform": "generic_thermostat",
"name": "test",
"cold_tolerance": 0.3,
"hot_tolerance": 0.3,
"heater": ENT_SWITCH,
"target_temp": 25,
"target_sensor": ENT_SENSOR,
"ac_mode": True,
"min_cycle_duration": datetime.timedelta(minutes=15),
"keep_alive": datetime.timedelta(minutes=10),
"initial_hvac_mode": HVAC_MODE_COOL,
}
},
)
)
async def test_temp_change_ac_trigger_on_long_enough_3(hass, setup_comp_7):
"""Test if turn on signal is sent at keep-alive intervals."""
calls = _setup_switch(hass, True)
await hass.async_block_till_done()
_setup_sensor(hass, 30)
await hass.async_block_till_done()
await common.async_set_temperature(hass, 25)
test_time = datetime.datetime.now(pytz.UTC)
_send_time_changed(hass, test_time)
await hass.async_block_till_done()
assert 0 == len(calls)
_send_time_changed(hass, test_time + datetime.timedelta(minutes=5))
await hass.async_block_till_done()
assert 0 == len(calls)
_send_time_changed(hass, test_time + datetime.timedelta(minutes=10))
await hass.async_block_till_done()
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_ON == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_temp_change_ac_trigger_off_long_enough_3(hass, setup_comp_7):
"""Test if turn on signal is sent at keep-alive intervals."""
calls = _setup_switch(hass, False)
await hass.async_block_till_done()
_setup_sensor(hass, 20)
await hass.async_block_till_done()
await common.async_set_temperature(hass, 25)
test_time = datetime.datetime.now(pytz.UTC)
_send_time_changed(hass, test_time)
await hass.async_block_till_done()
assert 0 == len(calls)
_send_time_changed(hass, test_time + datetime.timedelta(minutes=5))
await hass.async_block_till_done()
assert 0 == len(calls)
_send_time_changed(hass, test_time + datetime.timedelta(minutes=10))
await hass.async_block_till_done()
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_OFF == call.service
assert ENT_SWITCH == call.data["entity_id"]
def _send_time_changed(hass, now):
"""Send a time changed event."""
hass.bus.async_fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: now})
@pytest.fixture
def setup_comp_8(hass):
"""Initialize components."""
hass.config.temperature_unit = TEMP_CELSIUS
assert hass.loop.run_until_complete(
async_setup_component(
hass,
DOMAIN,
{
"climate": {
"platform": "generic_thermostat",
"name": "test",
"cold_tolerance": 0.3,
"hot_tolerance": 0.3,
"target_temp": 25,
"heater": ENT_SWITCH,
"target_sensor": ENT_SENSOR,
"min_cycle_duration": datetime.timedelta(minutes=15),
"keep_alive": datetime.timedelta(minutes=10),
"initial_hvac_mode": HVAC_MODE_HEAT,
}
},
)
)
async def test_temp_change_heater_trigger_on_long_enough_2(hass, setup_comp_8):
"""Test if turn on signal is sent at keep-alive intervals."""
calls = _setup_switch(hass, True)
await hass.async_block_till_done()
_setup_sensor(hass, 20)
await hass.async_block_till_done()
await common.async_set_temperature(hass, 25)
test_time = datetime.datetime.now(pytz.UTC)
_send_time_changed(hass, test_time)
await hass.async_block_till_done()
assert 0 == len(calls)
_send_time_changed(hass, test_time + datetime.timedelta(minutes=5))
await hass.async_block_till_done()
assert 0 == len(calls)
_send_time_changed(hass, test_time + datetime.timedelta(minutes=10))
await hass.async_block_till_done()
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_ON == call.service
assert ENT_SWITCH == call.data["entity_id"]
async def test_temp_change_heater_trigger_off_long_enough_2(hass, setup_comp_8):
"""Test if turn on signal is sent at keep-alive intervals."""
calls = _setup_switch(hass, False)
await hass.async_block_till_done()
_setup_sensor(hass, 30)
await hass.async_block_till_done()
await common.async_set_temperature(hass, 25)
test_time = datetime.datetime.now(pytz.UTC)
_send_time_changed(hass, test_time)
await hass.async_block_till_done()
assert 0 == len(calls)
_send_time_changed(hass, test_time + datetime.timedelta(minutes=5))
await hass.async_block_till_done()
assert 0 == len(calls)
_send_time_changed(hass, test_time + datetime.timedelta(minutes=10))
await hass.async_block_till_done()
assert 1 == len(calls)
call = calls[0]
assert HASS_DOMAIN == call.domain
assert SERVICE_TURN_OFF == call.service
assert ENT_SWITCH == call.data["entity_id"]
@pytest.fixture
def setup_comp_9(hass):
"""Initialize components."""
hass.config.temperature_unit = TEMP_FAHRENHEIT
assert hass.loop.run_until_complete(
async_setup_component(
hass,
DOMAIN,
{
"climate": {
"platform": "generic_thermostat",
"name": "test",
"cold_tolerance": 0.3,
"hot_tolerance": 0.3,
"target_temp": 25,
"heater": ENT_SWITCH,
"target_sensor": ENT_SENSOR,
"min_cycle_duration": datetime.timedelta(minutes=15),
"keep_alive": datetime.timedelta(minutes=10),
"precision": 0.1,
}
},
)
)
async def test_precision(hass, setup_comp_9):
"""Test that setting precision to tenths works as intended."""
await common.async_set_temperature(hass, 23.27)
state = hass.states.get(ENTITY)
assert 23.3 == state.attributes.get("temperature")
async def test_custom_setup_params(hass):
"""Test the setup with custom parameters."""
result = await async_setup_component(
hass,
DOMAIN,
{
"climate": {
"platform": "generic_thermostat",
"name": "test",
"heater": ENT_SWITCH,
"target_sensor": ENT_SENSOR,
"min_temp": MIN_TEMP,
"max_temp": MAX_TEMP,
"target_temp": TARGET_TEMP,
}
},
)
assert result
state = hass.states.get(ENTITY)
assert state.attributes.get("min_temp") == MIN_TEMP
assert state.attributes.get("max_temp") == MAX_TEMP
assert state.attributes.get("temperature") == TARGET_TEMP
async def test_restore_state(hass):
"""Ensure states are restored on startup."""
mock_restore_cache(
hass,
(
State(
"climate.test_thermostat",
HVAC_MODE_OFF,
{ATTR_TEMPERATURE: "20", ATTR_PRESET_MODE: PRESET_AWAY},
),
),
)
hass.state = CoreState.starting
await async_setup_component(
hass,
DOMAIN,
{
"climate": {
"platform": "generic_thermostat",
"name": "test_thermostat",
"heater": ENT_SWITCH,
"target_sensor": ENT_SENSOR,
"away_temp": 14,
}
},
)
state = hass.states.get("climate.test_thermostat")
assert state.attributes[ATTR_TEMPERATURE] == 20
assert state.attributes[ATTR_PRESET_MODE] == PRESET_AWAY
assert state.state == HVAC_MODE_OFF
async def test_no_restore_state(hass):
"""Ensure states are restored on startup if they exist.
Allows for graceful reboot.
"""
mock_restore_cache(
hass,
(
State(
"climate.test_thermostat",
HVAC_MODE_OFF,
{ATTR_TEMPERATURE: "20", ATTR_PRESET_MODE: PRESET_AWAY},
),
),
)
hass.state = CoreState.starting
await async_setup_component(
hass,
DOMAIN,
{
"climate": {
"platform": "generic_thermostat",
"name": "test_thermostat",
"heater": ENT_SWITCH,
"target_sensor": ENT_SENSOR,
"target_temp": 22,
}
},
)
state = hass.states.get("climate.test_thermostat")
assert state.attributes[ATTR_TEMPERATURE] == 22
assert state.state == HVAC_MODE_OFF
async def test_restore_state_uncoherence_case(hass):
"""
Test restore from a strange state.
- Turn the generic thermostat off
- Restart HA and restore state from DB
"""
_mock_restore_cache(hass, temperature=20)
calls = _setup_switch(hass, False)
_setup_sensor(hass, 15)
await _setup_climate(hass)
await hass.async_block_till_done()
state = hass.states.get(ENTITY)
assert 20 == state.attributes[ATTR_TEMPERATURE]
assert HVAC_MODE_OFF == state.state
assert 0 == len(calls)
calls = _setup_switch(hass, False)
await hass.async_block_till_done()
state = hass.states.get(ENTITY)
assert HVAC_MODE_OFF == state.state
async def _setup_climate(hass):
assert await async_setup_component(
hass,
DOMAIN,
{
"climate": {
"platform": "generic_thermostat",
"name": "test",
"cold_tolerance": 2,
"hot_tolerance": 4,
"away_temp": 30,
"heater": ENT_SWITCH,
"target_sensor": ENT_SENSOR,
"ac_mode": True,
}
},
)
def _mock_restore_cache(hass, temperature=20, hvac_mode=HVAC_MODE_OFF):
mock_restore_cache(
hass,
(
State(
ENTITY,
hvac_mode,
{ATTR_TEMPERATURE: str(temperature), ATTR_PRESET_MODE: PRESET_AWAY},
),
),
)
avg_line_length: 32.90264 | max_line_length: 84 | alphanum_fraction: 0.655274
hexsha: 2f66327295bb144eb1167b64225a2c5331daaf87 | size: 4,348 | ext: py | lang: Python
max_stars: rest_framework_simplejwt/authentication.py @ atomjuice/django-rest-framework-simplejwt (0fd42091ecaf7033f66fd8a9c59951da887051c2), licenses ["MIT"], count null, events null to null
max_issues: rest_framework_simplejwt/authentication.py @ atomjuice/django-rest-framework-simplejwt (0fd42091ecaf7033f66fd8a9c59951da887051c2), licenses ["MIT"], count null, events null to null
max_forks: rest_framework_simplejwt/authentication.py @ atomjuice/django-rest-framework-simplejwt (0fd42091ecaf7033f66fd8a9c59951da887051c2), licenses ["MIT"], count null, events null to null
content:
from django.contrib.auth import get_user_model
from django.utils.translation import gettext_lazy as _
from rest_framework import HTTP_HEADER_ENCODING, authentication
from .exceptions import AuthenticationFailed, InvalidToken, TokenError
from .models import TokenUser
from .settings import api_settings
AUTH_HEADER_TYPES = api_settings.AUTH_HEADER_TYPES
if not isinstance(api_settings.AUTH_HEADER_TYPES, (list, tuple)):
AUTH_HEADER_TYPES = (AUTH_HEADER_TYPES,)
AUTH_HEADER_TYPE_BYTES = set(
h.encode(HTTP_HEADER_ENCODING)
for h in AUTH_HEADER_TYPES
)
class JWTAuthentication(authentication.BaseAuthentication):
"""
An authentication plugin that authenticates requests through a JSON web
token provided in a request header.
"""
www_authenticate_realm = 'api'
def __init__(self):
self.user_model = get_user_model()
def authenticate(self, request):
header = self.get_header(request)
if header is None:
return None
raw_token = self.get_raw_token(header)
if raw_token is None:
return None
validated_token = self.get_validated_token(raw_token)
return self.get_user(validated_token), validated_token
def authenticate_header(self, request):
return '{0} realm="{1}"'.format(
AUTH_HEADER_TYPES[0],
self.www_authenticate_realm,
)
def get_header(self, request):
"""
Extracts the header containing the JSON web token from the given
request.
"""
header = request.META.get('HTTP_AUTHORIZATION')
if isinstance(header, str):
# Work around django test client oddness
header = header.encode(HTTP_HEADER_ENCODING)
return header
def get_raw_token(self, header):
"""
Extracts an unvalidated JSON web token from the given "Authorization"
header value.
"""
parts = header.split()
if len(parts) == 0:
# Empty AUTHORIZATION header sent
return None
if parts[0] not in AUTH_HEADER_TYPE_BYTES:
# Assume the header does not contain a JSON web token
return None
if len(parts) != 2:
raise AuthenticationFailed(
_('Authorization header must contain two space-delimited values'),
code='bad_authorization_header',
)
return parts[1]
def get_validated_token(self, raw_token):
"""
Validates an encoded JSON web token and returns a validated token
wrapper object.
"""
messages = []
for AuthToken in api_settings.AUTH_TOKEN_CLASSES:
try:
return AuthToken(raw_token)
except TokenError as e:
messages.append({'token_class': AuthToken.__name__,
'token_type': AuthToken.token_type,
'message': e.args[0]})
raise InvalidToken({
'detail': _('Given token not valid for any token type'),
'messages': messages,
})
def get_user(self, validated_token):
"""
Attempts to find and return a user using the given validated token.
"""
try:
user_id = validated_token[api_settings.USER_ID_CLAIM]
except KeyError:
raise InvalidToken(_('Token contained no recognizable user identification'))
try:
user = self.user_model.objects.get(**{api_settings.USER_ID_FIELD: user_id})
except self.user_model.DoesNotExist:
raise AuthenticationFailed(_('User not found'), code='user_not_found')
if not user.is_active:
raise AuthenticationFailed(_('User is inactive'), code='user_inactive')
return user
class JWTTokenUserAuthentication(JWTAuthentication):
def get_user(self, validated_token):
"""
Returns a stateless user object which is backed by the given validated
token.
"""
if api_settings.USER_ID_CLAIM not in validated_token:
# The TokenUser class assumes tokens will have a recognizable user
# identifier claim.
raise InvalidToken(_('Token contained no recognizable user identification'))
return TokenUser(validated_token)
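For context, `JWTAuthentication` is typically enabled through Django REST Framework's `DEFAULT_AUTHENTICATION_CLASSES` setting; a minimal settings sketch (project specifics omitted):

```python
# settings.py sketch: use JWTAuthentication for all DRF views by default.
REST_FRAMEWORK = {
    "DEFAULT_AUTHENTICATION_CLASSES": (
        "rest_framework_simplejwt.authentication.JWTAuthentication",
    ),
}

# Clients then authenticate with an "Authorization: <type> <token>" header,
# where the accepted <type> values come from api_settings.AUTH_HEADER_TYPES
# ("Bearer" by default).
```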
avg_line_length: 31.970588 | max_line_length: 88 | alphanum_fraction: 0.638224
hexsha: f1bc14a2e9998dc3fcabd2b96f78b07b4fb02dd2 | size: 25,152 | ext: py | lang: Python
max_stars: tensorflow_probability/python/distributions/power_spherical_test.py @ mayou36/probability (f185c852146894af6dc02223020413bf26ecdd5c), licenses ["Apache-2.0"], count null, events null to null
max_issues: tensorflow_probability/python/distributions/power_spherical_test.py @ mayou36/probability (f185c852146894af6dc02223020413bf26ecdd5c), licenses ["Apache-2.0"], count null, events null to null
max_forks: tensorflow_probability/python/distributions/power_spherical_test.py @ mayou36/probability (f185c852146894af6dc02223020413bf26ecdd5c), licenses ["Apache-2.0"], count null, events null to null
content:
# Copyright 2020 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for multivariate Power Spherical distribution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
from scipy import special as sp_special
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import tensorshape_util
from tensorflow_probability.python.internal import test_util
class _PowerSphericalTest(object):
def testReproducibleGraph(self):
pspherical = tfp.distributions.PowerSpherical(
mean_direction=tf.math.l2_normalize(np.array(
[1., 2.], dtype=self.dtype)),
concentration=self.dtype(1.2))
seed = test_util.test_seed()
s1 = self.evaluate(pspherical.sample(50, seed=seed))
if tf.executing_eagerly():
tf.random.set_seed(seed)
s2 = self.evaluate(pspherical.sample(50, seed=seed))
self.assertAllEqual(s1, s2)
def VerifySampleMean(self, mean_dirs, concentration, batch_shape):
pspherical = tfp.distributions.PowerSpherical(
mean_direction=mean_dirs,
concentration=concentration,
validate_args=True,
allow_nan_stats=False)
self.assertEqual([mean_dirs.shape[-1]],
tensorshape_util.as_list(pspherical.event_shape))
self.assertEqual(
batch_shape,
tensorshape_util.as_list(pspherical.batch_shape))
nsamples = int(5e4)
samples = pspherical.sample(nsamples, seed=test_util.test_seed())
self.assertEqual([nsamples] + batch_shape + [mean_dirs.shape[-1]],
tensorshape_util.as_list(samples.shape))
sample_mean = self.evaluate(samples).mean(axis=0)
sample_dir = (
sample_mean / np.linalg.norm(sample_mean, axis=-1, keepdims=True))
# Assert that positive-concentration distributions have samples with
# the expected mean direction.
inner_product = self.evaluate(
tf.reduce_sum(sample_dir * pspherical.mean_direction, axis=-1))
# Inner products should be roughly ascending by concentration.
self.assertAllClose(np.round(np.sort(inner_product, axis=0), decimals=3),
np.round(inner_product, decimals=3),
atol=.007)
means = self.evaluate(pspherical.mean())
# Mean vector for 0-concentration is precisely (0, 0).
self.assertAllEqual(np.zeros_like(means[0]), means[0])
mean_lengths = np.linalg.norm(means, axis=-1)
# Length of the mean vector is strictly ascending with concentration.
self.assertAllEqual(mean_lengths, np.sort(mean_lengths, axis=0))
self.assertAllClose(np.linalg.norm(sample_mean, axis=-1), mean_lengths,
atol=0.03)
def testSampleMeanDir2d(self):
mean_dirs = tf.math.l2_normalize(
np.array([[1., 1], [-2, 1], [0, -1]], dtype=self.dtype), axis=-1)
concentration = np.array(
[[0], [0.1], [2], [40], [1000]], dtype=self.dtype)
self.VerifySampleMean(mean_dirs, concentration, [5, 3])
def testSampleMeanDir3d(self):
mean_dirs = tf.math.l2_normalize(
np.array([[1., 2, 3], [-2, -3, -1]], dtype=self.dtype), axis=-1)
concentration = np.array(
[[0], [0.1], [2], [40], [1000]], dtype=self.dtype)
self.VerifySampleMean(mean_dirs, concentration, [5, 2])
def testSampleMeanDir5d(self):
mean_dirs = tf.math.l2_normalize(
np.array([[1., 2, 3, -1., 5.]], dtype=self.dtype), axis=-1)
concentration = np.array(
[[0], [0.1], [2], [40], [1000]], dtype=self.dtype)
self.VerifySampleMean(mean_dirs, concentration, [5, 1])
def VerifyPdfWithNumpy(self, pspherical, atol=1e-4):
"""Verifies log_prob evaluations with numpy/scipy.
Both uniform random points and sampled points are evaluated.
Args:
pspherical: A `tfp.distributions.PowerSpherical` instance.
atol: Absolute difference tolerable.
"""
dim = tf.compat.dimension_value(pspherical.event_shape[-1])
nsamples = 10
# Sample some random points uniformly over the hypersphere using numpy.
sample_shape = [nsamples] + tensorshape_util.as_list(
pspherical.batch_shape) + [dim]
uniforms = np.random.randn(*sample_shape)
uniforms /= np.linalg.norm(uniforms, axis=-1, keepdims=True)
uniforms = uniforms.astype(dtype_util.as_numpy_dtype(pspherical.dtype))
# Concatenate in some sampled points from the distribution under test.
pspherical_samples = pspherical.sample(
sample_shape=[nsamples], seed=test_util.test_seed())
samples = tf.concat([uniforms, pspherical_samples], axis=0)
samples = tf.debugging.check_numerics(samples, 'samples')
samples = self.evaluate(samples)
log_prob = pspherical.log_prob(samples)
log_prob = self.evaluate(log_prob)
# Check that the log_prob is not nan or +inf. It can be -inf since
# if we sample a direction diametrically opposite to the mean direction,
# we'll get an inner product of -1.
self.assertFalse(np.any(np.isnan(log_prob)))
self.assertFalse(np.any(np.isposinf(log_prob)))
conc = self.evaluate(pspherical.concentration)
mean_dir = self.evaluate(pspherical.mean_direction)
alpha = (dim - 1.) / 2. + conc
beta = (dim - 1.) / 2.
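# Reference form of the Power Spherical log-density reconstructed below:
#   log p(x; mu, kappa) = kappa * log1p(mu^T x) - log C, with
#   alpha = (d - 1) / 2 + kappa, beta = (d - 1) / 2 and
#   log C = (alpha + beta) * log 2 + beta * log pi + lgamma(alpha) - lgamma(alpha + beta).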
expected = (
sp_special.xlog1py(conc, np.sum(samples * mean_dir, axis=-1)) -
(alpha + beta) * np.log(2.) - beta * np.log(np.pi) -
sp_special.gammaln(alpha) + sp_special.gammaln(alpha + beta))
self.assertAllClose(expected, log_prob, atol=atol)
def VerifySampleAndPdfConsistency(self, pspherical, rtol=0.075):
"""Verifies samples are consistent with the PDF using importance sampling.
In particular, we verify an estimate the surface area of the n-dimensional
hypersphere, and the surface areas of the spherical caps demarcated by
a handful of survival rates.
Args:
pspherical: A `PowerSpherical` distribution instance.
rtol: Relative difference tolerable.
"""
dim = tf.compat.dimension_value(pspherical.event_shape[-1])
nsamples = int(1e5)
samples = pspherical.sample(
sample_shape=[nsamples], seed=test_util.test_seed())
samples = tf.debugging.check_numerics(samples, 'samples')
log_prob = pspherical.log_prob(samples)
log_prob = self.evaluate(log_prob)
# Check that the log_prob is not nan or +inf. It can be -inf since
# if we sample a direction diametrically opposite to the mean direction,
# we'll get an inner product of -1.
self.assertFalse(np.any(np.isnan(log_prob)))
self.assertFalse(np.any(np.isposinf(log_prob)))
log_importance = -log_prob
sphere_surface_area_estimate, samples, importance = self.evaluate([
tf.reduce_mean(tf.math.exp(log_importance), axis=0), samples,
tf.exp(log_importance)])
true_sphere_surface_area = 2 * (np.pi)**(dim / 2) * self.evaluate(
tf.exp(-tf.math.lgamma(dim / 2)))
# Broadcast to correct size
true_sphere_surface_area += np.zeros_like(sphere_surface_area_estimate)
# Highly concentrated distributions do not get enough coverage to provide
# a reasonable full-sphere surface area estimate. These are covered below
# by CDF-based hypersphere cap surface area estimates.
# Because the PowerSpherical distribution has zero mass at
# -`mean_direction` (and points close to -`mean_direction` due to floating
# point), we only compute this at concentration = 0, which has guaranteed
# mass everywhere.
self.assertAllClose(
true_sphere_surface_area[0],
sphere_surface_area_estimate[0], rtol=rtol)
# Assert surface area of the hyperspherical cap for some CDFs in [.05, .45]
# (h must be greater than 0 for the hypersphere cap surface area
# calculation to hold).
for survival_rate in 0.95, .9, .75, .6:
cdf = (1 - survival_rate)
mean_dir = self.evaluate(pspherical.mean_direction)
dotprods = np.sum(samples * mean_dir, -1)
# Empirical estimate of the effective dot-product of the threshold that
# selects for a given CDF level, that is the cosine of the largest
# passable angle, or the minimum cosine for a within-CDF sample.
dotprod_thresh = np.percentile(
dotprods, 100 * survival_rate, axis=0, keepdims=True)
# We mask this sum because the log_prob can be -inf for samples that lie
# diametrically opposite to mean_dir.
importance_masked = np.ma.array(
importance, mask=dotprods <= dotprod_thresh)
sphere_cap_surface_area_ests = (
cdf * (importance_masked).sum(0) /
(dotprods > dotprod_thresh).sum(0))
h = (1 - dotprod_thresh)
self.assertGreaterEqual(h.min(), 0) # h must be >= 0 for the eqn below
true_sphere_cap_surface_area = (
0.5 * true_sphere_surface_area *
self.evaluate(tf.math.betainc((dim - 1) / 2, 0.5, 2 * h - h**2)))
if dim == 3: # For 3-d we have a simpler form we can double-check.
self.assertAllClose(2 * np.pi * h, true_sphere_cap_surface_area)
self.assertAllClose(
true_sphere_cap_surface_area,
sphere_cap_surface_area_ests +
np.zeros_like(true_sphere_cap_surface_area),
rtol=rtol)
def testSampleAndPdfConsistency2d(self):
mean_dir = tf.math.l2_normalize([[1., 2], [-2, -3]], axis=-1)
concentration = [[0], [1e-5], [0.1], [1], [4]]
pspherical = tfp.distributions.PowerSpherical(
mean_direction=mean_dir, concentration=concentration,
validate_args=True, allow_nan_stats=False)
self.VerifySampleAndPdfConsistency(pspherical)
self.VerifyPdfWithNumpy(pspherical)
def testSampleAndPdfConsistency3d(self):
mean_dir = tf.math.l2_normalize([[1., 2, 3], [-2, -3, -1]], axis=-1)
concentration = [[0], [1e-5], [0.1], [1], [4]]
pspherical = tfp.distributions.PowerSpherical(
mean_direction=mean_dir, concentration=concentration,
validate_args=True, allow_nan_stats=False)
self.VerifySampleAndPdfConsistency(pspherical)
self.VerifyPdfWithNumpy(pspherical, atol=.002)
def testSampleAndPdfConsistency4d(self):
mean_dir = tf.math.l2_normalize([[1., 2, 3, 4], [-2, -3, -1, 0]], axis=-1)
concentration = [[0], [1e-4], [0.1], [1], [4]]
pspherical = tfp.distributions.PowerSpherical(
mean_direction=mean_dir, concentration=concentration,
validate_args=True, allow_nan_stats=False)
self.VerifySampleAndPdfConsistency(pspherical)
self.VerifyPdfWithNumpy(pspherical)
def testSampleAndPdfConsistency5d(self):
mean_dir = tf.math.l2_normalize(
[[1., 2, 3, 4, 5], [-2, -3, -1, 0, 1]], axis=-1)
concentration = [[0], [5e-2], [0.1], [1], [4]]
pspherical = tfp.distributions.PowerSpherical(
mean_direction=mean_dir, concentration=concentration,
validate_args=True, allow_nan_stats=False)
self.VerifySampleAndPdfConsistency(pspherical)
self.VerifyPdfWithNumpy(pspherical, atol=2e-4)
def VerifyCovariance(self, dim):
seed_stream = test_util.test_seed_stream()
num_samples = int(5e4)
mean_direction = tf.random.uniform(
shape=[5, dim],
minval=self.dtype(1.),
maxval=self.dtype(2.),
dtype=self.dtype,
seed=seed_stream())
mean_direction = tf.nn.l2_normalize(mean_direction, axis=-1)
concentration = tf.math.log(
tf.random.uniform(
shape=[2, 1],
minval=self.dtype(1.),
maxval=self.dtype(100.),
dtype=self.dtype,
seed=seed_stream()))
ps = tfp.distributions.PowerSpherical(
mean_direction=mean_direction,
concentration=concentration,
validate_args=True,
allow_nan_stats=False)
samples = ps.sample(num_samples, seed=test_util.test_seed())
sample_cov = tfp.stats.covariance(samples, sample_axis=0)
true_cov, sample_cov = self.evaluate([
ps.covariance(), sample_cov])
self.assertAllClose(true_cov, sample_cov, rtol=0.15, atol=1.5e-3)
def testCovarianceDim2(self):
self.VerifyCovariance(dim=2)
def testCovarianceDim5(self):
self.VerifyCovariance(dim=5)
def testCovarianceDim10(self):
self.VerifyCovariance(dim=10)
def VerifyEntropy(self, dim):
seed_stream = test_util.test_seed_stream()
mean_direction = tf.random.uniform(
shape=[5, dim],
minval=self.dtype(1.),
maxval=self.dtype(2.),
dtype=self.dtype,
seed=seed_stream())
mean_direction = tf.nn.l2_normalize(mean_direction, axis=-1)
concentration = tf.math.log(
tf.random.uniform(
shape=[2, 1],
minval=self.dtype(1.),
maxval=self.dtype(100.),
dtype=self.dtype,
seed=seed_stream()))
ps = tfp.distributions.PowerSpherical(
mean_direction=mean_direction,
concentration=concentration,
validate_args=True,
allow_nan_stats=False)
samples = ps.sample(int(3e4), seed=test_util.test_seed())
sample_entropy = -tf.reduce_mean(ps.log_prob(samples), axis=0)
true_entropy, sample_entropy = self.evaluate([
ps.entropy(), sample_entropy])
self.assertAllClose(sample_entropy, true_entropy, rtol=3e-2)
def testEntropyDim2(self):
self.VerifyEntropy(dim=2)
def testEntropyDim3(self):
self.VerifyEntropy(dim=3)
def testEntropyDim5(self):
self.VerifyEntropy(dim=5)
def testEntropyDim10(self):
self.VerifyEntropy(dim=10)
def testAssertsValidImmutableParams(self):
with self.assertRaisesOpError('`concentration` must be non-negative'):
pspherical = tfp.distributions.PowerSpherical(
mean_direction=tf.math.l2_normalize([1., 2, 3], axis=-1),
concentration=-1.,
validate_args=True,
allow_nan_stats=False)
self.evaluate(pspherical.mean())
with self.assertRaisesOpError(
'`mean_direction` must be a vector of at least size 2'):
pspherical = tfp.distributions.PowerSpherical(
mean_direction=[1.],
concentration=0.,
validate_args=True,
allow_nan_stats=False)
self.evaluate(pspherical.mean())
with self.assertRaisesOpError('`mean_direction` must be unit-length'):
pspherical = tfp.distributions.PowerSpherical(
mean_direction=tf.convert_to_tensor([1., 2, 3]),
concentration=1.,
validate_args=True,
allow_nan_stats=False)
self.evaluate(pspherical.mean())
def testAssertsValidMutableParams(self):
mean_direction = tf.Variable(tf.math.l2_normalize([1., 2, 3], axis=-1))
concentration = tf.Variable(1.)
pspherical = tfp.distributions.PowerSpherical(
mean_direction=mean_direction,
concentration=concentration,
validate_args=True,
allow_nan_stats=False)
self.evaluate([mean_direction.initializer, concentration.initializer])
self.evaluate(concentration.assign(-1.))
with self.assertRaisesOpError('`concentration` must be non-negative'):
self.evaluate(pspherical.mean())
self.evaluate((concentration.assign(1.),
mean_direction.assign([1., 2., 3.])))
with self.assertRaisesOpError('`mean_direction` must be unit-length'):
self.evaluate(pspherical.mean())
mean_direction = tf.Variable([1.])
with self.assertRaisesOpError(
'`mean_direction` must be a vector of at least size 2'):
pspherical = tfp.distributions.PowerSpherical(
mean_direction=mean_direction,
concentration=concentration,
validate_args=True,
allow_nan_stats=False)
self.evaluate(mean_direction.initializer)
self.evaluate(pspherical.mean())
def testAssertValidSample(self):
mean_dir = tf.math.l2_normalize([[1., 2, 3], [-2, -3, -1]], axis=-1)
concentration = [[0.], [2.]]
pspherical = tfp.distributions.PowerSpherical(
mean_direction=mean_dir,
concentration=concentration,
validate_args=True,
allow_nan_stats=False)
with self.assertRaisesOpError('Samples must be unit length.'):
self.evaluate(pspherical.prob([0.5, 0.5, 0.5]))
msg = 'must have innermost dimension matching'
static_shape_assertion = self.assertRaisesRegexp(ValueError, msg)
dynamic_shape_assertion = self.assertRaisesOpError(msg)
x = [[1., 0., 0., 0.]]
with static_shape_assertion:
self.evaluate(pspherical.log_prob(x))
x_var = tf.Variable(x, shape=tf.TensorShape(None))
shape_assertion = (static_shape_assertion if tf.executing_eagerly()
else dynamic_shape_assertion)
self.evaluate(x_var.initializer)
with shape_assertion:
self.evaluate(pspherical.log_prob(x_var))
def testSupportBijectorOutsideRange(self):
mean_dir = np.array([[1., 2., 3.], [-2., -3., -1.]]).astype(np.float32)
mean_dir /= np.linalg.norm(mean_dir, axis=-1)[:, np.newaxis]
concentration = [[0], [0.1], [2], [40], [1000]]
dist = tfp.distributions.PowerSpherical(
mean_direction=mean_dir,
concentration=concentration,
validate_args=True)
x = mean_dir
x[0][0] += 0.01
with self.assertRaisesOpError('must sum to `1`'):
self.evaluate(
dist._experimental_default_event_space_bijector().inverse(x[0]))
with self.assertRaisesOpError('must be non-negative'):
self.evaluate(
dist._experimental_default_event_space_bijector().inverse(x[1]))
def VerifyPowerSphericaUniformZeroKL(self, dim):
seed_stream = test_util.test_seed_stream()
mean_direction = tf.random.uniform(
shape=[5, dim],
minval=self.dtype(1.),
maxval=self.dtype(2.),
dtype=self.dtype,
seed=seed_stream())
mean_direction = tf.nn.l2_normalize(mean_direction, axis=-1)
# Zero concentration is the same as a uniform distribution on the sphere.
# Check that the log_probs agree and the KL divergence is zero.
concentration = self.dtype(0.)
ps = tfp.distributions.PowerSpherical(
mean_direction=mean_direction,
concentration=concentration)
su = tfp.distributions.SphericalUniform(dimension=dim, dtype=self.dtype)
x = ps.sample(int(5e4), seed=test_util.test_seed())
ps_lp = ps.log_prob(x)
su_lp = su.log_prob(x)
ps_lp_, su_lp_ = self.evaluate([ps_lp, su_lp])
self.assertAllClose(ps_lp_, su_lp_, rtol=1e-6)
true_kl = tfp.distributions.kl_divergence(ps, su)
true_kl_ = self.evaluate([true_kl])
self.assertAllClose(true_kl_, np.zeros_like(true_kl_), atol=1e-4)
def VerifyPowerSphericaUniformKL(self, dim):
seed_stream = test_util.test_seed_stream()
mean_direction = tf.random.uniform(
shape=[5, dim],
minval=self.dtype(1.),
maxval=self.dtype(2.),
dtype=self.dtype,
seed=seed_stream())
mean_direction = tf.nn.l2_normalize(mean_direction, axis=-1)
concentration = tf.math.log(
tf.random.uniform(
shape=[2, 1],
minval=self.dtype(1.),
maxval=self.dtype(100.),
dtype=self.dtype,
seed=seed_stream()))
ps = tfp.distributions.PowerSpherical(
mean_direction=mean_direction,
concentration=concentration)
su = tfp.distributions.SphericalUniform(dimension=dim, dtype=self.dtype)
x = ps.sample(int(5e4), seed=test_util.test_seed())
kl_sample = tf.reduce_mean(ps.log_prob(x) - su.log_prob(x), axis=0)
true_kl = tfp.distributions.kl_divergence(ps, su)
true_kl_, kl_sample_ = self.evaluate([true_kl, kl_sample])
self.assertAllClose(true_kl_, kl_sample_, atol=0.0, rtol=7e-2)
def testKLPowerSphericalSphericalUniformDim2(self):
self.VerifyPowerSphericaUniformZeroKL(dim=2)
self.VerifyPowerSphericaUniformKL(dim=2)
def testKLPowerSphericalSphericalUniformDim3(self):
self.VerifyPowerSphericaUniformZeroKL(dim=3)
self.VerifyPowerSphericaUniformKL(dim=3)
def testKLPowerSphericalSphericalUniformDim5(self):
self.VerifyPowerSphericaUniformZeroKL(dim=5)
self.VerifyPowerSphericaUniformKL(dim=5)
def testKLPowerSphericalSphericalUniformDim10(self):
self.VerifyPowerSphericaUniformZeroKL(dim=10)
self.VerifyPowerSphericaUniformKL(dim=10)
def VerifyPowerSphericalVonMisesFisherZeroKL(self, dim):
seed_stream = test_util.test_seed_stream()
mean_direction = tf.random.uniform(
shape=[5, dim],
minval=self.dtype(1.),
maxval=self.dtype(2.),
dtype=self.dtype,
seed=seed_stream())
mean_direction = tf.nn.l2_normalize(mean_direction, axis=-1)
# Zero concentration is the same as a uniform distribution on the sphere.
# Check that the KL divergence is zero.
concentration = self.dtype(0.)
ps = tfp.distributions.PowerSpherical(
mean_direction=mean_direction,
concentration=concentration)
vmf = tfp.distributions.VonMisesFisher(
mean_direction=mean_direction,
concentration=concentration)
true_kl = tfp.distributions.kl_divergence(ps, vmf)
true_kl_ = self.evaluate(true_kl)
self.assertAllClose(true_kl_, np.zeros_like(true_kl_), atol=1e-4)
def testInvalidPowerSphericalvMFKl(self):
seed_stream = test_util.test_seed_stream()
mean_direction1 = tf.random.uniform(
shape=[5, 3],
minval=self.dtype(1.),
maxval=self.dtype(2.),
dtype=self.dtype,
seed=seed_stream())
mean_direction1 = tf.nn.l2_normalize(mean_direction1, axis=-1)
mean_direction2 = tf.random.uniform(
shape=[5, 4],
minval=self.dtype(1.),
maxval=self.dtype(2.),
dtype=self.dtype,
seed=seed_stream())
mean_direction2 = tf.nn.l2_normalize(mean_direction2, axis=-1)
concentration = self.dtype(0.)
ps = tfp.distributions.PowerSpherical(
mean_direction=mean_direction1,
concentration=concentration)
vmf = tfp.distributions.VonMisesFisher(
mean_direction=mean_direction2,
concentration=concentration)
with self.assertRaisesRegexp(ValueError, 'Can not compute the KL'):
tfp.distributions.kl_divergence(ps, vmf)
def VerifyPowerSphericalVonMisesFisherKL(self, dim):
seed_stream = test_util.test_seed_stream()
mean_direction1 = tf.random.uniform(
shape=[5, dim],
minval=self.dtype(1.),
maxval=self.dtype(2.),
dtype=self.dtype,
seed=seed_stream())
mean_direction2 = tf.random.uniform(
shape=[5, dim],
minval=self.dtype(1.),
maxval=self.dtype(2.),
dtype=self.dtype,
seed=seed_stream())
mean_direction1 = tf.nn.l2_normalize(mean_direction1, axis=-1)
mean_direction2 = tf.nn.l2_normalize(mean_direction2, axis=-1)
concentration1 = tf.math.log(
tf.random.uniform(
shape=[2, 1],
minval=self.dtype(1.),
maxval=self.dtype(100.),
dtype=self.dtype,
seed=seed_stream()))
concentration2 = tf.math.log(
tf.random.uniform(
shape=[2, 1],
minval=self.dtype(1.),
maxval=self.dtype(100.),
dtype=self.dtype,
seed=seed_stream()))
ps = tfp.distributions.PowerSpherical(
mean_direction=mean_direction1,
concentration=concentration1)
vmf = tfp.distributions.VonMisesFisher(
mean_direction=mean_direction2,
concentration=concentration2)
x = ps.sample(int(6e4), seed=test_util.test_seed())
kl_sample = tf.reduce_mean(ps.log_prob(x) - vmf.log_prob(x), axis=0)
true_kl = tfp.distributions.kl_divergence(ps, vmf)
true_kl_, kl_sample_ = self.evaluate([true_kl, kl_sample])
self.assertAllClose(true_kl_, kl_sample_, atol=0.0, rtol=7e-2)
def testKLPowerSphericalVonMisesFisherDim2(self):
self.VerifyPowerSphericalVonMisesFisherZeroKL(dim=2)
self.VerifyPowerSphericalVonMisesFisherKL(dim=2)
def testKLPowerSphericalVonMisesFisherDim3(self):
self.VerifyPowerSphericalVonMisesFisherZeroKL(dim=3)
self.VerifyPowerSphericalVonMisesFisherKL(dim=3)
@test_util.test_all_tf_execution_regimes
class PowerSphericalTestFloat32(
test_util.VectorDistributionTestHelpers,
test_util.TestCase,
_PowerSphericalTest):
dtype = np.float32
@test_util.test_all_tf_execution_regimes
class PowerSphericalTestFloat64(
test_util.VectorDistributionTestHelpers,
test_util.TestCase,
_PowerSphericalTest):
dtype = np.float64
if __name__ == '__main__':
tf.test.main()
| 39.797468
| 80
| 0.684121
|
f86dd4e3989feb3eeaf3475aef1a03d7881ad83d
| 1,421
|
py
|
Python
|
src/interview-cake/product-of-every-integer-but/test_product_of_every_integer_but.py
|
nwthomas/code-challenges
|
49c2532ff597495474e67b13f2ed9b9ad93d40b5
|
[
"MIT"
] | 1
|
2020-12-11T05:54:59.000Z
|
2020-12-11T05:54:59.000Z
|
src/interview-cake/product-of-every-integer-but/test_product_of_every_integer_but.py
|
nwthomas/code-challenges
|
49c2532ff597495474e67b13f2ed9b9ad93d40b5
|
[
"MIT"
] | 1
|
2021-04-10T06:53:30.000Z
|
2021-04-10T06:53:30.000Z
|
src/interview-cake/product-of-every-integer-but/test_product_of_every_integer_but.py
|
nwthomas/code-challenges
|
49c2532ff597495474e67b13f2ed9b9ad93d40b5
|
[
"MIT"
] | 7
|
2019-11-24T12:10:35.000Z
|
2020-12-14T22:36:31.000Z
|
from product_of_every_integer_but import get_products_of_all_ints_except_at_index
import unittest
class TestGetProductsOfAllIntsExceptAtIndex(unittest.TestCase):
def test_raises_typeerror_if_argument_is_not_list(self):
"""Raises a new TypeError if the argument is not of type list"""
def result(): get_products_of_all_ints_except_at_index("test")
self.assertRaises(TypeError, result)
def test_raises_exception_for_empty_list(self):
"""Raises an exception if the argument is []"""
def result(): get_products_of_all_ints_except_at_index([])
self.assertRaises(Exception, result)
def test_raises_exception_for_list_of_length_one(self):
"""Raises a new error if the length of the list is 1"""
def result(): get_products_of_all_ints_except_at_index([1])
self.assertRaises(Exception, result)
def test_returns_result_for_list_of_len_two(self):
"""Returns the correct result for a list of length 2"""
self.assertEqual(
get_products_of_all_ints_except_at_index([3, 10]), [10, 3])
def test_returns_result_for_list_of_ints(self):
"""Returns the correct result for a long list of integers"""
int_list = [1, 4, 7, 2, 3]
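# Expected per-index products of all other entries:
# index 0: 4*7*2*3 = 168, index 1: 1*7*2*3 = 42, index 2: 1*4*2*3 = 24,
# index 3: 1*4*7*3 = 84, index 4: 1*4*7*2 = 56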
result = get_products_of_all_ints_except_at_index(int_list)
self.assertEqual(result, [168, 42, 24, 84, 56])
if __name__ == "__main__":
unittest.main()
| 40.6
| 81
| 0.719212
|
2ccef0dee4a40eca57a88b632de01c7a973b5a6d
| 145
|
py
|
Python
|
appengine_config.py
|
gtlambert/potato
|
9d0a144ddfaacf8543d98357af9aa052f1433345
|
[
"Apache-2.0"
] | null | null | null |
appengine_config.py
|
gtlambert/potato
|
9d0a144ddfaacf8543d98357af9aa052f1433345
|
[
"Apache-2.0"
] | null | null | null |
appengine_config.py
|
gtlambert/potato
|
9d0a144ddfaacf8543d98357af9aa052f1433345
|
[
"Apache-2.0"
] | null | null | null |
import os
on_appengine = os.environ.get('SERVER_SOFTWARE','').startswith('Development')
if on_appengine and os.name == 'nt':
os.name = None
| 24.166667
| 77
| 0.710345
|
6329da74d147b5daecea61d567f569ea9820abf0
| 848
|
py
|
Python
|
tests/musicstream/test_two_voices.py
|
alexgorji/music_score
|
b4176da52295361f3436826903485c5cb8054c5e
|
[
"MIT"
] | 2
|
2020-06-22T13:33:28.000Z
|
2020-12-30T15:09:00.000Z
|
tests/musicstream/test_two_voices.py
|
alexgorji/music_score
|
b4176da52295361f3436826903485c5cb8054c5e
|
[
"MIT"
] | 37
|
2020-02-18T12:15:00.000Z
|
2021-12-13T20:01:14.000Z
|
tests/musicstream/test_two_voices.py
|
alexgorji/music_score
|
b4176da52295361f3436826903485c5cb8054c5e
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
import os
from musicscore.musicstream.streamvoice import SimpleFormat
from musicscore.musictree.treescoretimewise import TreeScoreTimewise
from tests.score_templates.xml_test_score import TestScore
path = os.path.abspath(__file__).split('.')[0]
class Test(TestCase):
def setUp(self) -> None:
self.score = TreeScoreTimewise()
def test_1(self):
sf = SimpleFormat(midis=[70, 72, 73], quarter_durations=[2, 0.5, 0.5])
voice1 = sf.to_stream_voice(1)
voice1.add_to_score(self.score)
sf = SimpleFormat(midis=[50, 52, 53], quarter_durations=[0.5, 1, 2])
voice2 = sf.to_stream_voice(2)
voice2.add_to_score(self.score)
result_path = path + '_test_1'
self.score.write(result_path)
TestScore().assert_template(result_path=result_path)
| 31.407407
| 78
| 0.701651
|
e4ac63aa536780b3b917cee2b2737def6330bdb1
| 25,994
|
py
|
Python
|
donkeycar/templates/complete.py
|
wallarug/donkeycar
|
93023641f7877520dbd1a59f525fc22b9f2182c5
|
[
"MIT"
] | null | null | null |
donkeycar/templates/complete.py
|
wallarug/donkeycar
|
93023641f7877520dbd1a59f525fc22b9f2182c5
|
[
"MIT"
] | null | null | null |
donkeycar/templates/complete.py
|
wallarug/donkeycar
|
93023641f7877520dbd1a59f525fc22b9f2182c5
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""
Scripts to drive a donkey 2 car
Usage:
manage.py (drive) [--model=<model>] [--js] [--type=(linear|categorical)] [--camera=(single|stereo)] [--meta=<key:value> ...] [--myconfig=<filename>]
manage.py (train) [--tubs=tubs] (--model=<model>) [--type=(linear|inferred|tensorrt_linear|tflite_linear)]
Options:
-h --help Show this screen.
--js Use physical joystick.
-f --file=<file> A text file containing paths to tub files, one per line. Option may be used more than once.
--meta=<key:value> Key/Value strings describing a piece of meta data about this drive. Option may be used more than once.
--myconfig=filename Specify myconfig file to use.
[default: myconfig.py]
"""
import os
import time
from docopt import docopt
import numpy as np
import donkeycar as dk
from donkeycar.parts.transform import TriggeredCallback, DelayedTrigger
from donkeycar.parts.tub_v2 import TubWriter
from donkeycar.parts.controller import LocalWebController, JoystickController, WebFpv
from donkeycar.parts.throttle_filter import ThrottleFilter
from donkeycar.parts.behavior import BehaviorPart
from donkeycar.parts.file_watcher import FileWatcher
from donkeycar.parts.launch import AiLaunch
from donkeycar.utils import *
def drive(cfg, model_path=None, use_joystick=False, model_type=None, camera_type='single', meta=[]):
'''
Construct a working robotic vehicle from many parts.
Each part runs as a job in the Vehicle loop, calling either
its run or run_threaded method depending on the constructor flag `threaded`.
All parts are updated one after another at the framerate given in
cfg.DRIVE_LOOP_HZ assuming each part finishes processing in a timely manner.
Parts may have named outputs and inputs. The framework handles passing named outputs
to parts requesting the same named input.
'''
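# Minimal sketch (illustrative only) of how a part plugs into this loop:
#   class Doubler:
#       def run(self, x):      # receives the value registered under `inputs`
#           return 2 * x       # return value is stored under `outputs`
#   V.add(Doubler(), inputs=['some/value'], outputs=['some/doubled'])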
if cfg.DONKEY_GYM:
#the simulator will use CUDA and we usually run out of resources
#if we also try to use CUDA, so disable it for donkey_gym.
os.environ["CUDA_VISIBLE_DEVICES"]="-1"
if model_type is None:
if cfg.TRAIN_LOCALIZER:
model_type = "localizer"
elif cfg.TRAIN_BEHAVIORS:
model_type = "behavior"
else:
model_type = cfg.DEFAULT_MODEL_TYPE
#Initialize car
V = dk.vehicle.Vehicle()
print("cfg.CAMERA_TYPE", cfg.CAMERA_TYPE)
if camera_type == "stereo":
if cfg.CAMERA_TYPE == "WEBCAM":
from donkeycar.parts.camera import Webcam
camA = Webcam(image_w=cfg.IMAGE_W, image_h=cfg.IMAGE_H, image_d=cfg.IMAGE_DEPTH, iCam = 0)
camB = Webcam(image_w=cfg.IMAGE_W, image_h=cfg.IMAGE_H, image_d=cfg.IMAGE_DEPTH, iCam = 1)
elif cfg.CAMERA_TYPE == "CVCAM":
from donkeycar.parts.cv import CvCam
camA = CvCam(image_w=cfg.IMAGE_W, image_h=cfg.IMAGE_H, image_d=cfg.IMAGE_DEPTH, iCam = 0)
camB = CvCam(image_w=cfg.IMAGE_W, image_h=cfg.IMAGE_H, image_d=cfg.IMAGE_DEPTH, iCam = 1)
else:
raise(Exception("Unsupported camera type: %s" % cfg.CAMERA_TYPE))
V.add(camA, outputs=['cam/image_array_a'], threaded=True)
V.add(camB, outputs=['cam/image_array_b'], threaded=True)
from donkeycar.parts.image import StereoPair
V.add(StereoPair(), inputs=['cam/image_array_a', 'cam/image_array_b'],
outputs=['cam/image_array'])
elif cfg.CAMERA_TYPE == "D435":
from donkeycar.parts.realsense435i import RealSense435i
cam = RealSense435i(
enable_rgb=cfg.REALSENSE_D435_RGB,
enable_depth=cfg.REALSENSE_D435_DEPTH,
enable_imu=cfg.REALSENSE_D435_IMU,
device_id=cfg.REALSENSE_D435_ID)
V.add(cam, inputs=[],
outputs=['cam/image_array', 'cam/depth_array',
'imu/acl_x', 'imu/acl_y', 'imu/acl_z',
'imu/gyr_x', 'imu/gyr_y', 'imu/gyr_z'],
threaded=True)
else:
if cfg.DONKEY_GYM:
from donkeycar.parts.dgym import DonkeyGymEnv
inputs = []
threaded = True
if cfg.DONKEY_GYM:
from donkeycar.parts.dgym import DonkeyGymEnv
cam = DonkeyGymEnv(cfg.DONKEY_SIM_PATH, host=cfg.SIM_HOST, env_name=cfg.DONKEY_GYM_ENV_NAME, conf=cfg.GYM_CONF, delay=cfg.SIM_ARTIFICIAL_LATENCY)
threaded = True
inputs = ['angle', 'throttle', 'brake']
elif cfg.CAMERA_TYPE == "PICAM":
from donkeycar.parts.camera import PiCamera
cam = PiCamera(image_w=cfg.IMAGE_W, image_h=cfg.IMAGE_H, image_d=cfg.IMAGE_DEPTH, framerate=cfg.CAMERA_FRAMERATE, vflip=cfg.CAMERA_VFLIP, hflip=cfg.CAMERA_HFLIP)
elif cfg.CAMERA_TYPE == "WEBCAM":
from donkeycar.parts.camera import Webcam
cam = Webcam(image_w=cfg.IMAGE_W, image_h=cfg.IMAGE_H, image_d=cfg.IMAGE_DEPTH)
elif cfg.CAMERA_TYPE == "CVCAM":
from donkeycar.parts.cv import CvCam
cam = CvCam(image_w=cfg.IMAGE_W, image_h=cfg.IMAGE_H, image_d=cfg.IMAGE_DEPTH)
elif cfg.CAMERA_TYPE == "CSIC":
from donkeycar.parts.camera import CSICamera
cam = CSICamera(image_w=cfg.IMAGE_W, image_h=cfg.IMAGE_H, image_d=cfg.IMAGE_DEPTH, framerate=cfg.CAMERA_FRAMERATE, gstreamer_flip=cfg.CSIC_CAM_GSTREAMER_FLIP_PARM)
elif cfg.CAMERA_TYPE == "V4L":
from donkeycar.parts.camera import V4LCamera
cam = V4LCamera(image_w=cfg.IMAGE_W, image_h=cfg.IMAGE_H, image_d=cfg.IMAGE_DEPTH, framerate=cfg.CAMERA_FRAMERATE)
elif cfg.CAMERA_TYPE == "MOCK":
from donkeycar.parts.camera import MockCamera
cam = MockCamera(image_w=cfg.IMAGE_W, image_h=cfg.IMAGE_H, image_d=cfg.IMAGE_DEPTH)
elif cfg.CAMERA_TYPE == "IMAGE_LIST":
from donkeycar.parts.camera import ImageListCamera
cam = ImageListCamera(path_mask=cfg.PATH_MASK)
elif cfg.CAMERA_TYPE == "LEOPARD":
from donkeycar.parts.leopard_imaging import LICamera
cam = LICamera(width=cfg.IMAGE_W, height=cfg.IMAGE_H, fps=cfg.CAMERA_FRAMERATE)
else:
raise(Exception("Unkown camera type: %s" % cfg.CAMERA_TYPE))
V.add(cam, inputs=inputs, outputs=['cam/image_array'], threaded=threaded)
if use_joystick or cfg.USE_JOYSTICK_AS_DEFAULT:
#modify max_throttle closer to 1.0 to have more power
#modify steering_scale lower than 1.0 to have less responsive steering
if cfg.CONTROLLER_TYPE == "MM1":
from donkeycar.parts.robohat import RoboHATController
ctr = RoboHATController(cfg)
elif "custom" == cfg.CONTROLLER_TYPE:
#
# custom controller created with `donkey createjs` command
#
from my_joystick import MyJoystickController
ctr = MyJoystickController(
throttle_dir=cfg.JOYSTICK_THROTTLE_DIR,
throttle_scale=cfg.JOYSTICK_MAX_THROTTLE,
steering_scale=cfg.JOYSTICK_STEERING_SCALE,
auto_record_on_throttle=cfg.AUTO_RECORD_ON_THROTTLE)
ctr.set_deadzone(cfg.JOYSTICK_DEADZONE)
else:
from donkeycar.parts.controller import get_js_controller
ctr = get_js_controller(cfg)
if cfg.USE_NETWORKED_JS:
from donkeycar.parts.controller import JoyStickSub
netwkJs = JoyStickSub(cfg.NETWORK_JS_SERVER_IP)
V.add(netwkJs, threaded=True)
ctr.js = netwkJs
V.add(ctr,
inputs=['cam/image_array'],
outputs=['user/angle', 'user/throttle', 'user/mode', 'recording'],
threaded=True)
else:
#This web controller will create a web server that is capable
#of managing steering, throttle, and modes, and more.
ctr = LocalWebController(port=cfg.WEB_CONTROL_PORT, mode=cfg.WEB_INIT_MODE)
V.add(ctr,
inputs=['cam/image_array', 'tub/num_records'],
outputs=['user/angle', 'user/throttle', 'user/brake', 'user/mode', 'recording'],
threaded=True)
#this throttle filter will allow one tap back for esc reverse
th_filter = ThrottleFilter()
V.add(th_filter, inputs=['user/throttle'], outputs=['user/throttle'])
#See if we should even run the pilot module.
#This is only needed because the part run_condition only accepts boolean
class PilotCondition:
def run(self, mode):
if mode == 'user':
return False
else:
return True
V.add(PilotCondition(), inputs=['user/mode'], outputs=['run_pilot'])
class LedConditionLogic:
def __init__(self, cfg):
self.cfg = cfg
def run(self, mode, recording, recording_alert, behavior_state, model_file_changed, track_loc):
#returns a blink rate. 0 for off. -1 for on. positive for rate.
if track_loc is not None:
led.set_rgb(*self.cfg.LOC_COLORS[track_loc])
return -1
if model_file_changed:
led.set_rgb(self.cfg.MODEL_RELOADED_LED_R, self.cfg.MODEL_RELOADED_LED_G, self.cfg.MODEL_RELOADED_LED_B)
return 0.1
else:
led.set_rgb(self.cfg.LED_R, self.cfg.LED_G, self.cfg.LED_B)
if recording_alert:
led.set_rgb(*recording_alert)
return self.cfg.REC_COUNT_ALERT_BLINK_RATE
else:
led.set_rgb(self.cfg.LED_R, self.cfg.LED_G, self.cfg.LED_B)
if behavior_state is not None and model_type == 'behavior':
r, g, b = self.cfg.BEHAVIOR_LED_COLORS[behavior_state]
led.set_rgb(r, g, b)
return -1 #solid on
if recording:
return -1 #solid on
elif mode == 'user':
return 1
elif mode == 'local_angle':
return 0.5
elif mode == 'local':
return 0.1
return 0
if cfg.HAVE_RGB_LED and not cfg.DONKEY_GYM:
from donkeycar.parts.led_status import RGB_LED
led = RGB_LED(cfg.LED_PIN_R, cfg.LED_PIN_G, cfg.LED_PIN_B, cfg.LED_INVERT)
led.set_rgb(cfg.LED_R, cfg.LED_G, cfg.LED_B)
V.add(LedConditionLogic(cfg), inputs=['user/mode', 'recording', "records/alert", 'behavior/state', 'modelfile/modified', "pilot/loc"],
outputs=['led/blink_rate'])
V.add(led, inputs=['led/blink_rate'])
def get_record_alert_color(num_records):
col = (0, 0, 0)
for count, color in cfg.RECORD_ALERT_COLOR_ARR:
if num_records >= count:
col = color
return col
class RecordTracker:
def __init__(self):
self.last_num_rec_print = 0
self.dur_alert = 0
self.force_alert = 0
def run(self, num_records):
if num_records is None:
return 0
if self.last_num_rec_print != num_records or self.force_alert:
self.last_num_rec_print = num_records
if num_records % 10 == 0:
print("recorded", num_records, "records")
if num_records % cfg.REC_COUNT_ALERT == 0 or self.force_alert:
self.dur_alert = num_records // cfg.REC_COUNT_ALERT * cfg.REC_COUNT_ALERT_CYC
self.force_alert = 0
if self.dur_alert > 0:
self.dur_alert -= 1
if self.dur_alert != 0:
return get_record_alert_color(num_records)
return 0
rec_tracker_part = RecordTracker()
V.add(rec_tracker_part, inputs=["tub/num_records"], outputs=['records/alert'])
if cfg.AUTO_RECORD_ON_THROTTLE and isinstance(ctr, JoystickController):
#then we are not using the circle button. hijack that to force a record count indication
def show_record_count_status():
rec_tracker_part.last_num_rec_print = 0
rec_tracker_part.force_alert = 1
ctr.set_button_down_trigger('circle', show_record_count_status)
#Sombrero
if cfg.HAVE_SOMBRERO:
from donkeycar.parts.sombrero import Sombrero
s = Sombrero()
#IMU
if cfg.HAVE_IMU:
from donkeycar.parts.imu import IMU
imu = IMU(sensor=cfg.IMU_SENSOR, dlp_setting=cfg.IMU_DLP_CONFIG)
V.add(imu, outputs=['imu/acl_x', 'imu/acl_y', 'imu/acl_z',
'imu/gyr_x', 'imu/gyr_y', 'imu/gyr_z'], threaded=True)
# Use the FPV preview, which will show the cropped image output, or the full frame.
if cfg.USE_FPV:
V.add(WebFpv(), inputs=['cam/image_array'], threaded=True)
#Behavioral state
if cfg.TRAIN_BEHAVIORS:
bh = BehaviorPart(cfg.BEHAVIOR_LIST)
V.add(bh, outputs=['behavior/state', 'behavior/label', "behavior/one_hot_state_array"])
try:
ctr.set_button_down_trigger('L1', bh.increment_state)
except:
pass
inputs = ['cam/image_array', "behavior/one_hot_state_array"]
#IMU
elif model_type == "imu":
assert(cfg.HAVE_IMU)
#Run the pilot if the mode is not user.
inputs=['cam/image_array',
'imu/acl_x', 'imu/acl_y', 'imu/acl_z',
'imu/gyr_x', 'imu/gyr_y', 'imu/gyr_z']
else:
inputs=['cam/image_array']
def load_model(kl, model_path):
start = time.time()
print('loading model', model_path)
kl.load(model_path)
print('finished loading in %s sec.' % (str(time.time() - start)) )
def load_weights(kl, weights_path):
start = time.time()
try:
print('loading model weights', weights_path)
kl.model.load_weights(weights_path)
print('finished loading in %s sec.' % (str(time.time() - start)) )
except Exception as e:
print(e)
print('ERR>> problems loading weights', weights_path)
def load_model_json(kl, json_fnm):
start = time.time()
print('loading model json', json_fnm)
from tensorflow.python import keras
try:
with open(json_fnm, 'r') as handle:
contents = handle.read()
kl.model = keras.models.model_from_json(contents)
print('finished loading json in %s sec.' % (str(time.time() - start)) )
except Exception as e:
print(e)
print("ERR>> problems loading model json", json_fnm)
if model_path:
#When we have a model, first create an appropriate Keras part
kl = dk.utils.get_model_by_type(model_type, cfg)
model_reload_cb = None
if '.h5' in model_path or '.uff' in model_path or 'tflite' in model_path or '.pkl' in model_path:
#when we have a full-model file (.h5, .uff, .tflite or .pkl)
#load everything from the model file
load_model(kl, model_path)
def reload_model(filename):
load_model(kl, filename)
model_reload_cb = reload_model
elif '.json' in model_path:
#when we have a .json extension
#load the model from there and look for a matching
#.wts file with just weights
load_model_json(kl, model_path)
weights_path = model_path.replace('.json', '.weights')
load_weights(kl, weights_path)
def reload_weights(filename):
weights_path = filename.replace('.json', '.weights')
load_weights(kl, weights_path)
model_reload_cb = reload_weights
else:
print("ERR>> Unknown extension type on model file!!")
return
#this part will signal visual LED, if connected
V.add(FileWatcher(model_path, verbose=True), outputs=['modelfile/modified'])
#these parts will reload the model file, but only when ai is running so we don't interrupt user driving
V.add(FileWatcher(model_path), outputs=['modelfile/dirty'], run_condition="ai_running")
V.add(DelayedTrigger(100), inputs=['modelfile/dirty'], outputs=['modelfile/reload'], run_condition="ai_running")
V.add(TriggeredCallback(model_path, model_reload_cb), inputs=["modelfile/reload"], run_condition="ai_running")
outputs=['pilot/angle', 'pilot/throttle']
if cfg.TRAIN_LOCALIZER:
outputs.append("pilot/loc")
V.add(kl, inputs=inputs,
outputs=outputs,
run_condition='run_pilot')
if cfg.STOP_SIGN_DETECTOR:
from donkeycar.parts.object_detector.stop_sign_detector import StopSignDetector
V.add(StopSignDetector(cfg.STOP_SIGN_MIN_SCORE, cfg.STOP_SIGN_SHOW_BOUNDING_BOX), inputs=['cam/image_array', 'pilot/throttle'], outputs=['pilot/throttle', 'cam/image_array'])
#Choose what inputs should change the car.
class DriveMode:
def run(self, mode,
user_angle, user_throttle, user_brake,
pilot_angle, pilot_throttle, pilot_brake):
if mode == 'user':
return user_angle, user_throttle, user_brake
elif mode == 'local_angle':
return pilot_angle if pilot_angle else 0.0, user_throttle, user_brake
else:
return pilot_angle if pilot_angle else 0.0, pilot_throttle * cfg.AI_THROTTLE_MULT if pilot_throttle else 0.0, pilot_brake if pilot_brake else 0.0
V.add(DriveMode(),
inputs=['user/mode', 'user/angle', 'user/throttle', 'user/brake',
'pilot/angle', 'pilot/throttle', 'pilot/brake'],
outputs=['angle', 'throttle', 'brake'])
#to give the car a boost when starting ai mode in a race.
aiLauncher = AiLaunch(cfg.AI_LAUNCH_DURATION, cfg.AI_LAUNCH_THROTTLE, cfg.AI_LAUNCH_KEEP_ENABLED)
V.add(aiLauncher,
inputs=['user/mode', 'throttle', 'brake'],
outputs=['throttle', 'brake'])
if isinstance(ctr, JoystickController):
ctr.set_button_down_trigger(cfg.AI_LAUNCH_ENABLE_BUTTON, aiLauncher.enable_ai_launch)
class AiRunCondition:
'''
A bool part to let us know when ai is running.
'''
def run(self, mode):
if mode == "user":
return False
return True
V.add(AiRunCondition(), inputs=['user/mode'], outputs=['ai_running'])
#Ai Recording
class AiRecordingCondition:
'''
return True when in ai mode, otherwise respect the user-mode recording flag
'''
def run(self, mode, recording):
if mode == 'user':
return recording
return True
if cfg.RECORD_DURING_AI:
V.add(AiRecordingCondition(), inputs=['user/mode', 'recording'], outputs=['recording'])
#Drive train setup
if cfg.DONKEY_GYM or cfg.DRIVE_TRAIN_TYPE == "MOCK":
pass
elif cfg.DRIVE_TRAIN_TYPE == "SERVO_ESC":
from donkeycar.parts.actuator import PCA9685, PWMSteering, PWMThrottle
steering_controller = PCA9685(cfg.STEERING_CHANNEL, cfg.PCA9685_I2C_ADDR, busnum=cfg.PCA9685_I2C_BUSNUM)
steering = PWMSteering(controller=steering_controller,
left_pulse=cfg.STEERING_LEFT_PWM,
right_pulse=cfg.STEERING_RIGHT_PWM)
throttle_controller = PCA9685(cfg.THROTTLE_CHANNEL, cfg.PCA9685_I2C_ADDR, busnum=cfg.PCA9685_I2C_BUSNUM)
throttle = PWMThrottle(controller=throttle_controller,
max_pulse=cfg.THROTTLE_FORWARD_PWM,
zero_pulse=cfg.THROTTLE_STOPPED_PWM,
min_pulse=cfg.THROTTLE_REVERSE_PWM)
V.add(steering, inputs=['angle'], threaded=True)
V.add(throttle, inputs=['throttle'], threaded=True)
elif cfg.DRIVE_TRAIN_TYPE == "DC_STEER_THROTTLE":
from donkeycar.parts.actuator import Mini_HBridge_DC_Motor_PWM
steering = Mini_HBridge_DC_Motor_PWM(cfg.HBRIDGE_PIN_LEFT, cfg.HBRIDGE_PIN_RIGHT)
throttle = Mini_HBridge_DC_Motor_PWM(cfg.HBRIDGE_PIN_FWD, cfg.HBRIDGE_PIN_BWD)
V.add(steering, inputs=['angle'])
V.add(throttle, inputs=['throttle'])
elif cfg.DRIVE_TRAIN_TYPE == "DC_TWO_WHEEL":
from donkeycar.parts.actuator import TwoWheelSteeringThrottle, Mini_HBridge_DC_Motor_PWM
left_motor = Mini_HBridge_DC_Motor_PWM(cfg.HBRIDGE_PIN_LEFT_FWD, cfg.HBRIDGE_PIN_LEFT_BWD)
right_motor = Mini_HBridge_DC_Motor_PWM(cfg.HBRIDGE_PIN_RIGHT_FWD, cfg.HBRIDGE_PIN_RIGHT_BWD)
two_wheel_control = TwoWheelSteeringThrottle()
V.add(two_wheel_control,
inputs=['throttle', 'angle'],
outputs=['left_motor_speed', 'right_motor_speed'])
V.add(left_motor, inputs=['left_motor_speed'])
V.add(right_motor, inputs=['right_motor_speed'])
elif cfg.DRIVE_TRAIN_TYPE == "SERVO_HBRIDGE_PWM":
from donkeycar.parts.actuator import ServoBlaster, PWMSteering
steering_controller = ServoBlaster(cfg.STEERING_CHANNEL) #really pin
#PWM pulse values should be in the range of 100 to 200
assert(cfg.STEERING_LEFT_PWM <= 200)
assert(cfg.STEERING_RIGHT_PWM <= 200)
steering = PWMSteering(controller=steering_controller,
left_pulse=cfg.STEERING_LEFT_PWM,
right_pulse=cfg.STEERING_RIGHT_PWM)
from donkeycar.parts.actuator import Mini_HBridge_DC_Motor_PWM
motor = Mini_HBridge_DC_Motor_PWM(cfg.HBRIDGE_PIN_FWD, cfg.HBRIDGE_PIN_BWD)
V.add(steering, inputs=['angle'], threaded=True)
V.add(motor, inputs=["throttle"])
elif cfg.DRIVE_TRAIN_TYPE == "MM1":
from donkeycar.parts.robohat import RoboHATDriver
V.add(RoboHATDriver(cfg), inputs=['angle', 'throttle'])
elif cfg.DRIVE_TRAIN_TYPE == "PIGPIO_PWM":
from donkeycar.parts.actuator import PWMSteering, PWMThrottle, PiGPIO_PWM
steering_controller = PiGPIO_PWM(cfg.STEERING_PWM_PIN, freq=cfg.STEERING_PWM_FREQ, inverted=cfg.STEERING_PWM_INVERTED)
steering = PWMSteering(controller=steering_controller,
left_pulse=cfg.STEERING_LEFT_PWM,
right_pulse=cfg.STEERING_RIGHT_PWM)
throttle_controller = PiGPIO_PWM(cfg.THROTTLE_PWM_PIN, freq=cfg.THROTTLE_PWM_FREQ, inverted=cfg.THROTTLE_PWM_INVERTED)
throttle = PWMThrottle(controller=throttle_controller,
max_pulse=cfg.THROTTLE_FORWARD_PWM,
zero_pulse=cfg.THROTTLE_STOPPED_PWM,
min_pulse=cfg.THROTTLE_REVERSE_PWM)
V.add(steering, inputs=['angle'], threaded=True)
V.add(throttle, inputs=['throttle'], threaded=True)
# OLED setup
if cfg.USE_SSD1306_128_32:
from donkeycar.parts.oled import OLEDPart
auto_record_on_throttle = cfg.USE_JOYSTICK_AS_DEFAULT and cfg.AUTO_RECORD_ON_THROTTLE
oled_part = OLEDPart(cfg.SSD1306_128_32_I2C_BUSNUM, auto_record_on_throttle=auto_record_on_throttle)
V.add(oled_part, inputs=['recording', 'tub/num_records', 'user/mode'], outputs=[], threaded=True)
#add tub to save data
inputs=['cam/image_array',
'user/angle', 'user/throttle',
'user/mode']
types=['image_array',
'float', 'float',
'str']
if cfg.TRAIN_BEHAVIORS:
inputs += ['behavior/state', 'behavior/label', "behavior/one_hot_state_array"]
types += ['int', 'str', 'vector']
if cfg.CAMERA_TYPE == "D435" and cfg.REALSENSE_D435_DEPTH:
inputs += ['cam/depth_array']
types += ['gray16_array']
if cfg.HAVE_IMU or (cfg.CAMERA_TYPE == "D435" and cfg.REALSENSE_D435_IMU):
inputs += ['imu/acl_x', 'imu/acl_y', 'imu/acl_z',
'imu/gyr_x', 'imu/gyr_y', 'imu/gyr_z']
types +=['float', 'float', 'float',
'float', 'float', 'float']
if cfg.RECORD_DURING_AI:
inputs += ['pilot/angle', 'pilot/throttle']
types += ['float', 'float']
# do we want to store new records into own dir or append to existing
tub_path = cfg.DATA_PATH
tub_writer = TubWriter(tub_path, inputs=inputs, types=types, metadata=meta)
V.add(tub_writer, inputs=inputs, outputs=["tub/num_records"], run_condition='recording')
if cfg.PUB_CAMERA_IMAGES:
from donkeycar.parts.network import TCPServeValue
from donkeycar.parts.image import ImgArrToJpg
pub = TCPServeValue("camera")
V.add(ImgArrToJpg(), inputs=['cam/image_array'], outputs=['jpg/bin'])
V.add(pub, inputs=['jpg/bin'])
if type(ctr) is LocalWebController:
if cfg.DONKEY_GYM:
print("You can now go to http://localhost:%d to drive your car." % cfg.WEB_CONTROL_PORT)
else:
print("You can now go to <your hostname.local>:%d to drive your car." % cfg.WEB_CONTROL_PORT)
elif isinstance(ctr, JoystickController):
print("You can now move your joystick to drive your car.")
ctr.set_tub(tub_writer.tub)
ctr.print_controls()
#start the vehicle's main drive loop
V.start(rate_hz=cfg.DRIVE_LOOP_HZ, max_loop_count=cfg.MAX_LOOPS)
if __name__ == '__main__':
args = docopt(__doc__)
cfg = dk.load_config(myconfig=args['--myconfig'])
if args['drive']:
model_type = args['--type']
camera_type = args['--camera']
drive(cfg, model_path=args['--model'], use_joystick=args['--js'],
model_type=model_type, camera_type=camera_type,
meta=args['--meta'])
elif args['train']:
print('Use python train.py instead.\n')
| 41.790997
| 182
| 0.632107
|
7fc169d36a47a2dd552c70de9f185cf3d82e6d87
| 3,646
|
py
|
Python
|
opentracing_instrumentation/client_hooks/celery.py
|
wyattanderson/opentracing-python-instrumentation
|
5ac66b5c9537b16b422d7504871ebd2f76e46fbb
|
[
"MIT"
] | 176
|
2016-03-18T00:47:07.000Z
|
2022-03-03T09:16:04.000Z
|
opentracing_instrumentation/client_hooks/celery.py
|
wyattanderson/opentracing-python-instrumentation
|
5ac66b5c9537b16b422d7504871ebd2f76e46fbb
|
[
"MIT"
] | 108
|
2016-02-04T15:37:24.000Z
|
2022-02-07T17:42:49.000Z
|
opentracing_instrumentation/client_hooks/celery.py
|
wyattanderson/opentracing-python-instrumentation
|
5ac66b5c9537b16b422d7504871ebd2f76e46fbb
|
[
"MIT"
] | 63
|
2016-01-20T20:17:31.000Z
|
2022-02-23T06:43:00.000Z
|
from __future__ import absolute_import
import opentracing
from opentracing.ext import tags
from ..request_context import get_current_span, span_in_context
from ._patcher import Patcher
try:
from celery.app.task import Task
from celery.signals import (
before_task_publish, task_prerun, task_success, task_failure
)
except ImportError:
pass
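# If Celery is not importable the wrappers below are never defined, and
# CeleryPatcher.applicable (which checks globals() for '_task_apply_async')
# evaluates to False, so patching is skipped.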
else:
_task_apply_async = Task.apply_async
def task_apply_async_wrapper(task, args=None, kwargs=None, **other_kwargs):
operation_name = 'Celery:apply_async:{}'.format(task.name)
span = opentracing.tracer.start_span(operation_name=operation_name,
child_of=get_current_span())
set_common_tags(span, task, tags.SPAN_KIND_RPC_CLIENT)
with span_in_context(span), span:
result = _task_apply_async(task, args, kwargs, **other_kwargs)
span.set_tag('celery.task_id', result.task_id)
return result
def set_common_tags(span, task, span_kind):
span.set_tag(tags.SPAN_KIND, span_kind)
span.set_tag(tags.COMPONENT, 'Celery')
span.set_tag('celery.task_name', task.name)
def before_task_publish_handler(headers, **kwargs):
headers['parent_span_context'] = span_context = {}
opentracing.tracer.inject(span_context=get_current_span().context,
format=opentracing.Format.TEXT_MAP,
carrier=span_context)
def task_prerun_handler(task, task_id, **kwargs):
request = task.request
operation_name = 'Celery:run:{}'.format(task.name)
child_of = None
if request.delivery_info.get('is_eager'):
child_of = get_current_span()
else:
if getattr(request, 'headers', None) is not None:
# Celery 3.x
parent_span_context = request.headers.get('parent_span_context')
else:
# Celery 4.x
parent_span_context = getattr(request, 'parent_span_context', None)
if parent_span_context:
child_of = opentracing.tracer.extract(
opentracing.Format.TEXT_MAP, parent_span_context
)
task.request.span = span = opentracing.tracer.start_span(
operation_name=operation_name,
child_of=child_of,
)
set_common_tags(span, task, tags.SPAN_KIND_RPC_SERVER)
span.set_tag('celery.task_id', task_id)
request.tracing_context = span_in_context(span)
request.tracing_context.__enter__()
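# The span and context opened here are closed by finish_current_span, invoked
# from the task_success / task_failure signal handlers below.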
def finish_current_span(task, exc_type=None, exc_val=None, exc_tb=None):
task.request.span.finish()
task.request.tracing_context.__exit__(exc_type, exc_val, exc_tb)
def task_success_handler(sender, **kwargs):
finish_current_span(task=sender)
def task_failure_handler(sender, exception, traceback, **kwargs):
finish_current_span(
task=sender,
exc_type=type(exception),
exc_val=exception,
exc_tb=traceback,
)
class CeleryPatcher(Patcher):
applicable = '_task_apply_async' in globals()
def _install_patches(self):
Task.apply_async = task_apply_async_wrapper
before_task_publish.connect(before_task_publish_handler)
task_prerun.connect(task_prerun_handler)
task_success.connect(task_success_handler)
task_failure.connect(task_failure_handler)
def _reset_patches(self):
Task.apply_async = _task_apply_async
before_task_publish.disconnect(before_task_publish_handler)
task_prerun.disconnect(task_prerun_handler)
task_success.disconnect(task_success_handler)
task_failure.disconnect(task_failure_handler)
CeleryPatcher.configure_hook_module(globals())
| 31.982456
| 79
| 0.706528
|
903404144f72889c83f6b4089e79b01c222461c8
| 833
|
py
|
Python
|
linkysets/common/mixins.py
|
hqrrylyu/linkysets
|
1b8c319820bdf116a5cad7efff69178e739cf26b
|
[
"MIT"
] | null | null | null |
linkysets/common/mixins.py
|
hqrrylyu/linkysets
|
1b8c319820bdf116a5cad7efff69178e739cf26b
|
[
"MIT"
] | 5
|
2021-04-08T19:20:07.000Z
|
2021-09-22T19:03:30.000Z
|
linkysets/common/mixins.py
|
hqrrylyu/polemicflow
|
1b8c319820bdf116a5cad7efff69178e739cf26b
|
[
"MIT"
] | null | null | null |
from typing import Any, ClassVar, Dict, Sequence
from django.db.models import Model
from django.http import HttpRequest
class ObjectPermissionMixin:
request: HttpRequest
object: Model
perms: ClassVar[Sequence[str]] = ["view", "add", "change", "delete"]
def get_context_data(self, **kwargs) -> Dict[str, Any]:
context = super().get_context_data(**kwargs) # type: ignore
user = self.request.user
model = type(self.object)
app_label = model._meta.app_label
model_name = model._meta.model_name
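# Builds permission codenames of the form "<app_label>.<perm>_<model_name>"
# (e.g., hypothetically, "linkysets.change_entryset") and exposes the checks to
# templates as context["object_perms"]["has_<perm>_perm"].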
object_perms = {}
for perm in self.perms:
perm_codename = f"{app_label}.{perm}_{model_name}"
object_perms[f"has_{perm}_perm"] = user.has_perm(perm_codename, self.object)
context["object_perms"] = object_perms
return context
| 32.038462
| 88
| 0.659064
|
4fe473ecf2be612d95f7d7cf79b40aba5691388c
| 2,433
|
py
|
Python
|
venv/Lib/site-packages/pyrogram/raw/functions/messages/get_scheduled_messages.py
|
D1ne2021/jjhhhjj
|
a090da30983b3ef276dfe4cef2ded4526f36002a
|
[
"MIT"
] | 2
|
2021-12-13T07:09:55.000Z
|
2022-01-12T12:15:20.000Z
|
venv/Lib/site-packages/pyrogram/raw/functions/messages/get_scheduled_messages.py
|
hoangkiet1906/Botcie_ver1
|
c133b915edde06dac690a7dc6ca160f6792fc4c8
|
[
"MIT"
] | null | null | null |
venv/Lib/site-packages/pyrogram/raw/functions/messages/get_scheduled_messages.py
|
hoangkiet1906/Botcie_ver1
|
c133b915edde06dac690a7dc6ca160f6792fc4c8
|
[
"MIT"
] | null | null | null |
# Pyrogram - Telegram MTProto API Client Library for Python
# Copyright (C) 2017-2021 Dan <https://github.com/delivrance>
#
# This file is part of Pyrogram.
#
# Pyrogram is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyrogram is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyrogram. If not, see <http://www.gnu.org/licenses/>.
from io import BytesIO
from pyrogram.raw.core.primitives import Int, Long, Int128, Int256, Bool, Bytes, String, Double, Vector
from pyrogram.raw.core import TLObject
from pyrogram import raw
from typing import List, Union, Any
# # # # # # # # # # # # # # # # # # # # # # # #
# !!! WARNING !!! #
# This is a generated file! #
# All changes made in this file will be lost! #
# # # # # # # # # # # # # # # # # # # # # # # #
class GetScheduledMessages(TLObject): # type: ignore
"""Telegram API method.
Details:
- Layer: ``126``
- ID: ``0xbdbb0464``
Parameters:
peer: :obj:`InputPeer <pyrogram.raw.base.InputPeer>`
id: List of ``int`` ``32-bit``
Returns:
:obj:`messages.Messages <pyrogram.raw.base.messages.Messages>`
"""
__slots__: List[str] = ["peer", "id"]
ID = 0xbdbb0464
QUALNAME = "functions.messages.GetScheduledMessages"
def __init__(self, *, peer: "raw.base.InputPeer", id: List[int]) -> None:
self.peer = peer # InputPeer
self.id = id # Vector<int>
@staticmethod
def read(data: BytesIO, *args: Any) -> "GetScheduledMessages":
# No flags
peer = TLObject.read(data)
id = TLObject.read(data, Int)
return GetScheduledMessages(peer=peer, id=id)
def write(self) -> bytes:
data = BytesIO()
data.write(Int(self.ID, False))
# No flags
data.write(self.peer.write())
data.write(Vector(self.id, Int))
return data.getvalue()
| 31.192308
| 103
| 0.614468
|
e0ca31e4a045d542f5b977549fbd03429dc1ba32
| 1,082
|
py
|
Python
|
error_correction/log.py
|
BoiseState/NAM
|
ba47a02487555ee2d6340900b9b4a1dbe14d88fd
|
[
"MIT"
] | 1
|
2021-04-08T14:41:51.000Z
|
2021-04-08T14:41:51.000Z
|
error_correction/log.py
|
gmortuza/dnam
|
816b5d7e4d1adfd95a56f6494a12856f08a38bee
|
[
"MIT"
] | null | null | null |
error_correction/log.py
|
gmortuza/dnam
|
816b5d7e4d1adfd95a56f6494a12856f08a38bee
|
[
"MIT"
] | 1
|
2022-02-06T22:32:48.000Z
|
2022-02-06T22:32:48.000Z
|
import logging
import sys
def get_logger(verbose=0, logger_name=__name__):
"""
Configure a logger with the requested verbosity level.
:param verbose: 0 -> Print only errors. This is the default.
1 -> Print debug, information, warning and error
2 -> Print information, warning and error
3 -> Print warning and error
:return: Logger
"""
logger = logging.getLogger(logger_name)
stream_handler = logging.StreamHandler(sys.stdout)
if verbose == 1:
stream_handler.setLevel(logging.DEBUG)
logger.setLevel(logging.DEBUG)
elif verbose == 2:
stream_handler.setLevel(logging.INFO)
logger.setLevel(logging.INFO)
elif verbose == 3:
stream_handler.setLevel(logging.WARNING)
logger.setLevel(logging.WARNING)
else:
stream_handler.setLevel(logging.ERROR)
logger.setLevel(logging.ERROR)
stream_handler.setFormatter(logging.Formatter('%(levelname)s:%(name)s:%(message)s'))
logger.addHandler(stream_handler)
return logger
| 32.787879
| 88
| 0.656192
|
9197e7ff8bf5eaa7d0276f80f2bf77fad7ada844
| 6,136
|
py
|
Python
|
motion_compensation_loss_func/compensation_loss.py
|
Hazeliii/E-GMA-master
|
8ed41079bfaf2834a0df991739699ec80704f1b6
|
[
"MIT"
] | null | null | null |
motion_compensation_loss_func/compensation_loss.py
|
Hazeliii/E-GMA-master
|
8ed41079bfaf2834a0df991739699ec80704f1b6
|
[
"MIT"
] | null | null | null |
motion_compensation_loss_func/compensation_loss.py
|
Hazeliii/E-GMA-master
|
8ed41079bfaf2834a0df991739699ec80704f1b6
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn.functional as F
from motion_compensation_loss_func.warp_and_gen_IWE import *
import numpy as np
'''
Motion-compensation-based loss functions, including:
1.Squared average timestamp images objective (Zhu et al, Unsupervised Event-based
Learning of Optical Flow, Depth, and Egomotion, CVPR19)
2.sobel_based_loss
3.Stoffregen et al, Event Cameras, Contrast Maximization and Reward Functions: an Analysis, CVPR19
'''
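# The Zhu et al. objective implemented in zhu_based_loss below is
#   L = sum_x T_plus(x)^2 + sum_x T_minus(x)^2
# where T_plus / T_minus are images of average timestamps of the warped
# positive / negative events.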
class motion_compensation_losses:
def __init__(self, events, flow_pre, device):
# print('In class motion_compensation_losses .')
# self.warp_fun = Warp_MVSEC_events(events, flow_pre, device)
self.warped_xs, self.warped_ys, self.ts, self.ps = Warp_MVSEC_events(events, flow_pre, device).warp_events()
self.device = device
def sobel_xy(self, im):
sobel_x = np.array([[-1, 0, 1], [-2, 0, 2], [-1, 0, 1]], dtype='float32')
sobel_x = np.reshape(sobel_x, (1, 3, 3))
sobel_y = np.array([[-1, -2, -1], [0, 0, 0], [1, 2, 1]], dtype='float32')
sobel_y = np.reshape(sobel_y, (1, 3, 3))
sobel = np.concatenate(
[np.repeat(sobel_x, 1, axis=0).reshape((1, 1, 3, 3)),
np.repeat(sobel_y, 1, axis=0).reshape((1, 1, 3, 3))],
axis=0
)
# print('sobel.shape:', sobel.shape) sobel.shape: (2, 1, 3, 3)
conv = nn.Conv2d(1, 2, kernel_size=3, padding=1, bias=False)
conv.weight.data = torch.from_numpy(sobel)
conv.cuda()
edge = conv(Variable(im))
# print('edge.shape:', edge.shape) edge.shape: torch.Size([4, 2, 256, 256])
edge_x = edge[0, 0, :, :].squeeze()
edge_y = edge[0, 1, :, :].squeeze()
# print('edge_x.shape, edge_y.shape:', edge_x.shape, edge_y.shape) torch.Size([256, 256]) torch.Size([256, 256])
return edge_x, edge_y
def zhu_based_loss(self):
'''
Squared average timestamp images objective (Zhu et al, Unsupervised Event-based
Learning of Optical Flow, Depth, and Egomotion, CVPR19)
Loss given by g(x)^2*h(x)^2 where g(x) is image of average timestamps of positive events
and h(x) is image of average timestamps of negative events.
'''
gen_func = GenIwe(self.warped_xs, self.warped_ys, self.ts, self.ps)
img_pos, img_neg = gen_func.events_to_timestamp_image()
loss = torch.sum(img_pos * img_pos) + torch.sum(img_neg * img_neg)
return loss
def sobel_based_loss(self):
# Use the Sobel-operator edge strength of the event count image as the loss,
# normalizing edge_x and edge_y to [0, 1] respectively.
gen_func = GenIwe(self.warped_xs, self.warped_ys, self.ts, self.ps, using_polatity=False)
events_count_img = gen_func.events_to_count_img()
# Normalize to [0, 255]
max1 = torch.tensor(255.0, dtype=torch.float).cuda()
img_max = torch.max(events_count_img)
img_min = torch.min(events_count_img)
norm_events_count_img = (max1 * (events_count_img - img_min) / (img_max - img_min))
events_count_img = torch.unsqueeze(torch.unsqueeze(norm_events_count_img, dim=0), dim=0) # (1, 1, H, W)
edge_x, edge_y = self.sobel_xy(events_count_img)
# Normalize edge_x and edge_y to [0, 1]
max2 = torch.tensor(1.0, dtype=torch.float).cuda()
edge_x_abs, edge_y_abs = torch.abs(edge_x), torch.abs(edge_y)
x_max, x_min = torch.max(edge_x_abs), torch.min(edge_x_abs)
y_max, y_min = torch.max(edge_y_abs), torch.min(edge_y_abs)
norm_edge_x = (max2 * (edge_x_abs - x_min) / (x_max - x_min))
norm_edge_y = (max2 * (edge_y_abs - y_min) / (y_max - y_min))
        edge = torch.mean(norm_edge_x + norm_edge_y)  # in [0, 2]
        return torch.tensor(2., device=self.device) - edge  # in [0, 2]; smaller is better
def sos_objective(self, img):
"""
Loss given by g(x)^2 where g(x) is IWE
"""
sos = torch.mean(img * img) # [0,1]
return -sos
def soe_objective(self, img):
"""
Sum of exponentials objective (Stoffregen et al, Event Cameras, Contrast
Maximization and Reward Functions: an Analysis, CVPR19)
Loss given by e^g(x) where g(x) is IWE
"""
exp = torch.exp(img)
soe = torch.mean(exp)
return -soe
def moa_objective(self, img):
"""
Max of accumulations objective
Loss given by max(g(x)) where g(x) is IWE
"""
moa = torch.max(img)
return moa
def isoa_objective(self, img, thresh=0.5):
"""
Inverse sum of accumulations objective
        Loss given by sum(1 where g(x) > thresh else 0) where g(x) is IWE.
This formulation has similar properties to original ISoA, but negation makes derivative
more stable than inversion.
"""
isoa = torch.sum(torch.where(img > thresh, 1., 0.))
return isoa
def sosa_objective(self, img, p=3.):
"""
Sum of Supressed Accumulations objective
Loss given by e^(-p*g(x)) where g(x) is IWE. p is arbitrary shifting factor,
higher values give better noise performance but lower accuracy.
"""
exp = torch.exp(-p * img)
sosa = torch.sum(exp) # [32636.928, 65536] 256*256= 65536
return -sosa
def r1_objective(self, img, p=3.):
"""
R1 objective (Stoffregen et al, Event Cameras, Contrast
Maximization and Reward Functions: an Analysis, CVPR19)
Loss given by SOS and SOSA combined
"""
sos = self.sos_objective(img)
sosa = self.sosa_objective(img, p)
return -sos * sosa
def contrast_max_based_loss(self):
# print('In contrast_max_baed_loss.')
gen_func = GenIwe(self.warped_xs, self.warped_ys, self.ts, self.ps)
events_count_img = gen_func.events_to_count_img()
        # Normalize to [0, 1]
max1 = torch.tensor(1.0, dtype=torch.float).cuda()
img_max = torch.max(events_count_img)
img_min = torch.min(events_count_img)
norm_events_count_img = (max1 * (events_count_img - img_min) / (img_max - img_min))
return self.r1_objective(norm_events_count_img) # [-65536,0]
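# --- Editor's note: hedged usage sketch -----------------------------------
# A minimal, self-contained illustration of the contrast-maximization
# rewards documented above (SoS, SoSA and the combined R1), evaluated on a
# synthetic stand-in for a normalized IWE in [0, 1]. The Warp_MVSEC_events /
# GenIwe classes and real event data from this repository are not reproduced
# here; the random tensor is only a placeholder.
def _demo_contrast_rewards():
    iwe = torch.rand(256, 256)                # placeholder for a normalized IWE
    sos = torch.mean(iwe * iwe)               # sum-of-squares reward
    sosa = torch.sum(torch.exp(-3.0 * iwe))   # suppressed accumulations, p = 3
    r1 = sos * sosa                           # combined R1-style reward
    # Several of the class methods above return the negated reward so that
    # minimizing the loss corresponds to a sharper (higher-contrast) IWE.
    return sos.item(), sosa.item(), r1.item()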
| 39.333333
| 120
| 0.621252
|
706f7f11dc754ed9c368c81c899f0fab4314fdc9
| 395
|
py
|
Python
|
traveling/asgi.py
|
julesc00/travel-agency
|
61c92087bc50ef4dbdb5e46636b2802e3a94bc0c
|
[
"MIT"
] | null | null | null |
traveling/asgi.py
|
julesc00/travel-agency
|
61c92087bc50ef4dbdb5e46636b2802e3a94bc0c
|
[
"MIT"
] | null | null | null |
traveling/asgi.py
|
julesc00/travel-agency
|
61c92087bc50ef4dbdb5e46636b2802e3a94bc0c
|
[
"MIT"
] | null | null | null |
"""
ASGI config for traveling project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'traveling.settings')
application = get_asgi_application()
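# --- Editor's note: hedged usage example -----------------------------------
# To serve this application with an ASGI server, a command along these lines
# can be used (uvicorn is shown only as an example server; it is not a
# dependency declared by this file):
#
#     uvicorn traveling.asgi:application --host 127.0.0.1 --port 8000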
| 23.235294
| 78
| 0.787342
|
1b69c96011689c37b4f5199f9ceaaf6bc8c0fec8
| 890
|
py
|
Python
|
scripts/check_python.py
|
lesperry/Metagenomics
|
a1d8b7d96b32ab83cebe513e889b6ef82f7c1dd6
|
[
"CC-BY-3.0"
] | 1
|
2021-02-28T18:59:16.000Z
|
2021-02-28T18:59:16.000Z
|
scripts/check_python.py
|
lesperry/Metagenomics
|
a1d8b7d96b32ab83cebe513e889b6ef82f7c1dd6
|
[
"CC-BY-3.0"
] | 12
|
2020-07-24T23:55:19.000Z
|
2021-12-19T11:40:06.000Z
|
scripts/check_python.py
|
lesperry/Metagenomics
|
a1d8b7d96b32ab83cebe513e889b6ef82f7c1dd6
|
[
"CC-BY-3.0"
] | null | null | null |
"""
If the current installed Python version is not supported, prints an error
message to stderr and returns 1
"""
from __future__ import print_function
import sys
def check_python():
if sys.version_info[:2] >= (3, 5):
# supported
return
else:
version_string = '.'.join(str(_) for _ in sys.version_info[:3])
msg = """\
ERROR: Your Python version is: %s
Galaxy is currently supported on Python >=3.5 .
To run Galaxy, please install a supported Python version.
If a supported version is already installed but is not your default,
https://docs.galaxyproject.org/en/latest/admin/python.html contains instructions
on how to force Galaxy to use a different version.""" % version_string
print(msg, file=sys.stderr)
raise Exception(msg)
if __name__ == '__main__':
try:
check_python()
except Exception:
sys.exit(1)
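# --- Editor's note: hedged example ------------------------------------------
# The gate above relies on plain tuple comparison of sys.version_info, e.g.
# (3, 8) >= (3, 5) is True while (2, 7) >= (3, 5) is False. A caller could
# reuse it as follows (hypothetical import path, not part of Galaxy itself):
#
#     from check_python import check_python
#     check_python()  # raises Exception on unsupported interpreters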
| 27.8125
| 80
| 0.686517
|
671aafc37e8dbdcc1b0158417ec6c2c264118234
| 797
|
py
|
Python
|
pages/remotepython.py
|
przor3n/selenium_automation
|
88b43ea40936cf4eee8370e53dae8bfec9a8b19e
|
[
"MIT"
] | null | null | null |
pages/remotepython.py
|
przor3n/selenium_automation
|
88b43ea40936cf4eee8370e53dae8bfec9a8b19e
|
[
"MIT"
] | null | null | null |
pages/remotepython.py
|
przor3n/selenium_automation
|
88b43ea40936cf4eee8370e53dae8bfec9a8b19e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from selenium.webdriver.common.by import By
from codelib.automation_testing.selenium_automation.selenium_automation.pages.base import \
BasePage
from codelib.automation_testing.selenium_automation.selenium_automation.timeunits import \
five
__all__ = []
url = "https://www.remotepython.com"
search = (By.XPATH, "//input[@name='q']")
search_result = ".item"
link = ".item h3 > a" # for link and text
class RemotePython(BasePage):
"""Actions for We Work Remotely
"""
def search(self, phrase):
self.open(url)
self.b.clear_field(search)
# click remote and wait
self.b.enter_text(phrase, search)
self.b.press_key('ENTER', search)
self.b.sleep(five)
return self.source()
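# --- Editor's note: hedged reference snippet --------------------------------
# The page object above delegates browser actions to a BasePage wrapper that
# is not shown here. For reference only, the same search flow expressed with
# the plain Selenium WebDriver API (assumes a locally installed Chrome
# driver; this is not part of the page-object framework itself):
#
#     from selenium import webdriver
#     from selenium.webdriver.common.keys import Keys
#
#     driver = webdriver.Chrome()
#     driver.get(url)
#     box = driver.find_element(*search)   # same (By.XPATH, ...) locator as above
#     box.clear()
#     box.send_keys("django")
#     box.send_keys(Keys.ENTER)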
| 24.151515
| 91
| 0.673777
|
cdc5278eee43070bc3afaebe52c0e0c5f8c91509
| 8,512
|
py
|
Python
|
batteries/Li2o2_Data_Processing.py
|
coresresearch/data-analysis
|
d13c677fcda5950c4939c11dbef335591243b94a
|
[
"BSD-3-Clause"
] | null | null | null |
batteries/Li2o2_Data_Processing.py
|
coresresearch/data-analysis
|
d13c677fcda5950c4939c11dbef335591243b94a
|
[
"BSD-3-Clause"
] | null | null | null |
batteries/Li2o2_Data_Processing.py
|
coresresearch/data-analysis
|
d13c677fcda5950c4939c11dbef335591243b94a
|
[
"BSD-3-Clause"
] | 1
|
2020-08-04T14:16:10.000Z
|
2020-08-04T14:16:10.000Z
|
import pandas as pd
import numpy as np
import io
import matplotlib.pyplot as plt
import os
"===================== USER INPUTS ============================="
#Reference https://www.fuelcellstore.com/avcarb-mgl190
carb_area=(9**2)*np.pi #in mm^2
carb_v = carb_area*0.19 #in mm^3
carb_den = 0.44*.001 #in g/mm^3
porosity = 0.78
carb_m = carb_v*carb_den*(1. - porosity)
# Point to the data files
path = '/Users/decaluwe/OneDrive - Colorado School of Mines/CORES Research Group/Laboratory Resources/Gamry Data/Li-O2 Battery/20200317_50_40_10_CarbonCatalystBinder'
# Set equal to 1 to see a graph, 0 to hide it:
flag_charge_discharge = 1
flag_voltage_gap = 0
# Number of cycles you are plotting:
n_cycles = 8
first_cycle = 2
# Plot file name preamble:
savename = '50_40_10_omit_cycle_1'
"===================== PLOTTING OPTIONS ============================="
cmap = plt.get_cmap('plasma')
color_ind = np.linspace(0,1,n_cycles)
colors = list()
for i in np.arange(n_cycles):
colors.append(cmap(color_ind[i]))
fontname = 'Times New Roman'
"===================== PLOTTING FUNCTION ============================="
def graph(file):
    # 'deliminator' never occurs in the file, so each raw line is read as a
    # single field here and split on tabs below.
    Data = pd.read_table(file, sep='deliminator', engine='python', header=None)
Data.dropna(inplace = True)
Data = Data[0].str.split("\t", expand = True)
# This means we cut the data when the time is zero. To look at full data,
# use d_row = list(D[0]).index('#') + 1
d_row = list(Data[1]).index('0')
Data = Data.iloc[d_row+1 : ,]
Data[1] = Data[1].astype(float) # Time [s]
Data[2] = Data[2].astype(float) # Voltage [V]
Data[3] = Data[3].astype(float) # Current [A]
Data['capacity'] = Data[1] * abs(Data[3])/carb_m*1000/3600 #convert to mAh/g
title = file.split("_cycle")
# Read out the cycle number:
title2 = title[1].split(".")
n_cycle = int(title2[0]) # cycle number
i_cycle = n_cycle - first_cycle #python indexing starts at zero.
    #---SCD NOTE: IN GENERAL, CAN YOU DOCUMENT THESE LINES? WHAT DO THEY DO?---#
    # The loop below finds the first sample whose voltage exceeds the initial
    # (rest) voltage, i.e. the start of the charge step. The voltage trace from
    # that row onward is shifted up by 'row' positions so the 'charge' curve
    # aligns with the start of the capacity axis, while 'discharge' keeps only
    # the samples at or below the initial voltage.
for cell in Data[2]:
if cell > Data.iloc[0,2]:
row = list(Data[2]).index(cell)
newc=Data.iloc[row:,2]
Data['adjust']=newc
Data['charge']=Data['adjust'].shift(-1*row)
break
Data['discharge'] = Data[2].where(Data[2]<=Data.iloc[0,2])
Discharge = Data[Data[3] < 0][:]
Charge = Data[Data[3] > 0][:]
discharge_capacity = max(Discharge['capacity'])
Charge['capacity'] = Charge['capacity'].iloc[:] - discharge_capacity
charge_capacity = max(Charge['capacity'])
plt.figure(0)
p, = plt.plot(Discharge['capacity'], Discharge['charge'], \
linewidth = 2., label = 'Cycle '+title2[0],color = colors[i_cycle], \
zorder = 10*i_cycle)
plt.plot(Discharge['capacity'], Discharge['discharge'], linewidth = 2., \
label = '_nolegend_', color = colors[i_cycle], zorder = 10*i_cycle)
Data['Voltgap'] = Data['charge'] -Data['discharge']
Voltgap = Data.Voltgap.mean()
Voltgap2 = 5
return [Voltgap, Voltgap2, p, title2[0], discharge_capacity, charge_capacity]
"=============== LOOP THROUGH CYCLES AND PLOT THEM ======================="
i=0
iform =[]
V = []
V2 = []
plots = {}
# Initialize figure:
fig1 = plt.figure(0)
fig1.add_axes([0.2,0.25,0.78,0.73])
fig1.set_size_inches((3,2.25))
plt.xlabel('Capacity (mAh/g-carbon)', fontsize=14, fontname = 'Times New Roman')
plt.ylabel('Potential (V)', fontsize=14, fontname = 'Times New Roman')
discharge_capacities = np.zeros(n_cycles)
charge_capacities = np.zeros(n_cycles)
for file in os.listdir(path):
if file.find('cycle') > -1:
if file.find('EIS') < 0:
print(file)
Voltgap_1, Voltgap_2, p, n, cap_d, cap_c =graph(path + "/"+ file)
discharge_capacities[int(n)-first_cycle] = cap_d
charge_capacities[int(n) - first_cycle] = cap_c
#print(Voltgap)
plots[n] = p
V.append(Voltgap_1)
V2.append(Voltgap_2)
iform.append(i)
i=i+1
plt.ylim([1.5, 5.])
font = plt.matplotlib.font_manager.FontProperties(family=fontname,size=10,weight='normal')
legend_plots = []
legend_strings = []
for i in np.arange(n_cycles):
legend_plots.append(plots[str(i+first_cycle)])
legend_strings.append('Cycle '+str(i+first_cycle))
plt.legend(legend_plots, legend_strings, frameon = False, columnspacing = 0.35, \
loc=(0.35,0.25), borderaxespad = 0.25, handletextpad = 0.5, prop=font, ncol=2, \
handlelength=1.)
# Get current axes handle:
ax = plt.gca()
#Format tick labels:
for tick in ax.xaxis.get_major_ticks():
tick.label1.set_fontsize(12)
tick.label1.set_fontname('Times New Roman')
for tick in ax.yaxis.get_major_ticks():
tick.label1.set_fontsize(12)
tick.label1.set_fontname('Times New Roman')
plt.savefig(savename+'charge-discharge.pdf',format='pdf',dpi=350)
Report = pd.DataFrame()
Report['V1'] = V
Report['V2'] = V2
if flag_voltage_gap:
plt.figure(1)
plt.scatter(iform, Report['V1'], marker='o')
# Plot capacity vs. cycle number:
fig3 = plt.figure(2)
fig3.add_axes([0.2,0.25,0.75,0.7])
fig3.set_size_inches((3,2.25))
font = plt.matplotlib.font_manager.FontProperties(family=fontname,size=10,weight='normal')
plt.xlabel('Cycle', fontsize=14, fontname = 'Times New Roman')
plt.ylabel('Capacity (mAh/g-carbon)', fontsize=12, fontname = 'Times New Roman')
plt.scatter(np.arange(n_cycles)+1,discharge_capacities,marker='o',color=colors[0])
plt.scatter(np.arange(n_cycles)+1,charge_capacities,marker='o',color=colors[6])
# Get current axes handle:
ax = plt.gca()
#Format tick labels:
for tick in ax.xaxis.get_major_ticks():
tick.label1.set_fontsize(12)
tick.label1.set_fontname('Times New Roman')
for tick in ax.yaxis.get_major_ticks():
tick.label1.set_fontsize(12)
tick.label1.set_fontname('Times New Roman')
plt.xticks([1,2,3,4,5,6,7,8,9,10])
plt.ylim([20., 100.])
plt.legend(['Discharge','Charge'], loc= 'upper right', frameon = False, prop=font, handletextpad = 0.01)
plt.savefig(savename+'capacity-vs-cycle.pdf',format='pdf',dpi=350)
plt.show()
"=========== COMPARE CATALYST AND NON-CATALYST PERFORMANCE ==================="
path = '/Users/decaluwe/OneDrive - Colorado School of Mines/CORES Research Group/Laboratory Resources/Gamry Data/Li-O2 Battery/catalyst_comparison' #'C:/Users/Amy LeBar/Documents/Data'
# Set equal to 1 to see a graph, 0 to hide it:
flag_charge_discharge = 1
flag_voltage_gap = 0
# Number of cycles you are plotting:
n_cycles = 2
fontname = 'Times New Roman'
colors = [colors[0],colors[6]]#['r','b']
fontname = 'Times New Roman'
i=0
iform =[]
V = []
V2 = []
plots = {}
fig1 = plt.figure(0)
fig1.add_axes([0.2,0.25,0.75,0.7])
fig1.set_size_inches((3,2.25))
plt.xlabel('Capacity (mAh/g-carbon)', fontsize=14, fontname = 'Times New Roman')
plt.ylabel('Potential (V)', fontsize=14, fontname = 'Times New Roman')
discharge_capacities = np.zeros(n_cycles)
charge_capacities = np.zeros(n_cycles)
iplot = 0
for file in os.listdir(path):
if file.find('cycle') > -1:
if file.find('EIS') < 0:
print(file)
Voltgap_1, Voltgap_2, p, n, cap_d, cap_c =graph(path + "/"+ file)
discharge_capacities[int(n)-1] = cap_d
charge_capacities[int(n) - 1] = cap_c
#print(Voltgap)
plots[str(iplot)] = p
V.append(Voltgap_1)
V2.append(Voltgap_2)
iform.append(i)
i=i+1
iplot+=1
plt.xticks([0., 20., 40., 60., 80.])
plt.yticks([1., 2., 3., 4., 5.])
#plt.xlim([0.,4.5])
#plt.ylim([1.75, 4.75])
font = plt.matplotlib.font_manager.FontProperties(family=fontname,size=12,weight='normal')
legend_plots = []
for i in np.arange(n_cycles):
legend_plots.append(plots[str(i)])
plt.legend(legend_plots, ['No catalyst','Catalyst'], frameon = False, columnspacing = 0.35,\
loc=[0.5,0.42], borderaxespad = 0.25, handletextpad = 0.5, prop=font, ncol=1, \
handlelength=1.)
# Get current axes handle:
ax = plt.gca()
#Format tick labels:
for tick in ax.xaxis.get_major_ticks():
tick.label1.set_fontsize(12)
tick.label1.set_fontname('Times New Roman')
for tick in ax.yaxis.get_major_ticks():
tick.label1.set_fontsize(12)
tick.label1.set_fontname('Times New Roman')
plt.savefig(savename+'_catalyst_comparison.pdf',format='pdf',dpi=350)
plt.show()
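"============== EDITOR'S NOTE: HEDGED WORKED EXAMPLE ================="
# Unit check for the 'capacity' column computed in graph(): time [s] times
# |current| [A] gives charge in coulombs; dividing by 3600 gives Ah,
# multiplying by 1000 gives mAh, and dividing by the carbon mass gives
# mAh per gram of carbon. The numbers below reuse the electrode geometry
# assumed at the top of this script and are purely illustrative.
def demo_capacity_conversion():
    area = (9 ** 2) * np.pi                 # electrode area, mm^2
    volume = area * 0.19                    # electrode volume, mm^3
    density = 0.44 * 0.001                  # bulk density, g/mm^3
    mass = volume * density * (1. - 0.78)   # carbon mass at 78 % porosity, g
    t, current = 3600.0, 1e-3               # one hour at 1 mA
    capacity = t * abs(current) / mass * 1000 / 3600
    return capacity                         # roughly 214 mAh/g-carbon here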
| 31.065693
| 184
| 0.636043
|
ca7992412472181799f3ebfca2ebb37517eaf42f
| 30,985
|
py
|
Python
|
charmhelpers/contrib/database/mysql.py
|
AurelienLourot/charm-helpers
|
b5725ac546372e7d4004d15095f79cdd5e7da687
|
[
"Apache-2.0"
] | 15
|
2017-09-20T13:37:10.000Z
|
2021-11-03T13:31:15.000Z
|
charmhelpers/contrib/database/mysql.py
|
AurelienLourot/charm-helpers
|
b5725ac546372e7d4004d15095f79cdd5e7da687
|
[
"Apache-2.0"
] | 313
|
2017-09-15T13:22:58.000Z
|
2022-02-25T17:55:01.000Z
|
charmhelpers/contrib/database/mysql.py
|
AurelienLourot/charm-helpers
|
b5725ac546372e7d4004d15095f79cdd5e7da687
|
[
"Apache-2.0"
] | 136
|
2017-09-19T13:37:33.000Z
|
2022-03-29T11:08:00.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper for working with a MySQL database"""
import collections
import copy
import json
import re
import sys
import platform
import os
import glob
import six
# from string import upper
from charmhelpers.core.host import (
CompareHostReleases,
lsb_release,
mkdir,
pwgen,
write_file
)
from charmhelpers.core.hookenv import (
config as config_get,
relation_get,
related_units,
unit_get,
log,
DEBUG,
ERROR,
INFO,
WARNING,
leader_get,
leader_set,
is_leader,
)
from charmhelpers.fetch import (
apt_install,
apt_update,
filter_installed_packages,
)
from charmhelpers.contrib.network.ip import get_host_ip
try:
import MySQLdb
except ImportError:
apt_update(fatal=True)
if six.PY2:
apt_install(filter_installed_packages(['python-mysqldb']), fatal=True)
else:
apt_install(filter_installed_packages(['python3-mysqldb']), fatal=True)
import MySQLdb
class MySQLSetPasswordError(Exception):
pass
class MySQLHelper(object):
def __init__(self, rpasswdf_template, upasswdf_template, host='localhost',
migrate_passwd_to_leader_storage=True,
delete_ondisk_passwd_file=True, user="root", password=None,
port=None, connect_timeout=None):
self.user = user
self.host = host
self.password = password
self.port = port
# default timeout of 30 seconds.
self.connect_timeout = connect_timeout or 30
# Password file path templates
self.root_passwd_file_template = rpasswdf_template
self.user_passwd_file_template = upasswdf_template
self.migrate_passwd_to_leader_storage = migrate_passwd_to_leader_storage
# If we migrate we have the option to delete local copy of root passwd
self.delete_ondisk_passwd_file = delete_ondisk_passwd_file
self.connection = None
def connect(self, user='root', password=None, host=None, port=None,
connect_timeout=None):
_connection_info = {
"user": user or self.user,
"passwd": password or self.password,
"host": host or self.host
}
# set the connection timeout; for mysql8 it can hang forever, so some
# timeout is required.
timeout = connect_timeout or self.connect_timeout
if timeout:
_connection_info["connect_timeout"] = timeout
# port cannot be None but we also do not want to specify it unless it
# has been explicit set.
port = port or self.port
if port is not None:
_connection_info["port"] = port
log("Opening db connection for %s@%s" % (user, host), level=DEBUG)
try:
self.connection = MySQLdb.connect(**_connection_info)
except Exception as e:
log("Failed to connect to database due to '{}'".format(str(e)),
level=ERROR)
raise
def database_exists(self, db_name):
cursor = self.connection.cursor()
try:
cursor.execute("SHOW DATABASES")
databases = [i[0] for i in cursor.fetchall()]
finally:
cursor.close()
return db_name in databases
def create_database(self, db_name):
cursor = self.connection.cursor()
try:
cursor.execute("CREATE DATABASE `{}` CHARACTER SET UTF8"
.format(db_name))
finally:
cursor.close()
def grant_exists(self, db_name, db_user, remote_ip):
cursor = self.connection.cursor()
priv_string = "GRANT ALL PRIVILEGES ON `{}`.* " \
"TO '{}'@'{}'".format(db_name, db_user, remote_ip)
try:
cursor.execute("SHOW GRANTS for '{}'@'{}'".format(db_user,
remote_ip))
grants = [i[0] for i in cursor.fetchall()]
except MySQLdb.OperationalError:
return False
finally:
cursor.close()
# TODO: review for different grants
return priv_string in grants
def create_grant(self, db_name, db_user, remote_ip, password):
cursor = self.connection.cursor()
try:
# TODO: review for different grants
cursor.execute("GRANT ALL PRIVILEGES ON `{}`.* TO '{}'@'{}' "
"IDENTIFIED BY '{}'".format(db_name,
db_user,
remote_ip,
password))
finally:
cursor.close()
def create_admin_grant(self, db_user, remote_ip, password):
cursor = self.connection.cursor()
try:
cursor.execute("GRANT ALL PRIVILEGES ON *.* TO '{}'@'{}' "
"IDENTIFIED BY '{}'".format(db_user,
remote_ip,
password))
finally:
cursor.close()
def cleanup_grant(self, db_user, remote_ip):
cursor = self.connection.cursor()
try:
cursor.execute("DROP FROM mysql.user WHERE user='{}' "
"AND HOST='{}'".format(db_user,
remote_ip))
finally:
cursor.close()
def flush_priviledges(self):
cursor = self.connection.cursor()
try:
cursor.execute("FLUSH PRIVILEGES")
finally:
cursor.close()
def execute(self, sql):
"""Execute arbitrary SQL against the database."""
cursor = self.connection.cursor()
try:
cursor.execute(sql)
finally:
cursor.close()
def select(self, sql):
"""
Execute arbitrary SQL select query against the database
and return the results.
:param sql: SQL select query to execute
:type sql: string
:returns: SQL select query result
:rtype: list of lists
:raises: MySQLdb.Error
"""
cursor = self.connection.cursor()
try:
cursor.execute(sql)
results = [list(i) for i in cursor.fetchall()]
finally:
cursor.close()
return results
def migrate_passwords_to_leader_storage(self, excludes=None):
"""Migrate any passwords storage on disk to leader storage."""
if not is_leader():
log("Skipping password migration as not the lead unit",
level=DEBUG)
return
dirname = os.path.dirname(self.root_passwd_file_template)
path = os.path.join(dirname, '*.passwd')
for f in glob.glob(path):
if excludes and f in excludes:
log("Excluding %s from leader storage migration" % (f),
level=DEBUG)
continue
key = os.path.basename(f)
with open(f, 'r') as passwd:
_value = passwd.read().strip()
try:
leader_set(settings={key: _value})
if self.delete_ondisk_passwd_file:
os.unlink(f)
except ValueError:
# NOTE cluster relation not yet ready - skip for now
pass
def get_mysql_password_on_disk(self, username=None, password=None):
"""Retrieve, generate or store a mysql password for the provided
username on disk."""
if username:
template = self.user_passwd_file_template
passwd_file = template.format(username)
else:
passwd_file = self.root_passwd_file_template
_password = None
if os.path.exists(passwd_file):
log("Using existing password file '%s'" % passwd_file, level=DEBUG)
with open(passwd_file, 'r') as passwd:
_password = passwd.read().strip()
else:
log("Generating new password file '%s'" % passwd_file, level=DEBUG)
if not os.path.isdir(os.path.dirname(passwd_file)):
# NOTE: need to ensure this is not mysql root dir (which needs
# to be mysql readable)
mkdir(os.path.dirname(passwd_file), owner='root', group='root',
perms=0o770)
# Force permissions - for some reason the chmod in makedirs
# fails
os.chmod(os.path.dirname(passwd_file), 0o770)
_password = password or pwgen(length=32)
write_file(passwd_file, _password, owner='root', group='root',
perms=0o660)
return _password
def passwd_keys(self, username):
"""Generator to return keys used to store passwords in peer store.
NOTE: we support both legacy and new format to support mysql
charm prior to refactor. This is necessary to avoid LP 1451890.
"""
keys = []
if username == 'mysql':
log("Bad username '%s'" % (username), level=WARNING)
if username:
# IMPORTANT: *newer* format must be returned first
keys.append('mysql-%s.passwd' % (username))
keys.append('%s.passwd' % (username))
else:
keys.append('mysql.passwd')
for key in keys:
yield key
def get_mysql_password(self, username=None, password=None):
"""Retrieve, generate or store a mysql password for the provided
username using peer relation cluster."""
excludes = []
# First check peer relation.
try:
for key in self.passwd_keys(username):
_password = leader_get(key)
if _password:
break
# If root password available don't update peer relation from local
if _password and not username:
excludes.append(self.root_passwd_file_template)
except ValueError:
# cluster relation is not yet started; use on-disk
_password = None
# If none available, generate new one
if not _password:
_password = self.get_mysql_password_on_disk(username, password)
# Put on wire if required
if self.migrate_passwd_to_leader_storage:
self.migrate_passwords_to_leader_storage(excludes=excludes)
return _password
def get_mysql_root_password(self, password=None):
"""Retrieve or generate mysql root password for service units."""
return self.get_mysql_password(username=None, password=password)
def set_mysql_password(self, username, password, current_password=None):
"""Update a mysql password for the provided username changing the
leader settings
To update root's password pass `None` in the username
:param username: Username to change password of
:type username: str
:param password: New password for user.
:type password: str
:param current_password: Existing password for user.
:type current_password: str
"""
if username is None:
username = 'root'
# get root password via leader-get, it may be that in the past (when
# changes to root-password were not supported) the user changed the
# password, so leader-get is more reliable source than
# config.previous('root-password').
rel_username = None if username == 'root' else username
if not current_password:
current_password = self.get_mysql_password(rel_username)
# password that needs to be set
new_passwd = password
# update password for all users (e.g. root@localhost, root@::1, etc)
try:
self.connect(user=username, password=current_password)
cursor = self.connection.cursor()
except MySQLdb.OperationalError as ex:
raise MySQLSetPasswordError(('Cannot connect using password in '
'leader settings (%s)') % ex, ex)
try:
# NOTE(freyes): Due to skip-name-resolve root@$HOSTNAME account
# fails when using SET PASSWORD so using UPDATE against the
# mysql.user table is needed, but changes to this table are not
# replicated across the cluster, so this update needs to run in
# all the nodes. More info at
# http://galeracluster.com/documentation-webpages/userchanges.html
release = CompareHostReleases(lsb_release()['DISTRIB_CODENAME'])
if release < 'bionic':
SQL_UPDATE_PASSWD = ("UPDATE mysql.user SET password = "
"PASSWORD( %s ) WHERE user = %s;")
else:
# PXC 5.7 (introduced in Bionic) uses authentication_string
SQL_UPDATE_PASSWD = ("UPDATE mysql.user SET "
"authentication_string = "
"PASSWORD( %s ) WHERE user = %s;")
cursor.execute(SQL_UPDATE_PASSWD, (new_passwd, username))
cursor.execute('FLUSH PRIVILEGES;')
self.connection.commit()
except MySQLdb.OperationalError as ex:
raise MySQLSetPasswordError('Cannot update password: %s' % str(ex),
ex)
finally:
cursor.close()
# check the password was changed
try:
self.connect(user=username, password=new_passwd)
self.execute('select 1;')
except MySQLdb.OperationalError as ex:
raise MySQLSetPasswordError(('Cannot connect using new password: '
'%s') % str(ex), ex)
if not is_leader():
log('Only the leader can set a new password in the relation',
level=DEBUG)
return
for key in self.passwd_keys(rel_username):
_password = leader_get(key)
if _password:
log('Updating password for %s (%s)' % (key, rel_username),
level=DEBUG)
leader_set(settings={key: new_passwd})
def set_mysql_root_password(self, password, current_password=None):
"""Update mysql root password changing the leader settings
:param password: New password for user.
:type password: str
:param current_password: Existing password for user.
:type current_password: str
"""
self.set_mysql_password(
'root',
password,
current_password=current_password)
def normalize_address(self, hostname):
"""Ensure that address returned is an IP address (i.e. not fqdn)"""
if config_get('prefer-ipv6'):
# TODO: add support for ipv6 dns
return hostname
if hostname != unit_get('private-address'):
return get_host_ip(hostname, fallback=hostname)
# Otherwise assume localhost
return '127.0.0.1'
def get_allowed_units(self, database, username, relation_id=None, prefix=None):
"""Get list of units with access grants for database with username.
This is typically used to provide shared-db relations with a list of
which units have been granted access to the given database.
"""
if not self.connection:
self.connect(password=self.get_mysql_root_password())
allowed_units = set()
if not prefix:
prefix = database
for unit in related_units(relation_id):
settings = relation_get(rid=relation_id, unit=unit)
# First check for setting with prefix, then without
for attr in ["%s_hostname" % (prefix), 'hostname']:
hosts = settings.get(attr, None)
if hosts:
break
if hosts:
# hostname can be json-encoded list of hostnames
try:
hosts = json.loads(hosts)
except ValueError:
hosts = [hosts]
else:
hosts = [settings['private-address']]
if hosts:
for host in hosts:
host = self.normalize_address(host)
if self.grant_exists(database, username, host):
log("Grant exists for host '%s' on db '%s'" %
(host, database), level=DEBUG)
if unit not in allowed_units:
allowed_units.add(unit)
else:
log("Grant does NOT exist for host '%s' on db '%s'" %
(host, database), level=DEBUG)
else:
log("No hosts found for grant check", level=INFO)
return allowed_units
def configure_db(self, hostname, database, username, admin=False):
"""Configure access to database for username from hostname."""
if not self.connection:
self.connect(password=self.get_mysql_root_password())
if not self.database_exists(database):
self.create_database(database)
remote_ip = self.normalize_address(hostname)
password = self.get_mysql_password(username)
if not self.grant_exists(database, username, remote_ip):
if not admin:
self.create_grant(database, username, remote_ip, password)
else:
self.create_admin_grant(username, remote_ip, password)
self.flush_priviledges()
return password
# `_singleton_config_helper` stores the instance of the helper class that is
# being used during a hook invocation.
_singleton_config_helper = None
def get_mysql_config_helper():
global _singleton_config_helper
if _singleton_config_helper is None:
_singleton_config_helper = MySQLConfigHelper()
return _singleton_config_helper
class MySQLConfigHelper(object):
"""Base configuration helper for MySQL."""
# Going for the biggest page size to avoid wasted bytes.
# InnoDB page size is 16MB
DEFAULT_PAGE_SIZE = 16 * 1024 * 1024
DEFAULT_INNODB_BUFFER_FACTOR = 0.50
DEFAULT_INNODB_BUFFER_SIZE_MAX = 512 * 1024 * 1024
# Validation and lookups for InnoDB configuration
INNODB_VALID_BUFFERING_VALUES = [
'none',
'inserts',
'deletes',
'changes',
'purges',
'all'
]
INNODB_FLUSH_CONFIG_VALUES = {
'fast': 2,
'safest': 1,
'unsafe': 0,
}
def human_to_bytes(self, human):
"""Convert human readable configuration options to bytes."""
num_re = re.compile('^[0-9]+$')
if num_re.match(human):
return human
factors = {
'K': 1024,
'M': 1048576,
'G': 1073741824,
'T': 1099511627776
}
modifier = human[-1]
if modifier in factors:
return int(human[:-1]) * factors[modifier]
if modifier == '%':
total_ram = self.human_to_bytes(self.get_mem_total())
if self.is_32bit_system() and total_ram > self.sys_mem_limit():
total_ram = self.sys_mem_limit()
factor = int(human[:-1]) * 0.01
pctram = total_ram * factor
return int(pctram - (pctram % self.DEFAULT_PAGE_SIZE))
raise ValueError("Can only convert K,M,G, or T")
def is_32bit_system(self):
"""Determine whether system is 32 or 64 bit."""
try:
return sys.maxsize < 2 ** 32
except OverflowError:
return False
def sys_mem_limit(self):
"""Determine the default memory limit for the current service unit."""
if platform.machine() in ['armv7l']:
_mem_limit = self.human_to_bytes('2700M') # experimentally determined
else:
# Limit for x86 based 32bit systems
_mem_limit = self.human_to_bytes('4G')
return _mem_limit
def get_mem_total(self):
"""Calculate the total memory in the current service unit."""
with open('/proc/meminfo') as meminfo_file:
for line in meminfo_file:
key, mem = line.split(':', 2)
if key == 'MemTotal':
mtot, modifier = mem.strip().split(' ')
return '%s%s' % (mtot, modifier[0].upper())
def get_innodb_flush_log_at_trx_commit(self):
"""Get value for innodb_flush_log_at_trx_commit.
        Use the innodb-flush-log-at-trx-commit or the tuning-level setting
translated by INNODB_FLUSH_CONFIG_VALUES to get the
innodb_flush_log_at_trx_commit value.
:returns: Numeric value for innodb_flush_log_at_trx_commit
:rtype: Union[None, int]
"""
_iflatc = config_get('innodb-flush-log-at-trx-commit')
_tuning_level = config_get('tuning-level')
if _iflatc:
return _iflatc
elif _tuning_level:
return self.INNODB_FLUSH_CONFIG_VALUES.get(_tuning_level, 1)
def get_innodb_change_buffering(self):
"""Get value for innodb_change_buffering.
Use the innodb-change-buffering validated against
INNODB_VALID_BUFFERING_VALUES to get the innodb_change_buffering value.
:returns: String value for innodb_change_buffering.
:rtype: Union[None, str]
"""
_icb = config_get('innodb-change-buffering')
if _icb and _icb in self.INNODB_VALID_BUFFERING_VALUES:
return _icb
def get_innodb_buffer_pool_size(self):
"""Get value for innodb_buffer_pool_size.
Return the number value of innodb-buffer-pool-size or dataset-size. If
neither is set, calculate a sane default based on total memory.
:returns: Numeric value for innodb_buffer_pool_size.
:rtype: int
"""
total_memory = self.human_to_bytes(self.get_mem_total())
dataset_bytes = config_get('dataset-size')
innodb_buffer_pool_size = config_get('innodb-buffer-pool-size')
if innodb_buffer_pool_size:
innodb_buffer_pool_size = self.human_to_bytes(
innodb_buffer_pool_size)
elif dataset_bytes:
log("Option 'dataset-size' has been deprecated, please use"
"innodb_buffer_pool_size option instead", level="WARN")
innodb_buffer_pool_size = self.human_to_bytes(
dataset_bytes)
else:
# NOTE(jamespage): pick the smallest of 50% of RAM or 512MB
# to ensure that deployments in containers
# without constraints don't try to consume
# silly amounts of memory.
innodb_buffer_pool_size = min(
int(total_memory * self.DEFAULT_INNODB_BUFFER_FACTOR),
self.DEFAULT_INNODB_BUFFER_SIZE_MAX
)
if innodb_buffer_pool_size > total_memory:
log("innodb_buffer_pool_size; {} is greater than system available memory:{}".format(
innodb_buffer_pool_size,
total_memory), level='WARN')
return innodb_buffer_pool_size
class PerconaClusterHelper(MySQLConfigHelper):
"""Percona-cluster specific configuration helper."""
def parse_config(self):
"""Parse charm configuration and calculate values for config files."""
config = config_get()
mysql_config = {}
if 'max-connections' in config:
mysql_config['max_connections'] = config['max-connections']
if 'wait-timeout' in config:
mysql_config['wait_timeout'] = config['wait-timeout']
if self.get_innodb_flush_log_at_trx_commit() is not None:
mysql_config['innodb_flush_log_at_trx_commit'] = \
self.get_innodb_flush_log_at_trx_commit()
if self.get_innodb_change_buffering() is not None:
mysql_config['innodb_change_buffering'] = config['innodb-change-buffering']
if 'innodb-io-capacity' in config:
mysql_config['innodb_io_capacity'] = config['innodb-io-capacity']
# Set a sane default key_buffer size
mysql_config['key_buffer'] = self.human_to_bytes('32M')
mysql_config['innodb_buffer_pool_size'] = self.get_innodb_buffer_pool_size()
return mysql_config
class MySQL8Helper(MySQLHelper):
def grant_exists(self, db_name, db_user, remote_ip):
cursor = self.connection.cursor()
priv_string = ("GRANT ALL PRIVILEGES ON {}.* "
"TO {}@{}".format(db_name, db_user, remote_ip))
try:
cursor.execute("SHOW GRANTS FOR '{}'@'{}'".format(db_user,
remote_ip))
grants = [i[0] for i in cursor.fetchall()]
except MySQLdb.OperationalError:
return False
finally:
cursor.close()
# Different versions of MySQL use ' or `. Ignore these in the check.
return priv_string in [
i.replace("'", "").replace("`", "") for i in grants]
def create_grant(self, db_name, db_user, remote_ip, password):
if self.grant_exists(db_name, db_user, remote_ip):
return
# Make sure the user exists
# MySQL8 must create the user before the grant
self.create_user(db_user, remote_ip, password)
cursor = self.connection.cursor()
try:
cursor.execute("GRANT ALL PRIVILEGES ON `{}`.* TO '{}'@'{}'"
.format(db_name, db_user, remote_ip))
finally:
cursor.close()
def create_user(self, db_user, remote_ip, password):
SQL_USER_CREATE = (
"CREATE USER '{db_user}'@'{remote_ip}' "
"IDENTIFIED BY '{password}'")
cursor = self.connection.cursor()
try:
cursor.execute(SQL_USER_CREATE.format(
db_user=db_user,
remote_ip=remote_ip,
password=password)
)
except MySQLdb._exceptions.OperationalError:
log("DB user {} already exists.".format(db_user),
"WARNING")
finally:
cursor.close()
def create_router_grant(self, db_user, remote_ip, password):
# Make sure the user exists
# MySQL8 must create the user before the grant
self.create_user(db_user, remote_ip, password)
# Mysql-Router specific grants
cursor = self.connection.cursor()
try:
cursor.execute("GRANT CREATE USER ON *.* TO '{}'@'{}' WITH GRANT "
"OPTION".format(db_user, remote_ip))
cursor.execute("GRANT SELECT, INSERT, UPDATE, DELETE, EXECUTE ON "
"mysql_innodb_cluster_metadata.* TO '{}'@'{}'"
.format(db_user, remote_ip))
cursor.execute("GRANT SELECT ON mysql.user TO '{}'@'{}'"
.format(db_user, remote_ip))
cursor.execute("GRANT SELECT ON "
"performance_schema.replication_group_members "
"TO '{}'@'{}'".format(db_user, remote_ip))
cursor.execute("GRANT SELECT ON "
"performance_schema.replication_group_member_stats "
"TO '{}'@'{}'".format(db_user, remote_ip))
cursor.execute("GRANT SELECT ON "
"performance_schema.global_variables "
"TO '{}'@'{}'".format(db_user, remote_ip))
finally:
cursor.close()
def configure_router(self, hostname, username):
if self.connection is None:
self.connect(password=self.get_mysql_root_password())
remote_ip = self.normalize_address(hostname)
password = self.get_mysql_password(username)
self.create_user(username, remote_ip, password)
self.create_router_grant(username, remote_ip, password)
return password
def get_prefix(requested, keys=None):
"""Return existing prefix or None.
:param requested: Request string. i.e. novacell0_username
:type requested: str
:param keys: Keys to determine prefix. Defaults set in function.
:type keys: List of str keys
:returns: String prefix i.e. novacell0
:rtype: Union[None, str]
"""
if keys is None:
# Shared-DB default keys
keys = ["_database", "_username", "_hostname"]
for key in keys:
if requested.endswith(key):
return requested[:-len(key)]
def get_db_data(relation_data, unprefixed):
"""Organize database requests into a collections.OrderedDict
:param relation_data: shared-db relation data
:type relation_data: dict
:param unprefixed: Prefix to use for requests without a prefix. This should
be unique for each side of the relation to avoid
conflicts.
:type unprefixed: str
:returns: Order dict of databases and users
:rtype: collections.OrderedDict
"""
# Deep copy to avoid unintentionally changing relation data
settings = copy.deepcopy(relation_data)
databases = collections.OrderedDict()
# Clear non-db related elements
if "egress-subnets" in settings.keys():
settings.pop("egress-subnets")
if "ingress-address" in settings.keys():
settings.pop("ingress-address")
if "private-address" in settings.keys():
settings.pop("private-address")
singleset = {"database", "username", "hostname"}
if singleset.issubset(settings):
settings["{}_{}".format(unprefixed, "hostname")] = (
settings["hostname"])
settings.pop("hostname")
settings["{}_{}".format(unprefixed, "database")] = (
settings["database"])
settings.pop("database")
settings["{}_{}".format(unprefixed, "username")] = (
settings["username"])
settings.pop("username")
for k, v in settings.items():
db = k.split("_")[0]
x = "_".join(k.split("_")[1:])
if db not in databases:
databases[db] = collections.OrderedDict()
databases[db][x] = v
return databases
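# --- Editor's note: hedged example -------------------------------------------
# Self-contained illustration of the two module-level helpers above, using
# made-up shared-db relation data (the key names follow the shared-db
# convention but are not taken from a real charm).
def _demo_shared_db_parsing():
    relation_data = {
        "private-address": "10.0.0.7",
        "database": "nova",
        "username": "nova",
        "hostname": "10.0.0.7",
        "novacell0_database": "nova_cell0",
        "novacell0_username": "nova",
        "novacell0_hostname": "10.0.0.7",
    }
    assert get_prefix("novacell0_username") == "novacell0"
    databases = get_db_data(relation_data, unprefixed="mydb")
    # databases is an OrderedDict with one entry per prefix ("novacell0" and
    # "mydb"), each mapping "database"/"username"/"hostname" to its value.
    return databases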
| 36.843044
| 96
| 0.588543
|
7e8b0020aece55ef3a75795fd7da8b4ee6c32f4d
| 8,274
|
py
|
Python
|
ggshield/scan/scannable.py
|
boblefrag/gg-shield
|
8eef8e02596ca05b9250482d9ea5cafd4435cfa0
|
[
"MIT"
] | null | null | null |
ggshield/scan/scannable.py
|
boblefrag/gg-shield
|
8eef8e02596ca05b9250482d9ea5cafd4435cfa0
|
[
"MIT"
] | null | null | null |
ggshield/scan/scannable.py
|
boblefrag/gg-shield
|
8eef8e02596ca05b9250482d9ea5cafd4435cfa0
|
[
"MIT"
] | null | null | null |
import concurrent.futures
import os
import re
from typing import Any, Dict, Iterable, List, NamedTuple, Optional, Set
import click
from pygitguardian import GGClient
from pygitguardian.config import MULTI_DOCUMENT_LIMIT
from pygitguardian.models import ScanResult
from ggshield.config import CPU_COUNT, MAX_FILE_SIZE, Cache
from ggshield.filter import remove_ignored_from_result
from ggshield.git_shell import GIT_PATH, shell
from ggshield.text_utils import STYLE, format_text
from ggshield.utils import REGEX_HEADER_INFO, Filemode
from .scannable_errors import handle_scan_error
class Result(NamedTuple):
"""
Return model for a scan which zips the information
between the Scan result and its input content.
"""
content: str # Text content scanned
filemode: Filemode # Filemode (useful for commits)
filename: str # Filename of content scanned
scan: ScanResult # Result of content scan
class ScanCollection(NamedTuple):
id: str
type: str
results: Optional[List[Result]] = None
scans: Optional[List[Any]] = None
    # Forward references are not supported in mypy for NamedTuples
    # Correct typing would be Union
optional_header: Optional[str] = None # To be printed in Text Output
extra_info: Optional[Dict[str, str]] = None # To be included in JSON Output
@property
def scans_with_results(self) -> List[Any]:
if self.scans:
return [scan for scan in self.scans if scan.results]
return []
class File:
""" Class representing a simple file. """
def __init__(self, document: str, filename: str, filesize: Optional[int] = None):
self.document = document
self.filename = filename
self.filemode = Filemode.FILE
self.filesize = filesize if filesize else len(self.document.encode("utf-8"))
@property
def scan_dict(self) -> Dict[str, Any]:
""" Return a payload compatible with the scanning API. """
return {
"filename": self.filename
if len(self.filename) <= 256
else self.filename[-255:],
"document": self.document,
"filemode": self.filemode,
}
class CommitFile(File):
""" Class representing a commit file. """
def __init__(
self,
document: str,
filename: str,
filemode: Filemode,
filesize: Optional[int] = None,
):
super().__init__(document, filename, filesize)
self.filemode = filemode
class Files:
"""
Files is a list of files. Useful for directory scanning.
"""
def __init__(self, files: List[File]):
self._files = {entry.filename: entry for entry in files}
@property
def files(self) -> Dict[str, File]:
return self._files
@property
def scannable_list(self) -> List[Dict[str, Any]]:
return [entry.scan_dict for entry in self.files.values()]
def scan(
self,
client: GGClient,
cache: Cache,
matches_ignore: Iterable[str],
all_policies: bool,
verbose: bool,
) -> List[Result]:
cache.purge()
scannable_list = self.scannable_list
results = []
chunks = []
for i in range(0, len(scannable_list), MULTI_DOCUMENT_LIMIT):
chunks.append(scannable_list[i : i + MULTI_DOCUMENT_LIMIT])
with concurrent.futures.ThreadPoolExecutor(
max_workers=CPU_COUNT * 2, thread_name_prefix="content_scan"
) as executor:
future_to_scan = {
executor.submit(client.multi_content_scan, chunk): chunk
for chunk in chunks
}
for future in concurrent.futures.as_completed(future_to_scan):
chunk = future_to_scan[future]
scan = future.result()
if not scan.success:
handle_scan_error(scan, chunk)
continue
for index, scanned in enumerate(scan.scan_results):
remove_ignored_from_result(scanned, all_policies, matches_ignore)
if scanned.has_policy_breaks:
for policy_break in scanned.policy_breaks:
cache.add_found_policy_break(policy_break)
results.append(
Result(
content=chunk[index]["document"],
scan=scanned,
filemode=chunk[index]["filemode"],
filename=chunk[index]["filename"],
)
)
cache.save()
return results
class CommitInformation(NamedTuple):
author: str
email: str
date: str
class Commit(Files):
"""
Commit represents a commit which is a list of commit files.
"""
def __init__(self, sha: Optional[str] = None, filter_set: Set[str] = set()):
self.sha = sha
self._patch: Optional[str] = None
self._files = {}
self.filter_set = filter_set
self._info: Optional[CommitInformation] = None
@property
def info(self) -> CommitInformation:
if self._info is None:
m = REGEX_HEADER_INFO.search(self.patch)
if m is None:
self._info = CommitInformation("unknown", "", "")
else:
self._info = CommitInformation(**m.groupdict())
return self._info
@property
def optional_header(self) -> str:
""" Return the formatted patch. """
return (
format_text(f"\ncommit {self.sha}\n", STYLE["commit_info"])
+ f"Author: {self.info.author} <{self.info.email}>\n"
+ f"Date: {self.info.date}\n"
)
@property
def patch(self) -> str:
""" Get the change patch for the commit. """
if self._patch is None:
if self.sha:
self._patch = shell([GIT_PATH, "show", self.sha])
else:
self._patch = shell([GIT_PATH, "diff", "--cached"])
return self._patch
@property
def files(self) -> Dict[str, File]:
if not self._files:
self._files = {entry.filename: entry for entry in list(self.get_files())}
return self._files
@staticmethod
def get_filename(line: str) -> str:
"""
Get the file path from the line patch
Example: line = "a/filename.txt b/filename.txt"
"""
return line.split(" ")[1][2:]
@staticmethod
def get_filemode(line: str) -> Filemode:
"""
Get the file mode from the line patch (new, modified or deleted)
:raise: Exception if filemode is not detected
"""
if line.startswith("index"):
return Filemode.MODIFY
if line.startswith("similarity"):
return Filemode.RENAME
if line.startswith("new"):
return Filemode.NEW
if line.startswith("deleted"):
return Filemode.DELETE
if line.startswith("old"):
return Filemode.PERMISSION_CHANGE
raise click.ClickException(f"Filemode is not detected:{line}")
def get_files(self) -> Iterable[CommitFile]:
"""
Format the diff into files and extract the patch for each one of them.
Example :
diff --git a/test.txt b/test.txt\n
new file mode 100644\n
index 0000000..b80e3df\n
--- /dev/null\n
+++ b/test\n
@@ -0,0 +1,28 @@\n
+this is a test patch\n
"""
list_diff = re.split(r"^diff --git ", self.patch, flags=re.MULTILINE)[1:]
work_dir = os.getcwd()
for diff in list_diff:
lines = diff.split("\n")
filename = self.get_filename(lines[0])
if os.path.join(work_dir, filename) in self.filter_set:
continue
filemode = self.get_filemode(lines[1])
document = "\n".join(lines[filemode.start :])
file_size = len(document.encode("utf-8"))
if file_size > MAX_FILE_SIZE:
continue
if document:
yield CommitFile(document, filename, filemode, file_size)
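# --- Editor's note: hedged example -------------------------------------------
# Small illustration of the two static helpers above on fabricated diff
# header lines (the format mirrors the example documented in get_files).
def _demo_patch_header_parsing():
    assert Commit.get_filename("a/test.txt b/test.txt") == "test.txt"
    assert Commit.get_filemode("new file mode 100644") == Filemode.NEW
    assert Commit.get_filemode("index 0000000..b80e3df 100644") == Filemode.MODIFY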
| 31.460076
| 85
| 0.58424
|
701d963d5d5416e4c65f89bf3515e3da33834a6c
| 130
|
py
|
Python
|
people/urls.py
|
brapastor/pygeographic
|
3b1522b62bf06430dca007d64a5b71243fdb71f0
|
[
"MIT"
] | null | null | null |
people/urls.py
|
brapastor/pygeographic
|
3b1522b62bf06430dca007d64a5b71243fdb71f0
|
[
"MIT"
] | null | null | null |
people/urls.py
|
brapastor/pygeographic
|
3b1522b62bf06430dca007d64a5b71243fdb71f0
|
[
"MIT"
] | null | null | null |
from django.urls import path, include
from .views import register
urlpatterns = [
path("register/", register, name="tags"),
]
| 21.666667
| 45
| 0.715385
|
47e5def130cd5177d50c437f1e4bb0b62f7382f6
| 8,628
|
py
|
Python
|
bin/performance_lineage_excel.py
|
fanninpm/mad_river_wf
|
1f796aa98005f9b5607aa746f445fcef51eb85e5
|
[
"Apache-2.0"
] | null | null | null |
bin/performance_lineage_excel.py
|
fanninpm/mad_river_wf
|
1f796aa98005f9b5607aa746f445fcef51eb85e5
|
[
"Apache-2.0"
] | null | null | null |
bin/performance_lineage_excel.py
|
fanninpm/mad_river_wf
|
1f796aa98005f9b5607aa746f445fcef51eb85e5
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
from __future__ import annotations  # lets the list[int] field annotations below work on Python 3.8
import csv
import json
import sys
import xml.etree.ElementTree as ET
from dataclasses import dataclass
from datetime import datetime
from statistics import mean
from openpyxl import Workbook
if sys.version_info.major < 3 or (sys.version_info.major == 3 and sys.version_info.minor < 8):
print('This script requires the walrus operator to be supported.', file=sys.stderr)
print('Please use Python 3.8 or greater.', file=sys.stderr)
sys.exit(1)
@dataclass
class Sample:
name: str
barcode_sequence: str
pf_clusters: list[int]
percent_of_lanes: list[float]
yield_in_mbases: list[int]
percent_geq_q30_bases: list[float]
mean_quality_score: list[float]
def performance_excel_workbook(stats_json, coverage_summary, run_info, protocol):
stats_object = json.load(stats_json)
samples = dict()
for lane in stats_object["ConversionResults"]:
total_pf_clusters = lane["TotalClustersPF"]
for sample_dict in lane["DemuxResults"]:
read_metrics_yield = sum([x["Yield"] for x in sample_dict["ReadMetrics"]])
read_metrics_yield_q30 = sum([x["YieldQ30"] for x in sample_dict["ReadMetrics"]])
read_metrics_quality_score_sum = sum([x["QualityScoreSum"] for x in sample_dict["ReadMetrics"]])
try:
percent_geq_q30_bases = read_metrics_yield_q30 * 100 / read_metrics_yield
except ZeroDivisionError:
percent_geq_q30_bases = 0
try:
mean_quality_score = read_metrics_quality_score_sum / read_metrics_yield
except ZeroDivisionError:
mean_quality_score = 0
if (sample_id := sample_dict["SampleId"]) in samples:
if sample_dict["SampleName"] == samples[sample_id].name and sample_dict["IndexMetrics"][0]["IndexSequence"] == samples[sample_id].barcode_sequence:
samples[sample_id].pf_clusters.append(sample_dict["NumberReads"])
samples[sample_id].percent_of_lanes.append(sample_dict["NumberReads"]*100/total_pf_clusters)
samples[sample_id].yield_in_mbases.append(sample_dict["Yield"]//1000000)
samples[sample_id].percent_geq_q30_bases.append(percent_geq_q30_bases)
samples[sample_id].mean_quality_score.append(mean_quality_score)
else:
raise ValueError(f"mismatch with {sample_id}")
else:
samples[sample_id] = Sample(
name = sample_dict["SampleName"],
barcode_sequence = sample_dict["IndexMetrics"][0]["IndexSequence"],
pf_clusters = [sample_dict["NumberReads"]],
percent_of_lanes = [sample_dict["NumberReads"]*100/total_pf_clusters],
yield_in_mbases = [sample_dict["Yield"]//1000000],
percent_geq_q30_bases = [percent_geq_q30_bases],
mean_quality_score = [mean_quality_score]
)
sample_names = sorted(list(samples))
coverage_dialect = csv.Sniffer().sniff(coverage_summary.readline())
coverage_summary.seek(0)
coverage_reader = csv.DictReader(coverage_summary, dialect=coverage_dialect)
coverage_samples = [row for row in coverage_reader]
coverage_lookup = {j[coverage_reader.fieldnames[0]]:i for i, j in enumerate(coverage_samples)}
run_info_tree = ET.parse(run_info)
run_info_root = run_info_tree.getroot()
sequence_date_raw = run_info_root[0].find("Date").text
try:
sequence_date = datetime.strptime(sequence_date_raw, "%y%m%d").strftime("%Y-%m-%d")
except ValueError:
sequence_date = datetime.strptime(sequence_date_raw.split(" ")[0], "%m/%d/%Y").strftime("%Y-%m-%d")
instrument_name = run_info_root[0].find("Instrument").text
worksheet_header = ["Sample"] + coverage_reader.fieldnames[1:] + ["Barcode Sequences", "PF Clusters", "% of the lanes", "Yield (Mbases)", "% >= Q30 Bases", "Mean Quality Score", "Date of Sequence", "Instrument", "Protocol"]
wb = Workbook()
ws = wb.active
for i, j in enumerate(worksheet_header):
ws.cell(column=i+1, row=1, value=j)
for idx, sample_name in enumerate(sample_names):
row_template = [sample_name]
try:
full_sample_name = [key for key in coverage_lookup if key.startswith(sample_name)][0]
except IndexError:
for fieldname in coverage_reader.fieldnames[1:]:
row_template.append(0)
else:
coverage_sample = coverage_samples[coverage_lookup[full_sample_name]]
for fieldname in coverage_reader.fieldnames[1:]:
row_template.append(coverage_sample[fieldname])
row_template.append(samples[sample_name].barcode_sequence)
row_template.append(sum(samples[sample_name].pf_clusters))
row_template.append(mean(samples[sample_name].percent_of_lanes))
row_template.append(sum(samples[sample_name].yield_in_mbases))
row_template.append(mean(samples[sample_name].percent_geq_q30_bases))
row_template.append(mean(samples[sample_name].mean_quality_score))
row_template.append(sequence_date)
row_template.append(instrument_name)
row_template.append(protocol)
for i,j in enumerate(row_template):
ws.cell(column=i+1, row=idx+2, value=j)
return wb
def lineage_excel_workbook(pangolin_summary, nextclade_summary, vadr_annotations):
wb = Workbook()
ws_pangolin = wb.active
ws_pangolin.title = "Pangolin"
pangolin_dialect = csv.Sniffer().sniff(pangolin_summary.readline())
pangolin_summary.seek(0)
for row in filter(None, csv.reader(pangolin_summary, dialect=pangolin_dialect)):
ws_pangolin.append(row)
ws_nextclade = wb.create_sheet("Nextclade")
nextclade_dialect = csv.Sniffer().sniff(nextclade_summary.readline())
nextclade_summary.seek(0)
for row in filter(None, csv.reader(nextclade_summary, dialect=nextclade_dialect)):
ws_nextclade.append(row)
ws_vadr_annotations = wb.create_sheet("VADR Annotations")
ws_vadr_annotations.append([
"Name",
"Length",
"Pass/Fail",
"Annotated?",
"Best Model",
"Group",
"Subgroup",
"# of Features Annotated",
"# of Features Not Annotated",
"# of 5' Truncated Features",
"# of 3' Truncated Features",
"# of Per-Feature Alerts",
"Per-Sequence Alerts"
])
for row in filter(lambda x: not x.startswith("#"), vadr_annotations.readlines()):
ws_vadr_annotations.append(row.strip().split()[1:])
return wb
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description = "Make some excel files related to performance and lineage.")
parser.add_argument("prefix", type=str, help="Prefix for output excel files.")
subparsers = parser.add_subparsers(help="Performance or lineage", dest="which")
performance_parser = subparsers.add_parser("performance", help="Generate performance excel")
performance_parser.add_argument("--stats_json", type=argparse.FileType("r"), required=True, help="Stats.json produced by bcl2fastq")
performance_parser.add_argument("--coverage_summary", type=argparse.FileType("r"), required=True, help="TXT file produced by summarizecoverage.sh")
performance_parser.add_argument("--run_info", type=argparse.FileType("r"), required=True, help="RunInfo.xml produced by the Illumina machine")
performance_parser.add_argument("--protocol", type=str, required=True, help="This run's protocol")
lineage_parser = subparsers.add_parser("lineage", help="Generate lineage excel")
lineage_parser.add_argument("--pangolin_summary", type=argparse.FileType("r"), required=True, help="Tabular summary of Pangolin lineages")
lineage_parser.add_argument("--nextclade_summary", type=argparse.FileType("r"), required=True, help="Tabular summary of Nextclade lineages")
lineage_parser.add_argument("--vadr_annotations", type=argparse.FileType("r"), required=True, help="Tabular summary of VADR annotations")
args = parser.parse_args()
if args.which == "performance":
performance_excel_workbook(args.stats_json, args.coverage_summary, args.run_info, args.protocol).save(f"{args.prefix}_performance.xlsx")
if args.which == "lineage":
lineage_excel_workbook(args.pangolin_summary, args.nextclade_summary, args.vadr_annotations).save(f"{args.prefix}_lineage.xlsx")
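# --- Editor's note: hedged usage examples -------------------------------------
# Example invocations derived from the argparse configuration above; the input
# file names and the protocol string are placeholders rather than files shipped
# with the workflow.
#
#   performance_lineage_excel.py RUN42 performance \
#       --stats_json Stats.json --coverage_summary coverage_summary.txt \
#       --run_info RunInfo.xml --protocol "ARTIC v3"
#
#   performance_lineage_excel.py RUN42 lineage \
#       --pangolin_summary lineage_report.csv \
#       --nextclade_summary nextclade.tsv \
#       --vadr_annotations vadr.sqa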
| 47.406593
| 227
| 0.683936
|
5ac187244340cb421f67cf137ffd340317812aba
| 8,012
|
py
|
Python
|
tests/test_miner.py
|
Andolab/miNER
|
4871fce8907a554734e0e70aea33e2adf03c0ce1
|
[
"MIT"
] | 3
|
2019-04-06T03:14:01.000Z
|
2020-12-14T09:29:58.000Z
|
tests/test_miner.py
|
Andolab/miNER
|
4871fce8907a554734e0e70aea33e2adf03c0ce1
|
[
"MIT"
] | 1
|
2019-01-25T07:52:22.000Z
|
2019-03-29T14:38:06.000Z
|
tests/test_miner.py
|
Andolab/miNER
|
4871fce8907a554734e0e70aea33e2adf03c0ce1
|
[
"MIT"
] | 1
|
2019-01-25T08:07:35.000Z
|
2019-01-25T08:07:35.000Z
|
import unittest
from miner import Miner
class TestMiner(unittest.TestCase):
def setUp(self):
        # Sentence 1 (IOB2): 花子さんは東京に行きました ("Hanako went to Tokyo")
        # Sentence 2 (IOB2): 山田太郎くんは東京駅に向かいました ("Yamada Taro headed for Tokyo Station")
        # Sentence 3 (BIOES): 花子さんとボブくんは東京スカイツリーに行きました ("Hanako and Bob went to Tokyo Skytree")
self.answers = [
"B-PSN O O B-LOC O O O O".split(" "),
"B-PSN I-PSN O O B-LOC I-LOC O O O O".split(" "),
"S-PSN O O S-PSN O O B-LOC I-LOC E-LOC O O O O".split(" "),
]
self.predicts = [
"B-PSN O O B-LOC O B-LOC O O".split(" "),
"B-PSN B-PSN O O B-LOC I-LOC O O O O".split(" "),
"O O O O O O B-LOC I-LOC E-LOC O B-PSN O O".split(" "),
]
self.sentences = [
"花子 さん は 東京 に 行き まし た".split(" "),
"山田 太郎 君 は 東京 駅 に 向かい まし た".split(" "),
"花子 さん と ボブ くん は 東京 スカイ ツリー に 行き まし た".split(" "),
]
self.knowns = {"PSN": ["花子"], "LOC": ["東京"]}
self.miner = Miner(self.answers, self.predicts, self.sentences, self.knowns)
def test_initialize(self):
self.assertEqual(self.miner.answers, self.answers)
self.assertEqual(self.miner.predicts, self.predicts)
self.assertEqual(self.miner.sentences, self.sentences)
self.assertEqual(self.miner.known_words, self.knowns)
        # check behaviour when no known words are provided
m = Miner(self.answers, self.predicts, self.sentences)
self.assertEqual(m.known_words, {"PSN": [], "LOC": [], "overall": []})
def test_default_report(self):
result = self.miner.default_report(False)
self.assertTrue(
all([k in ["PSN", "LOC", "overall"] for k, v in result.items()])
)
self.assertEqual(
[k for k, v in result["PSN"].items()],
["precision", "recall", "f1_score", "num"],
)
self.assertEqual(
{
"LOC": {
"f1_score": 0.8571428571428571,
"num": 3,
"precision": 0.75,
"recall": 1.0,
},
"PSN": {"f1_score": 0.25, "num": 4, "precision": 0.25, "recall": 0.25},
"overall": {
"f1_score": 0.5333333333333333,
"num": 7,
"precision": 0.5,
"recall": 0.5714285714285714,
},
},
result,
)
def test_known_only_report(self):
result = self.miner.known_only_report(False)
self.assertTrue(
all([k in ["PSN", "LOC", "overall"] for k, v in result.items()])
)
self.assertEqual(
[k for k, v in result["PSN"].items()],
["precision", "recall", "f1_score", "num"],
)
self.assertEqual(
{
"LOC": {"f1_score": 1.0, "num": 1, "precision": 1.0, "recall": 1.0},
"PSN": {
"f1_score": 0.6666666666666666,
"num": 2,
"precision": 1.0,
"recall": 0.5,
},
"overall": {
"f1_score": 0.8,
"num": 3,
"precision": 1.0,
"recall": 0.6666666666666666,
},
},
result,
)
def test_unknown_only_report(self):
result = self.miner.unknown_only_report(False)
self.assertTrue(
all([k in ["PSN", "LOC", "overall"] for k, v in result.items()])
)
self.assertEqual(
[k for k, v in result["PSN"].items()],
["precision", "recall", "f1_score", "num"],
)
self.assertEqual(
{
"LOC": {"f1_score": 0.8, "num": 2, "precision": 2 / 3, "recall": 1.0},
"PSN": {"f1_score": 0, "num": 2, "precision": 0.0, "recall": 0.0},
"overall": {
"f1_score": 0.4,
"num": 4,
"precision": 0.3333333333333333,
"recall": 0.5,
},
},
result,
)
def test__entity_indexes(self):
result = self.miner._entity_indexes(self.answers, "PSN")
expect = [("PSN", 0, 0), ("PSN", 9, 10), ("PSN", 20, 20), ("PSN", 23, 23)]
self.assertEqual(result, expect)
result = self.miner._entity_indexes(self.answers, "LOC")
expect = [("LOC", 3, 3), ("LOC", 13, 14), ("LOC", 26, 28)]
self.assertEqual(result, expect)
def test__return_named_entities(self):
result = self.miner._return_named_entities(self.answers)
expect = {
"known": {"PSN": ["花子"], "LOC": ["東京"]},
"unknown": {"PSN": ["山田太郎", "ボブ"], "LOC": ["東京スカイツリー", "東京駅"]},
}
for (rk, rv), (ek, ev) in zip(result.items(), expect.items()):
self.assertTrue(set(rv["PSN"]) & set(ev["PSN"]))
self.assertTrue(set(rv["LOC"]) & set(ev["LOC"]))
def test_return_miss_labelings(self):
result = self.miner.return_miss_labelings()
expect = [
{
"answer": self.answers[0],
"predict": self.predicts[0],
"sentence": self.sentences[0],
},
{
"answer": self.answers[1],
"predict": self.predicts[1],
"sentence": self.sentences[1],
},
{
"answer": self.answers[2],
"predict": self.predicts[2],
"sentence": self.sentences[2],
},
]
self.assertEqual(result, expect)
def test_return_answer_named_entities(self):
result = self.miner.return_answer_named_entities()
expect = self.miner._return_named_entities(self.answers)
for (rk, rv), (ek, ev) in zip(result.items(), expect.items()):
self.assertTrue(set(rv["PSN"]) & set(ev["PSN"]))
self.assertTrue(set(rv["LOC"]) & set(ev["LOC"]))
def test_return_predict_named_entities(self):
result = self.miner.return_predict_named_entities()
expect = self.miner._return_named_entities(self.predicts)
for (rk, rv), (ek, ev) in zip(result.items(), expect.items()):
self.assertTrue(set(rv["PSN"]) & set(ev["PSN"]))
self.assertTrue(set(rv["LOC"]) & set(ev["LOC"]))
def test_segmentation_score(self):
result = self.miner.segmentation_score("default", False)
self.assertEqual(
result,
{
"f1_score": 0.5333333333333333,
"num": 7,
"precision": 0.5,
"recall": 0.5714285714285714,
},
)
result = self.miner.segmentation_score("unknown", False)
self.assertEqual(
result,
{"f1_score": 0.4, "num": 4, "precision": 0.3333333333333333, "recall": 0.5},
)
result = self.miner.segmentation_score("known", False)
self.assertEqual(
result,
{"f1_score": 0.8, "num": 3, "precision": 1.0, "recall": 0.6666666666666666},
)
def test__check_add_entity(self):
# assert all
self.miner.check_known = True
self.miner.check_unknown = True
self.assertTrue(self.miner._check_add_entity("", ""))
self.assertTrue(self.miner._check_add_entity("花子", "PSN"))
# assert known
self.miner.check_known = True
self.miner.check_unknown = False
self.assertTrue(self.miner._check_add_entity("花子", "PSN"))
self.assertTrue(self.miner._check_add_entity("東京", "LOC"))
self.assertFalse(self.miner._check_add_entity("ボブ", "PSN"))
# assert unknown
self.miner.check_known = False
self.miner.check_unknown = True
self.assertTrue(self.miner._check_add_entity("ボブ", "PSN"))
self.assertTrue(self.miner._check_add_entity("東京スカイツリー", "LOC"))
self.assertFalse(self.miner._check_add_entity("東京", "LOC"))
| 35.767857
| 88
| 0.49688
|
b91c2d7b1be5be20b1f6b186c06230c0b548aab1
| 3,526
|
py
|
Python
|
main.py
|
HoangTuan110/Juliet
|
153555e97c515318b4ce5a291ec3c7a5d2562bd4
|
[
"MIT"
] | 2
|
2021-04-11T01:19:02.000Z
|
2021-05-15T19:40:56.000Z
|
main.py
|
HoangTuan110/Juliet
|
153555e97c515318b4ce5a291ec3c7a5d2562bd4
|
[
"MIT"
] | null | null | null |
main.py
|
HoangTuan110/Juliet
|
153555e97c515318b4ce5a291ec3c7a5d2562bd4
|
[
"MIT"
] | null | null | null |
# Import re for Regex support
import re
# List of operators
operator_list = ["+", "-", "*", "/", "^"]
# Operators for integer
class IntOperator:
def Plus(first, second):
print(int(first) + int(second))
def Minus(first, second):
print(int(first) - int(second))
def Multiply(first, second):
print(int(first) * int(second))
def Divide(first, second):
print(int(first) / int(second))
def Power(first, second):
print(int(first) ** int(second))
# Operators for float
class FloatOperator:
def Plus(first, second):
print(float(first) + float(second))
def Minus(first, second):
print(float(first) - float(second))
def Multiply(first, second):
print(float(first) * float(second))
def Divide(first, second):
print(float(first) / float(second))
def Power(first, second):
print(float(first) ** float(second))
# Main function
def main():
print("Juliet Calculator version 1.0")
print("Type 'bye' to exit the REPL.")
while True:
ipt = input("#") # Ask user input
result = ipt.split() # Split input into tokens
        # If the line is empty or starts with a comment marker ('!'), skip it;
        # checking for an empty token list first avoids an IndexError on blank input
        if not result or result[0] == "!":
            pass
        # If the first token is an integer
elif re.fullmatch(r"\d+", result[0]):
            # If the integer is the only token,
            # print it as an int
if len(result) == 1:
print(f"{result[0]} :: Int")
print(int(result[0]))
            # If the next token is an operator,
            # do the arithmetic
elif result[1] in operator_list:
                # If only the first number and the operator are given,
                # the program prints an error
if len(result) == 2:
print("Error: Missing second number")
elif result[1] == "+":
IntOperator.Plus(result[0], result[2])
elif result[1] == "-":
IntOperator.Minus(result[0], result[2])
elif result[1] == "*":
IntOperator.Multiply(result[0], result[2])
elif result[1] == "/":
IntOperator.Divide(result[0], result[2])
elif result[1] == "^":
IntOperator.Power(result[0], result[2])
        # This branch is much the same as the integer one,
        # except that it handles float numbers
elif re.fullmatch(r"\d+\.\d+", result[0]):
if len(result) == 1:
print(f"{result[0]} :: Float")
print(float(result[0]))
elif result[1] in operator_list:
if len(result) == 2:
print("Error: Missing second number")
elif result[1] == "+":
FloatOperator.Plus(result[0], result[2])
elif result[1] == "-":
FloatOperator.Minus(result[0], result[2])
elif result[1] == "*":
FloatOperator.Multiply(result[0], result[2])
elif result[1] == "/":
FloatOperator.Divide(result[0], result[2])
elif result[1] == "^":
FloatOperator.Power(result[0], result[2])
elif result[0] == "bye":
break
else:
print(f"Invalid command: {result[0]}")
if __name__ == "__main__":
main()
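# Editor's addition: a hedged usage sketch, not part of the original Juliet source.
# It shows how the operator helpers above can be exercised directly, outside the
# REPL loop in main(); the literal arguments are illustrative only.
def _editor_usage_sketch():
    IntOperator.Plus("2", "3")         # prints 5
    FloatOperator.Power("2.0", "3")    # prints 8.0
    FloatOperator.Divide("7.5", "2")   # prints 3.75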
| 33.580952
| 64
| 0.50709
|
52ceb5b4551b0321c513bb56fe0dd2efe127860e
| 4,962
|
py
|
Python
|
cds_ils/importer/api.py
|
Pineirin/cds-ils
|
a31911584e908a22990e7b60caa54c2b64578a7b
|
[
"MIT"
] | null | null | null |
cds_ils/importer/api.py
|
Pineirin/cds-ils
|
a31911584e908a22990e7b60caa54c2b64578a7b
|
[
"MIT"
] | null | null | null |
cds_ils/importer/api.py
|
Pineirin/cds-ils
|
a31911584e908a22990e7b60caa54c2b64578a7b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 CERN.
#
# CDS-ILS is free software; you can redistribute it and/or modify it under
# the terms of the MIT License; see LICENSE file for more details.
"""CDS-ILS Importer API module."""
import uuid
from flask import current_app
from invenio_app_ils.errors import IlsValidationError, \
RecordHasReferencesError, VocabularyError
from invenio_db import db
from cds_ils.importer.errors import LossyConversion, MissingRequiredField, \
ProviderNotAllowedDeletion, RecordModelMissing, RecordNotDeletable, \
SeriesImportError, UnexpectedValue, UnknownProvider
from cds_ils.importer.models import ImporterMode, ImporterTaskStatus, \
ImportRecordLog
from cds_ils.importer.parse_xml import get_record_recid_from_xml, \
get_records_list
from cds_ils.importer.vocabularies_validator import \
validator as vocabulary_validator
from cds_ils.importer.XMLRecordLoader import XMLRecordDumpLoader
from cds_ils.importer.XMLRecordToJson import XMLRecordToJson
def create_json(data, source_type, ignore_missing_rules=False):
"""Process record dump."""
record_dump = XMLRecordToJson(data, source_type=source_type,
ignore_missing=ignore_missing_rules)
return XMLRecordDumpLoader.create_json(record_dump)
def import_from_json(json_data, is_deletable, provider, mode):
"""Import from Json."""
try:
report = XMLRecordDumpLoader.import_from_json(json_data, is_deletable,
provider, mode)
db.session.commit()
return report
except Exception as e:
db.session.rollback()
raise e
def validate_import(provider, mode, source_type):
"""Validate import."""
source_type = source_type or "marcxml"
assert source_type in ["marcxml"]
# check if the record is in delete mode
if provider not in current_app.config[
"CDS_ILS_IMPORTER_PROVIDERS_ALLOWED_TO_DELETE_RECORDS"
] and mode == ImporterMode.DELETE.value:
raise ProviderNotAllowedDeletion(provider=provider)
def import_from_xml(log, source_path, source_type, provider, mode,
ignore_missing_rules=False,
eager=False):
"""Load a single xml file."""
try:
# reset vocabularies validator cache
vocabulary_validator.reset()
validate_import(provider, mode, source_type)
with open(source_path, "r") as source:
records_list = list(get_records_list(source))
# update the entries count now that we know it
log.set_entries_count(records_list)
for record in records_list:
if log.status == ImporterTaskStatus.CANCELLED:
break
record_recid = get_record_recid_from_xml(record)
try:
json_data, is_deletable = \
create_json(record,
source_type,
ignore_missing_rules=ignore_missing_rules
)
except (LossyConversion, UnexpectedValue,
RecordModelMissing, MissingRequiredField) as e:
ImportRecordLog.create_failure(log.id, record_recid,
str(e.description))
continue
try:
report = import_from_json(json_data, is_deletable,
provider, mode)
ImportRecordLog.create_success(
log.id, record_recid, report)
except (RecordNotDeletable,
ProviderNotAllowedDeletion,
SeriesImportError,
RecordHasReferencesError, UnknownProvider,
VocabularyError) as e:
ImportRecordLog.create_failure(
log.id, record_recid,
str(e.description), report={"raw_json": json_data})
continue
except IlsValidationError as e:
ImportRecordLog.create_failure(
log.id, record_recid,
str(e.original_exception.message),
report={"raw_json": json_data}
)
continue
except Exception as e:
log.set_failed(e)
raise e
log.finalize()
def allowed_files(filename):
"""Checks the extension of the files."""
allowed_extensions = current_app.config[
"CDS_ILS_IMPORTER_FILE_EXTENSIONS_ALLOWED"
]
return filename.lower().endswith(tuple(allowed_extensions))
def rename_file(filename):
"""Renames filename with an unique name."""
unique_filename = uuid.uuid4().hex
ext = filename.rsplit(".", 1)[1]
return unique_filename + "." + ext
| 37.308271
| 78
| 0.61185
|
f7faee6cb336c35495e1fcc4fe3a7778899c6353
| 1,090
|
py
|
Python
|
run.py
|
Licht-T/Ant-Cuda
|
048d99bce4e018cabe2e51853e5fc22dfa476235
|
[
"MIT"
] | null | null | null |
run.py
|
Licht-T/Ant-Cuda
|
048d99bce4e018cabe2e51853e5fc22dfa476235
|
[
"MIT"
] | null | null | null |
run.py
|
Licht-T/Ant-Cuda
|
048d99bce4e018cabe2e51853e5fc22dfa476235
|
[
"MIT"
] | 2
|
2020-07-24T14:05:49.000Z
|
2021-02-03T08:11:31.000Z
|
import os
import subprocess
import argparse
MAKE_CMD = 'make'
FLAG_WIN = False
ARG_PARSER = argparse.ArgumentParser(
description=(
        'Foraging-ants multi-agent simulation software.\n'
'The result is output into subdirectories.'
)
)
ARG_PARSER.add_argument(
'--angle', type=int, required=True,
help='Relative angle between two food resources $\\theta$.'
)
ARG_PARSER.add_argument(
'--dist', type=int, required=True,
help='Distance between the nest and each food $R$.'
)
try:
os.uname()
except AttributeError:
FLAG_WIN = True
if FLAG_WIN:
MAKE_CMD = 'mingw32-make'
if __name__ == '__main__':
parsed_args = ARG_PARSER.parse_args()
angle = parsed_args.angle
dist = parsed_args.dist
print('{0}dist, {1}deg. compiling.'.format(dist, angle))
make_args = [MAKE_CMD, 'ANGLE='+str(angle), 'DIST='+str(dist)]
subprocess.call(make_args)
print('{0}dist, {1}deg. started.'.format(dist, angle))
subprocess.call('./{0}dist_{1}deg.exe'.format(dist, angle))
print('{0}dist, {1}deg. ended.'.format(dist, angle))
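# Editor's addition: a hedged invocation sketch, not part of the original run.py.
# The script is normally driven from a shell, e.g. `python run.py --angle 60 --dist 20`;
# the same arguments can be parsed programmatically as shown below.
def _editor_cli_sketch():
    args = ARG_PARSER.parse_args(['--angle', '60', '--dist', '20'])
    return args.angle, args.dist   # (60, 20)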
| 24.222222
| 66
| 0.670642
|
453bdca8067b28fbe6efea04603c1e697d0188ea
| 970
|
py
|
Python
|
gitman/models/group.py
|
sergey-shuyskiy/gitman
|
97fe08f7c1e4637524cedd29ae62903a442e2094
|
[
"MIT"
] | null | null | null |
gitman/models/group.py
|
sergey-shuyskiy/gitman
|
97fe08f7c1e4637524cedd29ae62903a442e2094
|
[
"MIT"
] | null | null | null |
gitman/models/group.py
|
sergey-shuyskiy/gitman
|
97fe08f7c1e4637524cedd29ae62903a442e2094
|
[
"MIT"
] | null | null | null |
import logging
import yorm
from yorm.types import AttributeDictionary, List, String
from .. import exceptions
log = logging.getLogger(__name__)
@yorm.attr(name=String)
@yorm.attr(members=List.of_type(String))
class Group(AttributeDictionary):
"""A group with sources."""
def __init__(self, name, members):
super().__init__()
self.name = name
self.members = members or []
for key in ['name', 'members']:
if not self[key]:
msg = "'{}' required for {}".format(key, repr(self))
raise exceptions.InvalidConfig(msg)
def __repr__(self):
return "<group {}>".format(self)
def __str__(self):
pattern = "['{n}']"
return pattern.format(n=self.name)
def __eq__(self, other):
return self.name == other.name
def __ne__(self, other):
return self.name != other.name
def __lt__(self, other):
return self.name < other.name
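# Editor's addition: a hedged usage sketch, not part of gitman. A Group needs a
# non-empty name and member list, otherwise __init__ raises InvalidConfig, and the
# comparison methods let groups be compared and sorted by name.
def _editor_group_sketch():
    linters = Group(name='linters', members=['flake8', 'pylint'])
    docs = Group(name='docs', members=['sphinx'])
    assert docs < linters                  # ordered alphabetically by name
    try:
        Group(name='empty', members=[])    # raises exceptions.InvalidConfig
    except exceptions.InvalidConfig:
        pass
    return sorted([linters, docs])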
| 22.55814
| 68
| 0.605155
|
ef20ae3bd224fb99f21ad1c9240d1f4a4cc1177a
| 1,878
|
py
|
Python
|
optuna/integration/pytorch_lightning.py
|
jeffzi/optuna
|
133e9d678723ad9e5183789f9271b7f96db32322
|
[
"MIT"
] | 1
|
2020-12-29T07:38:45.000Z
|
2020-12-29T07:38:45.000Z
|
optuna/integration/pytorch_lightning.py
|
jeffzi/optuna
|
133e9d678723ad9e5183789f9271b7f96db32322
|
[
"MIT"
] | 1
|
2021-06-25T15:45:42.000Z
|
2021-06-25T15:45:42.000Z
|
optuna/integration/pytorch_lightning.py
|
jeffzi/optuna
|
133e9d678723ad9e5183789f9271b7f96db32322
|
[
"MIT"
] | 1
|
2020-12-25T03:27:09.000Z
|
2020-12-25T03:27:09.000Z
|
import optuna
with optuna._imports.try_import() as _imports:
from pytorch_lightning import LightningModule
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import EarlyStopping
if not _imports.is_successful():
EarlyStopping = object # NOQA
LightningModule = object # NOQA
Trainer = object # NOQA
class PyTorchLightningPruningCallback(EarlyStopping):
"""PyTorch Lightning callback to prune unpromising trials.
See `the example <https://github.com/optuna/optuna/blob/master/
examples/pytorch_lightning_simple.py>`__
if you want to add a pruning callback which observes accuracy.
Args:
trial:
A :class:`~optuna.trial.Trial` corresponding to the current evaluation of the
objective function.
monitor:
An evaluation metric for pruning, e.g., ``val_loss`` or
``val_acc``. The metrics are obtained from the returned dictionaries from e.g.
``pytorch_lightning.LightningModule.training_step`` or
``pytorch_lightning.LightningModule.validation_epoch_end`` and the names thus depend on
how this dictionary is formatted.
"""
def __init__(self, trial: optuna.trial.Trial, monitor: str) -> None:
_imports.check()
super(PyTorchLightningPruningCallback, self).__init__(monitor=monitor)
self._trial = trial
def on_validation_end(self, trainer: Trainer, pl_module: LightningModule) -> None:
logs = trainer.callback_metrics
epoch = pl_module.current_epoch
current_score = logs.get(self.monitor)
if current_score is None:
return
self._trial.report(current_score, step=epoch)
if self._trial.should_prune():
message = "Trial was pruned at epoch {}.".format(epoch)
raise optuna.TrialPruned(message)
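# Editor's addition: a hedged usage sketch, not part of optuna itself. `MyLightningModule`
# is a hypothetical LightningModule; how the callback is handed to the Trainer
# (callbacks=[...] here, or the legacy early_stop_callback= argument in old
# pytorch_lightning releases) depends on the installed version.
def _editor_objective_sketch(trial: "optuna.trial.Trial") -> float:
    model = MyLightningModule(lr=trial.suggest_loguniform("lr", 1e-5, 1e-1))  # hypothetical module
    trainer = Trainer(
        max_epochs=10,
        callbacks=[PyTorchLightningPruningCallback(trial, monitor="val_loss")],
    )
    trainer.fit(model)
    return float(trainer.callback_metrics["val_loss"])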
| 35.433962
| 99
| 0.688498
|
9d7806b9cf15429573f61700872dd98738c319bd
| 24,829
|
py
|
Python
|
test/KBaseReport_server_test.py
|
msneddon/KBaseReport
|
b8bb470a4c88d65230e525db48d6a3730069185d
|
[
"MIT"
] | null | null | null |
test/KBaseReport_server_test.py
|
msneddon/KBaseReport
|
b8bb470a4c88d65230e525db48d6a3730069185d
|
[
"MIT"
] | 15
|
2017-03-10T21:15:44.000Z
|
2021-07-08T21:43:56.000Z
|
test/KBaseReport_server_test.py
|
msneddon/KBaseReport
|
b8bb470a4c88d65230e525db48d6a3730069185d
|
[
"MIT"
] | 9
|
2017-01-20T18:27:06.000Z
|
2021-01-05T17:52:38.000Z
|
# -*- coding: utf-8 -*-
import os
import shutil
import time
import unittest
from configparser import ConfigParser # py3
from uuid import uuid4
from template.util import TemplateException
from KBaseReport.KBaseReportImpl import KBaseReport
from KBaseReport.KBaseReportServer import MethodContext
from KBaseReport.authclient import KBaseAuth as _KBaseAuth
from installed_clients.DataFileUtilClient import DataFileUtil
from installed_clients.WorkspaceClient import Workspace
from TemplateUtil_test import get_test_data
class KBaseReportTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
token = os.environ.get('KB_AUTH_TOKEN', None)
config_file = os.environ.get('KB_DEPLOYMENT_CONFIG', None)
cls.cfg = {}
config = ConfigParser()
config.read(config_file)
for nameval in config.items('KBaseReport'):
cls.cfg[nameval[0]] = nameval[1]
# Getting username from Auth profile for token
authServiceUrl = cls.cfg['auth-service-url']
auth_client = _KBaseAuth(authServiceUrl)
user_id = auth_client.get_user(token)
# WARNING: don't call any logging methods on the context object,
# it'll result in a NoneType error
cls.ctx = MethodContext(None)
cls.ctx.update({'token': token,
'user_id': user_id,
'provenance': [
{'service': 'KBaseReport',
'method': 'please_never_use_it_in_production',
'method_params': []
}],
'authenticated': 1})
cls.wsURL = cls.cfg['workspace-url']
cls.wsClient = Workspace(cls.wsURL)
cls.serviceImpl = KBaseReport(cls.cfg)
cls.scratch = cls.cfg['scratch']
cls.callback_url = os.environ['SDK_CALLBACK_URL']
# Custom stuff below
dirname = os.path.dirname(__file__)
cls.dfu = DataFileUtil(cls.callback_url)
cls.a_html_path = os.path.join(cls.scratch, 'a_html')
cls.b_html_path = os.path.join(cls.scratch, 'b_html')
shutil.copytree(os.path.join(dirname, 'data', 'a_html'), cls.a_html_path)
shutil.copytree(os.path.join(dirname, 'data', 'b_html'), cls.b_html_path)
cls.a_file_path = os.path.join(cls.scratch, 'a.txt')
cls.b_file_path = os.path.join(cls.scratch, 'b.txt')
shutil.copy2(os.path.join(dirname, 'data/a.txt'), cls.a_file_path)
shutil.copy2(os.path.join(dirname, 'data/b.txt'), cls.b_file_path)
# Upload files to shock
file_shocks = []
file_paths = [cls.a_file_path, cls.b_file_path]
for i in range(2):
try:
file_shocks.append(cls.dfu.file_to_shock({
'file_path': file_paths[i], 'make_handle': 0
}))
except Exception as e:
print('Unable to store ' + file_paths[i] + str(e))
cls.a_file_shock = file_shocks[0]
cls.b_file_shock = file_shocks[1]
@classmethod
def tearDownClass(cls):
if hasattr(cls, 'wsName'):
cls.wsClient.delete_workspace({'workspace': cls.wsName})
print('Test workspace was deleted')
def getWsClient(self):
return self.__class__.wsClient
def getWsName(self):
if hasattr(self.__class__, 'wsName'):
return self.__class__.wsName
suffix = int(time.time() * 1000)
wsName = "test_KBaseReport_" + str(suffix)
ret = self.getWsClient().create_workspace({'workspace': wsName})
self.__class__.wsName = wsName
self.__class__.wsID = ret[0]
return wsName
def getWsID(self):
"""
Return the workspace ID.
NOTE that this is custom to this SDK app (not auto-generated)
"""
if hasattr(self.__class__, 'wsName'):
return self.__class__.wsID
self.getWsName() # Sets the ID
return self.__class__.wsID
def getImpl(self):
return self.__class__.serviceImpl
def getContext(self):
return self.__class__.ctx
def check_created_report(self, result):
""" basic checks on a created report
Args:
result: output from report creation call
Return:
object data from created report
"""
self.assertEqual(self.getImpl().status(self.getContext())[0]['state'], 'OK')
self.assertTrue(len(result[0]['ref']))
self.assertTrue(len(result[0]['name']))
obj = self.dfu.get_objects({'object_refs': [result[0]['ref']]})
return obj['data'][0]['data']
def check_extended_result(self, result, link_name, file_names):
"""
Test utility: check the file upload results for an extended report
Args:
result - result dictionary from running .create_extended_report
link_name - one of "html_links" or "file_links"
file_names - names of the files for us to check against
Returns:
obj - report object created
"""
obj = self.check_created_report(result)
file_links = obj[link_name]
self.assertEqual(len(file_links), len(file_names))
# Test that all the filenames listed in the report object map correctly
saved_names = set([str(f['name']) for f in file_links])
self.assertEqual(saved_names, set(file_names))
return obj
def check_validation_errors(self, params, error_list):
"""
Check that the appropriate errors are thrown when validating extended report params
Args:
params - parameters to create_extended_report
error_list - set of text regexes to check against the error string
Returns True
"""
err_str = 'KBaseReport parameter validation errors'
with self.assertRaisesRegex(TypeError, err_str) as cm:
self.getImpl().create_extended_report(self.getContext(), params)
error_message = str(cm.exception)
for e in error_list:
self.assertRegex(error_message, e)
return True
def test_create(self):
""" Test the simple report creation with valid data """
msg = str(uuid4())
result = self.getImpl().create(self.getContext(), {
'workspace_name': self.getWsName(),
'report': {'text_message': msg}
})
data = self.check_created_report(result)
self.assertEqual(data['text_message'], msg)
def test_create_with_workspace_id(self):
""" Test the case where we pass in a workspace ID instead of a name """
msg = str(uuid4())
result = self.getImpl().create(self.getContext(), {
'workspace_id': self.getWsID(),
'report': {'text_message': msg}
})
data = self.check_created_report(result)
self.assertEqual(data['text_message'], msg)
def test_create_html_report(self):
""" Test the case where we pass in HTML instead of text_message """
html = '<blink><u>Deprecated</u></blink><nobr>'
result = self.getImpl().create(self.getContext(), {
'workspace_id': self.getWsID(),
'report': {'direct_html': html}
})
data = self.check_created_report(result)
self.assertEqual(data['direct_html'], html)
def test_create_html_report_and_message(self):
""" Test creation of a message AND an HTML report (!) """
msg = str(uuid4())
html = '<blink><u>Deprecated</u></blink><nobr>'
result = self.getImpl().create(self.getContext(), {
'workspace_id': self.getWsID(),
'report': {'direct_html': html, 'text_message': msg}
})
data = self.check_created_report(result)
self.assertEqual(data['direct_html'], html)
self.assertEqual(data['text_message'], msg)
def test_create_report_from_template(self):
""" Test the creation of a simple report using a template to generate data """
TEST_DATA = get_test_data()
for test_item in TEST_DATA['render_test'].keys():
desc = test_item if test_item is not None else 'none'
ref_text = TEST_DATA['render_test'][test_item]
with self.subTest('test content: ' + desc):
test_args = {
'template_file': TEST_DATA['template'],
}
if test_item:
test_args['template_data_json'] = TEST_DATA[test_item + '_json']
result = self.getImpl().create(self.getContext(), {
'workspace_id': self.getWsID(),
'report': {
'template': test_args
},
})
data = self.check_created_report(result)
self.assertMultiLineEqual(
data['direct_html'].rstrip(),
ref_text['abs_path'].rstrip()
)
def test_create_param_errors(self):
"""
See lib/KBaseReport/utils/validation_utils
We aren't testing every validation rule exhaustively here
"""
# Missing workspace id and name
with self.assertRaises(TypeError) as err:
self.getImpl().create(self.getContext(), {'report': {}})
# Missing report
with self.assertRaises(TypeError) as err:
self.getImpl().create(self.getContext(), {'workspace_name': 'x'})
self.assertTrue(str(err.exception))
def test_create_extended_param_errors(self):
"""
See lib/KBaseReport/utils/validation_utils
We aren't testing every validation rule exhaustively here
"""
# Missing workspace id and name
self.check_validation_errors({}, [
"'workspace_id'.*?'required without workspace_name'",
"'workspace_name'.*?'required without workspace_id'",
])
# wrong type for workspace_name
self.check_validation_errors({'workspace_name': 123}, [
"'workspace_name'.*?'must be of string type'",
])
def test_create_more_extended_param_errors(self):
"""
See lib/KBaseReport/utils/validation_utils
We aren't testing every validation rule exhaustively here
"""
html_links = [
{
'name': 'index.html',
'path': self.a_html_path
},
{
'name': 'b',
'path': self.b_html_path
}
]
# require both 'html_links' and 'direct_html_link_index'
params = {
'workspace_id': self.getWsID(),
'html_links': html_links,
}
self.check_validation_errors(params, [
"html_links.*?field 'direct_html_link_index' is required"
])
params = {
'workspace_id': self.getWsID(),
'direct_html_link_index': 0,
}
self.check_validation_errors(params, [
"direct_html_link_index.*?field 'html_links' is required"
])
# type error in the template params
params = {
'workspace_id': self.getWsID(),
'template': 'my_template_file.txt',
}
self.check_validation_errors(params, ['template.*?must be of dict type'])
# no template + direct_html
params = {
'workspace_id': self.getWsID(),
'template': {},
'direct_html': 'This is not valid html',
}
self.check_validation_errors(params, [
"'template' must not be present with 'direct_html'",
"'template'.*?'direct_html', 'direct_html_link_index' must not be present",
])
# no template + direct_html_link_index
params = {
'workspace_id': self.getWsID(),
'template': {'this': 'that'},
'direct_html_link_index': 0,
'html_links': html_links,
}
self.check_validation_errors(params, [
"'direct_html_link_index'.*?'template' must not be present with ",
"'template'.*?'direct_html', 'direct_html_link_index' must not be present"
])
# missing direct_html_link_index
# no direct_html + template
# no template + html_links
params = {
'workspace_id': self.getWsID(),
'template': {'this': 'that'},
'direct_html': '<marquee>My fave HTML tag</marquee>',
'html_links': html_links,
}
self.check_validation_errors(params, [
"'template' must not be present with 'html_links'",
"'direct_html'.*?'template' must not be present with ",
"template.*?'direct_html', 'direct_html_link_index' must not be present",
"field 'direct_html_link_index' is required",
])
def test_invalid_file_html_links(self):
""" Errors connected with file and html links """
for link_type in ['html_links', 'file_links']:
err_list = []
if 'html_links' == link_type:
err_list = ["html_links.*?field 'direct_html_link_index' is required"]
# file errors: no name
params = {
'workspace_id': self.getWsID(),
link_type: [
{'path': 'does/not/exist'},
],
}
self.check_validation_errors(params, [
"'name':.*?'required field'",
"path.*?does not exist on filesystem"
] + err_list)
# file error: no location
params = {
'workspace_id': self.getWsID(),
link_type: [
{'path': self.a_file_path, 'name': 'a'},
{'name': 'b'},
],
}
self.check_validation_errors(params, [
"'path'.*?'required field'",
"'shock_id'.*?'required field'",
"'template'.*?'required field'"
] + err_list)
# invalid path
file = {
'name': 'a',
'description': 'desc',
'label': 'label',
'path': 'tmp/no.txt'
}
params = {
'workspace_name': self.getWsName(),
'report_object_name': 'my_report',
link_type: [file]
}
self.check_validation_errors(params, [
"'path'.*?'does not exist on filesystem'",
] + err_list)
# template-related errors
template_error_list = [
{
'desc': 'missing required param',
'regex': "required field",
'params': {},
},
{
'desc': 'template file is wrong type',
'regex': "must be of string type",
'params': {
'template_file': {'path': '/does/not/exist'},
}
},
{
'desc': 'invalid JSON',
'regex': "Invalid JSON",
'params': {
'template_data_json': '"this is not valid JSON',
}
},
{
'desc': 'invalid JSON',
'regex': "Invalid JSON",
'params': {
'template_data_json': '["this",{"is":"not"},{"valid":"json"]',
}
},
]
for tpl_err in template_error_list:
params = {
'workspace_id': 12345,
'report_object_name': 'my_report',
link_type: [
{'template': tpl_err['params'], 'name': 'file.txt'},
{'path': self.a_file_path, 'name': 'a'},
]
}
self.check_validation_errors(params, [tpl_err['regex']] + err_list)
for path in ['/does/not/exist', 'does/not/exist']:
with self.assertRaisesRegex(TemplateException, 'file error - ' + path + ': not found'):
self.getImpl().create_extended_report(self.getContext(), {
'template': {
'template_file': path,
},
'workspace_id': 12345,
})
def test_create_extended_report_with_file_paths(self):
""" Valid extended report with file_links """
result = self.getImpl().create_extended_report(self.getContext(), {
'workspace_name': self.getWsName(),
'report_object_name': 'my_report',
'file_links': [
{
'name': 'a',
'description': 'a',
'label': 'a',
'path': self.a_file_path
},
{
'name': 'b',
'description': 'b',
'path': self.b_file_path
}
]
})
self.check_extended_result(result, 'file_links', ['a', 'b'])
def test_create_extended_report_with_uploaded_files(self):
result = self.getImpl().create_extended_report(self.getContext(), {
'workspace_name': self.getWsName(),
'report_object_name': 'my_report',
'file_links': [
{
'name': 'a',
'description': 'a',
'label': 'a',
'shock_id': self.a_file_shock['shock_id']
},
{
'name': 'b',
'description': 'b',
'label': 'b',
'shock_id': self.b_file_shock['shock_id']
}
]
})
self.check_extended_result(result, 'file_links', ['a', 'b'])
def test_create_extended_report_with_uploaded_html_files(self):
result = self.getImpl().create_extended_report(self.getContext(), {
'workspace_name': self.getWsName(),
'report_object_name': 'my_report',
'direct_html_link_index': 0,
'html_links': [
{
'name': 'a',
'description': 'a',
'label': 'a',
'shock_id': self.a_file_shock['shock_id']
},
{
'name': 'b',
'description': 'b',
'label': 'b',
'shock_id': self.b_file_shock['shock_id']
}
]
})
self.check_extended_result(result, 'html_links', ['a', 'b'])
def test_create_extended_report_with_html_paths(self):
result = self.getImpl().create_extended_report(self.getContext(), {
'workspace_name': self.getWsName(),
'report_object_name': 'my_report',
'direct_html_link_index': 0,
'html_links': [
{
'name': 'index.html',
'path': self.a_html_path
},
{
'name': 'b',
'path': self.b_html_path
}
]
})
self.check_extended_result(result, 'html_links', ['index.html', 'b'])
def test_create_extended_report_with_templates(self):
""" test the creation of extended reports using `template` directives """
TEST_DATA = get_test_data()
tmpl_arr = [
{
'name': 'none',
'template': {
'template_file': TEST_DATA['template'],
},
},
{
'name': 'content',
'template': {
'template_file': TEST_DATA['template'],
'template_data_json': TEST_DATA['content_json'],
},
},
{
'name': 'title',
'template': {
'template_file': TEST_DATA['template'],
'template_data_json': TEST_DATA['title_json'],
},
}
]
result = self.getImpl().create_extended_report(self.getContext(), {
'workspace_name': self.getWsName(),
'report_object_name': 'my_report',
'direct_html_link_index': 0,
'html_links': tmpl_arr,
})
self.check_extended_result(result, 'html_links', ['none', 'content', 'title'])
# use the same templates to generate files
result = self.getImpl().create_extended_report(self.getContext(), {
'workspace_name': self.getWsName(),
'report_object_name': 'my_report',
'file_links': tmpl_arr,
})
self.check_extended_result(result, 'file_links', ['none', 'content', 'title'])
def test_create_extended_report_with_html_single_file(self):
result = self.getImpl().create_extended_report(self.getContext(), {
'workspace_name': self.getWsName(),
'report_object_name': 'my_report',
'direct_html_link_index': 0,
'html_links': [
{
'name': 'index.html',
'description': 'a',
'label': 'a',
'path': self.a_html_path
},
]
})
self.check_extended_result(result, 'html_links', ['index.html'])
def test_valid_extended_report_with_html_paths(self):
""" Test the case where they set a single HTML file as their 'path' """
result = self.getImpl().create_extended_report(self.getContext(), {
'workspace_name': self.getWsName(),
'direct_html_link_index': 0,
'html_links': [
{
'name': 'main.html',
'path': os.path.join(self.a_html_path, 'index.html')
}
]
})
self.check_extended_result(result, 'html_links', ['main.html'])
def test_html_direct_link_index_out_of_bounds(self):
""" Test the case where they pass an out of bounds html index """
params = {
'workspace_name': self.getWsName(),
'direct_html_link_index': 1,
'html_links': [{'name': 'index.html', 'path': self.a_html_path}]
}
with self.assertRaises(IndexError):
self.getImpl().create_extended_report(self.getContext(), params)
def test_direct_html(self):
""" Test the case where they pass in direct_html """
direct_html = '<p>Hello, world.</p>'
params = {
'workspace_name': self.getWsName(),
'direct_html': direct_html
}
result = self.getImpl().create_extended_report(self.getContext(), params)
report_data = self.check_created_report(result)
self.assertEqual(report_data['direct_html'], direct_html)
def test_direct_html_none(self):
""" Test the case where they pass None for the direct_html param """
params = {
'workspace_name': self.getWsName(),
'message': 'hello world',
'direct_html': None,
}
result = self.getImpl().create_extended_report(self.getContext(), params)
report_data = self.check_created_report(result)
self.assertEqual(report_data['direct_html'], None)
def test_template(self):
""" Test the case where they want to use a template to generate HTML"""
TEST_DATA = get_test_data()
ref_text = TEST_DATA['render_test']['content']
result = self.getImpl().create_extended_report(self.getContext(), {
'workspace_name': self.getWsName(),
'workspace_id': self.getWsID(),
'template': {
'template_file': TEST_DATA['template'],
'template_data_json': TEST_DATA['content_json'],
},
})
report_data = self.check_created_report(result)
direct_html = report_data['direct_html']
self.assertEqual(direct_html.rstrip(), ref_text['abs_path'])
# relative path to template file
result = self.getImpl().create_extended_report(self.getContext(), {
'workspace_id': self.getWsID(),
'template': {
'template_file': TEST_DATA['template_file'],
'template_data_json': TEST_DATA['content_json'],
},
})
report_data = self.check_created_report(result)
direct_html = report_data['direct_html']
self.assertEqual(direct_html.rstrip(), ref_text['rel_path'])
| 38.434985
| 99
| 0.535865
|
53934589cf06e182febe49d6229252055682c620
| 8,060
|
py
|
Python
|
translations/management/commands/synctranslations.py
|
urm8/django-translations
|
e8f66710af9433044937b75c061e1988add398a5
|
[
"BSD-3-Clause"
] | 100
|
2018-11-20T19:30:49.000Z
|
2022-03-10T07:46:27.000Z
|
translations/management/commands/synctranslations.py
|
urm8/django-translations
|
e8f66710af9433044937b75c061e1988add398a5
|
[
"BSD-3-Clause"
] | 30
|
2018-11-27T19:53:53.000Z
|
2022-02-04T14:56:52.000Z
|
translations/management/commands/synctranslations.py
|
urm8/django-translations
|
e8f66710af9433044937b75c061e1988add398a5
|
[
"BSD-3-Clause"
] | 25
|
2019-05-30T13:41:47.000Z
|
2022-03-25T04:28:17.000Z
|
"""
This module contains the synctranslations command for the Translations app.
"""
import sys
from django.core.management.base import (
BaseCommand, CommandError,
)
from django.apps import apps
from django.db.models import Q
from django.contrib.contenttypes.models import ContentType
from translations.models import Translation, Translatable
__docformat__ = 'restructuredtext'
class Command(BaseCommand):
"""
The command which synchronizes the translations with
    the apps' model configurations.
"""
    help = "Synchronize the translations with the apps' model configurations."
def execute(self, *args, **options):
"""Execute the `Command` with `BaseCommand` arguments."""
self.stdin = options.get('stdin', sys.stdin) # Used for testing
return super(Command, self).execute(*args, **options)
def add_arguments(self, parser):
"""
Add the arguments that the `Command` accepts on an `ArgumentParser`.
"""
parser.add_argument(
'args',
metavar='app_label',
nargs='*',
help=(
'Specify the app label(s) to synchronize the translations for.'
),
)
parser.add_argument(
'--noinput', '--no-input',
action='store_false',
dest='interactive',
help='Tells Django to NOT prompt the user for input of any kind.',
)
def get_content_types(self, *app_labels):
r"""Return the `ContentType`\ s in some apps or all of them."""
if app_labels:
query = Q()
for app_label in app_labels:
try:
apps.get_app_config(app_label)
except LookupError:
raise CommandError(
"App '{}' is not found.".format(app_label)
)
else:
query |= Q(app_label=app_label)
content_types = ContentType.objects.filter(query)
else:
content_types = ContentType.objects.all()
return content_types
def get_obsolete_translations(self, content_types):
r"""Return the obsolete translations of some `ContentType`\ s."""
if content_types:
query = Q()
for content_type in content_types:
model = content_type.model_class()
if issubclass(model, Translatable):
trans_fields = model._get_translatable_fields_names()
model_query = (
Q(content_type=content_type)
&
~Q(field__in=trans_fields)
)
else:
model_query = Q(content_type=content_type)
query |= model_query
obsolete_translations = Translation.objects.filter(query)
else:
obsolete_translations = Translation.objects.none()
return obsolete_translations
def log_obsolete_translations(self, obsolete_translations):
"""Log the details of some obsolete translations."""
if self.verbosity >= 1:
self.stdout.write('Looking for obsolete translations...')
if obsolete_translations:
changes = {}
for translation in obsolete_translations:
app = apps.get_app_config(
translation.content_type.app_label
)
app_name = app.name
model = translation.content_type.model_class()
model_name = model.__name__
changes.setdefault(app_name, {})
changes[app_name].setdefault(model_name, set())
changes[app_name][model_name].add(translation.field)
self.stdout.write(
'Obsolete translations found for the specified fields:'
)
for app_name, models in sorted(
changes.items(),
key=lambda x: x[0]):
self.stdout.write('- App: {}'.format(app_name))
for model_name, fields in sorted(
models.items(),
key=lambda x: x[0]):
self.stdout.write(' - Model: {}'.format(model_name))
for field in sorted(
fields,
key=lambda x: x[0]):
self.stdout.write(' - Field: {}'.format(field))
self.stdout.write(
self.style.WARNING(
'Obsolete translations will be deleted in the '
'synchronization process.'
)
)
else:
self.stdout.write('No obsolete translations found.')
def ask_yes_no(self, message, default=None):
"""Ask the user for yes or no with a message and a default value."""
answer = None
while answer is None:
value = input(message)
# default
if default is not None and value == '':
value = default
# yes or no?
value = value.lower()
if value in ['y', 'yes', True]:
answer = True
elif value in ['n', 'no', False]:
answer = False
else:
answer = None
return answer
def should_run_synchronization(self):
"""Return whether to run the synchronization or not."""
run = None
if self.interactive:
if hasattr(self.stdin, 'isatty') and not self.stdin.isatty():
self.stderr.write(
"Synchronization failed due to not running in a TTY."
)
self.stderr.write(
"If you are sure about synchronization you can run "
"it with the '--no-input' flag."
)
sys.exit(1)
else:
try:
run = self.ask_yes_no(
(
'Are you sure you want to synchronize the '
'translations? [Y/n] '
),
default='Y'
)
except KeyboardInterrupt:
self.stdout.write('\n') # move to the next line of stdin
self.stdout.write('\n') # move another line for division
self.stderr.write("Operation cancelled.")
sys.exit(1)
else:
run = True
return run
def handle(self, *app_labels, **options):
"""Run the `Command` with the configured arguments."""
# get arguments
self.verbosity = options['verbosity']
self.interactive = options['interactive']
# collect all the models which will be affected
content_types = self.get_content_types(*app_labels)
# handle obsolete translations
obsolete_translations = self.get_obsolete_translations(content_types)
self.log_obsolete_translations(obsolete_translations)
# divide initializing synchronization with asking for synchronization
self.stdout.write('\n')
if obsolete_translations:
# ask user if they are sure that they want to synchronize
run_synchronization = self.should_run_synchronization()
# divide asking for synchronization with actual synchronization
self.stdout.write('\n')
if run_synchronization:
obsolete_translations.delete()
else:
self.stdout.write(
'Synchronization cancelled.'
)
return
self.stdout.write(
self.style.SUCCESS(
'Synchronization successful.'
)
)
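# Editor's addition: a hedged invocation sketch, not part of django-translations.
# The command is normally run as `python manage.py synctranslations <app_label> --no-input`;
# programmatically, Django's call_command can pass the same options by their dest
# names (interactive/verbosity). 'sample' below stands in for a real app label.
def _editor_call_sketch():
    from django.core.management import call_command
    call_command('synctranslations', 'sample', interactive=False, verbosity=1)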
| 35.982143
| 79
| 0.522457
|
5bbbf43ccb757346cbf3f33989cd35c237861c2e
| 1,585
|
py
|
Python
|
elevator.py
|
coodoing/gistsnippets
|
346dbd9d14a81e56b21a2a98531520a3c777ed49
|
[
"Apache-2.0"
] | 1
|
2019-07-31T05:20:37.000Z
|
2019-07-31T05:20:37.000Z
|
elevator.py
|
coodoing/gistsnippets
|
346dbd9d14a81e56b21a2a98531520a3c777ed49
|
[
"Apache-2.0"
] | null | null | null |
elevator.py
|
coodoing/gistsnippets
|
346dbd9d14a81e56b21a2a98531520a3c777ed49
|
[
"Apache-2.0"
] | null | null | null |
#! /usr/bin/env python
import sys
# pep8 & google python style
class Node(object):
def __init__(self, idx, value):
self.left = None
self.right = None
self.parent = None
self.idx = idx
self.value = value
self.path = [self.idx] # [].append(self.idx)
pass
pass
def findpath(arr, start, end):
first, last = 1, len(arr)
iterator_list, enque_list = [start], [start]
while len(enque_list) > 0:
current = enque_list.pop(0)
current_step = arr[current - 1]
left_idx = current + current_step
if left_idx <= last:
left_node = Node(left_idx, arr[left_idx - 1])
if left_idx not in iterator_list:
enque_list.append(left_idx)
iterator_list.append(left_idx)
tree.append(left_node)
right_idx = current - current_step
if right_idx > first - 1:
right_node = Node(right_idx, arr[right_idx - 1])
if right_idx not in iterator_list:
enque_list.append(right_idx)
iterator_list.append(right_idx)
tree.append(right_node)
pass
    # report the indexes reached by the BFS; `end` is reachable iff it appears here
    if end in iterator_list:
        print(iterator_list)
    else:
        print('path not exist')
pass
if __name__ == '__main__':
level = [1, 2, 3, 4, 5]
step_arr = [2, 1, 1, 4, 3]
start, end = 1, 5
root = Node(start, step_arr[start - 1])
tree = [root]
findpath(step_arr, start, end)
step_arr = [2, 1, 2, 3, 3]
start, end = 1, 5
findpath(step_arr, start, end)
| 26.864407
| 60
| 0.564669
|
7fa6ef7f6831ca26445c1997031d369823855e31
| 3,282
|
py
|
Python
|
configs/build.py
|
marsggbo/CovidNet3D
|
0aeca91a775f938a0e568dd88d8162473dacf3ce
|
[
"MIT"
] | 5
|
2021-02-23T06:43:31.000Z
|
2021-07-05T15:24:05.000Z
|
configs/build.py
|
etherx-dev/CovidNet3D
|
b107d7d965cad07f1890ee492857273f3468cc01
|
[
"MIT"
] | 1
|
2021-06-08T21:06:10.000Z
|
2021-06-08T21:06:10.000Z
|
configs/build.py
|
etherx-dev/CovidNet3D
|
b107d7d965cad07f1890ee492857273f3468cc01
|
[
"MIT"
] | 4
|
2021-02-01T03:29:16.000Z
|
2021-08-05T09:13:37.000Z
|
from configs import CN
__all__ = [
'build_config',
'CTConfig',
]
def build_config(cfg, name):
"""
    Build the config defined by `cfg.config.name`.
"""
cfg_dict = {
'ctconfig': CTConfig,
}
if name == '':
return cfg
else:
assert name.lower() in cfg_dict, f"{name} not found."
return cfg_dict[name.lower()](cfg)
def CTConfig(cfg):
# loss
cfg.loss.label_smoothing = 0.1
# dataset
cfg.dataset.slice_num = 16
cfg.dataset.is_color = False # gray slice image
cfg.dataset.is_3d = True
# model_depth
cfg.model.in_channels = 1
cfg.model.dropout = 0
# MobileNet
cfg.model.name = 'Mobile3DNet'
cfg.model.width_stages = [32,64,128,256,512,1024]
cfg.model.n_cell_stages = [4,4,4,4,4,1]
cfg.model.stride_stages = [2,2,2,1,2,1]
cfg.model.width_mult = 1
cfg.model.classes = 3
cfg.model.dropout_rate = 0.
cfg.model.bn_param = (0.1, 1e-3)
# CAM
cfg.cam = CN()
cfg.cam.enable = 0
cfg.cam.scan_path = '' # the path of a scan
cfg.cam.label = -1
cfg.cam.featmaps_module_name = 'global_avg_pooling' # the module name of hook
cfg.cam.weights_module_name = 'classifier' # the module name of hook
cfg.cam.save_path = './cam_results'
cfg.cam.model_path = '' # load the params of the model
cfg.cam.debug = False # if True, use FakeNet3D and FakeData to debug
################################################################
# ct transforms #
# https://torchio.readthedocs.io/ #
################################################################
cfg.transforms.ct = CN()
cfg.transforms.ct.randomflip = CN()
cfg.transforms.ct.randomflip.enable = 1
cfg.transforms.ct.randomflip.p = 0.5
cfg.transforms.ct.randomflip.axes = (0, 1,2)
cfg.transforms.ct.randomflip.flip_probability = 0.5
cfg.transforms.ct.randomaffine = CN()
cfg.transforms.ct.randomaffine.enable = 0
cfg.transforms.ct.randomaffine.scales = (0.5,0.5)
cfg.transforms.ct.randomaffine.degrees = (-10,10)
cfg.transforms.ct.randomaffine.isotropic = True
cfg.transforms.ct.randomaffine.p = 0.5
cfg.transforms.ct.randomblur = CN()
cfg.transforms.ct.randomblur.enable = 0
cfg.transforms.ct.randomblur.p = 0.5
cfg.transforms.ct.randomblur.std = (0, 4)
cfg.transforms.ct.randomnoise = CN()
cfg.transforms.ct.randomnoise.enable = 0
cfg.transforms.ct.randomnoise.p = 0.5
cfg.transforms.ct.randomnoise.mean = (0,0.25)
cfg.transforms.ct.randomnoise.std = (0,0.25)
cfg.transforms.ct.randomswap = CN()
cfg.transforms.ct.randomswap.enable = 0
cfg.transforms.ct.randomswap.p = 0.5
cfg.transforms.ct.randomswap.patch_size = (16,16,16)
cfg.transforms.ct.randomswap.num_iterations = 100
cfg.transforms.ct.randomelasticdeformation = CN()
cfg.transforms.ct.randomelasticdeformation.enable = 0
cfg.transforms.ct.randomelasticdeformation.p = 0.5
cfg.transforms.ct.randomelasticdeformation.num_control_points = (4,4,4)
cfg.transforms.ct.randomelasticdeformation.max_displacement = (7,7,7)
cfg.transforms.ct.randomelasticdeformation.locked_borders = 0
return cfg
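# Editor's addition: a hedged usage sketch, not part of the original repository.
# build_config() dispatches on the lowercased name, so 'CTConfig' and 'ctconfig'
# both select CTConfig, and an empty name returns cfg unchanged; the base cfg is
# assumed to already define the loss/dataset/model/transforms nodes that CTConfig fills in.
def _editor_build_sketch(cfg):
    cfg = build_config(cfg, 'CTConfig')   # applies the CT-specific defaults above
    cfg = build_config(cfg, '')           # no-op passthrough
    return cfg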
| 33.835052
| 81
| 0.627971
|
74d23d404a6a4636f525a13dfb0c67aa315c2281
| 1,356
|
py
|
Python
|
civictechprojects/migrations/0012_auto_20180626_1535.py
|
bhavanapamulaparthi/CivicTechExchange
|
c01b8ffccea19beda6e59290139d09f477ddad95
|
[
"MIT"
] | null | null | null |
civictechprojects/migrations/0012_auto_20180626_1535.py
|
bhavanapamulaparthi/CivicTechExchange
|
c01b8ffccea19beda6e59290139d09f477ddad95
|
[
"MIT"
] | null | null | null |
civictechprojects/migrations/0012_auto_20180626_1535.py
|
bhavanapamulaparthi/CivicTechExchange
|
c01b8ffccea19beda6e59290139d09f477ddad95
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2018-06-26 15:35
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import taggit.managers
class Migration(migrations.Migration):
dependencies = [
('taggit', '0002_auto_20150616_2121'),
('civictechprojects', '0011_auto_20180529_0426'),
]
operations = [
migrations.CreateModel(
name='TaggedOrganization',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content_object', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='civictechprojects.Project')),
('tag', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='civictechprojects_taggedorganization_items', to='taggit.Tag')),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='project',
name='project_organization',
field=taggit.managers.TaggableManager(blank=True, help_text='A comma-separated list of tags.', through='civictechprojects.TaggedOrganization', to='taggit.Tag', verbose_name='Tags'),
),
]
| 38.742857
| 194
| 0.629056
|
b6e3f178d503ff887012acfd252550bd6fb151cf
| 19,455
|
py
|
Python
|
projects/01_fyyur/worked_code/app.py
|
andreqts/UDACITY-Full-Stack-exercises
|
3504f76479de8da999ff3e8b3f02cd3a3689c360
|
[
"MIT"
] | null | null | null |
projects/01_fyyur/worked_code/app.py
|
andreqts/UDACITY-Full-Stack-exercises
|
3504f76479de8da999ff3e8b3f02cd3a3689c360
|
[
"MIT"
] | 34
|
2021-08-19T15:48:26.000Z
|
2022-02-24T00:30:19.000Z
|
projects/01_fyyur/worked_code/app.py
|
andreqts/UDACITY-Full-Stack-exercises
|
3504f76479de8da999ff3e8b3f02cd3a3689c360
|
[
"MIT"
] | null | null | null |
#----------------------------------------------------------------------------#
# Imports
#----------------------------------------------------------------------------#
import json
import dateutil.parser
import babel
from flask import Flask, render_template, request, Response, flash, redirect, url_for, jsonify, make_response
from flask_moment import Moment
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import func, or_, and_
from flask_migrate import Migrate
import logging
from logging import Formatter, FileHandler
from flask_wtf import Form
from forms import *
import os
from models import *
import sys
from models import csrf
moment = Moment(app)
#----------------------------------------------------------------------------#
# Filters.
#----------------------------------------------------------------------------#
def format_datetime(value, format='medium'):
if isinstance(value, str):
date = dateutil.parser.parse(value)
else:
date = value
if format == 'full':
format="EEEE MMMM, d, y 'at' h:mma"
elif format == 'medium':
format="EE MM, dd, y h:mma"
return babel.dates.format_datetime(date, format, locale='en')
app.jinja_env.filters['datetime'] = format_datetime
#----------------------------------------------------------------------------#
# Controllers.
#----------------------------------------------------------------------------#
def flash_errors(form):
"""Flashes form errors"""
for field, errors in form.errors.items():
for error in errors:
flash(u"Error in the %s field - %s" % (
getattr(form, field).label.text,
error
), category='error')
@app.route('/')
def index():
return render_template('pages/home.html')
# Venues
# ----------------------------------------------------------------
@app.route('/venues')
def venues():
data = []
locales = Venue.query.with_entities(Venue.city, Venue.state).group_by(Venue.city, Venue.state)
for city, state in locales:
city_venues = Venue.query.filter(and_(Venue.city==city, Venue.state==state)).all()
venues_list = []
for v in city_venues:
upcomming_shows_cnt = v.get_upcoming_shows_count()
name_value = v.name
if app.debug: #show upcoming count in debug, since it is not shown to the user
name_value += f" [{upcomming_shows_cnt} upcoming shows]"
venues_list.append({
"id": v.id,
"name": name_value,
"num_upcoming_shows": upcomming_shows_cnt,
})
    # show venues with more upcoming shows first, highlighting them (relevance)
venues_list.sort(reverse=True, key=(lambda d : d["num_upcoming_shows"]))
data.append ({
"city": city,
"state": state,
"venues": venues_list
})
form = VenueForm(data)
    return render_template('pages/venues.html', areas=data, form=form)
@app.route('/venues/search', methods=['POST'])
def search_venues():
str_to_search = request.form.get('search_term', '')
venues_found = Venue.query.filter(Venue.name.ilike(f'%{str_to_search}%')).all()
found_data = []
for venue in venues_found:
found_data.append({
"id": venue.id,
"name": venue.name,
"num_upcoming_shows": venue.get_upcoming_shows_count(),
})
response={
"count": len(venues_found),
"data": found_data,
}
form=VenueForm()
return render_template('pages/search_venues.html', results=response, form=form, search_term=str_to_search)
@app.route('/venues/<int:pvenue_id>')
def show_venue(pvenue_id):
venueobj = Venue.query.filter_by(id=pvenue_id).one()
venueobj.genres = venueobj.genres.split(',')
venueobj.past_shows = venueobj.get_past_shows()
venueobj.past_shows_count = len(venueobj.past_shows)
venueobj.upcoming_shows = venueobj.get_upcoming_shows()
venueobj.upcoming_shows_count = len(venueobj.upcoming_shows)
return render_template('pages/show_venue.html', venue=venueobj, form=VenueForm())
# Create Venue
# ----------------------------------------------------------------
@app.route('/venues/create', methods=['GET'])
def create_venue_form():
form = VenueForm()
return render_template('forms/new_venue.html', form=form)
@app.route('/venues/create', methods=['POST'])
def create_venue_submission():
newvalueslist = dict(request.form.lists())
no_error_occurred = True
form = VenueForm(request.form)
if not form.validate():
print('New venue form validation failed!')
flash_errors(form)
return render_template('forms/new_venue.html', form=form)
# special treatment, since it is not sent if unchecked
seeking = ('seeking_talent' in newvalueslist.keys()) and (request.form['seeking_talent'] == 'y')
newvenue = Venue(
name = request.form['name'],
genres = ','.join(newvalueslist['genres']),
city = request.form['city'],
state = request.form['state'],
address = request.form['address'],
phone = request.form['phone'],
seeking_talent = seeking,
seeking_description = request.form['seeking_description'],
website_link = request.form['website_link'],
image_link = request.form['image_link'],
facebook_link = request.form['facebook_link'],
)
session = db.session()
try:
session.add(newvenue)
session.commit()
createdvenue = Venue.query.get(newvenue.id)
except:
print(f'Error creating venue {request.form["name"]}: {sys.exc_info()}')
flash('Sorry, Venue ' + request.form['name'] + ' could not be listed, please contact the support!', category='error')
session.rollback()
no_error_occurred = False
finally:
session.close()
if (no_error_occurred):
flash('Venue ' + createdvenue.name + ' successfully listed!')
return render_template('pages/home.html')
@app.route('/venues/<int:venue_id>', methods=['DELETE'])
def delete_venue(venue_id):
print('delete...') #TODO:
try:
venue_to_delete = Venue.query.get(venue_id)
except:
print(f'Error retrieving venue {venue_id}: {sys.exc_info()}')
        msgerror = f'Sorry, Venue {venue_id} could not be accessed, please contact the support!'
flash(msgerror)
no_errors = False
return make_response(jsonify(message=msgerror), 500)
venue_name = venue_to_delete.name
print('Will delete venue {}'.format(venue_name)) #TODO:
try:
db.session.delete(venue_to_delete)
db.session.commit()
except:
db.session.rollback()
print(f'Error deleting venue {venue_id}: {sys.exc_info()}')
        msgerror = f'Sorry, Venue {venue_id} could not be deleted, please check if there are pending shows, or contact the support!'
flash(msgerror)
no_errors = False
return make_response(jsonify(message=msgerror), 500)
finally:
db.session.close()
flash(f'Venue {venue_name}, id={venue_id} successfully deleted!')
return make_response(jsonify(message='ok'), 200)
# Artists
# ----------------------------------------------------------------
@app.route('/artists')
def artists():
artists_data = Artist.query.with_entities(Artist.id, Artist.name).all()
data=[]
for art_id, art_name in artists_data:
data.append({ "id": art_id, "name": art_name })
form = ArtistForm(artists_data)
return render_template('pages/artists.html', artists=data, form=form)
@app.route('/artists/search', methods=['POST'])
def search_artists():
term_to_search = request.form.get('search_term', '')
artists_found = Artist.query.filter(Artist.name.ilike(f'%{term_to_search}%')).all()
artists_data = []
for artist in artists_found:
artists_data.append({
"id": artist.id,
"name": artist.name,
"num_upcoming_shows": artist.get_upcoming_shows_count(),
})
response={
"count": len(artists_found),
"data": artists_data
}
return render_template('pages/search_artists.html', results=response, search_term=term_to_search, form=ArtistForm())
@app.route('/artists/<int:artist_id>')
def show_artist(artist_id):
artist_data = Artist.query.get(artist_id)
artist_data.upcoming_shows = artist_data.get_upcoming_shows()
artist_data.past_shows = artist_data.get_past_shows()
artist_data.past_shows_count = len(artist_data.past_shows)
artist_data.upcoming_shows_count = len(artist_data.upcoming_shows)
artist_data.genres = artist_data.genres.split(',')
return render_template('pages/show_artist.html', artist=artist_data, form=ArtistForm())
# Update
# ----------------------------------------------------------------
@app.route('/artists/<int:artist_id>/edit', methods=['GET'])
def edit_artist(artist_id):
try:
artist = Artist.query.get(artist_id)
except:
print(f'Error retrieving artist {artist_id} data: {sys.exc_info()}')
flash(f'Sorry, could not retrieve data on artist {artist_id}, please contact the support!')
return redirect(url_for('show_artist', artist_id=artist_id))
form = ArtistForm(
name = artist.name,
genres = artist.genres.split(','),
city = artist.city,
state = artist.state,
phone = artist.phone,
website_link = artist.website_link,
facebook_link = artist.facebook_link,
seeking_venue = artist.seeking_venue,
seeking_description = artist.seeking_description,
image_link = artist.image_link,
)
return render_template('forms/edit_artist.html', form=form, artist=artist)
@app.route('/artists/<int:artist_id>/edit', methods=['POST'])
def edit_artist_submission(artist_id):
edited_data_lists = dict(request.form.lists())
try:
artist = Artist.query.get(artist_id)
except:
print(f'Error retrieving artist {artist_id} data: {sys.exc_info()}')
flash(f'Sorry, could not retrieve data on artist {artist_id}, please contact the support!')
return redirect(url_for('show_artist', artist_id=artist_id))
form = ArtistForm(request.form)
if not form.validate():
print('Form Validation failed!')
flash_errors(form)
return render_template('forms/edit_artist.html', form=form, artist=artist)
b_seeking = ('seeking_venue' in edited_data_lists.keys()) and (request.form['seeking_venue'].lower() == 'y')
try:
artist.name = request.form['name']
artist.city = request.form['city']
artist.state = request.form['state']
artist.phone = request.form['phone']
artist.seeking_venue = b_seeking
artist.seeking_description = request.form['seeking_description']
artist.genres = ','.join(edited_data_lists['genres'])
artist.image_link = request.form['image_link']
artist.website_link = request.form['website_link']
artist.facebook_link = request.form['facebook_link']
db.session.commit()
except:
print(f'Error editing artist {artist_id} data: {sys.exc_info()}')
flash(f'Sorry, could not edit data on artist {artist_id}, please contact the support!')
db.session.rollback()
return redirect(url_for('show_artist', artist_id=artist_id))
finally:
db.session.close()
flash(f'Artist {request.form["name"]} with id={artist_id} successfully edited!')
    return redirect(url_for('show_artist', artist_id=artist_id))
@app.route('/venues/<int:venue_id>/edit', methods=['GET'])
def edit_venue(venue_id):
try:
venue = Venue.query.get(venue_id)
except:
print(f'Error retrieving venue {venue_id} data: {sys.exc_info()}')
flash(f'Sorry, could not retrieve data on venue {venue_id}, please contact the support!')
return redirect(url_for('show_venue', pvenue_id=venue_id))
print('=> genres = {}'.format(venue.genres.split(',')))
form = VenueForm(
id = venue.id,
name = venue.name,
genres = venue.genres.split(','),
city = venue.city,
state = venue.state,
address = venue.address,
phone = venue.phone,
seeking_talent = venue.seeking_talent,
seeking_description = venue.seeking_description,
website_link = venue.website_link,
image_link = venue.image_link,
facebook_link = venue.facebook_link,
)
return render_template('forms/edit_venue.html', form=form, venue=venue)
@app.route('/venues/<int:venue_id>/edit', methods=['POST'])
def edit_venue_submission(venue_id):
edited_data_lists = dict(request.form.lists())
b_seeking = ('seeking_talent' in edited_data_lists.keys()) and (request.form['seeking_talent'].lower() == 'y')
try:
venue = Venue.query.get(venue_id)
except:
print(f'Error retrieving venue {venue_id} data: {sys.exc_info()}')
flash(f'Sorry, could not retrieve data on venue {venue_id}, please contact the support!')
        return redirect(url_for('show_venue', pvenue_id=venue_id))
form = VenueForm(request.form)
if not form.validate():
print('Edit venue form validation failed!')
flash_errors(form)
return render_template('forms/edit_venue.html', form=form, venue=venue)
try:
venue.name = request.form['name']
venue.genres = ','.join(edited_data_lists['genres'])
venue.city = request.form['city']
venue.state = request.form['state']
venue.address = request.form['address']
venue.phone = request.form['phone']
venue.seeking_talent = b_seeking
venue.seeking_description = request.form['seeking_description']
venue.website_link = request.form['website_link']
venue.image_link = request.form['image_link']
venue.facebook_link = request.form['facebook_link']
db.session.commit()
except:
print(f'Error editing venue {venue_id} data: {sys.exc_info()}')
flash(f'Sorry, could not edit data on venue {venue_id}, please contact the support!')
db.session.rollback()
return redirect(url_for('show_venue', pvenue_id=venue_id))
finally:
db.session.close()
flash(f'Venue {request.form["name"]} with id={venue_id} successfully edited!')
    return redirect(url_for('show_venue', pvenue_id=venue_id))
# Create Artist
# ----------------------------------------------------------------
@app.route('/artists/create', methods=['GET'])
def create_artist_form():
form = ArtistForm()
return render_template('forms/new_artist.html', form=form)
@app.route('/artists/create', methods=['POST'])
def create_artist_submission():
values_lst = dict(request.form.lists())
form = ArtistForm(request.form)
if not form.validate():
print('New artist form validation failed!')
flash_errors(form)
return render_template('forms/new_artist.html', form=form)
b_seeking = ('seeking_venue' in values_lst.keys()) and (request.form['seeking_venue'].lower() == 'y')
no_error_occurred = True
new_artist = Artist(
name = request.form['name'],
city = request.form['city'],
state = request.form['state'],
phone = request.form['phone'],
seeking_venue = b_seeking,
seeking_description = request.form['seeking_description'],
genres = ','.join(values_lst['genres']),
image_link = request.form['image_link'],
website_link = request.form['website_link'],
facebook_link = request.form['facebook_link'],
)
session = db.session()
try:
session.add(new_artist)
session.commit()
createdartist = Artist.query.get(new_artist.id)
except:
print(f'Error creating artist {request.form["name"]}: {sys.exc_info()}')
flash('Sorry, artist ' + request.form['name'] + ' could not be listed, please contact the support!', category='error')
session.rollback()
no_error_occurred = False
finally:
session.close()
if (no_error_occurred):
flash('Artist ' + request.form['name'] + ' was successfully listed!')
return render_template('pages/home.html')
# Shows
# ----------------------------------------------------------------
@app.route('/shows')
def shows():
# displays list of shows at /shows
shows = Show.query.join(Artist).join(Venue).with_entities(Venue.id, Venue.name, Artist.id, Artist.name, Artist.image_link, Show.start_time).all()
shows_data = []
for venue_id, venue_name, artist_id, artist_name, artist_link, show_time in shows:
shows_data.append({
"venue_id": venue_id,
"venue_name": venue_name,
"artist_id": artist_id,
"artist_name": artist_name,
"artist_image_link": artist_link,
"start_time": show_time
})
return render_template('pages/shows.html', shows=shows_data, form=ShowForm())
@app.route('/shows/search', methods=['POST'])
def search_shows():
str_to_search = request.form.get('search_term', '')
try:
        shows = Show.query.join(Artist).join(Venue).with_entities(Venue.id, Venue.name, Artist.id, Artist.name, Artist.image_link, Show.start_time).filter(or_(Artist.name.ilike(f"%{str_to_search}%"), Venue.name.ilike(f"%{str_to_search}%"))).all()
except:
print(f'Error searching shows by name {str_to_search}: {sys.exc_info()}')
        flash('Sorry, an error occurred while searching shows, please contact the support!', category='error')
return render_template('pages/shows.html', shows=[], form=ShowForm())
shows_data = []
for venue_id, venue_name, artist_id, artist_name, artist_link, show_time in shows:
shows_data.append({
"venue_id": venue_id,
"venue_name": venue_name,
"artist_id": artist_id,
"artist_name": artist_name,
"artist_image_link": artist_link,
"start_time": show_time
})
response={
"count": len(shows_data),
"data": shows_data
}
return render_template('pages/shows.html', shows=shows_data, form=ShowForm())
@app.route('/shows/create')
def create_shows():
# renders form. do not touch.
form = ShowForm()
return render_template('forms/new_show.html', form=form)
@app.route('/shows/create', methods=['POST'])
def create_show_submission():
form = ShowForm(request.form)
if not form.validate():
print('New show form validation failed!')
flash_errors(form)
return render_template('forms/new_show.html', form=form)
no_error_occurred = True
new_show = Show(
artist_id = request.form['artist_id'],
venue_id = request.form['venue_id'],
start_time = request.form['start_time'],
)
session = db.session()
try:
session.add(new_show)
session.commit()
except:
print(f'Error creating show "{request.form}": {sys.exc_info()}')
flash('Sorry, new show at venue id ' + request.form['venue_id'] + ' could not be listed, please contact the support!', category='error')
session.rollback()
no_error_occurred = False
finally:
if no_error_occurred:
new_show = session.query(Show).filter_by(artist_id=new_show.artist_id,venue_id=new_show.venue_id,start_time=new_show.start_time).one()
session.close()
if (no_error_occurred):
flash(f'Show by artist {new_show.artist_id} at venue id {new_show.venue_id} successfully listed!')
return render_template('pages/home.html')
@app.errorhandler(404)
def not_found_error(error):
return render_template('errors/404.html'), 404
@app.errorhandler(500)
def server_error(error):
return render_template('errors/500.html'), 500
if not app.debug:
file_handler = FileHandler('error.log')
file_handler.setFormatter(
Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]')
)
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('errors')
#----------------------------------------------------------------------------#
# Launch.
#----------------------------------------------------------------------------#
'''
# Default port:
if __name__ == '__main__':
app.run()
'''
# Or specify port manually:
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
| 33.313356
| 241
| 0.662092
|
6a0551df63c7a299a491f629520ac1194d4821c3
| 923
|
py
|
Python
|
invmonInfra/base/parserBase.py
|
jtom38/invmon-api
|
28f163bef47ee5c95bac0f40198e25e44090758f
|
[
"MIT"
] | 1
|
2021-09-23T16:19:46.000Z
|
2021-09-23T16:19:46.000Z
|
invmonInfra/base/parserBase.py
|
jtom38/invmon-api
|
28f163bef47ee5c95bac0f40198e25e44090758f
|
[
"MIT"
] | 16
|
2021-12-09T06:22:29.000Z
|
2022-03-25T06:26:01.000Z
|
workerInfra/base/parserBase.py
|
jtom38/newsbot.worker
|
6f5d93c474d21542f1af20e3b2537f26e2bcbbc3
|
[
"MIT"
] | null | null | null |
from requests import Response, get
from bs4 import BeautifulSoup
class ParserBase():
def getContent(self, uri: str = '') -> Response:
try:
headers = self.getHeaders()
if uri == "":
return get(self.uri, headers=headers)
else:
return get(url=uri, headers=headers)
except Exception as e:
self.logger.critical(f"Failed to collect data from {self.uri}. {e}")
def getParser(
self, requestsContent: Response = "", seleniumContent: str = ""
) -> BeautifulSoup:
try:
if seleniumContent != "":
return BeautifulSoup(seleniumContent, features="html.parser")
else:
return BeautifulSoup(requestsContent.content, features="html.parser")
except Exception as e:
self.logger.critical(f"failed to parse data returned from requests. {e}")
| 34.185185
| 85
| 0.586132
|
433747d35346acf654d7ca9a2f2d362e6a3a0775
| 2,475
|
py
|
Python
|
pysnmp/Dell-CDB-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/Dell-CDB-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/Dell-CDB-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module Dell-CDB-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Dell-CDB-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:40:41 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint")
rnd, = mibBuilder.importSymbols("Dell-MIB", "rnd")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Gauge32, Unsigned32, Integer32, iso, Counter64, ObjectIdentity, Counter32, Bits, MibScalar, MibTable, MibTableRow, MibTableColumn, ModuleIdentity, IpAddress, MibIdentifier, NotificationType, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "Unsigned32", "Integer32", "iso", "Counter64", "ObjectIdentity", "Counter32", "Bits", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ModuleIdentity", "IpAddress", "MibIdentifier", "NotificationType", "TimeTicks")
TruthValue, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "TextualConvention", "DisplayString")
rlCDB = ModuleIdentity((1, 3, 6, 1, 4, 1, 89, 94))
rlCDB.setRevisions(('2007-01-02 00:00',))
if mibBuilder.loadTexts: rlCDB.setLastUpdated('200701020000Z')
if mibBuilder.loadTexts: rlCDB.setOrganization('Dell')
rlStartupCDBChanged = MibScalar((1, 3, 6, 1, 4, 1, 89, 94, 1), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlStartupCDBChanged.setStatus('current')
rlManualReboot = MibScalar((1, 3, 6, 1, 4, 1, 89, 94, 2), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlManualReboot.setStatus('current')
rlStartupCDBEmpty = MibScalar((1, 3, 6, 1, 4, 1, 89, 94, 3), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlStartupCDBEmpty.setStatus('current')
mibBuilder.exportSymbols("Dell-CDB-MIB", rlManualReboot=rlManualReboot, rlStartupCDBEmpty=rlStartupCDBEmpty, rlStartupCDBChanged=rlStartupCDBChanged, PYSNMP_MODULE_ID=rlCDB, rlCDB=rlCDB)
| 95.192308
| 477
| 0.77899
|
b5006ac73a65a2c6f17d3747b528de5700cc7a3a
| 5,895
|
py
|
Python
|
src/dataflow/core/program_utils.py
|
luweishuang/task_oriented_dialogue_as_dataflow_synthesis
|
5638adfb2274d76ca1c430e6b727cca41f43c195
|
[
"MIT"
] | null | null | null |
src/dataflow/core/program_utils.py
|
luweishuang/task_oriented_dialogue_as_dataflow_synthesis
|
5638adfb2274d76ca1c430e6b727cca41f43c195
|
[
"MIT"
] | null | null | null |
src/dataflow/core/program_utils.py
|
luweishuang/task_oriented_dialogue_as_dataflow_synthesis
|
5638adfb2274d76ca1c430e6b727cca41f43c195
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import re
from enum import Enum
from json import dumps
from typing import Any, Dict, Iterable, List, Tuple
from dataflow.core.program import BuildStructOp, CallLikeOp, Expression, ValueOp
# revise args
ROOT_LOCATION = "rootLocation"
OLD_LOCATION = "oldLocation"
NEW = "new"
# BuildStructOp special arg
NON_EMPTY_BASE = "nonEmptyBase"
Idx = int
class OpType(Enum):
"""The type of an op."""
Call = "Call"
Struct = "Struct"
Value = "#"
class DataflowFn(Enum):
"""Special Dataflow functions"""
Find = "find" # search
Abandon = "abandon"
Revise = "ReviseConstraint"
Refer = "refer"
RoleConstraint = "roleConstraint"
Get = "get" # access a member field of an object
def idx_str(idx: Idx) -> str:
return f"[{idx}]"
def is_idx_str(s: str) -> bool:
return s.startswith("[") and s.endswith("]")
def unwrap_idx_str(s: str) -> int:
return int(s[1:-1])
def is_struct_op_schema(name: str) -> bool:
"""BuildStructOp schemas begin with a capital letter."""
if len(name) == 0:
return False
return re.match(r"[A-Z]", name[0]) is not None
def get_named_args(e: Expression) -> List[Tuple[str, str]]:
"""
Gets a list of (arg_name, arg_id) pairs.
If `e` is a BuildStructOp, then `arg_names` are its `fields`, otherwise
they are the 0-indexed argument position.
"""
if isinstance(e.op, BuildStructOp):
bso = e.op
# a non-empty BuildStructOp has an implicit 0-th field name
zeroth_field = [] if bso.empty_base else [NON_EMPTY_BASE]
fields = zeroth_field + list(bso.op_fields)
else:
fields = [f"arg{i}" for i in range(len(e.arg_ids))]
return list(zip(fields, e.arg_ids))
def mk_constraint(
tpe: str, args: Iterable[Tuple[str, int]], idx: Idx,
) -> Tuple[Expression, Idx]:
return mk_struct_op(
schema=f"Constraint[{tpe.capitalize()}]", args=dict(args), idx=idx
)
def mk_equality_constraint(val: int, idx: Idx) -> Tuple[Expression, Idx]:
return mk_call_op(name="?=", args=[val], idx=idx)
def mk_unset_constraint(idx: Idx) -> Tuple[Expression, Idx]:
return mk_struct_op(schema="EmptyConstraint", args={}, idx=idx)
def mk_salience(tpe: str, idx: Idx) -> Tuple[List[Expression], Idx]:
constraint_expr, constraint_idx = mk_constraint(tpe=tpe, args={}, idx=idx)
salience_expr, idx = mk_call_op(
name=DataflowFn.Refer.value, args=[constraint_idx], idx=constraint_idx
)
return [constraint_expr, salience_expr], idx
def mk_salient_action(idx: Idx) -> Tuple[List[Expression], Idx]:
""" (roleConstraint #(Path "output")) """
path_expr, path_idx = mk_value_op(schema="Path", value="output", idx=idx,)
intension_expr, intension_idx = mk_call_op(
name=DataflowFn.RoleConstraint.value, args=[path_idx], idx=path_idx,
)
return [path_expr, intension_expr], intension_idx
def mk_revise(
root_location_idx: Idx, old_location_idx: Idx, new_idx: Idx, idx: Idx,
) -> Tuple[Expression, Idx]:
"""
Revises the salient constraint satisfying the constraint at `old_location_idx`,
in the salient computation satisfying the constraint at `root_location_idx`,
with the constraint at `new_idx`.
In Lispress:
```
(Revise
:rootLocation {root_location}
:oldLocation {old_location}
:new {new})
"""
return mk_struct_op(
schema=DataflowFn.Revise.value,
args={
ROOT_LOCATION: root_location_idx,
OLD_LOCATION: old_location_idx,
NEW: new_idx,
},
idx=idx,
)
def mk_revise_the_main_constraint(
tpe: str, new_idx: Idx
) -> Tuple[List[Expression], Idx]:
"""
Revises the salient constraint (on values of type `tpe`) in the salient action, with the
constraint at `new_idx`.
(An "action" is an argument of `Yield`).
In Lispress:
```
(ReviseConstraint
:rootLocation (RoleConstraint :role #(Path "output"))
:oldLocation (Constraint[Constraint[{tpe}]])
:new {new})
```
"""
salient_action_exprs, salient_action_idx = mk_salient_action(new_idx)
old_loc_expr, old_loc_idx = mk_struct_op(
schema=f"Constraint[Constraint[{tpe.capitalize()}]]",
args={},
idx=salient_action_idx,
)
revise_expr, revise_idx = mk_revise(
root_location_idx=salient_action_idx,
old_location_idx=old_loc_idx,
new_idx=new_idx,
idx=old_loc_idx,
)
return salient_action_exprs + [old_loc_expr, revise_expr], revise_idx
def mk_struct_op(
schema: str, args: Dict[str, Idx], idx: Idx,
) -> Tuple[Expression, Idx]:
new_idx = idx + 1
args = dict(args) # defensive copy
base = args.pop(NON_EMPTY_BASE, None)
is_empty_base = base is None
pairs = sorted(args.items()) # sorts keys alphabetically
arg_names = [k for k, v in pairs]
# nonEmptyBase always comes first
arg_vals = ([] if is_empty_base else [base]) + [v for k, v in pairs]
flat_exp = Expression(
id=idx_str(new_idx),
op=BuildStructOp(
op_schema=schema,
op_fields=arg_names,
empty_base=is_empty_base,
push_go=True,
),
arg_ids=[idx_str(v) for v in arg_vals],
)
return flat_exp, new_idx
def mk_call_op(name: str, args: List[Idx], idx: Idx = 0) -> Tuple[Expression, Idx]:
new_idx = idx + 1
flat_exp = Expression(
id=idx_str(new_idx),
op=CallLikeOp(name=name),
arg_ids=[idx_str(v) for v in args],
)
return flat_exp, new_idx
def mk_value_op(value: Any, schema: str, idx: Idx) -> Tuple[Expression, Idx]:
my_idx = idx + 1
dumped = dumps({"schema": schema, "underlying": value})
expr = Expression(id=idx_str(my_idx), op=ValueOp(value=dumped))
return expr, my_idx
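# --- Illustrative sketch (not part of the original module): how the `idx`
# threading works.  Each mk_* helper takes the last index that was allocated
# and returns the new Expression together with the index it consumed, so a
# program is built by feeding each returned index into the next call.  The
# schema and field names below are made-up examples.
def _example_index_threading() -> List[Expression]:
    exprs: List[Expression] = []
    # #(String "dinner") becomes expression [1]
    value_expr, value_idx = mk_value_op(value="dinner", schema="String", idx=0)
    exprs.append(value_expr)
    # (Constraint[Event] :subject [1]) becomes expression [2]
    constraint_expr, constraint_idx = mk_constraint(
        tpe="event", args=[("subject", value_idx)], idx=value_idx
    )
    exprs.append(constraint_expr)
    # (find [2]) becomes expression [3]
    find_expr, _ = mk_call_op(
        name=DataflowFn.Find.value, args=[constraint_idx], idx=constraint_idx
    )
    exprs.append(find_expr)
    return exprs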
| 29.183168
| 92
| 0.65106
|
2e78bf965a2d0ca7852990907cf9f5fd6781560b
| 55
|
py
|
Python
|
tests/__init__.py
|
jfsolarte/python_clean_architecture
|
56b0c0eff50bc98774a0caee12e3030789476687
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
jfsolarte/python_clean_architecture
|
56b0c0eff50bc98774a0caee12e3030789476687
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
jfsolarte/python_clean_architecture
|
56b0c0eff50bc98774a0caee12e3030789476687
|
[
"MIT"
] | null | null | null |
"""Unit test package for python_clean_architecture."""
| 27.5
| 54
| 0.781818
|
79c8d3ab4db0e0065016feb12a1b2159f8b66baf
| 5,427
|
py
|
Python
|
colcon_coveragepy_result/task/coveragepy.py
|
christophebedard/colcon-coveragepy-result
|
d923559749655d8f6b7ae6feae5808475438aaae
|
[
"Apache-2.0"
] | 1
|
2020-05-21T22:21:08.000Z
|
2020-05-21T22:21:08.000Z
|
colcon_coveragepy_result/task/coveragepy.py
|
colcon/colcon-coveragepy-result
|
d923559749655d8f6b7ae6feae5808475438aaae
|
[
"Apache-2.0"
] | 9
|
2020-05-11T20:15:53.000Z
|
2020-12-19T17:27:40.000Z
|
colcon_coveragepy_result/task/coveragepy.py
|
colcon/colcon-coveragepy-result
|
d923559749655d8f6b7ae6feae5808475438aaae
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Christophe Bedard
# Licensed under the Apache License, Version 2.0
from glob import glob
import os
from pathlib import Path
from shutil import copy2
import subprocess
from colcon_core.logging import colcon_logger
from colcon_core.plugin_system import satisfies_version
from colcon_core.task import TaskExtensionPoint
logger = colcon_logger.getChild(__name__)
class CoveragePyTask(TaskExtensionPoint):
"""Run coverage.py on a package."""
TASK_NAME = 'coveragepy'
def __init__(self, has_command=True): # noqa: D107
super().__init__()
satisfies_version(TaskExtensionPoint.EXTENSION_POINT_VERSION, '^1.0')
self.__has_command = has_command
async def coveragepy(self, *, additional_hooks=None): # noqa: D102
pkg = self.context.pkg
args = self.context.args
# Check if the package has been built
pkg_build_path = Path(os.path.abspath(os.path.join(args.build_base, pkg.name)))
if not pkg_build_path.exists():
logger.info(
"Skipping package '{pkg.name}' since it has not been built".format_map(locals())
)
return 0
logger.info("Running coveragepy task on package '{pkg.name}'".format_map(locals()))
# Get list of .coverage files, depending on package type
coverage_files = []
if 'ros.ament_cmake' == pkg.type:
coverage_files.extend(glob(str(pkg_build_path / 'pytest_cov/*/.coverage')))
elif 'ros.ament_python' == pkg.type:
coverage_files.append(str(pkg_build_path / '.coverage'))
# Filter out non-existing files in case they have not been generated
coverage_files = list(filter(os.path.exists, coverage_files))
if 0 == len(coverage_files):
logger.warning(
"No .coverage files found for package '{pkg.name}' of type '{pkg.type}'"
.format_map(locals())
)
return 0
logger.info(
"Coverage files for package '{pkg.name}': {coverage_files}".format_map(locals())
)
# Copy .coverage files to a new directory, because combining files deletes them
coveragepy_dir = self.get_package_combine_dir(args.build_base, pkg.name)
coveragepy_path = Path(coveragepy_dir)
coveragepy_path.mkdir(exist_ok=True)
logger.info('Copying coverage files to {coveragepy_dir}'.format_map(locals()))
coverage_files_copies = [
str(coveragepy_path / ('.coverage.' + str(i))) for i in range(len(coverage_files))
]
for original, copy in zip(coverage_files, coverage_files_copies):
copy2(original, copy)
# Combine .coverage files
rc, stdout, _ = coverage_combine(
coverage_files_copies,
coveragepy_dir,
self.__has_command,
)
if 0 == rc and args.verbose:
# Report
rc, stdout, _ = coverage_report(
coveragepy_dir,
args.coverage_report_args,
self.__has_command,
)
if 0 == rc:
print('\n' + stdout)
return rc
@staticmethod
def get_package_combine_dir(build_base, pkg_name):
"""Get the directory in which to combine .coverage files for a given package."""
pkg_build_dir = os.path.abspath(os.path.join(build_base, pkg_name))
return str(os.path.abspath(os.path.join(pkg_build_dir, 'coveragepy')))
def run(cmd, cwd=None, ignore_errors=False):
"""Run command in given current working directory."""
cmd_str = ' '.join(cmd)
logger.debug("Running command '{cmd_str}' in {cwd}".format_map(locals()))
return_code = 1
out = ''
err = 'failed to run command'
try:
process = subprocess.Popen(cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
out = stdout.decode()
err = stderr.decode()
return_code = process.returncode
except Exception as e: # noqa: B902
return_code = 1
err = str(e)
if 0 != return_code and not ignore_errors:
logger.error("Command '{cmd_str}' failed: {err}".format_map(locals()))
return return_code, out, err
def has_coverage_command():
"""
Check if the 'coverage' command is available.
Not all installations include the 'coverage' command.
"""
return run(['coverage', '--help'], ignore_errors=True)[0] == 0
def run_coverage(cmd, cwd, has_coverage_command=True):
"""Run coverage command in a specific directory."""
if not has_coverage_command:
cmd = ['python3', '-m'] + cmd
return run(cmd, cwd)
def coverage_combine(files, cwd, has_coverage_command=True):
"""Combine .coverage files."""
assert files, 'must combine at least one file'
cmd = ['coverage', 'combine'] + files
return run_coverage(cmd, cwd, has_coverage_command)
def coverage_html(cwd, additional_args, has_coverage_command=True):
"""Create an HTML report from a .coverage file."""
cmd = ['coverage', 'html'] + (additional_args or [])
return run_coverage(cmd, cwd, has_coverage_command)
def coverage_report(cwd, additional_args, has_coverage_command=True):
"""Produce a report for a .coverage file."""
cmd = ['coverage', 'report'] + (additional_args or [])
return run_coverage(cmd, cwd, has_coverage_command)
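# --- Illustrative sketch (not part of the original module): combining two
# .coverage files and printing a terminal report with the helpers above.
# The directory and file names are hypothetical placeholders.
def _example_combine_and_report(combine_dir: str) -> int:
    has_cmd = has_coverage_command()
    files = [
        os.path.join(combine_dir, '.coverage.0'),
        os.path.join(combine_dir, '.coverage.1'),
    ]
    rc, _, _ = coverage_combine(files, combine_dir, has_cmd)
    if rc == 0:
        # '--show-missing' lists the line numbers that are not covered.
        rc, stdout, _ = coverage_report(combine_dir, ['--show-missing'], has_cmd)
        if rc == 0:
            print('\n' + stdout)
    return rc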
| 36.422819
| 96
| 0.646582
|
7f076e7fc075fb7706e179b43708a7abf7379710
| 22,910
|
py
|
Python
|
custom_components/awox/awoxmeshlight/__init__.py
|
fsaris/home-assistant-awox
|
51256aca1067b4f89d0e7e779fe92c032d631c02
|
[
"MIT"
] | 31
|
2020-12-23T14:47:52.000Z
|
2022-01-28T20:40:21.000Z
|
custom_components/awox/awoxmeshlight/__init__.py
|
fsaris/home-assistant-awox
|
51256aca1067b4f89d0e7e779fe92c032d631c02
|
[
"MIT"
] | 39
|
2020-12-29T15:50:05.000Z
|
2022-03-14T22:20:23.000Z
|
custom_components/awox/awoxmeshlight/__init__.py
|
fsaris/home-assistant-awox
|
51256aca1067b4f89d0e7e779fe92c032d631c02
|
[
"MIT"
] | 9
|
2021-01-09T16:23:41.000Z
|
2021-08-19T19:58:42.000Z
|
from __future__ import unicode_literals
from . import packetutils as pckt
from os import urandom
from bluepy import btle
import logging
import struct
import time
# Commands :
#: Set mesh groups.
#: Data : 3 bytes
C_MESH_GROUP = 0xd7
#: Set the mesh id. The light will still answer to the 0 mesh id. Calling the
#: command again replaces the previous mesh id.
#: Data : the new mesh id, 2 bytes in little endian order
C_MESH_ADDRESS = 0xe0
#:
C_MESH_RESET = 0xe3
#: On/Off command. Data : one byte 0, 1
C_POWER = 0xd0
#: Data : one byte
C_LIGHT_MODE = 0x33
#: Data : one byte 0 to 6
C_PRESET = 0xc8
#: White temperature. one byte 0 to 0x7f
C_WHITE_TEMPERATURE = 0xf0
#: one byte 1 to 0x7f
C_WHITE_BRIGHTNESS = 0xf1
#: 4 bytes : 0x4 red green blue
C_COLOR = 0xe2
#: one byte : 0xa to 0x64 ....
C_COLOR_BRIGHTNESS = 0xf2
#: Data 4 bytes : How long a color is displayed in a sequence in milliseconds as
#: an integer in little endian order
C_SEQUENCE_COLOR_DURATION = 0xf5
#: Data 4 bytes : Duration of the fading between colors in a sequence, in
#: milliseconds, as an integer in little endian order
C_SEQUENCE_FADE_DURATION = 0xf6
#: 7 bytes
C_TIME = 0xe4
#: 10 bytes
C_ALARMS = 0xe5
#: Request current light/device status
C_GET_STATUS_SENT = 0xda
#: Response of light/device status request
C_GET_STATUS_RECEIVED = 0xdb
#: State notification
C_NOTIFICATION_RECEIVED = 0xdc
PAIR_CHAR_UUID = '00010203-0405-0607-0809-0a0b0c0d1914'
COMMAND_CHAR_UUID = '00010203-0405-0607-0809-0a0b0c0d1912'
STATUS_CHAR_UUID = '00010203-0405-0607-0809-0a0b0c0d1911'
OTA_CHAR_UUID = '00010203-0405-0607-0809-0a0b0c0d1913'
logger = logging.getLogger(__name__)
class Peripheral(btle.Peripheral):
def _connect(self, addr, addrType=btle.ADDR_TYPE_PUBLIC, iface=None, timeout=5):
"""
Temporary manual patch see https://github.com/IanHarvey/bluepy/pull/434
also added a default `timeout` as this is not part yet of the release bluepy package
"""
if len(addr.split(":")) != 6:
raise ValueError("Expected MAC address, got %s" % repr(addr))
if addrType not in (btle.ADDR_TYPE_PUBLIC, btle.ADDR_TYPE_RANDOM):
raise ValueError("Expected address type public or random, got {}".format(addrType))
self._startHelper(iface)
self.addr = addr
self.addrType = addrType
self.iface = iface
if iface is not None:
self._writeCmd("conn %s %s %s\n" % (addr, addrType, "hci"+str(iface)))
else:
self._writeCmd("conn %s %s\n" % (addr, addrType))
rsp = self._getResp('stat', timeout)
if rsp is None:
self._stopHelper()
raise btle.BTLEDisconnectError("Timed out while trying to connect to peripheral %s, addr type: %s" %
(addr, addrType), rsp)
while rsp and rsp['state'][0] == 'tryconn':
rsp = self._getResp('stat', timeout)
if rsp is None:
self._stopHelper()
raise btle.BTLEDisconnectError("Timed out while trying to connect to peripheral %s, addr type: %s" %
(addr, addrType), rsp)
if rsp['state'][0] != 'conn':
self._stopHelper()
raise btle.BTLEDisconnectError("Failed to connect to peripheral %s, addr type: %s [%s]" % (addr, addrType, rsp), rsp)
def _getResp(self, wantType, timeout=None):
"""
Temporary manual patch see https://github.com/IanHarvey/bluepy/commit/b02b436cb5c71387bd70339a1b472b3a6bfe9ac8
"""
# Temp set max timeout for wr commands (failsave)
if timeout is None and wantType == 'wr':
logger.debug('Set fallback time out - %s', wantType)
timeout = 10
if isinstance(wantType, list) is not True:
wantType = [wantType]
while True:
resp = self._waitResp(wantType + ['ntfy', 'ind'], timeout)
if resp is None:
return None
respType = resp['rsp'][0]
if respType == 'ntfy' or respType == 'ind':
hnd = resp['hnd'][0]
data = resp['d'][0]
if self.delegate is not None:
self.delegate.handleNotification(hnd, data)
if respType not in wantType:
continue
return resp
def _waitResp(self, wantType, timeout=None):
while True:
if self._helper.poll() is not None:
raise btle.BTLEInternalError("Helper exited")
if timeout:
logger.debug("_waitResp - set timeout to %d", timeout)
fds = self._poller.poll(timeout*1000)
if len(fds) == 0:
logger.debug("Select timeout")
return None
rv = self._helper.stdout.readline()
if rv.startswith('#') or rv == '\n' or len(rv)==0:
continue
resp = btle.BluepyHelper.parseResp(rv)
if 'rsp' not in resp:
raise btle.BTLEInternalError("No response type indicator", resp)
respType = resp['rsp'][0]
if respType in wantType:
logger.debug("_waitResp - resp [%s]", resp)
return resp
elif respType == 'stat':
if 'state' in resp and len(resp['state']) > 0 and resp['state'][0] == 'disc':
self._stopHelper()
raise btle.BTLEDisconnectError("Device disconnected", resp)
elif respType == 'err':
errcode=resp['code'][0]
if errcode=='nomgmt':
raise btle.BTLEManagementError("Management not available (permissions problem?)", resp)
elif errcode=='atterr':
raise btle.BTLEGattError("Bluetooth command failed", resp)
else:
raise btle.BTLEException("Error from bluepy-helper (%s)" % errcode, resp)
elif respType == 'scan':
# Scan response when we weren't interested. Ignore it
continue
else:
raise btle.BTLEInternalError("Unexpected response (%s)" % respType, resp)
def stop(self):
self._stopHelper()
class Delegate(btle.DefaultDelegate):
def __init__(self, light):
self.light = light
btle.DefaultDelegate.__init__(self)
def handleNotification(self, cHandle, data):
if self.light.session_key is None:
logger.info(
"Device [%s] is disconnected, ignoring received notification [unable to decrypt without active session]",
self.light.mac)
return
message = pckt.decrypt_packet(self.light.session_key, self.light.mac, data)
if message is None:
logger.warning("Failed to decrypt package [key: %s, data: %s]", self.light.session_key, data)
return
logger.debug("Received notification %s", message)
self.light.parseStatusResult(message)
class AwoxMeshLight:
def __init__(self, mac, mesh_name="unpaired", mesh_password="1234", mesh_id=0):
"""
Args :
mac: The light's MAC address as a string in the form AA:BB:CC:DD:EE:FF
mesh_name: The mesh name as a string.
mesh_password: The mesh password as a string.
mesh_id: The mesh id (address)
"""
self.mac = mac
self.mesh_id = mesh_id
self.btdevice = Peripheral()
self.session_key = None
self.command_char = None
self.status_char = None
self.mesh_name = mesh_name.encode()
self.mesh_password = mesh_password.encode()
# Light status
self.white_brightness = None
self.white_temperature = None
self.color_brightness = None
self.red = None
self.green = None
self.blue = None
self.color_mode = None
self.transition_mode = None
self.state = None
self.status_callback = None
def connect(self, mesh_name=None, mesh_password=None):
"""
Args :
mesh_name: The mesh name as a string.
mesh_password: The mesh password as a string.
"""
if mesh_name: self.mesh_name = mesh_name.encode()
if mesh_password: self.mesh_password = mesh_password.encode()
assert len(self.mesh_name) <= 16, "mesh_name can hold max 16 bytes"
assert len(self.mesh_password) <= 16, "mesh_password can hold max 16 bytes"
self.btdevice.connect(self.mac)
self.btdevice.setDelegate(Delegate(self))
pair_char = self.btdevice.getCharacteristics(uuid=PAIR_CHAR_UUID)[0]
self.session_random = urandom(8)
message = pckt.make_pair_packet(self.mesh_name, self.mesh_password, self.session_random)
pair_char.write(message)
self.status_char = self.btdevice.getCharacteristics(uuid=STATUS_CHAR_UUID)[0]
self.status_char.write(b'\x01')
reply = bytearray(pair_char.read())
if reply[0] == 0xd:
self.session_key = pckt.make_session_key(self.mesh_name, self.mesh_password, self.session_random, reply[1:9])
else:
if reply[0] == 0xe:
logger.info("Auth error : check name and password.")
else:
logger.info("Unexpected pair value : %s", repr(reply))
self.disconnect()
return False
return True
def waitForNotifications(self):
session_key = self.session_key
logger.info('[%s] Started waitForNotifications', self.mac)
while self.session_key == session_key:
try:
self.btdevice.waitForNotifications(5)
except btle.BTLEDisconnectError:
self.session_key = None
except Exception as error:
logger.debug("waitForNotifications error - %s", error)
# If we get the response to a write then we'll break
pass
logger.info('[%s] WaitForNotifications done', self.mac)
def connectWithRetry(self, num_tries=1, mesh_name=None, mesh_password=None):
"""
Args:
num_tries: The number of attempts to connect.
mesh_name: The mesh name as a string.
mesh_password: The mesh password as a string.
"""
connected = False
attempts = 0
while (not connected and attempts < num_tries):
try:
connected = self.connect(mesh_name, mesh_password)
except btle.BTLEDisconnectError:
logger.info("connection_error: retrying for %s time", attempts)
finally:
attempts += 1
return connected
def setMesh(self, new_mesh_name, new_mesh_password, new_mesh_long_term_key):
"""
Sets or changes the mesh network settings.
Args :
new_mesh_name: The new mesh name as a string, 16 bytes max.
new_mesh_password: The new mesh password as a string, 16 bytes max.
new_mesh_long_term_key: The new long term key as a string, 16 bytes max.
Returns :
True on success.
"""
assert (self.session_key), "Not connected"
assert len(new_mesh_name.encode()) <= 16, "new_mesh_name can hold max 16 bytes"
assert len(new_mesh_password.encode()) <= 16, "new_mesh_password can hold max 16 bytes"
assert len(new_mesh_long_term_key.encode()) <= 16, "new_mesh_long_term_key can hold max 16 bytes"
pair_char = self.btdevice.getCharacteristics(uuid=PAIR_CHAR_UUID)[0]
# FIXME : Removing the delegate as a workaround to a bluepy.btle.BTLEException
# similar to https://github.com/IanHarvey/bluepy/issues/182 That may be
# a bluepy bug or I'm using it wrong or both ...
self.btdevice.setDelegate(None)
message = pckt.encrypt(self.session_key, new_mesh_name.encode())
message.insert(0, 0x4)
pair_char.write(message)
message = pckt.encrypt(self.session_key, new_mesh_password.encode())
message.insert(0, 0x5)
pair_char.write(message)
message = pckt.encrypt(self.session_key, new_mesh_long_term_key.encode())
message.insert(0, 0x6)
pair_char.write(message)
time.sleep(1)
reply = bytearray(pair_char.read())
self.btdevice.setDelegate(Delegate(self))
if reply[0] == 0x7:
self.mesh_name = new_mesh_name.encode()
self.mesh_password = new_mesh_password.encode()
logger.info("Mesh network settings accepted.")
return True
else:
logger.info("Mesh network settings change failed : %s", repr(reply))
return False
def setMeshId(self, mesh_id):
"""
Sets the mesh id.
Args :
mesh_id: as a number.
"""
data = struct.pack("<H", mesh_id)
self.writeCommand(C_MESH_ADDRESS, data)
self.mesh_id = mesh_id
def writeCommand(self, command, data, dest=None, withResponse=True):
"""
Args:
command: The command, as a number.
data: The parameters for the command, as bytes.
dest: The destination mesh id, as a number. If None, this lightbulb's
mesh id will be used.
"""
assert (self.session_key)
if dest == None: dest = self.mesh_id
packet = pckt.make_command_packet(self.session_key, self.mac, dest, command, data)
try:
if not self.command_char:
self.command_char = self.btdevice.getCharacteristics(uuid=COMMAND_CHAR_UUID)[0]
logger.info("[%s][%d] Writing command %i data %s", self.mac, dest, command, repr(data))
return self.command_char.write(packet, withResponse=withResponse)
except btle.BTLEDisconnectError as err:
logger.error('Command failed, device is disconnected: %s', err)
self.session_key = None
raise err
except btle.BTLEInternalError as err:
if 'Helper not started' in str(err):
logger.error('Command failed, Helper not started, device is disconnected: %s', err)
self.session_key = None
else:
logger.exception('Command response failed to be correctly processed but we ignore it for now: %s', err)
def resetMesh(self):
"""
Restores the default name and password. Will disconnect the device.
"""
return self.writeCommand(C_MESH_RESET, b'\x00')
def readStatus(self):
packet = self.status_char.read()
return pckt.decrypt_packet(self.session_key, self.mac, packet)
def parseStatusResult(self, data):
command = struct.unpack('B', data[7:8])[0]
status = {}
if command == C_GET_STATUS_RECEIVED:
mode = struct.unpack('B', data[10:11])[0]
mesh_id = (struct.unpack('B', data[4:5])[0] * 256) + struct.unpack('B', data[3:4])[0]
white_brightness, white_temperature = struct.unpack('BB', data[11:13])
color_brightness, red, green, blue = struct.unpack('BBBB', data[13:17])
status = {
'mesh_id': mesh_id,
'state': (mode & 1) == 1,
'color_mode': ((mode >> 1) & 1) == 1,
'transition_mode': ((mode >> 2) & 1) == 1,
'red': red,
'green': green,
'blue': blue,
'white_temperature': white_temperature,
'white_brightness': white_brightness,
'color_brightness': color_brightness,
}
if command == C_NOTIFICATION_RECEIVED:
mesh_id = (struct.unpack('B', data[19:20])[0] * 256) + struct.unpack('B', data[10:11])[0]
mode = struct.unpack('B', data[12:13])[0]
white_brightness, white_temperature = struct.unpack('BB', data[13:15])
color_brightness, red, green, blue = struct.unpack('BBBB', data[15:19])
status = {
'mesh_id': mesh_id,
'state': (mode & 1) == 1,
'color_mode': ((mode >> 1) & 1) == 1,
'transition_mode': ((mode >> 2) & 1) == 1,
'red': red,
'green': green,
'blue': blue,
'white_temperature': white_temperature,
'white_brightness': white_brightness,
'color_brightness': color_brightness,
}
if status:
logger.debug('parsed status %s', status)
else:
logger.info('Unknown command [%d]', command)
if status and status['mesh_id'] == self.mesh_id:
logger.info('Update light status - mesh_id %d', status['mesh_id'])
self.state = status['state']
self.color_mode = status['color_mode']
self.transition_mode = status['transition_mode']
self.white_brightness = status['white_brightness']
self.white_temperature = status['white_temperature']
self.color_brightness = status['color_brightness']
self.red = status['red']
self.green = status['green']
self.blue = status['blue']
if status and self.status_callback:
self.status_callback(status)
def requestStatus(self, dest=None, withResponse=False):
logger.debug('requestStatus(%s)', dest)
data = struct.pack('B', 16)
return self.writeCommand(C_GET_STATUS_SENT, data, dest, withResponse)
def setColor(self, red, green, blue, dest=None):
"""
Args :
red, green, blue: between 0 and 0xff
"""
data = struct.pack('BBBB', 0x04, red, green, blue)
return self.writeCommand(C_COLOR, data, dest)
def setColorBrightness(self, brightness, dest=None):
"""
Args :
brightness: a value between 0xa and 0x64 ...
"""
data = struct.pack('B', brightness)
return self.writeCommand(C_COLOR_BRIGHTNESS, data, dest)
def setSequenceColorDuration(self, duration, dest=None):
"""
Args :
duration: in milliseconds.
"""
data = struct.pack("<I", duration)
return self.writeCommand(C_SEQUENCE_COLOR_DURATION, data, dest)
def setSequenceFadeDuration(self, duration, dest=None):
"""
Args:
duration: in milliseconds.
"""
data = struct.pack("<I", duration)
return self.writeCommand(C_SEQUENCE_FADE_DURATION, data, dest)
def setPreset(self, num, dest=None):
"""
Set a preset color sequence.
Args :
num: number between 0 and 6
"""
data = struct.pack('B', num)
return self.writeCommand(C_PRESET, data, dest)
def setWhiteBrightness(self, brightness, dest=None):
"""
Args :
brightness: between 1 and 0x7f
"""
data = struct.pack('B', brightness)
return self.writeCommand(C_WHITE_BRIGHTNESS, data, dest)
def setWhiteTemperature(self, temp, dest=None):
"""
Args :
temp: between 0 and 0x7f
"""
data = struct.pack('B', temp)
return self.writeCommand(C_WHITE_TEMPERATURE, data, dest)
def setWhite(self, temp, brightness, dest=None):
"""
Args :
temp: between 0 and 0x7f
brightness: between 1 and 0x7f
"""
data = struct.pack('B', temp)
self.writeCommand(C_WHITE_TEMPERATURE, data, dest)
data = struct.pack('B', brightness)
return self.writeCommand(C_WHITE_BRIGHTNESS, data, dest)
def on(self, dest=None):
""" Turns the light on.
"""
return self.writeCommand(C_POWER, b'\x01', dest)
def off(self, dest=None):
""" Turns the light off.
"""
return self.writeCommand(C_POWER, b'\x00', dest)
def reconnect(self):
logger.debug("Reconnecting.")
self.session_key = None
self.connect()
def disconnect(self):
logger.debug("Disconnecting.")
try:
self.btdevice.disconnect()
except Exception as err:
logger.warning('Disconnect failed: %s', err)
self.stop()
self.session_key = None
def stop(self):
logger.debug("force stoppping blue helper")
try:
self.btdevice.stop()
except Exception as err:
logger.warning('Stop failed: %s', err)
self.session_key = None
def getFirmwareRevision(self):
"""
Returns :
The firmware version as a null terminated utf-8 string.
"""
char = self.btdevice.getCharacteristics(uuid=btle.AssignedNumbers.firmwareRevisionString)[0]
return char.read()
def getHardwareRevision(self):
"""
Returns :
The hardware version as a null terminated utf-8 string.
"""
char = self.btdevice.getCharacteristics(uuid=btle.AssignedNumbers.hardwareRevisionString)[0]
return char.read()
def getModelNumber(self):
"""
Returns :
The model as a null terminated utf-8 string.
"""
char = self.btdevice.getCharacteristics(uuid=btle.AssignedNumbers.modelNumberString)[0]
return char.read()
def sendFirmware(self, firmware_path):
"""
Updates the light bulb's firmware. The light will blink green after receiving the new
firmware.
Args:
firmware_path: The path of the firmware file.
"""
assert (self.session_key)
with open(firmware_path, 'rb') as firmware_file:
firmware_data = firmware_file.read()
if not firmware_data:
return
ota_char = self.btdevice.getCharacteristics(uuid=OTA_CHAR_UUID)[0]
count = 0
for i in range(0, len(firmware_data), 0x10):
data = struct.pack('<H', count) + firmware_data[i:i + 0x10].ljust(0x10, b'\xff')
crc = pckt.crc16(data)
packet = data + struct.pack('<H', crc)
logger.debug("Writing packet %i of %i : %s", count + 1, len(firmware_data) / 0x10 + 1, repr(packet))
ota_char.write(packet)
# FIXME : When calling write with withResponse=True bluepy hangs after a few packets.
# Without any delay the light blinks once without accepting the firmware.
            # The chosen value is arbitrary.
time.sleep(0.01)
count += 1
data = struct.pack('<H', count)
crc = pckt.crc16(data)
packet = data + struct.pack('<H', crc)
logger.debug("Writing last packet : %s", repr(packet))
ota_char.write(packet)
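# --- Illustrative usage sketch (not part of the original module).  The MAC
# address and mesh credentials below are placeholders for a real device.
def _example_usage():
    light = AwoxMeshLight('A4:C1:38:00:00:00', mesh_name='unpaired', mesh_password='1234')
    if light.connectWithRetry(num_tries=3):
        light.on()
        light.setColor(0xff, 0x40, 0x00)   # warm orange
        light.requestStatus()              # reply arrives via the Delegate callback
        light.disconnect()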
| 36.022013
| 129
| 0.590048
|
48f76556cc035f694352abc7a2b1bdf11ef14d18
| 766
|
py
|
Python
|
scte/Scte35/DTMFDescriptor.py
|
jamesfining/scte
|
ad0eba0d9354bcc9f9d4858ef5fb385fc01c35c4
|
[
"Apache-2.0"
] | 9
|
2019-05-04T02:03:18.000Z
|
2021-02-25T01:30:12.000Z
|
scte/Scte35/DTMFDescriptor.py
|
jamesfining/scte
|
ad0eba0d9354bcc9f9d4858ef5fb385fc01c35c4
|
[
"Apache-2.0"
] | 3
|
2019-05-14T17:23:05.000Z
|
2020-10-12T15:42:36.000Z
|
scte/Scte35/DTMFDescriptor.py
|
jamesfining/scte
|
ad0eba0d9354bcc9f9d4858ef5fb385fc01c35c4
|
[
"Apache-2.0"
] | 7
|
2018-11-05T19:46:43.000Z
|
2020-07-26T01:38:59.000Z
|
from scte.Scte35 import scte35_enums
import logging
class DTMFDescriptor:
def __init__(self, bitarray_data, logger=None):
if logger is not None:
self._log = logger
else:
self._log = logging.getLogger()
new_descriptor = {}
new_descriptor["preroll"] = bitarray_data.read("uint:8")
new_descriptor["dtmf_count"] = bitarray_data.read("uint:3")
# Reserved
new_descriptor["reserved1"] = bitarray_data.read("uint:5")
if new_descriptor["dtmf_count"] > 0:
new_descriptor["DTMF_chars"] = []
for _ in range(new_descriptor["dtmf_count"]):
new_descriptor["DTMF_chars"].append(bitarray_data.read("uint:8"))
self.as_dict = new_descriptor
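# --- Illustrative sketch (not part of the original module).  The descriptor is
# normally parsed out of a larger SCTE-35 splice_info_section; any reader with a
# bitstring-style read("uint:n") API works.  Requires the `bitstring` package,
# and the byte values below are made up (preroll=240, three DTMF chars "123").
def _example_parse() -> dict:
    from bitstring import BitStream
    payload = BitStream(bytes=bytes([0xF0, 0x7F, 0x31, 0x32, 0x33]))
    return DTMFDescriptor(payload).as_dict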
| 33.304348
| 81
| 0.626632
|
f95a163a8785ee9fbdcd8e2ccff346567b7c3d86
| 1,506
|
py
|
Python
|
src/nitpick/style/fetchers/http.py
|
mjpieters/nitpick
|
610a884a36e45fc67df5b3c87a8569c3c2f7fd38
|
[
"MIT"
] | null | null | null |
src/nitpick/style/fetchers/http.py
|
mjpieters/nitpick
|
610a884a36e45fc67df5b3c87a8569c3c2f7fd38
|
[
"MIT"
] | null | null | null |
src/nitpick/style/fetchers/http.py
|
mjpieters/nitpick
|
610a884a36e45fc67df5b3c87a8569c3c2f7fd38
|
[
"MIT"
] | null | null | null |
"""Base HTTP fetcher, other fetchers can inherit from this to wrap http errors."""
from __future__ import annotations
from dataclasses import dataclass
import click
import requests
from loguru import logger
from nitpick.enums import OptionEnum
from nitpick.style.fetchers import Scheme
from nitpick.style.fetchers.base import StyleFetcher
@dataclass(repr=True, unsafe_hash=True)
class HttpFetcher(StyleFetcher):
"""Fetch a style from an http/https server."""
requires_connection = True
protocols: tuple[str, ...] = (Scheme.HTTP, Scheme.HTTPS) # type: ignore
def _do_fetch(self, url) -> str:
try:
contents = self._download(url)
except requests.ConnectionError as err:
logger.exception(f"Request failed with {err}")
click.secho(
f"The URL {url} could not be downloaded. Either your network is unreachable or the URL is broken."
f" Check the URL, fix your connection, or use "
f" {OptionEnum.OFFLINE.as_flake8_flag()} / {OptionEnum.OFFLINE.as_envvar()}=1",
fg="red",
err=True,
)
return ""
return contents
def _download(self, url, **kwargs) -> str:
logger.info(f"Downloading style from {url}")
if self.session is None:
raise RuntimeError("No session provided to fetcher")
response = self.session.get(url, **kwargs)
response.raise_for_status()
return response.text
| 33.466667
| 114
| 0.646746
|
2c6e98bf9669d716e23a61416a4165cb16f3a7be
| 3,852
|
py
|
Python
|
src/bloombox/schema/products/Flower_pb2.py
|
Bloombox/Python
|
1b125fbdf54efb390afe12aaa966f093218c4387
|
[
"Apache-2.0"
] | 4
|
2018-01-23T20:13:11.000Z
|
2018-07-28T22:36:09.000Z
|
src/bloombox/schema/products/Flower_pb2.py
|
Bloombox/Python
|
1b125fbdf54efb390afe12aaa966f093218c4387
|
[
"Apache-2.0"
] | 159
|
2018-02-02T09:55:52.000Z
|
2021-07-21T23:41:59.000Z
|
src/bloombox/schema/products/Flower_pb2.py
|
Bloombox/Python
|
1b125fbdf54efb390afe12aaa966f093218c4387
|
[
"Apache-2.0"
] | 3
|
2018-01-23T20:13:15.000Z
|
2020-01-17T01:07:53.000Z
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: products/Flower.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from base import ProductKey_pb2 as base_dot_ProductKey__pb2
from content import MaterialsData_pb2 as content_dot_MaterialsData__pb2
from content import ProductContent_pb2 as content_dot_ProductContent__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='products/Flower.proto',
package='opencannabis.products',
syntax='proto3',
serialized_pb=_b('\n\x15products/Flower.proto\x12\x15opencannabis.products\x1a\x15\x62\x61se/ProductKey.proto\x1a\x1b\x63ontent/MaterialsData.proto\x1a\x1c\x63ontent/ProductContent.proto\"\xa2\x01\n\x06\x46lower\x12*\n\x03key\x18\x01 \x01(\x0b\x32\x1d.opencannabis.base.ProductKey\x12\x35\n\x07product\x18\x02 \x01(\x0b\x32$.opencannabis.content.ProductContent\x12\x35\n\x08material\x18\x03 \x01(\x0b\x32#.opencannabis.content.MaterialsDataB9\n\x1eio.opencannabis.schema.productB\rFlowerProductH\x01P\x00\xa2\x02\x03OCSb\x06proto3')
,
dependencies=[base_dot_ProductKey__pb2.DESCRIPTOR,content_dot_MaterialsData__pb2.DESCRIPTOR,content_dot_ProductContent__pb2.DESCRIPTOR,])
_FLOWER = _descriptor.Descriptor(
name='Flower',
full_name='opencannabis.products.Flower',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='opencannabis.products.Flower.key', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='product', full_name='opencannabis.products.Flower.product', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='material', full_name='opencannabis.products.Flower.material', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=131,
serialized_end=293,
)
_FLOWER.fields_by_name['key'].message_type = base_dot_ProductKey__pb2._PRODUCTKEY
_FLOWER.fields_by_name['product'].message_type = content_dot_ProductContent__pb2._PRODUCTCONTENT
_FLOWER.fields_by_name['material'].message_type = content_dot_MaterialsData__pb2._MATERIALSDATA
DESCRIPTOR.message_types_by_name['Flower'] = _FLOWER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Flower = _reflection.GeneratedProtocolMessageType('Flower', (_message.Message,), dict(
DESCRIPTOR = _FLOWER,
__module__ = 'products.Flower_pb2'
# @@protoc_insertion_point(class_scope:opencannabis.products.Flower)
))
_sym_db.RegisterMessage(Flower)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\036io.opencannabis.schema.productB\rFlowerProductH\001P\000\242\002\003OCS'))
# @@protoc_insertion_point(module_scope)
| 41.419355
| 534
| 0.786085
|
5c6b5f4af71c3246580e1fa7c203d200d3b0e517
| 20,670
|
py
|
Python
|
graalpython/lib-python/3/distutils/sysconfig.py
|
transposit/graalpython
|
adadf5f211cc67a14bb3aca7c61219513d036b13
|
[
"UPL-1.0",
"Apache-2.0",
"OpenSSL"
] | 1
|
2019-05-28T13:04:32.000Z
|
2019-05-28T13:04:32.000Z
|
graalpython/lib-python/3/distutils/sysconfig.py
|
transposit/graalpython
|
adadf5f211cc67a14bb3aca7c61219513d036b13
|
[
"UPL-1.0",
"Apache-2.0",
"OpenSSL"
] | null | null | null |
graalpython/lib-python/3/distutils/sysconfig.py
|
transposit/graalpython
|
adadf5f211cc67a14bb3aca7c61219513d036b13
|
[
"UPL-1.0",
"Apache-2.0",
"OpenSSL"
] | null | null | null |
"""Provide access to Python's configuration information. The specific
configuration variables available depend heavily on the platform and
configuration. The values may be retrieved using
get_config_var(name), and the list of variables is available via
get_config_vars().keys(). Additional convenience functions are also
available.
Written by: Fred L. Drake, Jr.
Email: <fdrake@acm.org>
"""
import _imp
import os
import re
import sys
from .errors import DistutilsPlatformError
# These are needed in a couple of spots, so just compute them once.
PREFIX = os.path.normpath(sys.prefix)
EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
BASE_PREFIX = os.path.normpath(sys.base_prefix)
BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix)
# Path to the base directory of the project. On Windows the binary may
# live in project/PCbuild/win32 or project/PCbuild/amd64.
# set for cross builds
if "_PYTHON_PROJECT_BASE" in os.environ:
project_base = os.path.abspath(os.environ["_PYTHON_PROJECT_BASE"])
else:
if sys.executable:
project_base = os.path.dirname(os.path.abspath(sys.executable))
else:
# sys.executable can be empty if argv[0] has been changed and Python is
# unable to retrieve the real program name
project_base = os.getcwd()
# python_build: (Boolean) if true, we're either building Python or
# building an extension with an un-installed Python, so we use
# different (hard-wired) directories.
# Setup.local is available for Makefile builds including VPATH builds,
# Setup.dist is available on Windows
def _is_python_source_dir(d):
for fn in ("Setup.dist", "Setup.local"):
if os.path.isfile(os.path.join(d, "Modules", fn)):
return True
return False
_sys_home = getattr(sys, '_home', None)
if os.name == 'nt':
def _fix_pcbuild(d):
if d and os.path.normcase(d).startswith(
os.path.normcase(os.path.join(PREFIX, "PCbuild"))):
return PREFIX
return d
project_base = _fix_pcbuild(project_base)
_sys_home = _fix_pcbuild(_sys_home)
def _python_build():
if _sys_home:
return _is_python_source_dir(_sys_home)
return _is_python_source_dir(project_base)
python_build = _python_build()
# Calculate the build qualifier flags if they are defined. Adding the flags
# to the include and lib directories only makes sense for an installation, not
# an in-source build.
build_flags = ''
try:
if not python_build:
build_flags = sys.abiflags
except AttributeError:
# It's not a configure-based build, so the sys module doesn't have
# this attribute, which is fine.
pass
def get_python_version():
"""Return a string containing the major and minor Python version,
leaving off the patchlevel. Sample return values could be '1.5'
or '2.2'.
"""
return '%d.%d' % sys.version_info[:2]
def get_python_inc(plat_specific=0, prefix=None):
"""Return the directory containing installed Python header files.
If 'plat_specific' is false (the default), this is the path to the
non-platform-specific header files, i.e. Python.h and so on;
otherwise, this is the path to platform-specific header files
(namely pyconfig.h).
If 'prefix' is supplied, use it instead of sys.base_prefix or
sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
"""
if prefix is None:
prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
if os.name == "posix":
if python_build:
# Assume the executable is in the build directory. The
# pyconfig.h file should be in the same directory. Since
# the build directory may not be the source directory, we
# must use "srcdir" from the makefile to find the "Include"
# directory.
if plat_specific:
return _sys_home or project_base
else:
incdir = os.path.join(get_config_var('srcdir'), 'Include')
return os.path.normpath(incdir)
python_dir = 'python' + get_python_version() + build_flags
return os.path.join(prefix, "include", python_dir)
elif os.name == "nt":
if python_build:
# Include both the include and PC dir to ensure we can find
# pyconfig.h
return (os.path.join(prefix, "include") + os.path.pathsep +
os.path.join(prefix, "PC"))
return os.path.join(prefix, "include")
else:
raise DistutilsPlatformError(
"I don't know where Python installs its C header files "
"on platform '%s'" % os.name)
def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
"""Return the directory containing the Python library (standard or
site additions).
If 'plat_specific' is true, return the directory containing
platform-specific modules, i.e. any module from a non-pure-Python
module distribution; otherwise, return the platform-shared library
directory. If 'standard_lib' is true, return the directory
containing standard Python library modules; otherwise, return the
directory for site-specific modules.
If 'prefix' is supplied, use it instead of sys.base_prefix or
sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
"""
if prefix is None:
if standard_lib:
prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
else:
prefix = plat_specific and EXEC_PREFIX or PREFIX
if os.name == "posix":
libpython = os.path.join(prefix,
"lib", "python" + get_python_version())
if standard_lib:
return libpython
else:
return os.path.join(libpython, "site-packages")
elif os.name == "nt":
if standard_lib:
return os.path.join(prefix, "Lib")
else:
return os.path.join(prefix, "Lib", "site-packages")
else:
raise DistutilsPlatformError(
"I don't know where Python installs its library "
"on platform '%s'" % os.name)
def customize_compiler(compiler):
"""Do any platform-specific customization of a CCompiler instance.
Mainly needed on Unix, so we can plug in the information that
varies across Unices and is stored in Python's Makefile.
"""
if compiler.compiler_type == "unix":
if sys.platform == "darwin":
# Perform first-time customization of compiler-related
# config vars on OS X now that we know we need a compiler.
# This is primarily to support Pythons from binary
# installers. The kind and paths to build tools on
# the user system may vary significantly from the system
# that Python itself was built on. Also the user OS
# version and build tools may not support the same set
# of CPU architectures for universal builds.
global _config_vars
# Use get_config_var() to ensure _config_vars is initialized.
if not get_config_var('CUSTOMIZED_OSX_COMPILER'):
import _osx_support
_osx_support.customize_compiler(_config_vars)
_config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'
(cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \
get_config_vars('CC', 'CXX', 'CFLAGS',
'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS')
if 'CC' in os.environ:
newcc = os.environ['CC']
if (sys.platform == 'darwin'
and 'LDSHARED' not in os.environ
and ldshared.startswith(cc)):
# On OS X, if CC is overridden, use that as the default
# command for LDSHARED as well
ldshared = newcc + ldshared[len(cc):]
cc = newcc
if 'CXX' in os.environ:
cxx = os.environ['CXX']
if 'LDSHARED' in os.environ:
ldshared = os.environ['LDSHARED']
if 'CPP' in os.environ:
cpp = os.environ['CPP']
else:
cpp = cc + " -E" # not always
if 'LDFLAGS' in os.environ:
ldshared = ldshared + ' ' + os.environ['LDFLAGS']
if 'CFLAGS' in os.environ:
cflags = cflags + ' ' + os.environ['CFLAGS']
ldshared = ldshared + ' ' + os.environ['CFLAGS']
if 'CPPFLAGS' in os.environ:
cpp = cpp + ' ' + os.environ['CPPFLAGS']
cflags = cflags + ' ' + os.environ['CPPFLAGS']
ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
if 'AR' in os.environ:
ar = os.environ['AR']
if 'ARFLAGS' in os.environ:
archiver = ar + ' ' + os.environ['ARFLAGS']
else:
archiver = ar + ' ' + ar_flags
cc_cmd = cc + ' ' + cflags
compiler.set_executables(
preprocessor=cpp,
compiler=cc_cmd,
compiler_so=cc_cmd + ' ' + ccshared,
compiler_cxx=cxx,
linker_so=ldshared,
linker_exe=cc,
archiver=archiver)
compiler.shared_lib_extension = shlib_suffix
def get_config_h_filename():
"""Return full pathname of installed pyconfig.h file."""
if python_build:
if os.name == "nt":
inc_dir = os.path.join(_sys_home or project_base, "PC")
else:
inc_dir = _sys_home or project_base
else:
inc_dir = get_python_inc(plat_specific=1)
return os.path.join(inc_dir, 'pyconfig.h')
def get_makefile_filename():
"""Return full pathname of installed Makefile from the Python build."""
if python_build:
return os.path.join(_sys_home or project_base, "Makefile")
lib_dir = get_python_lib(plat_specific=0, standard_lib=1)
config_file = 'config-{}{}'.format(get_python_version(), build_flags)
if hasattr(sys.implementation, '_multiarch'):
config_file += '-%s' % sys.implementation._multiarch
return os.path.join(lib_dir, config_file, 'Makefile')
def parse_config_h(fp, g=None):
"""Parse a config.h-style file.
A dictionary containing name/value pairs is returned. If an
optional dictionary is passed in as the second argument, it is
used instead of a new dictionary.
"""
if g is None:
g = {}
define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")
#
while True:
line = fp.readline()
if not line:
break
m = define_rx.match(line)
if m:
n, v = m.group(1, 2)
try: v = int(v)
except ValueError: pass
g[n] = v
else:
m = undef_rx.match(line)
if m:
g[m.group(1)] = 0
return g
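# --- Hypothetical usage sketch (not part of the original module) ---
# parse_config_h() accepts any file-like object, so a small in-memory sample
# is enough to show the name/value dictionary it builds.
def _demo_parse_config_h():
    import io
    sample = io.StringIO(
        "#define HAVE_FORK 1\n"
        "/* #undef HAVE_SPAWN */\n"
    )
    return parse_config_h(sample)   # -> {'HAVE_FORK': 1, 'HAVE_SPAWN': 0}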
# Regexes needed for parsing Makefile (and similar syntaxes,
# like old-style Setup files).
_variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
def parse_makefile(fn, g=None):
"""Parse a Makefile-style file.
A dictionary containing name/value pairs is returned. If an
optional dictionary is passed in as the second argument, it is
used instead of a new dictionary.
"""
from distutils.text_file import TextFile
fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape")
if g is None:
g = {}
done = {}
notdone = {}
while True:
line = fp.readline()
if line is None: # eof
break
m = _variable_rx.match(line)
if m:
n, v = m.group(1, 2)
v = v.strip()
# `$$' is a literal `$' in make
tmpv = v.replace('$$', '')
if "$" in tmpv:
notdone[n] = v
else:
try:
v = int(v)
except ValueError:
# insert literal `$'
done[n] = v.replace('$$', '$')
else:
done[n] = v
# Variables with a 'PY_' prefix in the makefile. These need to
# be made available without that prefix through sysconfig.
# Special care is needed to ensure that variable expansion works, even
# if the expansion uses the name without a prefix.
renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')
# do variable interpolation here
while notdone:
for name in list(notdone):
value = notdone[name]
m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
if m:
n = m.group(1)
found = True
if n in done:
item = str(done[n])
elif n in notdone:
# get it on a subsequent round
found = False
elif n in os.environ:
# do it like make: fall back to environment
item = os.environ[n]
elif n in renamed_variables:
if name.startswith('PY_') and name[3:] in renamed_variables:
item = ""
elif 'PY_' + n in notdone:
found = False
else:
item = str(done['PY_' + n])
else:
done[n] = item = ""
if found:
after = value[m.end():]
value = value[:m.start()] + item + after
if "$" in after:
notdone[name] = value
else:
try: value = int(value)
except ValueError:
done[name] = value.strip()
else:
done[name] = value
del notdone[name]
if name.startswith('PY_') \
and name[3:] in renamed_variables:
name = name[3:]
if name not in done:
done[name] = value
else:
# bogus variable reference; just drop it since we can't deal
del notdone[name]
fp.close()
# strip spurious spaces
for k, v in done.items():
if isinstance(v, str):
done[k] = v.strip()
# save the results in the global dictionary
g.update(done)
return g
def expand_makefile_vars(s, vars):
"""Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in
'string' according to 'vars' (a dictionary mapping variable names to
values). Variables not present in 'vars' are silently expanded to the
empty string. The variable values in 'vars' should not contain further
variable expansions; if 'vars' is the output of 'parse_makefile()',
you're fine. Returns a variable-expanded version of 's'.
"""
# This algorithm does multiple expansion, so if vars['foo'] contains
# "${bar}", it will expand ${foo} to ${bar}, and then expand
# ${bar}... and so forth. This is fine as long as 'vars' comes from
# 'parse_makefile()', which takes care of such expansions eagerly,
# according to make's variable expansion semantics.
while True:
m = _findvar1_rx.search(s) or _findvar2_rx.search(s)
if m:
(beg, end) = m.span()
            # missing names expand to the empty string, as the docstring promises
            s = s[0:beg] + vars.get(m.group(1), '') + s[end:]
else:
break
return s
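# --- Hypothetical usage sketch (not part of the original module) ---
# expand_makefile_vars() substitutes $(NAME)/${NAME} references using a flat
# dictionary such as the one produced by parse_makefile().
def _demo_expand_makefile_vars():
    makefile_vars = {'CC': 'gcc', 'CFLAGS': '-O2 -Wall'}
    return expand_makefile_vars('$(CC) ${CFLAGS} -c foo.c', makefile_vars)
    # -> 'gcc -O2 -Wall -c foo.c'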
_config_vars = None
def _init_posix():
"""Initialize the module as appropriate for POSIX systems."""
# _sysconfigdata is generated at build time, see the sysconfig module
name = os.environ.get('_PYTHON_SYSCONFIGDATA_NAME',
'_sysconfigdata_{abi}_{platform}_{multiarch}'.format(
abi=sys.abiflags,
platform=sys.platform,
multiarch=getattr(sys.implementation, '_multiarch', ''),
))
_temp = __import__(name, globals(), locals(), ['build_time_vars'], 0)
build_time_vars = _temp.build_time_vars
global _config_vars
_config_vars = {}
_config_vars.update(build_time_vars)
def _init_nt():
"""Initialize the module as appropriate for NT"""
g = {}
# set basic install directories
g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)
# XXX hmmm.. a normal install puts include files here
g['INCLUDEPY'] = get_python_inc(plat_specific=0)
g['EXT_SUFFIX'] = _imp.extension_suffixes()[0]
g['EXE'] = ".exe"
g['VERSION'] = get_python_version().replace(".", "")
g['BINDIR'] = os.path.dirname(os.path.abspath(sys.executable))
global _config_vars
_config_vars = g
def get_config_vars(*args):
"""With no arguments, return a dictionary of all configuration
variables relevant for the current platform. Generally this includes
everything needed to build extensions and install both pure modules and
extensions. On Unix, this means every variable defined in Python's
installed Makefile; on Windows it's a much smaller set.
With arguments, return a list of values that result from looking up
each argument in the configuration variable dictionary.
"""
global _config_vars
if _config_vars is None:
func = globals().get("_init_" + os.name)
if func:
func()
else:
_config_vars = {}
# Normalized versions of prefix and exec_prefix are handy to have;
# in fact, these are the standard versions used most places in the
# Distutils.
_config_vars['prefix'] = PREFIX
_config_vars['exec_prefix'] = EXEC_PREFIX
# For backward compatibility, see issue19555
SO = _config_vars.get('EXT_SUFFIX')
if SO is not None:
_config_vars['SO'] = SO
# Always convert srcdir to an absolute path
srcdir = _config_vars.get('srcdir', project_base)
if os.name == 'posix':
if python_build:
# If srcdir is a relative path (typically '.' or '..')
# then it should be interpreted relative to the directory
# containing Makefile.
base = os.path.dirname(get_makefile_filename())
srcdir = os.path.join(base, srcdir)
else:
# srcdir is not meaningful since the installation is
# spread about the filesystem. We choose the
# directory containing the Makefile since we know it
# exists.
srcdir = os.path.dirname(get_makefile_filename())
_config_vars['srcdir'] = os.path.abspath(os.path.normpath(srcdir))
# Convert srcdir into an absolute path if it appears necessary.
# Normally it is relative to the build directory. However, during
# testing, for example, we might be running a non-installed python
# from a different directory.
if python_build and os.name == "posix":
base = project_base
if (not os.path.isabs(_config_vars['srcdir']) and
base != os.getcwd()):
# srcdir is relative and we are not in the same directory
# as the executable. Assume executable is in the build
# directory and make srcdir absolute.
srcdir = os.path.join(base, _config_vars['srcdir'])
_config_vars['srcdir'] = os.path.normpath(srcdir)
# OS X platforms require special customization to handle
# multi-architecture, multi-os-version installers
if sys.platform == 'darwin':
import _osx_support
_osx_support.customize_config_vars(_config_vars)
if args:
vals = []
for name in args:
vals.append(_config_vars.get(name))
return vals
else:
return _config_vars
def get_config_var(name):
"""Return the value of a single variable using the dictionary
returned by 'get_config_vars()'. Equivalent to
get_config_vars().get(name)
"""
if name == 'SO':
import warnings
warnings.warn('SO is deprecated, use EXT_SUFFIX', DeprecationWarning, 2)
return get_config_vars().get(name)
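# --- Hypothetical usage sketch (not part of the original module) ---
# get_config_var()/get_config_vars() are the public query points; on POSIX the
# values come from the installed Makefile, on Windows from _init_nt().
def _demo_config_queries():
    ext_suffix = get_config_var('EXT_SUFFIX')      # e.g. '.pyd' on Windows, '.cpython-*.so' on POSIX
    prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
    return ext_suffix, prefix, exec_prefix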
if sys.implementation.name == "graalpython":
# Truffle: import our overrides
from distutils.sysconfig_graalpython import *
from distutils.sysconfig_graalpython import _config_vars # needed by setuptools
| 37.109515
| 94
| 0.599952
|
c3c8cc78d84d0bd7e48bd08341cd0e21f60bf958
| 468
|
py
|
Python
|
website/migrations/0030_pocket_created.py
|
CrowdcoinSA/crowdcoin-platform
|
72d48d4a7ac4abe7d02e3f369ca62a29c402c9d9
|
[
"MIT"
] | null | null | null |
website/migrations/0030_pocket_created.py
|
CrowdcoinSA/crowdcoin-platform
|
72d48d4a7ac4abe7d02e3f369ca62a29c402c9d9
|
[
"MIT"
] | 7
|
2020-02-12T00:10:22.000Z
|
2022-01-13T00:43:13.000Z
|
website/migrations/0030_pocket_created.py
|
CrowdcoinSA/crowdcoin-platform
|
72d48d4a7ac4abe7d02e3f369ca62a29c402c9d9
|
[
"MIT"
] | 1
|
2020-10-05T12:20:19.000Z
|
2020-10-05T12:20:19.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2017-01-17 15:56
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('website', '0029_auto_20170117_1627'),
]
operations = [
migrations.AddField(
model_name='pocket',
name='created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
]
| 22.285714
| 69
| 0.623932
|
ca8eb04a26164a4f2f6cba2676a10f2468f7cedc
| 3,022
|
py
|
Python
|
utils.py
|
pedbrgs/Tuning-Darknet
|
8be14cc2ae2e8a5ec8a3cf97bd53f186d56acfd9
|
[
"MIT"
] | 1
|
2021-11-01T02:37:22.000Z
|
2021-11-01T02:37:22.000Z
|
utils.py
|
pedbrgs/Tuning-Darknet
|
8be14cc2ae2e8a5ec8a3cf97bd53f186d56acfd9
|
[
"MIT"
] | null | null | null |
utils.py
|
pedbrgs/Tuning-Darknet
|
8be14cc2ae2e8a5ec8a3cf97bd53f186d56acfd9
|
[
"MIT"
] | null | null | null |
import subprocess
def evaluate(cfg, data):
# Running evaluation algorithm and saving to temporary file
f = open('temp.txt', mode = 'w+')
filename = cfg.split('.')[0]
# Performing the evaluation
command = './darknet detector map ' + data + ' ' + cfg + ' weights/' + filename + '_best.weights'
subprocess.call(command, shell = True, stdout = f)
    # Reading temporary file to extract the mAP metric
    fitness = None  # stays None if no 'mAP@0.50' line is found in the Darknet output
    f.seek(0, 0)
for line in f.readlines():
if 'mAP@0.50' in line:
fitness = float(line.split('= ')[-1].split(',')[0])
f.close()
return fitness
def train(cfg, data, model):
# Training command
command = './darknet detector train ' + data + ' ' + cfg + ' '
# Train model
if model.lower() == 'tiny-yolov4':
command += 'yolov4-tiny.conv.29 -dont_show -map'
subprocess.call(command, shell = True)
elif model.lower() == 'yolov4':
command += 'yolov4.conv.137 -dont_show -map'
subprocess.call(command, shell = True)
elif model.lower() == 'tiny-yolov3':
command += 'yolov3-tiny.conv.15 -dont_show -map'
subprocess.call(command, shell = True)
elif model.lower() == 'yolov3':
command += 'darknet53.conv.74 -dont_show -map'
subprocess.call(command, shell = True)
else:
print('Incompatible YOLO version')
def reset(cfg, names):
    # 'names' mirrors update(): only the hyperparameters listed there are restored
f = open(cfg, mode = 'r')
lines = f.readlines()
# Updating hyperparameters in the model
for i, line in enumerate(lines):
if 'momentum' in line and 'momentum' in names:
lines[i] = 'momentum=' + str(0.9) + '\n'
elif 'decay' in line and 'decay' in names:
lines[i] = 'decay=' + str(0.0005) + '\n'
elif 'learning_rate' in line and 'learning_rate' in names:
lines[i] = 'learning_rate=' + str(0.001) + '\n'
elif 'ignore_thresh' in line and 'ignore_thresh' in names:
lines[i] = 'ignore_thresh=' + str(0.7) + '\n'
else:
pass
f = open(cfg, mode = 'w')
# Writing new hyperparameters in the configuration file
f.writelines(lines)
f.close()
def update(cfg, hyperparams, names):
f = open(cfg, mode = 'r')
lines = f.readlines()
# Updating hyperparameters in the model
for i, line in enumerate(lines):
if 'momentum' in line and 'momentum' in names:
lines[i] = 'momentum=' + str(hyperparams['momentum']) + '\n'
elif 'decay' in line and 'decay' in names:
lines[i] = 'decay=' + str(hyperparams['decay']) + '\n'
elif 'learning_rate' in line and 'learning_rate' in names:
lines[i] = 'learning_rate=' + str(hyperparams['learning_rate']) + '\n'
elif 'ignore_thresh' in line and 'ignore_thresh' in names:
lines[i] = 'ignore_thresh=' + str(hyperparams['ignore_thresh']) + '\n'
else:
pass
f = open(cfg, mode = 'w')
# Writing new hyperparameters in the configuration file
f.writelines(lines)
f.close()
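# --- Hypothetical usage sketch (not part of the original project) ---
# update() rewrites matching hyperparameter lines in a Darknet .cfg file;
# the file contents and values below are made up for illustration.
def _demo_update():
    import os
    import tempfile
    fd, cfg_path = tempfile.mkstemp(suffix='.cfg')
    with os.fdopen(fd, 'w') as f:
        f.write("[net]\nlearning_rate=0.001\nmomentum=0.9\ndecay=0.0005\n")
    update(cfg_path,
           hyperparams={'learning_rate': 0.01, 'momentum': 0.95},
           names=['learning_rate', 'momentum'])
    with open(cfg_path) as f:
        updated = f.read()   # learning_rate=0.01 and momentum=0.95; decay is untouched
    os.remove(cfg_path)
    return updated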
| 33.955056
| 101
| 0.585705
|
30bf99a9432bfe8414c21ac42efe58c5f807b4f4
| 2,832
|
py
|
Python
|
donkeycar/tests/test_util_data.py
|
bauchter-work/donkeycar
|
5b7c303e964e930fb02110c391f8ddba23002263
|
[
"MIT"
] | null | null | null |
donkeycar/tests/test_util_data.py
|
bauchter-work/donkeycar
|
5b7c303e964e930fb02110c391f8ddba23002263
|
[
"MIT"
] | null | null | null |
donkeycar/tests/test_util_data.py
|
bauchter-work/donkeycar
|
5b7c303e964e930fb02110c391f8ddba23002263
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jun 25 14:17:59 2017
@author: wroscoe
"""
import unittest
import pytest
from donkeycar.utils import *
def create_lbin(marker_index):
""" Create a linear binary array with value set """
l = [0] * 15
l[marker_index] = 1
return l
class TestLinearBin(unittest.TestCase):
def test_zero(self):
res = linear_bin(0)
assert res[7] == 1
assert sum(res[:7]) == 0
assert sum(res[8:]) == 0
def test_positive(self):
res = linear_bin(1)
assert res[14] == 1
assert sum(res[:14]) == 0
def test_negative(self):
res = linear_bin(-1)
assert res[0] == 1
assert sum(res[1:]) == 0
def test_illegal_type(self):
with pytest.raises(TypeError):
linear_bin('0')
class TestLinearUnbin(unittest.TestCase):
def test_zero(self):
l = create_lbin(7)
res = linear_unbin(l)
assert res == 0.0
def test_positive(self):
l = create_lbin(14)
res = linear_unbin(l)
assert res == 1.0
def test_negative(self):
l = create_lbin(0)
res = linear_unbin(l)
assert res == -1.0
def test_empty_list(self):
res = linear_unbin( [0] * 15 )
assert res == -1.0
class TestMapping(unittest.TestCase):
def test_positive(self):
min = map_range(-100, -100, 100, 0, 1000)
half = map_range(0, -100, 100, 0, 1000)
max = map_range(100, -100, 100, 0, 1000)
assert min == 0
assert half == 500
assert max == 1000
def test_negative(self):
ranges = (0, 100, 0, 1000)
min = map_range(0, *ranges)
half = map_range(50, *ranges)
max = map_range(100, *ranges)
assert min == 0
assert half == 500
assert max == 1000
def test_reverse(self):
ranges = (100, 0, 0, 1000)
min = map_range(0, *ranges)
half = map_range(50, *ranges)
max = map_range(100, *ranges)
assert min == 1000
assert half == 500
assert max == 0
class TestMergeDicts(unittest.TestCase):
def test_merge_two_dicts(self):
d1 = { 'a' : 1, 'b' : 2, 'c' : 3 }
d2 = { 10 : 'hi', 'bob' : 20 }
res = merge_two_dicts(d1, d2)
assert res == { 'a' : 1, 'b' : 2, 'c' : 3, 10 : 'hi', 'bob' : 20 }
class TestParamGen(unittest.TestCase):
def test_param_gen(self):
g = param_gen({ 'a' : [ 'opt1', 'opt2' ], 'b' : [ 'opt3', 'opt4' ] })
l = [ x for x in g ]
expected = [
{'a': 'opt1', 'b': 'opt3'},
{'a': 'opt1', 'b': 'opt4'},
{'a': 'opt2', 'b': 'opt3'},
{'a': 'opt2', 'b': 'opt4'}
]
self.assertCountEqual(expected, l)
| 24.413793
| 77
| 0.520127
|
8ebd7175dcee50bb550295605b5a216a9136fb79
| 3,172
|
py
|
Python
|
ermaket/utils/xml/xmltuple.py
|
SqrtMinusOne/ERMaket_Experiment
|
c4a7b61651edd15a619d9b690e2aaeaab4de282d
|
[
"Apache-2.0"
] | null | null | null |
ermaket/utils/xml/xmltuple.py
|
SqrtMinusOne/ERMaket_Experiment
|
c4a7b61651edd15a619d9b690e2aaeaab4de282d
|
[
"Apache-2.0"
] | null | null | null |
ermaket/utils/xml/xmltuple.py
|
SqrtMinusOne/ERMaket_Experiment
|
c4a7b61651edd15a619d9b690e2aaeaab4de282d
|
[
"Apache-2.0"
] | null | null | null |
from magic_repr import make_repr
from .xml_object import XMLObject, ConvertableXML
__all__ = ['xmltuple', 'make_to_xml']
class TextTag:
@classmethod
def from_xml(cls, tag):
return tag.text
def make_to_xml(tag_name, attributes, kws=None):
if kws is None:
kws = []
def to_xml(self):
tag = self.soup.new_tag(tag_name)
for key in attributes + kws:
value = getattr(self, key)
if isinstance(value, XMLObject):
tag.append(value.to_xml())
elif kws and key in kws and value is not None:
tag[key] = value
elif value is not None:
tag.append(self.new_tag(key, value))
return tag
return to_xml
def make_from_xml(children_classes):
children_classes = {} if children_classes is None else children_classes
@classmethod
def from_xml(cls, tag):
attrs = {
child.name: children_classes.get(child.name,
TextTag).from_xml(child)
for child in tag.find_all(True, recursive=False)
}
return cls._make_args(**attrs, **tag.attrs)
return from_xml
def try_cast(key, value, types):
type_ = types.get(key)
if type_ is not None:
return type_(value)
return value
def make_init(attributes, types):
def __init__(self, *args, **kwargs):
[setattr(self, key, None) for key in attributes]
[
setattr(self, key, try_cast(key, value, types))
for key, value in zip(attributes, args)
]
[
setattr(self, key, try_cast(key, value, types))
for key, value in kwargs.items()
]
return __init__
def make_to_object(tag_name, attributes, kws=None, types={}):
if kws is None:
kws = []
def to_object(self, add_name=True):
res = {}
if add_name:
res['_tag_name'] = tag_name
for key in attributes + kws:
value = getattr(self, key)
if isinstance(value, ConvertableXML):
res[key] = value.to_object(add_name=False)
else:
res[key] = try_cast(key, value, types)
return res
return to_object
def make_iter(attributes):
def __iter__(self):
for attr in attributes:
yield getattr(self, attr)
return __iter__
def xmltuple(
classname, tag_name, attributes, children_classes=None, kws=[], types={}
):
if isinstance(children_classes, (list, tuple)):
children_classes = {c_._tag_name: c_ for c_ in children_classes}
class_ = type(
classname, (XMLObject, ConvertableXML), {
"__init__": make_init(attributes, types),
"to_xml": make_to_xml(tag_name, attributes, kws),
"_from_xml": make_from_xml(children_classes),
"_tag_name": tag_name,
"to_object": make_to_object(tag_name, attributes, kws, types),
"__iter__": make_iter(attributes),
**{key: None
for key in attributes}
}
)
class_.__repr__ = make_repr(*attributes, *kws)
return class_
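# --- Hypothetical usage sketch (not part of the original module) ---
# xmltuple() builds a small record-like class; the attribute handling shown
# here (init/iter/to_object) works on its own, while to_xml()/from_xml() also
# need the XMLObject soup machinery. Assumes XMLObject/ConvertableXML impose
# no extra constructor requirements.
def _demo_xmltuple():
    Point = xmltuple('Point', 'point', ['x', 'y'], types={'x': int, 'y': int})
    p = Point('1', 2)            # positional args are cast through `types`, so p.x == 1
    return list(p), p.to_object()
    # -> ([1, 2], {'_tag_name': 'point', 'x': 1, 'y': 2})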
| 27.344828
| 76
| 0.586381
|
8b4858c7399ebf0ed7b81d5e750033bef016ba97
| 1,285
|
py
|
Python
|
scyllaso/bin/make_cpu_config.py
|
xemul/scylla-stress-orchestrator
|
f0f142d93966db9ef6fde38992dd93c5a6b32e31
|
[
"Apache-2.0"
] | 3
|
2021-04-06T16:06:41.000Z
|
2021-10-05T09:53:11.000Z
|
scyllaso/bin/make_cpu_config.py
|
xemul/scylla-stress-orchestrator
|
f0f142d93966db9ef6fde38992dd93c5a6b32e31
|
[
"Apache-2.0"
] | 32
|
2021-04-19T08:01:27.000Z
|
2021-12-08T12:38:44.000Z
|
scyllaso/bin/make_cpu_config.py
|
xemul/scylla-stress-orchestrator
|
f0f142d93966db9ef6fde38992dd93c5a6b32e31
|
[
"Apache-2.0"
] | 5
|
2021-05-18T18:12:38.000Z
|
2021-12-29T08:36:43.000Z
|
import argparse
def cli():
parser = argparse.ArgumentParser()
parser.add_argument("nr_cpus", help="Number of cpus", nargs=1)
parser.add_argument("cpus", help="The cpus", nargs='*')
args = parser.parse_args()
nr_cpus = int(args.nr_cpus[0])
cpu_bit_list = []
for i in range(0, nr_cpus):
cpu_bit_list.append('0')
for cpu_string in args.cpus:
cpu = int(cpu_string)
cpu_bit_list[cpu] = '1'
print(to_cpu_set(cpu_bit_list))
print("irq_cpu_mask: " + to_irq_cpu_mask(cpu_bit_list))
def to_cpu_set(cpu_bit_list):
cpu_set = "CPUSET=\"--cpuset "
first = True
for cpu in range(0, len(cpu_bit_list)):
if cpu_bit_list[cpu] == '0':
if first:
first = False
else:
cpu_set = cpu_set + ","
cpu_set = cpu_set + str(cpu)
cpu_set = cpu_set + '"'
return cpu_set
def to_irq_cpu_mask(cpu_bit_list):
cpu_bit_list.reverse()
s = "".join(cpu_bit_list)
a = hex(int(s, 2))
# remove the first 2 chars (0x)
a = a[2:]
# prefix with zero's
a = a.zfill(24)
r = ""
for i in range(0, 24):
if i % 8 == 0:
if i > 0:
r += ","
r += "0x"
r += a[i]
return r
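# --- Hypothetical usage sketch (not part of the original script) ---
# For 4 CPUs with CPUs 0 and 1 selected on the command line, the bit list
# below is what cli() would build; both outputs are derived from it.
def _demo_masks():
    cpu_bits = ['1', '1', '0', '0']                 # CPUs 0 and 1 selected
    cpuset = to_cpu_set(list(cpu_bits))             # -> 'CPUSET="--cpuset 2,3"'
    irq_mask = to_irq_cpu_mask(list(cpu_bits))      # -> '0x00000000,0x00000000,0x00000003'
    # copies are passed because to_irq_cpu_mask() reverses its argument in place
    return cpuset, irq_mask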
| 21.416667
| 66
| 0.540856
|
3a4bd69cae63ea842a8360576bce5d43860e1990
| 18,275
|
py
|
Python
|
VirtualBox-5.0.0/src/VBox/ValidationKit/testmanager/webui/wuiadmintestbox.py
|
egraba/vbox_openbsd
|
6cb82f2eed1fa697d088cecc91722b55b19713c2
|
[
"MIT"
] | 1
|
2015-04-30T14:18:45.000Z
|
2015-04-30T14:18:45.000Z
|
VirtualBox-5.0.0/src/VBox/ValidationKit/testmanager/webui/wuiadmintestbox.py
|
egraba/vbox_openbsd
|
6cb82f2eed1fa697d088cecc91722b55b19713c2
|
[
"MIT"
] | null | null | null |
VirtualBox-5.0.0/src/VBox/ValidationKit/testmanager/webui/wuiadmintestbox.py
|
egraba/vbox_openbsd
|
6cb82f2eed1fa697d088cecc91722b55b19713c2
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# $Id: wuiadmintestbox.py $
"""
Test Manager WUI - TestBox.
"""
__copyright__ = \
"""
Copyright (C) 2012-2015 Oracle Corporation
This file is part of VirtualBox Open Source Edition (OSE), as
available from http://www.virtualbox.org. This file is free software;
you can redistribute it and/or modify it under the terms of the GNU
General Public License (GPL) as published by the Free Software
Foundation, in version 2 as it comes in the "COPYING" file of the
VirtualBox OSE distribution. VirtualBox OSE is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
The contents of this file may alternatively be used under the terms
of the Common Development and Distribution License Version 1.0
(CDDL) only, as it comes in the "COPYING.CDDL" file of the
VirtualBox OSE distribution, in which case the provisions of the
CDDL are applicable instead of those of the GPL.
You may elect to license modified versions of this file under the
terms and conditions of either the GPL or the CDDL or both.
"""
__version__ = "$Revision: 100880 $"
# Standard python imports.
import socket;
# Validation Kit imports.
from testmanager.webui.wuicontentbase import WuiListContentWithActionBase, WuiFormContentBase, WuiLinkBase, WuiSvnLink, \
WuiTmLink, WuiSpanText, WuiRawHtml;
from testmanager.core.db import TMDatabaseConnection;
from testmanager.core.schedgroup import SchedGroupLogic, SchedGroupData;
from testmanager.core.testbox import TestBoxData;
from testmanager.core.testset import TestSetData;
from common import utils;
from testmanager.core.db import isDbTimestampInfinity;
class WuiTestBox(WuiFormContentBase):
"""
WUI TestBox Form Content Generator.
"""
def __init__(self, oData, sMode, oDisp):
if sMode == WuiFormContentBase.ksMode_Add:
            sTitle = 'Create TestBox';
if oData.uuidSystem is not None and len(oData.uuidSystem) > 10:
sTitle += ' - ' + oData.uuidSystem;
elif sMode == WuiFormContentBase.ksMode_Edit:
sTitle = 'Edit TestBox - %s (#%s)' % (oData.sName, oData.idTestBox);
else:
assert sMode == WuiFormContentBase.ksMode_Show;
sTitle = 'TestBox - %s (#%s)' % (oData.sName, oData.idTestBox);
WuiFormContentBase.__init__(self, oData, sMode, 'TestBox', oDisp, sTitle);
# Try enter sName as hostname (no domain) when creating the testbox.
if sMode == WuiFormContentBase.ksMode_Add \
and self._oData.sName in [None, ''] \
and self._oData.ip not in [None, '']:
try:
(self._oData.sName, _, _) = socket.gethostbyaddr(self._oData.ip);
except:
pass;
offDot = self._oData.sName.find('.');
if offDot > 0:
self._oData.sName = self._oData.sName[:offDot];
def _populateForm(self, oForm, oData):
oForm.addIntRO( TestBoxData.ksParam_idTestBox, oData.idTestBox, 'TestBox ID');
oForm.addIntRO( TestBoxData.ksParam_idGenTestBox, oData.idGenTestBox, 'TestBox generation ID');
oForm.addTimestampRO(TestBoxData.ksParam_tsEffective, oData.tsEffective, 'Last changed');
oForm.addTimestampRO(TestBoxData.ksParam_tsExpire, oData.tsExpire, 'Expires (excl)');
oForm.addIntRO( TestBoxData.ksParam_uidAuthor, oData.uidAuthor, 'Changed by UID');
oForm.addText( TestBoxData.ksParam_ip, oData.ip, 'TestBox IP Address');
oForm.addUuid( TestBoxData.ksParam_uuidSystem, oData.uuidSystem, 'TestBox System/Firmware UUID');
oForm.addText( TestBoxData.ksParam_sName, oData.sName, 'TestBox Name');
oForm.addText( TestBoxData.ksParam_sDescription, oData.sDescription, 'TestBox Description');
oForm.addComboBox( TestBoxData.ksParam_idSchedGroup, oData.idSchedGroup, 'Scheduling Group',
SchedGroupLogic(TMDatabaseConnection()).getSchedGroupsForCombo());
oForm.addCheckBox( TestBoxData.ksParam_fEnabled, oData.fEnabled, 'Enabled');
oForm.addComboBox( TestBoxData.ksParam_enmLomKind, oData.enmLomKind, 'Lights-out-management',
TestBoxData.kaoLomKindDescs);
oForm.addText( TestBoxData.ksParam_ipLom, oData.ipLom, 'Lights-out-management IP Address');
oForm.addInt( TestBoxData.ksParam_pctScaleTimeout, oData.pctScaleTimeout, 'Timeout scale factor (%)');
## @todo Pretty format the read-only fields and use hidden fields for
# passing the actual values. (Yes, we need the values so we can
# display the form correctly on input error.)
oForm.addTextRO( TestBoxData.ksParam_sOs, oData.sOs, 'TestBox OS');
oForm.addTextRO( TestBoxData.ksParam_sOsVersion, oData.sOsVersion, 'TestBox OS version');
oForm.addTextRO( TestBoxData.ksParam_sCpuArch, oData.sCpuArch, 'TestBox OS kernel architecture');
oForm.addTextRO( TestBoxData.ksParam_sCpuVendor, oData.sCpuVendor, 'TestBox CPU vendor');
oForm.addTextRO( TestBoxData.ksParam_sCpuName, oData.sCpuName, 'TestBox CPU name');
if oData.lCpuRevision:
oForm.addTextRO( TestBoxData.ksParam_lCpuRevision, '%#x' % (oData.lCpuRevision,), 'TestBox CPU revision',
sPostHtml = ' (family=%#x model=%#x stepping=%#x)'
% (oData.getCpuFamily(), oData.getCpuModel(), oData.getCpuStepping(),),
sSubClass = 'long');
else:
oForm.addLongRO( TestBoxData.ksParam_lCpuRevision, oData.lCpuRevision, 'TestBox CPU revision');
oForm.addIntRO( TestBoxData.ksParam_cCpus, oData.cCpus, 'Number of CPUs, cores and threads');
oForm.addCheckBoxRO( TestBoxData.ksParam_fCpuHwVirt, oData.fCpuHwVirt, 'VT-x or AMD-V supported');
oForm.addCheckBoxRO( TestBoxData.ksParam_fCpuNestedPaging, oData.fCpuNestedPaging, 'Nested paging supported');
oForm.addCheckBoxRO( TestBoxData.ksParam_fCpu64BitGuest, oData.fCpu64BitGuest, '64-bit guest supported');
oForm.addCheckBoxRO( TestBoxData.ksParam_fChipsetIoMmu, oData.fChipsetIoMmu, 'I/O MMU supported');
oForm.addMultilineTextRO(TestBoxData.ksParam_sReport, oData.sReport, 'Hardware/software report');
oForm.addLongRO( TestBoxData.ksParam_cMbMemory, oData.cMbMemory, 'Installed RAM size (MB)');
oForm.addLongRO( TestBoxData.ksParam_cMbScratch, oData.cMbScratch, 'Available scratch space (MB)');
oForm.addIntRO( TestBoxData.ksParam_iTestBoxScriptRev, oData.iTestBoxScriptRev,
'TestBox Script SVN revision');
# Later:
#if not self.isAttributeNull(''):
# sHexVer = '%s.%s.%.%s' % (oData.iPythonHexVersion >> 24, (oData.iPythonHexVersion >> 16) & 0xff,
# (oData.iPythonHexVersion >> 8) & 0xff, oData.iPythonHexVersion & 0xff);
#else:
# sHexVer = str(oData.iPythonHexVersion);
oForm.addIntRO( TestBoxData.ksParam_iPythonHexVersion, oData.iPythonHexVersion,
'Python version (hex)');
if self._sMode == WuiFormContentBase.ksMode_Edit:
oForm.addComboBox(TestBoxData.ksParam_enmPendingCmd, oData.enmPendingCmd, 'Pending command',
TestBoxData.kaoTestBoxCmdDescs);
else:
oForm.addComboBoxRO(TestBoxData.ksParam_enmPendingCmd, oData.enmPendingCmd, 'Pending command',
TestBoxData.kaoTestBoxCmdDescs);
if self._sMode != WuiFormContentBase.ksMode_Show:
oForm.addSubmit('Create TestBox' if self._sMode == WuiFormContentBase.ksMode_Add else 'Change TestBox');
return True;
class WuiTestBoxList(WuiListContentWithActionBase):
"""
WUI TestBox List Content Generator.
"""
## Descriptors for the combo box.
kasTestBoxActionDescs = \
[ \
[ 'none', 'Select an action...', '' ],
[ 'enable', 'Enable', '' ],
[ 'disable', 'Disable', '' ],
TestBoxData.kaoTestBoxCmdDescs[1],
TestBoxData.kaoTestBoxCmdDescs[2],
TestBoxData.kaoTestBoxCmdDescs[3],
TestBoxData.kaoTestBoxCmdDescs[4],
TestBoxData.kaoTestBoxCmdDescs[5],
];
def __init__(self, aoEntries, iPage, cItemsPerPage, tsEffective, fnDPrint, oDisp):
WuiListContentWithActionBase.__init__(self, aoEntries, iPage, cItemsPerPage, tsEffective,
sTitle = 'TestBoxes', sId = 'users', fnDPrint = fnDPrint, oDisp = oDisp);
self._asColumnHeaders.extend([ 'Name', 'LOM', 'Status',
'Cmd', 'Script', 'Python', 'Group',
'OS', 'CPU', 'Features', 'CPUs', 'RAM', 'Scratch',
'Actions' ]);
self._asColumnAttribs.extend([ 'align="center"', 'align="center"', 'align="center"',
'align="center"', 'align="center"', 'align="center"', 'align="center"',
'', '', '', 'align="right"', 'align="right"', 'align="right"',
'align="center"' ]);
self._aoActions = list(self.kasTestBoxActionDescs);
self._aoSchedGroups = SchedGroupLogic(self._oDisp.getDb()).fetchOrderedByName();
self._dSchedGroups = dict();
for oSchedGroup in self._aoSchedGroups:
self._aoActions.append([ 'setgroup-%u' % (oSchedGroup.idSchedGroup,),
'Migrate to group %s (#%u)' % (oSchedGroup.sName, oSchedGroup.idSchedGroup,),
oSchedGroup.sDescription ]);
self._dSchedGroups[oSchedGroup.idSchedGroup] = oSchedGroup;
self._sAction = oDisp.ksActionTestBoxListPost;
self._sCheckboxName = TestBoxData.ksParam_idTestBox;
def _formatListEntry(self, iEntry): # pylint: disable=R0914
from testmanager.webui.wuiadmin import WuiAdmin;
oEntry = self._aoEntries[iEntry];
        # Lights-out management.
if oEntry.enmLomKind == TestBoxData.ksLomKind_ILOM:
aoLom = [ WuiLinkBase('ILOM', 'https://%s/' % (oEntry.ipLom,), fBracketed = False), ];
elif oEntry.enmLomKind == TestBoxData.ksLomKind_ELOM:
aoLom = [ WuiLinkBase('ELOM', 'http://%s/' % (oEntry.ipLom,), fBracketed = False), ];
elif oEntry.enmLomKind == TestBoxData.ksLomKind_AppleXserveLom:
aoLom = [ 'Apple LOM' ];
elif oEntry.enmLomKind == TestBoxData.ksLomKind_None:
aoLom = [ 'none' ];
else:
aoLom = [ 'Unexpected enmLomKind value "%s"' % (oEntry.enmLomKind,) ];
if oEntry.ipLom is not None:
if oEntry.enmLomKind in [ TestBoxData.ksLomKind_ILOM, TestBoxData.ksLomKind_ELOM ]:
aoLom += [ WuiLinkBase('(ssh)', 'ssh://%s' % (oEntry.ipLom,), fBracketed = False) ];
aoLom += [ WuiRawHtml('<br>'), '%s' % (oEntry.ipLom,) ];
# State and Last seen.
if oEntry.oStatus is None:
oSeen = WuiSpanText('tmspan-offline', 'Never');
oState = '';
else:
oDelta = oEntry.tsCurrent - oEntry.oStatus.tsUpdated;
if oDelta.days <= 0 and oDelta.seconds <= 15*60: # 15 mins and we consider you dead.
oSeen = WuiSpanText('tmspan-online', u'%s\u00a0s\u00a0ago' % (oDelta.days * 24 * 3600 + oDelta.seconds,));
else:
oSeen = WuiSpanText('tmspan-offline', u'%s' % (self.formatTsShort(oEntry.oStatus.tsUpdated),));
if oEntry.oStatus.idTestSet is None:
oState = str(oEntry.oStatus.enmState);
else:
from testmanager.webui.wuimain import WuiMain;
oState = WuiTmLink(oEntry.oStatus.enmState, WuiMain.ksScriptName,
{ WuiMain.ksParamAction: WuiMain.ksActionTestResultDetails,
TestSetData.ksParam_idTestSet: oEntry.oStatus.idTestSet, },
sTitle = '#%u' % (oEntry.oStatus.idTestSet,),
fBracketed = False);
# Group link.
oGroup = self._dSchedGroups.get(oEntry.idSchedGroup);
oGroupLink = WuiTmLink(oGroup.sName if oGroup is not None else str(oEntry.idSchedGroup),
WuiAdmin.ksScriptName,
{ WuiAdmin.ksParamAction: WuiAdmin.ksActionSchedGroupEdit,
SchedGroupData.ksParam_idSchedGroup: oEntry.idSchedGroup, },
sTitle = '#%u' % (oEntry.idSchedGroup,),
fBracketed = False);
# Reformat the OS version to take less space.
aoOs = [ 'N/A' ];
if oEntry.sOs is not None and oEntry.sOsVersion is not None and oEntry.sCpuArch:
sOsVersion = oEntry.sOsVersion;
if sOsVersion[0] not in [ 'v', 'V', 'r', 'R'] \
and sOsVersion[0].isdigit() \
and sOsVersion.find('.') in range(4) \
and oEntry.sOs in [ 'linux', 'solaris', 'darwin', ]:
sOsVersion = 'v' + sOsVersion;
sVer1 = sOsVersion;
sVer2 = None;
if oEntry.sOs == 'linux' or oEntry.sOs == 'darwin':
iSep = sOsVersion.find(' / ');
if iSep > 0:
sVer1 = sOsVersion[:iSep].strip();
sVer2 = sOsVersion[iSep + 3:].strip();
sVer2 = sVer2.replace('Red Hat Enterprise Linux Server', 'RHEL');
sVer2 = sVer2.replace('Oracle Linux Server', 'OL');
elif oEntry.sOs == 'solaris':
iSep = sOsVersion.find(' (');
if iSep > 0 and sOsVersion[-1] == ')':
sVer1 = sOsVersion[:iSep].strip();
sVer2 = sOsVersion[iSep + 2:-1].strip();
aoOs = [
WuiSpanText('tmspan-osarch', u'%s.%s' % (oEntry.sOs, oEntry.sCpuArch,)),
WuiSpanText('tmspan-osver1', sVer1.replace('-', u'\u2011'),),
];
if sVer2 is not None:
aoOs += [ WuiRawHtml('<br>'), WuiSpanText('tmspan-osver2', sVer2.replace('-', u'\u2011')), ];
# Format the CPU revision.
oCpu = None;
if oEntry.lCpuRevision is not None and oEntry.sCpuVendor is not None and oEntry.sCpuName is not None:
oCpu = [
u'%s (fam:%xh\u00a0m:%xh\u00a0s:%xh)'
% (oEntry.sCpuVendor, oEntry.getCpuFamily(), oEntry.getCpuModel(), oEntry.getCpuStepping(),),
WuiRawHtml('<br>'),
oEntry.sCpuName,
];
else:
oCpu = [];
if oEntry.sCpuVendor is not None:
oCpu.append(oEntry.sCpuVendor);
if oEntry.lCpuRevision is not None:
oCpu.append('%#x' % (oEntry.lCpuRevision,));
if oEntry.sCpuName is not None:
oCpu.append(oEntry.sCpuName);
# Stuff cpu vendor and cpu/box features into one field.
asFeatures = []
if oEntry.fCpuHwVirt is True: asFeatures.append(u'HW\u2011Virt');
if oEntry.fCpuNestedPaging is True: asFeatures.append(u'Nested\u2011Paging');
if oEntry.fCpu64BitGuest is True: asFeatures.append(u'64\u2011bit\u2011Guest');
if oEntry.fChipsetIoMmu is True: asFeatures.append(u'I/O\u2011MMU');
sFeatures = u' '.join(asFeatures) if len(asFeatures) > 0 else u'';
        # Collect the applicable actions.
aoActions = [
WuiTmLink('Details', WuiAdmin.ksScriptName,
{ WuiAdmin.ksParamAction: WuiAdmin.ksActionTestBoxDetails,
TestBoxData.ksParam_idTestBox: oEntry.idTestBox,
WuiAdmin.ksParamEffectiveDate: self._tsEffectiveDate, } ),
]
if isDbTimestampInfinity(oEntry.tsExpire):
aoActions += [
WuiTmLink('Edit', WuiAdmin.ksScriptName,
{ WuiAdmin.ksParamAction: WuiAdmin.ksActionTestBoxEdit,
TestBoxData.ksParam_idTestBox: oEntry.idTestBox, } ),
WuiTmLink('Remove', WuiAdmin.ksScriptName,
{ WuiAdmin.ksParamAction: WuiAdmin.ksActionTestBoxRemovePost,
TestBoxData.ksParam_idTestBox: oEntry.idTestBox },
sConfirm = 'Are you sure that you want to remove %s (%s)?' % (oEntry.sName, oEntry.ip) ),
]
if oEntry.sOs not in [ 'win', 'os2', ] and oEntry.ip is not None:
aoActions.append(WuiLinkBase('ssh', 'ssh://vbox@%s' % (oEntry.ip,),));
return [ self._getCheckBoxColumn(iEntry, oEntry.idTestBox),
[ WuiSpanText('tmspan-name', oEntry.sName), WuiRawHtml('<br>'), '%s' % (oEntry.ip,),],
aoLom,
[
'' if oEntry.fEnabled else 'disabled / ',
oState,
WuiRawHtml('<br>'),
oSeen,
],
oEntry.enmPendingCmd,
WuiSvnLink(oEntry.iTestBoxScriptRev),
oEntry.formatPythonVersion(),
oGroupLink,
aoOs,
oCpu,
sFeatures,
oEntry.cCpus if oEntry.cCpus is not None else 'N/A',
utils.formatNumberNbsp(oEntry.cMbMemory) + u'\u00a0MB' if oEntry.cMbMemory is not None else 'N/A',
utils.formatNumberNbsp(oEntry.cMbScratch) + u'\u00a0MB' if oEntry.cMbScratch is not None else 'N/A',
aoActions,
];
| 54.87988
| 123
| 0.584186
|
3f75095e43dc49731f2b84482664b37ebd877fe8
| 1,318
|
py
|
Python
|
pyspider/libs/dataurl.py
|
zgwcome/pyspider
|
1e6375850538d8e04793f5e26b6d92adf8db6a9e
|
[
"Apache-2.0"
] | 13,935
|
2015-01-01T04:48:55.000Z
|
2022-03-30T02:02:42.000Z
|
pyspider/libs/dataurl.py
|
zgwcome/pyspider
|
1e6375850538d8e04793f5e26b6d92adf8db6a9e
|
[
"Apache-2.0"
] | 848
|
2015-01-04T14:13:40.000Z
|
2022-03-04T02:29:44.000Z
|
pyspider/libs/dataurl.py
|
zgwcome/pyspider
|
1e6375850538d8e04793f5e26b6d92adf8db6a9e
|
[
"Apache-2.0"
] | 4,077
|
2015-01-02T03:01:27.000Z
|
2022-03-27T12:06:40.000Z
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<i@binux.me>
# http://binux.me
# Created on 2012-11-16 10:33:20
import six
from base64 import b64encode, b64decode
from . import utils
from six.moves.urllib.parse import quote, unquote
def encode(data, mime_type='', charset='utf-8', base64=True):
"""
Encode data to DataURL
"""
if isinstance(data, six.text_type):
data = data.encode(charset)
else:
charset = None
if base64:
data = utils.text(b64encode(data))
else:
data = utils.text(quote(data))
result = ['data:', ]
if mime_type:
result.append(mime_type)
if charset:
result.append(';charset=')
result.append(charset)
if base64:
result.append(';base64')
result.append(',')
result.append(data)
return ''.join(result)
def decode(data_url):
"""
Decode DataURL data
"""
metadata, data = data_url.rsplit(',', 1)
_, metadata = metadata.split('data:', 1)
parts = metadata.split(';')
if parts[-1] == 'base64':
data = b64decode(data)
else:
data = unquote(data)
for part in parts:
if part.startswith("charset="):
data = data.decode(part[8:])
return data
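# --- Hypothetical usage sketch (not part of the original module) ---
# encode()/decode() round-trip a text payload through a data: URL.
def _demo_dataurl_roundtrip():
    url = encode(u'hello', mime_type='text/plain')
    # -> 'data:text/plain;charset=utf-8;base64,aGVsbG8='
    assert decode(url) == u'hello'
    return url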
| 23.122807
| 61
| 0.590288
|
850ec44ca6222c34584b9e82485724cd041ac1f0
| 981
|
py
|
Python
|
PyLCONF/lconf_schema.py
|
LCONF-Data-Serialization-Format/PyLCONF
|
63b7df0092dcf3700604dfe8d5ef3775582891eb
|
[
"BSD-3-Clause"
] | null | null | null |
PyLCONF/lconf_schema.py
|
LCONF-Data-Serialization-Format/PyLCONF
|
63b7df0092dcf3700604dfe8d5ef3775582891eb
|
[
"BSD-3-Clause"
] | null | null | null |
PyLCONF/lconf_schema.py
|
LCONF-Data-Serialization-Format/PyLCONF
|
63b7df0092dcf3700604dfe8d5ef3775582891eb
|
[
"BSD-3-Clause"
] | null | null | null |
"""
### PyLCONF.lconf_schema
#### Overview
`validate_one_section_schema`: Validate one LCONF-Schema-Section raw string.
`validate_schemas_from_file`: Validates a LCONF-Schema-File containing one or more LCONF-Schema-Sections.
"""
# =====================================================================================================================
def TOOD_deletelater():
print("\n\nTOOD_deletelater\n\n")
# ===========
section_text = """___SECTION :: 4 :: Own Test Section
# OWN COMMENT
key1value_pair_name :: FRED
key2value_pair_age :: 17
___END"""
parse_section_obj = parse_section(default_obj_result, section_text, lconf_template_structure, validate=False)
print("\n\nparse_section_obj: \n\n", parse_section_obj, "\n\n")
print("\n\nparse_section_obj.section_name: <", parse_section_obj.section_name, ">")
print("\n\nparse_section_obj.is_parsed: <", parse_section_obj.is_parsed, ">")
if __name__ == '__main__':
TOOD_deletelater()
| 27.25
| 119
| 0.633028
|
395d5b48e9a7c27e9a76ca4ebe161f7d8ba4413d
| 2,557
|
py
|
Python
|
tests/enerpiweb/test_remote_download.py
|
azogue/enerpi
|
155374f6c86248a63b141fc5a78d38cbc4b0ca45
|
[
"MIT"
] | 5
|
2018-01-27T17:08:36.000Z
|
2020-05-24T04:08:39.000Z
|
tests/enerpiweb/test_remote_download.py
|
azogue/enerpi
|
155374f6c86248a63b141fc5a78d38cbc4b0ca45
|
[
"MIT"
] | null | null | null |
tests/enerpiweb/test_remote_download.py
|
azogue/enerpi
|
155374f6c86248a63b141fc5a78d38cbc4b0ca45
|
[
"MIT"
] | 4
|
2017-12-25T09:47:27.000Z
|
2021-06-17T18:14:30.000Z
|
# -*- coding: utf-8 -*-
"""
Test data downloading from remote running ENERPIWEB
This test needs a proper ENERPI running in another machine in the same network
"""
from tests.conftest import TestCaseEnerpi
TS_GET_START = '2016-11-28'
TS_GET_END = '2016-11-29'
BAD_IP_WITH_NO_ENERPI = '192.168.1.99'
IP_RUNNING_ENERPI = '192.168.1.44'
# IP_RUNNING_ENERPI = 'localhost'
PORT_RUNNING_ENERPI = 80
# PORT_RUNNING_ENERPI = 7777
WEBPREFIX_RUNNING_ENERPI = '/enerpi'
class TestEnerpiRemote(TestCaseEnerpi):
def test_0_remote_replication(self):
from enerpi.api import replicate_remote_enerpi_data_catalog
ok = replicate_remote_enerpi_data_catalog(local_path=self.DATA_PATH,
enerpi_ip=BAD_IP_WITH_NO_ENERPI, port=PORT_RUNNING_ENERPI,
prefix_remote_enerpi=WEBPREFIX_RUNNING_ENERPI)
print('replication OK? {}'.format(ok))
self.assertFalse(ok, "replication OK in bad remote machine? can't be!!")
ok = replicate_remote_enerpi_data_catalog(local_path=self.DATA_PATH,
enerpi_ip=IP_RUNNING_ENERPI, port=PORT_RUNNING_ENERPI,
prefix_remote_enerpi=WEBPREFIX_RUNNING_ENERPI)
print('replication OK? {}'.format(ok))
self.assertTrue(ok, 'replication NOT OK!!')
def test_1_remote_data_get(self):
from enerpi.api import remote_data_get
r_data_1 = remote_data_get(TS_GET_START, tf=TS_GET_END,
enerpi_ip=BAD_IP_WITH_NO_ENERPI, port=PORT_RUNNING_ENERPI,
prefix_remote_enerpi=WEBPREFIX_RUNNING_ENERPI, verbose=True)
print('r_data_1:\n', r_data_1)
self.assertEqual(r_data_1, {}, "remote_data_get OK in bad remote machine? can't be!!")
r_data_2 = remote_data_get(TS_GET_START, tf=TS_GET_END,
enerpi_ip=IP_RUNNING_ENERPI, port=PORT_RUNNING_ENERPI,
prefix_remote_enerpi=WEBPREFIX_RUNNING_ENERPI, verbose=True)
print('r_data_2:\n', r_data_2)
assert r_data_2
r_data_3 = remote_data_get('2015-11-28', tf='2016-01-28',
enerpi_ip=IP_RUNNING_ENERPI, port=PORT_RUNNING_ENERPI,
prefix_remote_enerpi=WEBPREFIX_RUNNING_ENERPI, verbose=True)
print('r_data_3:\n', r_data_3)
if __name__ == '__main__':
import unittest
unittest.main()
| 39.953125
| 108
| 0.63551
|
1084a6d16dc62955a328351f156411a3408b65c1
| 1,424
|
py
|
Python
|
backend/trending.py
|
archanl/thetweetrises
|
b9ac7b9e9df9c4c36da0812a893b1b793f08c48c
|
[
"MIT"
] | 1
|
2019-03-02T20:14:07.000Z
|
2019-03-02T20:14:07.000Z
|
backend/trending.py
|
archanl/thetweetrises
|
b9ac7b9e9df9c4c36da0812a893b1b793f08c48c
|
[
"MIT"
] | null | null | null |
backend/trending.py
|
archanl/thetweetrises
|
b9ac7b9e9df9c4c36da0812a893b1b793f08c48c
|
[
"MIT"
] | null | null | null |
import sys
import redis
import requests
from requests_oauthlib import OAuth1
import json
import dateutil.parser as dp
USA_WOEID = '23424977'
TRENDING_KEY = 'trending_keys'
MAX_TRENDING = 10
def main():
r = redis.Redis('localhost')
oauth = OAuth1('ZZQMKjtL8kewgk4001jF8krqx', # API Key
'4EVEmz1EKPTRmJDpQBSeW5Aldxs7KXbaYw7AJo771kKn12qPp4', # API secret
'16635628-QBipfEYkp3d0TBODdnNMHHM0cLYovy3OjcmsHIvNp', # Access token
'3jMS4f7jbWVDxoq5Gl8sXISEZutCWXrv6rmMUeJe2nPTS') # Access token secret
# Trending Topics
# Place: USA (woeid = 23424977)
t = requests.get('https://api.twitter.com/1.1/trends/place.json?id=' + USA_WOEID,
auth=oauth)
j_str = ""
for x in t:
j_str += x
j = json.loads(j_str)
trending_time = j[0]['created_at']
# if not r.exists(trending_key):
# while r.llen(TRENDING_KEYS_KEY) >= MAX_TRENDING:
# to_remove = r.brpop(TRENDING_KEYS_KEY)
#
# r.lpush(TRENDING_KEYS_KEY, trending_key)
# for trend in j[0]['trends']:
# r.lpush(trending_key, json.dumps(trend))
epoch = dp.parse(trending_time)
epoch = epoch.strftime("%s")
for trend in j[0]['trends']:
r.zadd(TRENDING_KEY, trend['name'], epoch)
r.set('trending_json', j_str)
if __name__ == '__main__':
main()
| 28.48
| 97
| 0.620787
|
53c1c4648e475a96e29acf1660277e6c777f5a29
| 187
|
py
|
Python
|
CHAPTER-1/Python overview - pg 14/1.py
|
teamzz111/Python-Solutionary
|
8b756d088ef1268a94c0e46aa206c52913c96a3b
|
[
"Unlicense"
] | 1
|
2018-11-03T21:13:20.000Z
|
2018-11-03T21:13:20.000Z
|
CHAPTER-1/Python overview - pg 14/1.py
|
teamzz111/Python-Solutionary
|
8b756d088ef1268a94c0e46aa206c52913c96a3b
|
[
"Unlicense"
] | null | null | null |
CHAPTER-1/Python overview - pg 14/1.py
|
teamzz111/Python-Solutionary
|
8b756d088ef1268a94c0e46aa206c52913c96a3b
|
[
"Unlicense"
] | null | null | null |
# Age avergage with floating points
# Var declarations
# Code
age1 = 10.0
age2 = 11.0
age3 = 13.0
age4 = 9.0
age5 = 12.0
result = (age1+age2+age3+age4+age5)/5.0
# Result
print(result)
| 12.466667
| 39
| 0.679144
|
57767d489e248e353f224ecc76c8f865397910bf
| 17,337
|
py
|
Python
|
intersight/model/niatelemetry_apic_psu_details.py
|
CiscoDevNet/intersight-python
|
04b721f37c3044646a91c185c7259edfb991557a
|
[
"Apache-2.0"
] | 5
|
2021-12-16T15:13:32.000Z
|
2022-03-29T16:09:54.000Z
|
intersight/model/niatelemetry_apic_psu_details.py
|
CiscoDevNet/intersight-python
|
04b721f37c3044646a91c185c7259edfb991557a
|
[
"Apache-2.0"
] | 4
|
2022-01-25T19:05:51.000Z
|
2022-03-29T20:18:37.000Z
|
intersight/model/niatelemetry_apic_psu_details.py
|
CiscoDevNet/intersight-python
|
04b721f37c3044646a91c185c7259edfb991557a
|
[
"Apache-2.0"
] | 2
|
2020-07-07T15:01:08.000Z
|
2022-01-31T04:27:35.000Z
|
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. # noqa: E501
The version of the OpenAPI document: 1.0.9-4950
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from intersight.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from intersight.model.asset_device_registration_relationship import AssetDeviceRegistrationRelationship
from intersight.model.display_names import DisplayNames
from intersight.model.mo_base_mo import MoBaseMo
from intersight.model.mo_base_mo_relationship import MoBaseMoRelationship
from intersight.model.mo_tag import MoTag
from intersight.model.mo_version_context import MoVersionContext
from intersight.model.niatelemetry_apic_psu_details_all_of import NiatelemetryApicPsuDetailsAllOf
globals()['AssetDeviceRegistrationRelationship'] = AssetDeviceRegistrationRelationship
globals()['DisplayNames'] = DisplayNames
globals()['MoBaseMo'] = MoBaseMo
globals()['MoBaseMoRelationship'] = MoBaseMoRelationship
globals()['MoTag'] = MoTag
globals()['MoVersionContext'] = MoVersionContext
globals()['NiatelemetryApicPsuDetailsAllOf'] = NiatelemetryApicPsuDetailsAllOf
class NiatelemetryApicPsuDetails(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('class_id',): {
'NIATELEMETRY.APICPSUDETAILS': "niatelemetry.ApicPsuDetails",
},
('object_type',): {
'NIATELEMETRY.APICPSUDETAILS': "niatelemetry.ApicPsuDetails",
},
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'class_id': (str,), # noqa: E501
'object_type': (str,), # noqa: E501
'dn': (str,), # noqa: E501
'model_number': (str,), # noqa: E501
'node_id': (int,), # noqa: E501
'record_type': (str,), # noqa: E501
'record_version': (str,), # noqa: E501
'serial_number': (str,), # noqa: E501
'site_name': (str,), # noqa: E501
'vendor_name': (str,), # noqa: E501
'vid': (str,), # noqa: E501
'registered_device': (AssetDeviceRegistrationRelationship,), # noqa: E501
'account_moid': (str,), # noqa: E501
'create_time': (datetime,), # noqa: E501
'domain_group_moid': (str,), # noqa: E501
'mod_time': (datetime,), # noqa: E501
'moid': (str,), # noqa: E501
'owners': ([str], none_type,), # noqa: E501
'shared_scope': (str,), # noqa: E501
'tags': ([MoTag], none_type,), # noqa: E501
'version_context': (MoVersionContext,), # noqa: E501
'ancestors': ([MoBaseMoRelationship], none_type,), # noqa: E501
'parent': (MoBaseMoRelationship,), # noqa: E501
'permission_resources': ([MoBaseMoRelationship], none_type,), # noqa: E501
'display_names': (DisplayNames,), # noqa: E501
}
@cached_property
def discriminator():
val = {
}
if not val:
return None
return {'class_id': val}
attribute_map = {
'class_id': 'ClassId', # noqa: E501
'object_type': 'ObjectType', # noqa: E501
'dn': 'Dn', # noqa: E501
'model_number': 'ModelNumber', # noqa: E501
'node_id': 'NodeId', # noqa: E501
'record_type': 'RecordType', # noqa: E501
'record_version': 'RecordVersion', # noqa: E501
'serial_number': 'SerialNumber', # noqa: E501
'site_name': 'SiteName', # noqa: E501
'vendor_name': 'VendorName', # noqa: E501
'vid': 'Vid', # noqa: E501
'registered_device': 'RegisteredDevice', # noqa: E501
'account_moid': 'AccountMoid', # noqa: E501
'create_time': 'CreateTime', # noqa: E501
'domain_group_moid': 'DomainGroupMoid', # noqa: E501
'mod_time': 'ModTime', # noqa: E501
'moid': 'Moid', # noqa: E501
'owners': 'Owners', # noqa: E501
'shared_scope': 'SharedScope', # noqa: E501
'tags': 'Tags', # noqa: E501
'version_context': 'VersionContext', # noqa: E501
'ancestors': 'Ancestors', # noqa: E501
'parent': 'Parent', # noqa: E501
'permission_resources': 'PermissionResources', # noqa: E501
'display_names': 'DisplayNames', # noqa: E501
}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
'_composed_instances',
'_var_name_to_model_instances',
'_additional_properties_model_instances',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""NiatelemetryApicPsuDetails - a model defined in OpenAPI
Args:
Keyword Args:
class_id (str): The fully-qualified name of the instantiated, concrete type. This property is used as a discriminator to identify the type of the payload when marshaling and unmarshaling data.. defaults to "niatelemetry.ApicPsuDetails", must be one of ["niatelemetry.ApicPsuDetails", ] # noqa: E501
object_type (str): The fully-qualified name of the instantiated, concrete type. The value should be the same as the 'ClassId' property.. defaults to "niatelemetry.ApicPsuDetails", must be one of ["niatelemetry.ApicPsuDetails", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
dn (str): Dn for the PSU in the inventory.. [optional] # noqa: E501
model_number (str): Model number of the PSU in APIC.. [optional] # noqa: E501
node_id (int): Node id for the PSU in the inventory.. [optional] # noqa: E501
record_type (str): Type of record DCNM / APIC / SE. This determines the type of platform where inventory was collected.. [optional] # noqa: E501
record_version (str): Version of record being pushed. This determines what was the API version for data available from the device.. [optional] # noqa: E501
serial_number (str): Serial number of the PSU in APIC.. [optional] # noqa: E501
site_name (str): Name of the APIC site from which this data is being collected.. [optional] # noqa: E501
vendor_name (str): Vendor name of the PSU in APIC.. [optional] # noqa: E501
vid (str): VID for the PSU in the inventory.. [optional] # noqa: E501
registered_device (AssetDeviceRegistrationRelationship): [optional] # noqa: E501
account_moid (str): The Account ID for this managed object.. [optional] # noqa: E501
create_time (datetime): The time when this managed object was created.. [optional] # noqa: E501
domain_group_moid (str): The DomainGroup ID for this managed object.. [optional] # noqa: E501
mod_time (datetime): The time when this managed object was last modified.. [optional] # noqa: E501
moid (str): The unique identifier of this Managed Object instance.. [optional] # noqa: E501
owners ([str], none_type): [optional] # noqa: E501
shared_scope (str): Intersight provides pre-built workflows, tasks and policies to end users through global catalogs. Objects that are made available through global catalogs are said to have a 'shared' ownership. Shared objects are either made globally available to all end users or restricted to end users based on their license entitlement. Users can use this property to differentiate the scope (global or a specific license tier) to which a shared MO belongs.. [optional] # noqa: E501
tags ([MoTag], none_type): [optional] # noqa: E501
version_context (MoVersionContext): [optional] # noqa: E501
ancestors ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional] # noqa: E501
parent (MoBaseMoRelationship): [optional] # noqa: E501
permission_resources ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional] # noqa: E501
display_names (DisplayNames): [optional] # noqa: E501
"""
class_id = kwargs.get('class_id', "niatelemetry.ApicPsuDetails")
object_type = kwargs.get('object_type', "niatelemetry.ApicPsuDetails")
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
constant_args = {
'_check_type': _check_type,
'_path_to_item': _path_to_item,
'_spec_property_naming': _spec_property_naming,
'_configuration': _configuration,
'_visited_composed_classes': self._visited_composed_classes,
}
required_args = {
'class_id': class_id,
'object_type': object_type,
}
model_args = {}
model_args.update(required_args)
model_args.update(kwargs)
composed_info = validate_get_composed_info(
constant_args, model_args, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
unused_args = composed_info[3]
for var_name, var_value in required_args.items():
setattr(self, var_name, var_value)
for var_name, var_value in kwargs.items():
if var_name in unused_args and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
not self._additional_properties_model_instances:
# discard variable.
continue
setattr(self, var_name, var_value)
@cached_property
def _composed_schemas():
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
lazy_import()
return {
'anyOf': [
],
'allOf': [
MoBaseMo,
NiatelemetryApicPsuDetailsAllOf,
],
'oneOf': [
],
}
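# Illustrative usage sketch (hypothetical values; kwargs taken from the docstring above,
# class_id/object_type fall back to "niatelemetry.ApicPsuDetails" when omitted):
#
#     psu = NiatelemetryApicPsuDetails(
#         dn="topology/pod-1/node-1/sys/ch/psuslot-1/psu",  # hypothetical DN
#         node_id=1,
#         serial_number="ABC12345678",                      # hypothetical
#         vendor_name="ExampleVendor",                      # hypothetical
#     )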
| 53.841615 | 1,678 | 0.634712 |
3bbc4b872843dd4e1e7eeab9e38fd9b656683872 | 1,572 | py | Python
| test/test_npu/test_network_ops/test_cudnn_is_acceptable.py | Ascend/pytorch | 39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc | ["BSD-3-Clause"] | 1 | 2021-12-02T03:07:35.000Z | 2021-12-02T03:07:35.000Z
| test/test_npu/test_network_ops/test_cudnn_is_acceptable.py | Ascend/pytorch | 39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc | ["BSD-3-Clause"] | 1 | 2021-11-12T07:23:03.000Z | 2021-11-12T08:28:13.000Z
| test/test_npu/test_network_ops/test_cudnn_is_acceptable.py | Ascend/pytorch | 39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc | ["BSD-3-Clause"] | null | null | null |
# Copyright (c) 2020, Huawei Technologies.All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import numpy as np
from common_utils import TestCase, run_tests
from common_device_type import dtypes, instantiate_device_type_tests
from util_test import create_common_tensor
class TestCudnnIsAcceptable(TestCase):
def test_cudnn_is_acceptable_common_shape_format(self, device):
shape_format = [
[[np.float16, 0, 1]],
[[np.float16, 0, 5]],
[[np.float32, 4, 3]],
[[np.float32, 29, 4]]
]
for shape in shape_format:
cpu_input, npu_input = create_common_tensor(shape[0], -1, 1)
cpu_output = np.array([torch.cudnn_is_acceptable(cpu_input)]).astype(np.float32)
npu_output = np.array([torch.cudnn_is_acceptable(npu_input)]).astype(np.float32)
self.assertRtolEqual(cpu_output, npu_output)
instantiate_device_type_tests(TestCudnnIsAcceptable, globals(), except_for="cpu")
if __name__ == "__main__":
run_tests()
| 41.368421 | 92 | 0.707379 |
d967434e5b852e995fa7da7b6a0b38dd6b82245a | 1,491 | py | Python
| dist-packages/samba/tdb_util.py | Jianwei-Wang/python2.7_lib | 911b8e81512e5ac5f13e669ab46f7693ed897378 | ["PSF-2.0"] | 3 | 2015-10-31T10:39:25.000Z | 2019-04-27T20:19:33.000Z
| dist-packages/samba/tdb_util.py | Jianwei-Wang/python2.7_lib | 911b8e81512e5ac5f13e669ab46f7693ed897378 | ["PSF-2.0"] | null | null | null |
| dist-packages/samba/tdb_util.py | Jianwei-Wang/python2.7_lib | 911b8e81512e5ac5f13e669ab46f7693ed897378 | ["PSF-2.0"] | null | null | null |
# Unix SMB/CIFS implementation.
# tdb util helpers
#
# Copyright (C) Kai Blin <kai@samba.org> 2011
# Copyright (C) Amitay Isaacs <amitay@gmail.com> 2011
# Copyright (C) Andrew Bartlett <abartlet@samba.org> 2013
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import samba
import subprocess
import os
def tdb_copy(file1, file2):
"""Copy tdb file using tdbbackup utility and rename it
"""
# Find the location of tdbbackup tool
dirs = ["bin", samba.param.bin_dir()] + os.getenv('PATH').split(os.pathsep)
for d in dirs:
toolpath = os.path.join(d, "tdbbackup")
if os.path.exists(toolpath):
break
tdbbackup_cmd = [toolpath, "-s", ".copy.tdb", file1]
status = subprocess.call(tdbbackup_cmd, close_fds=True, shell=False)
if status == 0:
os.rename("%s.copy.tdb" % file1, file2)
else:
raise Exception("Error copying %s" % file1)
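# Illustrative usage (paths are hypothetical): tdb_copy() locates the tdbbackup
# tool (a local "bin" dir, samba's bin_dir(), or PATH), writes "<file1>.copy.tdb"
# next to the source, then renames that backup to the requested destination.
#
#     from samba.tdb_util import tdb_copy
#     tdb_copy("/var/lib/samba/private/secrets.tdb", "/tmp/secrets.copy.tdb")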
| 35.5 | 79 | 0.701543 |
c8e594a2ef0f78228b6f66906ed176ec66f8d8f0 | 6,913 | py | Python
| setup.py | actively-ai/fastcluster | 569c3e38e749ba2f3c3a665a04f5f8e0a71d6d37 | ["BSD-2-Clause"] | null | null | null |
| setup.py | actively-ai/fastcluster | 569c3e38e749ba2f3c3a665a04f5f8e0a71d6d37 | ["BSD-2-Clause"] | null | null | null |
| setup.py | actively-ai/fastcluster | 569c3e38e749ba2f3c3a665a04f5f8e0a71d6d37 | ["BSD-2-Clause"] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
u'''
fastcluster: Fast hierarchical clustering routines for R and Python
Copyright:
* Until package version 1.1.23: © 2011 Daniel Müllner <http://danifold.net>
* All changes from version 1.1.24 on: © Google Inc. <http://google.com>
'''
import os
import sys
import numpy
from setuptools import setup, Extension
from io import open
with open('fastcluster.py', encoding='utf_8') as f:
for line in f:
if line.find('__version_info__ =') == 0:
version = '.'.join(line.split("'")[1:-1:2])
break
print('Fastcluster version: ' + version)
print('Python version: ' + sys.version)
setup(name='fastcluster',
version=version,
py_modules=['fastcluster'],
description='Fast hierarchical clustering routines for R and Python.',
long_description=u"""
This library provides Python functions for hierarchical clustering. It
generates hierarchical clusters from distance matrices or from vector data.
This module is intended to replace the functions
```
linkage, single, complete, average, weighted, centroid, median, ward
```
in the module [`scipy.cluster.hierarchy`](
https://docs.scipy.org/doc/scipy/reference/cluster.hierarchy.html) with the same
functionality but much faster algorithms. Moreover, the function
`linkage_vector` provides memory-efficient clustering for vector data.
The interface is very similar to MATLAB's Statistics Toolbox API to make code
easier to port from MATLAB to Python/NumPy. The core implementation of this
library is in C++ for efficiency.
**User manual:** [fastcluster.pdf](
https://github.com/dmuellner/fastcluster/raw/master/docs/fastcluster.pdf).
The “Yule” distance function changed in fastcluster version 1.2.0. This is
following a [change in SciPy 1.6.3](
https://github.com/scipy/scipy/commit/3b22d1da98dc1b5f64bc944c21f398d4ba782bce).
It is recommended to use fastcluster version 1.1.x together with SciPy versions
before 1.6.3 and fastcluster 1.2.x with SciPy ≥1.6.3.
The fastcluster package is considered stable and will undergo few changes
from now on. If some years from now there have not been any updates, this does
not necessarily mean that the package is unmaintained but maybe it just was
not necessary to correct anything. Of course, please still report potential
bugs and incompatibilities to daniel@danifold.net. You may also use
[my GitHub repository](https://github.com/dmuellner/fastcluster/)
for bug reports, pull requests etc.
Note that [PyPI](https://pypi.org/project/fastcluster/) and [my GitHub
repository](https://github.com/dmuellner/fastcluster/) host the source code
for the Python interface only. The archive with both the R and the Python
interface is available on
[CRAN](https://CRAN.R-project.org/package=fastcluster) and the GitHub repository
[“cran/fastcluster”](https://github.com/cran/fastcluster). Even though I appear
as the author also of this second GitHub repository, this is just an automatic,
read-only mirror of the CRAN archive, so please do not attempt to report bugs or
contact me via this repository.
Installation files for Windows are provided on [PyPI](
https://pypi.org/project/fastcluster/#files) and on [Christoph Gohlke's web
page](http://www.lfd.uci.edu/~gohlke/pythonlibs/#fastcluster).
Christoph Dalitz wrote a pure [C++ interface to fastcluster](
https://lionel.kr.hs-niederrhein.de/~dalitz/data/hclust/).
Reference: Daniel Müllner, *fastcluster: Fast Hierarchical, Agglomerative
Clustering Routines for R and Python*, Journal of Statistical Software, **53**
(2013), no. 9, 1–18, https://doi.org/10.18637/jss.v053.i09.
""",
long_description_content_type='text/markdown',
python_requires='>=3',
requires=['numpy'],
install_requires=["numpy>=1.9"],
extras_require={'test': ['scipy>=1.6.3']},
provides=['fastcluster'],
ext_modules=[Extension('_fastcluster',
['src/fastcluster_python.cpp'],
extra_compile_args=['/EHsc'] if os.name == 'nt' else [],
include_dirs=[numpy.get_include()],
# Feel free to uncomment the line below if you use the GCC.
# This switches to more aggressive optimization and turns
# more warning switches on. No warning should appear in
# the compilation process.
#
# Also, the author's Python distribution generates debug
# symbols by default. This can be turned off, resulting in a
# much smaller compiled library.
#
# Optimization
#extra_compile_args=['-O2', '-g0', '-march=native', '-mtune=native', '-fno-math-errno'],
#
# List of all warning switches, somewhere from stackoverflow.com
#extra_compile_args=['-Wall', '-Weffc++', '-Wextra', '-Wall', '-Wcast-align', '-Wchar-subscripts', '-Wcomment', '-Wconversion', '-Wsign-conversion', '-Wdisabled-optimization', '-Wfloat-equal', '-Wformat', '-Wformat=2', '-Wformat-nonliteral', '-Wformat-security', '-Wformat-y2k', '-Wimport', '-Winit-self', '-Winline', '-Winvalid-pch', '-Wunsafe-loop-optimizations', '-Wmissing-braces', '-Wmissing-field-initializers', '-Wmissing-format-attribute', '-Wmissing-include-dirs', '-Wmissing-noreturn', '-Wpacked', '-Wparentheses', '-Wpointer-arith', '-Wredundant-decls', '-Wreturn-type', '-Wsequence-point', '-Wshadow', '-Wsign-compare', '-Wstack-protector', '-Wstrict-aliasing', '-Wstrict-aliasing=2', '-Wswitch', '-Wswitch-enum', '-Wtrigraphs', '-Wuninitialized', '-Wunknown-pragmas', '-Wunreachable-code', '-Wunused', '-Wunused-function', '-Wunused-label', '-Wunused-parameter', '-Wunused-value', '-Wunused-variable', '-Wvariadic-macros', '-Wvolatile-register-var', '-Wwrite-strings', '-Wlong-long', '-Wpadded', '-Wcast-qual', '-Wswitch-default', '-Wnon-virtual-dtor', '-Wold-style-cast', '-Woverloaded-virtual', '-Waggregate-return', '-Werror'],
#
# Linker optimization
#extra_link_args=['-Wl,--strip-all'],
)],
keywords=['dendrogram', 'linkage', 'cluster', 'agglomerative',
'hierarchical', 'hierarchy', 'ward'],
author=u"Daniel Müllner",
author_email="daniel@danifold.net",
license="BSD <http://opensource.org/licenses/BSD-2-Clause>",
classifiers=[
"Topic :: Scientific/Engineering :: Information Analysis",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Topic :: Scientific/Engineering :: Mathematics",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: C++",
"Operating System :: OS Independent",
"License :: OSI Approved :: BSD License",
"License :: OSI Approved :: GNU General Public License v2 (GPLv2)",
"Intended Audience :: Science/Research",
"Development Status :: 5 - Production/Stable"],
url='http://danifold.net',
test_suite='tests.fastcluster_test',
)
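# Illustrative usage sketch (array contents are arbitrary test data): the
# description above says the package mirrors scipy.cluster.hierarchy's linkage
# interface and adds linkage_vector for memory-efficient clustering of vectors.
#
#     import numpy as np
#     import fastcluster
#     X = np.random.rand(10, 3)                          # 10 observations, 3 features
#     Z = fastcluster.linkage_vector(X, method="ward")   # cluster vector data directly
#     # SciPy-compatible call on a condensed distance matrix:
#     # from scipy.spatial.distance import pdist
#     # Z = fastcluster.linkage(pdist(X), method="average")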
| 51.977444 | 1,143 | 0.70447 |
2f593d3871d62302a8b5515eb2e2c7cb2351bd5b | 708 | py | Python
| 00_Original/23_Iteratoren/iteratoren_fibonacci_mehrfach.py | felixdittrich92/Python3_book | cd0e2b55aa72c51927d347b70199fb9ed928e06f | ["MIT"] | null | null | null |
| 00_Original/23_Iteratoren/iteratoren_fibonacci_mehrfach.py | felixdittrich92/Python3_book | cd0e2b55aa72c51927d347b70199fb9ed928e06f | ["MIT"] | null | null | null |
| 00_Original/23_Iteratoren/iteratoren_fibonacci_mehrfach.py | felixdittrich92/Python3_book | cd0e2b55aa72c51927d347b70199fb9ed928e06f | ["MIT"] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class Fibonacci3:
class FibonacciIterator:
def __init__(self, max_n):
self.MaxN = max_n
self.N, self.A, self.B = 0, 0, 1
def __iter__(self):
return self
def __next__(self):
if self.N < self.MaxN:
self.N += 1
self.A, self.B = self.B, self.A + self.B
return self.A
else:
raise StopIteration
def __init__(self, max_n):
self.MaxN = max_n
def __iter__(self):
return self.FibonacciIterator(self.MaxN)
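# Note: __iter__ on Fibonacci3 returns a fresh FibonacciIterator on every call,
# so the nested loops below each get their own independent iterator state; with
# a single shared iterator the inner loop would exhaust it and the outer loop
# would stop early. With max_n = 3 the iterator yields 1, 1, 2, so this prints:
#   1 1, 1 1, 1 2,
#   1 1, 1 1, 1 2,
#   2 1, 2 1, 2 2,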
l = Fibonacci3(3)
for i in l:
for j in l:
print(i,j, end=", ")
print()
| 23.6 | 56 | 0.502825 |
8694e854e9d18f061aa8893ffbb3ee7e6d966e58 | 104,156 | py | Python
| main.py | Psy-Fer/interARTIC | d8e6ef6fa536dd11bfd80230149ad5ac55fe63ce | ["MIT"] | 20 | 2021-04-23T13:28:20.000Z | 2022-02-04T08:14:22.000Z
| main.py | Psy-Fer/interARTIC | d8e6ef6fa536dd11bfd80230149ad5ac55fe63ce | ["MIT"] | 41 | 2021-04-26T05:22:54.000Z | 2022-03-29T05:12:14.000Z
| main.py | Psy-Fer/interARTIC | d8e6ef6fa536dd11bfd80230149ad5ac55fe63ce | ["MIT"] | 2 | 2021-05-24T10:45:47.000Z | 2021-08-04T13:16:34.000Z |
# -*- coding: utf-8 -*-
from flask import Flask, render_template, request, redirect, url_for, json, jsonify, flash
#from src.job import Job
import src.queue as q
import os
import signal
import base64
from celery import Celery
import subprocess
from src.system import System
from celery.utils.log import get_task_logger
import random
import time
import fnmatch
from subprocess import Popen, PIPE, CalledProcessError
import sys
import re
import threading
import gzip
import glob
import argparse
import redis
import traceback
import functools
import inspect
import pandas as pd
import numpy as np
VERSION = "0.4.4"
ARTIC_VERSION = "1.2.1"
DOCS = "/static/site/index.html"
print(DOCS)
pd.set_option('display.width', 1000)
pd.set_option('colheader_justify', 'center')
class MyParser(argparse.ArgumentParser):
def error(self, message):
sys.stderr.write('error: %s\n' % message)
self.print_help()
sys.exit(2)
redis_port = sys.argv[1]
app = Flask(__name__)
app.config['SECRET_KEY'] = 'top-secret!'
# Celery configuration
# app.config['CELERY_BROKER_URL'] = 'redis://localhost:7777/0'
# app.config['CELERY_RESULT_BACKEND'] = 'redis://localhost:7777/0'
app.config['CELERY_BROKER_URL'] = 'redis://localhost:{}/0'.format(redis_port)
app.config['CELERY_RESULT_BACKEND'] = 'redis://localhost:{}/0'.format(redis_port)
app.secret_key = "shhhh"
# Initialize Celery
# celery = Celery(app.name)
celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL'])
celery.conf.update(app.config)
logger = get_task_logger(__name__)
#Define maximum queue size
max_queue_size = 10
#Create a System object with a queue of length maximum_queue_size
qSys = System(max_queue_size)
if fnmatch.fnmatch(sys.argv[0], "*celery"):
test_arg = False
for a in sys.argv:
if a == "-b":
test_arg = True
continue
if test_arg:
redis_port_arg = a
break
# worker_port = int(sys.argv[5].split(":")[2].split("/")[0])
worker_port = int(redis_port_arg.split(":")[2].split("/")[0])
red = redis.StrictRedis(host='localhost', port=worker_port, db=0)
#Global variable for base filepath
#initialised as /user/data
# plot_file = os.path.dirname(os.path.realpath(__file__))+'/plots.py'
config_file = os.path.dirname(os.path.realpath(__file__))+'/config.init'
primer_folder = os.path.dirname(os.path.realpath(__file__))+'/primer-schemes'
with open(config_file) as f:
data = json.load(f)
input_filepath = data['data-folder']
sample_csv = data['sample-barcode-csvs']
schemes = {}
# nCoV-2019 schemes
schemes['nCoV_2019_eden_V1_scheme'] = os.path.join(primer_folder, "eden")
schemes['nCoV_2019_eden_V1_scheme_name'] = "nCoV-2019/V1"
schemes['nCoV_2019_midnight_V1_scheme'] = os.path.join(primer_folder, "midnight")
schemes['nCoV_2019_midnight_V1_scheme_name'] = "nCoV-2019/V1"
schemes['nCoV_2019_artic_V1_scheme'] = os.path.join(primer_folder, "artic")
schemes['nCoV_2019_artic_V1_scheme_name'] = "nCoV-2019/V1"
schemes['nCoV_2019_artic_V2_scheme'] = os.path.join(primer_folder, "artic")
schemes['nCoV_2019_artic_V2_scheme_name'] = "nCoV-2019/V2"
schemes['nCoV_2019_artic_V3_scheme'] = os.path.join(primer_folder, "artic")
schemes['nCoV_2019_artic_V3_scheme_name'] = "nCoV-2019/V3"
schemes['nCoV_2019_artic_V4_scheme'] = os.path.join(primer_folder, "artic")
schemes['nCoV_2019_artic_V4_scheme_name'] = "nCoV-2019/V4"
# ZaireEbola schemes
schemes['IturiEBOV_artic_V1_scheme'] = os.path.join(primer_folder, "artic")
schemes['IturiEBOV_artic_V1_scheme_name'] = "IturiEBOV/V1"
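# Note: entries in `schemes` come in pairs -- "<key>_scheme" holds the
# primer-scheme directory on disk and "<key>_scheme_name" the "<virus>/<version>"
# label handed to the artic pipeline; the dict is passed straight through to the
# parameters template (schemes=schemes) further below.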
@app.route('/getCheckTasksUrl', methods = ['POST'])
def getCheckTasksUrl():
return jsonify({}), 202, {'Location': url_for('checkTasks')}
@app.route('/checkTasks')
def checkTasks():
queueList = []
completedList = []
changed = False
for job in qSys.queue.getItems():
if job.task_id:
task = executeJob.AsyncResult(job.task_id)
if task.ready():
qSys.moveJobToComplete(job.job_name)
changed = True
#Don't add this job to queueList (we don't want it to display in the queue)
continue
queueList.append({job.job_name : url_for('progress', job_name=job.job_name, task_id = job.task_id)})
for job in qSys.completed:
completedList.append({job.job_name : url_for('delete', job_name=job.job_name)})
queueDict = {'jobs': queueList}
for key, value in queueDict.items():
print(key, value)
completedDict = {'jobs': completedList}
for key, value in completedDict.items():
print(key, value)
return json.htmlsafe_dumps({'changed': changed, 'queue': queueDict, 'completed': completedDict})
def check_override(output_folder, override_data, skip):
print("Checking output folder:::", output_folder)
if(not os.path.exists(output_folder)):
if skip > 0:
return False
return True
dir_files = os.listdir(output_folder)
if len(dir_files) > 1 and override_data is False:
if skip > 0:
return False
return True
elif len(dir_files) == 1 and dir_files[0] == "all_cmds_log.txt":
print("checking files:::",dir_files)
if os.path.getsize(output_folder+"/all_cmds_log.txt") > 0:
if skip > 0:
return False
return True
return False
@celery.task(bind=True)
def executeJob(self, job_name, gather_cmd, guppyplex_cmd, demult_cmd, min_cmd, plot_cmd, step):
logger.info("In celery task, executing job...")
logger.info("executing job_name: {}".format(job_name))
logger.info("Starting from step: {}".format(step))
# Step is a debug command to start at 0, 1, 2, 3 in the commands list with
# an existing job_name, as it should build all the commands as usual
    # but not execute them, so if I just want to do plots, I can use step=3
# group ID to kill children
# {"job_name": #####}
Anakin = {}
self.update_state(state='PROGRESS', meta={'current':10, 'status':'Beginning execution'})
if guppyplex_cmd != "":
sys.stderr.write("guppyplex_cmd detected\n")
commands = [guppyplex_cmd, min_cmd, plot_cmd]
else:
sys.stderr.write("guppyplex_cmd NOT detected\n")
# sys.stderr.write(guppyplex_cmd+"\n")
commands = [gather_cmd, demult_cmd, min_cmd, plot_cmd]
for i, cmd in enumerate(commands[step:]):
po = subprocess.Popen(cmd, shell=True, preexec_fn=os.setsid,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
Anakin[job_name] = po.pid
rval = json.dumps(Anakin)
red.set(str(job_name), rval)
# k = ["{}: {}".format(key, Anakin[key]) for key in Anakin.keys()]
# sys.stderr.write(",".join(k))
# sys.stderr.write("\n")
stdout, stderr = po.communicate()
#self.update_state(state='PROGRESS')
po.wait()
if i == 0:
status = "Successfully ran gather"
n = 50
else:
status = "Successfully ran minion"
n = 90
self.update_state(state='PROGRESS', meta={'current': n, 'status': status, 'command': cmd})
returnCode = po.returncode
if returnCode != 0:
self.update_state(state='FAILURE', meta={'exc_type': "STAND IN TYPE", 'exc_message': traceback.format_exc().split('\n'), 'current': n, 'status': 'Command failed', 'command': cmd})
raise Exception("Command {} got return code {}.\nSTDOUT: {}\nSTDERR: {}".format(cmd, returnCode, stdout, stderr))
break
print("JOB CMD {} RETURNED: {}".format(cmd, returnCode))
self.update_state(state='FINISHED', meta={'current': 100, 'status': 'Finishing', 'result': returnCode}) #Don't know if this is actually used
return {'current': 100, 'total': 100, 'status': 'Task completed!', 'result': returnCode}
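# Note: each command above is launched with preexec_fn=os.setsid, so the shell
# and its children share one process group whose id is stored in redis keyed by
# the job name; killJob() below reads that id back and signals the whole group
# with os.killpg(), which is how a running job can be aborted part-way through
# its command list.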
@celery.task(bind=True)
def killJob(self, job_name):
logger.info("In celery task, executing job...")
logger.info("killing job_name: {}".format(job_name))
pidss = red.get(str(job_name))
Anakin = json.loads(pidss)
if not Anakin:
sys.stderr.write("ANAKIN EMPTY!!!\n")
else:
# k = ["{}: {}".format(key, Anakin[key]) for key in Anakin.keys()]
# sys.stderr.write(",".join(k))
try:
# k = ["{}: {}".format(key, Anakin[key]) for key in Anakin.keys()]
# sys.stderr.write(",".join(k))
group_pid = Anakin[job_name]
sys.stderr.write("killing PID: {}\n".format(group_pid))
os.killpg(group_pid, signal.SIGTERM)
except:
traceback.print_exc()
sys.stderr.write("killJob FAILED - 1")
sys.stderr.write("\n")
return 1
sys.stderr.write("killJob SUCCESS - 0")
sys.stderr.write("\n")
return 0
sys.stderr.write("killJob FAILED (ANAKIN EMPTY) - 1")
sys.stderr.write("\n")
return 1
@celery.task(bind=True)
def getVersions(self, from_file=False):
root = os.path.join(os.path.dirname(os.path.realpath(__file__)))
bin_path = os.path.join(root, "artic_bin", "bin")
logger.info("In celery task, getting software versions...")
version_file = os.path.join(root, "version_dump.txt")
logger.info(version_file)
version_dic = {"interartic": VERSION,
"artic": "UNKNOWN",
"medaka": "UNKNOWN",
"nanopolish": "UNKNOWN",
"minimap2": "UNKNOWN",
"samtools": "UNKNOWN",
"bcftools": "UNKNOWN",
"muscle": "UNKNOWN",
"longshot": "UNKNOWN"}
def _versions():
cmd_list = ["{}/artic --version | cut -d ' ' -f2 | awk '{}' >> {}".format(bin_path, '{print "artic " $1}', os.path.join(root, "version_dump.txt")),
"{}/medaka --version | cut -d ' ' -f2 | awk '{}' >> {}".format(bin_path, '{print "medaka " $1}', os.path.join(root, "version_dump.txt")),
"{}/nanopolish --version | head -1 | cut -d ' ' -f3 | awk '{}' >> {}".format(bin_path, '{print "nanopolish " $1}', os.path.join(root, "version_dump.txt")),
"{}/minimap2 --version | awk '{}' >> {}".format(bin_path, '{print "minimap2 " $1}', os.path.join(root, "version_dump.txt")),
"{}/samtools --version | head -1 | cut -d ' ' -f2 | awk '{}' >> {}".format(bin_path, '{print "samtools " $1}', os.path.join(root, "version_dump.txt")),
"{}/bcftools --version | head -1 | cut -d ' ' -f2 | awk '{}' >> {}".format(bin_path, '{print "bcftools " $1}', os.path.join(root, "version_dump.txt")),
"{}/muscle --version 2>&1 | head -3 | tail -1 | cut -d ' ' -f2 | awk '{}' >> {}".format(bin_path, '{print "muscle " $1}', os.path.join(root, "version_dump.txt")),
"{}/longshot --version 2>&1 | tail -1 | cut -d ' ' -f2 | awk '{}' >> {}".format(bin_path, '{print "longshot " $1}', os.path.join(root, "version_dump.txt")),
]
for cmd in cmd_list:
os.system(cmd)
if not from_file:
if os.path.isfile(version_file):
# remove and re-make
logger.info("File found and deleting")
os.remove(version_file)
logger.info("File being re-made")
_versions()
if not os.path.isfile(version_file):
logger.info("File not found, building..")
_versions()
# final check to see if the file was made
if not os.path.isfile(version_file):
# error making file
logger.info("File STILL not found, error")
flash("WARNING: Could not construct software version table in: {}".format(version_file))
if os.path.isfile(version_file):
# read file
logger.info("Reading file")
with open(version_file, 'r') as f:
for l in f:
l = l.strip("\n")
l = l.split(" ")
if len(l) > 1:
name = l[0]
version = l[1]
if name in list(version_dic.keys()):
if version == "1:":
continue
version_dic[name] = version
else:
if os.path.isfile(version_file):
# read file
logger.info("Reading file")
with open(version_file, 'r') as f:
for l in f:
l = l.strip("\n")
l = l.split(" ")
if len(l) > 1:
name = l[0]
version = l[1]
if name in list(version_dic.keys()):
if version == "1:":
continue
version_dic[name] = version
return version_dic
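# Note: getVersions() probes each bundled binary under artic_bin/bin, appends
# "name version" lines to version_dump.txt and parses that file into
# version_dic; calling it with from_file=True (as the /about page does while
# jobs are running) only re-reads an existing dump instead of shelling out to
# the tools again.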
@app.route('/task/<job_name>', methods = ['POST'])
def task(job_name):
job = qSys.getJobByName(job_name)
return jsonify({}), 202, {'Location': url_for('task_status', task_id = job.task_id, job_name = job.job_name)}
@app.route('/status/<task_id>')
def task_status(task_id):
task = executeJob.AsyncResult(task_id)
print("TASK.READY: ", task.ready())
if task.state == 'PENDING':
response = {
'state': task.state,
'current': 0,
'total': 100,
'status': 'Pending...'
}
elif task.state != 'FAILURE':
response = {
'state': task.state,
'current': task.info.get('current', 0),
'total': 100,
'status': task.info.get('status', '')
}
if 'result' in task.info:
response['result'] = task.info['result']
else:
# something went wrong in the background job
response = {
'state': task.state,
'current': 1,
'total': 100,
'status': str(task.info), # this is the exception raised
}
return json.htmlsafe_dumps(response)
@app.route("/")
def route():
return redirect(url_for('home'))
@app.route("/home",methods = ["POST", "GET"])
def home():
errors = {}
if request.method == "POST":
# get global variables
search_input = request.form.get('file_path')
search_csv = request.form.get('csv_folder')
# error checking here
if not os.path.isdir(search_input):
errors['invalid_input_file_path'] = "File path entered is not valid"
if not os.path.isdir(search_csv):
errors['invalid_csv_file_path'] = "File path entered is not valid"
# sys.stderr.write("errors:\n")
# k = ["{}: {}".format(key, errors[key]) for key in errors.keys()]
# sys.stderr.write(",".join(k))
# sys.stderr.write("\n")
# sys.stderr.write("search_input: {}\n".format(request.form.get('search_input')))
# sys.stderr.write("add_job: {}\n".format(request.form.get('add_job')))
if request.form.get('search_input') == 'Confirm':
if len(errors) != 0:
return render_template("home.html", input_folder=search_input, errors=errors, csv_folder=search_csv, search_csv=search_csv, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
global input_filepath
input_filepath = search_input
global sample_csv
sample_csv = search_csv
# Save config if paths all work
with open(os.path.dirname(os.path.realpath(__file__))+"/config.init", 'w') as c:
c.write("{\n")
c.write('\t"data-folder": "{}",\n'.format(search_input))
c.write('\t"sample-barcode-csvs": "{}"'.format(search_csv))
c.write('}\n')
if request.form.get('add_job') == "Add Job":
if len(errors) != 0:
flash("WARNING:File paths entered are not valid")
return render_template("home.html", input_folder=search_input, errors=errors, csv_folder=search_csv, search_csv=search_csv, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
else:
return redirect(url_for('parameters'))
# return render_template("home.html", input_folder=input_filepath, csv_folder=sample_csv, eden_folder=schemes['eden_scheme'], eden_name=schemes['eden_scheme_name'], midnight_folder=schemes['midnight_scheme'], midnight_name=schemes['midnight_scheme_name'], artic_folder=schemes['artic_scheme'], artic_name=schemes['artic_scheme_name'])
return render_template("home.html", input_folder=input_filepath, csv_folder=sample_csv, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
@app.route("/about")
def about():
# Get version info to display on about page
tasks_running = False
for job in qSys.queue.getItems():
if job.task_id:
tasks_running = True
if tasks_running:
res = getVersions(from_file=True)
version_dic = res
else:
res = getVersions.delay()
version_dic = res.get()
return render_template("about.html", VERSION_DIC=version_dic, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
def check_special_characters(func):
@functools.wraps(func)
def wraper_check_char(*args, **kwargs):
"""
check input args for special characters
return error dic handled after call
"""
def _detect_special_characer(pass_string, filename=False):
ret = False
if filename:
# regex= re.compile('^[a-zA-Z0-9._/-]+$')
char_set = set("<>,?:;|}{][+=)(*&^%$#@! ")
for e in pass_string:
if e in char_set:
sys.stderr.write("this character fails: {}\n".format(e))
ret = True
else:
# regex= re.compile('^[a-zA-Z0-9_/-]+$')
char_set = set("<>,?:;|}{][+=)(*&^%$#@! ")
for e in pass_string:
if e in char_set:
sys.stderr.write("this character fails: {}\n".format(e))
ret = True
return ret
# gets names of arguments
        args_name = inspect.getfullargspec(func)[0]
# argnames: values into dic
args_dict = dict(zip(args_name, args))
errors = {}
for arg in args_dict:
a = args_dict[arg]
if a:
# sys.stderr.write(str(a))
# sys.stderr.write("\n")
if arg == "csv_filepath":
if _detect_special_characer(str(a), filename=True):
errors["char_error_{}".format(arg)] = "Invalid character in {}: ' {} ', please use utf-8 alpha/numerical or . _ - /".format(arg, str(a))
continue
if _detect_special_characer(str(a)):
errors["char_error_{}".format(arg)] = "Invalid character in {}: ' {} ', please use utf-8 alpha/numerical or _ - /".format(arg, str(a))
if len(errors) != 0:
return errors, args[1]
return func(*args, **kwargs)
return wraper_check_char
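# Note: the decorator above inspects every argument of the wrapped function for
# characters outside the allowed set and, on failure, returns
# (errors, output_folder) immediately without calling the function, so
# checkInputs() below only runs when its arguments pass the character check.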
@check_special_characters
def checkInputs(input_folder, output_folder, primer_scheme_dir, read_file, pipeline, override_data, min_length, max_length, job_name, output_input, csv_filepath, skip, num_samples):
errors = {}
#Check of jobname is used
if qSys.getJobByName(job_name) is not None:
errors['job_name'] = "Job Name has already been used."
flash("Warning: Job Name has already been used.")
return errors, output_folder
if not input_folder:
errors['input_folder'] = "Input Directory does not exist"
flash("Warning: Input folder does not exist, please check input and try again")
return errors, output_folder
if num_samples == "multiple":
if not os.path.isfile(csv_filepath):
errors['csv_file'] = "csv file does not exist"
flash("Warning: CSV file does not exist, please check input and try again")
return errors, output_folder
#give error if input folder path is empty
if len(os.listdir(input_folder)) == 0:
errors['input_folder'] = "Directory is empty."
flash("Warning: Input folder contains no data, please check input and try again")
return errors, output_folder
io_check = output_input.strip("/")
if_check = input_folder.strip("/")
sys.stderr.write("io_check: {}\n".format(io_check))
sys.stderr.write("if_check: {}\n".format(if_check))
if io_check == if_check:
errors['input_output_folder'] = "Output directory will be in the same folder as data"
flash("Warning: Output directory will be in the same folder as data, please check data structure info in documentation.")
return errors, output_folder
#if no output folder entered, creates one inside of input folder
if not output_folder and not os.path.isdir(output_input):
errors['input_output_folder'] = "Input and output don't exist"
flash("Warning: Input and output don't exist!")
return errors, output_folder
elif not output_folder and os.path.isdir(output_input):
output_folder = output_input + "/output"
elif output_folder and os.path.isdir(output_input):
if output_folder[0] == "/":
check_out = "/".join(output_folder.split("/")[:-1])
if not os.path.isdir(check_out):
errors['output_folder'] = "Parent directory of new output folder ( {} ) does not exist".format(check_out)
flash("Warning: Parent directory of new output folder ( {} ) does not exist".format(check_out))
return errors, output_folder
else:
output_folder = output_input + "/" + output_folder
else:
        errors['input_folder'] = "Input folder does not exist, please check: {}".format(output_input)
        flash("Warning: Input folder does not exist, please check: {}".format(output_input))
return errors, output_folder
if output_folder[-1] == "/":
output_folder = output_folder[:-1]
if primer_scheme_dir[-1] == "/":
primer_scheme_dir = primer_scheme_dir[:-1]
#give error if primer schemes folder path is invalid or empty
if not os.path.isdir(primer_scheme_dir):
errors['primer_scheme_dir'] = "Invalid path."
flash("Warning: primer_scheme_dir does not exist, pleas check: {}".format(primer_scheme_dir))
return errors, output_folder
elif len(os.listdir(primer_scheme_dir)) == 0:
errors['primer_scheme_dir'] = "Directory is empty."
flash("Warning: Primer_scheme_dir is empty, pleas check: {}".format(primer_scheme_dir))
return errors, output_folder
#if read file is specified by user
if read_file:
if not os.path.isfile(read_file):
errors['read_file'] = "Invalid path/file."
else:
#to be filled later
read_file = ""
# if pipeline in ["both", "nanopolish"]:
# # check for sequencing summary file for nanopolish
# seq_sum_found = False
# for file in os.listdir(input_folder):
# if fnmatch.fnmatch(file, "*sequencing_summary*.txt"):
# seq_sum_found = True
# if not seq_sum_found:
# flash("Warning: sequencing_summary.txt file not found in input folder structure")
# errors['input_folder'] = "sequencing_summary.txt file not found"
# return errors, output_folder
#both pipelines running
if pipeline == "both":
# TODO: check all os.system() calls
if not os.path.exists(output_folder):
make_dir = 'mkdir ' + output_folder
if os.system(make_dir) != 0:
errors['mkdir'] = "Failed to create output directory, please check parent path exists and has write permission"
flash("Warning: Failed to create output directory, please check parent path exists and has write permission")
return errors, output_folder
if override_data is True and os.path.exists(output_folder):
# remove = "rm -r " + output_folder + "/all_cmds_log.txt"
remove = "rm -r " + output_folder
if os.system(remove) !=0:
                errors['remove_folder'] = "Could not delete output_directory"
flash("Warning: Could not delete {}".format(output_folder))
return errors, output_folder
make_dir = 'mkdir ' + output_folder
if os.system(make_dir) != 0:
errors['mkdir'] = "Failed to create output directory, please check parent path exists and has write permission"
flash("Warning: Failed to create output directory, please check parent path exists and has write permission")
return errors, output_folder
elif check_override(output_folder, override_data, skip) and os.path.exists(output_input):
errors['override'] = True
flash("Warning: Output folder is NOT empty. Please choose another folder or delete/move files in it.")
return errors, output_folder
#if the output folder does not exist, it is created
if not os.path.exists(output_folder + "/medaka"):
# make_dir = 'mkdir ' + output_folder
# if os.system(make_dir) != 0:
# errors['mkdir_m1'] = "Failed to create output directory, please check parent path exists and has write permission"
# flash("Warning: Could not mkdir {}".format(output_folder))
# return errors, output_folder
make_dir_m = 'mkdir ' + output_folder + '/medaka'
if os.system(make_dir_m) != 0:
errors['mkdir_m2'] = "Failed to create medaka directory, please check parent path exists and has write permission"
flash("Warning: Could not mkdir {}/medaka".format(output_folder))
return errors, output_folder
#if the output folder does not exist, it is created
if not os.path.exists(output_folder + "/nanopolish"):
# make_dir = 'mkdir ' + output_folder
# # os.system(make_dir)
# if os.system(make_dir) != 0:
# errors['mkdir_n1'] = "Failed to create output directory, please check parent path exists and has write permission"
# flash("Warning: Could not mkdir {}".format(output_folder))
# return errors, output_folder
make_dir_n = 'mkdir ' + output_folder + '/nanopolish'
if os.system(make_dir_n) != 0:
errors['mkdir_n2'] = "Failed to create nanopolish directory, please check parent path exists and has write permission"
flash("Warning: Could not mkdir {}/nanopolish".format(output_folder))
return errors, output_folder
if check_override(output_folder + "/medaka", override_data, skip) and os.path.exists(output_input):
flash("Warning: Output folder is NOT empty. Please choose another folder or delete/move files in it.")
errors['override'] = True
return errors, output_folder
if check_override(output_folder + "/nanopolish", override_data, skip) and os.path.exists(output_input):
flash("Warning: Output folder is NOT empty. Please choose another folder or delete/move files in it.")
errors['override'] = True
return errors, output_folder
# Make empty log file for initial progress rendering
make_log_m = 'touch \"' + output_folder + '\"/medaka/all_cmds_log.txt'
make_log_n = 'touch \"' + output_folder + '\"/nanopolish/all_cmds_log.txt'
if os.system(make_log_m) != 0:
errors['touch_m'] = "Failed to write to output directory, please check path exists and has write permission"
flash("Warning: Failed to write to output directory, please check path exists and has write permission")
return errors, output_folder
if os.system(make_log_n) != 0:
errors['touch_n'] = "Failed to write to output directory, please check path exists and has write permission"
flash("Warning: Failed to write to output directory, please check path exists and has write permission")
return errors, output_folder
else:
#TODO: if not "both" still make the folders medaka | nanopolish based on selection
#if the output folder does not exist, it is created
if not os.path.exists(output_folder):
make_dir = 'mkdir ' + output_folder
if os.system(make_dir) != 0:
errors['mkdir'] = "Failed to create output directory, please check parent path exists and has write permission"
flash("Warning: Failed to create output directory, please check parent path exists and has write permission")
return errors, output_folder
if override_data is True:
# remove = "rm -r " + output_folder + "/all_cmds_log.txt"
remove = "rm -r " + output_folder
if os.system(remove) !=0:
                errors['remove_folder'] = "Could not delete output_directory"
flash("Warning: Could not delete {}".format(output_folder))
return errors, output_folder
make_dir = 'mkdir ' + output_folder
if os.system(make_dir) != 0:
errors['mkdir'] = "Failed to create output directory, please check parent path exists and has write permission"
flash("Warning: Failed to create output directory, please check parent path exists and has write permission")
return errors, output_folder
elif check_override(output_folder, override_data, skip) and os.path.exists(output_input):
errors['override'] = True
flash("Warning: Output folder is NOT empty. Please choose another folder or delete/move files in it.")
return errors, output_folder
# Make empty log file for initial progress rendering
make_log = 'touch \"' + output_folder + '\"/all_cmds_log.txt'
if os.system(make_log) != 0:
errors['touch'] = "Failed to write to output directory, please check path exists and has write permission"
flash("Warning: Failed to create log file, please check parent path exists and has write permission")
return errors, output_folder
#check length parameters are valid
if min_length.isdigit() == False:
errors['invalid_length'] = "Invalid minimum length."
if max_length.isdigit() == False:
errors['invalid_length'] = "Invalid maximum and minimum length."
elif max_length.isdigit() == False:
errors['invalid_length'] = "Invalid maximum length."
elif int(max_length) < int(min_length):
errors['invalid_length'] = "Invalid parameters: Maximum length smaller than minimum length."
return errors, output_folder
def getInputFolders(filepath):
# find all the current input folders
checkFoldersCmd = "cd && cd " + filepath + " && ls"
print("check folders command")
print(checkFoldersCmd)
folders = subprocess.check_output(checkFoldersCmd, shell=True, stderr=subprocess.STDOUT).decode("utf8").split("\n")
# folders = subprocess.check_output(checkFoldersCmd, shell=True, stderr=subprocess.STDOUT).decode("ascii").split("\n")
return folders
@app.route("/parameters", methods = ["POST","GET"])
def parameters():
# get global variables for use
global input_filepath
global sample_csv
global schemes
# get a list of all the folders in the input and csv folders to be displayed to the user
folders = getInputFolders(input_filepath)
csvs = getInputFolders(sample_csv)
if request.method == "POST":
# get curr queue
queueList = []
if not qSys.queue.empty():
for item in qSys.queue.getItems():
queueList.append({item._job_name : url_for('progress', job_name=item._job_name, task_id = item._task_id)})
queueDict = {'jobs': queueList}
displayQueue = json.htmlsafe_dumps(queueDict)
#get parameters
job_name = request.form.get('job_name')
input_folder = request.form.get('input_folder')
read_file = request.form.get('read_file')
primer_scheme_dir = request.form.get('primer_scheme_dir')
primer_scheme = request.form.get('primer_scheme')
primer_type = request.form.get('primer_type')
other_primer_type = request.form.get('other_primer_type')
output_folder = request.form.get('output_folder')
normalise = request.form.get('normalise')
num_threads = request.form.get('num_threads')
pipeline = request.form.get('pipeline')
num_samples = request.form.get('num_samples')
min_length = request.form.get('min_length')
max_length = request.form.get('max_length')
bwa = request.form.get('bwa')
skip_nanopolish = request.form.get('skip_nanopolish')
dry_run = request.form.get('dry_run')
# num_samples = request.form.get('num_samples')
guppyplex = request.form.get('guppyplex')
barcode_type = request.form.get('barcode_type')
csv_file = request.form.get('csv_file')
virus = request.form.get('virus')
override_data = request.form.get('override_data')
# DEBUG
step = int(request.form.get('step'))
sys.stderr.write("override_data: {}\n".format(override_data))
sys.stderr.write("guppyplex: {}\n".format(guppyplex))
        # set correct primer_type - if primer type is other, get the correct primer type from the text input
# primer_select is so that on reload, the correct radio button will be selected
primer_select = primer_type
if virus == 'custom':
if other_primer_type:
primer_type = other_primer_type
else:
primer_type = "Custom-primer-scheme"
# store input_name
input_name = input_folder
#csv filepath
csv_filepath = sample_csv + '/' + csv_file
# concat /data to input folder
# global input_filepath
input_folder = input_filepath + '/' + input_folder
filename = os.path.dirname(os.path.realpath(__file__))
# if no output folder entered, creates one inside of input folder
# Do this to put output above input folder to stop fastq cross talk
# if not output_folder:
# output_folder = input_folder + "/output"
# else:
# if output_folder[0] != "/":
# output_folder = input_folder + output_folder
if not os.path.isdir(input_folder):
input_folder = ""
output_input = ""
else:
os.chdir(input_folder)
tmp_oi = os.getcwd()
output_input = tmp_oi
# get the correct input folder filepath from user input
# path = glob.glob(input_folder + '/*/*')[0]
# use fnmatch with walk to get fastq_pass, fastq_fail folders
# then split off the last bit to get the top folder for the gather command
tmp_folder_list = []
for dName, sdName, fList in os.walk(input_folder):
for fileName in sdName:
if fnmatch.fnmatch(fileName, "fastq*"):
tmp_folder_list.append(os.path.join(dName, fileName))
elif fnmatch.fnmatch(fileName, "barcode*"):
tmp_folder_list.append(os.path.join(dName, fileName))
if len(tmp_folder_list) == 0:
queueList = []
flash("Warning: Could not locate fastq files in {}. Check the file names, demultiplexing options and the directory structure is compatible".format(input_folder))
errors = {}
if qSys.queue.empty():
return render_template("parameters.html", job_name=job_name, queue=None,
input_name=input_name, input_folder=input_folder,
output_folder=output_folder, virus=virus,
pipeline=pipeline, min_length=min_length,
max_length=max_length, primer_scheme=primer_scheme,
primer_type=primer_type, num_samples=num_samples,
primer_scheme_dir=primer_scheme_dir, guppyplex=guppyplex, barcode_type=barcode_type,
errors=errors, folders=folders, csvs=csvs, csv_name=csv_file,
other_primer_type=other_primer_type, primer_select=primer_select,
schemes=schemes, override_data=override_data, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
return render_template("parameters.html", job_name=job_name, queue=displayQueue,
input_name=input_name, input_folder=input_folder,
output_folder=output_folder, virus=virus,
pipeline=pipeline, min_length=min_length,
max_length=max_length, primer_scheme=primer_scheme,
primer_type=primer_type, num_samples=num_samples,
primer_scheme_dir=primer_scheme_dir, guppyplex=guppyplex, barcode_type=barcode_type,
errors=errors,folders=folders, csvs=csvs, csv_name=csv_file,
other_primer_type=other_primer_type, primer_select=primer_select,
schemes=schemes, override_data=override_data, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
# this takes the first encountered (so fastq_pass or a barcode folder depending on how the user demuxed or not)
# It looks at sdName above, not filenames!!!! so here when it splits, it keeps all BUT the sdName, so it gets the containing parent directory
tmp_path = tmp_folder_list[0].split("/")[:-1]
path = "/".join(tmp_path)
os.chdir(path)
input_folder = os.getcwd()
#if user agrees output can override files with the same name in output folder
if request.form.get('override_data'):
override_data = True
else:
override_data = False
# check errors
errors = {}
errors, output_folder_checked = checkInputs(input_folder, output_folder, primer_scheme_dir,
read_file, pipeline, override_data, min_length,
max_length, job_name, output_input, csv_filepath, step, num_samples)
# if an output folder does not exist, make one
# if not output_folder:
# output_folder = output_folder_checked
output_folder = output_folder_checked
# validate csv contents.
# No special characters -
# comma separated -
# 2 columns -
# 2nd column should have NB or RB or BC-
def _detect_special(pass_string):
regex= re.compile('^[a-zA-Z0-9,_-]+$')
if(regex.search(pass_string) == None):
ret = True
else:
ret = False
return ret
sys.stderr.write("checking CSV file: {}\n".format(csv_filepath))
if os.path.isfile(csv_filepath):
sys.stderr.write("csv file exists\n")
with open(csv_filepath, 'r') as c:
for l in c:
l = l.strip("\n")
if _detect_special(l):
flash("Warning: csv file malformed: special characters detected ")
errors['csv_malformed'] = "csv is malformed, special characters detected a-zA-Z0-9,_- only"
break
l = l.split(",")
if len(l) != 2:
errors['csv_malformed'] = "csv is malformed, more or less than 2 columns"
flash("Warning: csv file malformed: more or less than 2 columns")
break
else:
if l[1][:2] not in ["NB", "RB", "BC"]:
errors['csv_malformed'] = "csv is malformed, not NB or RB or BC for barcode"
flash("Warning: csv file malformed: not NB or RB or BC for barcode")
break
sys.stderr.write("printing errors:\n")
k = ["{}: {}".format(key, errors[key]) for key in errors.keys()]
sys.stderr.write(",".join(k))
sys.stderr.write("\n")
# if queue is full, add an error to the list
if qSys.queue.full():
errors['full_queue'] = "Job queue is full."
# display errors if errors exist
if len(errors) != 0:
# k = ["{}: {}".format(key, errors[key]) for key in errors.keys()]
# sys.stderr.write(",".join(k))
# sys.stderr.write("\n")
#Update displayed queue on home page
queueList = []
if qSys.queue.empty():
return render_template("parameters.html", job_name=job_name, queue=None,
input_name=input_name, input_folder=input_folder,
output_folder=output_folder, virus=virus,
pipeline=pipeline, min_length=min_length,
max_length=max_length, primer_scheme=primer_scheme,
primer_type=primer_type, num_samples=num_samples,
primer_scheme_dir=primer_scheme_dir, guppyplex=guppyplex, barcode_type=barcode_type,
errors=errors, folders=folders, csvs=csvs, csv_name=csv_file,
other_primer_type=other_primer_type, primer_select=primer_select,
schemes=schemes, override_data=override_data, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
return render_template("parameters.html", job_name=job_name, queue=displayQueue,
input_name=input_name, input_folder=input_folder,
output_folder=output_folder, virus=virus,
pipeline=pipeline, min_length=min_length,
max_length=max_length, primer_scheme=primer_scheme,
primer_type=primer_type, num_samples=num_samples,
primer_scheme_dir=primer_scheme_dir, guppyplex=guppyplex, barcode_type=barcode_type,
errors=errors,folders=folders, csvs=csvs, csv_name=csv_file,
other_primer_type=other_primer_type, primer_select=primer_select,
schemes=schemes, override_data=override_data, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
#no spaces in the job name - messes up commands
job_name = job_name.replace(" ", "_")
# create new jobs
if pipeline != "both":
#Create a new instance of the Job class
new_job = qSys.newJob(job_name, input_folder, read_file, primer_scheme_dir, primer_scheme, primer_type, output_folder, normalise, num_threads, pipeline, min_length, max_length, bwa, skip_nanopolish, dry_run, override_data, num_samples, guppyplex, barcode_type, input_name, csv_filepath, primer_select, input_name)
#Add job to queue
qSys.addJob(new_job)
print("qSys has jobs: ", qSys.printQueue())
new_task = executeJob.apply_async(args=[new_job.job_name, new_job.gather_cmd, new_job.guppyplex_cmd, new_job.demult_cmd, new_job.min_cmd, new_job.plot_cmd, step])
new_job.task_id = new_task.id
#if both pipelines
else:
#Create a new medaka instance of the Job class
new_job_m = qSys.newJob(job_name + "_medaka", input_folder, read_file, primer_scheme_dir, primer_scheme, primer_type, output_folder + "/medaka", normalise, num_threads, "medaka", min_length, max_length, bwa, skip_nanopolish, dry_run, override_data, num_samples, guppyplex, barcode_type, input_name, csv_filepath, primer_select, input_name)
#Create a new nanopolish instance of the Job class
new_job_n = qSys.newJob(job_name + "_nanopolish", input_folder, read_file, primer_scheme_dir, primer_scheme, primer_type, output_folder + "/nanopolish", normalise, num_threads, "nanopolish", min_length, max_length, bwa, skip_nanopolish, dry_run, override_data, num_samples, guppyplex, barcode_type, input_name, csv_filepath, primer_select, input_name)
#Add medaka job to queue
qSys.addJob(new_job_m)
task_m = executeJob.apply_async(args=[new_job_m.job_name, new_job_m.gather_cmd, new_job_m.guppyplex_cmd, new_job_m.demult_cmd, new_job_m.min_cmd, new_job_m.plot_cmd, step])
new_job_m.task_id = task_m.id
#Add nanopolish job to queue
qSys.addJob(new_job_n)
task_n = executeJob.apply_async(args=[new_job_n.job_name, new_job_n.gather_cmd, new_job_n.guppyplex_cmd, new_job_n.demult_cmd, new_job_n.min_cmd, new_job_n.plot_cmd, step])
new_job_n.task_id = task_n.id
# redirect to the progress page
if pipeline == "both":
return redirect(url_for('progress', job_name=job_name+"_medaka"))
else:
return redirect(url_for('progress', job_name=job_name))
#Update displayed queue on home page
queueList = []
if qSys.queue.empty():
return render_template("parameters.html", queue=None, folders=folders, csvs=csvs, schemes=schemes, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
for item in qSys.queue.getItems():
queueList.append({item._job_name : url_for('progress', job_name=item._job_name, task_id = item._task_id)})
queueDict = {'jobs': queueList}
displayQueue = json.htmlsafe_dumps(queueDict)
return render_template("parameters.html", queue = displayQueue, folders=folders, csvs=csvs, schemes=schemes, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
# error page, accessed if a user wants to re-run a job if an error occurs during a run
# @app.route("/error/<job_name>", methods = ["POST","GET"])
# def error(job_name):
# # get the job that needs to be re-run
# job = qSys.getJobByName(job_name)
#
# # get global variables
# global input_filepath
# global sample_csv
# folders = getInputFolders(input_filepath)
# csvs = getInputFolders(sample_csv)
#
# # if the job exists, get all the parameters used in the initial run so that they can be rendered for the user
# if job != None:
# input_folder = job.input_folder
# input_name = job.input_name
# output_folder = job.output_folder
# read_file = job.read_file
# pipeline = job.pipeline
# min_length = job.min_length
# max_length = job.max_length
# primer_select = job.primer_select
# primer_scheme = job.primer_scheme
# primer_scheme_dir = job.primer_scheme_dir
# primer_type = job.primer_type
# num_samples = job.num_samples
# barcode_type = job.barcode_type
# # abort existing job
# task = job.task_id
# blank = killJob.apply_async(args=[job_name])
# celery.control.revoke(task, terminate=True, signal='SIGKILL')
# qSys.removeQueuedJob(job_name)
#
# if request.method == "POST":
# #get parameters
# job_name = request.form.get('job_name')
# input_folder = request.form.get('input_folder')
# read_file = request.form.get('read_file')
# primer_scheme_dir = request.form.get('primer_scheme_dir')
# primer_scheme = request.form.get('primer_scheme')
# primer_type = request.form.get('primer_type')
# other_primer_type = request.form.get('other_primer_type')
# output_folder = request.form.get('output_folder')
# normalise = request.form.get('normalise')
# num_threads = request.form.get('num_threads')
# pipeline = request.form.get('pipeline')
# num_samples = request.form.get('num_samples')
# min_length = request.form.get('min_length')
# max_length = request.form.get('max_length')
# bwa = request.form.get('bwa')
# skip_nanopolish = request.form.get('skip_nanopolish')
# dry_run = request.form.get('dry_run')
# # num_samples = request.form.get('num_samples')
# barcode_type = request.form.get('barcode_type')
# csv_file = request.form.get('csv_file')
# virus = request.form.get('virus')
# override_data = request.form.get('override_data')
# step = int(request.form.get('step'))
#
# # set correct primer_type - if primer type is other, get the correct primer type from the tet input
# # primer_select is so that on reload, the correct radio button will be selected
# primer_select = primer_type
#
# if virus == 'custom':
# if other_primer_type:
# primer_type = other_primer_type
# else:
# primer_type = "Custom-primer-scheme"
#
#
# # store input_name
# input_name = input_folder
#
# #csv filepath
# csv_filepath = sample_csv + '/' + csv_file
#
# # concat /data to input folder
# input_folder = input_filepath + '/' + input_folder
# filename = os.path.dirname(os.path.realpath(__file__))
# if not os.path.isdir(input_folder):
# input_folder = ""
# output_input = ""
# else:
# os.chdir(input_folder)
# tmp_oi = os.getcwd()
# output_input = tmp_oi
#
# # get the correct input folder filepath from user input
# # path = glob.glob(input_folder + '/*/*')[0]
# # use fnmatch with walk to get fastq_pass, fastq_fail folders
# # then split off the last bit to get the top folder for the gather command
# tmp_folder_list = []
# for dName, sdName, fList in os.walk(input_folder):
# for fileName in sdName:
# if fnmatch.fnmatch(fileName, "fastq*"):
# tmp_folder_list.append(os.path.join(dName, fileName))
# tmp_path = tmp_folder_list[0].split("/")[:-1]
# path = "/".join(tmp_path)
# os.chdir(path)
# input_folder = os.getcwd()
#
# #if user agrees output can override files with the same name in output folder
# if request.form.get('override_data'):
# override_data = True
# else:
# override_data = False
#
# # check errors
# errors = {}
# errors, output_folder_checked = checkInputs(input_folder, output_folder, primer_scheme_dir,
# read_file, pipeline, override_data, min_length,
# max_length, job_name, output_input, csv_filepath, step, num_samples)
#
# # if an output folder does not exist, make one
# # if not output_folder:
# # output_folder = output_folder_checked
#
# output_folder = output_folder_checked
#
# # if queue is full, add an error to the list
# if qSys.queue.full():
# errors['full_queue'] = "Job queue is full."
#
# # display errors if errors exist
# if len(errors) != 0:
# #Update displayed queue on home page
# queueList = []
# if qSys.queue.empty():
# return render_template("parameters.html", job_name=job_name, queue=None,
# input_name=input_name, input_folder=input_folder,
# output_folder=output_folder, virus=virus,
# pipeline=pipeline, min_length=min_length,
# max_length=max_length, primer_scheme=primer_scheme,
# primer_type=primer_type, num_samples=num_samples,
# primer_scheme_dir=primer_scheme_dir, barcode_type=barcode_type,
# errors=errors, folders=folders, csvs=csvs, csv_name=csv_file,
# other_primer_type=other_primer_type, primer_select=primer_select,
# schemes=schemes, override_data=override_data, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION)
# for item in qSys.queue.getItems():
# queueList.append({item.job_name : url_for('progress', job_name=item.job_name, task_id = item.task_id)})
#
# queueDict = {'jobs': queueList}
# displayQueue = json.htmlsafe_dumps(queueDict)
#
# return render_template("parameters.html", job_name=job_name, queue=None,
# input_name=input_name, input_folder=input_folder,
# output_folder=output_folder, virus=virus,
# pipeline=pipeline, min_length=min_length,
# max_length=max_length, primer_scheme=primer_scheme,
# primer_type=primer_type, num_samples=num_samples,
# primer_scheme_dir=primer_scheme_dir, barcode_type=barcode_type,
# errors=errors, folders=folders, csvs=csvs, csv_name=csv_file,
# other_primer_type=other_primer_type, primer_select=primer_select,
# schemes=schemes, override_data=override_data, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION)
#
# #no spaces in the job name - messes up commands
# job_name = job_name.replace(" ", "_")
#
# # create new jobs
# if pipeline != "both":
# #Create a new instance of the Job class
# new_job = qSys.newJob(job_name, input_folder, read_file, primer_scheme_dir, primer_scheme, primer_type, output_folder, normalise, num_threads, pipeline, min_length, max_length, bwa, skip_nanopolish, dry_run, override_data, num_samples, barcode_type, input_name, csv_filepath, primer_select, input_name)
#
# #Add job to queue
# qSys.addJob(new_job)
# print("qSys has jobs: ", qSys.printQueue())
# new_task = executeJob.apply_async(args=[new_job.job_name, new_job.gather_cmd, new_job.demult_cmd, new_job.min_cmd, new_job.plot_cmd, step])
# new_job.task_id = new_task.id
#
# #if both pipelines
# else:
# #Create a new medaka instance of the Job class
# new_job_m = qSys.newJob(job_name + "_medaka", input_folder, read_file, primer_scheme_dir, primer_scheme, primer_type, output_folder + "/medaka", normalise, num_threads, "medaka", min_length, max_length, bwa, skip_nanopolish, dry_run, override_data, num_samples,barcode_type, input_name, csv_filepath, primer_select, input_name)
# #Create a new nanopolish instance of the Job class
# new_job_n = qSys.newJob(job_name + "_nanopolish", input_folder, read_file, primer_scheme_dir, primer_scheme, primer_type, output_folder + "/nanopolish", normalise, num_threads, "nanopolish", min_length, max_length, bwa, skip_nanopolish, dry_run, override_data, num_samples,barcode_type, input_name, csv_filepath, primer_select, input_name)
#
# #Add medaka job to queue
# qSys.addJob(new_job_m)
# task_m = executeJob.apply_async(args=[new_job_m.job_name, new_job_m.gather_cmd, new_job_m.demult_cmd, new_job_m.min_cmd, new_job_m.plot_cmd, step])
# new_job_m.task_id = task_m.id
# #Add nanopolish job to queue
# qSys.addJob(new_job_n)
# task_n = executeJob.apply_async(args=[new_job_n.job_name, new_job_n.gather_cmd, new_job_n.demult_cmd, new_job_n.min_cmd, new_job_n.plot_cmd, step])
# new_job_n.task_id = task_n.id
# if pipeline == "both":
# return redirect(url_for('progress', job_name=job_name+"_medaka"))
# else:
# return redirect(url_for('progress', job_name=job_name))
#
# #Update displayed queue on home page
# queueList = []
# if qSys.queue.empty():
# return render_template("parameters.html", job_name=job_name, queue=None,
# input_name=input_name, input_folder=input_folder,
# output_folder=output_folder, virus=virus,
# pipeline=pipeline, min_length=min_length,
# max_length=max_length, primer_scheme=primer_scheme,
# primer_type=primer_type, num_samples=num_samples,
# primer_scheme_dir=primer_scheme_dir, barcode_type=barcode_type,
# errors=errors, folders=folders, csvs=csvs, csv_name=csv_file,
# other_primer_type=other_primer_type, primer_select=primer_select,
# schemes=schemes, override_data=override_data, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION)
#
# for item in qSys.queue.getItems():
# queueList.append({item.job_name : url_for('progress', job_name=item.job_name, task_id = item.task_id)})
#
# queueDict = {'jobs': queueList}
# displayQueue = json.htmlsafe_dumps(queueDict)
# return render_template("parameters.html", job_name=job_name, queue=None,
# input_name=input_name, input_folder=input_folder,
# output_folder=output_folder, virus=virus,
# pipeline=pipeline, min_length=min_length,
# max_length=max_length, primer_scheme=primer_scheme,
# primer_type=primer_type, num_samples=num_samples,
# primer_scheme_dir=primer_scheme_dir, barcode_type=barcode_type,
# errors=errors, folders=folders, csvs=csvs, csv_name=csv_file,
# other_primer_type=other_primer_type, primer_select=primer_select,
# schemes=schemes, override_data=override_data, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION)
# Progress page
@app.route("/progress/<job_name>", methods = ["GET", "POST"])
def progress(job_name):
# get the job
job = qSys.getJobByName(job_name)
# get the filepath where the output is located
path = job.output_folder
path +="/all_cmds_log.txt"
################## TODO: NEED TO CHANGE
with open(path, "r") as f:
outputLog = f.read().replace("\n","<br/>")
# find the status of the current job
if re.findall(r':D', outputLog):
frac = "4"
elif len(re.findall(r'COMPLETE', outputLog)) == 1:
frac = "1"
elif len(re.findall(r'COMPLETE', outputLog)) == 2:
frac = "2"
elif len(re.findall(r'COMPLETE', outputLog)) > 2:
frac = "3"
else:
frac = "0"
# find any errors that occur in the output log
pattern = "<br\/>[A-Za-z0-9\s]*ERROR"
numErrors = len(re.findall(pattern, outputLog, re.IGNORECASE)) + len(re.findall(r'No such file or directory', outputLog, re.IGNORECASE))
# get all the parameters in the job so that they can be displayed for the user
num_in_queue = qSys.queue.getJobNumber(job_name)
queue_length = qSys.queue.getNumberInQueue()
input_folder = job.input_folder
output_folder = job.output_folder
read_file = job.read_file
pipeline = job.pipeline
min_length = job.min_length
max_length = job.max_length
primer_scheme = job.primer_scheme
primer_type = job.primer_type
num_samples = job.num_samples
guppyplex = job.guppyplex
barcode_type = job.barcode_type
return render_template("progress.html", outputLog=outputLog, num_in_queue=num_in_queue,
queue_length=queue_length, job_name=job_name, frac=frac, input_folder=input_folder, output_folder=output_folder,
read_file=read_file, pipeline=pipeline, min_length=min_length, max_length=max_length, primer_scheme=primer_scheme,
primer_type=primer_type, num_samples=num_samples, guppyplex=guppyplex, barcode_type=barcode_type, numErrors=numErrors, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
@app.route("/abort/<job_name>", methods = ["GET", "POST"])
def abort(job_name):
job = qSys.getJobByName(job_name)
task = job.task_id
blank = killJob.apply_async(args=[job_name])
celery.control.revoke(task,terminate=True, signal='SIGKILL')
qSys.removeQueuedJob(job_name)
return redirect(url_for("home"))
@app.route("/abort/delete/<job_name>", methods = ["GET", "POST"])
def abort_delete(job_name):
job = qSys.getJobByName(job_name)
task = job.task_id
blank = killJob.apply_async(args=[job_name])
celery.control.revoke(task,terminate=True, signal='SIGKILL')
os.system('rm -r ' + job.output_folder)
qSys.removeQueuedJob(job_name)
return redirect(url_for("home"))
@app.route("/delete/<job_name>", methods = ["GET", "POST"])
def delete(job_name):
# images = os.path.dirname(os.path.realpath(__file__)) + '/static/tmp_plots/' + job_name
# print(images)
# os.system('rm -r ' + images + '*' )
qSys.removeCompletedJob(job_name)
return redirect(url_for("home"))
@app.route("/output/<job_name>", methods = ["GET", "POST"])
def output(job_name):
job = qSys.getJobByName(job_name)
output_folder = job.output_folder
primer_scheme_dir = job.primer_scheme_dir
primer_scheme = job.primer_scheme
full_primer_scheme_dir = os.path.join(primer_scheme_dir, primer_scheme)
# need to only do the big/file stuff once
if len(job.metadata) < 1:
sys.stderr.write("building metadata\n")
sample_folders = []
plots = {}
vcfs = {}
fastas = {}
meta = {}
meta_data_files = {}
plots_found = False
vcf_found = False
fasta_found = False
fastq_found = False
fail_vcf_found = False
depths2_found = False
depths1_found = False
sample = ""
total_samples = ""
current_sample_num = 0
if output_folder:
# sys.stderr.write("output_folder found\n")
if os.path.exists(output_folder):
#Finds all files in the output folder
for (dirpath, dirnames, filenames) in os.walk(output_folder):
for i in dirnames:
if "_medaka" in i:
sample_folders.append(i)
elif "_nanopolish" in i:
sample_folders.append(i)
sample_name = dirpath.split("/")[-1]
meta[sample_name] = {}
meta[sample_name]["vcf_found"] = False
meta[sample_name]["fasta_found"] = False
meta[sample_name]["fastq_found"] = False
meta[sample_name]["fail_vcf"] = False
meta[sample_name]["pool_2_depths"] = False
meta[sample_name]["pool_1_depths"] = False
for name in filenames:
#finds barplot pngs
if fnmatch.fnmatch(name, '*CoVarPlot.png'):
plots[sample_name] = os.path.join(dirpath,name)
plots_found = True
#finds vcf files
if fnmatch.fnmatch(name, '*.pass.vcf.gz'):
vcfs[sample_name] = (os.path.join(dirpath,name))
meta[sample_name]["pass_vcf"] = (os.path.join(dirpath,name))
vcf_found = True
meta[sample_name]["vcf_found"] = True
#finds consensus.fasta
if fnmatch.fnmatch(name, '*.consensus.fasta'):
fastas[sample_name] = os.path.join(dirpath,name)
meta[sample_name]["consensus"] = (os.path.join(dirpath,name))
fasta_found = True
meta[sample_name]["fasta_found"] = True
# nano_fastq_pass-NB04.fastq
if fnmatch.fnmatch(name, '*.fastq'):
meta[sample_name]["fastq"] = os.path.join(dirpath,name)
fastq_found = True
meta[sample_name]["fastq_found"] = True
# nano_fastq_pass-NB04.fastq.index
# nano_fastq_pass-NB04.fastq.index.gzi
# nano_fastq_pass-NB04.fastq.index.fai
# nano_fastq_pass-NB04.fastq.index.readdb
# nano_sample4_NB04.sorted.bam
# nano_sample4_NB04.sorted.bam.bai
# nano_sample4_NB04.trimmed.rg.sorted.bam
# nano_sample4_NB04.alignreport.txt
# nano_sample4_NB04.alignreport.er
# nano_sample4_NB04.primertrimmed.rg.sorted.bam
# nano_sample4_NB04.trimmed.rg.sorted.bam.bai
# nano_sample4_NB04.primertrimmed.rg.sorted.bam.bai
# nano_sample4_NB04.nCoV-2019_2.vcf
# nano_sample4_NB04.nCoV-2019_1.vcf
# nano_sample4_NB04.primersitereport.txt
# nano_sample4_NB04.merged.vcf
# nano_sample4_NB04.primers.vcf
# nano_sample4_NB04.fail.vcf
if fnmatch.fnmatch(name, '*.fail.vcf'):
meta[sample_name]["fail_vcf"] = os.path.join(dirpath,name)
fail_vcf_found = True
meta[sample_name]["fail_vcf"] = True
# nano_sample4_NB04.pass.vcf.gz
# nano_sample4_NB04.pass.vcf.gz.tbi
# nano_sample4_NB04.coverage_mask.txt.nCoV-2019_2.depths
if fnmatch.fnmatch(name, '*2.depths'):
meta[sample_name]["pool_2_depths"] = os.path.join(dirpath,name)
depths2_found = True
meta[sample_name]["depths2_found"] = True
# nano_sample4_NB04.coverage_mask.txt.nCoV-2019_1.depths
if fnmatch.fnmatch(name, '*1.depths'):
meta[sample_name]["pool_1_depths"] = os.path.join(dirpath,name)
depths1_found = True
meta[sample_name]["depths1_found"] = True
# nano_sample4_NB04.coverage_mask.txt
# nano_sample4_NB04.preconsensus.fasta
# nano_sample4_NB04.consensus.fasta
# nano_sample4_NB04.muscle.in.fasta
# nano_sample4_NB04.muscle.out.fasta
# nano_sample4_NB04.minion.log.txt
# nano_sample4_NB04.CoVarPlot.png
            sample_folders.sort(key=lambda s: s.split('_')[-2])
total_samples = len(sample_folders)
sample_dic = {}
for i in range(0, len(sample_folders)):
sample_dic[sample_folders[i]] = i
# k = [i for i in sample_folders]
# sys.stderr.write(",".join(k))
# sys.stderr.write("\n")
# k = [[i, sample_dic[sample_folders[i]]] for i in range(0, len(sample_folders))]
# for i, j in k:
# sys.stderr.write(":".join([str(i), str(k)]))
# sys.stderr.write("\n")
# combine all single files into single tarball and single file
if os.path.isfile(os.path.dirname(os.path.realpath(__file__)) + "/static/tmp_fastas/" + job_name + "/all_" + job_name + ".fasta"):
os.remove(os.path.dirname(os.path.realpath(__file__)) + "/static/tmp_fastas/" + job_name + "/all_" + job_name + ".fasta")
if os.path.isfile(os.path.dirname(os.path.realpath(__file__)) + "/static/tmp_fastas/" + job_name + "/all_" + job_name + ".tar"):
os.remove(os.path.dirname(os.path.realpath(__file__)) + "/static/tmp_fastas/" + job_name + "/all_" + job_name + ".tar")
sys.stderr.write("building tars and all files\n")
if fasta_found:
if len(fastas) > 0:
for sample in fastas.keys():
fasta = fastas[sample]
fasta_file = fasta.split("/")[-1]
fasta_path = os.path.dirname(os.path.realpath(__file__)) + '/static/tmp_fastas/' + job_name + "/all_" + job_name
if not os.path.isdir(fasta_path):
mkdir = "mkdir -p " + fasta_path
os.system(mkdir)
cp_fasta = "cp " + fasta + " " + fasta_path
os.system(cp_fasta)
cmd = "cat " + fasta + " >> " + os.path.dirname(os.path.realpath(__file__)) + '/static/tmp_fastas/' + job_name + "/all_" + job_name + ".fasta"
os.system(cmd)
html_fasta_all = "/static/tmp_fastas/" + job_name + "/all_" + job_name + ".fasta"
cmd = "tar -cf " + fasta_path + ".tar -C " + fasta_path + " ."
os.system(cmd)
html_fasta_tar = "/static/tmp_fastas/" + job_name + "/all_" + job_name + ".tar"
# build metrics files, store in output, then load as table in each sample
# build here so download all is available from start
#get bed sets for each pool
scheme_bed_file = ""
gene_bed_file = ""
if os.path.exists(full_primer_scheme_dir):
#Finds all files in the output folder
for (dirpath, dirnames, filenames) in os.walk(full_primer_scheme_dir):
for name in filenames:
if fnmatch.fnmatch(name, '*.scheme.bed'):
scheme_bed_file = os.path.join(dirpath,name)
if fnmatch.fnmatch(name, '*.genes.bed'):
gene_bed_file = os.path.join(dirpath,name)
sys.stderr.write("gene_bed_file: {}\n".format(gene_bed_file))
if os.path.isfile(scheme_bed_file):
tmp_1 = []
tmp_2 = []
bed_1 = []
bed_2 = []
with open(scheme_bed_file, 'r') as f:
for l in f:
l = l.strip('\n')
l = l.split('\t')
if "alt" in l[3]:
continue
if l[4][-1] == '1':
tmp_1.append(int(l[1]))
tmp_1.append(int(l[2]))
elif l[4][-1] == '2':
tmp_2.append(int(l[1]))
tmp_2.append(int(l[2]))
else:
sys.stderr.write("bed format unknown: {}\n, please contact developers\n".format(l[-1]))
tmp_1.sort()
tmp_2.sort()
for i in range(0,len(tmp_1)-3+1,4):
bed_1.append((tmp_1[i], tmp_1[i+3]))
for i in range(0,len(tmp_2)-3+1,4):
bed_2.append((tmp_2[i], tmp_2[i+3]))
P1, P2 = np.array(bed_1), np.array(bed_2)
amp_dic = {}
amp_count = 1
amp_total = 0
for i, j in P1:
amp_dic[amp_count] = {}
amp_dic[amp_count]["bounds"] = [i, j]
amp_dic[amp_count]["depth"] = []
amp_count += 2
amp_total += 1
amp_count = 2
for i, j in P2:
amp_dic[amp_count] = {}
amp_dic[amp_count]["bounds"] = [i, j]
amp_dic[amp_count]["depth"] = []
amp_count += 2
amp_total += 1
# get genes in reference if available
gene_dic = {}
if os.path.isfile(gene_bed_file):
sys.stderr.write("gene_bed_file found!\n")
with open(gene_bed_file, 'r') as f:
for l in f:
l = l.strip('\n')
l = l.split('\t')
if len(l) > 1:
ref = l[0]
start = int(l[1])
stop = int(l[2])
name = l[3]
gene_dic[name] = {}
gene_dic[name]["bounds"] = [start, stop]
gene_dic[name]["ref"] = ref
else:
sys.stderr.write("gene_bed_file NOT found!\n")
# Process per sample
for folder in sample_folders:
sample_name = folder.split("/")[-1]
sample_table = [["Metric", "Value"], ["Sample", sample_name]]
# fastq metrics
fq_count = 1
fq_reads = 0
fq_len_list = []
read_qual_list = []
q_list = []
if meta[sample_name]["fastq_found"]:
with open(meta[sample_name]["fastq"], 'r') as fq:
for l in fq:
if fq_count in [1, 3]:
fq_count += 1
continue
elif fq_count == 2:
seq = l.strip("\n")
fq_count += 1
fq_reads += 1
fq_len_list.append(len(seq))
continue
elif fq_count == 4:
quals = l.strip("\n")
x = 0
for i in quals:
x += ord(i)
avg_qual = x / len(quals)
read_qual_list.append(round(avg_qual - 33, 2))
fq_count = 1
# fastq read count
meta[sample_name]["fastq_count"] = fq_reads
sample_table.append(["Pass read count", fq_reads])
# fastq read length mean
fastq_len_mean = np.mean(fq_len_list)
fastq_len_std = np.std(fq_len_list)
meta[sample_name]["fastq_len_mean"] = round(fastq_len_mean)
sample_table.append(["Read length mean", round(fastq_len_mean)])
meta[sample_name]["fastq_len_std"] = round(fastq_len_std, 2)
sample_table.append(["Read length stdev", round(fastq_len_std, 2)])
# fastq quality mean
fastq_qual_mean = np.mean(read_qual_list)
fastq_qual_std = np.std(read_qual_list)
meta[sample_name]["fastq_qual_mean"] = round(fastq_qual_mean, 2)
sample_table.append(["Read quality mean", round(fastq_qual_mean, 2)])
meta[sample_name]["fastq_qual_std"] = round(fastq_qual_std, 2)
sample_table.append(["Read quality stdev", round(fastq_qual_std, 2)])
else:
sample_table.append(["Pass read count", "NA"])
sample_table.append(["Read length mean", "NA"])
sample_table.append(["Read length stdev", "NA"])
sample_table.append(["Read quality mean", "NA"])
sample_table.append(["Read quality stdev", "NA"])
# mean of 2 means
# meta[sample_name]["pool_1_depths"]
index_depth = []
D1 = []
if meta[sample_name]["depths1_found"] and meta[sample_name]["depths2_found"]:
with open(meta[sample_name]["pool_1_depths"], 'r') as d1:
for l in d1:
l = l.strip("\n")
l = l.split("\t")
D1.append(int(l[3]))
index_depth.append(int(l[3]))
# meta[sample_name]["pool_2_depths"]
D2 = []
i = 0
with open(meta[sample_name]["pool_2_depths"], 'r') as d2:
for l in d2:
l = l.strip("\n")
l = l.split("\t")
D2.append(int(l[3]))
index_depth[i] += int(l[3])
i += 1
total_mean_cov_list = []
total_median_cov_list = []
for amp in amp_dic:
if amp % 2 == 0:
dlist = D2
else:
dlist = D1
i, j = amp_dic[amp]["bounds"]
amp_dic[amp]["depth"] = dlist[i:j]
total_mean_cov_list.append(round(sum(dlist[i:j]) / len(dlist[i:j]), 2))
total_median_cov_list.append(round(np.median(dlist[i:j]), 2))
total_mean_cov = round(sum(total_mean_cov_list) / len(total_mean_cov_list))
total_median_cov = round(np.median(total_median_cov_list))
meta[sample_name]["total_median_cov"] = total_median_cov
meta[sample_name]["total_mean_cov"] = total_mean_cov
sample_table.append(["Total median coverage", total_median_cov])
sample_table.append(["Total mean coverage", total_mean_cov])
else:
sample_table.append(["Total median coverage", "NA"])
sample_table.append(["Total mean coverage", "NA"])
failed_amps = []
# print per amplicon
if os.path.isfile(scheme_bed_file):
for i in range(1, amp_total +1):
D = amp_dic[i]["depth"]
D_mean = round(sum(D) / len(D))
sample_table.append(["Amplicon {} mean coverage".format(i), D_mean])
if D_mean < 20:
failed_amps.append(i)
# failed amplicons
fail_str = ""
fail_amp_count = 0
if len(failed_amps) > 0:
head = True
for i in failed_amps:
if head:
fail_str = fail_str + "Amplicon_" + str(i)
fail_amp_count += 1
head = False
else:
fail_str = fail_str + ", Amplicon_" + str(i)
fail_amp_count += 1
sample_table.append(["N failed amplicons (<20x)", fail_amp_count])
sample_table.append(["Failed amplicons", fail_str])
else:
sample_table.append(["Amplicon {} mean coverage".format(i), "NA"])
sample_table.append(["N failed amplicons (<20x)", "NA"])
sample_table.append(["Failed amplicons", "NA"])
# meta[sample_name]["pass_vcf"]
pass_vcf_count = 0
pass_SNV = 0
pass_indel = 0
pass_vcf_table = []
pass_variant_fraction = []
pass_variant_pos_list = []
if meta[sample_name]["vcf_found"]:
with gzip.open(meta[sample_name]["pass_vcf"], "rt") as f:
for l in f:
if l[:2] == "##":
continue
if l[0] == "#":
l = l[1:].strip('\n')
# sys.stderr.write("header = {}\n".format(l))
l = l.split('\t')
header = l
pass_vcf_table.append(["CHROM", "POS", "REF", "ALT", "QUAL", "FILTER", "DEPTH"])
continue
l = l.strip('\n')
l = l.split('\t')
row = dict(zip(header, l))
# how to calculate variant depths and frequencies
# medaka: depth = AC, VAF=AC/DP (AC/BAM depth for CoVarPlots)
# nanopolish: depth = BaseCalledReadsWithVariant, VAF=BaseCalledReadsWithVariant / BAM depth, or BaseCalledFraction
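                            # Worked example with hypothetical numbers (not from any real sample):
                            #   medaka: AC=120, DP=200 -> VAF = 120/200 * 100 = 60.0%
                            #   nanopolish: BaseCalledFraction=0.6 -> VAF = 60.0%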
var_depth = 0
DP = 0
np_frac = 0
for i in row["INFO"].split(";"):
# sys.stderr.write("vcf_INFO: {}\n".format(i))
if len(i) > 0:
name, result = i.split("=")
if name == "AC":
var_depth = int(result.split(",")[-1])
elif name == "BaseCalledReadsWithVariant":
var_depth = int(result)
elif name == "DP":
DP = int(result)
elif name == "TotalReads":
DP = index_depth[int(row["POS"])]
elif name == "BaseCalledFraction":
np_frac = float(result)
# depth = int(row["INFO"].split(";")[0].split("=")[1])
depth = DP
pass_variant_pos_list.append(int(row["POS"]))
if np_frac > 0:
frac = round(np_frac * 100, 2)
elif DP > 0:
frac = round((var_depth / DP) * 100, 2)
else:
sys.stderr.write("VCF depth fields not found\n")
# frac = round((depth / index_depth[int(row["POS"])]) * 100, 2)
pass_variant_fraction.append(frac)
pass_vcf_table.append([row["CHROM"], int(row["POS"]), row["REF"], row["ALT"], float(row["QUAL"]), row["FILTER"], depth])
pass_vcf_count += 1
if len(row["REF"]) > 1 or len(row["ALT"]) > 1:
pass_indel += 1
else:
pass_SNV += 1
sample_table.append(["Total pass variants", pass_vcf_count])
sample_table.append(["Pass SNV", pass_SNV])
sample_table.append(["Pass indel", pass_indel])
sample_table.append(["Read support % for pass variants", ", ".join([str(i) for i in pass_variant_fraction])])
if len(gene_dic) > 1:
gene_pass_variant_count = []
for name in gene_dic:
gene_count = 0
i, j = gene_dic[name]["bounds"]
for k in pass_variant_pos_list:
if k >= i and k < j:
gene_count += 1
gene_pass_variant_count.append([name, gene_count])
sample_table.append(["Pass variants per gene", ""])
for name, gene_count in gene_pass_variant_count:
sample_table.append([name, gene_count])
else:
sample_table.append(["Total pass variants", "NA"])
sample_table.append(["Pass SNV", "NA"])
sample_table.append(["Pass indel", "NA"])
sample_table.append(["Read support % for pass variants", "NA"])
sample_table.append(["Pass variants per gene", "NA"])
# meta[sample_name]["fail_vcf"]
# meta[sample_name]["consensus"]
# NOTE: Currently only works with single reference
genome_seq = ""
if meta[sample_name]["fasta_found"]:
with open(meta[sample_name]["consensus"], 'r') as fa:
for l in fa:
l = l.strip("\n")
if l[0] == ">":
genome_name = l
else:
genome_seq = genome_seq + l
genome_size = len(genome_seq)
# genome_primer_size
genome_N_count = genome_seq.count('N')
genome_called = genome_size - genome_N_count
genome_called_fraction = round(genome_called / genome_size, 4) * 100
sample_table.append(["Genome size", genome_size])
# sample_table.append(["Genome primer coverage size", genome_primer_size])
sample_table.append(["Called Bases", genome_called])
sample_table.append(["N bases", genome_N_count])
sample_table.append(["% called", genome_called_fraction])
if len(gene_dic) > 1:
orf_list_fractions = []
gene_N_total = 0
gene_length_total = 0
for name in gene_dic:
i, j = gene_dic[name]["bounds"]
gene_seq = genome_seq[i:j]
gene_length = len(gene_seq)
gene_length_total += gene_length
gene_N_count = gene_seq.count('N')
gene_N_total += gene_N_count
gene_called = gene_length - gene_N_count
gene_called_fraction = round(gene_called / gene_length, 4) * 100
orf_list_fractions.append([name, gene_called_fraction])
orfs_called = gene_length_total - gene_N_total
orfs_called_fraction = round(orfs_called / gene_length_total, 4) * 100
sample_table.append(["Fraction of ORF regions called", orfs_called_fraction])
sample_table.append(["Fraction of ORF regions called, by gene", ""])
for name, gene_called_fraction in orf_list_fractions:
sample_table.append([name, gene_called_fraction])
else:
sys.stderr.write("gene_dic not found\n")
else:
sample_table.append(["Genome size", "NA"])
sample_table.append(["Called Bases", "NA"])
sample_table.append(["N bases", "NA"])
sample_table.append(["% called", "NA"])
sample_table.append(["Fraction of ORF regions called", "NA"])
sample_table.append(["Fraction of ORF regions called, by gene", "NA"])
sample_meta_file = os.path.join(output_folder, folder, "{}.metadata.tsv".format(sample_name))
meta_data_files[sample_name] = sample_meta_file
with open(sample_meta_file, 'w') as w:
for i, j in sample_table:
w.write("\t".join([str(i), str(j)]))
w.write("\n")
# add current sample to meta
meta[sample_name]["table"] = sample_table
# build meta data tables all samples
header = []
h_tmp = []
h_max = 0
for folder in sample_folders:
sample_name = folder.split("/")[-1]
if sample_name in meta:
sample_table = meta[sample_name]["table"]
for i in sample_table[1:]:
j, k = i
h_tmp.append(j)
if len(h_tmp) > h_max:
header = h_tmp
h_max = len(h_tmp)
h_tmp = []
combined_meta_file = os.path.join(output_folder, "{}.combined.metadata.tsv".format(job_name))
with open(combined_meta_file, "w") as w:
w.write("\t".join(header))
w.write("\n")
for folder in sample_folders:
sample_name = folder.split("/")[-1]
if sample_name in meta:
sample_table = meta[sample_name]["table"]
table_heads = []
values = []
for i in sample_table[1:]:
j, k = i
table_heads.append(str(j))
values.append(str(k))
for C, thead in enumerate(table_heads):
if C < len(header):
if header[C] == thead:
w.write(values[C])
w.write("\t")
else:
w.write("\t")
w.write("\n")
meta_tmp_path = os.path.dirname(os.path.realpath(__file__)) + '/static/tmp_meta/' + job_name
if not os.path.isdir(meta_tmp_path):
mkdir = "mkdir -p " + meta_tmp_path
os.system(mkdir)
cmd = "cp {} {}".format(combined_meta_file, meta_tmp_path)
os.system(cmd)
combined_meta_html = "/static/tmp_meta/" + job_name + "/{}.combined.metadata.tsv".format(job_name)
meta["sample_folders"] = sample_folders
meta["plots"] = plots
meta["vcfs"] = vcfs
meta["fastas"] = fastas
meta["plots_found"] = plots_found
meta["vcf_found"] = vcf_found
meta["fasta_found"] = fasta_found
meta["sample_dic"] = sample_dic
meta["total_samples"] = total_samples
meta["current_sample_num"] = current_sample_num
meta["html_fasta_all"] = html_fasta_all
meta["html_fasta_tar"] = html_fasta_tar
meta["combined_meta_html"] = combined_meta_html
job.metadata = meta
else:
meta = job.metadata
sample_folders = meta["sample_folders"]
plots = meta["plots"]
vcfs = meta["vcfs"]
fastas = meta["fastas"]
plots_found = meta["plots_found"]
vcf_found = meta["vcf_found"]
fasta_found = meta["fasta_found"]
sample_dic = meta["sample_dic"]
total_samples = meta["total_samples"]
current_sample_num = meta["current_sample_num"]
html_fasta_all = meta["html_fasta_all"]
html_fasta_tar = meta["html_fasta_tar"]
combined_meta_html = meta["combined_meta_html"]
if request.method == "POST":
# sys.stderr.write("sample_num (START):{}\n".format(current_sample_num))
if request.form.get("select_sample"):
sample = request.form.get('sample_folder')
if sample == '':
sample = sample_folders[0]
elif sample is None:
sample = sample_folders[0]
current_sample_num = sample_dic[sample] + 1
# sys.stderr.write("sample_num (V):{}\n".format(current_sample_num))
elif request.form.get("next_sample"):
current_sample_num = int(request.form.get('current_sample_number')) - 1
if current_sample_num == '':
sample = sample_folders[0]
elif current_sample_num is None:
sample = sample_folders[0]
else:
n_val = sample_dic[sample_folders[current_sample_num]] + 1
if n_val > len(sample_folders) - 1:
n_val = 0
sample = sample_folders[n_val]
current_sample_num = sample_dic[sample] + 1
# sys.stderr.write("sample_num (N):{}\n".format(current_sample_num))
elif request.form.get("previous_sample"):
current_sample_num = int(request.form.get('current_sample_number')) -1
# sys.stderr.write("sample_num (P_start):{}\n".format(current_sample_num))
if current_sample_num == '':
sample = sample_folders[len(sample_folders) - 1]
elif current_sample_num is None:
sample = sample_folders[len(sample_folders) - 1]
elif current_sample_num == -1:
sample = sample_folders[len(sample_folders) - 1]
else:
p_val = sample_dic[sample_folders[current_sample_num]] - 1
if p_val < 0:
p_val = len(sample_folders) - 1
sample = sample_folders[p_val]
current_sample_num = sample_dic[sample] + 1
# sys.stderr.write("sample_num (P):{}\n".format(current_sample_num))
if vcf_found:
if sample in vcfs.keys():
# sys.stderr.write("vcf found and building\n")
# try:
header = []
vcf_table = []
with gzip.open(vcfs[sample], "rt") as f:
for l in f:
if l[:2] == "##":
continue
if l[0] == "#":
l = l[1:].strip('\n')
# sys.stderr.write("header = {}\n".format(l))
l = l.split('\t')
header = l
vcf_table.append(["CHROM", "POS", "REF", "ALT", "QUAL", "FILTER", "DEPTH"])
continue
l = l.strip('\n')
l = l.split('\t')
row = dict(zip(header, l))
# k = ["{}: {}".format(key, row[key]) for key in row.keys()]
# sys.stderr.write(",".join(k))
# sys.stderr.write("\n")
var_depth = 0
DP = 0
np_frac = 0
for i in row["INFO"].split(";"):
if len(i) > 0:
name, result = i.split("=")
if name == "AC":
var_depth = int(result.split(",")[-1])
elif name == "BaseCalledReadsWithVariant":
var_depth = int(result)
# depth = int(row["INFO"].split(";")[0].split("=")[1])
depth = var_depth
# depth = int(row["INFO"].split(";")[0].split("=")[1])
vcf_table.append([row["CHROM"], int(row["POS"]), row["REF"], row["ALT"], float(row["QUAL"]), row["FILTER"], depth])
df = pd.DataFrame(vcf_table[1:], columns=vcf_table[0])
vcf_table_html = df.to_html(classes='mystyle')
# sys.stderr.write("vcf built\n")
# except:
# flash("Warning: vcf table creation failed for {}".format(sample))
# sys.stderr.write("vcf failed to build\n")
# vcf_table = False
else:
flash("Warning: No vcf files found in {}".format(output_folder))
sys.stderr.write("no vcf for sample found\n")
return render_template("output.html", job_name=job_name, sample_folders=sample_folders, plots_found=plots_found, vcf_found=vcf_found, fasta_found=fasta_found, sample_folder=sample, current_sample_num=current_sample_num, total_samples=total_samples, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
else:
flash("Warning: No vcf files found in {}".format(output_folder))
sys.stderr.write("no vcfs found\n")
return render_template("output.html", job_name=job_name, sample_folders=sample_folders, plots_found=plots_found, vcf_found=vcf_found, fasta_found=fasta_found, sample_folder=sample, current_sample_num=current_sample_num, total_samples=total_samples, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
if plots_found:
if sample in plots.keys():
plot = plots[sample]
plot_file = plot.split("/")[-1]
plot_path = os.path.dirname(os.path.realpath(__file__)) + '/static/tmp_plots/' + job_name
if not os.path.isdir(plot_path):
mkdir = "mkdir -p " + plot_path
os.system(mkdir)
cp_plot = "cp " + plot + " " + plot_path
os.system(cp_plot)
html_plot = "/static/tmp_plots/" + job_name+ "/" + plot_file
# sys.stderr.write("plots found: {}\n".format("/static/tmp_plots/"+job_name+ "/" + plot_file))
else:
plot = False
sys.stderr.write("plot for sample not found in plots\n")
return render_template("output.html", job_name=job_name, sample_folders=sample_folders, plots_found=plots_found, vcf_found=vcf_found, fasta_found=fasta_found, sample_folder=sample, current_sample_num=current_sample_num, total_samples=total_samples, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
else:
plot = False
sys.stderr.write("plots not found\n")
return render_template("output.html", job_name=job_name, sample_folders=sample_folders, plots_found=plots_found, vcf_found=vcf_found, fasta_found=fasta_found, sample_folder=sample, current_sample_num=current_sample_num, total_samples=total_samples, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
if fasta_found:
if sample in fastas.keys():
fasta = fastas[sample]
fasta_file = fasta.split("/")[-1]
fasta_path = os.path.dirname(os.path.realpath(__file__)) + '/static/tmp_fastas/' + job_name
if not os.path.isdir(fasta_path):
mkdir = "mkdir -p " + fasta_path
os.system(mkdir)
cp_fasta = "cp " + fasta + " " + fasta_path
os.system(cp_fasta)
html_fasta = "/static/tmp_fastas/" + job_name+ "/" + fasta_file
else:
flash("Warning: No fasta files found in {}".format(output_folder))
sys.stderr.write("no fasta for sample found\n")
return render_template("output.html", job_name=job_name, sample_folders=sample_folders, plots_found=plots_found, vcf_found=vcf_found, fasta_found=fasta_found, sample_folder=sample, current_sample_num=current_sample_num, total_samples=total_samples, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
else:
flash("Warning: No fasta files found in {}".format(output_folder))
sys.stderr.write("no fastas found\n")
return render_template("output.html", job_name=job_name, sample_folders=sample_folders, plots_found=plots_found, vcf_found=vcf_found, fasta_found=fasta_found, sample_folder=sample, current_sample_num=current_sample_num, total_samples=total_samples, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
if meta[sample]["table"]:
meta_table = meta[sample]["table"]
df = pd.DataFrame(meta_table[1:], columns=meta_table[0])
meta_table_html = df.to_html(classes='mystyle')
else:
flash("Warning: No metadata table data generated")
sys.stderr.write("no metadata found for sample: {}\n".format(sample))
return render_template("output.html", job_name=job_name, sample_folders=sample_folders, plots_found=plots_found, vcf_found=vcf_found, fasta_found=fasta_found, sample_folder=sample, current_sample_num=current_sample_num, total_samples=total_samples, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
# sys.stderr.write("running plot return\n")
return render_template("output.html", job_name=job_name, output_folder=output_folder, vcf_table=vcf_table_html, plot=html_plot, fasta=html_fasta,
fasta_tar=html_fasta_tar, fasta_all=html_fasta_all, plots_found=plots_found, vcf_found=vcf_found, fasta_found=fasta_found,
sample_folders=sample_folders, sample_folder=sample, current_sample_num=current_sample_num, total_samples=total_samples,
meta_table_html=meta_table_html, combined_meta_html=combined_meta_html, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
# sys.stderr.write("running regular return\n")
sample = request.form.get('sample_folder')
if sample == '':
sample = sample_folders[0]
elif sample is None:
sample = sample_folders[0]
current_sample_num = sample_dic[sample] + 1
return render_template("output.html", job_name=job_name, sample_folders=sample_folders, sample_folder=sample, current_sample_num=current_sample_num, total_samples=total_samples, VERSION=VERSION, ARTIC_VERSION=ARTIC_VERSION, DOCS=DOCS)
# return render_template("output.html", job_name=job_name, output_folder=output_folder, output_files=output_files, save_graphs=save_able, vcf_table=vcf_table, create_vcfs=create_able, plots_found=plots_found, vcf_found=vcf_found)
if __name__ == "__main__":
# app.run(debug=True)
"""
---------------------------------------------------------------------------
Arguments
---------------------------------------------------------------------------
"""
parser = MyParser(
description="interARTIC - coronavirus genome analysis web app")
parser.add_argument("redis_port", nargs='?',
help="redis port *pass_through**")
parser.add_argument("-a", "--web_address", default="127.0.0.1",
help="localhost default 127.0.0.1, but for use on other computers (under VPN) can be 0.0.0.0 *WARNING*")
parser.add_argument("-p", "--web_port", default=5000,
help="port used with web address, eg -p 5000 would be 127.0.0.1:5000")
args = parser.parse_args()
    app.run(host=args.web_address, port=args.web_port, debug=True)
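    # Hedged usage sketch (the script name and values below are illustrative only):
    #   python interartic.py 6379 -a 0.0.0.0 -p 8080
    # binds the web UI to 0.0.0.0:8080 and passes 6379 through as the redis port.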
| 50.487639
| 363
| 0.557155
|
a4f9985ec0212a3727ec3ad8ab5e18e67af8ead2
| 5,216
|
py
|
Python
|
ros/src/twist_controller/dbw_node.py
|
stevemg9/CarND-Capstone
|
019dd26e480295bfb17aae18339241cfbb7791ea
|
[
"MIT"
] | null | null | null |
ros/src/twist_controller/dbw_node.py
|
stevemg9/CarND-Capstone
|
019dd26e480295bfb17aae18339241cfbb7791ea
|
[
"MIT"
] | null | null | null |
ros/src/twist_controller/dbw_node.py
|
stevemg9/CarND-Capstone
|
019dd26e480295bfb17aae18339241cfbb7791ea
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import rospy
from std_msgs.msg import Bool
from dbw_mkz_msgs.msg import ThrottleCmd, SteeringCmd, BrakeCmd, SteeringReport
from geometry_msgs.msg import TwistStamped
import math
from twist_controller import Controller
'''
You can build this node only after you have built (or partially built) the `waypoint_updater` node.
You will subscribe to `/twist_cmd` message which provides the proposed linear and angular velocities.
You can subscribe to any other message that you find important or refer to the document for list
of messages subscribed to by the reference implementation of this node.
One thing to keep in mind while building this node and the `twist_controller` class is the status
of `dbw_enabled`. While in the simulator, it's enabled all the time; in the real car, that will
not be the case. This may cause your PID controller to accumulate error because the car could
temporarily be driven by a human instead of your controller.
We have provided two launch files with this node. Vehicle specific values (like vehicle_mass,
wheel_base) etc should not be altered in these files.
We have also provided some reference implementations for PID controller and other utility classes.
You are free to use them or build your own.
Once you have the proposed throttle, brake, and steer values, publish them on the various publishers
that we have created in the `__init__` function.
'''
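# A minimal sketch of the dbw_enabled concern described above, assuming a
# plain PI controller. This toy class is illustrative only and is NOT the
# provided Controller/PID implementation used by this node.
class _ToyPI(object):
    def __init__(self, kp=0.3, ki=0.1):
        self.kp, self.ki = kp, ki
        self.int_val = 0.0
    def reset(self):
        # Call when dbw_enabled is False so integral error does not wind up
        # while a human is driving the car.
        self.int_val = 0.0
    def step(self, error, dt):
        self.int_val += error * dt
        return self.kp * error + self.ki * self.int_val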
class DBWNode(object):
def __init__(self):
rospy.init_node('dbw_node')
vehicle_mass = rospy.get_param('~vehicle_mass', 1736.35)
fuel_capacity = rospy.get_param('~fuel_capacity', 13.5)
brake_deadband = rospy.get_param('~brake_deadband', .1)
decel_limit = rospy.get_param('~decel_limit', -5)
accel_limit = rospy.get_param('~accel_limit', 1.)
wheel_radius = rospy.get_param('~wheel_radius', 0.2413)
wheel_base = rospy.get_param('~wheel_base', 2.8498)
steer_ratio = rospy.get_param('~steer_ratio', 14.8)
max_lat_accel = rospy.get_param('~max_lat_accel', 3.)
max_steer_angle = rospy.get_param('~max_steer_angle', 8.)
# Creating ROS Publishers
self.steer_pub = rospy.Publisher('/vehicle/steering_cmd',
SteeringCmd, queue_size=1)
self.throttle_pub = rospy.Publisher('/vehicle/throttle_cmd',
ThrottleCmd, queue_size=1)
self.brake_pub = rospy.Publisher('/vehicle/brake_cmd',
BrakeCmd, queue_size=1)
# Creating the Controller Object
self.controller = Controller(vehicle_mass = vehicle_mass,
brake_deadband = brake_deadband,
decel_limit = decel_limit,
accel_limit = accel_limit,
wheel_radius = wheel_radius,
wheel_base = wheel_base,
steer_ratio = steer_ratio,
max_lat_accel = max_lat_accel,
max_steer_angle = max_steer_angle)
# Creating ROS Subscribers
rospy.Subscriber("/current_velocity", TwistStamped, self.velocity_cb)
rospy.Subscriber("/vehicle/dbw_enabled", Bool, self.dbw_enabled_cb)
rospy.Subscriber("/twist_cmd", TwistStamped, self.twist_cb)
# DBWNode Class Members
self.current_vel = None
self.current_ang_vel = None
self.dbw_enabled = None
self.linear_vel = None
self.angular_vel = None
self.throttle = 0
self.steering = 0
self.brake = 0
self.loop()
def loop(self):
# Operate Loop at 50hz
rate = rospy.Rate(50)
while not rospy.is_shutdown():
# Get steering, brake and throttle from Controller
            if None not in (self.current_vel, self.linear_vel, self.angular_vel):
self.throttle, self.brake, self.steering = self.controller.control(self.current_vel,
self.dbw_enabled,
self.linear_vel,
self.angular_vel)
# Publishing Steering, Brake and Throttle over ROS
if self.dbw_enabled:
self.publish(self.throttle, self.brake, self.steering)
rate.sleep()
def velocity_cb(self, msg):
# Update current velocity
self.current_vel = msg.twist.linear.x
def dbw_enabled_cb(self, msg):
        # Update DBW enabled flag (msg is a std_msgs/Bool, so store its .data field)
        self.dbw_enabled = msg.data
def twist_cb(self, msg):
# Update twist values
self.linear_vel = msg.twist.linear.x
self.angular_vel = msg.twist.angular.z
def publish(self, throttle, brake, steer):
tcmd = ThrottleCmd()
tcmd.enable = True
tcmd.pedal_cmd_type = ThrottleCmd.CMD_PERCENT
tcmd.pedal_cmd = throttle
self.throttle_pub.publish(tcmd)
scmd = SteeringCmd()
scmd.enable = True
scmd.steering_wheel_angle_cmd = steer
self.steer_pub.publish(scmd)
bcmd = BrakeCmd()
bcmd.enable = True
bcmd.pedal_cmd_type = BrakeCmd.CMD_TORQUE
bcmd.pedal_cmd = brake
self.brake_pub.publish(bcmd)
if __name__ == '__main__':
DBWNode()
| 38.925373
| 101
| 0.655483
|
9e482479728e438d3e544a12a46a14291aafdcce
| 13,093
|
py
|
Python
|
manager/master/eventHandlers.py
|
Tootooroo/VerManager
|
65a37ed4f864c8d6adeade52582315aeff901fbe
|
[
"MIT"
] | 2
|
2020-03-20T20:04:54.000Z
|
2021-03-18T12:03:54.000Z
|
manager/master/eventHandlers.py
|
Tootooroo/VerManager
|
65a37ed4f864c8d6adeade52582315aeff901fbe
|
[
"MIT"
] | null | null | null |
manager/master/eventHandlers.py
|
Tootooroo/VerManager
|
65a37ed4f864c8d6adeade52582315aeff901fbe
|
[
"MIT"
] | null | null | null |
# MIT License
#
# Copyright (c) 2020 Gcom
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# EventHandlers.py
import asyncio
import concurrent.futures
import os
import zipfile
import shutil
import manager.master.configs as cfg
from VerManager.settings import DATA_URL
from manager.master.docGen import log_gen
from manager.master.exceptions import DOC_GEN_FAILED_TO_GENERATE, \
POSTPROC_NO_HANDLERS_MATCH_WITH_THE_KEY
from typing import List, Dict, Optional, cast, Callable, Tuple, \
Any
from collections import namedtuple
from manager.master.eventListener \
import letterLog, Entry
from manager.basic.type import Error
from manager.basic.letter import Letter, \
ResponseLetter, BinaryLetter, NotifyLetter, TaskLogLetter
from manager.master.task import Task, SingleTask, PostTask
from manager.basic.storage import StoChooser
from manager.master.logger import Logger, M_NAME as LOGGER_M_NAME
from manager.master.workerRoom import WorkerRoom, M_NAME as WR_M_NAME
from manager.basic.storage import M_NAME as STORAGE_M_NAME
from manager.basic.util import pathSeperator
from manager.basic.notify import Notify, WSCNotify
from manager.basic.dataLink import DataLink, DataLinkNotify
from manager.master.persistentDB import PersistentDB, TAIL
from manager.master.postProc import PostProc
from manager.master.misc import General_PostProc
ActionInfo = namedtuple('ActionInfo', 'isMatch execute args')
path = str
class EVENT_HANDLER_TOOLS:
ProcessPool = concurrent.futures.ProcessPoolExecutor()
chooserSet = {} # type: Dict[str, StoChooser]
transfer_finished = {} # type: Dict[str, path]
PREPARE_ACTIONS = [] # type: List[ActionInfo]
IN_PROC_ACTIONS = [] # type: List[ActionInfo]
FIN_ACTIONS = [] # type: List[ActionInfo]
FAIL_ACTIONS = [] # type: List[ActionInfo]
ACTION_TBL = {
Task.STATE_IN_PROC: IN_PROC_ACTIONS,
Task.STATE_FINISHED: FIN_ACTIONS,
Task.STATE_FAILURE: FAIL_ACTIONS
} # type: Dict[int, List[ActionInfo]]
@classmethod
def action_init(self, env: Entry.EntryEnv) -> None:
# Fin action install
singletask_fin_action_info = ActionInfo(
isMatch=lambda t: isinstance(t, SingleTask),
execute=self._singletask_fin_action,
args=env
)
self.install_action(Task.STATE_FINISHED, singletask_fin_action_info)
posttask_fin_action_info = ActionInfo(
isMatch=lambda t: isinstance(t, PostTask),
execute=self._posttask_fin_action,
args=env
)
self.install_action(Task.STATE_FINISHED, posttask_fin_action_info)
task_common_fin_action_info = ActionInfo(
isMatch=lambda t: True,
execute=self._tasks_fin_action,
args=env
)
self.install_action(Task.STATE_FINISHED, task_common_fin_action_info)
# Fail action install
task_common_fail_action_info = ActionInfo(
isMatch=lambda t: True,
execute=self._tasks_fail_action,
args=env
)
self.install_action(Task.STATE_FAILURE, task_common_fail_action_info)
@classmethod
async def do_action(self, t: Task, state: int) -> None:
actions = self.ACTION_TBL[state]
for action in actions:
if action.isMatch(t):
await action.execute(t, action.args)
@classmethod
def install_action(self, state: int, action: ActionInfo) -> None:
self.ACTION_TBL[state].append(action)
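    # Hedged usage sketch: a custom action is registered by appending an
    # ActionInfo whose predicate picks the tasks it applies to. The coroutine
    # name below is illustrative and not part of this module:
    #
    #   async def my_fail_action(t, env): ...
    #   EVENT_HANDLER_TOOLS.install_action(
    #       Task.STATE_FAILURE,
    #       ActionInfo(isMatch=lambda t: isinstance(t, SingleTask),
    #                  execute=my_fail_action,
    #                  args=env))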
@classmethod
async def packDataWithChangeLog(
self, vsn: str, filePath: str, dest: str) -> str:
pathSplit = filePath.split("/")
pathSplit[-1] = pathSplit[-1] + ".log.rar"
zipPath = "/".join(pathSplit)
zipFileName = zipPath.split("/")[-1]
try:
await log_gen(vsn, "./log.txt")
except DOC_GEN_FAILED_TO_GENERATE:
# Fail to generate log file
# log into log file.
return filePath.split("/")[-1]
# Pack into a zipfile may take a while
# do it in another process.
loop = asyncio.get_running_loop()
await loop.run_in_executor(
EVENT_HANDLER_TOOLS.ProcessPool,
self.zipPackHelper,
["./log.txt", filePath], zipPath
)
return zipFileName
@staticmethod
def changeLogGen(start_commit: str,
last_commit: str,
destPath: str) -> None:
from manager.models import infoBetweenRev
changeLog = infoBetweenRev(start_commit, last_commit)
with open(destPath, "w") as logFile:
for log in changeLog:
logFile.write(log)
@staticmethod
def zipPackHelper(files: List[str], zipPath: str) -> None:
zipFd = zipfile.ZipFile(zipPath, "w")
for f in files:
zipFd.write(f)
zipFd.close()
@staticmethod
async def _singletask_fin_action(
t: SingleTask, env: Entry.EntryEnv) -> None:
if t.job.numOfTasks() == 1:
await responseHandler_ResultStore(t, env)
@staticmethod
async def _posttask_fin_action(t: PostTask, env: Entry.EntryEnv) -> None:
Temporary = t.job.get_info('Temporary')
if Temporary is not None and Temporary == 'true':
await temporaryBuild_handling(t, env)
else:
await responseHandler_ResultStore(t, env)
t.toFinState()
@staticmethod
async def _tasks_fin_action(t: Task, env: Entry.EntryEnv) -> None:
return None
@staticmethod
async def _tasks_fail_action(t: Task, env: Entry.EntryEnv) -> None:
return None
async def responseHandler(
env: Entry.EntryEnv, letter: Letter) -> None:
if not isinstance(letter, ResponseLetter):
return None
ident = letter.getHeader('ident')
taskId = letter.getHeader('tid')
state = int(letter.getContent('state'))
wr = env.modules.getModule('WorkerRoom') # type: WorkerRoom
task = wr.getTaskOfWorker(ident, taskId)
if task is None or not Task.isValidState(state):
return None
if task.stateChange(state) is Error:
return None
try:
await EVENT_HANDLER_TOOLS.do_action(task, state)
except Exception:
state = Task.STATE_FAILURE
task.stateChange(Task.STATE_FAILURE)
# Notify to components that
# task's state is changed.
type = env.eventListener.NOTIFY_TASK_STATE_CHANGED
await env.eventListener.notify(type, (taskId, state))
async def copyFileInExecutor(src: str, dest: str) -> None:
loop = asyncio.get_running_loop()
await loop.run_in_executor(
EVENT_HANDLER_TOOLS.ProcessPool,
shutil.copy,
src, dest)
async def temporaryBuild_handling(
task: Task, env: Entry.EntryEnv) -> None:
logger = env.modules.getModule('Logger')
chooserSet = EVENT_HANDLER_TOOLS.chooserSet
seperator = pathSeperator()
taskId = task.id()
chooser = chooserSet[taskId]
filePath = chooser.path()
fileName = filePath.split(seperator)[-1]
try:
if not os.path.exists("private"):
os.mkdir("private")
        # The copy may land on NFS and take a long time,
        # so run it in another process.
await copyFileInExecutor(filePath, "private" + seperator + fileName)
except FileNotFoundError as e:
Logger.putLog(logger, letterLog, str(e))
except PermissionError as e:
Logger.putLog(logger, letterLog, str(e))
async def responseHandler_ResultStore(
task: Task, env: Entry.EntryEnv) -> None:
path = EVENT_HANDLER_TOOLS.transfer_finished[task.id()]
# Hand to PostProc so PostProc able to attach a changelog
# to result file.
pp = cast(PostProc, env.modules.getModule(PostProc.NAME))
# Exception of post_req unable
# to be processed here.
try:
pp.post_req(
# Request
(General_PostProc, task, path)
)
except POSTPROC_NO_HANDLERS_MATCH_WITH_THE_KEY:
# This task no need to do post proc
task.job.job_result = job_result_url(
str(task.job.unique_id), path)
def job_result_url(unique_id: str, path: str) -> str:
fileName = path.split("/")[-1]
may_slash = "" if DATA_URL[-1] == '/' else "/"
return DATA_URL + may_slash + unique_id + "/" + fileName
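# Worked example with hypothetical values: DATA_URL = "http://host/data/",
# unique_id = "42" and a path ending in "sample1.consensus.fasta" yield
# "http://host/data/42/sample1.consensus.fasta".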
def cmd_log_handler(dl: DataLink, letter: TaskLogLetter, args: Any) -> None:
"""
args
"""
tid = letter.getIdent()
msg = letter.getMessage()
dl.notify(DataLinkNotify("CMD_LOG", (tid, msg)))
def cmd_log_notify(msg: Tuple[str, str], arg: Any) -> None:
assert(cfg.mmanager is not None)
tid = msg[0]
content = msg[1]
metaDB = cast(PersistentDB, cfg.mmanager.getModule(PersistentDB.M_NAME))
if not metaDB.is_exists(tid) or \
not metaDB.is_open(tid):
return
# Write to Tail of log file.
metaDB.write_sync(tid, content, TAIL)
async def binaryHandler(dl: DataLink, letter: BinaryLetter,
env: Entry.EntryEnv) -> None:
chooserSet = EVENT_HANDLER_TOOLS.chooserSet
if not isinstance(letter, BinaryLetter):
return None
tid = letter.getHeader('tid')
unique_id = tid.split("_")[0]
    # A new file transfer has started.
if unique_id not in chooserSet:
fileName = letter.getFileName()
sto = env.modules.getModule(STORAGE_M_NAME)
chooser = sto.create(unique_id, fileName)
chooserSet[unique_id] = chooser
chooser = chooserSet[unique_id]
content = letter.getContent('bytes')
if content == b"":
        # The file transfer has finished.
chooser.close()
del chooserSet[unique_id]
        # Notify the DataLink that the file transfer has finished.
dl.notify(DataLinkNotify("BINARY", (tid, chooser.path())))
else:
chooser.store(content)
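# Hedged sketch of the chunked transfer this handler expects. The sender runs
# on the worker and is not part of this file; send_binary_letter() is a
# hypothetical helper, shown only to illustrate the end-of-file convention:
#
#   with open(path, 'rb') as f:
#       while True:
#           chunk = f.read(4096)
#           send_binary_letter(tid, chunk)
#           if chunk == b"":   # an empty chunk tells binaryHandler to close
#               break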
def binaryNotify(msg: Tuple[str, str], arg: Any) -> None:
tid, path = msg[0], msg[1]
transfered = EVENT_HANDLER_TOOLS.transfer_finished
if tid in transfered:
return None
transfered[tid] = path
async def logHandler(env: Entry.EntryEnv, letter: Letter) -> None:
logger = env.modules.getModule(LOGGER_M_NAME)
logId = letter.getHeader('logId')
logMsg = letter.getContent('logMsg')
if isinstance(logMsg, str):
await Logger.putLog(logger, logId, logMsg)
async def logRegisterhandler(env: Entry.EntryEnv, letter: Letter) -> None:
logger = env.modules.getModule(LOGGER_M_NAME)
logId = letter.getHeader('logId')
logger.log_register(logId)
###############################################################################
# Notify Handlers #
###############################################################################
class NotifyHandle:
@classmethod
async def handle(self, env: Entry.EntryEnv, nl: Letter) -> None:
if not isinstance(nl, NotifyLetter):
return None
handler = self._search_handler(nl)
if handler is None:
return None
await handler(env, nl)
@classmethod
def _search_handler(self, nl: NotifyLetter) -> Optional[Callable]:
type = nl.notifyType()
try:
return getattr(NotifyHandle, 'NOTIFY_H_'+type)
except AttributeError:
raise NOTIFY_NOT_MATCH_WITH_HANDLER(type)
@classmethod
async def NOTIFY_H_WSC(self, env: Entry.EntryEnv,
nl: NotifyLetter) -> None:
"""
Change state of correspond worker.
"""
wsc_notify = cast(WSCNotify, Notify.transform(nl))
who = wsc_notify.fromWho()
state = wsc_notify.state()
wr = env.modules.getModule(WR_M_NAME) # type: WorkerRoom
wr.setState(who, int(state))
class NOTIFY_NOT_MATCH_WITH_HANDLER(Exception):
def __init__(self, type: str) -> None:
self._type = type
def __str__(self) -> str:
return "Notify " + self._type + " not match with any handler"
| 30.952719
| 79
| 0.654014
|
b67a544f4e782b04aa42c28a3cfaf2ac72cf53bf
| 3,681
|
py
|
Python
|
bookmarks/models.py
|
gradel/django-generic-bookmarks
|
98d4c2099c019a6767fccebd96ec726f35fd1414
|
[
"MIT"
] | null | null | null |
bookmarks/models.py
|
gradel/django-generic-bookmarks
|
98d4c2099c019a6767fccebd96ec726f35fd1414
|
[
"MIT"
] | null | null | null |
bookmarks/models.py
|
gradel/django-generic-bookmarks
|
98d4c2099c019a6767fccebd96ec726f35fd1414
|
[
"MIT"
] | null | null | null |
import string
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import fields
from django.contrib.auth.models import User
from bookmarks import managers
class Bookmark(models.Model):
"""
A user's bookmark for a content object.
This is only used if the current backend stores bookmarks in the database
using Django models.
.. py:attribute:: content_type
the bookmarked instance content type
.. py:attribute:: object_id
the bookmarked instance id
.. py:attribute:: content_object
the bookmarked instance
.. py:attribute:: key
the bookmark key
.. py:attribute:: user
the user who bookmarked the instance
(as a fk to *django.contrib.auth.models.User*)
.. py:attribute:: created_at
the bookmark creation datetime
"""
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = fields.GenericForeignKey('content_type', 'object_id')
key = models.CharField(max_length=16)
user = models.ForeignKey(User, blank=True, null=True,
related_name='bookmarks')
created_at = models.DateTimeField(auto_now_add=True)
# manager
objects = managers.BookmarksManager()
class Meta:
unique_together = ('content_type', 'object_id', 'key', 'user')
def __unicode__(self):
return u'Bookmark for %s by %s' % (self.content_object, self.user)
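# Hedged usage sketch (the article instance and the 'favourite' key below are
# illustrative, not mandated by this app):
#
#   Bookmark.objects.create(
#       content_object=article,   # any model instance
#       key='favourite',
#       user=request.user,
#   )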
# IN BULK SELECT QUERIES
def annotate_bookmarks(queryset_or_model, key, user, attr='is_bookmarked'):
"""
    Annotate *queryset_or_model* with bookmarks, in order to retrieve from
the database all bookmark values in bulk.
The first argument *queryset_or_model* must be, of course, a queryset
or a Django model object. The argument *key* is the bookmark key.
The bookmarks are filtered using given *user*.
A boolean is inserted in an attr named *attr* (default='is_bookmarked')
of each object in the generated queryset.
Usage example::
for article in annotate_bookmarks(Article.objects.all(), 'favourite',
myuser, attr='has_a_bookmark'):
if article.has_a_bookmark:
print u"User %s likes article %s" (myuser, article)
"""
from bookmarks import utils
# getting the queryset
if isinstance(queryset_or_model, models.base.ModelBase):
queryset = queryset_or_model.objects.all()
else:
queryset = queryset_or_model
# preparing arguments for *extra* query
opts = queryset.model._meta
content_type = utils.get_content_type_for_model(queryset.model)
mapping = {
'bookmark_table': Bookmark._meta.db_table,
'model_table': opts.db_table,
'model_pk_name': opts.pk.name,
'content_type_id': content_type.pk,
}
# building base query
template = """
SELECT id FROM ${bookmark_table} WHERE
${bookmark_table}.object_id = ${model_table}.${model_pk_name} AND
${bookmark_table}.content_type_id = ${content_type_id} AND
${bookmark_table}.user_id = %s AND
${bookmark_table}.key = %s
"""
select = {attr: string.Template(template).substitute(mapping)}
return queryset.extra(select=select, select_params=[user.pk, key])
# ABSTRACT MODELS
class BookmarkedModel(models.Model):
"""
Mixin for bookmarkable models.
Models subclassing this abstract model gain a *bookmarks* attribute
    allowing access to the reverse generic relation
to the *bookmarks.models.Bookmark*.
"""
bookmarks = fields.GenericRelation(Bookmark)
class Meta:
abstract = True
| 28.984252
| 77
| 0.690845
|
853ec73a724ac39a307be28b80219f46df66b01a
| 6,526
|
py
|
Python
|
src/utils/net.py
|
rostob/Limnoria
|
068488c546612ee0198cecf1a4a46e2667551bcf
|
[
"BSD-3-Clause"
] | 22
|
2021-09-01T20:51:10.000Z
|
2022-03-23T05:51:58.000Z
|
src/utils/net.py
|
rostob/Limnoria
|
068488c546612ee0198cecf1a4a46e2667551bcf
|
[
"BSD-3-Clause"
] | 16
|
2021-09-02T08:33:29.000Z
|
2022-03-28T18:21:09.000Z
|
src/utils/net.py
|
rostob/Limnoria
|
068488c546612ee0198cecf1a4a46e2667551bcf
|
[
"BSD-3-Clause"
] | 9
|
2021-09-02T09:07:53.000Z
|
2022-03-28T17:34:59.000Z
|
###
# Copyright (c) 2002-2005, Jeremiah Fincher
# Copyright (c) 2011, 2013, James McCoy
# Copyright (c) 2010-2021, Valentin Lorentz
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
"""
Simple utility modules.
"""
import re
import ssl
import socket
import hashlib
import contextlib
from .web import _ipAddr, _domain
emailRe = re.compile(r"^\S+@(%s|%s)$" % (_domain, _ipAddr), re.I)
def getAddressFromHostname(host, port=None, attempt=0):
addrinfo = socket.getaddrinfo(host, port)
addresses = []
for (family, socktype, proto, canonname, sockaddr) in addrinfo:
if sockaddr[0] not in addresses:
addresses.append(sockaddr[0])
return addresses[attempt % len(addresses)]
def getSocket(host, port=None, socks_proxy=None, vhost=None, vhostv6=None):
"""Returns a socket of the correct AF_INET type (v4 or v6) in order to
communicate with host.
"""
if not socks_proxy:
addrinfo = socket.getaddrinfo(host, port)
host = addrinfo[0][4][0]
if socks_proxy:
import socks
s = socks.socksocket()
hostname, port = socks_proxy.rsplit(':', 1)
s.setproxy(socks.PROXY_TYPE_SOCKS5, hostname, int(port),
rdns=True)
return s
if isIPV4(host):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if vhost:
s.bind((vhost, 0))
return s
elif isIPV6(host):
s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
if vhostv6:
s.bind((vhostv6, 0))
return s
else:
raise socket.error('Something wonky happened.')
def isSocketAddress(s):
if ':' in s:
host, port = s.rsplit(':', 1)
try:
int(port)
sock = getSocket(host, port)
return True
except (ValueError, socket.error):
pass
return False
def isIP(s):
"""Returns whether or not a given string is an IP address.
    >>> isIP('255.255.255.255')
    True
    >>> isIP('::1')
    True
    """
return isIPV4(s) or isIPV6(s)
def isIPV4(s):
"""Returns whether or not a given string is an IPV4 address.
    >>> isIPV4('255.255.255.255')
    True
    >>> isIPV4('abc.abc.abc.abc')
    False
"""
if set(s) - set('0123456789.'):
# inet_aton ignores trailing data after the first valid IP address
return False
try:
return bool(socket.inet_aton(str(s)))
except socket.error:
return False
def bruteIsIPV6(s):
if s.count('::') <= 1:
L = s.split(':')
if len(L) <= 8:
for x in L:
if x:
try:
int(x, 16)
except ValueError:
return False
return True
return False
def isIPV6(s):
"""Returns whether or not a given string is an IPV6 address."""
try:
if hasattr(socket, 'inet_pton'):
return bool(socket.inet_pton(socket.AF_INET6, s))
else:
return bruteIsIPV6(s)
except socket.error:
try:
socket.inet_pton(socket.AF_INET6, '::')
except socket.error:
# We gotta fake it.
return bruteIsIPV6(s)
return False
normalize_fingerprint = lambda fp: fp.replace(':', '').lower()
FINGERPRINT_ALGORITHMS = ('md5', 'sha1', 'sha224', 'sha256', 'sha384',
'sha512')
def check_certificate_fingerprint(conn, trusted_fingerprints):
trusted_fingerprints = set(normalize_fingerprint(fp)
for fp in trusted_fingerprints)
cert = conn.getpeercert(binary_form=True)
for algorithm in FINGERPRINT_ALGORITHMS:
h = hashlib.new(algorithm)
h.update(cert)
if h.hexdigest() in trusted_fingerprints:
return
raise ssl.CertificateError('No matching fingerprint.')
@contextlib.contextmanager
def _prefix_ssl_error(prefix):
try:
yield
except ssl.SSLError as e:
raise ssl.SSLError(
e.args[0], '%s failed: %s' % (prefix, e.args[1]), *e.args[2:]) \
from None
def ssl_wrap_socket(conn, hostname, logger, certfile=None,
trusted_fingerprints=None, verify=True, ca_file=None,
**kwargs):
with _prefix_ssl_error('creating SSL context'):
context = ssl.create_default_context(**kwargs)
if ca_file:
with _prefix_ssl_error('loading CA certificate'):
context.load_verify_locations(cafile=ca_file)
elif trusted_fingerprints or not verify:
# Do not use Certification Authorities
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
if certfile:
with _prefix_ssl_error('loading client certfile'):
context.load_cert_chain(certfile)
with _prefix_ssl_error('establishing TLS connection'):
conn = context.wrap_socket(conn, server_hostname=hostname)
if trusted_fingerprints:
check_certificate_fingerprint(conn, trusted_fingerprints)
return conn
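# Minimal usage sketch: wrap an already-connected TCP socket in TLS, verifying
# the peer against the default CA store. The hostname is illustrative only and
# the logger argument is not referenced by ssl_wrap_socket above, so None is
# passed.
def _exampleWrapSocket(hostname='example.org', port=443):
    conn = socket.create_connection((hostname, port))
    return ssl_wrap_socket(conn, hostname, None, verify=True)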
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| 32.63
| 79
| 0.650781
|
3bb68bf441b3d6cc4058c22909eefa48b3900f37
| 5,686
|
py
|
Python
|
third_party/google-endpoints/google/api/gen/servicecontrol_v1_client.py
|
tingshao/catapult
|
a8fe19e0c492472a8ed5710be9077e24cc517c5c
|
[
"BSD-3-Clause"
] | 2,151
|
2020-04-18T07:31:17.000Z
|
2022-03-31T08:39:18.000Z
|
third_party/google-endpoints/google/api/gen/servicecontrol_v1_client.py
|
tingshao/catapult
|
a8fe19e0c492472a8ed5710be9077e24cc517c5c
|
[
"BSD-3-Clause"
] | 4,640
|
2015-07-08T16:19:08.000Z
|
2019-12-02T15:01:27.000Z
|
third_party/google-endpoints/google/api/gen/servicecontrol_v1_client.py
|
tingshao/catapult
|
a8fe19e0c492472a8ed5710be9077e24cc517c5c
|
[
"BSD-3-Clause"
] | 698
|
2015-06-02T19:18:35.000Z
|
2022-03-29T16:57:15.000Z
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generated client library for servicecontrol version v1."""
# NOTE: This file is originally auto-generated using google-apitools then
# style-correcting hand edits were applied. New behaviour should not be provided
# by hand, please re-generate and restyle.
from __future__ import absolute_import
from apitools.base.py import base_api
from . import servicecontrol_v1_messages as messages
class ServicecontrolV1(base_api.BaseApiClient):
"""Generated client library for service servicecontrol version v1."""
MESSAGES_MODULE = messages
_PACKAGE = u'servicecontrol'
_SCOPES = [u'https://www.googleapis.com/auth/cloud-platform',
u'https://www.googleapis.com/auth/servicecontrol']
_VERSION = u'v1'
_CLIENT_CLASS_NAME = u'ServicecontrolV1'
_URL_VERSION = u'v1'
_API_KEY = None
# pylint: disable=too-many-arguments
def __init__(self, url='', credentials=None,
get_credentials=True, http=None, model=None,
log_request=False, log_response=False,
credentials_args=None, default_global_params=None,
additional_http_headers=None):
"""Create a new servicecontrol handle."""
url = url or u'https://servicecontrol.googleapis.com/'
super(ServicecontrolV1, self).__init__(
url, credentials=credentials,
get_credentials=get_credentials, http=http, model=model,
log_request=log_request, log_response=log_response,
credentials_args=credentials_args,
default_global_params=default_global_params,
additional_http_headers=additional_http_headers)
self.services = self.ServicesService(self)
class ServicesService(base_api.BaseApiService):
"""Service class for the services resource."""
_NAME = u'services'
def __init__(self, client):
super(ServicecontrolV1.ServicesService, self).__init__(client)
self._method_configs = {
'check': base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'servicecontrol.services.check',
ordered_params=[u'serviceName'],
path_params=[u'serviceName'],
query_params=[],
relative_path=u'v1/services/{serviceName}:check',
request_field=u'checkRequest',
request_type_name=u'ServicecontrolServicesCheckRequest',
response_type_name=u'CheckResponse',
supports_download=False,
),
'report': base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'servicecontrol.services.report',
ordered_params=[u'serviceName'],
path_params=[u'serviceName'],
query_params=[],
relative_path=u'v1/services/{serviceName}:report',
request_field=u'reportRequest',
request_type_name=u'ServicecontrolServicesReportRequest',
response_type_name=u'ReportResponse',
supports_download=False,
),
}
self._upload_configs = {
}
def check(self, request, global_params=None):
"""Checks quota, abuse status etc. to decide whether the given
operation. should proceed. It should be called by the service
before the given operation is executed.
This method requires the `servicemanagement.services.check`
permission on the specified service. For more information, see
[Google Cloud IAM](https://cloud.google.com/iam).
Args:
request: (ServicecontrolServicesCheckRequest) input message
global_params: (StandardQueryParameters, default: None)
global arguments
Returns:
(CheckResponse) The response message.
"""
config = self.GetMethodConfig('check')
return self._RunMethod(
config, request, global_params=global_params)
def report(self, request, global_params=None):
"""Reports an operation to the service control features such as
billing, logging, monitoring etc. It should be called by the
service after the given operation is completed.
This method requires the `servicemanagement.services.report`
permission on the specified service. For more information, see
[Google Cloud IAM](https://cloud.google.com/iam).
Args:
request: (ServicecontrolServicesReportRequest) input message
global_params: (StandardQueryParameters, default: None) global
arguments
Returns:
(ReportResponse) The response message.
"""
config = self.GetMethodConfig('report')
return self._RunMethod(
config, request, global_params=global_params)
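# Minimal usage sketch: instantiate the generated client and issue a check
# call. This assumes application default credentials are available; the empty
# CheckRequest payload is illustrative only (see servicecontrol_v1_messages
# for the real fields).
def _example_check(service_name=u'example.googleapis.com'):
  client = ServicecontrolV1()
  request = messages.ServicecontrolServicesCheckRequest(
      serviceName=service_name,
      checkRequest=messages.CheckRequest())
  return client.services.check(request)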
| 44.421875
| 78
| 0.634013
|
bd332ccb8861638331592776a1f9ee7cdbdf2c1a
| 1,710
|
py
|
Python
|
frappe_notification/handlers/fcm.py
|
leam-tech/frappe_notification
|
79e40f2c541d86d714a0b8d48b87f32b2f85076a
|
[
"MIT"
] | null | null | null |
frappe_notification/handlers/fcm.py
|
leam-tech/frappe_notification
|
79e40f2c541d86d714a0b8d48b87f32b2f85076a
|
[
"MIT"
] | null | null | null |
frappe_notification/handlers/fcm.py
|
leam-tech/frappe_notification
|
79e40f2c541d86d714a0b8d48b87f32b2f85076a
|
[
"MIT"
] | null | null | null |
import frappe
from renovation_core.utils.fcm import _notify_via_fcm
from frappe_notification import NotificationOutbox, NotificationOutboxStatus
def fcm_handler(
*,
# The channel selected, ie Telegram
channel: str,
# The Sender Type, for eg. TelegramBot
sender_type: str,
# The Sender, TelegramBot.bot_a
sender: str,
# Recipient ID, @test-user-a
channel_id: str,
# Channel Specific Args, like FCM Data, Email CC
channel_args: dict,
# Subject of message, ignore for Telegram, useful for Email
subject: str,
# The text message content
content: str,
# The name of Notification Outbox
outbox: str,
# The name of the child row in Notification Outbox
outbox_row_name: str,
# When this is true, verify the channel_id & other params. Do not send the message
to_validate=False,
# If there is any extra arguments, eg: user_identifier
**kwargs
):
assert channel == "FCM"
if to_validate:
# TODO: We could make use of Firebase Library ?
return True
outbox: NotificationOutbox = frappe.get_doc("Notification Outbox", outbox)
try:
fcm_data = None
if channel_args and "fcm_data" in channel_args:
fcm_data = channel_args.get("fcm_data")
if not frappe.flags.in_test:
_notify_via_fcm(
title=subject,
body=content,
data=fcm_data,
tokens=[
channel_id
]
)
outbox.update_status(outbox_row_name, NotificationOutboxStatus.SUCCESS)
except BaseException:
outbox.update_status(outbox_row_name, NotificationOutboxStatus.FAILED)
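# Minimal invocation sketch of the keyword-only handler contract. With
# to_validate=True the handler returns right after validation and never touches
# the outbox or FCM; every value below is illustrative.
def _example_validate():
    return fcm_handler(
        channel="FCM",
        sender_type="FCM Sender",
        sender="default-sender",
        channel_id="example-device-token",
        channel_args={},
        subject="Hello",
        content="Hello from frappe_notification",
        outbox="OUTBOX-0001",
        outbox_row_name="row-1",
        to_validate=True,
    )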
| 30
| 86
| 0.650877
|
ed335b37686e71ab33c8806c933cc9c626764e27
| 1,662
|
py
|
Python
|
test/test_send_test_email.py
|
Danilka/APIv3-python-library
|
c96472f47d652d2e09e8b4a48a80e33fde06e7f1
|
[
"MIT"
] | null | null | null |
test/test_send_test_email.py
|
Danilka/APIv3-python-library
|
c96472f47d652d2e09e8b4a48a80e33fde06e7f1
|
[
"MIT"
] | null | null | null |
test/test_send_test_email.py
|
Danilka/APIv3-python-library
|
c96472f47d652d2e09e8b4a48a80e33fde06e7f1
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
SendinBlue API
SendinBlue provide a RESTFul API that can be used with any languages. With this API, you will be able to : - Manage your campaigns and get the statistics - Manage your contacts - Send transactional Emails and SMS - and much more... You can download our wrappers at https://github.com/orgs/sendinblue **Possible responses** | Code | Message | | :-------------: | ------------- | | 200 | OK. Successful Request | | 201 | OK. Successful Creation | | 202 | OK. Request accepted | | 204 | OK. Successful Update/Deletion | | 400 | Error. Bad Request | | 401 | Error. Authentication Needed | | 402 | Error. Not enough credit, plan upgrade needed | | 403 | Error. Permission denied | | 404 | Error. Object does not exist | | 405 | Error. Method not allowed | # noqa: E501
OpenAPI spec version: 3.0.0
Contact: contact@sendinblue.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import sib_api_v3_sdk
from sib_api_v3_sdk.models.send_test_email import SendTestEmail # noqa: E501
from sib_api_v3_sdk.rest import ApiException
class TestSendTestEmail(unittest.TestCase):
"""SendTestEmail unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testSendTestEmail(self):
"""Test SendTestEmail"""
# FIXME: construct object with mandatory attributes with example values
# model = sib_api_v3_sdk.models.send_test_email.SendTestEmail() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 40.536585
| 820
| 0.672082
|
222bc40f18991f06f0ba69774807bc17db1e0d63
| 148
|
py
|
Python
|
src/hurnado/app/handler/resource.py
|
guyuecanhui/hurnado
|
fc54eed8ebd70768ce6e1f1073e506d365bb5b8d
|
[
"Apache-2.0"
] | null | null | null |
src/hurnado/app/handler/resource.py
|
guyuecanhui/hurnado
|
fc54eed8ebd70768ce6e1f1073e506d365bb5b8d
|
[
"Apache-2.0"
] | null | null | null |
src/hurnado/app/handler/resource.py
|
guyuecanhui/hurnado
|
fc54eed8ebd70768ce6e1f1073e506d365bb5b8d
|
[
"Apache-2.0"
] | null | null | null |
# coding:utf-8
__author__ = 'cheng.hu'
from base import BaseHandler
class QueryUserHandler(BaseHandler):
def get(self):
print "list"
| 14.8
| 36
| 0.695946
|
3a53b28cf5da8f667494c0ad05501f606779f7b3
| 294
|
py
|
Python
|
configs/memdpc/r3d_18_kinetics/finetune_ucf101.py
|
bpiyush/CtP-ssl
|
33f325f4f824508ea6391cfbb52d3e17623b7e8f
|
[
"MIT"
] | 37
|
2021-05-07T05:50:11.000Z
|
2022-03-30T03:38:48.000Z
|
configs/memdpc/r3d_18_kinetics/finetune_ucf101.py
|
bpiyush/CtP-ssl
|
33f325f4f824508ea6391cfbb52d3e17623b7e8f
|
[
"MIT"
] | 2
|
2021-06-03T10:25:30.000Z
|
2021-08-18T09:36:48.000Z
|
configs/memdpc/r3d_18_kinetics/finetune_ucf101.py
|
bpiyush/CtP-ssl
|
33f325f4f824508ea6391cfbb52d3e17623b7e8f
|
[
"MIT"
] | 5
|
2021-05-18T02:21:33.000Z
|
2022-02-08T06:49:22.000Z
|
_base_ = ['../../recognizers/_base_/model_r3d18.py',
'../../recognizers/_base_/runtime_ucf101.py']
work_dir = './output/memdpc/r3d_18_kinetics/finetune_ucf101/'
model = dict(
backbone=dict(
pretrained='./output/memdpc/r3d_18_kinetics/pretraining/epoch_90.pth',
),
)
| 26.727273
| 78
| 0.676871
|
97795f8e82c2b2b435c24403e7fd45aff83cc6ab
| 576
|
py
|
Python
|
src/django-nonrel/django/contrib/localflavor/uy/uy_departaments.py
|
adamjmcgrath/glancydesign
|
826ede7c639879d5b79ee730eb5e91422768cb02
|
[
"BSD-3-Clause"
] | 790
|
2015-01-03T02:13:39.000Z
|
2020-05-10T19:53:57.000Z
|
django/contrib/localflavor/uy/uy_departaments.py
|
mradziej/django
|
5d38965743a369981c9a738a298f467f854a2919
|
[
"BSD-3-Clause"
] | 1,361
|
2015-01-08T23:09:40.000Z
|
2020-04-14T00:03:04.000Z
|
django/contrib/localflavor/uy/uy_departaments.py
|
mradziej/django
|
5d38965743a369981c9a738a298f467f854a2919
|
[
"BSD-3-Clause"
] | 155
|
2015-01-08T22:59:31.000Z
|
2020-04-08T08:01:53.000Z
|
# -*- coding: utf-8 -*-
"""A list of Urguayan departaments as `choices` in a formfield."""
DEPARTAMENT_CHOICES = (
('G', u'Artigas'),
('A', u'Canelones'),
('E', u'Cerro Largo'),
('L', u'Colonia'),
('Q', u'Durazno'),
('N', u'Flores'),
('O', u'Florida'),
('P', u'Lavalleja'),
('B', u'Maldonado'),
('S', u'Montevideo'),
('I', u'Paysandú'),
('J', u'Río Negro'),
('F', u'Rivera'),
('C', u'Rocha'),
('H', u'Salto'),
('M', u'San José'),
('K', u'Soriano'),
('R', u'Tacuarembó'),
('D', u'Treinta y Tres'),
)
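# Usage sketch: the tuples above plug directly into a choices-based form or
# model field, e.g. (with django.forms imported as forms):
#     departament = forms.ChoiceField(choices=DEPARTAMENT_CHOICES)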
| 23.04
| 66
| 0.458333
|
b323fd82fc0a9d1ba95a7e04b173594da1a282b0
| 3,119
|
py
|
Python
|
tests/test_api.py
|
hyperplan-io/cli
|
dc7d407701fd78d9065d60c35b0f2674b28c86bb
|
[
"MIT"
] | 1
|
2019-09-04T02:33:34.000Z
|
2019-09-04T02:33:34.000Z
|
tests/test_api.py
|
hyperplan-io/cli
|
dc7d407701fd78d9065d60c35b0f2674b28c86bb
|
[
"MIT"
] | 1
|
2019-09-16T06:09:42.000Z
|
2019-09-16T06:09:42.000Z
|
tests/test_api.py
|
hyperplan-io/cli
|
dc7d407701fd78d9065d60c35b0f2674b28c86bb
|
[
"MIT"
] | null | null | null |
import logging
from hyperplan.api import Api
from hyperplan.features_descriptors import *
from hyperplan.labels_descriptors import *
from hyperplan.project import *
from hyperplan.predict import *
class TestWorkingApi:
def setUp(self):
self.api = Api("test_service", "http://localhost:8080", "admin", "admin")
self.logger = logging.getLogger()
self.logger.setLevel(level=logging.DEBUG)
def teardown_module(self):
self.api.delete_features('myFeature')
self.api.delete_labels('myLabel')
def test_01_list_features(self):
features = list_features(self.api, self.logger)
assert features is not None
def test_02_list_labels(self):
labels = list_labels(self.api, self.logger)
assert labels is not None
def test_03_list_projects(self):
        projects = list_labels(self.api, self.logger)  # note: exercises label listing; no project-specific call is made here
assert projects is not None
def test_04_create_features(self):
features = [{'name': 'myFeature', 'type': 'string', 'dimension': 'scalar', 'description': 'My custom feature'}]
create_features(self.api, self.logger, 'myFeature', features)
feature_data = describe_feature(self.api, self.logger, 'myFeature')['data']
assert feature_data == features
def test_05_create_label(self):
create_labels(self.api, self.logger, 'myLabel', label_type='oneOf', label_one_of=['test1','test2','test3'], label_description='My description')
label_data = describe_label(self.api, self.logger, 'myLabel')
assert label_data == { 'id': 'myLabel', 'data': {'type': 'oneOf', 'oneOf': ['test1', 'test2', 'test3'], 'description': 'My description'}}
def test_06_create_project(self):
create_project(self.api, self.logger, 'myProjectId', 'My project name', 'classification', 'myFeature', 'myLabel', 'myTopic')
project = describe_project(self.api, self.logger, 'myProjectId')
assert project == {'id': 'myProjectId', 'name': 'My project name', 'problem': 'classification', 'algorithms': [{'id': 'random', 'projectId': 'myProjectId', 'backend': {'class': 'LocalRandomClassification', 'labels': ['test1', 'test2', 'test3']}, 'security': {'encryption': 'plain', 'headers': []}}], 'policy': {'class': 'DefaultAlgorithm', 'algorithmId': 'random'}, 'configuration': {'features': {'id': 'myFeature', 'data': [{'name': 'myFeature', 'type': 'string', 'dimension': 'scalar', 'description': 'My custom feature'}]}, 'labels': {'id': 'myLabel', 'data': {'type': 'oneOf', 'oneOf': ['test1', 'test2', 'test3'], 'description': 'My description'}}, 'dataStream': {'topic': 'myTopic'}}}
def test_07_predict(self):
prediction = predict(self.api, self.logger, 'myProjectId', features = { 'myFeature': 'example string' }, annotate=False, log=False)
assert prediction['type'] == 'classification'
assert prediction['projectId'] == 'myProjectId'
assert prediction['algorithmId'] == 'random'
assert prediction['features'] == [{'key': 'myFeature', 'type': 'string', 'dimension': 'scalar', 'value': 'example string'}]
assert prediction['examples'] == []
| 58.849057
| 698
| 0.659186
|
5c3000aea61dc4c83fd640f261b96aa861c86c57
| 1,561
|
py
|
Python
|
Orbit Fitting/fit.py
|
vighnesh-nagpal/HD_104304
|
d89585b341db666e33b2ea505da6dd6f9d68282d
|
[
"MIT"
] | null | null | null |
Orbit Fitting/fit.py
|
vighnesh-nagpal/HD_104304
|
d89585b341db666e33b2ea505da6dd6f9d68282d
|
[
"MIT"
] | null | null | null |
Orbit Fitting/fit.py
|
vighnesh-nagpal/HD_104304
|
d89585b341db666e33b2ea505da6dd6f9d68282d
|
[
"MIT"
] | null | null | null |
import orbitize
from orbitize import read_input, system, priors, sampler,results
import h5py
import os
def main():
#parameters for the system
num_planets=1
data_table = read_input.read_file('../Data/data.csv')
m0 = 1.01
mass_err = 0.05
plx=78.33591471044681
plx_err=0.1
#initialise a system object
sys = system.System(
num_planets, data_table, m0,
        plx, mass_err=mass_err, plx_err=plx_err, fit_secondary_mass=True
    )
    # map parameter labels (e.g. 'plx1', 'sma1') to prior indices
    lab = sys.param_idx
    sys.sys_priors[lab['plx1']] = priors.UniformPrior(60, 110)
    sys.sys_priors[lab['sma1']] = priors.UniformPrior(0.5, 1.50)
#MCMC parameters
n_temps=10
n_walkers=1000
n_threads=10
total_orbits_MCMC=75000000
burn_steps=15000
thin=10
#set up sampler object and run it
mcmc_sampler = sampler.MCMC(sys,n_temps,n_walkers,n_threads)
orbits=mcmc_sampler.run_sampler(total_orbits_MCMC,burn_steps=burn_steps,thin=thin)
#save results
myResults=mcmc_sampler.results
try:
### CHANGE THIS TO SAVE TO YOUR DESIRED DIRECTORY ##
filename = 'floatplx.hdf5'
# hdf5_filename=os.path.join(save_path,filename)
myResults.save_results(filename) # saves results object as an hdf5 file
except:
print("Something went wrong while saving the results")
finally:
corner_figure=myResults.plot_corner()
corner_name='floatplx_corner.png'
corner_figure.savefig(corner_name)
orbit_figure=myResults.plot_orbits(rv_time_series=True)
orbit_name='floatplx_orbit.png'
orbit_figure.savefig(orbit_name)
return None
if __name__ == '__main__':
main()
| 26.016667
| 84
| 0.732864
|
fa65121e3ad95c881c3be94f230c274a13970205
| 17,870
|
py
|
Python
|
astrobot/cogs/moderation.py
|
astro-devel/astrobot
|
d0a957731cabed23a7442c8ad4672e7331946e26
|
[
"MIT"
] | null | null | null |
astrobot/cogs/moderation.py
|
astro-devel/astrobot
|
d0a957731cabed23a7442c8ad4672e7331946e26
|
[
"MIT"
] | 8
|
2022-01-24T00:14:54.000Z
|
2022-03-25T18:19:20.000Z
|
astrobot/cogs/moderation.py
|
astro-devel/astrobot
|
d0a957731cabed23a7442c8ad4672e7331946e26
|
[
"MIT"
] | 1
|
2022-01-24T22:37:47.000Z
|
2022-01-24T22:37:47.000Z
|
import re
import time
import datetime
import string
from typing import List, Optional
import discord
import sqlalchemy as sql
from discord.ext import commands
from astrobot import util
from astrobot.checks import invoker_is_lower_rank
from astrobot.user_sys import database as db
class Moderation(commands.Cog):
def __init__(self, bot) -> None:
self.bot: commands.Bot = bot
def increment_db_count(self, member, guild_id, mod_type=None) -> None:
_query = db.session.query(db.GuildUser__Obj)
modinfo = None
for item in _query:
if str(item.user_id) == str(member.id) and str(item.guild_id) == str(
guild_id
):
modinfo = item.moderation_info
break
if modinfo:
modinfo[mod_type] += 1
db.session.execute(
sql.update(db.GuildUser__Obj)
.where(
(
sql.and_(
db.GuildUser__Obj.user_id == member.id,
db.GuildUser__Obj.guild_id == guild_id,
)
)
)
.values(moderation_info=modinfo)
.execution_options(synchronize_session="fetch")
)
else:
userobj = db.GuildUser__Obj.blank_obj(member.id, guild_id)
userobj.moderation_info[mod_type] += 1
db.session.add(userobj)
db.session.commit()
return
@commands.command(
brief="Warn a user", help="Warn a given user.", usage="@[user] [reason]"
)
@commands.has_permissions(kick_members=True)
@commands.check(invoker_is_lower_rank)
async def warn(
self, ctx, member: discord.Member, *, reason=None, bot_invoked=False
):
if bot_invoked:
invoker = self.bot.user
else:
invoker = ctx.author
self.increment_db_count(
member=member, guild_id=ctx.guild.id, mod_type="warn"
) # increment count in database if not bot invoked
# send warn Message to user
embed = discord.Embed(
title=f"{self.bot.custom_emojis.warning} Warning! {self.bot.custom_emojis.warning}",
description=f"Server: {ctx.guild.name}\nWarned by: {invoker}\nReason: {reason}",
colour=self.bot.colors.orange,
)
if bot_invoked:
embed.set_footer(
text="NOTE: This will not count against your official warnings tally."
)
try:
await member.send(embed=embed)
except discord.Forbidden: # if user only accepts DMs from friends, warn them in server channel
embed = discord.Embed(
title=f"{self.bot.custom_emojis.warning} Warning! {self.bot.custom_emojis.warning}",
description=f"Warned by: {invoker}\nReason: {reason}",
colour=self.bot.colors.orange,
)
await ctx.send(member.mention, embed=embed, delete_after=10)
return
# send success message in channel
embed = discord.Embed(
title=f"{self.bot.custom_emojis.success} Warned {member.name}\nReason: {reason}",
colour=self.bot.colors.green,
)
if not bot_invoked: # no need to send a warn_success if automod
await ctx.send(embed=embed, delete_after=10)
@commands.command()
@commands.has_permissions(ban_members=True)
async def get_bans(self, ctx):
bans = await ctx.guild.bans()
list = ""
counter = 1
for ban in bans:
list += f"{counter}. {ban.user} ({ban.reason if ban.reason else 'No reason given'})\n"
counter += 1
embed = discord.Embed(
title="Banned Users", description=list, colour=self.bot.colors.black
)
await ctx.send(embed=embed)
@commands.command()
@commands.has_permissions(kick_members=True, ban_members=True)
async def modinfo(self, ctx, *, member: str):
# TODO: implement just name search and if multiple, show list of options (with nick if applicable)
if member[0:3] == "<@!":
await ctx.send(
embed=discord.Embed(
title=f"{self.bot.custom_emojis.error} **Please use User#Discriminator format instead of mention. i.e. '!modinfo DiscordUser#1234'**",
colour=self.bot.colors.red,
),
delete_after=10,
)
return
_member_name, _member_discriminator = member.split("#")
_member_obj = None
async for _member in ctx.guild.fetch_members():
if (
_member.name == _member_name
and _member.discriminator == _member_discriminator
):
_member_obj = _member
break
if not _member_obj:
await ctx.send(
embed=discord.Embed(
title=f"{self.bot.custom_emojis.error} **Unable to find user {member}, are they in the server?**",
colour=self.bot.colors.red,
),
delete_after=10,
)
return
member = _member
embed = discord.Embed(title=f"Moderation info for {member}")
_query = db.session.query(db.GuildUser__Obj)
_user = db.GuildUser__Obj.blank_obj(member.id, ctx.guild.id)
for item in _query:
if str(item.user_id) == str(member.id) and str(item.guild_id) == str(
ctx.guild.id
):
_user = item
break
embed.add_field(
name="**Warns**", value=_user.moderation_info["warn"]
).add_field(name="**Bans**", value=_user.moderation_info["ban"]).add_field(
name="**Kicks**", value=_user.moderation_info["kick"]
).add_field(
name="**Mutes**", value=_user.moderation_info["mute"]
).set_thumbnail(
url=member.avatar.__str__() if member.avatar else None
)
await ctx.send(embed=embed)
@commands.command(
brief="Ban a user", help="Ban a given user.", usage="@[user] [reason]"
)
@commands.has_permissions(ban_members=True)
@commands.check(invoker_is_lower_rank)
async def ban(self, ctx, member: discord.Member, *, reason=None):
try:
await ctx.guild.ban(user=member, reason=reason)
except discord.Forbidden:
await ctx.send(
embed=discord.Embed(
title=f"{self.bot.custom_emojis.error} **Unable to ban {member.name}#{member.discriminator}, try manually.**"
)
)
return
embed = discord.Embed(
title=f"You have been banned from {ctx.guild.name}",
description=f"Banned by: {ctx.author}\nBan Reason: {reason}",
colour=self.bot.colors.red,
)
if not member.bot:
try:
await member.send(embed=embed)
except discord.Forbidden: # if user only accepts DMs from friends, nothing to do
pass
self.increment_db_count(member=member, guild_id=ctx.guild.id, mod_type="ban")
text = f"{self.bot.custom_emojis.success} **Successfully banned user {member.name}#{member.discriminator}**"
embed = discord.Embed(title=text, colour=self.bot.colors.green)
await ctx.send(embed=embed)
@commands.command(
brief="Kick a user", help="Kick a given user.", usage="@[user] [reason]"
)
@commands.has_permissions(kick_members=True)
@commands.check(invoker_is_lower_rank)
async def kick(self, ctx, member: discord.Member, *, reason=None):
try:
await ctx.guild.kick(user=member, reason=reason)
except discord.Forbidden:
await ctx.send(
embed=discord.Embed(
title=f"{self.bot.custom_emojis.error} **Unable to kick {member.name}#{member.discriminator}, try manually.**"
)
)
return
embed = discord.Embed(
title=f"You have been kicked from {ctx.guild.name}",
description=f"Kicked by: {ctx.author}\nKick Reason: {reason}",
colour=self.bot.colors.red,
)
if not member.bot:
try:
await member.send(embed=embed)
except discord.Forbidden: # if user only accepts DMs from friends, nothing to do
pass
self.increment_db_count(member=member, guild_id=ctx.guild.id, mod_type="kick")
text = f"{self.bot.custom_emojis.success} **Successfully kicked user {member.name}#{member.discriminator}**"
embed = discord.Embed(description=text, colour=self.bot.colors.green)
await ctx.send(embed=embed)
def is_int_convertable(self, item):
try:
int(item)
return True
except ValueError:
return False
@commands.command(
brief="Unban a user",
help="Unban a given user.",
usage="[number] | [user]#[discriminator]",
)
@commands.has_permissions(ban_members=True)
async def unban(self, ctx, *, member):
if self.is_int_convertable(member):
member = int(member)
bans = await ctx.guild.bans()
_unban = bans[member - 1].user
await ctx.guild.unban(_unban)
text = f"{self.bot.custom_emojis.success} **Successfully unbanned user {_unban}**"
embed = discord.Embed(title=text, colour=self.bot.colors.green)
await ctx.send(embed=embed)
else:
banned_users = await ctx.guild.bans()
member_name, member_discriminator = member.split("#")
for ban_entry in banned_users:
user = ban_entry.user
if (user.name, user.discriminator) == (
member_name,
member_discriminator,
):
await ctx.guild.unban(user)
text = f"{self.bot.custom_emojis.success} **Successfully unbanned user {user}**"
embed = discord.Embed(title=text, colour=self.bot.colors.green)
await ctx.send(embed=embed)
@commands.command()
@commands.has_permissions(moderate_members=True)
@commands.check(invoker_is_lower_rank)
async def ismuted(self, ctx, member: discord.Member):
if (
member.communication_disabled_until
and member.communication_disabled_until < discord.utils.utcnow()
):
await member.edit(communication_disabled_until=None)
embed = discord.Embed(title=f"Mute status for {member}:")
embed.add_field(
name="Status:",
value="Muted" if member.communication_disabled_until else "Not Muted",
)
if member.communication_disabled_until:
embed.add_field(
name="Expires at:",
value=f"<t:{int(member.communication_disabled_until.timestamp())}:F>",
)
await ctx.send(embed=embed)
@commands.command()
@commands.has_permissions(moderate_members=True)
@commands.check(invoker_is_lower_rank)
async def mute(self, ctx, member: discord.Member, _time: str, *, reason=None):
if (
member.communication_disabled_until
and member.communication_disabled_until < discord.utils.utcnow()
):
await member.edit(communication_disabled_until=None)
if member.communication_disabled_until: # if user is already timed out, return
await ctx.send(
embed=discord.Embed(
title=f"{self.bot.custom_emojis.error} **{member}** is already muted! Expires <t:{int(member.communication_disabled_until.timestamp())}:R>",
colour=self.bot.colors.red,
)
)
return
_timestamp = int(time.time())
_mute_length = util.convert_time(_time)[0]
_unmute_time = _mute_length + _timestamp
iso_timestamp = datetime.datetime.fromtimestamp(
_unmute_time, tz=datetime.timezone.utc
)
self.increment_db_count(member, guild_id=ctx.guild.id, mod_type="mute")
await member.timeout(until=iso_timestamp, reason=reason)
# attempt to send DM to muted user
try:
await member.send(
embed=discord.Embed(
title=f"{self.bot.custom_emojis.warning} You have been muted in {ctx.guild.name} for {_time}.",
description=f"Reason: {reason}",
colour=self.bot.colors.orange,
)
)
except discord.Forbidden: # if user only accepts DMs from friends, nothing to do
pass
await ctx.send(
embed=discord.Embed(
title=f"{self.bot.custom_emojis.success} **{member}** has successfully been muted.",
colour=self.bot.colors.green,
)
)
return
@commands.command()
@commands.has_permissions(moderate_members=True)
@commands.check(invoker_is_lower_rank)
async def unmute(self, ctx, member: discord.Member, *, reason=None):
if not member.communication_disabled_until:
embed = discord.Embed(
title=f"{self.bot.custom_emojis.error} **{member}** is not muted!",
colour=self.bot.colors.red,
)
await ctx.send(embed=embed)
return
await member.remove_timeout(reason=reason)
try:
await member.send(
embed=discord.Embed(
title=f"You have been unmuted in {ctx.guild.name}",
description=f"Reason: {reason}",
colour=self.bot.colors.green,
)
)
except discord.Forbidden: # if user only accepts DMs from friends, nothing to do
pass
await ctx.send(
embed=discord.Embed(
title=f"{self.bot.custom_emojis.success} **{member}** has successfully been unmuted.",
colour=self.bot.colors.green,
)
)
return
@commands.command()
@commands.has_permissions(manage_roles=True)
async def lock(self, ctx, *, channels: Optional[str]):
l = await ctx.guild.fetch_roles()
if channels:
channels = re.sub('[#<>]', '', channels).split()
else:
            channels = [str(ctx.channel.id)]  # default to the invoking channel
for c in channels:
channel = ctx.guild.get_channel(int(c))
await channel.set_permissions(l[0], send_messages=False)
await ctx.send(f"Successfully locked down {channel.mention}.")
return
@commands.command()
@commands.has_permissions(manage_roles=True)
async def unlock(self, ctx, *, channels: Optional[str]):
l = await ctx.guild.fetch_roles()
if channels:
channels = re.sub('[#<>]', '', channels).split()
else:
            channels = [str(ctx.channel.id)]  # default to the invoking channel
for c in channels:
channel = ctx.guild.get_channel(int(c))
await channel.set_permissions(l[0], send_messages=True)
await ctx.send(f"Successfully unlocked {channel.mention}.")
return
@commands.command()
@commands.has_permissions(administrator=True)
async def blockword(self, ctx, word: str):
self.bot.blocked_words.append(word.lower())
self.bot.sync_blocked_words()
await ctx.send(
embed=discord.Embed(
title=f"{self.bot.custom_emojis.success} Added word '{word.lower()}' to blocked words list.",
color=self.bot.colors.green,
)
)
@commands.command()
@commands.has_permissions(administrator=True)
async def unblockword(self, ctx, word: str):
if word.lower() not in self.bot.blocked_words:
await ctx.send(
embed=discord.Embed(
title=f"{self.bot.custom_emojis.error} '{word.lower()}' is not currently a blocked word.",
color=self.bot.colors.red,
)
)
return
self.bot.blocked_words.remove(word.lower())
self.bot.sync_blocked_words()
await ctx.send(
embed=discord.Embed(
title=f"{self.bot.custom_emojis.success} Removed word '{word.lower()}' from blocked words list.",
color=self.bot.colors.green,
)
)
@commands.command()
@commands.has_permissions(administrator=True)
async def blockedwords(self, ctx):
if not self.bot.blocked_words:
await ctx.send("There are currently no blocked words.")
return
await ctx.send(
"```\n\
{0}```".format(
"\n".join(self.bot.blocked_words)
)
)
@commands.Cog.listener()
async def on_message(self, message):
# check message for blocked words
ctx: commands.Context = await self.bot.get_context(message)
words = list(set(message.content.split()))
if "!blockword" in words or ctx.author.bot:
return
for word in words:
word: str = word.lower()
for char in word:
if char not in string.ascii_letters:
word = word.replace(char, "")
if word in self.bot.blocked_words:
await message.delete()
await self.warn(
ctx,
message.author,
reason=f"`{word}` is a forbidden word. Watch your language!",
bot_invoked=True,
)
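# Loading sketch: discord.py extensions conventionally expose a setup() hook
# that registers this cog. Whether add_cog must be awaited depends on the
# discord.py version in use, so the hook is sketched as a comment:
#     def setup(bot):
#         bot.add_cog(Moderation(bot))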
| 38.183761
| 160
| 0.570845
|
0255789e54991da37b083604d997cec027dce107
| 13,032
|
py
|
Python
|
src/uvm/base/uvm_event.py
|
rodrigomelo9/uvm-python
|
e3127eba2cc1519a61dc6f736d862a8dcd6fce20
|
[
"Apache-2.0"
] | 140
|
2020-01-18T00:14:17.000Z
|
2022-03-29T10:57:24.000Z
|
src/uvm/base/uvm_event.py
|
Mohsannaeem/uvm-python
|
1b8768a1358d133465ede9cadddae651664b1d53
|
[
"Apache-2.0"
] | 24
|
2020-01-18T18:40:58.000Z
|
2021-03-25T17:39:07.000Z
|
src/uvm/base/uvm_event.py
|
Mohsannaeem/uvm-python
|
1b8768a1358d133465ede9cadddae651664b1d53
|
[
"Apache-2.0"
] | 34
|
2020-01-18T12:22:59.000Z
|
2022-02-11T07:03:11.000Z
|
#//
#//------------------------------------------------------------------------------
#// Copyright 2007-2010 Mentor Graphics Corporation
#// Copyright 2007-2011 Cadence Design Systems, Inc.
#// Copyright 2010 Synopsys, Inc.
#// Copyright 2014 NVIDIA Corportation
#// Copyright 2019 Tuomas Poikela (tpoikela)
#// All Rights Reserved Worldwide
#//
#// Licensed under the Apache License, Version 2.0 (the
#// "License"); you may not use this file except in
#// compliance with the License. You may obtain a copy of
#// the License at
#//
#// http://www.apache.org/licenses/LICENSE-2.0
#//
#// Unless required by applicable law or agreed to in
#// writing, software distributed under the License is
#// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
#// CONDITIONS OF ANY KIND, either express or implied. See
#// the License for the specific language governing
#// permissions and limitations under the License.
#//------------------------------------------------------------------------------
from cocotb.triggers import Event
from .sv import sv, wait
from .uvm_object import UVMObject
from ..macros.uvm_message_defines import uvm_warning
from .uvm_globals import uvm_zero_delay
from .uvm_queue import UVMQueue
class UVMEventBase(UVMObject):
"""
The UVMEventBase class is an abstract wrapper class around the
`cocotb.triggers.Event` class. It provides some additional services
such as setting self.callbacks and maintaining the number of waiters.
"""
type_name = "uvm_event_base"
def __init__(self, name=""):
"""
Creates a new event object.
Args:
name (str): Name of this event.
"""
super().__init__(name)
self.m_event = Event()
self.num_waiters = 0
self.on = False
self.on_event = Event("on_event_" + name)
self.trigger_time = 0
self.callbacks = UVMQueue() # list[uvm_event_callback]
self.m_waiters = 0
self.m_value_changed_event = Event("value_changed_event_" + name)
def set_value(self, key, value):
"""
Sets a value inside this event object and triggers value_changed_event.
Args:
key (str): Member variable name
value (any): Value to set for that member variable.
"""
setattr(self, key, value)
self.m_value_changed_event.set()
def set(self):
if self.m_waiters > 0:
self.on_event.set()
async def wait(self):
if self.m_waiters == 0:
self.on_event.clear()
self.m_waiters += 1
await self.on_event.wait()
self.m_waiters -= 1
if self.m_waiters == 0:
self.on_event.clear()
async def wait_on(self, delta=False):
"""
Waits for the event to be activated for the first time.
If the event has already been triggered, this task returns immediately.
If `delta` is set, the caller will be forced to wait a single delta #0
before returning. This prevents the caller from returning before
previously waiting processes have had a chance to resume.
        Once an event has been triggered, it will remain "on" until the event
        is reset via `UVMEvent.reset`.
Args:
delta (bool): If True, wait for one zero delay
"""
if self.on is True:
if delta is True:
#0
await uvm_zero_delay()
await uvm_zero_delay() # TODO tpoikela: redundant, should be removed?
return
self.num_waiters += 1
await wait(lambda: self.on is True, self.m_value_changed_event)
async def wait_off(self, delta=False):
"""
If the event has already triggered and is "on", this task waits for the
event to be turned "off" via a call to `reset`.
If the event has not already been triggered, this task returns immediately.
If `delta` is set, the caller will be forced to wait a single delta #0
before returning. This prevents the caller from returning before
previously waiting processes have had a chance to resume.
Args:
delta (bool): If True, wait for one zero delay
"""
if self.on is False:
if delta is True:
await uvm_zero_delay()
return
self.num_waiters += 1
await wait(lambda: self.on is False, self.m_value_changed_event)
async def wait_trigger(self):
"""
Waits for the event to be triggered.
If one process calls wait_trigger in the same delta as another process
calls `UVMEvent.trigger`, a race condition occurs. If the call to wait occurs
before the trigger, this method will return in this delta. If the wait
occurs after the trigger, this method will not return until the next
trigger, which may never occur and thus cause deadlock.
"""
self.num_waiters += 1
await self.m_event.wait()
self.m_event.clear()
async def wait_ptrigger(self):
"""
Waits for a persistent trigger of the event. Unlike `UVMEvent.wait_trigger`, this
views the trigger as persistent within a given time-slice and thus avoids
certain race conditions. If this method is called after the trigger but
within the same time-slice, the caller returns immediately.
"""
if self.m_event.fired:
return
self.num_waiters += 1
await self.m_event.wait()
self.m_event.clear()
def get_trigger_time(self):
"""
        Gets the time that this event was last triggered. If the event has not been
triggered, or the event has been reset, then the trigger time will be 0.
Returns:
int: Time when this event was triggered.
"""
return self.trigger_time
def is_on(self):
"""
Indicates whether the event has been triggered since it was last reset.
        A return of True indicates that the event has triggered.
        Returns:
            bool: True if the event has been triggered since the last reset.
"""
return self.on
def is_off(self):
"""
Indicates whether the event has been triggered or been reset.
        A return of True indicates that the event has not been triggered.
        Returns:
            bool: True if the event has not been triggered or has been reset.
"""
return not self.on
def reset(self, wakeup=False):
"""
Resets the event to its off state. If `wakeup` is set, then all processes
currently waiting for the event are activated before the reset.
No self.callbacks are called during a reset.
Args:
wakeup (bool):
"""
if wakeup:
self.m_event.set()
self.m_event = Event()
self.num_waiters = 0
self.set_value("on", False)
self.trigger_time = 0
def cancel(self):
"""
Decrements the number of waiters on the event.
This is used if a process that is waiting on an event is disabled or
activated by some other means.
"""
if self.num_waiters > 0:
self.num_waiters -= 1
def get_num_waiters(self):
"""
Returns the number of processes waiting on the event.
"""
return self.num_waiters
def get_type_name(self) -> str:
return UVMEventBase.type_name
# virtual function void do_print (uvm_printer printer)
# printer.print_field_int("self.num_waiters",
# self.num_waiters, $bits(self.num_waiters), UVM_DEC, ".", "int")
# printer.print_field_int("on", self.on, $bits(self.on), UVM_BIN, ".", "bit")
# printer.print_time("self.trigger_time", self.trigger_time)
# printer.m_scope.down("self.callbacks")
# foreach(self.callbacks[e]) begin
# printer.print_object($sformatf("[%0d]",e), self.callbacks[e], "[")
# end
# printer.m_scope.up()
# endfunction
# virtual function void do_copy (uvm_object rhs)
# uvm_event_base e
# super.do_copy(rhs)
# if(!$cast(e, rhs) || (e is None)) return
#
# self.m_event = e.self.m_event
# self.num_waiters = e.self.num_waiters
# self.set_value("on", e.on)
# self.trigger_time = e.self.trigger_time
# self.callbacks.delete()
# self.callbacks = e.self.callbacks
#
# endfunction
#endclass
class UVMEvent(UVMEventBase): # (type T=uvm_object) extends uvm_event_base
"""
CLASS: UVMEvent
The UVMEvent class is an extension of the abstract `UVMEventBase` class.
The optional parameter ~T~ allows the user to define a data type which
can be passed during an event trigger.
"""
type_name = "uvm_event"
def __init__(self, name="", T=None):
"""
Creates a new event object.
Args:
name (str): Name of the event.
T (any): Optional data type.
"""
super().__init__(name)
self.trigger_data = None
self.T = T
async def wait_trigger_data(self): # output T data)
"""
Task: wait_trigger_data
This method calls <uvm_event_base::wait_trigger> followed by `get_trigger_data`.
Returns:
any: Trigger data.
"""
await self.wait_trigger()
return self.get_trigger_data()
async def wait_ptrigger_data(self, data):
"""
This method calls <uvm_event_base::wait_ptrigger> followed by `get_trigger_data`.
Args:
data:
Returns:
any: Data used to trigger this event.
"""
await self.wait_ptrigger()
trig_data = self.get_trigger_data()
data.append(trig_data)
return trig_data
# //------------//
# // triggering //
# //------------//
def trigger(self, data=None):
"""
Triggers the event, resuming all waiting processes.
An optional `data` argument can be supplied with the enable to provide
trigger-specific information.
Args:
data (any): Data associated with the trigger.
"""
skip = False
if self.callbacks.size() > 0:
for i in range(0, self.callbacks.size()):
#uvm_event_callback#(T) tmp=self.callbacks[i]
tmp = self.callbacks.get(i)
skip = skip + tmp.pre_trigger(self, data)
if skip is False:
self.m_event.set(data)
if self.callbacks.size() > 0:
for i in range(0, self.callbacks.size()):
#uvm_event_callback#(T) tmp=self.callbacks[i]
tmp = self.callbacks[i]
tmp.post_trigger(self,data) # type: ignore
self.num_waiters = 0
self.set_value("on", True)
self.trigger_time = sv.realtime()
self.trigger_data = data
def get_trigger_data(self):
"""
Gets the data, if any, provided by the last call to `trigger`.
Returns:
any: Data used to trigger this event.
"""
return self.trigger_data
def get_type_name(self):
return UVMEvent.type_name
def add_callback(self, cb, append=True):
"""
Registers a callback object, `cb`, with this event. The callback object
may include pre_trigger and post_trigger functionality. If `append` is set
to 1, the default, `cb` is added to the back of the callback list. Otherwise,
`cb` is placed at the front of the callback list.
Args:
cb:
append (bool): If True, append to the end. Otherwise insert to front.
"""
if cb in self.callbacks:
uvm_warning("CBRGED","add_callback: Callback already registered. Ignoring.")
return
if append is True:
self.callbacks.push_back(cb)
else:
self.callbacks.insert(0, cb)
def delete_callback(self, cb):
"""
Unregisters the given callback, ~cb~, from this event.
Args:
cb (UVMCallback): Callback to delete from this event.
"""
for i in range(len(self.callbacks)):
if cb == self.callbacks[i]:
self.callbacks.delete(i)
return
uvm_warning("CBNTFD", "delete_callback: Callback not found. Ignoring delete request.")
def do_print(self, printer):
super().do_print(printer)
printer.print_object("trigger_data", self.trigger_data)
# virtual function void do_copy (uvm_object rhs)
# uvm_event#(T) e
# super.do_copy(rhs)
# if(!$cast(e, rhs) || (e is None)) return
# self.trigger_data = e.trigger_data
# endfunction // do_copy
# virtual function uvm_object create(string name="")
# uvm_event#(T) v
# v=new(name)
# return v
# endfunction
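# Minimal usage sketch (requires a running cocotb scheduler): one coroutine
# triggers the event with a payload while another awaits it. The names below
# are illustrative only.
async def _example_producer(ev):
    ev.trigger("payload")
async def _example_consumer(ev):
    data = await ev.wait_trigger_data()
    return data  # -> "payload"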
| 31.631068
| 94
| 0.589088
|
2e731c3cca38c13756d745fb7d2692ea2b059061
| 2,738
|
py
|
Python
|
day_16/solution.py
|
jamsidedown/adventofcode2020
|
2ff6ab6fd10470523e869fe6346cdfebc291d5a4
|
[
"MIT"
] | null | null | null |
day_16/solution.py
|
jamsidedown/adventofcode2020
|
2ff6ab6fd10470523e869fe6346cdfebc291d5a4
|
[
"MIT"
] | null | null | null |
day_16/solution.py
|
jamsidedown/adventofcode2020
|
2ff6ab6fd10470523e869fe6346cdfebc291d5a4
|
[
"MIT"
] | null | null | null |
import re
from typing import Dict, List, Tuple
range_pattern = re.compile(r'^([\w\s]+): (\d+)-(\d+) or (\d+)-(\d+)$')
class Range:
def __init__(self, low: int, high: int):
self.low = low
self.high = high
def valid(self, input: int) -> bool:
return self.low <= input <= self.high
class Field:
def __init__(self, input: str):
match = range_pattern.match(input)
name, l1, h1, l2, h2 = match.groups()
first = Range(int(l1), int(h1))
second = Range(int(l2), int(h2))
self.name = name
self.ranges = [first, second]
def valid(self, input: int) -> bool:
return any(r.valid(input) for r in self.ranges)
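# Quick illustration of the rule parser above: a made-up rule line yields a
# named Field whose valid() checks membership in either range.
def _example_field() -> Tuple[str, bool, bool]:
    f = Field('departure location: 1-3 or 5-7')
    return f.name, f.valid(2), f.valid(4)  # ('departure location', True, False)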
def part_1(filename: str) -> int:
fields, _, tickets = parse(filename)
total_invalid = 0
for ticket in tickets:
for field in ticket:
if not any(f.valid(field) for f in fields):
total_invalid += field
return total_invalid
def part_2(filename: str) -> int:
fields, my_ticket, tickets = parse(filename)
valid_tickets = [t for t in tickets if all(any(f.valid(field) for f in fields) for field in t)]
potential_columns: Dict[str, List[int]] = {}
for i in range(len(my_ticket)):
for f in fields:
columns = potential_columns.setdefault(f.name, [])
if all(f.valid(t[i]) for t in valid_tickets):
columns.append(i)
definite_columns = {}
while potential_columns:
for field in potential_columns:
columns = potential_columns[field]
if len(columns) == 1:
col = columns[0]
definite_columns[field] = col
potential_columns.pop(field)
for cols in potential_columns.values():
if col in cols:
cols.remove(col)
break
product = 1
for col in definite_columns:
if 'departure' in col:
col_index = definite_columns[col]
product *= my_ticket[col_index]
return product
def parse(filename: str) -> Tuple[List[Field], List[int], List[List[int]]]:
with open(filename, 'r') as f:
sections = f.read().split('\n\n')
fields = [Field(line) for line in sections[0].split('\n')]
my_ticket = [int(x) for x in sections[1].strip().split('\n')[1].split(',')]
tickets = [[int(x) for x in line.split(',')] for line in sections[2].strip().split('\n')[1:]]
return fields, my_ticket, tickets
if __name__ == '__main__':
p1 = part_1('day_16/input.txt')
print(f'Sum of invalid fields on tickets: {p1}')
p2 = part_2('day_16/input.txt')
print(f'Product of fields starting with departure: {p2}')
| 31.113636
| 101
| 0.577794
|
d862f7c3fa19360c1ad5b4e7abd11181d28cd716
| 4,993
|
py
|
Python
|
cvpods/solver/optimizer_builder.py
|
phalanx-hk/cvpods
|
bd4e5cf944e5514af1716a92f54509d9e359208f
|
[
"Apache-2.0"
] | 1
|
2020-12-03T15:19:01.000Z
|
2020-12-03T15:19:01.000Z
|
cvpods/solver/optimizer_builder.py
|
phalanx-hk/cvpods
|
bd4e5cf944e5514af1716a92f54509d9e359208f
|
[
"Apache-2.0"
] | null | null | null |
cvpods/solver/optimizer_builder.py
|
phalanx-hk/cvpods
|
bd4e5cf944e5514af1716a92f54509d9e359208f
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) BaseDetection, Inc. and its affiliates. All Rights Reserved
from typing import Any, Dict, List, Set
import torch
from torch import optim
from cvpods.utils.registry import Registry
OPTIMIZER_BUILDER = Registry("Optimizer builder")
NORM_MODULE_TYPES = (
torch.nn.BatchNorm1d,
torch.nn.BatchNorm2d,
torch.nn.BatchNorm3d,
torch.nn.SyncBatchNorm,
# NaiveSyncBatchNorm inherits from BatchNorm2d
torch.nn.GroupNorm,
torch.nn.InstanceNorm1d,
torch.nn.InstanceNorm2d,
torch.nn.InstanceNorm3d,
torch.nn.LayerNorm,
torch.nn.LocalResponseNorm,
)
@OPTIMIZER_BUILDER.register()
class OptimizerBuilder:
@staticmethod
def build(model, cfg):
raise NotImplementedError
@OPTIMIZER_BUILDER.register()
class SGDBuilder(OptimizerBuilder):
@staticmethod
def build(model, cfg):
params: List[Dict[str, Any]] = []
memo: Set[torch.nn.parameter.Parameter] = set()
for module in model.modules():
for key, value in module.named_parameters(recurse=False):
if not value.requires_grad:
continue
# Avoid duplicating parameters
if value in memo:
continue
memo.add(value)
lr = cfg.SOLVER.OPTIMIZER.BASE_LR
weight_decay = cfg.SOLVER.OPTIMIZER.WEIGHT_DECAY
if isinstance(module, NORM_MODULE_TYPES):
weight_decay = cfg.SOLVER.OPTIMIZER.WEIGHT_DECAY_NORM
elif key == "bias":
# NOTE: unlike Detectron v1, we now default BIAS_LR_FACTOR to 1.0
# and WEIGHT_DECAY_BIAS to WEIGHT_DECAY so that bias optimizer
# hyperparameters are by default exactly the same as for regular
# weights.
lr = cfg.SOLVER.OPTIMIZER.BASE_LR * cfg.SOLVER.OPTIMIZER.BIAS_LR_FACTOR
weight_decay = cfg.SOLVER.OPTIMIZER.WEIGHT_DECAY
params += [{"params": [value], "lr": lr, "weight_decay": weight_decay}]
optimizer = optim.SGD(
params,
cfg.SOLVER.OPTIMIZER.BASE_LR,
momentum=cfg.SOLVER.OPTIMIZER.MOMENTUM
)
return optimizer
@OPTIMIZER_BUILDER.register()
class AdamBuilder(OptimizerBuilder):
@staticmethod
def build(model, cfg):
lr = cfg.SOLVER.OPTIMIZER.BASE_LR
optimizer = optim.Adam(
model.parameters(),
lr=lr,
weight_decay=cfg.SOLVER.OPTIMIZER.WEIGHT_DECAY,
amsgrad=cfg.SOLVER.OPTIMIZER.AMSGRAD
)
return optimizer
@OPTIMIZER_BUILDER.register()
class AdamWBuilder(OptimizerBuilder):
@staticmethod
def build(model, cfg):
lr = cfg.SOLVER.OPTIMIZER.BASE_LR
optimizer = optim.AdamW(
model.parameters(),
lr=lr,
betas=cfg.SOLVER.OPTIMIZER.BETAS,
weight_decay=cfg.SOLVER.OPTIMIZER.WEIGHT_DECAY,
amsgrad=cfg.SOLVER.OPTIMIZER.AMSGRAD
)
return optimizer
@OPTIMIZER_BUILDER.register()
class SGDGateLRBuilder(OptimizerBuilder):
"""
SGD Gate LR optimizer builder, used for DynamicRouting in cvpods.
    This optimizer builder multiplies the lr of the gating function by GATE_LR_MULTI.
"""
@staticmethod
def build(model, cfg):
gate_lr_multi = cfg.SOLVER.OPTIMIZER.GATE_LR_MULTI
params: List[Dict[str, Any]] = []
memo: Set[torch.nn.parameter.Parameter] = set()
for name, module in model.named_modules():
for key, value in module.named_parameters(recurse=False):
if not value.requires_grad:
continue
# Avoid duplicating parameters
if value in memo:
continue
memo.add(value)
lr = cfg.SOLVER.OPTIMIZER.BASE_LR
weight_decay = cfg.SOLVER.OPTIMIZER.WEIGHT_DECAY
if isinstance(module, NORM_MODULE_TYPES):
weight_decay = cfg.SOLVER.OPTIMIZER.WEIGHT_DECAY_NORM
elif key == "bias":
# NOTE: unlike Detectron v1, we now default BIAS_LR_FACTOR to 1.0
# and WEIGHT_DECAY_BIAS to WEIGHT_DECAY so that bias optimizer
# hyperparameters are by default exactly the same as for regular
# weights.
lr = cfg.SOLVER.OPTIMIZER.BASE_LR * cfg.SOLVER.OPTIMIZER.BIAS_LR_FACTOR
weight_decay = cfg.SOLVER.OPTIMIZER.WEIGHT_DECAY
if gate_lr_multi > 0.0 and "gate_conv" in name:
lr *= gate_lr_multi
params += [{"params": [value], "lr": lr, "weight_decay": weight_decay}]
optimizer = torch.optim.SGD(
params,
cfg.SOLVER.OPTIMIZER.BASE_LR,
momentum=cfg.SOLVER.OPTIMIZER.MOMENTUM
)
return optimizer
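# Hedged usage sketch: look up a registered builder by its class name and
# delegate to its static build(). This assumes the cvpods Registry exposes
# get(), as detectron2-style registries do; "SGDBuilder" is registered above.
def _example_build_optimizer(model, cfg, name="SGDBuilder"):
    return OPTIMIZER_BUILDER.get(name).build(model, cfg)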
| 34.434483
| 91
| 0.606249
|
e7ac57f5e122d40d5c3025526eaf5a8b988b71e6
| 163
|
py
|
Python
|
modules/user.py
|
SilentFan/EvenThing
|
b32054a3c9b53d8dfbdc6a808ff9d00d88d1c876
|
[
"MIT"
] | null | null | null |
modules/user.py
|
SilentFan/EvenThing
|
b32054a3c9b53d8dfbdc6a808ff9d00d88d1c876
|
[
"MIT"
] | 1
|
2015-10-23T14:43:09.000Z
|
2015-10-23T14:43:09.000Z
|
modules/user.py
|
SilentFan/EvenThing
|
b32054a3c9b53d8dfbdc6a808ff9d00d88d1c876
|
[
"MIT"
] | null | null | null |
__author__ = 'meli'
class User:
name = None
surname = None
def __init__(self, name, surname):
self.name = name
self.surname = surname
| 18.111111
| 38
| 0.601227
|
09883cdf506e5c1e61bc457b808e5b8418a3b13b
| 7,514
|
py
|
Python
|
python/eggroll/core/pair_store/format.py
|
ZYKJShadow/eggroll
|
02b845c46cfb7a67459c6d842fa9ef3a02ae60ba
|
[
"Apache-2.0"
] | 1
|
2020-10-23T03:18:54.000Z
|
2020-10-23T03:18:54.000Z
|
python/eggroll/core/pair_store/format.py
|
ZYKJShadow/eggroll
|
02b845c46cfb7a67459c6d842fa9ef3a02ae60ba
|
[
"Apache-2.0"
] | null | null | null |
python/eggroll/core/pair_store/format.py
|
ZYKJShadow/eggroll
|
02b845c46cfb7a67459c6d842fa9ef3a02ae60ba
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2019 - now, Eggroll Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from struct import pack_into, unpack_from, unpack, pack
MAGIC_NUM = bytes.fromhex('46709394')
PROTOCOL_VERSION = bytes.fromhex('00000001')
# def create_byte_buffer(data, options=None):
# if options and "buffer_type" in options :
# if options["buffer_type"] == "array":
# return ArrayByteBuffer(data)
# elif options["buffer_type"] != "file":
# return FileByteBuffer(data)
# else:
# raise ValueError("not supported:", options)
# return ArrayByteBuffer(data)
class ByteBuffer:
def remaining_size(self):
return self.size() - self.get_offset()
def _check_remaining(self, offset, size):
raise NotImplementedError()
def size(self):
raise NotImplementedError()
def get_offset(self):
raise NotImplementedError()
def set_offset(self, offset):
raise NotImplementedError()
def read_int32(self, offset=None):
raise NotImplementedError()
def read_bytes(self, size, offset=None):
raise NotImplementedError()
def write_int32(self, value, offset=None):
raise NotImplementedError()
def write_bytes(self, value, offset=None):
raise NotImplementedError()
class FileByteBuffer:
def __init__(self, file):
if not "b" in file.mode:
raise ValueError("file is not binary mode:" + file.name)
self.file = file
# TODO:1: cached?
self.__size = self.size()
def remaining_size(self):
return self.size() - self.get_offset()
def _check_remaining(self, offset, size):
if offset is None:
offset = self.get_offset()
if self.size() - offset - size < 0:
raise IndexError(f'buffer overflow. remaining: {self.__size - offset}, required: {size}')
def size(self):
return os.fstat(self.file.fileno()).st_size
def __seek_offset(self, offset):
if offset is not None:
self.file.seek(offset)
def get_offset(self):
return self.file.tell()
def set_offset(self, offset):
self.file.seek(offset)
def read_int32(self, offset=None):
self._check_remaining(offset, 4)
self.__seek_offset(offset)
return unpack(">i", self.file.read(4))[0]
def read_bytes(self, size, offset=None):
self._check_remaining(offset, size)
self.__seek_offset(offset)
return self.file.read(size)
def write_int32(self, value, offset=None):
self.__seek_offset(offset)
self.file.write(pack(">i", value))
def write_bytes(self, value, offset=None):
self.__seek_offset(offset)
self.file.write(value)
class ArrayByteBuffer(ByteBuffer):
def __init__(self, data):
self.__buffer = data
self.__offset = 0
self.__size = len(data)
def get_offset(self):
return self.__offset
def set_offset(self, offset):
self.__offset = offset
def size(self):
return self.__size
def __get_op_offset(self, offset):
if offset is None:
return self.__offset
else:
return offset
def __adjust_offset(self, offset, delta):
self.__offset = offset + delta
def read_int32(self, offset=None):
op_offset = self.__get_op_offset(offset)
value_size = 4
self._check_remaining(op_offset, value_size)
result = unpack_from('>i', self.__buffer, op_offset)
self.__adjust_offset(op_offset, value_size)
return result[0]
def _check_remaining(self, offset, size):
if self.__size - offset - size < 0:
raise IndexError(f'buffer overflow. remaining: {self.size() - offset}, required: {size}')
def read_bytes(self, size, offset=None):
op_offset = self.__get_op_offset(offset)
self._check_remaining(op_offset, size)
ret = self.__buffer[op_offset: op_offset + size]
self.__adjust_offset(op_offset, size)
return ret
def write_int32(self, value, offset=None):
size = 4
op_offset = self.__get_op_offset(offset)
self._check_remaining(op_offset, size)
pack_into('>i', self.__buffer, op_offset, value)
self.__adjust_offset(op_offset, size)
def write_bytes(self, value, offset=None):
op_offset = self.__get_op_offset(offset)
size = len(value)
self._check_remaining(op_offset, size)
self.__buffer[op_offset: op_offset + size] = value
self.__adjust_offset(op_offset, size)
class PairBinReader(object):
def __init__(self, pair_buffer):
self.__buf = pair_buffer
_magic_num = self.__buf.read_bytes(len(MAGIC_NUM))
if _magic_num != MAGIC_NUM:
raise ValueError('magic num does not match')
_protocol_version = self.__buf.read_bytes(len(PROTOCOL_VERSION))
if _protocol_version != PROTOCOL_VERSION:
            raise ValueError('protocol version not supported')
# move offset, do not delete
header_size = self.__buf.read_int32()
body_size = self.__buf.read_int32()
if body_size > 0 and self.__buf.size() - self.__buf.get_offset() != body_size:
raise ValueError('body size does not match len of body')
def read_all(self):
while self.__buf.remaining_size() > 0:
old_offset = self.__buf.get_offset()
try:
key_size = self.__buf.read_int32()
# empty means end, though there is remaining data
if key_size == 0:
self.__buf.set_offset(old_offset)
return
key = self.__buf.read_bytes(size=key_size)
value_size = self.__buf.read_int32()
value = self.__buf.read_bytes(size=value_size)
except IndexError as e:
# read end
self.__buf.set_offset(old_offset)
return
yield key, value
class PairBinWriter(object):
@staticmethod
def write_pair(buf, key_bytes, value_bytes):
old_offset = buf.get_offset()
try:
buf.write_int32(len(key_bytes))
buf.write_bytes(key_bytes)
buf.write_int32(len(value_bytes))
buf.write_bytes(value_bytes)
except IndexError as e:
buf.set_offset(old_offset)
raise e
@staticmethod
def write_head(buf):
buf.write_bytes(MAGIC_NUM)
buf.write_bytes(PROTOCOL_VERSION)
buf.write_int32(0)
buf.write_int32(0)
def __init__(self, pair_buffer):
self.__buf = pair_buffer
PairBinWriter.write_head(self.__buf)
def write(self, key_bytes, value_bytes):
PairBinWriter.write_pair(self.__buf, key_bytes, value_bytes)
def write_all(self, items):
for k, v in items:
self.write(k, v)
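if __name__ == "__main__":
    # Minimal round-trip sketch (not part of the original module): write two
    # key/value pairs into an in-memory buffer and read them back.
    buf = ArrayByteBuffer(bytearray(64))
    writer = PairBinWriter(buf)
    writer.write_all([(b"k1", b"hello"), (b"k2", b"world")])
    buf.set_offset(0)
    reader = PairBinReader(buf)
    print([(bytes(k), bytes(v)) for k, v in reader.read_all()])
    # expected: [(b'k1', b'hello'), (b'k2', b'world')]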
| avg_line_length: 31.974468 | max_line_length: 101 | alphanum_fraction: 0.633883 |
| hexsha: 484bc1d1f0a38b593f7343ab9dd08a656fcd50a9 | size: 11932 | ext: py | lang: Python |
| max_stars: coinbase/mock.py | EU-institution/coinbase_python | 1e0d5d162cb40c1094775ceb5c267a5bdedf0949 | ["Unlicense", "MIT"] | count: 53 | 2015-01-05T08:42:17.000Z .. 2022-03-01T20:52:41.000Z |
| max_issues: coinbase/mock.py | EU-institution/coinbase_python | 1e0d5d162cb40c1094775ceb5c267a5bdedf0949 | ["Unlicense", "MIT"] | count: 10 | 2015-01-08T04:09:25.000Z .. 2021-10-08T21:43:17.000Z |
| max_forks: coinbase/mock.py | mhluongo/coinbase_python | 2e29d4fa1c501495b41005bbcc770cb29fba6ad1 | ["MIT", "Unlicense"] | count: 34 | 2016-09-18T23:18:44.000Z .. 2022-02-19T17:31:05.000Z |
from collections import namedtuple
import datetime
from decimal import Decimal
import random
import string
from coinbase.models import *
class CoinbaseAccountMock(object):
"""
    This class has the same attributes as CoinbaseAccount and mimics its
    behavior without actually contacting Coinbase. Use it to test
an application without requiring real money.
"""
def __init__(self):
self._buy_price = Decimal('510')
self._sell_price = Decimal('490')
self._me = CoinbaseContact(id='2346178248353',
name='Me', email='me@example.com')
self._transactions = {} # transaction id -> CoinbaseTransaction
self._transaction_ids = [] # transaction ids in creation order
self._transfers = {} # transaction id -> CoinbaseTransfer
self._transfer_ids = [] # transaction ids in creation order
self._buttons = {} # button id -> CoinbasePaymentButton
self._orders = {} # order id -> CoinbaseOrder
self._order_ids = [] # order ids in creation order
self._orders_by_address = {} # receive address -> CoinbaseOrder.id
self._orders_by_custom = {} # button custom string -> CoinbaseOrder.id
self.authenticated = True
self.auth = None
self.allow_transfers = True
self.balance = CoinbaseAmount('0', 'BTC')
self.receive_address = random_bitcoin_address()
self.exchange_rates = {
'usd_to_btc': Decimal('0.002'),
'btc_to_usd': Decimal('500'),
}
self.mock = MockControl(account=self)
def get_exchange_rate(self, from_currency, to_currency):
return self.exchange_rates['{}_to_{}'.format(
from_currency.lower(),
to_currency.lower()
)]
def contacts(self, page=None, limit=None, query=None):
raise NotImplementedError # todo
def buy_price(self, qty=1):
return CoinbaseAmount(qty * self._buy_price, 'USD')
def sell_price(self, qty=1):
return CoinbaseAmount(qty * self._sell_price, 'USD')
def buy_btc(self, qty, pricevaries=False):
now = get_now()
transaction = CoinbaseTransaction(
id=random_transaction_id(),
created_at=now,
amount=CoinbaseAmount(qty, 'BTC'),
status=CoinbaseTransaction.Status.pending,
)
transfer = CoinbaseTransfer(
transaction_id=transaction.id,
created_at=now,
)
self.mock.add_transaction(transaction)
self.mock.add_transfer(transfer)
return transfer
def sell_btc(self, qty):
return self.buy_btc(qty=-Decimal(qty))
def request(self, from_email, amount, notes=''):
raise NotImplementedError # todo
def send(self, to_address, amount, notes='', user_fee=None, idem=None):
transaction = CoinbaseTransaction(
id=random_transaction_id(),
created_at=get_now(),
notes=notes,
amount=amount,
status=CoinbaseTransaction.Status.pending,
request=False,
sender=self._me,
recipient=None, # todo
recipient_address=to_address,
recipient_type='coinbase' if '@' in to_address else 'bitcoin',
)
self.mock.add_transaction(transaction)
return transaction
def transactions(self, count=30):
return [self._transactions[i] for i in
list(reversed(self._transaction_ids))[:count]]
def transfers(self, count=30):
        return [self._transfers[i] for i in
                list(reversed(self._transfer_ids))[:count]]
def get_transaction(self, transaction_id):
return self._transactions[transaction_id]
def get_user_details(self):
# todo - this could be mocked better
return CoinbaseUser(email='test@example.com')
def generate_receive_address(self, callback_url=None):
# todo - this could be mocked better
return '1DzkRzSUqm8jGhT8wp7E8XNMcr9J3nT3SX'
def create_button(self, button, account_id=None):
id = random_button_id()
button = button._replace(
id=id,
type=button.type or 'buy_now',
style=button.style or 'buy_now_large',
text=button.text or 'Pay With Bitcoin',
custom_secure=bool(button.custom_secure),
)
self._buttons[id] = button
return button
def orders(self, account_id=None, page=None):
# todo - paging
return [self._orders[i] for i in
list(reversed(self._order_ids))]
def get_order(self, id_or_custom_field, account_id=None):
order = self._orders.get(id_or_custom_field)
if order:
return order
order_id = self._orders_by_custom.get(id_or_custom_field)
if order_id:
return self._orders.get(order_id)
def create_button_and_order(self, button):
button_id = self.create_button(button).id
return self.create_order_from_button(button_id)
def create_order_from_button(self, button_id):
button = self.mock.get_button(button_id)
order = CoinbaseOrder(
id=random_order_id(),
created_at=get_now(),
status=CoinbaseOrder.Status.pending,
receive_address=random_bitcoin_address(),
button=CoinbaseOrder.Button.from_coinbase_payment_button(button),
custom=button.custom,
total=self.mock.btc_and_native(button.price),
)
self.mock.add_order(order)
return order
class MockControl(namedtuple('CoinbaseAccount_MockControl', 'account')):
def complete_transaction(self, transaction_id):
transaction = self.modify_transaction(
transaction_id, status=CoinbaseTransaction.Status.complete)
if transaction_id in self.account._transfers:
self.modify_transfer(transaction_id,
status=CoinbaseTransfer.Status.complete)
send = (transaction.sender is not None and
transaction.sender.id == self.account._me.id)
amount_btc = self.convert_amount(transaction.amount, 'BTC').amount
account_amount = self.account.balance.amount
self.account.balance = self.account.balance._replace(
amount=account_amount + amount_btc * (-1 if send else 1))
return transaction
def create_order_from_button(self, button_id, customer=None,
refund_address=None):
"""
customer - CoinbaseOrder.Customer
        refund_address - bitcoin address
"""
order_id = self.account.create_order_from_button(button_id).id
return self.modify_order(order_id, customer=customer,
refund_address=refund_address)
def accept_payment(self, receive_address, amount_btc):
"""
receive_address - bitcoin address
amount_btc - Decimal
Returns a list of Callback
"""
callbacks = []
now = get_now()
amount_btc = Decimal(amount_btc)
amount_usd = amount_btc * self.account.exchange_rates['btc_to_usd']
amount = CoinbaseAmount.BtcAndNative(
btc=CoinbaseAmount(amount_btc, 'BTC'),
native=CoinbaseAmount(amount_usd, 'USD'),
)
self.account.balance = self.account.balance._replace(
amount=self.account.balance.amount + amount_btc)
transaction = CoinbaseTransaction(
id=random_transaction_id(),
created_at=now,
amount=amount.btc,
status=CoinbaseTransaction.Status.complete,
)
self.account.mock.add_transaction(transaction)
order_id = self.account._orders_by_address.get(receive_address)
if order_id is not None:
order = self.account._orders[order_id]
button = self.account._buttons[order.button.id]
# I'm not actually sure when the transaction field gets updated.
order = self.modify_order(
order_id,
transaction=CoinbaseOrder.Transaction(
id=transaction.id,
hash=None,
confirmations=0,
)
)
if order.status == CoinbaseOrder.Status.pending:
amount_is_correct = amount.btc == order.total.btc
status = (CoinbaseOrder.Status.complete if amount_is_correct
else CoinbaseOrder.Status.mispaid)
order = self.modify_order(order.id, status=status)
if order.status in [CoinbaseOrder.Status.mispaid,
CoinbaseOrder.Status.expired]:
order = self.modify_order(order.id, mispaid=amount)
if button.callback_url is not None:
callbacks.append(Callback(
url=button.callback_url,
body=order.render_callback(),
))
return callbacks
def add_transaction(self, transaction):
self.account._transactions[transaction.id] = transaction
self.account._transaction_ids.append(transaction.id)
def add_transfer(self, transfer):
self.account._transfers[transfer.transaction_id] = transfer
self.account._transfer_ids.append(transfer.transaction_id)
def add_order(self, order):
self.account._orders[order.id] = order
self.account._orders_by_address[order.receive_address] = order.id
if order.custom:
self.account._orders_by_custom[order.custom] = order.id
self.account._order_ids.append(order.id)
def modify_transaction(self, transaction_id, **kwargs):
transaction = self.account._transactions[transaction_id]
transaction = transaction._replace(**kwargs)
self.account._transactions[transaction_id] = transaction
return transaction
def modify_transfer(self, transaction_id, **kwargs):
transfer = self.account._transfers[transaction_id]
transfer = transfer._replace(**kwargs)
self.account._transfers[transaction_id] = transfer
return transfer
def modify_order(self, order_id, **kwargs):
order = self.account._orders[order_id]
order = order._replace(**kwargs)
self.account._orders[order_id] = order
return order
def get_button(self, button_id):
return self.account._buttons[button_id]
def convert_amount(self, amount, currency):
if amount.currency == currency:
return amount
return amount.convert(
currency=currency,
exchange_rate=self.account.get_exchange_rate(
from_currency=amount.currency,
to_currency=currency
)
)
def btc_and_native(self, amount, preferred_native_currency='USD'):
native_currency = (amount.currency if amount.currency != 'BTC'
else preferred_native_currency)
return CoinbaseAmount.BtcAndNative(
btc=self.convert_amount(amount, 'BTC'),
native=self.convert_amount(amount, native_currency),
)
Callback = namedtuple('Callback', 'url body')
def get_now():
return floor_second(datetime.datetime.now())
def floor_second(x):
return x - datetime.timedelta(microseconds=x.microsecond)
def random_string(length, chars):
return ''.join((random.choice(chars) for _ in range(length)))
def random_transaction_id():
return random_string(24, string.hexdigits[:16])
def random_button_id():
return random_string(32, string.hexdigits[:16])
def random_order_id():
return random_string(8, string.digits + string.ascii_uppercase)
def random_bitcoin_address():
return random_string(34, string.ascii_letters + string.digits)
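if __name__ == "__main__":
    # Hedged usage sketch (assumes the package's coinbase.models types are
    # importable): simulate an incoming payment to the mock account and
    # inspect the resulting balance and transaction list.
    account = CoinbaseAccountMock()
    account.mock.accept_payment(account.receive_address, Decimal("0.5"))
    print(account.balance)         # balance should now reflect 0.5 BTC
    print(account.transactions())  # one completed transaction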
| avg_line_length: 34.287356 | max_line_length: 79 | alphanum_fraction: 0.633423 |
| hexsha: 6e0562c5f127232d21df9ddaf9d1576d800fc155 | size: 7643 | ext: py | lang: Python |
| max_stars: Tests/test_Affy.py | rht/biopython | 3a44496d7bd79446266a4951b7d1f64569e4a96d | ["BSD-3-Clause"] | count: 3 | 2021-08-17T15:28:41.000Z .. 2022-02-12T06:43:22.000Z |
| max_issues: Tests/test_Affy.py | rht/biopython | 3a44496d7bd79446266a4951b7d1f64569e4a96d | ["BSD-3-Clause"] | count: 32 | 2016-11-21T07:38:21.000Z .. 2017-08-16T13:00:03.000Z |
| max_forks: Tests/test_Affy.py | rht/biopython | 3a44496d7bd79446266a4951b7d1f64569e4a96d | ["BSD-3-Clause"] | count: 8 | 2016-11-24T18:57:35.000Z .. 2022-01-16T08:15:25.000Z |
import unittest
import struct
import os
import sys
try:
from numpy import array
import numpy.testing
except ImportError:
from Bio import MissingPythonDependencyError
raise MissingPythonDependencyError(
"Install NumPy if you want to use Bio.Affy.CelFile")
from Bio.Affy import CelFile
def testRecordV4(record):
assert(record.intensities.shape == (5, 5))
assert(record.intensities.shape == record.stdevs.shape)
assert(record.intensities.shape == record.npix.shape)
assert(record.ncols == 5)
assert(record.nrows == 5)
numpy.testing.assert_allclose(record.intensities,
[[0., 1., 2., 3., 4.],
[5., 6., 7., 8., 9.],
[10., 11., 12., 13., 14.],
[15., 16., 17., 18., 19.],
[20., 21., 22., 23., 24.]])
numpy.testing.assert_allclose(record.stdevs,
[[0., -1., -2., -3., -4.],
[-5., -6., -7., -8., -9.],
[-10., -11., -12., -13., -14.],
[-15., -16., -17., -18., -19.],
[-20., -21., -22., -23., -24.]])
numpy.testing.assert_allclose(record.npix,
[[9, 9, 9, 9, 9],
[9, 9, 9, 9, 9],
[9, 9, 9, 9, 9],
[9, 9, 9, 9, 9],
[9, 9, 9, 9, 9]])
assert(len(record.AlgorithmParameters) == 329)
assert(len(record.GridCornerUL) == 7)
assert(record.AlgorithmParameters[-3:] == '169')
class AffyTest(unittest.TestCase):
def setUp(self):
self.affy3 = "Affy/affy_v3_example.CEL"
self.affy4 = "Affy/affy_v4_example.CEL"
self.affy4Bad = "Affy/affy_v4_bad_example.CEL"
with open(self.affy4Bad, "wb") as f:
self.writeExampleV4(f, bad=True)
def tearDown(self):
os.remove(self.affy4Bad)
# tests if the code is backwards compatible
def testAffyStrict(self):
record = CelFile.read("hello")
assert record.DatHeader is None
# tests the old Affymetrix v3 parser
def testAffy3(self):
with open(self.affy3, "r") as f:
record = CelFile.read(f)
assert(len(record.DatHeader) > 0)
assert(record.intensities.shape == (5, 5))
assert(record.intensities.shape == record.stdevs.shape)
assert(record.intensities.shape == record.npix.shape)
assert(record.ncols == 5)
assert(record.nrows == 5)
def testAffy3Backwards(self):
# tests the old Affymetrix v3 parser
with open(self.affy3, "r") as f:
lines = f.readlines()
record = CelFile.read_v3(lines)
assert(len(record.DatHeader) > 0)
assert(record.intensities.shape == (5, 5))
assert(record.intensities.shape == record.stdevs.shape)
assert(record.intensities.shape == record.npix.shape)
assert(record.ncols == 5)
assert(record.nrows == 5)
# tests the new Affymetrix v4 parser
def testAffy4(self):
with open(self.affy4, "rb") as f:
record = CelFile.read(f)
testRecordV4(record)
def testAffyBadHeader(self):
with self.assertRaises(CelFile.ParserError):
with open(self.affy4Bad, "rb") as f:
record = CelFile.read(f)
def testAffyWrongModeReadV4(self):
try:
with open(self.affy4, "r") as f:
record = CelFile.read_v4(f)
except CelFile.ParserError:
if int(sys.version[0]) >= 3:
                return # As expected in Python 3
else:
raise AssertionError("Expected CelFile.ParserError in python3")
# the code just works in python 2
testRecordV4(record)
def testAffyWrongModeRead(self):
try:
with open(self.affy4, "r") as f:
record = CelFile.read(f)
except CelFile.ParserError:
if int(sys.version[0]) >= 3:
                return # As expected in Python 3
else:
raise AssertionError("Expected CelFile.ParserError in python3")
# the code just works in python 2
testRecordV4(record)
# Writes a small example Affymetrix V4 CEL File
def writeExampleV4(self, f, bad=False):
preHeaders = {'cellNo': 25,
'columns': 5,
'headerLen': 752,
'magic': 64,
'rows': 5,
'version': 4}
goodH = {u'Axis-invertX': b'0'}
badH = {u'Axis-invertX': b'1'}
headers = {u'Algorithm': b'Percentile',
u'AlgorithmParameters': b'Percentile:75;CellMargin:4;Outlie'
b'rHigh:1.500;OutlierLow:1.004;AlgVersion:6.0;FixedCellSize'
b':TRUE;FullFeatureWidth:7;FullFeatureHeight:7;IgnoreOutlie'
b'rsInShiftRows:FALSE;FeatureExtraction:TRUE;PoolWidthExten'
b'stion:1;PoolHeightExtension:1;UseSubgrids:FALSE;Randomize'
b'Pixels:FALSE;ErrorBasis:StdvMean;StdMult:1.000000;NumDATS'
b'ubgrids:169',
u'AxisInvertY': b'0',
u'Cols': b'5',
u'DatHeader': b'[0..65534] 20_10N:CLS=19420RWS=19420XIN=0'
b' YIN=0 VE=30 2.0 05/25/05 23:19:07 50102310 M10 '
b' \x14 \x14 HuEx-1_0-st-v2.1sq \x14 \x14 \x14 \x14 '
b'\x14570 \x14 25540.671875 \x14 3.500000 \x14 0.7000 \x14'
b' 3',
u'GridCornerLL': b'518 18668',
u'GridCornerLR': b'18800 18825',
u'GridCornerUL': b'659 469',
u'GridCornerUR': b'18942 623',
u'OffsetX': b'0',
u'OffsetY': b'0',
u'Rows': b'5',
u'TotalX': b'2560',
u'TotalY': b'2560',
u'swapXY': b'0'}
if not bad:
headers.update(goodH)
else:
headers.update(badH)
prePadding = b"this text doesn't matter and is ignored\x04"
preHeadersOrder = ["magic",
"version",
"columns",
"rows",
"cellNo",
"headerLen"]
headersEncoded = struct.pack('<' + 'i' * len(preHeadersOrder),
*[preHeaders[header] for header in
preHeadersOrder])
def packData(intensity, sdev, pixel):
return struct.pack("< f f h", intensity, sdev, pixel)
f.write(headersEncoded)
for header in headers:
try:
f.write(bytes(header, encoding="utf-8") +
b"=" +
headers[header] +
b"\n")
except TypeError:
f.write(header + b"=" + headers[header] + b"\n")
f.write(prePadding)
f.write(b"\x00" * 15)
for i in range(25):
f.write(packData(float(i), float(-i), 9))
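        # Note (added for clarity): each cell record written above is 10 bytes,
        # packed little-endian as intensity (float32), stdev (float32) and
        # pixel count (int16), matching the "< f f h" format used in packData.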
if __name__ == "__main__":
runner = unittest.TextTestRunner(verbosity=0)
unittest.main(testRunner=runner)
| avg_line_length: 39.601036 | max_line_length: 79 | alphanum_fraction: 0.487767 |
| hexsha: 63f15fa451a81d06bba004bbd7dd69a88177fa5d | size: 17357 | ext: py | lang: Python |
| max_stars: wikia/wikia.py | spbonez/DLD-Bot | ed7b9f4f290e741e9080e5bd900d195f20e7b414 | ["MIT"] | count: 1 | 2019-09-01T13:53:20.000Z .. 2019-09-01T13:53:20.000Z |
| max_issues: wikia/wikia.py | spbonez/DLD-Bot | ed7b9f4f290e741e9080e5bd900d195f20e7b414 | ["MIT"] | count: null | null .. null |
| max_forks: wikia/wikia.py | spbonez/DLD-Bot | ed7b9f4f290e741e9080e5bd900d195f20e7b414 | ["MIT"] | count: 1 | 2019-01-16T07:08:59.000Z .. 2019-01-16T07:08:59.000Z |
import requests
import time
import mimetypes
from bs4 import BeautifulSoup
from datetime import datetime, timedelta
from decimal import Decimal
from .exceptions import (
PageError, DisambiguationError, RedirectError, HTTPTimeoutError,
WikiaException, ODD_ERROR_MESSAGE)
from .util import cache, stdout_encode, debug
# Generate all extensions from the OS
mimetypes.init()
API_URL = 'http://{lang}{sub_wikia}.wikia.com/api/v1/{action}'
# URL used when browsing the wikia proper
STANDARD_URL = 'http://{lang}{sub_wikia}.wikia.com/wiki/{page}'
LANG = ""
RATE_LIMIT = False
RATE_LIMIT_MIN_WAIT = None
RATE_LIMIT_LAST_CALL = None
USER_AGENT = 'wikia (https://github.com/NotThatSiri/DLD-Bot/wikia)'
def set_lang(language):
'''
Sets the global language variable, which is sent in the params
'''
global LANG
LANG = language.lower() + '.' if language else ''
for cached_func in (search, summary):
cached_func.clear_cache()
def set_user_agent(user_agent_string):
'''
Set the User-Agent string to be used for all requests.
Arguments:
* user_agent_string - (string) a string specifying the User-Agent header
'''
global USER_AGENT
USER_AGENT = user_agent_string
def set_rate_limiting(rate_limit, min_wait=timedelta(milliseconds=50)):
'''
Enable or disable rate limiting on requests to the wikia servers.
If rate limiting is not enabled, under some circumstances (depending on
load on Wikia, the number of requests you and other `wikia` users
are making, and other factors), Wikia may return an HTTP timeout error.
Enabling rate limiting generally prevents that issue, but please note that
HTTPTimeoutError still might be raised.
Arguments:
* rate_limit - (Boolean) whether to enable rate limiting or not
Keyword arguments:
* min_wait - if rate limiting is enabled, `min_wait` is a timedelta describing the minimum time to wait before requests.
Defaults to timedelta(milliseconds=50)
'''
global RATE_LIMIT
global RATE_LIMIT_MIN_WAIT
global RATE_LIMIT_LAST_CALL
RATE_LIMIT = rate_limit
if not rate_limit:
RATE_LIMIT_MIN_WAIT = None
else:
RATE_LIMIT_MIN_WAIT = min_wait
RATE_LIMIT_LAST_CALL = None
@cache
def search(sub_wikia, query, results=10):
'''
Do a Wikia search for `query`.
Keyword arguments:
* sub_wikia - the sub wikia to search in (i.e: "runescape", "elderscrolls")
  * results - the maximum number of results returned
'''
global LANG
search_params = {
'action': 'Search/List?/',
'sub_wikia': sub_wikia,
'lang': LANG,
'limit': results,
'query': query
}
raw_results = _wiki_request(search_params)
try:
search_results = (d['title'] for d in raw_results['items'])
except KeyError as e:
raise WikiaError("Could not locate page \"{}\" in subwikia \"{}\"".format(query,
sub_wikia))
return list(search_results)
def random(pages=1):
'''
Get a list of random Wikia article titles.
  .. note:: Random only gets articles from namespace 0, meaning no Category or User pages.
Keyword arguments:
* pages - the number of random pages returned (max of 10)
'''
#http://en.wikia.org/w/api.php?action=query&list=random&rnlimit=5000&format=
query_params = {
'lang': LANG
}
request = _wiki_request(query_params)
titles = [page['title'] for page in request['query']['random']]
if len(titles) == 1:
return titles[0]
return titles
@cache
def summary(sub_wikia, title, chars=500, redirect=True):
'''
Plain text summary of the page from the sub-wikia.
  .. note:: This is a convenience wrapper - redirect is enabled by default.
  Keyword arguments:
  * chars - if set, return only the first `chars` characters (limit is 500)
  * redirect - allow redirection without raising RedirectError
'''
  # use redirect to get the correct article
# also, use page's error checking to raise DisambiguationError if necessary
page_info = page(sub_wikia, title, redirect=redirect)
title = page_info.title
pageid = page_info.pageid
query_params = {
'action': 'Articles/Details?/',
'sub_wikia': sub_wikia,
'titles': title,
'ids': pageid,
'abstract': chars,
'lang': LANG
}
request = _wiki_request(query_params)
summary = request['items'][str(pageid)]['abstract']
return summary
def page(sub_wikia, title=None, pageid=None, redirect=True, preload=False):
'''
Get a WikiaPage object for the page in the sub wikia with title `title` or the pageid
`pageid` (mutually exclusive).
Keyword arguments:
* title - the title of the page to load
* pageid - the numeric pageid of the page to load
* redirect - allow redirection without raising RedirectError
* preload - load content, summary, images, references, and links during initialization
'''
if title is not None:
return WikiaPage(sub_wikia, title, redirect=redirect, preload=preload)
elif pageid is not None:
return WikiaPage(sub_wikia, pageid=pageid, preload=preload)
else:
raise ValueError("Either a title or a pageid must be specified")
class WikiaPage(object):
'''
Contains data from a Wikia page.
Uses property methods to filter data from the raw HTML.
'''
def __init__(self, sub_wikia, title=None, pageid=None, redirect=True, preload=False, original_title=''):
if title is not None:
self.title = title
self.original_title = original_title or title
elif pageid is not None:
self.pageid = pageid
else:
raise ValueError("Either a title or a pageid must be specified")
self.sub_wikia = sub_wikia
try:
self.__load(redirect=redirect, preload=preload)
except AttributeError as e:
raise WikiaError("Could not locate page \"{}\" in subwikia \"{}\"".format(title or pageid,
sub_wikia))
if preload:
for prop in ('content', 'summary', 'images', 'references', 'links', 'sections'):
getattr(self, prop)
def __repr__(self):
return stdout_encode(u'<WikiaPage \'{}\'>'.format(self.title))
def __eq__(self, other):
try:
return (
self.pageid == other.pageid
and self.title == other.title
and self.url == other.url
)
except:
return False
def __load(self, redirect=True, preload=False):
'''
Load basic information from Wikia.
Confirm that page exists and is not a disambiguation/redirect.
Does not need to be called manually, should be called automatically during __init__.
'''
query_params = {
'action': 'Articles/Details?/',
'sub_wikia': self.sub_wikia,
'lang': LANG,
}
if not getattr(self, 'pageid', None):
query_params['titles'] = self.title
else:
query_params['ids'] = self.pageid
try:
request = _wiki_request(query_params)
query = list(request['items'].values())[0]
except (IndexError, requests.ConnectionError):
raise WikiaError("Could not find page \"{}\" "
"of the sub-wikia \"{}\"".format(self.title or self.pageid,
self.sub_wikia))
self.pageid = query['id']
self.title = query['title']
lang = query_params['lang']
self.url = STANDARD_URL.format(lang=lang, sub_wikia=self.sub_wikia, page=self.title.replace(' ','_'))
def __continued_query(self, query_params):
'''
Based on https://www.mediawiki.org/wiki/API:Query#Continuing_queries
'''
query_params.update(self.__title_query_param)
last_continue = {}
prop = query_params.get('prop', None)
while True:
params = query_params.copy()
params.update(last_continue)
request = _wiki_request(params)
if 'query' not in request:
break
pages = request['query']['pages']
if 'generator' in query_params:
yield from pages.values()
else:
yield from pages[self.pageid][prop]
if 'continue' not in request:
break
last_continue = request['continue']
@property
def __title_query_param(self):
if getattr(self, 'title', None) is not None:
return {'titles': self.title}
else:
return {'pageids': self.pageid}
def html(self):
'''
Get full page HTML.
.. warning:: This can get pretty slow on long pages.
'''
if not getattr(self, '_html', False):
request = requests.get(self.url)
self._html = request.text
return self._html
@property
def content(self):
'''
Plain text content of each section of the page, excluding images, tables,
and other data.
'''
if not getattr(self, '_content', False):
query_params = {
'action': "Articles/AsSimpleJson?/",
'id': self.pageid,
'sub_wikia': self.sub_wikia,
'lang': LANG
}
request = _wiki_request(query_params)
self._content = "\n".join(segment['text'] for section in request['sections']
for segment in section['content']
if segment['type'] == "paragraph")
return self._content
@property
def revision_id(self):
'''
Revision ID of the page.
The revision ID is a number that uniquely identifies the current
version of the page. It can be used to create the permalink or for
other direct API calls. See `Help:Page history
<http://en.wikia.org/wiki/Wikia:Revision>`_ for more
information.
'''
    if not getattr(self, '_revision_id', False):
query_params = {
'action': "Articles/Details?/",
'ids': self.pageid,
'sub_wikia': self.sub_wikia,
'lang': LANG
}
request = _wiki_request(query_params)
self._revision_id = request['items'][str(self.pageid)]['revision']['id']
return self._revision_id
@property
def summary(self):
'''
Plain text summary of the page.
'''
if not getattr(self, '_summary', False):
self._summary = summary(self.sub_wikia, self.title)
return self._summary
@property
def images(self):
'''
List of URLs of images on the page.
'''
if not getattr(self, '_images', False):
# Get the first round of images
query_params = {
'action': "Articles/AsSimpleJson?/",
'id': str(self.pageid),
'sub_wikia': self.sub_wikia,
'lang': LANG,
}
request = _wiki_request(query_params)
images = [section['images'][0]['src'] for section in request["sections"]
if section['images']]
# Get the second round of images
# This time, have to use a different API call
query_params['action'] = "Articles/Details?/"
query_params['titles'] = self.title # This stops redirects
request = _wiki_request(query_params)
image_thumbnail = request["items"][str(self.pageid)]["thumbnail"]
# Only if there are more pictures to grab
if image_thumbnail:
images.append(image_thumbnail)
# A little URL manipulation is required to get the full sized version
for index, image in enumerate(images):
# Remove the /revision/ fluff after the image url
image = image.partition("/revision/")[0]
image_type = mimetypes.guess_type(image)[0]
if image_type is not None:
image_type = "." + image_type.split("/")[-1]
else:
image_type = ".png" # in case mimetypes.guess cant find it it will return None
# JPEG has a special case, where sometimes it is written as "jpg"
if image_type == ".jpeg" and image_type not in image:
image_type = ".jpg"
# Remove the filler around the image url that reduces the size
image = "".join(image.partition(image_type)[:2])
images[index] = image.replace("/thumb/", "/")
self._images = images
return self._images
@property
def related_pages(self):
'''
Lists up to 10 of the wikia URLs of pages related to this page.
'''
if not getattr(self, "_related_pages", False):
query_params = {
'action': "RelatedPages/List?/",
'ids': self.pageid,
'limit': 10,
'sub_wikia': self.sub_wikia,
'lang': LANG,
}
request = _wiki_request(query_params)
self._related_pages = [request['basepath'] + url['url']
for url in request['items'][str(self.pageid)]]
return self._related_pages
@property
def sections(self):
'''
List of section titles from the table of contents on the page.
'''
if not getattr(self, '_sections', False):
query_params = {
'action': 'Articles/AsSimpleJson?/',
'id': self.pageid,
'sub_wikia': self.sub_wikia,
'lang': LANG,
}
request = _wiki_request(query_params)
self._sections = [section['title'] for section in request['sections']]
return self._sections
def section(self, section_title):
'''
Get the plain text content of a section from `self.sections`.
Returns None if `section_title` isn't found, otherwise returns a whitespace stripped string.
This is a convenience method that wraps self.content.
.. warning:: Calling `section` on a section that has subheadings will NOT return
the full text of all of the subsections. It only gets the text between
`section_title` and the next subheading, which is often empty.
'''
if section_title not in self.sections:
return None
query_params = {
'action': "Articles/AsSimpleJson?/",
'id': self.pageid,
'sub_wikia': self.sub_wikia,
'lang': LANG
}
request = _wiki_request(query_params)
section = "\n".join(segment['text'] for section in request['sections']
if section['title'] == section_title
for segment in section['content']
if segment['type'] == "paragraph")
return section
def section_lists(self, section_title):
'''
Get the plain text content of a section from `self.sections`.
Returns None if `section_title` isn't found, otherwise returns a whitespace stripped string.
This is a convenience method that wraps self.content.
.. warning:: Calling `section` on a section that has subheadings will NOT return
the full text of all of the subsections. It only gets the text between
`section_title` and the next subheading, which is often empty.
'''
if section_title not in self.sections:
return None
query_params = {
'action': "Articles/AsSimpleJson?/",
'id': self.pageid,
'sub_wikia': self.sub_wikia,
'lang': LANG
}
request = _wiki_request(query_params)
    section = [section for section in request['sections'] if section['title'] == section_title][0]
    lists = [element['elements'] for element in section['content'] if element['type'] == 'list']
    items = [item['text'] for elements in lists for item in elements]
    return items
@cache
def languages():
'''
List all the currently supported language prefixes (usually ISO language code).
Can be inputted to `set_lang` to change the Wikia that `wikia` requests
results from.
Returns: dict of <prefix>: <local_lang_name> pairs. To get just a list of prefixes,
use `wikia.languages().keys()`.
'''
query_params = {
'action': "WAM/WAMLanguages?/",
'timestamp': time.time(), # Uses the UNIX timestamp to determine available LANGs
'sub_wikia': '',
'lang': LANG
}
request = _wiki_request(query_params)
  return request['languages']
def _wiki_request(params):
'''
Make a request to the Wikia API using the given search parameters.
Returns a parsed dict of the JSON response.
'''
global RATE_LIMIT_LAST_CALL
global USER_AGENT
api_url = API_URL.format(**params)
params['format'] = 'json'
headers = {
'User-Agent': USER_AGENT
}
if RATE_LIMIT and RATE_LIMIT_LAST_CALL and \
RATE_LIMIT_LAST_CALL + RATE_LIMIT_MIN_WAIT > datetime.now():
# it hasn't been long enough since the last API call
# so wait until we're in the clear to make the request
wait_time = (RATE_LIMIT_LAST_CALL + RATE_LIMIT_MIN_WAIT) - datetime.now()
time.sleep(int(wait_time.total_seconds()))
r = requests.get(api_url, params=params, headers=headers)
if RATE_LIMIT:
RATE_LIMIT_LAST_CALL = datetime.now()
# If getting the json representation did not work, our data is mangled
try:
r = r.json()
except ValueError:
raise WikiaError("Your request to the url \"{url}\" with the paramaters"
"\"{params}\" returned data in a format other than JSON."
"Please check your input data.".format(url=api_url,
params=params))
# If we got a json response, then we know the format of the input was correct
if "exception" in r:
    details, message, error_code = r['exception'].values()
    if error_code == 408:
      raise HTTPTimeoutError(params)
raise WikiaError("{}. {} ({})".format(message, details, error_code))
return r
class WikiaError(Exception):
pass
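if __name__ == "__main__":
    # Hedged usage sketch (requires network access to wikia.com; the sub-wikia
    # and page title below are only illustrative): fetch a page and print a
    # short summary.
    set_rate_limiting(True)
    p = page("runescape", "Rune axe")
    print(p.title)
    print(summary("runescape", p.title, chars=200))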
| avg_line_length: 32.142593 | max_line_length: 122 | alphanum_fraction: 0.646195 |
| hexsha: e0348de1fcc9667d1d4d34d19caa4f118b56cd70 | size: 5880 | ext: py | lang: Python |
| max_stars: sandbox/lib/jumpscale/JumpScale9Lib/clients/gitea/GiteaRepo.py | Jumpscale/sandbox_linux | 2aacd36b467ef30ac83718abfa82c6883b67a02f | ["Apache-2.0"] | count: null | null .. null |
| max_issues: sandbox/lib/jumpscale/JumpScale9Lib/clients/gitea/GiteaRepo.py | Jumpscale/sandbox_linux | 2aacd36b467ef30ac83718abfa82c6883b67a02f | ["Apache-2.0"] | count: 1 | 2018-04-04T12:13:40.000Z .. 2018-05-03T07:57:52.000Z |
| max_forks: sandbox/lib/jumpscale/JumpScale9Lib/clients/gitea/GiteaRepo.py | Jumpscale/sandbox_linux | 2aacd36b467ef30ac83718abfa82c6883b67a02f | ["Apache-2.0"] | count: null | null .. null |
from js9 import j
from datetime import datetime
import calendar
JSBASE = j.application.jsbase_get_class()
class GiteaRepo(JSBASE):
def __init__(self, org, name, data):
JSBASE.__init__(self)
self.name = data.name
self.owner = data.owner.login
self.data = data
self.org = org
self.id = data.id
self.client = org.client
self.api = self.client.api.repos
def labels_add(self, labels=None, remove_old=False):
"""
        Add multiple labels to this repo.
        If a label with the same name already exists on the repo, it is skipped.
        :param labels: a list of labels, e.g. [{'color': '#fef2c0', 'name': 'state_blocked'}];
            defaults to the organization's default labels
"""
self.logger.info("labels add")
        if labels is None:
            labels = self.org.labels_default_get()
        repo_labels = self.api.issueListLabels(self.name, self.owner)[0]
        # @TODO: change the way we check on label name when this is fixed
        names = [l.name for l in repo_labels]
        for label in labels:
            if label["name"] in names:
                continue
            self.client.api.repos.issueCreateLabel(label, self.name, self.owner)
        def get_label_id(name):
            for item in repo_labels:
                if item.name == name:
                    return str(item["id"])
        if remove_old:
            labels_on_repo = [item.name for item in repo_labels]
            wanted_names = [item["name"] for item in labels]
            for label in labels_on_repo:
                if label not in wanted_names:
                    self.client.api.repos.issueDeleteLabel(get_label_id(label), self.name, self.owner)
def milestones_add(self, milestones=None, remove_old=False):
"""
        Add multiple milestones to this repo.
If a milestone with the same title exists on a repo, it won't be added.
If no milestones are supplied, the default milestones for the current quarter will be added.
:param milestones: a list of milestones ex: [['Q1','2018-03-31'],...]
:return:
"""
self.logger.info("milestones add")
if not milestones:
milestones = self.milestones_default
def deadline_get(year_month_day):
year, month, day = year_month_day.split("-")
return '%s-%s-%sT23:59:00Z' % (year, str(month).zfill(2), str(day).zfill(2))
def milestone_get(title, deadline):
deadline = deadline_get(deadline)
return {"title": title, "due_on": deadline}
repo_milestones = self.client.api.repos.issueGetMilestones(self.name, self.owner)[0]
# @TODO: change the way we check on milestone title when this is fixed https://github.com/Jumpscale/go-raml/issues/396
names = [m['title'] for m in repo_milestones]
for title, deadline in milestones:
if title in names:
continue
milestone = milestone_get(title, deadline)
self.client.api.repos.issueCreateMilestone(milestone, self.name, self.owner)
milestone = milestone_get("roadmap", "2100-12-30")
self.client.api.repos.issueCreateMilestone(milestone, self.name, self.owner)
if remove_old:
milestones_default = [item[0] for item in milestones]
for item in repo_milestones:
if item["title"] not in milestones_default:
self.client.api.repos.issueDeleteMilestone(str(item["id"]), self.name, self.owner)
@property
def milestones_default(self):
"""
"""
today = datetime.today()
thismonth = today.month
months = [i for i in range(thismonth, thismonth + 5)]
year = today.year
milestones = []
# Set the begining of the week to Sunday
c = calendar.Calendar(calendar.SUNDAY)
# Add weekly milestones
for month in months:
            lastdate = [item for item in c.itermonthdates(year, month) if item.month == month][-1]
month_name = calendar.month_name[month].lower()[0:3]
# weeks = c.monthdayscalendar(year, month)
due_on = '%s-%s-%s' % (lastdate.year, str(lastdate.month).zfill(2), str(lastdate.day).zfill(2))
milestones.append((month_name, due_on))
# if month == thismonth:
# for i, week in enumerate(weeks):
# # check if this week has a value for Saturday
# day = week[6]
# if day:
# title = '%s_w%s' % (month_name, i + 1)
# due_on = '%s-%s-%s' % (year, str(month).zfill(2), str(day).zfill(2))
# milestones.append((title, due_on))
# else:
# res=[]
# for i, week in enumerate(weeks):
# # check if this week has a value for Saturday
# day = week[6]
# if day:
# res.append((i,day))
# i,day=res[-1]
# title = '%s_w%s' % (month_name, i + 1)
# due_on = '%s-%s-%s' % (year, str(month).zfill(2), str(day).zfill(2))
# milestones.append((title, due_on))
# Add quarter milestone
for quarter in range(1, 5):
title = 'Q%s' % quarter
quarter_month = quarter * 3
last_day = calendar.monthrange(year, quarter_month)[1]
due_on = '%s-%s-%s' % (year, str(quarter_month).zfill(2), last_day)
milestones.append((title, due_on))
return milestones
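        # Worked example (assuming today falls in February 2018): the property
        # yields monthly milestones ('feb', '2018-02-28') through ('jun', '2018-06-30'),
        # followed by the quarter milestones ('Q1', '2018-03-31') through ('Q4', '2018-12-31').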
def issues_get(self):
return self.api.issueListIssues(self.name, self.owner)[0]
def __repr__(self):
return "repo:%s" % self.name
__str__ = __repr__
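# Hedged usage sketch (the client/org accessors below are hypothetical and
# depend on how the js9 gitea client is configured in your environment):
#   client = j.clients.gitea.get("main")
#   org = client.organizations_get("myorg")
#   repo = org.repos_get("myrepo")
#   repo.labels_add(remove_old=True)   # sync labels with the org defaults
#   repo.milestones_add()              # add monthly + quarterly milestones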
| avg_line_length: 37.21519 | max_line_length: 126 | alphanum_fraction: 0.569048 |
| hexsha: 474f0fee9b3b593f7343ab9dd08a656fcd50a9 | size: 45703 | ext: py | lang: Python |
| max_stars: swagger_to/go_server.py | abingham/swagger-to | a1ef9f46561d39809da0e6ab356427a247815d92 | ["MIT"] | count: 38 | 2018-08-06T15:11:10.000Z .. 2022-02-13T22:43:00.000Z |
| max_issues: swagger_to/go_server.py | abingham/swagger-to | a1ef9f46561d39809da0e6ab356427a247815d92 | ["MIT"] | count: 42 | 2018-08-07T08:25:07.000Z .. 2021-11-28T19:32:48.000Z |
| max_forks: swagger_to/go_server.py | abingham/swagger-to | a1ef9f46561d39809da0e6ab356427a247815d92 | ["MIT"] | count: 16 | 2019-02-26T12:39:43.000Z .. 2022-01-29T06:38:41.000Z |
#!/usr/bin/env python3
"""Generate server stubs from Swagger specification in Go."""
# pylint: disable=missing-docstring,too-many-instance-attributes,too-many-locals,too-many-ancestors,too-many-branches
# pylint: disable=too-many-statements, too-many-lines
from typing import MutableMapping, Union, Set, List, Optional, Mapping, Iterable, Tuple # pylint: disable=unused-import
import collections
import icontract
import jinja2
import swagger_to
import swagger_to.indent
import swagger_to.intermediate
import swagger_to.swagger
class JsonSchema:
"""Represent a JSON validation schema."""
def __init__(self):
"""Initialize with default values."""
self.identifier = ''
self.text = ''
def _to_json_schema(intermediate_schema: swagger_to.intermediate.JsonSchema) -> JsonSchema:
"""
Convert the intermediate schema to a representation that we can use to easily generate go code.
:param intermediate_schema: intermediate representation of a JSON schema
:return: representation suitable for generation of Go code
"""
schema = JsonSchema()
schema.identifier = intermediate_schema.identifier
schema.text = intermediate_schema.text
return schema
class Typedef:
"""Represent a type definition such that it's suitable for generation of Go code."""
def __init__(self):
"""Initialize with default values."""
self.identifier = ''
self.description = ''
self.json_schema = None # type: Union[None, JsonSchema]
class Fielddef:
"""Represent a field definition of a struct suitable for generation of Go code."""
def __init__(self):
"""Initialize with default values."""
self.typedef = None # type: Union[None, Typedef]
self.description = ''
self.json_name = ''
self.name = ''
class Structdef(Typedef):
"""Represent a struct type."""
def __init__(self):
"""Initialize with default values."""
super().__init__()
self.fields = collections.OrderedDict() # type: MutableMapping[str, Fielddef]
self.required = [] # type: List[str]
class Arraydef(Typedef):
"""Represent an array type."""
def __init__(self):
"""Initialize with default values."""
super().__init__()
self.items = None # type: Union[None, Typedef]
class Mapdef(Typedef):
"""Represent a map type."""
def __init__(self):
"""Initialize with default values."""
super().__init__()
self.values = None # type: Union[None, Typedef]
class Pointerdef(Typedef):
"""Represent a pointer type."""
def __init__(self):
"""Initialize with default values."""
super().__init__()
self.pointed = None # type: Union[None, Typedef]
class Primitivedef(Typedef):
"""Represent a primitive type."""
def __init__(self):
"""Initialize with default values."""
super().__init__()
self.type = ''
class Interfacedef(Typedef):
"""Represent a interface{} type."""
def __init__(self):
"""Initialize with default values."""
super().__init__()
self.type = 'interface{}'
def _to_typedef(intermediate_typedef: swagger_to.intermediate.Typedef) -> Typedef:
"""Convert intermediate type definition into a type definition suitable for Go code generation."""
typedef = None # type: Union[None, Typedef]
if isinstance(intermediate_typedef, swagger_to.intermediate.Primitivedef):
typedef = Primitivedef()
if intermediate_typedef.type == 'string':
if intermediate_typedef.format == 'date-time':
typedef.type = 'time.Time'
else:
typedef.type = 'string'
elif intermediate_typedef.type == 'number':
if intermediate_typedef.format is None:
typedef.type = 'float64'
elif intermediate_typedef.format == 'float':
typedef.type = 'float32'
elif intermediate_typedef.format == 'double':
typedef.type = 'float64'
else:
raise ValueError("Unexpected format {!r} for type {!r}".format(intermediate_typedef.format,
intermediate_typedef.type))
elif intermediate_typedef.type == 'integer':
if intermediate_typedef.format is None:
typedef.type = 'int'
elif intermediate_typedef.format == 'int32':
typedef.type = 'int32'
elif intermediate_typedef.format == 'int64':
typedef.type = 'int64'
else:
raise ValueError("Unexpected format {!r} for type {!r}".format(intermediate_typedef.format,
intermediate_typedef.type))
elif intermediate_typedef.type == 'boolean':
typedef.type = 'bool'
else:
raise NotImplementedError(
"Unhandled translation of a primitive intermediate type to Go with 'type': {!r}".format(
intermediate_typedef.type))
elif isinstance(intermediate_typedef, swagger_to.intermediate.Arraydef):
if intermediate_typedef.items is None:
raise ValueError("Unexpected intermediate type definition of an array to have items None: {!r}".format(
intermediate_typedef.identifier))
typedef = Arraydef()
typedef.items = _to_typedef(intermediate_typedef=intermediate_typedef.items)
elif isinstance(intermediate_typedef, swagger_to.intermediate.Mapdef):
typedef = Mapdef()
typedef.values = _to_typedef(intermediate_typedef=intermediate_typedef.values)
elif isinstance(intermediate_typedef, swagger_to.intermediate.Objectdef):
typedef = Structdef()
for propdef in intermediate_typedef.properties.values():
field = Fielddef()
field_typedef = _to_typedef(intermediate_typedef=propdef.typedef)
            if propdef.name not in intermediate_typedef.required and isinstance(field_typedef, Primitivedef):
optional_field_typedef = Pointerdef()
optional_field_typedef.pointed = field_typedef
field_typedef = optional_field_typedef
field.typedef = field_typedef
field.description = propdef.description
field.json_name = propdef.name
field.name = swagger_to.capital_camel_case(identifier=propdef.name)
typedef.fields[field.name] = field
if propdef.name in intermediate_typedef.required:
typedef.required.append(field.name)
elif isinstance(intermediate_typedef, swagger_to.intermediate.AnyValuedef):
typedef = Interfacedef()
else:
raise NotImplementedError("Unhandled translation of an intermediate type to Go: {!r}".format(
type(intermediate_typedef)))
assert typedef is not None
if intermediate_typedef.identifier != '':
typedef.identifier = swagger_to.capital_camel_case(identifier=intermediate_typedef.identifier)
typedef.description = intermediate_typedef.description
typedef.json_schema = _to_json_schema(intermediate_schema=intermediate_typedef.json_schema)
return typedef
@icontract.ensure(lambda result: all(key == typedef.identifier for key, typedef in result.items()))
def to_typedefs(
intermediate_typedefs: MutableMapping[str, swagger_to.intermediate.Typedef]) -> MutableMapping[str, Typedef]:
"""Convert a table of intermediate type representations to a table of type definitions for generation of Go code."""
typedefs = collections.OrderedDict() # type: MutableMapping[str, Typedef]
for intermediate_typedef in intermediate_typedefs.values():
assert intermediate_typedef is not None
typedef = _to_typedef(intermediate_typedef=intermediate_typedef)
typedefs[typedef.identifier] = typedef
return typedefs
# yapf: disable
@icontract.ensure(
lambda intermediate_typedef, result:
intermediate_typedef.identifier == '' or result.identifier == intermediate_typedef.identifier)
@icontract.ensure(
lambda intermediate_typedef, result:
intermediate_typedef.identifier != '' or result.identifier == '')
# yapf: enable
def _anonymous_or_get_typedef(intermediate_typedef: swagger_to.intermediate.Typedef,
typedefs: MutableMapping[str, Typedef]) -> Typedef:
"""Create an anonymous type definition or retrieve the type definition from the existing definition table."""
if intermediate_typedef.identifier != '':
identifier = swagger_to.capital_camel_case(identifier=intermediate_typedef.identifier)
        if identifier not in typedefs:
raise ValueError("Undefined Go type for intermediate typedef {!r}: {!r}".format(
intermediate_typedef.identifier, identifier))
return typedefs[identifier]
return _to_typedef(intermediate_typedef=intermediate_typedef)
def _walk(typedef: Typedef, parent: Optional[Typedef] = None) -> Iterable[Tuple[Optional[Typedef], Typedef]]:
"""Walk the tree of nested type definitions as (nesting type definition, nested type definition)."""
yield parent, typedef
if isinstance(typedef, (Primitivedef, Interfacedef)):
pass
elif isinstance(typedef, Pointerdef):
if typedef.pointed is None:
raise ValueError("Unexpected None pointed in typedef: {!r}".format(typedef.identifier))
yield from _walk(typedef=typedef.pointed, parent=typedef)
elif isinstance(typedef, Arraydef):
if typedef.items is None:
raise ValueError("Unexpected None items in typedef: {!r}".format(typedef.identifier))
yield from _walk(typedef=typedef.items, parent=typedef)
elif isinstance(typedef, Mapdef):
if typedef.values is None:
raise ValueError("Unexpected None values in typedef: {!r}".format(typedef.identifier))
yield from _walk(typedef=typedef.values, parent=typedef)
elif isinstance(typedef, Structdef):
for fielddef in typedef.fields.values():
if fielddef.typedef is None:
raise ValueError("Unexpected None typedef in fielddef {!r} of type {!r}".format(
fielddef.name, typedef.identifier))
yield from _walk(typedef=fielddef.typedef, parent=typedef)
else:
raise NotImplementedError("_walk for Go type definition of type: {}".format(type(typedef)))
class Argument:
"""Represent an argument of a handler implementation."""
def __init__(self):
"""Initialize with default values."""
self.typedef = None # type: Union[None, Typedef]
self.identifier = ''
self.in_what = ''
# Original name of the endpoint parameter
self.parameter_name = ''
self.required = False
self.parsing_identifier = ''
self.json_schema = None # type: Optional[JsonSchema]
class Handler:
"""Represent a handler interface of an endpoint."""
def __init__(self):
"""Initialize with default values."""
self.identifier = ''
self.arguments = [] # type: List[Argument]
class Wrapper:
"""Represent a wrapper that parses the arguments from a request and forwards them to the handler."""
def __init__(self):
"""Initialize with default values."""
self.identifier = ''
self.handler = None # type: Union[None, Handler]
self.header_arguments = [] # type: List[Argument]
self.query_arguments = [] # type: List[Argument]
self.path_arguments = [] # type: List[Argument]
self.body_argument = None # type: Union[None, Argument]
class Route:
"""Represent a muxing route to an endpoint."""
def __init__(self):
"""Initialize with default values."""
self.description = ''
self.path = ''
self.method = ''
self.wrapper = Wrapper()
self.handler = Handler()
def _endpoint_to_route_path(endpoint: swagger_to.intermediate.Endpoint) -> str:
"""
    Convert an endpoint path to a Gorilla Mux route path.
    :param endpoint: endpoint whose path we need to convert
    :return: Gorilla Mux route path
"""
token_pth = swagger_to.tokenize_path(path=endpoint.path)
tkns = token_pth.tokens[:]
for param in endpoint.parameters:
if param.in_what != 'path':
continue
if param.name not in token_pth.parameter_to_token_indices:
continue
if not isinstance(param.typedef, swagger_to.intermediate.Primitivedef):
raise ValueError("Expected path parameter {!r} in the path {!r} to be primitive, but got: {!r}".format(
param.name, endpoint.path, type(param.typedef)))
assert isinstance(param.typedef, swagger_to.intermediate.Primitivedef)
if param.typedef.pattern != '':
param_in_route = '{' + param.name + ":" + param.typedef.pattern + "}"
else:
param_in_route = '{' + param.name + '}'
for i in token_pth.parameter_to_token_indices[param.name]:
tkns[i] = param_in_route
return "".join(tkns)
def _to_route(endpoint: swagger_to.intermediate.Endpoint, typedefs: MutableMapping[str, Typedef]) -> Route:
"""
Convert an intermediate representation of an endpoint to a muxing route of Go server stub.
:param endpoint: intermediate representation of an endpoint
:param typedefs: table of type definitions
:return: converted route
"""
route = Route()
route.method = endpoint.method.lower()
route.path = _endpoint_to_route_path(endpoint=endpoint)
route.description = endpoint.description
##
# Determine handable parameters
##
handable_parameters = [] # type: List[swagger_to.intermediate.Parameter]
for param in endpoint.parameters:
# Assert that we can handle all the supplied parameters.
if param.in_what == 'formData':
# No code is generated for the parameters in the form data since there are so many edge cases
# which we possibly can't cover.
continue
elif param.in_what in ['query', 'body', 'path', 'header']:
handable_parameters.append(param)
else:
raise NotImplementedError(
"Handling of parameters in {} is not implemented yet: endpoint {} {}, parameter {}.".format(
param.in_what, endpoint.path, endpoint.method, param.name))
##
# Generate identifiers corresponding to the parameters.
##
param_to_identifier = {param: swagger_to.camel_case(identifier=param.name) for param in handable_parameters}
# Add the location as prefix if the argument identifiers overlap
identifiers = list(param_to_identifier.values())
needs_location_prefix = len(set(identifiers)) != len(identifiers)
if needs_location_prefix:
param_to_identifier = {
param: swagger_to.camel_case(identifier="{}_{}".format(param.in_what, param.name))
for param in endpoint.parameters
}
##
# Assert that there are no conflicts at this point
##
by_identifier = collections.defaultdict(
list) # type: MutableMapping[str, List[swagger_to.intermediate.Parameter]]
for param, identifier in param_to_identifier.items():
by_identifier[identifier].append(param)
# yapf: disable
msgs = [
"in the endpoint {} {} for the identifier {!r}: {}".format(
endpoint.method.upper(), endpoint.path, identifier, ", ".join(
["{} in {}".format(param.name, param.in_what) for param in params]))
for identifier, params in by_identifier.items()
if len(params) > 1
]
# yapf: enable
if len(msgs) > 0:
raise ValueError("There are conflicting identifiers for parameters:\n{}".format("\n".join(msgs)))
##
# Convert parameters to arguments
##
assert all(param in param_to_identifier for param in handable_parameters), \
"Expected all parameters to have a generated argument identifier."
for param in handable_parameters:
identifier = param_to_identifier[param]
argument = Argument()
argument.typedef = _anonymous_or_get_typedef(intermediate_typedef=param.typedef, typedefs=typedefs)
argument.required = param.required
if not param.required and isinstance(argument.typedef, Primitivedef):
pointer_typedef = Pointerdef()
pointer_typedef.identifier = argument.typedef.identifier
pointer_typedef.description = argument.typedef.description
pointer_typedef.pointed = argument.typedef
argument.typedef = pointer_typedef
argument.parameter_name = param.name
argument.identifier = identifier
argument.in_what = param.in_what
argument.parsing_identifier = swagger_to.camel_case(identifier='a_' + identifier)
if param.json_schema is not None:
argument.json_schema = _to_json_schema(intermediate_schema=param.json_schema)
if argument.in_what == 'header':
route.wrapper.header_arguments.append(argument)
elif argument.in_what == 'query':
route.wrapper.query_arguments.append(argument)
elif argument.in_what == 'body':
route.wrapper.body_argument = argument
elif argument.in_what == 'path':
route.wrapper.path_arguments.append(argument)
else:
raise AssertionError("Unexpected argument given in: {}".format(argument.in_what))
route.handler.arguments.append(argument)
##
# Determine route attributes
##
route.wrapper.identifier = swagger_to.capital_camel_case(identifier='wrap_' + endpoint.operation_id)
route.wrapper.handler = route.handler
route.handler.identifier = swagger_to.capital_camel_case(identifier=endpoint.operation_id)
return route
def to_routes(endpoints: List[swagger_to.intermediate.Endpoint], typedefs: MutableMapping[str, Typedef]) -> List[Route]:
"""
Convert the intermediate representation of endpoints to muxing routes of a Go server stub.
:param endpoints: intermediate representation of endpoints
:param typedefs: table of type definitions
:return: muxing routes of a Go server stub
"""
routes = [] # type: List[Route]
for endpoint in endpoints:
routes.append(_to_route(endpoint=endpoint, typedefs=typedefs))
return routes
@icontract.ensure(lambda result: not result.endswith('\n'))
def _comment(text: str) -> str:
r"""
Generates a (possibly multi-line) comment from the text.
>>> cmt = _comment(" testme\n \nagain\n")
>>> assert cmt == '// testme\n//\n// again\n//'
:param text: of the comment
:return: Go code
"""
out = [] # type: List[str]
lines = text.split('\n')
for line in lines:
rstripped = line.rstrip()
if len(rstripped) > 0:
out.append('// {}'.format(rstripped))
else:
out.append('//')
return '\n'.join(out)
@icontract.ensure(lambda result: result.startswith('"'))
@icontract.ensure(lambda result: result.endswith('"'))
def _escaped_str(text: str) -> str:
"""Escape the text and returns it as a valid Golang string."""
return '"{}"'.format(
text.replace('\\', '\\\\').replace('"', '\\"').replace('\a', '\\a').replace('\f', '\\f').replace('\t', '\\t')
.replace('\n', '\\n').replace('\r', '\\r').replace('\v', '\\v'))
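# Illustrative sketch, not part of the original module; the example input is
# hypothetical. Given the chained replacements above, _escaped_str('say "hi"\n')
# should return the Go string literal "say \"hi\"\n", i.e. a double-quoted literal
# with backslashes, quotes and control characters escaped.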
# Jinja2 environment
ENV = jinja2.Environment(trim_blocks=True, lstrip_blocks=True, loader=jinja2.BaseLoader())
ENV.filters.update({
'capital_camel_case': swagger_to.capital_camel_case,
'comment': _comment,
'escaped_str': _escaped_str
})
ENV.globals.update({'is_pointerdef': lambda typedef: isinstance(typedef, Pointerdef)})
_STRUCT_TPL = ENV.from_string('''\
struct {
{% for fielddef in typedef.fields.values() %}
{% if not loop.first %}
{% endif %}
{% if fielddef.description %}
{{ fielddef.description|comment|indent }}
{% endif %}
{% set json_tags = fielddef.json_name %}
{% if fielddef.name not in typedef.required %}
{% set json_tags = json_tags + ',omitempty' %}
{% endif %}
{{ fielddef.name }} {{ field_type[fielddef] }} `json:{{ json_tags|escaped_str }}`
{% endfor %}
}
''')
@icontract.ensure(lambda result: result == result.strip())
def _express_type(typedef: Typedef) -> str:
"""Express the type in Golang corresponding to the type definition."""
if isinstance(typedef, (Primitivedef, Interfacedef)):
return typedef.type
if isinstance(typedef, Pointerdef):
if typedef.pointed is None:
raise ValueError("Unexpected None pointed in typedef: {!r}".format(typedef.identifier))
return "*{}".format(_express_or_identify_type(typedef.pointed))
if isinstance(typedef, Arraydef):
if typedef.items is None:
raise ValueError("Unexpected None items in typedef: {!r}".format(typedef.identifier))
return "[]{}".format(_express_or_identify_type(typedef.items))
if isinstance(typedef, Mapdef):
if typedef.values is None:
raise ValueError("Unexpected None values in typedef: {!r}".format(typedef.identifier))
return "map[string]{}".format(_express_or_identify_type(typedef.values))
if isinstance(typedef, Structdef):
if len(typedef.fields) == 0:
return "struct{}"
field_type = dict()
for fielddef in typedef.fields.values():
if fielddef.typedef is None:
raise ValueError('Unexpected None typedef of fielddef {!r} in typedef {!r}'.format(
fielddef.name, typedef.identifier))
field_type[fielddef] = _express_or_identify_type(fielddef.typedef)
return _STRUCT_TPL.render(typedef=typedef, field_type=field_type).strip()
else:
raise NotImplementedError("No Go type writing defined for typedef of type: {!r}".format(type(typedef)))
@icontract.ensure(lambda result: result == result.strip())
def _express_or_identify_type(typedef: Typedef) -> str:
"""Give the type identifier or expresses the type if the typedef lacks an identifier."""
if typedef.identifier != '':
return typedef.identifier
return _express_type(typedef=typedef)
@icontract.require(lambda typedef: typedef.identifier != '')
@icontract.ensure(lambda result: result == result.strip())
def _define_type(typedef: Typedef) -> str:
"""Define the type in Golang code."""
return 'type {} {}'.format(typedef.identifier, _express_type(typedef=typedef))
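# Illustrative sketch, not part of the original module; the identifier is
# hypothetical. For a Primitivedef with identifier "EmployeeID" and type "string",
# _define_type is expected to emit the Go declaration:
#
#   type EmployeeID string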
_IMPORTS_TPL = ENV.from_string('''\
{% if imports|length == 0 %}
{% elif imports|length == 1 %}
import "{{ imports[0] }}"{#
#}{% else %}
import (
{% for imprt in imports %}
"{{ imprt }}"
{% endfor %}
){% endif %}''')
@icontract.ensure(lambda result: not result.endswith('\n'))
@icontract.ensure(lambda import_set, result: len(import_set) != 0 or result == '')
def _state_imports(import_set: Set[str]) -> str:
"""State the imports in Golang code."""
return _IMPORTS_TPL.render(imports=sorted(import_set))
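# Illustrative sketch, not part of the original module: with the template above,
# _state_imports({"net/http", "github.com/gorilla/mux"}) should render the sorted
# multi-import block
#
#   import (
#       "github.com/gorilla/mux"
#       "net/http"
#   )
#
# while a single-element set collapses to a one-line `import "..."` statement.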
_TYPES_GO_TPL = ENV.from_string('''\
// Code generated by swagger_to. DO NOT EDIT.
package {{ package }}
// Automatically generated file by swagger_to. DO NOT EDIT OR APPEND ANYTHING!
{% if imports_code != '' %}
{{ imports_code }}
{% endif %}
{% for typedef in typedefs.values() %}
{% if typedef.description != '' %}
{{ '%s %s'|format(typedef.identifier, typedef.description)|comment }}
{% endif %}
type {{ typedef.identifier }} {{ type_expression[typedef] }}
{% endfor %}
''')
@icontract.ensure(lambda result: result.endswith('\n'), "final newline")
def generate_types_go(package: str, typedefs: Mapping[str, Typedef]) -> str:
"""
Generate a file which defines all the involved types.
:param package: name of the package
:param typedefs: type definitions
:return: Golang code
"""
# imports
import_set = set() # type: Set[str]
for typedef in typedefs.values():
for _, another_typedef in _walk(typedef=typedef, parent=None):
if isinstance(another_typedef, Primitivedef):
if another_typedef.type == 'time.Time':
import_set.add('time')
text = _TYPES_GO_TPL.render(
package=package,
imports_code=_state_imports(import_set=import_set),
typedefs=typedefs,
type_expression={typedef: _express_type(typedef)
for typedef in typedefs.values()})
return swagger_to.indent.reindent(text=text, indention='\t')
_STRING_ARGUMENT_FROM_STRING_TPL = ENV.from_string('''\
{% if is_pointerdef(argument.typedef) %}
val := {{ string_identifier }}
{{ target_identifier }} = &val{#
#}{% else %}
{{ target_identifier }} = {{ string_identifier }}{#
#}{% endif %}''')
_INT_ARGUMENT_FROM_STRING_TPL = ENV.from_string('''\
{
parsed, err := strconv.ParseInt({{ string_identifier }}, 10, 64)
if err != nil {
{% set msg = "Parameter '%s': "|format(argument.parameter_name)|escaped_str %}
http.Error(w, {{ msg }}+err.Error(), http.StatusBadRequest)
return
}
converted := int(parsed)
{% if is_pointerdef(argument.typedef) %}
{{ target_identifier }} = &converted
{% else %}
{{ target_identifier }} = converted
{% endif %}
}''')
_INT64_ARGUMENT_FROM_STRING_TPL = ENV.from_string('''\
{
parsed, err := strconv.ParseInt({{ string_identifier }}, 10, 64)
if err != nil {
{% set msg = "Parameter '%s': "|format(argument.parameter_name)|escaped_str %}
http.Error(w, {{ msg }}+err.Error(), http.StatusBadRequest)
return
}
converted := int64(parsed)
{% if is_pointerdef(argument.typedef) %}
{{ target_identifier }} = &converted
{% else %}
{{ target_identifier }} = converted
{% endif %}
}''')
_INT32_ARGUMENT_FROM_STRING_TPL = ENV.from_string('''\
{
parsed, err := strconv.ParseInt({{ string_identifier }}, 10, 32)
if err != nil {
{% set msg = "Parameter '%s': "|format(argument.parameter_name)|escaped_str %}
http.Error(w, {{ msg }}+err.Error(), http.StatusBadRequest)
return
}
converted := int32(parsed)
{% if is_pointerdef(argument.typedef) %}
{{ target_identifier }} = &converted
{% else %}
{{ target_identifier }} = converted
{% endif %}
}''')
_FLOAT32_ARGUMENT_FROM_STRING_TPL = ENV.from_string('''\
{
parsed, err := strconv.ParseFloat({{ string_identifier }}, 32)
if err != nil {
{% set msg = "Parameter '%s': "|format(argument.parameter_name)|escaped_str %}
http.Error(w, {{ msg }}+err.Error(), http.StatusBadRequest)
return
}
converted := float32(parsed)
{% if is_pointerdef(argument.typedef) %}
{{ target_identifier }} = &converted
{% else %}
{{ target_identifier }} = converted
{% endif %}
}''')
_FLOAT64_ARGUMENT_FROM_STRING_TPL = ENV.from_string('''\
{
parsed, err := strconv.ParseFloat({{ string_identifier }}, 64)
if err != nil {
{% set msg = "Parameter '%s': "|format(argument.parameter_name)|escaped_str %}
http.Error(w, {{ msg }}+err.Error(), http.StatusBadRequest)
return
}
converted := float64(parsed)
{% if is_pointerdef(argument.typedef) %}
{{ target_identifier }} = &converted
{% else %}
{{ target_identifier }} = converted
{% endif %}
}''')
_BOOLEAN_ARGUMENT_FROM_STRING_TPL = ENV.from_string('''\
{
parsed, err := strconv.ParseBool({{ string_identifier }})
if err != nil {
{% set msg = "Parameter '%s': "|format(argument.parameter_name)|escaped_str %}
http.Error(w, {{ msg }}+err.Error(), http.StatusBadRequest)
return
}
{% if is_pointerdef(argument.typedef) %}
{{ target_identifier }} = &parsed
{% else %}
{{ target_identifier }} = parsed
{% endif %}
}
''')
@icontract.require(lambda string_identifier: string_identifier == string_identifier.strip())
@icontract.ensure(lambda result: not result.endswith('\n'))
def _argument_from_string(argument: Argument, string_identifier: str) -> str:
"""Generate the code to parse an argument from a string."""
target_identifier = argument.parsing_identifier
tajp = ''
if isinstance(argument.typedef, Primitivedef):
tajp = argument.typedef.type
elif isinstance(argument.typedef, Pointerdef):
if isinstance(argument.typedef.pointed, Primitivedef):
tajp = argument.typedef.pointed.type
else:
raise NotImplementedError("Parsing argument from string {!r} of type: {!r}".format(
string_identifier, type(argument)))
assert tajp != '', 'Expected tajp to be set in the previous execution path.'
if tajp == 'string':
return _STRING_ARGUMENT_FROM_STRING_TPL.render(
argument=argument, string_identifier=string_identifier, target_identifier=target_identifier)
elif tajp == 'int':
return _INT_ARGUMENT_FROM_STRING_TPL.render(
argument=argument, string_identifier=string_identifier, target_identifier=target_identifier)
elif tajp == 'int64':
return _INT64_ARGUMENT_FROM_STRING_TPL.render(
argument=argument, string_identifier=string_identifier, target_identifier=target_identifier)
elif tajp == 'int32':
return _INT32_ARGUMENT_FROM_STRING_TPL.render(
argument=argument, string_identifier=string_identifier, target_identifier=target_identifier)
elif tajp == 'float32':
return _FLOAT32_ARGUMENT_FROM_STRING_TPL.render(
argument=argument, string_identifier=string_identifier, target_identifier=target_identifier)
elif tajp == 'float64':
return _FLOAT64_ARGUMENT_FROM_STRING_TPL.render(
argument=argument, string_identifier=string_identifier, target_identifier=target_identifier)
elif tajp == 'bool':
return _BOOLEAN_ARGUMENT_FROM_STRING_TPL.render(
argument=argument, string_identifier=string_identifier, target_identifier=target_identifier)
else:
raise NotImplementedError("Parsing argument from string {!r} of Go type: {!r}".format(string_identifier, tajp))
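# Illustrative sketch, not part of the original module; the parameter name "limit"
# and parsing identifier aLimit are hypothetical. For a required query parameter of
# Go type int, the int template above is expected to emit Go code roughly like:
#
#   {
#       parsed, err := strconv.ParseInt(q.Get("limit"), 10, 64)
#       if err != nil {
#           http.Error(w, "Parameter 'limit': "+err.Error(), http.StatusBadRequest)
#           return
#       }
#       converted := int(parsed)
#       aLimit = converted
#   }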
_ARGUMENT_FROM_BODY_TPL = ENV.from_string('''\
{
var err error
r.Body = http.MaxBytesReader(w, r.Body, 1024*1024)
body, err := ioutil.ReadAll(r.Body)
if err != nil {
http.Error(w, "Body unreadable: "+err.Error(), http.StatusBadRequest)
return
}
err = ValidateAgainst{{ argument.json_schema.identifier|capital_camel_case }}Schema(body)
if err != nil {
http.Error(w, "Failed to validate against schema: "+err.Error(), http.StatusBadRequest)
return
}
err = json.Unmarshal(body, &{{ argument.parsing_identifier }})
if err != nil {
{% set msg = "Error JSON-decoding body parameter '%s': "|format(argument.parameter_name)|escaped_str %}
http.Error(w, {{ msg }}+err.Error(),
http.StatusBadRequest)
return
}
}''')
@icontract.ensure(lambda result: not result.endswith('\n'))
def _argument_from_body(argument: Argument) -> str:
"""Generate the code to parse the argument from a request body."""
return _ARGUMENT_FROM_BODY_TPL.render(argument=argument)
_WRAPPER_TPL = ENV.from_string('''\
{% set newliner = joiner("XXX") %}
{% set description %}
{{ route.wrapper.identifier }} wraps the path `{{ route.path }}` with the method "{{ route.method }}".
{% if route.description %}
Path description:
{{ route.description }}
{% endif %}
{% endset %}{# /set description #}
{{ description|trim|comment }}
func {{ route.wrapper.identifier }}(h Handler, w http.ResponseWriter, r *http.Request) {
{% if route.handler.arguments %}{# intermediate variables #}
{% if newliner() %}{{ '\n' }}{% endif %}
{% for argument in route.handler.arguments %}
var {{ argument.parsing_identifier }} {{ express_or_identify_type[argument]|indent|indent }}
{% endfor %}{# /for argument in route.handler.arguments #}
{% endif %}{# /if intermediate variables #}
{% if route.wrapper.header_arguments %}{### Header arguments ###}
{% if newliner() %}{{ '\n' }}{% endif %}
hdr := r.Header
{% for argument in route.wrapper.header_arguments %}
{% if argument.required %}
if _, ok := hdr[{{ argument.parameter_name|escaped_str }}]; !ok {
{% set msg = "Parameter '%s' expected in header"|format(argument.parameter_name)|escaped_str %}
http.Error(w, {{ msg }}, http.StatusBadRequest)
return
}
{{ argument_from_string(
argument, "hdr.Get(%s)"|format(argument.parameter_name|escaped_str))|indent }}
{% else %}
if _, ok := hdr[{{ argument.parameter_name|escaped_str }}]; ok {
{{ argument_from_string(
argument, "hdr.Get(%s)"|format(argument.parameter_name|escaped_str))|indent|indent }}
}
{% endif %}{# /if argument.required #}
{% endfor %}{# /for argument in route.wrapper.header_arguments #}
{% endif %}{# /if header arguments #}
{% if route.wrapper.query_arguments %}{### Query arguments ###}
{% if newliner() %}{{ '\n' }}{% endif %}
q := r.URL.Query()
{% for argument in route.wrapper.query_arguments %}
{% if argument.required %}
if _, ok := q[{{ argument.parameter_name|escaped_str }}]; !ok {
{% set msg = "Parameter '%s' expected in query"|format(argument.parameter_name)|escaped_str %}
http.Error(w, {{ msg }}, http.StatusBadRequest)
return
}
{{ argument_from_string(
argument, "q.Get(%s)"|format(argument.parameter_name|escaped_str))|indent }}
{% else %}
if _, ok := q[{{ argument.parameter_name|escaped_str }}]; ok {
{{ argument_from_string(
argument, "q.Get(%s)"|format(argument.parameter_name|escaped_str))|indent|indent }}
}
{% endif %}{# /if argument.required #}
{% endfor %}{# /for query arguments #}
{% endif %}{# /if query arguments #}
{% if route.wrapper.path_arguments %}{### Path arguments ###}
{% if newliner() %}{{ '\n' }}{% endif %}
vars := mux.Vars(r)
{% for argument in route.wrapper.path_arguments %}
{% if argument.required %}
if _, ok := vars[{{ argument.parameter_name|escaped_str }}]; !ok {
{% set msg = "Parameter '%s' expected in path"|format(argument.parameter_name)|escaped_str %}
http.Error(w, {{ msg }}, http.StatusBadRequest)
return
}
{{ argument_from_string(argument, "vars[%s]"|format(argument.parameter_name|escaped_str))|indent }}
{% else %}
if _, ok := vars[{{ argument.parameter_name|escaped_str }}]; ok {
{{ argument_from_string(argument, "vars[%s]"|format(argument.parameter_name|escaped_str))|indent|indent }}
}
{% endif %}
{% endfor %}{# /path arguments #}
{% endif %}{# /if path arguments #}
{% if route.wrapper.body_argument is not none %}{### Body argument ###}
{% if newliner() %}{{ '\n' }}{% endif %}
{% if route.wrapper.body_argument.required %}
if r.Body == nil {
{% set msg = "Parameter '%s' expected in body, but got no body"|format(route.wrapper.body_argument.parameter_name)|escaped_str %}
http.Error(w, {{ msg }}, http.StatusBadRequest)
return
}
{{ argument_from_body(route.wrapper.body_argument)|indent }}
{% else %}
if r.Body != nil {
{{ argument_from_body(route.wrapper.body_argument)|indent|indent }}
}
{% endif %}{# /if route.wrapper.body_argument.required #}
{% endif %}{# /if body argument #}
{% if newliner() %}{{ '\n' }}{% endif %}
{% if not route.handler.arguments %}
h.{{ route.handler.identifier }}(w, r)
{% else %}
h.{{ route.handler.identifier }}(w,
r,
{% for argument in route.handler.arguments %}
{{ argument.parsing_identifier }}{{ "," if not loop.last else ")" }}
{% endfor %}
{% endif %}
}
''')
_ROUTES_GO_TPL = ENV.from_string('''\
// Code generated by swagger_to. DO NOT EDIT.
package {{ package }}
// Automatically generated file by swagger_to. DO NOT EDIT OR APPEND ANYTHING!
{% if imports_code != '' %}
{{ imports_code }}
{% endif %}
// SetupRouter sets up a router. If you don't use any middleware, you are good to go.
// Otherwise, you need to manually re-implement this function with your middlewares.
func SetupRouter(h Handler) *mux.Router {
r := mux.NewRouter()
{% for route in routes %}
r.HandleFunc(`{{ route.path }}`,
func(w http.ResponseWriter, r *http.Request) {
{{ route.wrapper.identifier }}(h, w, r)
}).Methods({{ route.method|escaped_str }})
{% endfor %}
return r
}
{% if routes %}
{% for route in routes %}
{{ wrapper_code[route] }}
{% endfor %}
{% endif %}
// Automatically generated file by swagger_to. DO NOT EDIT OR APPEND ANYTHING!
''')
@icontract.ensure(lambda result: result.endswith('\n'), "final new line")
def generate_routes_go(package: str, routes: List[Route]) -> str:
"""
Generate the file which defines the router and the routes.
:param package: name of the package
:param routes: routes that the router will handle.
:return: Golang code
"""
# imports
import_set = {"github.com/gorilla/mux", "net/http"}
for route in routes:
for argument in route.handler.arguments:
if argument.in_what == 'body':
import_set.add('io/ioutil')
import_set.add('encoding/json')
if argument.in_what == 'query' or argument.in_what == 'header':
tajp = ''
if isinstance(argument.typedef, Primitivedef):
tajp = argument.typedef.type
elif isinstance(argument.typedef, Pointerdef):
if isinstance(argument.typedef.pointed, Primitivedef):
tajp = argument.typedef.pointed.type
if tajp in ['int', 'int32', 'int64', 'float32', 'float64']:
import_set.add('strconv')
imports_code = _state_imports(import_set=import_set)
wrapper_code = {
route: _WRAPPER_TPL.render(
route=route,
express_or_identify_type={
argument: _express_or_identify_type(typedef=argument.typedef)
for route in routes for argument in route.handler.arguments
},
argument_from_string=_argument_from_string,
argument_from_body=_argument_from_body)
for route in routes
}
text = _ROUTES_GO_TPL.render(package=package, imports_code=imports_code, routes=routes, wrapper_code=wrapper_code)
return swagger_to.indent.reindent(text=text, indention='\t')
_HANDLER_IMPL_GO_TPL = ENV.from_string('''\
package {{ package }}
import (
"net/http"
"log"
)
// HandlerImpl implements the Handler.
type HandlerImpl struct {
LogErr *log.Logger
LogOut *log.Logger}
{% for route in routes %}
// {{ route.handler.identifier }} implements Handler.{{ route.handler.identifier }}.
{% if not route.handler.arguments %}
func (h *HandlerImpl) {{ route.handler.identifier }}(w http.ResponseWriter,
r *http.Request) {
{% else %}
func (h *HandlerImpl) {{ route.handler.identifier }}(w http.ResponseWriter,
r *http.Request,
{% for argument in route.handler.arguments %}
{{ argument.identifier }} {{ argument_type[argument] }}{{ ',' if not loop.last else ') {' }}
{% endfor %}
{% endif %}{# /if not route.handler.arguments #}
{% set msg = "Not implemented: %s"|format(route.handler.identifier)|escaped_str %}
http.Error(w, {{ msg }}, http.StatusInternalServerError)
h.LogErr.Printf({{ msg }})
}
{% endfor %}{# /routes #}
''')
@icontract.ensure(lambda result: result.endswith('\n'), "final newline")
def generate_handler_impl_go(package: str, routes: List[Route]) -> str:
"""
Generate a file which implements the handler interface with empty methods.
:param package: name of the package
:param routes: that a handler will handle
:return: Golang code
"""
text = _HANDLER_IMPL_GO_TPL.render(
package=package,
routes=routes,
argument_type={
argument: _express_or_identify_type(argument.typedef)
for route in routes for argument in route.handler.arguments
})
return swagger_to.indent.reindent(text=text, indention='\t')
_HANDLER_GO_TPL = ENV.from_string('''\
// Code generated by swagger_to. DO NOT EDIT.
package {{ package }}
// Automatically generated file by swagger_to. DO NOT EDIT OR APPEND ANYTHING!
import "net/http"
// Handler defines an interface to handling the routes.
type Handler interface {
{% for route in routes %}
{% if not loop.first %}
{% endif %}
{% set handler_description %}
{{ route.handler.identifier }} handles the path `{{ route.path }}` with the method "{{ route.method }}".
{% if route.description %}
Path description:
{{ route.description }}
{% endif %}{# /if route.description #}
{% endset %}{# /set handler_description #}
{{ handler_description|trim|comment|indent }}
{% if not route.handler.arguments %}
{{ route.handler.identifier }}(w http.ResponseWriter,
r *http.Request)
{% else %}
{{ route.handler.identifier }}(w http.ResponseWriter,
r *http.Request,
{% for argument in route.handler.arguments %}
{{ argument.identifier }} {{ argument_type[argument] }}{{ ',' if not loop.last else ')' }}
{% endfor %}
{% endif %}{# /if not route.handler.arguments #}
{% endfor %}
}
// Automatically generated file by swagger_to. DO NOT EDIT OR APPEND ANYTHING!
''')
@icontract.ensure(lambda result: result.endswith('\n'), "final newline")
def generate_handler_go(package: str, routes: List[Route]) -> str:
"""
Generate a file which defines the handler interface.
:param package: name of the package
:param routes: that a handler will handle
:return: Golang code
"""
text = _HANDLER_GO_TPL.render(
package=package,
routes=routes,
argument_type={
argument: _express_or_identify_type(argument.typedef)
for route in routes for argument in route.handler.arguments
})
return swagger_to.indent.reindent(text=text, indention='\t')
_JSON_SCHEMAS_GO_TPL = ENV.from_string('''\
{# This template must be indented with tabs since we need to include the schema as text and hence can not re-indent:
re-indentation would also alter the embedded schema text. #}
// Code generated by swagger_to. DO NOT EDIT.
package {{ package }}
// Automatically generated file by swagger_to. DO NOT EDIT OR APPEND ANYTHING!
{% if not schemas %}
// No schemas are defined in the swagger.
{% else %}
import (
"errors"
"fmt"
"github.com/xeipuuv/gojsonschema"
)
func mustNewJSONSchema(text string, name string) *gojsonschema.Schema {
loader := gojsonschema.NewStringLoader(text)
schema, err := gojsonschema.NewSchema(loader)
if err != nil {
panic(fmt.Sprintf("failed to load JSON Schema %#v: %s", text, err.Error()))
}
return schema
}
{% for schema in schemas.values() %}
var jsonSchema{{ schema.identifier|capital_camel_case }}Text = `{{ schema.text|replace('`', '` + "`" + `') }}`
{% endfor %}
{% for schema in schemas.values() %}
var jsonSchema{{ schema.identifier|capital_camel_case }} = mustNewJSONSchema(
jsonSchema{{ schema.identifier|capital_camel_case }}Text,
{{ schema.identifier|escaped_str }})
{% endfor %}
{% for schema in schemas.values() %}
{% set validateFuncName = "ValidateAgainst%sSchema"|format(schema.identifier|capital_camel_case) %}
// {{ validateFuncName }} validates a message coming from the client against {{ schema.identifier }} schema.
func {{ validateFuncName }}(bb []byte) error {
loader := gojsonschema.NewStringLoader(string(bb))
result, err := jsonSchema{{ schema.identifier|capital_camel_case }}.Validate(loader)
if err != nil {
return err
}
if result.Valid() {
return nil
}
msg := ""
for i, valErr := range result.Errors() {
if i > 0 {
msg += ", "
}
msg += valErr.String()
}
return errors.New(msg)
}
{% endfor %}
{% endif %}{# /if not schemas #}
// Automatically generated file by swagger_to. DO NOT EDIT OR APPEND ANYTHING!
''')
@icontract.ensure(lambda result: result.endswith('\n'), "final newline")
def generate_json_schemas_go(package: str, routes: List[Route], typedefs: MutableMapping[str, Typedef]) -> str:
"""
Represent the definitions as JSON schemas and hard-code them as strings in Go.
It is assumed that the Swagger definitions already represent a subset of JSON Schema.
This is theoretically not the case (some formats are swagger-only), but in most cases
the literal translation should work.
:param package: package name
:param routes: needed to generate the parameter schemas if they are not already defined in the definitions
:param typedefs: type definitions to generate the schemas for
:return: Golang code
"""
schemas = collections.OrderedDict() # type: MutableMapping[str, JsonSchema]
for route in routes:
if route.wrapper.body_argument is not None:
if route.wrapper.body_argument.json_schema.identifier not in schemas:
schemas[route.wrapper.body_argument.json_schema.identifier] = route.wrapper.body_argument.json_schema
for typedef_id, typedef in typedefs.items():
if typedef.json_schema is None:
raise AssertionError("Missing JSON schema for typedef: {!r}".format(typedef_id))
# Assume the typedef identifiers are unique across routes and typedefs.
if typedef.json_schema.identifier not in schemas:
schemas[typedef.json_schema.identifier] = typedef.json_schema
return _JSON_SCHEMAS_GO_TPL.render(package=package, schemas=schemas)
| 35.26466
| 133
| 0.657812
|
26e28d228495c14a07ad5e621cc4dd3c6edecde9
| 3,427
|
py
|
Python
|
cf/test/test_Maths.py
|
sadielbartholomew/cf-python
|
98541d8e55c703eca9bfba4168fb3d42755267da
|
[
"MIT"
] | null | null | null |
cf/test/test_Maths.py
|
sadielbartholomew/cf-python
|
98541d8e55c703eca9bfba4168fb3d42755267da
|
[
"MIT"
] | null | null | null |
cf/test/test_Maths.py
|
sadielbartholomew/cf-python
|
98541d8e55c703eca9bfba4168fb3d42755267da
|
[
"MIT"
] | null | null | null |
import datetime
import faulthandler
import os
import unittest
import numpy
faulthandler.enable() # to debug seg faults and timeouts
import cf
class MathTest(unittest.TestCase):
filename1 = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "regrid_file1.nc"
)
def test_relative_vorticity_distance(self):
x_min = 0.0
x_max = 100.0
dx = 1.0
x_1d = numpy.arange(x_min, x_max, dx)
size = x_1d.size
data_1d = x_1d * 2.0 + 1.0
data_2d = numpy.broadcast_to(data_1d[numpy.newaxis, :], (size, size))
dim_x = cf.DimensionCoordinate(
data=cf.Data(x_1d, "m"), properties={"axis": "X"}
)
dim_y = cf.DimensionCoordinate(
data=cf.Data(x_1d, "m"), properties={"axis": "Y"}
)
u = cf.Field()
X = u.set_construct(cf.DomainAxis(size=dim_x.data.size))
Y = u.set_construct(cf.DomainAxis(size=dim_y.data.size))
u.set_construct(dim_x, axes=[X])
u.set_construct(dim_y, axes=[Y])
u.set_data(cf.Data(data_2d, "m/s"), axes=("Y", "X"))
v = cf.Field()
v.set_construct(cf.DomainAxis(size=dim_x.data.size))
v.set_construct(cf.DomainAxis(size=dim_y.data.size))
v.set_construct(dim_x, axes=[X])
v.set_construct(dim_y, axes=[Y])
v.set_data(cf.Data(data_2d, "m/s"), axes=("X", "Y"))
rv = cf.relative_vorticity(u, v, one_sided_at_boundary=True)
self.assertTrue((rv.array == 0.0).all())
def test_relative_vorticity_latlong(self):
lat_min = -90.0
lat_max = 90.0
dlat = 1.0
lat_1d = numpy.arange(lat_min, lat_max, dlat)
lat_size = lat_1d.size
lon_min = 0.0
lon_max = 359.0
dlon = 1.0
lon_1d = numpy.arange(lon_min, lon_max, dlon)
lon_size = lon_1d.size
u_1d = lat_1d * 2.0 + 1.0
u_2d = numpy.broadcast_to(u_1d[numpy.newaxis, :], (lon_size, lat_size))
v_1d = lon_1d * 2.0 + 1.0
v_2d = numpy.broadcast_to(v_1d[:, numpy.newaxis], (lon_size, lat_size))
v_2d = v_2d * numpy.cos(lat_1d * numpy.pi / 180.0)[numpy.newaxis, :]
rv_array = (
u_2d
/ cf.Data(6371229.0, "meters")
* numpy.tan(lat_1d * numpy.pi / 180.0)[numpy.newaxis, :]
)
dim_x = cf.DimensionCoordinate(
data=cf.Data(lon_1d, "degrees_east"), properties={"axis": "X"}
)
dim_y = cf.DimensionCoordinate(
data=cf.Data(lat_1d, "degrees_north"), properties={"axis": "Y"}
)
u = cf.Field()
u.set_construct(cf.DomainAxis(size=lon_1d.size))
u.set_construct(cf.DomainAxis(size=lat_1d.size))
u.set_construct(dim_x)
u.set_construct(dim_y)
u.set_data(cf.Data(u_2d, "m/s"), axes=("X", "Y"))
u.cyclic("X", period=360.0)
v = cf.Field()
v.set_construct(cf.DomainAxis(size=lon_1d.size))
v.set_construct(cf.DomainAxis(size=lat_1d.size))
v.set_construct(dim_x)
v.set_construct(dim_y)
v.set_data(cf.Data(v_2d, "m/s"), axes=("X", "Y"))
v.cyclic("X", period=360.0)
rv = cf.relative_vorticity(u, v, wrap=True)
self.assertTrue(numpy.allclose(rv.array, rv_array))
if __name__ == "__main__":
print("Run date:", datetime.datetime.now())
cf.environment()
print()
unittest.main(verbosity=2)
| 30.327434
| 79
| 0.583309
|
d93ecccc7185994bdc387b95945945d1fc70e5a3
| 107
|
py
|
Python
|
sabrina_test/visualization/visualize.py
|
charlos1204/sabrina_test
|
b3d840b9fd2d42c4bd9c0eae4a1c294555171e3a
|
[
"RSA-MD"
] | null | null | null |
sabrina_test/visualization/visualize.py
|
charlos1204/sabrina_test
|
b3d840b9fd2d42c4bd9c0eae4a1c294555171e3a
|
[
"RSA-MD"
] | null | null | null |
sabrina_test/visualization/visualize.py
|
charlos1204/sabrina_test
|
b3d840b9fd2d42c4bd9c0eae4a1c294555171e3a
|
[
"RSA-MD"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Visualization
"""
# import project config.py
import sabrina_test.config as cfg
| 15.285714
| 33
| 0.682243
|
8cd4742e183f5d859930112322fee1ecf0fde201
| 1,299
|
py
|
Python
|
Research/Code-Snippets/LoginUsingFace.py
|
AuroraStarChain/facial-recognition-temperature
|
5250dac393c8c791c24989b5ff745bc9e7580811
|
[
"MIT"
] | 34
|
2020-01-27T15:07:25.000Z
|
2021-09-25T17:07:37.000Z
|
doc's/Code-Snippets/LoginUsingFace.py
|
Moran98/facial-recognition
|
da4711c5d0fb77d77a5dffb20d85bfa9072f7933
|
[
"MIT"
] | 26
|
2020-01-29T12:24:42.000Z
|
2022-03-12T00:16:44.000Z
|
Research/Code-Snippets/LoginUsingFace.py
|
AuroraStarChain/facial-recognition-temperature
|
5250dac393c8c791c24989b5ff745bc9e7580811
|
[
"MIT"
] | 7
|
2020-01-27T11:42:11.000Z
|
2021-04-05T04:42:22.000Z
|
import face_recognition
from PIL import Image as img , ImageDraw
image_of_Obama = face_recognition.load_image_file('RegisterPhoto.jpg')
Obama_face_encoding = face_recognition.face_encodings(image_of_Obama)[0]
Known_face_encoding = [
Obama_face_encoding
]
known_face_names = [
"Name of user"
]
Test_image = face_recognition.load_image_file('LoginPhoto.jpg')
face_locations = face_recognition.face_locations(Test_image)
face_encoding = face_recognition.face_encodings(Test_image,face_locations)
pil_image = img.fromarray(Test_image)
draw = ImageDraw.Draw(pil_image)
for(top,right,bottom , left) , face_encoding in zip(face_locations,face_encoding):
matches = face_recognition.compare_faces(Known_face_encoding,face_encoding)
name = "Unknown Person"
if True in matches:
first_match_index = matches.index(True)
name = known_face_names[first_match_index]
draw.rectangle(((left,top), (right, bottom)) , outline=(0,0,0))
text_width , text_height = draw.textsize(name)
draw.rectangle(((left,bottom - text_height) , (right , bottom+5)),fill=(0,0,0), outline=(0,0,0))
draw.text((left + 6 , bottom - text_height), name, fill = (255,255,255,255))
del draw
print("Thank you for logging in " + name)
pil_image.show()
pil_image.save('identify.jpg')
| 28.23913
| 100
| 0.750577
|
f28dac142c7ade5b076585268ef79e6026102ca4
| 2,443
|
py
|
Python
|
app/core/tests/test_models.py
|
crocodundee/poll-api-app
|
dceaddeb4fe579024211ce3ba25ca7ffd6c5226f
|
[
"MIT"
] | null | null | null |
app/core/tests/test_models.py
|
crocodundee/poll-api-app
|
dceaddeb4fe579024211ce3ba25ca7ffd6c5226f
|
[
"MIT"
] | null | null | null |
app/core/tests/test_models.py
|
crocodundee/poll-api-app
|
dceaddeb4fe579024211ce3ba25ca7ffd6c5226f
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from django.contrib.auth import get_user_model
from core.models import Question, Answer, Poll
def sample_user(username='username', password='password'):
"""Create sample user"""
return get_user_model().objects.create_user(
username=username, password=password
)
class ModelsTests(TestCase):
"""Testing project's models"""
def test_create_user_success(self):
"""Test create user with valid credentials"""
username = 'testuser'
password = 'testpass'
user = get_user_model().objects.create_user(
username=username, password=password
)
self.assertEqual(user.username, username)
self.assertTrue(user.check_password(password))
def test_create_superuser(self):
"""Test is superuser created successfull"""
superuser = get_user_model().objects.create_superuser(
username='testsuperuser',
password='testsuperpass',
email='admin@company.com',
)
self.assertTrue(superuser.is_staff)
self.assertTrue(superuser.is_superuser)
def test_create_question_success(self):
"""Test creating questions polls"""
question = Question.objects.create(
title='How old are you?', type='TEXT', user=sample_user()
)
self.assertEqual(str(question), question.title)
def test_create_answer_success(self):
"""Test answer the question"""
user = sample_user()
user_id = 'Lm678tr'
question = Question.objects.create(
title='Whats up?', type='TEXT', user=user
)
answer = Answer.objects.create(
content="I'm fine!", question=question, user_id=user_id
)
expected = f'{answer.question.id}-{answer.user_id}'
self.assertEqual(str(answer), expected)
def test_create_poll_success(self):
"""Test create poll"""
user = sample_user()
question = Question.objects.create(
title='How are you?', type='TEXT', user=user
)
poll = Poll.objects.create(
title='Profile poll',
description='Get personal info',
date_start='2020-05-15',
date_end='2020-06-15',
user=user,
)
poll.questions.add(question)
self.assertEqual(str(poll), poll.title)
self.assertEqual(poll.questions.count(), 1)
| 30.924051
| 69
| 0.621777
|
ef630020da2b1a69b857e6213862ac955421138e
| 26,619
|
py
|
Python
|
radiantkit/plot.py
|
ggirelli/radiantkit
|
df3e57dbcda902f4f7b3796e6b8dbbf623ee36b8
|
[
"MIT"
] | 2
|
2021-03-03T09:19:25.000Z
|
2022-03-23T10:24:47.000Z
|
radiantkit/plot.py
|
ggirelli/radiantkit
|
df3e57dbcda902f4f7b3796e6b8dbbf623ee36b8
|
[
"MIT"
] | 122
|
2020-10-05T06:19:09.000Z
|
2022-02-04T18:26:20.000Z
|
radiantkit/plot.py
|
ggirelli/radiantkit
|
df3e57dbcda902f4f7b3796e6b8dbbf623ee36b8
|
[
"MIT"
] | null | null | null |
"""
@author: Gabriele Girelli
@contact: gigi.ga90@gmail.com
"""
from collections import defaultdict
import numpy as np # type: ignore
import os
import pandas as pd # type: ignore
import plotly.graph_objects as go # type: ignore
import plotly.express as px # type: ignore
from plotly.subplots import make_subplots # type: ignore
from radiantkit import distance, report, stat
from typing import Any, DefaultDict, Dict, List, Optional, Tuple
def get_axis_label(axis: str, aid: int) -> str:
return f"{axis}{aid+1}" if aid > 0 else axis
def get_axis_range(
trace_list: List[go.Figure], axis_type: str, axis_label: str
) -> Tuple[float, float]:
return (
np.min(
[
trace[axis_type].min()
for trace in trace_list
if axis_label == trace[f"{axis_type}axis"]
]
),
np.max(
[
trace[axis_type].max()
for trace in trace_list
if axis_label == trace[f"{axis_type}axis"]
]
),
)
def add_derivative_xaxis_to_profiles(fig: go.Figure) -> go.Figure:
fig.add_shape(
type="line",
x0=0,
x1=1,
y0=0,
y1=0,
xsizemode="scaled",
ysizemode="scaled",
line_color="#969696",
xref="x",
yref="y2",
line_dash="dash",
)
fig.add_shape(
type="line",
x0=0,
x1=1,
y0=0,
y1=0,
xsizemode="scaled",
ysizemode="scaled",
line_color="#969696",
xref="x",
yref="y3",
line_dash="dash",
)
return fig
def add_line_trace(
fig: go.Figure,
x0: Optional[np.ndarray],
x1: Optional[np.ndarray],
y0: Optional[np.ndarray],
y1: Optional[np.ndarray],
line_color: str = "#969696",
**kwargs,
) -> go.Figure:
fig.add_trace(
go.Scatter(
x=[x0, x1],
y=[y0, y1],
mode="lines",
line_color=line_color,
**kwargs,
)
)
return fig
class ProfileMultiConditionNorm(object):
html_class: str = "plot-multi-condition-normalized"
_stub: str
def __init__(self, stub: str):
super(ProfileMultiConditionNorm, self).__init__()
self._stub = stub
def __make_scatter_trace(
self,
channel_data: pd.DataFrame,
pfit: Dict[str, Dict[str, Any]],
) -> go.Scatter:
condition_list: List[str] = sorted(list(set(channel_data["condition"])))
panel_data = []
for condition_idx in range(len(condition_list)):
condition_lab = condition_list[condition_idx]
condition_data = channel_data.loc[
condition_lab == channel_data["condition"], :
]
assert condition_lab in pfit
assert "pfit" in pfit[condition_lab]
x, y = pfit[condition_lab]["pfit"].linspace(200)
xx, yy = pfit[condition_lab]["pfit"].deriv().linspace(200)
xxx, yyy = pfit[condition_lab]["pfit"].deriv().deriv().linspace(200)
stat_lab = pfit[condition_lab]["stat"].value
panel_data.extend(
[
go.Scatter(
name=f"{condition_lab}_{stat_lab}_raw",
xaxis="x",
yaxis=get_axis_label("y", condition_idx),
x=condition_data["x"],
y=condition_data[f"{stat_lab}_raw"],
mode="markers",
legendgroup=condition_lab,
marker=dict(
size=4,
opacity=0.5,
color=px.colors.qualitative.Pastel2[condition_idx],
),
showlegend=False,
),
go.Scatter(
name=f"{condition_lab}_{stat_lab}",
x=x,
y=y,
xaxis="x",
yaxis=get_axis_label("y", condition_idx),
mode="lines",
legendgroup=condition_lab,
line_color=px.colors.qualitative.Dark2[condition_idx],
),
]
)
return panel_data
def __add_der_zeros(
self, fig: go.Figure, pfit_data: Dict[str, Dict[str, Any]]
) -> go.Figure:
pfit_sorted = sorted(pfit_data.items(), key=lambda x: x[0])
for pfit_idx in range(len(pfit_sorted)):
condition_lab, pfit = pfit_sorted[pfit_idx]
der_roots = stat.get_radial_profile_roots(pfit["pfit"])
for rid in range(len(der_roots)):
if np.isnan(der_roots[rid]):
continue
pid = 0
panel_trace_y = np.concatenate(
[
p["y"]
for p in fig["data"]
if p["yaxis"] == get_axis_label("y", pid)
]
)
fig = add_line_trace(
fig,
der_roots[rid],
der_roots[rid],
panel_trace_y.min(),
panel_trace_y.max(),
line_dash="dot" if rid == 1 else "dash",
line_color=px.colors.qualitative.Set2[pfit_idx],
legendgroup=condition_lab,
showlegend=False,
xaxis="x",
yaxis=get_axis_label("y", pid),
)
return fig
def __secondary_yaxes_props(
self, pfit_data: Dict[str, List[Dict[str, Any]]]
) -> Dict[str, Any]:
yaxes_props: Dict[str, Any] = {}
for ii in range(1, len(pfit_data)):
yaxes_props[get_axis_label("yaxis", ii)] = dict(
domain=[0, 1],
side="left",
showgrid=False,
zeroline=False,
visible=False,
)
if "y" != get_axis_label("y", ii):
yaxes_props[get_axis_label("yaxis", ii)]["overlaying"] = "y"
return yaxes_props
def __make_panel(
self,
data: pd.DataFrame,
pfit_data: Dict[str, List[Dict[str, Any]]],
stat_type: stat.ProfileStatType,
dtype: distance.DistanceType,
) -> go.Figure:
channel_lab = data["channel"].tolist()[0]
selected_pfits: Dict[str, Dict[str, Any]] = {}
for condition_lab, pfit_list in pfit_data.items():
for pfit in pfit_list:
condition = pfit["stat"] == stat_type
condition = condition and pfit["distance_type"] == dtype.value
condition = condition and pfit["cname"] == channel_lab
if condition:
selected_pfits[
os.path.basename(os.path.dirname(condition_lab))
] = pfit
fig = make_subplots(specs=[[{"secondary_y": True}]])
plot_data = self.__make_scatter_trace(
data,
selected_pfits,
)
for panel in plot_data:
fig.add_trace(panel)
fig = self.__add_der_zeros(fig, selected_pfits)
fig.update_layout(
template="plotly_dark",
title=f"""Signal profile (y-axis not comparable across curves)<br>
<sub>Channel: {channel_lab}; Stat: {stat_type.value}</sub>""".replace(
f"\n{' '*4*3}", "\n"
),
xaxis=dict(title=dtype.label, anchor="y"),
yaxis=dict(
showgrid=True,
zeroline=False,
visible=False,
),
**self.__secondary_yaxes_props(pfit_data),
autosize=False,
width=1000,
height=500,
)
return fig
def _plot(
self, data: DefaultDict[str, Dict[str, pd.DataFrame]], *args, **kwargs
) -> DefaultDict[str, Dict[str, go.Figure]]:
distance_type = distance.DistanceType.LAMINA_NORM
fig_data: DefaultDict[str, Dict[str, go.Figure]] = defaultdict(lambda: {})
assert "raw_data" in data
assert "poly_fit" in data
condition_data = []
for dirpath, dirdata in data["raw_data"].items():
assert isinstance(dirdata, pd.DataFrame)
assert dirpath in data["poly_fit"]
condition_lab = os.path.basename(os.path.dirname(dirpath))
distdata = dirdata.loc[
distance_type.value == dirdata["distance_type"], :
].copy()
distdata["condition"] = condition_lab
condition_data.append(distdata)
plottable_data = pd.concat(condition_data)
for channel_lab in list(set(plottable_data["channel"])):
channel_data = plottable_data.loc[
channel_lab == plottable_data["channel"], :
]
for stat_type in stat.ProfileStatType:
fig_data[self._stub][
f"{channel_lab}-{stat_type.value}"
] = self.__make_panel(
channel_data,
data["poly_fit"],
stat_type,
distance_type,
)
return fig_data
def make(
self, output_data: DefaultDict[str, Dict[str, Any]]
) -> Tuple[str, List[str]]:
fig_data = self._plot(output_data)
panels = "\n\t".join(
[
report.ReportBase.figure_to_html(
fig,
classes=[self._stub, f"{self.html_class}-panel", "hidden"],
data=dict(condition=os.path.basename(dpath)),
)
for dpath, fig in sorted(
fig_data[self._stub].items(), key=lambda x: x[0]
)
]
)
return (panels, sorted(fig_data[self._stub].keys()))
class ProfileMultiCondition(object):
html_class: str = "plot-multi-condition"
_stub: str
def __init__(self, stub: str):
super(ProfileMultiCondition, self).__init__()
self._stub = stub
def __make_scatter_trace(
self,
channel_data: pd.DataFrame,
pfit: Dict[str, Dict[str, Any]],
) -> go.Scatter:
condition_list: List[str] = sorted(list(set(channel_data["condition"])))
panel_data = []
for condition_idx in range(len(condition_list)):
condition_lab = condition_list[condition_idx]
condition_data = channel_data.loc[
condition_lab == channel_data["condition"], :
]
assert condition_lab in pfit
assert "pfit" in pfit[condition_lab]
x, y = pfit[condition_lab]["pfit"].linspace(200)
xx, yy = pfit[condition_lab]["pfit"].deriv().linspace(200)
xxx, yyy = pfit[condition_lab]["pfit"].deriv().deriv().linspace(200)
stat_lab = pfit[condition_lab]["stat"].value
panel_data.extend(
[
go.Scatter(
name=f"{condition_lab}_{stat_lab}_raw",
xaxis="x",
yaxis="y",
x=condition_data["x"],
y=condition_data[f"{stat_lab}_raw"],
mode="markers",
legendgroup=condition_lab,
marker=dict(
size=4,
opacity=0.5,
color=px.colors.qualitative.Pastel2[condition_idx],
),
showlegend=False,
),
go.Scatter(
name=f"{condition_lab}_{stat_lab}",
x=x,
y=y,
xaxis="x",
yaxis="y",
mode="lines",
legendgroup=condition_lab,
line_color=px.colors.qualitative.Dark2[condition_idx],
),
go.Scatter(
name=f"{condition_lab}_{stat_lab}_der1",
x=xx,
y=yy,
xaxis="x",
yaxis="y2",
mode="lines",
legendgroup=condition_lab,
showlegend=False,
line_color=px.colors.qualitative.Dark2[condition_idx],
),
go.Scatter(
name=f"{condition_lab}_{stat_lab}_der2",
x=xxx,
y=yyy,
xaxis="x",
yaxis="y3",
mode="lines",
legendgroup=condition_lab,
showlegend=False,
line_color=px.colors.qualitative.Dark2[condition_idx],
),
]
)
return panel_data
def __add_der_zeros(
self, fig: go.Figure, pfit_data: Dict[str, Dict[str, Any]]
) -> go.Figure:
pfit_sorted = sorted(pfit_data.items(), key=lambda x: x[0])
for pfit_idx in range(len(pfit_sorted)):
condition_lab, pfit = pfit_sorted[pfit_idx]
der_roots = stat.get_radial_profile_roots(pfit["pfit"])
for rid in range(len(der_roots)):
if np.isnan(der_roots[rid]):
continue
for pid in range(min(rid + 2, 3)):
panel_trace_y = np.concatenate(
[
p["y"]
for p in fig["data"]
if p["yaxis"] == get_axis_label("y", pid)
]
)
fig = add_line_trace(
fig,
der_roots[rid],
der_roots[rid],
panel_trace_y.min(),
panel_trace_y.max(),
line_dash="dot" if rid == 1 else "dash",
line_color=px.colors.qualitative.Set2[pfit_idx],
legendgroup=condition_lab,
showlegend=False,
xaxis="x",
yaxis=get_axis_label("y", pid),
)
return fig
def __make_panel(
self,
data: pd.DataFrame,
pfit_data: Dict[str, List[Dict[str, Any]]],
stat_type: stat.ProfileStatType,
dtype: distance.DistanceType,
) -> go.Figure:
channel_lab = data["channel"].tolist()[0]
selected_pfits: Dict[str, Dict[str, Any]] = {}
for condition_lab, pfit_list in pfit_data.items():
for pfit in pfit_list:
condition = pfit["stat"] == stat_type
condition = condition and pfit["distance_type"] == dtype.value
condition = condition and pfit["cname"] == channel_lab
if condition:
selected_pfits[
os.path.basename(os.path.dirname(condition_lab))
] = pfit
fig = make_subplots(rows=3, cols=1)
plot_data = self.__make_scatter_trace(
data,
selected_pfits,
)
for panel in plot_data:
fig.add_trace(panel)
fig = add_derivative_xaxis_to_profiles(fig)
fig = self.__add_der_zeros(fig, selected_pfits)
yranges = dict(
y=get_axis_range(plot_data, "y", "y"),
y2=get_axis_range(plot_data, "y", "y2"),
y3=get_axis_range(plot_data, "y", "y3"),
)
fig.update_layout(
template="plotly_dark",
title=f"""Signal profile<br>
<sub>Channel: {channel_lab}; Stat: {stat_type.value}</sub>""".replace(
f"\n{' '*4*3}", "\n"
),
xaxis=dict(title=dtype.label, anchor="y3"),
yaxis=dict(
domain=[0.66, 1],
range=yranges["y"],
title="Intensity (a.u.)",
),
yaxis2=dict(
domain=[0.33, 0.63],
range=yranges["y2"],
title="1st Derivative Intensity (a.u.)",
),
yaxis3=dict(
domain=[0, 0.30],
range=yranges["y3"],
title="2nd Derivative Intensity (a.u.)",
),
autosize=False,
width=1000,
height=1000,
)
return fig
def _plot(
self, data: DefaultDict[str, Dict[str, pd.DataFrame]], *args, **kwargs
) -> DefaultDict[str, Dict[str, go.Figure]]:
distance_type = distance.DistanceType.LAMINA_NORM
fig_data: DefaultDict[str, Dict[str, go.Figure]] = defaultdict(lambda: {})
assert "raw_data" in data
assert "poly_fit" in data
condition_data = []
for dirpath, dirdata in data["raw_data"].items():
assert isinstance(dirdata, pd.DataFrame)
assert dirpath in data["poly_fit"]
condition_lab = os.path.basename(os.path.dirname(dirpath))
distdata = dirdata.loc[
distance_type.value == dirdata["distance_type"], :
].copy()
distdata["condition"] = condition_lab
condition_data.append(distdata)
plottable_data = pd.concat(condition_data)
for channel_lab in list(set(plottable_data["channel"])):
channel_data = plottable_data.loc[
channel_lab == plottable_data["channel"], :
]
for stat_type in stat.ProfileStatType:
fig_data[self._stub][
f"{channel_lab}-{stat_type.value}"
] = self.__make_panel(
channel_data,
data["poly_fit"],
stat_type,
distance_type,
)
return fig_data
def make(
self, output_data: DefaultDict[str, Dict[str, Any]]
) -> Tuple[str, List[str]]:
fig_data = self._plot(output_data)
panels = "\n\t".join(
[
report.ReportBase.figure_to_html(
fig,
classes=[self._stub, f"{self.html_class}-panel", "hidden"],
data=dict(condition=os.path.basename(dpath)),
)
for dpath, fig in sorted(
fig_data[self._stub].items(), key=lambda x: x[0]
)
]
)
return (panels, sorted(fig_data[self._stub].keys()))
class ProfileSingleCondition(object):
html_class: str = "plot-single-condition"
_stub: str
def __init__(self, stub: str):
super(ProfileSingleCondition, self).__init__()
self._stub = stub
def __make_scatter_trace(
self,
name: str,
data: pd.DataFrame,
pfit_data: List[Dict[str, Any]],
) -> go.Scatter:
panel_data = []
for stat_type in stat.ProfileStatType:
pfit = [x for x in pfit_data if x["stat"] == stat_type]
assert 1 == len(pfit), pfit
assert "pfit" in pfit[0]
x, y = pfit[0]["pfit"].linspace(200)
xx, yy = pfit[0]["pfit"].deriv().linspace(200)
xxx, yyy = pfit[0]["pfit"].deriv().deriv().linspace(200)
panel_data.extend(
[
go.Scatter(
name=f"{name}_{stat_type.value}_raw",
xaxis="x",
yaxis="y",
x=data["x"],
y=data[f"{stat_type.value}_raw"],
mode="markers",
legendgroup=stat_type.value,
marker=dict(
size=4,
opacity=0.5,
color=px.colors.qualitative.Pastel2[stat_type.id],
),
showlegend=False,
),
go.Scatter(
name=f"{name}_{stat_type.value}",
x=x,
y=y,
xaxis="x",
yaxis="y",
mode="lines",
legendgroup=stat_type.value,
line_color=px.colors.qualitative.Dark2[stat_type.id],
),
go.Scatter(
name=f"{name}_{stat_type.value}_der1",
x=xx,
y=yy,
xaxis="x",
yaxis="y2",
mode="lines",
legendgroup=stat_type.value,
showlegend=False,
line_color=px.colors.qualitative.Dark2[stat_type.id],
),
go.Scatter(
name=f"{name}_{stat_type.value}_der2",
x=xxx,
y=yyy,
xaxis="x",
yaxis="y3",
mode="lines",
legendgroup=stat_type.value,
showlegend=False,
line_color=px.colors.qualitative.Dark2[stat_type.id],
),
]
)
return panel_data
def __add_der_zeros(
self, fig: go.Figure, pfit_data: List[Dict[str, Any]]
) -> go.Figure:
for pfit in pfit_data:
der_roots = stat.get_radial_profile_roots(pfit["pfit"])
for rid in range(len(der_roots)):
if np.isnan(der_roots[rid]):
continue
for pid in range(min(rid + 2, 3)):
panel_trace_y = np.concatenate(
[
p["y"]
for p in fig["data"]
if p["yaxis"] == get_axis_label("y", pid)
]
)
fig = add_line_trace(
fig,
der_roots[rid],
der_roots[rid],
panel_trace_y.min(),
panel_trace_y.max(),
line_dash="dot" if rid == 1 else "dash",
line_color=px.colors.qualitative.Set2[pfit["stat"].id],
legendgroup=pfit["stat"].value,
showlegend=False,
xaxis="x",
yaxis=get_axis_label("y", pid),
)
return fig
def __make_panel(
self,
data: pd.DataFrame,
pfit_data: List[Dict[str, Any]],
condition_lab: str,
channel_lab: str,
dtype: distance.DistanceType,
) -> go.Figure:
pfit = [
x
for x in pfit_data
if x["cname"] == channel_lab and x["distance_type"] == dtype.value
]
fig = make_subplots(rows=3, cols=1)
plot_data = self.__make_scatter_trace(
channel_lab,
data.loc[channel_lab == data["channel"]],
pfit,
)
for panel in plot_data:
fig.add_trace(panel)
fig = add_derivative_xaxis_to_profiles(fig)
fig = self.__add_der_zeros(fig, pfit)
yranges = dict(
y=get_axis_range(plot_data, "y", "y"),
y2=get_axis_range(plot_data, "y", "y2"),
y3=get_axis_range(plot_data, "y", "y3"),
)
fig.update_layout(
template="plotly_dark",
title=f"""Signal profile<br>
<sub>Condition: {condition_lab}; Channel: {channel_lab}</sub>""".replace(
f"\n{' '*4*3}", "\n"
),
xaxis=dict(title=dtype.label, anchor="y3"),
yaxis=dict(
domain=[0.66, 1],
range=yranges["y"],
title="Intensity (a.u.)",
),
yaxis2=dict(
domain=[0.33, 0.63],
range=yranges["y2"],
title="1st Derivative Intensity (a.u.)",
),
yaxis3=dict(
domain=[0, 0.30],
range=yranges["y3"],
title="2nd Derivative Intensity (a.u.)",
),
autosize=False,
width=1000,
height=1000,
)
return fig
def _plot(
self, data: DefaultDict[str, Dict[str, pd.DataFrame]], *args, **kwargs
) -> DefaultDict[str, Dict[str, go.Figure]]:
fig_data: DefaultDict[str, Dict[str, go.Figure]] = defaultdict(lambda: {})
assert "raw_data" in data
assert "poly_fit" in data
for dirpath, dirdata in data["raw_data"].items():
assert isinstance(dirdata, pd.DataFrame)
assert dirpath in data["poly_fit"]
condition_lab = os.path.basename(os.path.dirname(dirpath))
distance_type = distance.DistanceType.LAMINA_NORM
for channel_lab in set(dirdata["channel"]):
distdata = dirdata.loc[distance_type.value == dirdata["distance_type"]]
if 0 == distdata.shape[0]:
continue
fig_data[self._stub][
f"{channel_lab}-{condition_lab}"
] = self.__make_panel(
distdata,
data["poly_fit"][dirpath],
condition_lab,
channel_lab,
distance_type,
)
return fig_data
def make(
self, output_data: DefaultDict[str, Dict[str, Any]]
) -> Tuple[str, List[str]]:
fig_data = self._plot(output_data)
panels = "\n\t".join(
[
report.ReportBase.figure_to_html(
fig,
classes=[self._stub, f"{self.html_class}-panel", "hidden"],
data=dict(condition=os.path.basename(dpath)),
)
for dpath, fig in sorted(
fig_data[self._stub].items(), key=lambda x: x[0]
)
]
)
return (panels, sorted(fig_data[self._stub].keys()))
| 35.730201
| 87
| 0.468838
|
bb5a0bbced63a88b309119f077998e433b5ee44a
| 74
|
py
|
Python
|
plugins/google_cloud_compute/komand_google_cloud_compute/actions/start_instance/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 46
|
2019-06-05T20:47:58.000Z
|
2022-03-29T10:18:01.000Z
|
plugins/google_cloud_compute/komand_google_cloud_compute/actions/start_instance/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 386
|
2019-06-07T20:20:39.000Z
|
2022-03-30T17:35:01.000Z
|
plugins/google_cloud_compute/komand_google_cloud_compute/actions/start_instance/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 43
|
2019-07-09T14:13:58.000Z
|
2022-03-28T12:04:46.000Z
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
from .action import StartInstance
| 24.666667
| 39
| 0.783784
|
da8c885256571db706a0f035eeeee113e469326b
| 1,702
|
py
|
Python
|
tests/test_completor.py
|
tokorom/completor.vim
|
f4994feb7dc0cbb7988fb256a3974f3043ebdbab
|
[
"MIT"
] | null | null | null |
tests/test_completor.py
|
tokorom/completor.vim
|
f4994feb7dc0cbb7988fb256a3974f3043ebdbab
|
[
"MIT"
] | null | null | null |
tests/test_completor.py
|
tokorom/completor.vim
|
f4994feb7dc0cbb7988fb256a3974f3043ebdbab
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import mock
from completor import Completor, load_completer, get
class HelloCompleter(Completor):
filetype = 'hello'
def test_disabled(vim_mod):
com = HelloCompleter()
vim_mod.vars = {'completor_disable_hello': 1}
assert com.disabled
vim_mod.vars = {'completor_disable_hello': [b'hello']}
com.ft = 'hello'
assert com.disabled
vim_mod.vars = {}
assert not com.disabled
def test_load(vim_mod, monkeypatch):
from completor import Meta
vim_mod.eval = mock.Mock(return_value={})
vim_mod.vars = {}
with mock.patch.object(Meta, 'type_map',
{b'python.django': b'python'}):
assert load_completer(b'hello', b'') is None
assert get('python') is None
c = load_completer(b'python', b'os.')
assert c.input_data == 'os.'
assert get('python') is c
c = load_completer(b'python', b'#')
assert c is get('common')
c = load_completer(b'python.django', b'os.')
assert c is get('python')
vim_mod.current.buffer.options.update({
'omnifunc': b'csscomplete#CompleteCSS'})
vim_mod.vars = {
'completor_css_omni_trigger': rb'([\w-]+|@[\w-]*|[\w-]+:\s*[\w-]*)$'
}
assert load_completer(b'css', b'text') is get('omni')
def test_parse_config():
h = HelloCompleter()
args = h.parse_config('tests/test_config')
assert args == ['-I/home/hello', '-x', '--std=c11', '-hello=world',
'abcd', '-a', '123']
args = h.parse_config(['tests/invalid', 'tests/args',
'tests/tests_config'])
assert args == ['-x', 'c', '-D/usr/lib/', '123']
| 29.859649
| 79
| 0.577556
|
2096307ffa05d31d2fce8c9cf95b33cf029bf541
| 831
|
py
|
Python
|
downloader/upl.py
|
andre487/downloader487
|
687c0177019d27990b7b29a2b98af360558e8ca4
|
[
"MIT"
] | null | null | null |
downloader/upl.py
|
andre487/downloader487
|
687c0177019d27990b7b29a2b98af360558e8ca4
|
[
"MIT"
] | null | null | null |
downloader/upl.py
|
andre487/downloader487
|
687c0177019d27990b7b29a2b98af360558e8ca4
|
[
"MIT"
] | null | null | null |
import logging
import os
from typing import Optional
import boto3
class Uploader:
def __init__(
self, s3_endpoint: str, s3_region: str, s3_bucket: str,
s3_access: Optional[str], s3_secret: Optional[str],
) -> None:
self._s3_bucket = s3_bucket
self._client = boto3.client(
service_name='s3',
endpoint_url=s3_endpoint,
region_name=s3_region,
aws_access_key_id=s3_access,
aws_secret_access_key=s3_secret,
)
def upload(self, file_path: str) -> bool:
if not os.path.exists(file_path):
logging.error(f'File not found: {file_path}')
return False
s3_key = os.path.basename(file_path)
self._client.upload_file(file_path, self._s3_bucket, s3_key)
return True
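# Hypothetical usage sketch, not part of the original module; the endpoint, region,
# bucket and environment variable names are placeholders:
#
#   uploader = Uploader(
#       s3_endpoint="https://storage.example.com",
#       s3_region="us-east-1",
#       s3_bucket="my-bucket",
#       s3_access=os.environ.get("S3_ACCESS_KEY"),
#       s3_secret=os.environ.get("S3_SECRET_KEY"),
#   )
#   uploader.upload("/path/to/file.bin")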
| 26.806452
| 68
| 0.618532
|
4ac52034aed42750c47693b1690101494efd1c5b
| 1,663
|
py
|
Python
|
examples/example_telegram_monitor.py
|
cyberjunky/xknx
|
c708ed6a2ca6449b74c6cea197d658e3399b99d1
|
[
"MIT"
] | 1
|
2020-12-09T16:17:49.000Z
|
2020-12-09T16:17:49.000Z
|
examples/example_telegram_monitor.py
|
cyberjunky/xknx
|
c708ed6a2ca6449b74c6cea197d658e3399b99d1
|
[
"MIT"
] | null | null | null |
examples/example_telegram_monitor.py
|
cyberjunky/xknx
|
c708ed6a2ca6449b74c6cea197d658e3399b99d1
|
[
"MIT"
] | null | null | null |
"""Example for the telegram monitor callback."""
import asyncio
import getopt
import sys
from xknx import XKNX
from xknx.telegram import AddressFilter
async def telegram_received_cb(telegram):
"""Do something with the received telegram."""
print("Telegram received: {0}".format(telegram))
return True
def show_help():
"""Print Help."""
print("Telegram filter.")
print("")
print("Usage:")
print("")
print(__file__, " Listen to all telegrams")
print(__file__, "-f --filter 1/2/*,1/4/[5-6] Filter for specific group addresses")
print(__file__, "-h --help Print help")
print("")
async def monitor(address_filters):
"""Set telegram_received_cb within XKNX and connect to KNX/IP device in daemon mode."""
xknx = XKNX()
xknx.telegram_queue.register_telegram_received_cb(
telegram_received_cb, address_filters)
await xknx.start(daemon_mode=True)
await xknx.stop()
async def main(argv):
"""Parse command line arguments and start monitor."""
try:
opts, _ = getopt.getopt(argv, "hf:", ["help", "filter="])
except getopt.GetoptError:
show_help()
sys.exit(2)
address_filters = None
for opt, arg in opts:
if opt in ['-h', '--help']:
show_help()
sys.exit()
if opt in ['-f', '--filter']:
address_filters = list(map(AddressFilter, arg.split(',')))
await monitor(address_filters)
if __name__ == "__main__":
# pylint: disable=invalid-name
loop = asyncio.get_event_loop()
loop.run_until_complete(main(sys.argv[1:]))
loop.close()
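Going by show_help above, a hypothetical invocation of this example (the group-address patterns are illustrative) would be:
#   python3 example_telegram_monitor.py --filter 1/2/*,1/4/[5-6]
#   python3 example_telegram_monitor.py            # listen to all telegrams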
| 28.186441 | 91 | 0.624775 |
a5e7caecdb32bc8cfe2c5a573bea031d16477071 | 52,509 | py | Python | nova/tests/unit/api/openstack/compute/test_server_actions.py | bbc/nova | 655cd30c9fd1753323f725f45581a56f1d17c075 | ["Apache-2.0"] | 1 | 2021-06-10T17:08:15.000Z | 2021-06-10T17:08:15.000Z | nova/tests/unit/api/openstack/compute/test_server_actions.py | bbc/nova | 655cd30c9fd1753323f725f45581a56f1d17c075 | ["Apache-2.0"] | null | null | null | nova/tests/unit/api/openstack/compute/test_server_actions.py | bbc/nova | 655cd30c9fd1753323f725f45581a56f1d17c075 | ["Apache-2.0"] | null | null | null |
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
import fixtures
import mock
from oslo_utils.fixture import uuidsentinel as uuids
from oslo_utils import uuidutils
import webob
from nova.api.openstack.compute import servers as servers_v21
from nova.compute import api as compute_api
from nova.compute import task_states
from nova.compute import vm_states
import nova.conf
from nova import exception
from nova.image import glance
from nova import objects
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_instance
CONF = nova.conf.CONF
FAKE_UUID = fakes.FAKE_UUID
class MockSetAdminPassword(object):
def __init__(self):
self.instance_id = None
self.password = None
def __call__(self, context, instance, password):
self.instance_id = instance['uuid']
self.password = password
@ddt.ddt
class ServerActionsControllerTestV21(test.TestCase):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_base_url = 'http://localhost:9292/images/'
image_href = image_base_url + '/' + image_uuid
servers = servers_v21
validation_error = exception.ValidationError
request_too_large_error = exception.ValidationError
image_url = None
def setUp(self):
super(ServerActionsControllerTestV21, self).setUp()
self.flags(group='glance', api_servers=['http://localhost:9292'])
self.stub_out('nova.compute.api.API.get',
fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
project_id=fakes.FAKE_PROJECT_ID,
host='fake_host'))
self.stub_out('nova.objects.Instance.save', lambda *a, **kw: None)
fakes.stub_out_compute_api_snapshot(self)
self.useFixture(nova_fixtures.GlanceFixture(self))
self.flags(enable_instance_password=True, group='api')
# TODO(stephenfin): Use uuidsentinel instead of this
self._image_href = '155d900f-4e14-4e4c-a73d-069cbf4541e6'
self.controller = self._get_controller()
self.compute_api = self.controller.compute_api
# We don't care about anything getting as far as hitting the compute
# RPC API so we just mock it out here.
mock_rpcapi = mock.patch.object(self.compute_api, 'compute_rpcapi')
mock_rpcapi.start()
self.addCleanup(mock_rpcapi.stop)
# The project_id here matches what is used by default in
# fake_compute_get which need to match for policy checks.
self.req = fakes.HTTPRequest.blank('',
project_id=fakes.FAKE_PROJECT_ID)
self.context = self.req.environ['nova.context']
self.image_api = glance.API()
# Assume that anything that hits the compute API and looks for a
# RequestSpec doesn't care about it, since testing logic that deep
# should be done in nova.tests.unit.compute.test_api.
mock_reqspec = mock.patch('nova.objects.RequestSpec')
mock_reqspec.start()
self.addCleanup(mock_reqspec.stop)
# Similarly we shouldn't care about anything hitting conductor from
# these tests.
mock_conductor = mock.patch.object(
self.controller.compute_api, 'compute_task_api')
mock_conductor.start()
self.addCleanup(mock_conductor.stop)
# Assume that none of the tests are using ports with resource requests.
self.mock_list_port = self.useFixture(
fixtures.MockPatch('nova.network.neutron.API.list_ports')).mock
self.mock_list_port.return_value = {'ports': []}
def _get_controller(self):
return self.servers.ServersController()
def _test_locked_instance(self, action, method=None, body_map=None,
compute_api_args_map=None):
if body_map is None:
body_map = {}
if compute_api_args_map is None:
compute_api_args_map = {}
args, kwargs = compute_api_args_map.get(action, ((), {}))
uuid = uuidutils.generate_uuid()
context = self.req.environ['nova.context']
instance = fake_instance.fake_db_instance(
id=1, uuid=uuid, vm_state=vm_states.ACTIVE, task_state=None,
project_id=context.project_id,
user_id=context.user_id)
instance = objects.Instance._from_db_object(
self.context, objects.Instance(), instance)
with test.nested(
mock.patch.object(compute_api.API, 'get',
return_value=instance),
mock.patch.object(compute_api.API, method,
side_effect=exception.InstanceIsLocked(
instance_uuid=instance['uuid'])),
) as (mock_get, mock_method):
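            # Resolve the controller method named by 'action' and expect the instance lock to surface as HTTP 409.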
controller_function = 'self.controller.' + action
self.assertRaises(webob.exc.HTTPConflict,
eval(controller_function),
self.req, instance['uuid'],
body=body_map.get(action))
expected_attrs = ['flavor', 'numa_topology']
if method == 'resize':
expected_attrs.append('services')
mock_get.assert_called_once_with(self.context, uuid,
expected_attrs=expected_attrs,
cell_down_support=False)
mock_method.assert_called_once_with(self.context, instance,
*args, **kwargs)
def test_actions_with_locked_instance(self):
actions = ['_action_resize', '_action_confirm_resize',
'_action_revert_resize', '_action_reboot',
'_action_rebuild']
method_translations = {'_action_resize': 'resize',
'_action_confirm_resize': 'confirm_resize',
'_action_revert_resize': 'revert_resize',
'_action_reboot': 'reboot',
'_action_rebuild': 'rebuild'}
body_map = {'_action_resize': {'resize': {'flavorRef': '2'}},
'_action_reboot': {'reboot': {'type': 'HARD'}},
'_action_rebuild': {'rebuild': {
'imageRef': self.image_uuid,
'adminPass': 'TNc53Dr8s7vw'}}}
args_map = {'_action_resize': (('2'), {'auto_disk_config': None}),
'_action_confirm_resize': ((), {}),
'_action_reboot': (('HARD',), {}),
'_action_rebuild': ((self.image_uuid,
'TNc53Dr8s7vw'), {})}
for action in actions:
method = method_translations.get(action)
self._test_locked_instance(action, method=method,
body_map=body_map,
compute_api_args_map=args_map)
def test_reboot_hard(self):
body = dict(reboot=dict(type="HARD"))
self.controller._action_reboot(self.req, FAKE_UUID, body=body)
def test_reboot_soft(self):
body = dict(reboot=dict(type="SOFT"))
self.controller._action_reboot(self.req, FAKE_UUID, body=body)
def test_reboot_incorrect_type(self):
body = dict(reboot=dict(type="NOT_A_TYPE"))
self.assertRaises(self.validation_error,
self.controller._action_reboot,
self.req, FAKE_UUID, body=body)
def test_reboot_missing_type(self):
body = dict(reboot=dict())
self.assertRaises(self.validation_error,
self.controller._action_reboot,
self.req, FAKE_UUID, body=body)
def test_reboot_none(self):
body = dict(reboot=dict(type=None))
self.assertRaises(self.validation_error,
self.controller._action_reboot,
self.req, FAKE_UUID, body=body)
def test_reboot_not_found(self):
body = dict(reboot=dict(type="HARD"))
with mock.patch('nova.compute.api.API.get',
side_effect=exception.InstanceNotFound(
instance_id=uuids.fake)):
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._action_reboot,
self.req, uuids.fake, body=body)
def test_reboot_raises_conflict_on_invalid_state(self):
body = dict(reboot=dict(type="HARD"))
def fake_reboot(*args, **kwargs):
raise exception.InstanceInvalidState(attr='fake_attr',
state='fake_state', method='fake_method',
instance_uuid='fake')
self.stub_out('nova.compute.api.API.reboot', fake_reboot)
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_reboot,
self.req, FAKE_UUID, body=body)
def test_reboot_soft_with_soft_in_progress_raises_conflict(self):
body = dict(reboot=dict(type="SOFT"))
self.stub_out('nova.compute.api.API.get',
fakes.fake_compute_get(project_id=fakes.FAKE_PROJECT_ID,
vm_state=vm_states.ACTIVE,
task_state=task_states.REBOOTING))
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_reboot,
self.req, FAKE_UUID, body=body)
def test_reboot_hard_with_soft_in_progress_does_not_raise(self):
body = dict(reboot=dict(type="HARD"))
self.stub_out('nova.compute.api.API.get',
fakes.fake_compute_get(project_id=fakes.FAKE_PROJECT_ID,
vm_state=vm_states.ACTIVE,
task_state=task_states.REBOOTING))
self.controller._action_reboot(self.req, FAKE_UUID, body=body)
def test_reboot_hard_with_hard_in_progress(self):
body = dict(reboot=dict(type="HARD"))
self.stub_out('nova.compute.api.API.get',
fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
vm_state=vm_states.ACTIVE,
task_state=task_states.REBOOTING_HARD))
self.controller._action_reboot(self.req, FAKE_UUID, body=body)
def test_reboot_soft_with_hard_in_progress_raises_conflict(self):
body = dict(reboot=dict(type="SOFT"))
self.stub_out('nova.compute.api.API.get',
fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
vm_state=vm_states.ACTIVE,
task_state=task_states.REBOOTING_HARD))
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_reboot,
self.req, FAKE_UUID, body=body)
def _test_rebuild_preserve_ephemeral(self, value=None):
return_server = fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
image_ref='2',
vm_state=vm_states.ACTIVE,
host='fake_host')
self.stub_out('nova.compute.api.API.get', return_server)
body = {
"rebuild": {
"imageRef": self._image_href,
},
}
if value is not None:
body['rebuild']['preserve_ephemeral'] = value
with mock.patch.object(compute_api.API, 'rebuild') as mock_rebuild:
self.controller._action_rebuild(self.req, FAKE_UUID, body=body)
if value is not None:
mock_rebuild.assert_called_once_with(self.context, mock.ANY,
self._image_href, mock.ANY, preserve_ephemeral=value)
else:
mock_rebuild.assert_called_once_with(self.context, mock.ANY,
self._image_href, mock.ANY)
def test_rebuild_preserve_ephemeral_true(self):
self._test_rebuild_preserve_ephemeral(True)
def test_rebuild_preserve_ephemeral_false(self):
self._test_rebuild_preserve_ephemeral(False)
def test_rebuild_preserve_ephemeral_default(self):
self._test_rebuild_preserve_ephemeral()
def test_rebuild_accepted_minimum(self):
return_server = fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
image_ref='2',
vm_state=vm_states.ACTIVE, host='fake_host')
self.stub_out('nova.compute.api.API.get', return_server)
self_href = 'http://localhost/v2/servers/%s' % FAKE_UUID
body = {
"rebuild": {
"imageRef": self._image_href,
},
}
robj = self.controller._action_rebuild(self.req, FAKE_UUID, body=body)
body = robj.obj
self.assertEqual(body['server']['image']['id'], '2')
self.assertEqual(len(body['server']['adminPass']),
CONF.password_length)
self.assertEqual(robj['location'], self_href)
        # pep3333 requires applications to produce headers which are str
self.assertEqual(str, type(robj['location']))
def test_rebuild_instance_with_image_uuid(self):
info = dict(image_href_in_call=None)
def rebuild(self2, context, instance, image_href, *args, **kwargs):
info['image_href_in_call'] = image_href
self.stub_out('nova.compute.api.API.rebuild', rebuild)
# proper local hrefs must start with 'http://localhost/v2/'
body = {
'rebuild': {
'imageRef': self.image_uuid,
},
}
self.controller._action_rebuild(self.req, FAKE_UUID, body=body)
self.assertEqual(info['image_href_in_call'], self.image_uuid)
def test_rebuild_instance_with_image_href_uses_uuid(self):
# proper local hrefs must start with 'http://localhost/v2/'
body = {
'rebuild': {
'imageRef': self.image_href,
},
}
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_accepted_minimum_pass_disabled(self):
# run with enable_instance_password disabled to verify adminPass
# is missing from response. See lp bug 921814
self.flags(enable_instance_password=False, group='api')
return_server = fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
image_ref='2',
vm_state=vm_states.ACTIVE, host='fake_host')
self.stub_out('nova.compute.api.API.get', return_server)
self_href = 'http://localhost/v2/servers/%s' % FAKE_UUID
body = {
"rebuild": {
"imageRef": self._image_href,
},
}
robj = self.controller._action_rebuild(self.req, FAKE_UUID, body=body)
body = robj.obj
self.assertEqual(body['server']['image']['id'], '2')
self.assertNotIn("adminPass", body['server'])
self.assertEqual(robj['location'], self_href)
        # pep3333 requires applications to produce headers which are str
self.assertEqual(str, type(robj['location']))
@ddt.data(
exception.InstanceIsLocked(instance_uuid=uuids.instance),
exception.OperationNotSupportedForVTPM(
instance_uuid=uuids.instance, operation='foo'),
)
@mock.patch('nova.compute.api.API.rebuild')
def test_rebuild__http_conflict_error(self, exc, mock_rebuild):
mock_rebuild.side_effect = exc
self.assertRaises(
webob.exc.HTTPConflict,
self.controller._action_rebuild,
self.req, uuids.instance,
body={'rebuild': {'imageRef': uuids.image}})
def test_rebuild_raises_conflict_on_invalid_state(self):
body = {'rebuild': {'imageRef': uuids.image}}
def fake_rebuild(*args, **kwargs):
raise exception.InstanceInvalidState(attr='fake_attr',
state='fake_state', method='fake_method',
instance_uuid='fake')
self.stub_out('nova.compute.api.API.rebuild', fake_rebuild)
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_accepted_with_metadata(self):
metadata = {'new': 'metadata'}
return_server = fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
metadata=metadata,
vm_state=vm_states.ACTIVE, host='fake_host')
self.stub_out('nova.compute.api.API.get', return_server)
body = {
"rebuild": {
"imageRef": self._image_href,
"metadata": metadata,
},
}
body = self.controller._action_rebuild(self.req, FAKE_UUID,
body=body).obj
self.assertEqual(body['server']['metadata'], metadata)
def test_rebuild_accepted_with_bad_metadata(self):
body = {
"rebuild": {
"imageRef": self._image_href,
"metadata": "stack",
},
}
self.assertRaises(self.validation_error,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_with_too_large_metadata(self):
body = {
"rebuild": {
"imageRef": self._image_href,
"metadata": {
256 * "k": "value"
}
}
}
self.assertRaises(self.request_too_large_error,
self.controller._action_rebuild, self.req,
FAKE_UUID, body=body)
def test_rebuild_bad_entity(self):
body = {
"rebuild": {
"imageId": self._image_href,
},
}
self.assertRaises(self.validation_error,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_admin_pass(self):
return_server = fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
image_ref='2',
vm_state=vm_states.ACTIVE, host='fake_host')
self.stub_out('nova.compute.api.API.get', return_server)
body = {
"rebuild": {
"imageRef": self._image_href,
"adminPass": "asdf",
},
}
body = self.controller._action_rebuild(self.req, FAKE_UUID,
body=body).obj
self.assertEqual(body['server']['image']['id'], '2')
self.assertEqual(body['server']['adminPass'], 'asdf')
def test_rebuild_admin_pass_pass_disabled(self):
# run with enable_instance_password disabled to verify adminPass
# is missing from response. See lp bug 921814
self.flags(enable_instance_password=False, group='api')
return_server = fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
image_ref='2',
vm_state=vm_states.ACTIVE, host='fake_host')
self.stub_out('nova.compute.api.API.get', return_server)
body = {
"rebuild": {
"imageRef": self._image_href,
"adminPass": "asdf",
},
}
body = self.controller._action_rebuild(self.req, FAKE_UUID,
body=body).obj
self.assertEqual(body['server']['image']['id'], '2')
self.assertNotIn('adminPass', body['server'])
def test_rebuild_server_not_found(self):
body = {
"rebuild": {
"imageRef": self._image_href,
},
}
with mock.patch('nova.compute.api.API.get',
side_effect=exception.InstanceNotFound(
instance_id=FAKE_UUID)):
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_with_bad_image(self):
body = {
"rebuild": {
"imageRef": "foo",
},
}
self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_accessIP(self):
attributes = {
'access_ip_v4': '172.19.0.1',
'access_ip_v6': 'fe80::1',
}
body = {
"rebuild": {
"imageRef": self._image_href,
"accessIPv4": "172.19.0.1",
"accessIPv6": "fe80::1",
},
}
data = {'changes': {}}
orig_get = compute_api.API.get
def wrap_get(*args, **kwargs):
data['instance'] = orig_get(*args, **kwargs)
return data['instance']
def fake_save(context, **kwargs):
data['changes'].update(data['instance'].obj_get_changes())
self.stub_out('nova.compute.api.API.get', wrap_get)
self.stub_out('nova.objects.Instance.save', fake_save)
self.controller._action_rebuild(self.req, FAKE_UUID, body=body)
self.assertEqual(self._image_href, data['changes']['image_ref'])
self.assertEqual("", data['changes']['kernel_id'])
self.assertEqual("", data['changes']['ramdisk_id'])
self.assertEqual(task_states.REBUILDING, data['changes']['task_state'])
self.assertEqual(0, data['changes']['progress'])
for attr, value in attributes.items():
self.assertEqual(value, str(data['changes'][attr]))
def test_rebuild_when_kernel_not_exists(self):
def return_image_meta(*args, **kwargs):
image_meta_table = {
'2': {'id': uuids.image_id, 'status': 'active',
'container_format': 'ari'},
'155d900f-4e14-4e4c-a73d-069cbf4541e6':
{'id': uuids.image_id, 'status': 'active',
'container_format': 'raw',
'properties': {'kernel_id': 1, 'ramdisk_id': 2}},
}
image_id = args[2]
try:
image_meta = image_meta_table[str(image_id)]
except KeyError:
raise exception.ImageNotFound(image_id=image_id)
return image_meta
self.stub_out('nova.tests.fixtures.GlanceFixture.show',
return_image_meta)
body = {
"rebuild": {
"imageRef": "155d900f-4e14-4e4c-a73d-069cbf4541e6",
},
}
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_rebuild_proper_kernel_ram(self):
instance_meta = {'kernel_id': None, 'ramdisk_id': None}
orig_get = compute_api.API.get
def wrap_get(*args, **kwargs):
inst = orig_get(*args, **kwargs)
instance_meta['instance'] = inst
return inst
def fake_save(context, **kwargs):
instance = instance_meta['instance']
for key in instance_meta.keys():
if key in instance.obj_what_changed():
instance_meta[key] = instance[key]
def return_image_meta(*args, **kwargs):
image_meta_table = {
uuids.kernel_image_id: {
'id': uuids.kernel_image_id,
'status': 'active',
'container_format': 'aki'},
uuids.ramdisk_image_id: {
'id': uuids.ramdisk_image_id,
'status': 'active',
'container_format': 'ari'},
'155d900f-4e14-4e4c-a73d-069cbf4541e6':
{'id': '155d900f-4e14-4e4c-a73d-069cbf4541e6',
'status': 'active',
'container_format': 'raw',
'properties': {'kernel_id': uuids.kernel_image_id,
'ramdisk_id': uuids.ramdisk_image_id}},
}
image_id = args[2]
try:
image_meta = image_meta_table[str(image_id)]
except KeyError:
raise exception.ImageNotFound(image_id=image_id)
return image_meta
self.stub_out('nova.tests.fixtures.GlanceFixture.show',
return_image_meta)
self.stub_out('nova.compute.api.API.get', wrap_get)
self.stub_out('nova.objects.Instance.save', fake_save)
body = {
"rebuild": {
"imageRef": "155d900f-4e14-4e4c-a73d-069cbf4541e6",
},
}
self.controller._action_rebuild(self.req, FAKE_UUID, body=body).obj
self.assertEqual(instance_meta['kernel_id'], uuids.kernel_image_id)
self.assertEqual(instance_meta['ramdisk_id'], uuids.ramdisk_image_id)
@mock.patch.object(compute_api.API, 'rebuild')
def test_rebuild_instance_raise_auto_disk_config_exc(self, mock_rebuild):
body = {
"rebuild": {
"imageRef": self._image_href,
},
}
mock_rebuild.side_effect = exception.AutoDiskConfigDisabledByImage(
image='dummy')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
@mock.patch.object(compute_api.API, 'rebuild')
def test_rebuild_raise_invalid_architecture_exc(self, mock_rebuild):
body = {
"rebuild": {
"imageRef": self._image_href,
},
}
mock_rebuild.side_effect = exception.InvalidArchitectureName('arm64')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
@mock.patch.object(compute_api.API, 'rebuild')
def test_rebuild_raise_invalid_volume_exc(self, mock_rebuild):
"""Make sure that we can't rebuild with an InvalidVolume exception."""
body = {
"rebuild": {
"imageRef": self._image_href,
},
}
mock_rebuild.side_effect = exception.InvalidVolume('error')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=body)
def test_resize_server(self):
body = dict(resize=dict(flavorRef="http://localhost/3"))
self.resize_called = False
def resize_mock(*args, **kwargs):
self.resize_called = True
self.stub_out('nova.compute.api.API.resize', resize_mock)
self.controller._action_resize(self.req, FAKE_UUID, body=body)
self.assertTrue(self.resize_called)
def test_resize_server_no_flavor(self):
body = dict(resize=dict())
self.assertRaises(self.validation_error,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
def test_resize_server_no_flavor_ref(self):
body = dict(resize=dict(flavorRef=None))
self.assertRaises(self.validation_error,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
def test_resize_server_with_extra_arg(self):
body = dict(resize=dict(favorRef="http://localhost/3",
extra_arg="extra_arg"))
self.assertRaises(self.validation_error,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
def test_resize_server_invalid_flavor_ref(self):
body = dict(resize=dict(flavorRef=1.2))
self.assertRaises(self.validation_error,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
def test_resize_with_server_not_found(self):
body = dict(resize=dict(flavorRef="http://localhost/3"))
with mock.patch('nova.compute.api.API.get',
side_effect=exception.InstanceNotFound(
instance_id=FAKE_UUID)):
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
def test_resize_with_image_exceptions(self):
body = dict(resize=dict(flavorRef="http://localhost/3"))
self.resize_called = 0
image_id = 'fake_image_id'
exceptions = [
(exception.ImageNotAuthorized(image_id=image_id),
webob.exc.HTTPUnauthorized),
(exception.ImageNotFound(image_id=image_id),
webob.exc.HTTPBadRequest),
(exception.Invalid, webob.exc.HTTPBadRequest),
(exception.AutoDiskConfigDisabledByImage(image=image_id),
webob.exc.HTTPBadRequest),
]
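        # Transpose the (raised, expected) pairs into two parallel iterators, consumed one element per resize call.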
raised, expected = map(iter, zip(*exceptions))
def _fake_resize(obj, context, instance, flavor_id,
auto_disk_config=None):
self.resize_called += 1
raise next(raised)
self.stub_out('nova.compute.api.API.resize', _fake_resize)
for call_no in range(len(exceptions)):
next_exception = next(expected)
actual = self.assertRaises(next_exception,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
if (isinstance(exceptions[call_no][0],
exception.NoValidHost)):
self.assertEqual(actual.explanation,
'No valid host was found. Bad host')
elif (isinstance(exceptions[call_no][0],
exception.AutoDiskConfigDisabledByImage)):
self.assertEqual(actual.explanation,
'Requested image fake_image_id has automatic'
' disk resize disabled.')
self.assertEqual(self.resize_called, call_no + 1)
@mock.patch('nova.compute.api.API.resize',
side_effect=exception.CannotResizeDisk(reason=''))
def test_resize_raises_cannot_resize_disk(self, mock_resize):
body = dict(resize=dict(flavorRef="http://localhost/3"))
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
@mock.patch('nova.compute.api.API.resize',
side_effect=exception.FlavorNotFound(reason='',
flavor_id='fake_id'))
def test_resize_raises_flavor_not_found(self, mock_resize):
body = dict(resize=dict(flavorRef="http://localhost/3"))
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
def test_resize_with_too_many_instances(self):
body = dict(resize=dict(flavorRef="http://localhost/3"))
def fake_resize(*args, **kwargs):
raise exception.TooManyInstances(message="TooManyInstance")
self.stub_out('nova.compute.api.API.resize', fake_resize)
self.assertRaises(webob.exc.HTTPForbidden,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
def test_resize_raises_conflict_on_invalid_state(self):
body = dict(resize=dict(flavorRef="http://localhost/3"))
def fake_resize(*args, **kwargs):
raise exception.InstanceInvalidState(attr='fake_attr',
state='fake_state', method='fake_method',
instance_uuid='fake')
self.stub_out('nova.compute.api.API.resize', fake_resize)
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
@mock.patch.object(compute_api.API, 'resize')
def test_resize_instance_raise_auto_disk_config_exc(self, mock_resize):
mock_resize.side_effect = exception.AutoDiskConfigDisabledByImage(
image='dummy')
body = dict(resize=dict(flavorRef="http://localhost/3"))
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
@mock.patch('nova.compute.api.API.resize',
side_effect=exception.PciRequestAliasNotDefined(
alias='fake_name'))
def test_resize_pci_alias_not_defined(self, mock_resize):
# Tests that PciRequestAliasNotDefined is translated to a 400 error.
body = dict(resize=dict(flavorRef="http://localhost/3"))
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
def test_confirm_resize_server(self):
body = dict(confirmResize=None)
self.confirm_resize_called = False
def cr_mock(*args):
self.confirm_resize_called = True
self.stub_out('nova.compute.api.API.confirm_resize', cr_mock)
self.controller._action_confirm_resize(self.req, FAKE_UUID, body=body)
self.assertTrue(self.confirm_resize_called)
def test_confirm_resize_migration_not_found(self):
body = dict(confirmResize=None)
def confirm_resize_mock(*args):
raise exception.MigrationNotFoundByStatus(instance_id=1,
status='finished')
self.stub_out('nova.compute.api.API.confirm_resize',
confirm_resize_mock)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_confirm_resize,
self.req, FAKE_UUID, body=body)
def test_confirm_resize_raises_conflict_on_invalid_state(self):
body = dict(confirmResize=None)
def fake_confirm_resize(*args, **kwargs):
raise exception.InstanceInvalidState(attr='fake_attr',
state='fake_state', method='fake_method',
instance_uuid='fake')
self.stub_out('nova.compute.api.API.confirm_resize',
fake_confirm_resize)
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_confirm_resize,
self.req, FAKE_UUID, body=body)
def test_revert_resize_migration_not_found(self):
body = dict(revertResize=None)
def revert_resize_mock(*args):
raise exception.MigrationNotFoundByStatus(instance_id=1,
status='finished')
self.stub_out('nova.compute.api.API.revert_resize',
revert_resize_mock)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_revert_resize,
self.req, FAKE_UUID, body=body)
def test_revert_resize_server_not_found(self):
body = dict(revertResize=None)
with mock.patch('nova.compute.api.API.get',
side_effect=exception.InstanceNotFound(
instance_id='bad_server_id')):
            self.assertRaises(webob.exc.HTTPNotFound,
self.controller._action_revert_resize,
self.req, "bad_server_id", body=body)
def test_revert_resize_server(self):
body = dict(revertResize=None)
self.revert_resize_called = False
def revert_mock(*args):
self.revert_resize_called = True
self.stub_out('nova.compute.api.API.revert_resize', revert_mock)
body = self.controller._action_revert_resize(self.req, FAKE_UUID,
body=body)
self.assertTrue(self.revert_resize_called)
def test_revert_resize_raises_conflict_on_invalid_state(self):
body = dict(revertResize=None)
def fake_revert_resize(*args, **kwargs):
raise exception.InstanceInvalidState(attr='fake_attr',
state='fake_state', method='fake_method',
instance_uuid='fake')
self.stub_out('nova.compute.api.API.revert_resize',
fake_revert_resize)
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_revert_resize,
self.req, FAKE_UUID, body=body)
def test_create_image(self):
body = {
'createImage': {
'name': 'Snapshot 1',
},
}
response = self.controller._action_create_image(self.req, FAKE_UUID,
body=body)
location = response.headers['Location']
self.assertEqual(self.image_url + '123' if self.image_url else
self.image_api.generate_image_url('123', self.context),
location)
def test_create_image_v2_45(self):
"""Tests the createImage server action API with the 2.45 microversion
where there is a response body but no Location header.
"""
body = {
'createImage': {
'name': 'Snapshot 1',
},
}
req = fakes.HTTPRequest.blank('', version='2.45')
response = self.controller._action_create_image(req, FAKE_UUID,
body=body)
self.assertIsInstance(response, dict)
self.assertEqual('123', response['image_id'])
def test_create_image_name_too_long(self):
long_name = 'a' * 260
body = {
'createImage': {
'name': long_name,
},
}
self.assertRaises(self.validation_error,
self.controller._action_create_image, self.req,
FAKE_UUID, body=body)
def _do_test_create_volume_backed_image(
self, extra_properties, mock_vol_create_side_effect=None):
def _fake_id(x):
return '%s-%s-%s-%s' % (x * 8, x * 4, x * 4, x * 12)
body = dict(createImage=dict(name='snapshot_of_volume_backed'))
if extra_properties:
body['createImage']['metadata'] = extra_properties
image_service = glance.get_default_image_service()
bdm = [dict(volume_id=_fake_id('a'),
volume_size=1,
device_name='vda',
delete_on_termination=False)]
def fake_block_device_mapping_get_all_by_instance(context, inst_id,
use_slave=False):
return [fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': _fake_id('a'),
'source_type': 'snapshot',
'destination_type': 'volume',
'volume_size': 1,
'device_name': 'vda',
'snapshot_id': 1,
'boot_index': 0,
'delete_on_termination': False,
'no_device': None})]
self.stub_out('nova.db.api.block_device_mapping_get_all_by_instance',
fake_block_device_mapping_get_all_by_instance)
system_metadata = dict(image_kernel_id=_fake_id('b'),
image_ramdisk_id=_fake_id('c'),
image_root_device_name='/dev/vda',
image_block_device_mapping=str(bdm),
image_container_format='ami')
instance = fakes.fake_compute_get(project_id=fakes.FAKE_PROJECT_ID,
image_ref=uuids.fake,
vm_state=vm_states.ACTIVE,
root_device_name='/dev/vda',
system_metadata=system_metadata)
self.stub_out('nova.compute.api.API.get', instance)
volume = dict(id=_fake_id('a'),
size=1,
host='fake',
display_description='fake')
snapshot = dict(id=_fake_id('d'))
with test.nested(
mock.patch.object(
self.controller.compute_api.volume_api, 'get_absolute_limits',
return_value={'totalSnapshotsUsed': 0,
'maxTotalSnapshots': 10}),
mock.patch.object(self.controller.compute_api.compute_rpcapi,
'quiesce_instance',
side_effect=exception.InstanceQuiesceNotSupported(
instance_id='fake', reason='test')),
mock.patch.object(self.controller.compute_api.volume_api, 'get',
return_value=volume),
mock.patch.object(self.controller.compute_api.volume_api,
'create_snapshot_force',
return_value=snapshot),
) as (mock_get_limits, mock_quiesce, mock_vol_get, mock_vol_create):
if mock_vol_create_side_effect:
mock_vol_create.side_effect = mock_vol_create_side_effect
response = self.controller._action_create_image(self.req,
FAKE_UUID, body=body)
location = response.headers['Location']
image_id = location.replace(self.image_url or
self.image_api.generate_image_url('', self.context),
'')
image = image_service.show(None, image_id)
self.assertEqual(image['name'], 'snapshot_of_volume_backed')
properties = image['properties']
self.assertEqual(properties['kernel_id'], _fake_id('b'))
self.assertEqual(properties['ramdisk_id'], _fake_id('c'))
self.assertEqual(properties['root_device_name'], '/dev/vda')
self.assertTrue(properties['bdm_v2'])
bdms = properties['block_device_mapping']
self.assertEqual(len(bdms), 1)
self.assertEqual(bdms[0]['boot_index'], 0)
self.assertEqual(bdms[0]['source_type'], 'snapshot')
self.assertEqual(bdms[0]['destination_type'], 'volume')
self.assertEqual(bdms[0]['snapshot_id'], snapshot['id'])
self.assertEqual('/dev/vda', bdms[0]['device_name'])
for fld in ('connection_info', 'id', 'instance_uuid'):
self.assertNotIn(fld, bdms[0])
for k in extra_properties.keys():
self.assertEqual(properties[k], extra_properties[k])
mock_quiesce.assert_called_once_with(mock.ANY, mock.ANY)
mock_vol_get.assert_called_once_with(mock.ANY, volume['id'])
mock_vol_create.assert_called_once_with(mock.ANY, volume['id'],
mock.ANY, mock.ANY)
def test_create_volume_backed_image_no_metadata(self):
self._do_test_create_volume_backed_image({})
def test_create_volume_backed_image_with_metadata(self):
self._do_test_create_volume_backed_image(dict(ImageType='Gold',
ImageVersion='2.0'))
def test_create_volume_backed_image_cinder_over_quota(self):
self.assertRaises(
webob.exc.HTTPForbidden,
self._do_test_create_volume_backed_image, {},
mock_vol_create_side_effect=exception.OverQuota(
overs='snapshot'))
def _test_create_volume_backed_image_with_metadata_from_volume(
self, extra_metadata=None):
def _fake_id(x):
return '%s-%s-%s-%s' % (x * 8, x * 4, x * 4, x * 12)
body = dict(createImage=dict(name='snapshot_of_volume_backed'))
if extra_metadata:
body['createImage']['metadata'] = extra_metadata
image_service = glance.get_default_image_service()
def fake_block_device_mapping_get_all_by_instance(context, inst_id,
use_slave=False):
return [fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': _fake_id('a'),
'source_type': 'snapshot',
'destination_type': 'volume',
'volume_size': 1,
'device_name': 'vda',
'snapshot_id': 1,
'boot_index': 0,
'delete_on_termination': False,
'no_device': None})]
self.stub_out('nova.db.api.block_device_mapping_get_all_by_instance',
fake_block_device_mapping_get_all_by_instance)
instance = fakes.fake_compute_get(
project_id=fakes.FAKE_PROJECT_ID,
image_ref='',
vm_state=vm_states.ACTIVE,
root_device_name='/dev/vda',
system_metadata={'image_test_key1': 'test_value1',
'image_test_key2': 'test_value2'})
self.stub_out('nova.compute.api.API.get', instance)
volume = dict(id=_fake_id('a'),
size=1,
host='fake',
display_description='fake')
snapshot = dict(id=_fake_id('d'))
with test.nested(
mock.patch.object(
self.controller.compute_api.volume_api, 'get_absolute_limits',
return_value={'totalSnapshotsUsed': 0,
'maxTotalSnapshots': 10}),
mock.patch.object(self.controller.compute_api.compute_rpcapi,
'quiesce_instance',
side_effect=exception.InstanceQuiesceNotSupported(
instance_id='fake', reason='test')),
mock.patch.object(self.controller.compute_api.volume_api, 'get',
return_value=volume),
mock.patch.object(self.controller.compute_api.volume_api,
'create_snapshot_force',
return_value=snapshot),
) as (mock_get_limits, mock_quiesce, mock_vol_get, mock_vol_create):
response = self.controller._action_create_image(self.req,
FAKE_UUID, body=body)
location = response.headers['Location']
image_id = location.replace(self.image_base_url, '')
image = image_service.show(None, image_id)
properties = image['properties']
self.assertEqual(properties['test_key1'], 'test_value1')
self.assertEqual(properties['test_key2'], 'test_value2')
if extra_metadata:
for key, val in extra_metadata.items():
self.assertEqual(properties[key], val)
mock_quiesce.assert_called_once_with(mock.ANY, mock.ANY)
mock_vol_get.assert_called_once_with(mock.ANY, volume['id'])
mock_vol_create.assert_called_once_with(mock.ANY, volume['id'],
mock.ANY, mock.ANY)
def test_create_vol_backed_img_with_meta_from_vol_without_extra_meta(self):
self._test_create_volume_backed_image_with_metadata_from_volume()
def test_create_vol_backed_img_with_meta_from_vol_with_extra_meta(self):
self._test_create_volume_backed_image_with_metadata_from_volume(
extra_metadata={'a': 'b'})
def test_create_image_with_metadata(self):
body = {
'createImage': {
'name': 'Snapshot 1',
'metadata': {'key': 'asdf'},
},
}
response = self.controller._action_create_image(self.req, FAKE_UUID,
body=body)
location = response.headers['Location']
self.assertEqual(self.image_url + '123' if self.image_url else
self.image_api.generate_image_url('123', self.context), location)
def test_create_image_with_too_much_metadata(self):
body = {
'createImage': {
'name': 'Snapshot 1',
'metadata': {},
},
}
for num in range(CONF.quota.metadata_items + 1):
body['createImage']['metadata']['foo%i' % num] = "bar"
self.assertRaises(webob.exc.HTTPForbidden,
self.controller._action_create_image,
self.req, FAKE_UUID, body=body)
def test_create_image_no_name(self):
body = {
'createImage': {},
}
self.assertRaises(self.validation_error,
self.controller._action_create_image,
self.req, FAKE_UUID, body=body)
def test_create_image_blank_name(self):
body = {
'createImage': {
'name': '',
}
}
self.assertRaises(self.validation_error,
self.controller._action_create_image,
self.req, FAKE_UUID, body=body)
def test_create_image_bad_metadata(self):
body = {
'createImage': {
'name': 'geoff',
'metadata': 'henry',
},
}
self.assertRaises(self.validation_error,
self.controller._action_create_image,
self.req, FAKE_UUID, body=body)
def test_create_image_raises_conflict_on_invalid_state(self):
def snapshot(*args, **kwargs):
raise exception.InstanceInvalidState(attr='fake_attr',
state='fake_state', method='fake_method',
instance_uuid='fake')
self.stub_out('nova.compute.api.API.snapshot', snapshot)
body = {
"createImage": {
"name": "test_snapshot",
},
}
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_create_image,
self.req, FAKE_UUID, body=body)
@mock.patch('nova.objects.Service.get_by_host_and_binary')
@mock.patch('nova.api.openstack.common.'
'instance_has_port_with_resource_request', return_value=True)
def test_resize_with_bandwidth_from_old_compute_not_supported(
self, mock_has_res_req, mock_get_service):
body = dict(resize=dict(flavorRef="http://localhost/3"))
mock_get_service.return_value = objects.Service()
mock_get_service.return_value.version = 38
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_resize,
self.req, FAKE_UUID, body=body)
mock_has_res_req.assert_called_once_with(
FAKE_UUID, self.controller.network_api)
mock_get_service.assert_called_once_with(
self.req.environ['nova.context'], 'fake_host', 'nova-compute')
| 40.54749 | 79 | 0.575654 |
531404e79bdbf1f2d5bae127319d47725dea4ed3 | 7,800 | py | Python | docker-gen.py | ZHAJOR/docker-compose-generator | 11dc2f68e8e10f60e82cd0b030025f0db4dd1550 | ["MIT"] | 4 | 2017-11-10T09:51:55.000Z | 2021-06-18T15:22:02.000Z | docker-gen.py | ZHAJOR/docker-compose-generator | 11dc2f68e8e10f60e82cd0b030025f0db4dd1550 | ["MIT"] | null | null | null | docker-gen.py | ZHAJOR/docker-compose-generator | 11dc2f68e8e10f60e82cd0b030025f0db4dd1550 | ["MIT"] | 4 | 2017-11-17T19:45:55.000Z | 2020-03-11T05:52:00.000Z |
#!/usr/bin/env python3
# MAINTAINER Pierre-Antoine 'ZHAJOR' Tible <antoinetible@gmail.com>
import argparse
import os.path
"""The list of available images"""
images_available = {'api':
{'laravel': 'zhajor/docker-apache-2.4-php5.6-for-laravel',
'phalcon': 'zhajor/docker-apache-php7-phalcon'},
'db':
{'postgres': 'postgres:latest'},
'front':
{'angular': 'zhajor/docker-apache-2.4-proxy'},
'db_administration':
{'phppgadmin': 'zhajor/docker-phppgadmin'}
}
"""The image to use when the parameter --api or --front or --db or --db-admin is not set"""
default_image = {'api': images_available['api']['phalcon'],
'db': images_available['db']['postgres'],
'front': images_available['front']['angular'],
'db_administration': images_available['db_administration']['phppgadmin']}
default_compose_version = 2
api_configuration = {
'image': default_image['api'],
'ports': [80],
'container-name': 'api',
'volumes': [['./api', '/var/www/html']],
'networks': [['net', ['api']]]
}
front_configuration = {
'image': default_image['front'],
'ports': [80],
'container-name': 'front',
'volumes': [['./front', '/var/www/html']],
'networks': [['net', ['front']]],
'environments': [['proxy', '/api'], ['proxy-host', 'http://api']]
}
db_configuration = {
'image': default_image['db'],
'container-name': 'db',
'volumes': [['./database/data', '/var/lib/postgresql/data']],
'networks': [['net', ['database']]],
'environments': [['POSTGRES_USER', 'postgres'],
['POSTGRES_DB', 'postgres'],
['POSTGRES_PASSWORD', 'postgres']]
}
db_administration_configuration = {
'image': default_image['db_administration'],
'ports': [80],
'container-name': 'db_administration',
'networks': [['net', ['phppgadmin']]],
'environments': [['DB_PORT', '5432'],
['DB_HOST', 'database']]
}
parser = argparse.ArgumentParser(description='Create your docker-compose configuration within a minute')
parser.add_argument('--name', help='The project name', required=True, metavar='my-project')
parser.add_argument('--port', help='The port', required=True, metavar='24000')
parser.add_argument('--file', help='The output file name', metavar='docker-compose.yml', default='docker-compose.yml')
parser.add_argument('--no-front', dest='no_front', help='Do not use a front container', action='store_true')
parser.add_argument('--no-api', dest='no_api', help='Do not use an api container', action='store_true')
parser.add_argument('--no-db', dest='no_db', help='Do not use a db container', action='store_true')
parser.add_argument('--no-db-admin', dest='no_db_admin', help='Do not use a db configuration container', action='store_true')
parser.add_argument('--api', help='The image to use for an api', metavar='phalcon')
parser.add_argument('--front', help='The image to use for the front', metavar='angular')
parser.add_argument('--db', help='The image to use for the db', metavar='postgres')
parser.add_argument('--db-admin', help='The image to use for the db administration', metavar='phppgadmin')
args = parser.parse_args()
defined_port = int(args.port)
defined_base_container_name = args.name + "-"
document = "version: '" + str(default_compose_version) + "'\n\nservices:\n"
default_networks = "networks:\n net:"
def check_images(args):
if args.api is not None:
api_configuration['image'] = images_available['api'][args.api]
if args.front is not None:
front_configuration['image'] = images_available['front'][args.front]
if args.db is not None:
db_configuration['image'] = images_available['db'][args.db]
if args.db_admin is not None:
db_administration_configuration['image'] = images_available['db_administration'][args.db_admin]
def update_conf(config):
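    # Map each container port in the config to the current host port, then bump args.port for the next service.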
new_conf = []
for p in config['ports']:
new_conf.append([int(args.port), p])
print(config['container-name'] + " set to port " + args.port)
args.port = str(int(args.port) + 1)
config['ports'] = new_conf
class colors:
CIAN = '\033[95m'
BLUE = '\033[94m'
GREEN = '\033[92m'
FAIL = '\033[91m'
ENDC = '\033[0m'
def check_file_exists():
if os.path.exists(args.file):
        answer = input(colors.FAIL + "[WARNING]" + colors.ENDC + " " + args.file + " already exists, do you want to overwrite it?\n[Y,n]: ")
        if answer.lower() == "y" or answer == "":
            print("Overwriting the file...")
else:
exit("Aborted.")
class ImageBlock:
"""This class allows to create a configuration for an image"""
block = ""
image = ""
ports = ""
container_name = ""
volumes = ""
networks = ""
environments = ""
not_mapped = ["block"]
"""All the variables in this array will not be put in the docker compose block code"""
def __init__(self, name):
self.block = " " + name + ":"
def set_from_conf(self, conf):
if 'image' in conf:
self.set_image(conf.get('image'))
if 'ports' in conf:
self.set_ports(conf.get('ports'))
if 'container-name' in conf:
self.set_container_name(conf.get('container-name'))
if 'volumes' in conf:
self.set_volumes(conf.get('volumes'))
if 'networks' in conf:
self.set_networks(conf.get('networks'))
if 'environments' in conf:
self.set_environments(conf.get('environments'))
def set_image(self, name):
self.image += "\n image: " + name
def set_ports(self, ports):
self.ports += '\n ports:'
for port in ports:
self.ports += '\n - "' + str(port[0]) + ':' + str(port[1]) + '"'
def set_container_name(self, container_name):
self.container_name += "\n container_name: " + defined_base_container_name + container_name
def set_volumes(self, volumes):
self.volumes += "\n volumes:"
for volume in volumes:
self.volumes += "\n - " + volume[0] + ":" + volume[1]
def set_networks(self, networks):
self.networks += "\n networks:"
for net in networks:
self.networks += '\n ' + net[0] + ':'
self.networks += '\n aliases:'
for alias in net[1]:
self.networks += "\n - " + alias
def set_environments(self, environments):
self.environments += "\n environment:"
for environment in environments:
self.environments += "\n - "+environment[0]+"="+environment[1]
    def get(self):
        value = self.block
        for name, fragment in vars(self).items():
            if name not in self.not_mapped:
                value += fragment
        return value + "\n\n"
check_file_exists()
check_images(args)
if not args.no_front:
update_conf(front_configuration)
front = ImageBlock('front')
front.set_from_conf(front_configuration)
document += front.get()
if not args.no_api:
update_conf(api_configuration)
api = ImageBlock('api')
api.set_from_conf(api_configuration)
document += api.get()
if not args.no_db:
db = ImageBlock('db')
db.set_from_conf(db_configuration)
document += db.get()
if not args.no_db_admin:
update_conf(db_administration_configuration)
db_admin = ImageBlock('db_admin')
db_admin.set_from_conf(db_administration_configuration)
document += db_admin.get()
document += default_networks
file = open(args.file, 'w')
file.write(document)
file.close()
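A hypothetical invocation of this generator (flag values are illustrative, taken from the argparse metavars above) could be:
#   python3 docker-gen.py --name my-project --port 24000 --api phalcon --db postgres --no-db-admin
Each service with exposed ports is mapped to consecutive host ports starting at --port.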
| 35.779817 | 137 | 0.602821 |
226b796ab4a3f08439c4f8efc4e6c951c22c3fbc | 1,711 | py | Python | app/routes.py | thewordisbird/flask-cloud-run | 4cdf00d875cdbcba89872c7c95db1c6840a919eb | ["MIT"] | null | null | null | app/routes.py | thewordisbird/flask-cloud-run | 4cdf00d875cdbcba89872c7c95db1c6840a919eb | ["MIT"] | 4 | 2020-10-21T19:53:06.000Z | 2021-09-02T13:39:02.000Z | app/routes.py | thewordisbird/flask-cloud-run | 4cdf00d875cdbcba89872c7c95db1c6840a919eb | ["MIT"] | null | null | null |
from flask import Blueprint, render_template, request, redirect, url_for, \
jsonify
from .forms import SearchForm, ContactInfoForm
from app import firebase
firestore = firebase.firestore()
bp = Blueprint('main', __name__)
@bp.route('/')
@bp.route('/index')
def index():
results = firestore.get_collection("contacts")
return render_template('index.html', results=results)
@bp.route('/add', methods=['GET', 'POST'])
def add():
form = ContactInfoForm()
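    # Debug output: print the submitted form data and its validation state before handling the POST.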
print(form.data)
print(form.validate())
print(form.errors)
if form.validate_on_submit():
form_data = {
'name': form.data['name'],
'phone': form.data['phone'],
'email': form.data['email']
}
firestore.set_document_without_id("contacts", form_data)
return redirect(url_for('main.index'))
return render_template('input_form.html', form=form)
@bp.route('/update/<contact_id>', methods=['POST', 'GET'])
def update(contact_id):
contact = firestore.get_document(f"contacts/{contact_id}")
form = ContactInfoForm(data=contact)
if form.validate_on_submit():
update_data = {
'name': form.data['name'],
'phone': form.data['phone'],
'email': form.data['email']
}
firestore.update_document(f"contacts/{contact_id}", update_data)
return redirect(url_for('main.index'))
return render_template('input_form.html', form=form)
@bp.route('/delete', methods=['POST'])
def delete():
req_data = request.get_json()
contact_id = req_data['contactId']
firestore.delete_document(f"contacts/{contact_id}")
resp = jsonify({"status": "success"})
return resp
| 27.596774 | 75 | 0.639977 |
e01e8e75246e69a98507b26b897eb273799ba81a | 2,152 | py | Python | tests/unit/test_reframed_helpers.py | DD-DeCaF/simulations | dab77166f301c0a12e6fed973147fb4add8a62c4 | ["Apache-2.0"] | 1 | 2018-04-30T23:46:34.000Z | 2018-04-30T23:46:34.000Z | tests/unit/test_reframed_helpers.py | DD-DeCaF/model | dab77166f301c0a12e6fed973147fb4add8a62c4 | ["Apache-2.0"] | 65 | 2016-12-21T14:05:27.000Z | 2019-07-15T06:50:54.000Z | tests/unit/test_reframed_helpers.py | DD-DeCaF/model | dab77166f301c0a12e6fed973147fb4add8a62c4 | ["Apache-2.0"] | 2 | 2019-10-10T14:04:31.000Z | 2019-11-21T09:47:49.000Z |
# Copyright 2018 Novo Nordisk Foundation Center for Biosustainability, DTU.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from simulations.modeling.reframed_helpers import generate_transactions
def test_generate_transactions_uptake():
# Create mock exchanges
id2name = {"M1": "Metabolite 1"}
exchanges_mock_more_uptake = {
("A", "M1"): 10,
("B", "M1"): 5,
("C", "M1"): -15,
("D", "M1"): -5,
}
result = generate_transactions(id2name, exchanges_mock_more_uptake)
assert (result[0][4] + result[1][4]) == exchanges_mock_more_uptake[("A", "M1")]
assert (result[2][4] + result[3][4]) == exchanges_mock_more_uptake[("B", "M1")]
assert (result[0][4] + result[2][4] + result[4][4]) == -exchanges_mock_more_uptake[
("C", "M1")
]
assert (result[1][4] + result[3][4] + result[5][4]) == -exchanges_mock_more_uptake[
("D", "M1")
]
def test_generate_transactions_secretion():
# Create mock exchanges
id2name = {"M1": "Metabolite 1"}
exchanges_mock_more_secretion = {
("A", "M1"): 20,
("B", "M1"): 30,
("C", "M1"): -15,
("D", "M1"): -5,
}
result = generate_transactions(id2name, exchanges_mock_more_secretion)
assert (
result[0][4] + result[1][4] + result[4][4]
) == exchanges_mock_more_secretion[("A", "M1")]
assert (
result[2][4] + result[3][4] + result[5][4]
) == exchanges_mock_more_secretion[("B", "M1")]
assert (result[0][4] + result[2][4]) == -exchanges_mock_more_secretion[("C", "M1")]
assert (result[1][4] + result[3][4]) == -exchanges_mock_more_secretion[("D", "M1")]
| 37.103448 | 87 | 0.634294 |
196fe5659936586cac6c1379d7f238d21d84340b | 4,240 | py | Python | pybamm/models/submodels/oxygen_diffusion/full_oxygen_diffusion.py | gyouhoc/PyBaMM | 6852e0e518157e6802ce83a2549562e7d0ed4b9f | ["BSD-3-Clause"] | null | null | null | pybamm/models/submodels/oxygen_diffusion/full_oxygen_diffusion.py | gyouhoc/PyBaMM | 6852e0e518157e6802ce83a2549562e7d0ed4b9f | ["BSD-3-Clause"] | null | null | null | pybamm/models/submodels/oxygen_diffusion/full_oxygen_diffusion.py | gyouhoc/PyBaMM | 6852e0e518157e6802ce83a2549562e7d0ed4b9f | ["BSD-3-Clause"] | null | null | null |
#
# Class for oxygen diffusion
#
import pybamm
from .base_oxygen_diffusion import BaseModel
def separator_and_positive_only(variable):
"""Return only the separator and positive electrode children
Parameters
----------
variable : :class:`pybamm.Concatenation`
Concatenation of variables in negative, separator, positive
Returns
-------
:class:`pybamm.Concatenation`
Concatenation of variables in separator and positive only
"""
_, var_s, var_p = variable.orphans
return pybamm.Concatenation(var_s, var_p)
class Full(BaseModel):
"""Class for conservation of mass of oxygen. (Full refers to unreduced by
asymptotic methods)
In this model, extremely fast oxygen kinetics in the negative electrode imposes
zero oxygen concentration there, and so the oxygen variable only lives in the
separator and positive electrode. The boundary condition at the negative electrode/
separator interface is homogeneous Dirichlet.
Parameters
----------
param : parameter class
The parameters to use for this submodel
reactions : dict
Dictionary of reaction terms
**Extends:** :class:`pybamm.oxygen_diffusion.BaseModel`
"""
def __init__(self, param, reactions):
super().__init__(param, reactions)
def get_fundamental_variables(self):
# Oxygen concentration (oxygen concentration is zero in the negative electrode)
c_ox_n = pybamm.FullBroadcast(0, "negative electrode", "current collector")
c_ox_s = pybamm.Variable(
"Separator oxygen concentration",
domain="separator",
auxiliary_domains={"secondary": "current collector"},
)
c_ox_p = pybamm.Variable(
"Positive oxygen concentration",
domain="positive electrode",
auxiliary_domains={"secondary": "current collector"},
)
c_ox_s_p = pybamm.Concatenation(c_ox_s, c_ox_p)
variables = {"Separator and positive electrode oxygen concentration": c_ox_s_p}
c_ox = pybamm.Concatenation(c_ox_n, c_ox_s, c_ox_p)
variables.update(self._get_standard_concentration_variables(c_ox))
return variables
def get_coupled_variables(self, variables):
tor = separator_and_positive_only(variables["Electrolyte tortuosity"])
c_ox = variables["Separator and positive electrode oxygen concentration"]
# TODO: allow charge and convection?
v_box = pybamm.Scalar(0)
param = self.param
N_ox_diffusion = -tor * param.curlyD_ox * pybamm.grad(c_ox)
N_ox = N_ox_diffusion + param.C_e * c_ox * v_box
# Flux in the negative electrode is zero
N_ox = pybamm.Concatenation(
pybamm.FullBroadcast(0, "negative electrode", "current collector"), N_ox
)
variables.update(self._get_standard_flux_variables(N_ox))
return variables
def set_rhs(self, variables):
param = self.param
eps = separator_and_positive_only(variables["Porosity"])
deps_dt = separator_and_positive_only(variables["Porosity change"])
c_ox = variables["Separator and positive electrode oxygen concentration"]
N_ox = variables["Oxygen flux"].orphans[1]
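        # Oxygen is only produced or consumed in the positive electrode, so the separator part of each source term is a zero broadcast.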
source_terms = sum(
pybamm.Concatenation(
pybamm.FullBroadcast(0, "separator", "current collector"),
reaction["Positive"]["s_ox"] * variables[reaction["Positive"]["aj"]],
)
for reaction in self.reactions.values()
)
self.rhs = {
c_ox: (1 / eps)
* (-pybamm.div(N_ox) / param.C_e + source_terms - c_ox * deps_dt)
}
def set_boundary_conditions(self, variables):
c_ox = variables["Separator and positive electrode oxygen concentration"]
self.boundary_conditions = {
c_ox: {
"left": (pybamm.Scalar(0), "Dirichlet"),
"right": (pybamm.Scalar(0), "Neumann"),
}
}
def set_initial_conditions(self, variables):
c_ox = variables["Separator and positive electrode oxygen concentration"]
self.initial_conditions = {c_ox: self.param.c_ox_init}
| 33.385827 | 87 | 0.653774 |