Column schema (type and observed range):

| column | type | observed range / values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 4 – 1.02M |
| ext | string | 8 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 4 – 209 |
| max_stars_repo_name | string | length 5 – 121 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | length 1 – 10 |
| max_stars_count | int64 (nullable) | 1 – 191k |
| max_stars_repo_stars_event_min_datetime | string (nullable) | length 24 |
| max_stars_repo_stars_event_max_datetime | string (nullable) | length 24 |
| max_issues_repo_path | string | length 4 – 209 |
| max_issues_repo_name | string | length 5 – 121 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | length 1 – 10 |
| max_issues_count | int64 (nullable) | 1 – 67k |
| max_issues_repo_issues_event_min_datetime | string (nullable) | length 24 |
| max_issues_repo_issues_event_max_datetime | string (nullable) | length 24 |
| max_forks_repo_path | string | length 4 – 209 |
| max_forks_repo_name | string | length 5 – 121 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | length 1 – 10 |
| max_forks_count | int64 (nullable) | 1 – 105k |
| max_forks_repo_forks_event_min_datetime | string (nullable) | length 24 |
| max_forks_repo_forks_event_max_datetime | string (nullable) | length 24 |
| content | string | length 4 – 1.02M |
| avg_line_length | float64 | 1.07 – 66.1k |
| max_line_length | int64 | 4 – 266k |
| alphanum_fraction | float64 | 0.01 – 1 |
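A minimal sketch of how rows with this schema could be inspected, assuming the table has been exported to a hypothetical `snippets.parquet` file; the file name and the pandas workflow are assumptions, while the column names come from the header above.

```python
import pandas as pd

# Hypothetical export of the rows shown below; the dataset itself is not named here.
df = pd.read_parquet("snippets.parquet")

# Each row is per-file metadata plus the raw file text in `content`.
python_files = df[df["lang"] == "Python"]
popular = python_files[python_files["max_stars_count"].fillna(0) >= 10]
print(popular[["max_stars_repo_name", "max_stars_repo_path", "size", "max_stars_count"]].head())
```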
Row 1
hexsha d41dedbab50e310d051ff3936d915a72d1bd91ab | size 2,321 | ext py | lang Python
max_stars: path doujin-eromanga-com/utils/make_model.py | repo GINK03/itmedia-scraping | head 5afbe06dd0aa12db1694a2b387aa2eeafb20e981 | licenses ["MIT"] | count 16 | events 2018-02-06T14:43:41.000Z – 2021-01-23T05:07:33.000Z
max_issues: path doujin-eromanga-com/utils/make_model.py | repo GINK03/itmedia-scraping | head 5afbe06dd0aa12db1694a2b387aa2eeafb20e981 | licenses ["MIT"] | count null | events null
max_forks: path doujin-eromanga-com/utils/make_model.py | repo GINK03/itmedia-scraping | head 5afbe06dd0aa12db1694a2b387aa2eeafb20e981 | licenses ["MIT"] | count 4 | events 2018-01-16T13:50:43.000Z – 2019-12-16T19:45:54.000Z
content:
from sklearn.model_selection import train_test_split
# from sklearn.metrics import mean_squared_log_error
from sklearn.metrics import mean_squared_error
from math import sqrt
import lightgbm as lgb
from sklearn.model_selection import KFold  # sklearn.cross_validation was removed in scikit-learn 0.20
import pandas as pd
import numpy as np
def get_oof(clf, x_train, y, x_test):
NFOLDS=5
SEED=71
kf = KFold(n_splits=NFOLDS, shuffle=True, random_state=SEED)
oof_train = np.zeros((len(x_train),))
oof_test = np.zeros((len(x_test),))
oof_test_skf = np.empty((NFOLDS, len(x_test)))
lgbm_params = {
'task': 'train',
'boosting_type': 'gbdt',
'objective': 'regression',
'metric': 'rmse',
# 'max_depth': 15,
'num_leaves': 30,
'feature_fraction': 0.9,
'bagging_fraction': 0.75,
'bagging_freq': 4,
'learning_rate': 0.016*5,
#'max_bin':1023,
'verbose': 0
}
for i, (train_index, test_index) in enumerate(kf.split(x_train)):
print('\nFold {}'.format(i))
x_tr = x_train.iloc[train_index]
y_tr = y[train_index]
y_te = y[test_index]
x_te = x_train.iloc[test_index]
lgtrain = lgb.Dataset(x_tr, y_tr, feature_name=x_train.columns.tolist())
lgvalid = lgb.Dataset(x_te, y_te, feature_name=x_train.columns.tolist())
#categorical_feature = categorical)
lgb_clf = lgb.train(
lgbm_params,
lgtrain,
num_boost_round=20000,
valid_sets=[lgtrain, lgvalid],
valid_names=['train','valid'],
early_stopping_rounds=50,
verbose_eval=50
)
oof_train[test_index] = lgb_clf.predict(x_te)
oof_test_skf[i, :] = lgb_clf.predict(x_test)
oof_test[:] = oof_test_skf.mean(axis=0)
return oof_train, oof_test
df = pd.read_csv('source.csv')
trainy = df['_stars_'].values
trainX = df.drop(['_stars_'], axis=1)
testX = pd.read_csv('./target.csv')
oof_train, oof_test = get_oof(None, trainX, np.log(trainy+2.0), testX.drop(['_hashval_'], axis=1))
rms = sqrt(mean_squared_error( np.log(trainy+2.0), oof_train))
print('LGB OOF RMSE: {}'.format(rms))
print("Modeling Stage")
testX['preds'] = np.exp(np.concatenate([oof_test])) - 2
testX[['_hashval_', 'preds']].to_csv('preds.csv', index=False)
avg_line_length 32.690141 | max_line_length 98 | alphanum_fraction 0.631624
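The three trailing numbers on each row (avg_line_length, max_line_length, alphanum_fraction) appear to be simple statistics over the `content` field. A minimal sketch of one plausible way to recompute them; the exact formulas used to build the table are an assumption.

```python
def row_stats(content: str):
    """Recompute the per-row statistics from the raw file text (assumed definitions)."""
    lines = content.splitlines()
    avg_line_length = sum(len(line) for line in lines) / len(lines)
    max_line_length = max(len(line) for line in lines)
    alphanum_fraction = sum(ch.isalnum() for ch in content) / len(content)
    return avg_line_length, max_line_length, alphanum_fraction
```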
Row 2
hexsha 1ae48f2aed0c475f13e3b26d1fdc16a3d74d47d6 | size 8,231 | ext py | lang Python
max_stars: path test/functional/test_framework/key.py | repo Bits-Coin/bits-coin | head dd8220018f5582e76d43e8c52bd323524e495d8c | licenses ["MIT"] | count 2 | events 2021-11-17T23:05:13.000Z – 2021-11-17T23:05:32.000Z
max_issues: path test/functional/test_framework/key.py | repo Bits-Coin/bits-coin | head dd8220018f5582e76d43e8c52bd323524e495d8c | licenses ["MIT"] | count null | events null
max_forks: path test/functional/test_framework/key.py | repo Bits-Coin/bits-coin | head dd8220018f5582e76d43e8c52bd323524e495d8c | licenses ["MIT"] | count null | events null
content:
# Copyright (c) 2011 Sam Rushing
"""ECC secp256k1 OpenSSL wrapper.
WARNING: This module does not mlock() secrets; your private keys may end up on
disk in swap! Use with caution!
This file is modified from python-bitscoinlib.
"""
import ctypes
import ctypes.util
import hashlib
ssl = ctypes.cdll.LoadLibrary(ctypes.util.find_library ('ssl') or 'libeay32')
ssl.BN_new.restype = ctypes.c_void_p
ssl.BN_new.argtypes = []
ssl.BN_bin2bn.restype = ctypes.c_void_p
ssl.BN_bin2bn.argtypes = [ctypes.c_char_p, ctypes.c_int, ctypes.c_void_p]
ssl.BN_CTX_free.restype = None
ssl.BN_CTX_free.argtypes = [ctypes.c_void_p]
ssl.BN_CTX_new.restype = ctypes.c_void_p
ssl.BN_CTX_new.argtypes = []
ssl.ECDH_compute_key.restype = ctypes.c_int
ssl.ECDH_compute_key.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p]
ssl.ECDSA_sign.restype = ctypes.c_int
ssl.ECDSA_sign.argtypes = [ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
ssl.ECDSA_verify.restype = ctypes.c_int
ssl.ECDSA_verify.argtypes = [ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p]
ssl.EC_KEY_free.restype = None
ssl.EC_KEY_free.argtypes = [ctypes.c_void_p]
ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
ssl.EC_KEY_new_by_curve_name.argtypes = [ctypes.c_int]
ssl.EC_KEY_get0_group.restype = ctypes.c_void_p
ssl.EC_KEY_get0_group.argtypes = [ctypes.c_void_p]
ssl.EC_KEY_get0_public_key.restype = ctypes.c_void_p
ssl.EC_KEY_get0_public_key.argtypes = [ctypes.c_void_p]
ssl.EC_KEY_set_private_key.restype = ctypes.c_int
ssl.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
ssl.EC_KEY_set_conv_form.restype = None
ssl.EC_KEY_set_conv_form.argtypes = [ctypes.c_void_p, ctypes.c_int]
ssl.EC_KEY_set_public_key.restype = ctypes.c_int
ssl.EC_KEY_set_public_key.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
ssl.i2o_ECPublicKey.restype = ctypes.c_void_p
ssl.i2o_ECPublicKey.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
ssl.EC_POINT_new.restype = ctypes.c_void_p
ssl.EC_POINT_new.argtypes = [ctypes.c_void_p]
ssl.EC_POINT_free.restype = None
ssl.EC_POINT_free.argtypes = [ctypes.c_void_p]
ssl.EC_POINT_mul.restype = ctypes.c_int
ssl.EC_POINT_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
# this specifies the curve used with ECDSA.
NID_secp256k1 = 714 # from openssl/obj_mac.h
SECP256K1_ORDER = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141
SECP256K1_ORDER_HALF = SECP256K1_ORDER // 2
# Thx to Sam Devlin for the ctypes magic 64-bit fix.
def _check_result(val, func, args):
if val == 0:
raise ValueError
else:
return ctypes.c_void_p (val)
ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
ssl.EC_KEY_new_by_curve_name.errcheck = _check_result
class CECKey():
"""Wrapper around OpenSSL's EC_KEY"""
POINT_CONVERSION_COMPRESSED = 2
POINT_CONVERSION_UNCOMPRESSED = 4
def __init__(self):
self.k = ssl.EC_KEY_new_by_curve_name(NID_secp256k1)
def __del__(self):
if ssl:
ssl.EC_KEY_free(self.k)
self.k = None
def set_secretbytes(self, secret):
priv_key = ssl.BN_bin2bn(secret, 32, ssl.BN_new())
group = ssl.EC_KEY_get0_group(self.k)
pub_key = ssl.EC_POINT_new(group)
ctx = ssl.BN_CTX_new()
if not ssl.EC_POINT_mul(group, pub_key, priv_key, None, None, ctx):
raise ValueError("Could not derive public key from the supplied secret.")
ssl.EC_POINT_mul(group, pub_key, priv_key, None, None, ctx)
ssl.EC_KEY_set_private_key(self.k, priv_key)
ssl.EC_KEY_set_public_key(self.k, pub_key)
ssl.EC_POINT_free(pub_key)
ssl.BN_CTX_free(ctx)
return self.k
def set_privkey(self, key):
self.mb = ctypes.create_string_buffer(key)
return ssl.d2i_ECPrivateKey(ctypes.byref(self.k), ctypes.byref(ctypes.pointer(self.mb)), len(key))
def set_pubkey(self, key):
self.mb = ctypes.create_string_buffer(key)
return ssl.o2i_ECPublicKey(ctypes.byref(self.k), ctypes.byref(ctypes.pointer(self.mb)), len(key))
def get_privkey(self):
size = ssl.i2d_ECPrivateKey(self.k, 0)
mb_pri = ctypes.create_string_buffer(size)
ssl.i2d_ECPrivateKey(self.k, ctypes.byref(ctypes.pointer(mb_pri)))
return mb_pri.raw
def get_pubkey(self):
size = ssl.i2o_ECPublicKey(self.k, 0)
mb = ctypes.create_string_buffer(size)
ssl.i2o_ECPublicKey(self.k, ctypes.byref(ctypes.pointer(mb)))
return mb.raw
def get_raw_ecdh_key(self, other_pubkey):
ecdh_keybuffer = ctypes.create_string_buffer(32)
r = ssl.ECDH_compute_key(ctypes.pointer(ecdh_keybuffer), 32,
ssl.EC_KEY_get0_public_key(other_pubkey.k),
self.k, 0)
if r != 32:
raise Exception('CKey.get_ecdh_key(): ECDH_compute_key() failed')
return ecdh_keybuffer.raw
def get_ecdh_key(self, other_pubkey, kdf=lambda k: hashlib.sha256(k).digest()):
# FIXME: be warned it's not clear what the kdf should be as a default
r = self.get_raw_ecdh_key(other_pubkey)
return kdf(r)
def sign(self, hash, low_s = True):
# FIXME: need unit tests for below cases
if not isinstance(hash, bytes):
raise TypeError('Hash must be bytes instance; got %r' % hash.__class__)
if len(hash) != 32:
raise ValueError('Hash must be exactly 32 bytes long')
sig_size0 = ctypes.c_uint32()
sig_size0.value = ssl.ECDSA_size(self.k)
mb_sig = ctypes.create_string_buffer(sig_size0.value)
result = ssl.ECDSA_sign(0, hash, len(hash), mb_sig, ctypes.byref(sig_size0), self.k)
assert 1 == result
assert mb_sig.raw[0] == 0x30
assert mb_sig.raw[1] == sig_size0.value - 2
total_size = mb_sig.raw[1]
assert mb_sig.raw[2] == 2
r_size = mb_sig.raw[3]
assert mb_sig.raw[4 + r_size] == 2
s_size = mb_sig.raw[5 + r_size]
s_value = int.from_bytes(mb_sig.raw[6+r_size:6+r_size+s_size], byteorder='big')
if (not low_s) or s_value <= SECP256K1_ORDER_HALF:
return mb_sig.raw[:sig_size0.value]
else:
low_s_value = SECP256K1_ORDER - s_value
low_s_bytes = (low_s_value).to_bytes(33, byteorder='big')
while len(low_s_bytes) > 1 and low_s_bytes[0] == 0 and low_s_bytes[1] < 0x80:
low_s_bytes = low_s_bytes[1:]
new_s_size = len(low_s_bytes)
new_total_size_byte = (total_size + new_s_size - s_size).to_bytes(1,byteorder='big')
new_s_size_byte = (new_s_size).to_bytes(1,byteorder='big')
return b'\x30' + new_total_size_byte + mb_sig.raw[2:5+r_size] + new_s_size_byte + low_s_bytes
def verify(self, hash, sig):
"""Verify a DER signature"""
return ssl.ECDSA_verify(0, hash, len(hash), sig, len(sig), self.k) == 1
def set_compressed(self, compressed):
if compressed:
form = self.POINT_CONVERSION_COMPRESSED
else:
form = self.POINT_CONVERSION_UNCOMPRESSED
ssl.EC_KEY_set_conv_form(self.k, form)
class CPubKey(bytes):
"""An encapsulated public key
Attributes:
is_valid - Corresponds to CPubKey.IsValid()
is_fullyvalid - Corresponds to CPubKey.IsFullyValid()
is_compressed - Corresponds to CPubKey.IsCompressed()
"""
def __new__(cls, buf, _cec_key=None):
self = super(CPubKey, cls).__new__(cls, buf)
if _cec_key is None:
_cec_key = CECKey()
self._cec_key = _cec_key
self.is_fullyvalid = _cec_key.set_pubkey(self) != 0
return self
@property
def is_valid(self):
return len(self) > 0
@property
def is_compressed(self):
return len(self) == 33
def verify(self, hash, sig):
return self._cec_key.verify(hash, sig)
def __str__(self):
return repr(self)
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, super(CPubKey, self).__repr__())
avg_line_length 36.259912 | max_line_length 130 | alphanum_fraction 0.691289
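For reference, a hypothetical usage sketch of the CECKey/CPubKey wrapper above, assuming the file is importable as `key` and that the loaded OpenSSL build still exposes the legacy EC_KEY functions (modern OpenSSL 3.x may not).

```python
import hashlib
from key import CECKey, CPubKey  # assumed import path for the module above

seckey = CECKey()
seckey.set_compressed(True)
seckey.set_secretbytes(b'\x01' * 32)          # 32-byte secret scalar

msg_hash = hashlib.sha256(b'hello').digest()  # sign() requires exactly 32 bytes
sig = seckey.sign(msg_hash)                   # DER-encoded, low-S normalized by default

pubkey = CPubKey(seckey.get_pubkey())
assert pubkey.is_fullyvalid and pubkey.is_compressed
assert pubkey.verify(msg_hash, sig)
```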
Row 3
hexsha 2ccd3c761e87ed9c45b16518309f7ac14b168890 | size 613 | ext py | lang Python
max_stars: path src/reopenwebnet/messages.py | repo mic-rigaud/ReOpenWebNet | head c313e9421d4cfa8a0c803b9b98898211ac0def3b | licenses ["MIT"] | count null | events null
max_issues: path src/reopenwebnet/messages.py | repo mic-rigaud/ReOpenWebNet | head c313e9421d4cfa8a0c803b9b98898211ac0def3b | licenses ["MIT"] | count null | events null
max_forks: path src/reopenwebnet/messages.py | repo mic-rigaud/ReOpenWebNet | head c313e9421d4cfa8a0c803b9b98898211ac0def3b | licenses ["MIT"] | count null | events null
content:
# OK (acknowledge) message from the bus
ACK = '*#*1##'
# Not-OK (negative acknowledge) message from the bus
NACK = '*#*0##'
# OpenWebNet string to open a command session
CMD_SESSION = '*99*0##'
# OpenWebNet string to open an event session
EVENT_SESSION = '*99*1##'
def extract_messages(data):
if not data.startswith("*"):
raise Exception("data does not start with *")
if not data.endswith("##"):
raise Exception('data does not end with ##')
parts = [part + "##" for part in data.split("##")[:-1]]
return parts
def extract_single(message):
return message[2:-2]
def generate_single(message):
return "*#%s##" % message
avg_line_length 23.576923 | max_line_length 59 | alphanum_fraction 0.628059
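A quick usage check of the OpenWebNet helpers above; the frame values are taken directly from the module, while the import path (mirroring src/reopenwebnet/messages.py) is an assumption.

```python
from reopenwebnet.messages import ACK, CMD_SESSION, extract_messages, extract_single, generate_single

# Two frames concatenated on the wire are split back into individual messages.
assert extract_messages(ACK + CMD_SESSION) == ['*#*1##', '*99*0##']

# extract_single/generate_single strip and re-add the '*#' prefix and '##' suffix.
assert extract_single('*#42##') == '42'
assert generate_single('42') == '*#42##'
```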
Row 4
hexsha 8835e7abc47d4f784d4e93c06319b6021f226296 | size 11,741 | ext py | lang Python
max_stars: path DTL/gui/widgets/colorpickerwidget.py | repo rocktavious/DevToolsLib | head 117200c91a3361e04f7c8e07d2ed4999bbcfc469 | licenses ["MIT"] | count 1 | events 2015-03-23T18:52:12.000Z – 2015-03-23T18:52:12.000Z
max_issues: path DTL/gui/widgets/colorpickerwidget.py | repo rocktavious/DevToolsLib | head 117200c91a3361e04f7c8e07d2ed4999bbcfc469 | licenses ["MIT"] | count null | events null
max_forks: path DTL/gui/widgets/colorpickerwidget.py | repo rocktavious/DevToolsLib | head 117200c91a3361e04f7c8e07d2ed4999bbcfc469 | licenses ["MIT"] | count 2 | events 2017-05-21T12:50:41.000Z – 2021-10-17T03:32:45.000Z
content:
#Written by Blur Studios
# used in painting, faster to do global import
from DTL.qt.QtCore import Qt, pyqtSlot, pyqtProperty, pyqtSignal, QPoint, QRect
from DTL.qt.QtGui import QPainter, QColor, QWidget, QLinearGradient, QPushButton, QConicalGradient, QRadialGradient
class ColorPickerWidget( QWidget ):
BarWidth = 15
colorChanged = pyqtSignal( QColor )
editingFinished = pyqtSignal()
def __init__( self, parent ):
# initialize the super class
QWidget.__init__( self, parent )
self._hue = 0 # defines red color
self._saturation = 255
self._lightness = 255
self._alpha = 255
self._showAlpha = True
self._editing = None
def alpha( self ):
return self._alpha
def alphaPercent( self ):
return self._alpha / 255.0
def alphaRect( self ):
if ( self.showAlpha() ):
return QRect( ColorPickerWidget.BarWidth + 7, self.height() - ColorPickerWidget.BarWidth - 2, self.width() - ColorPickerWidget.BarWidth - 8, ColorPickerWidget.BarWidth )
return QRect()
def color( self ):
return QColor.fromHsv( self.hue(), self.saturation(), self.lightness(), self.alpha() )
def colorRect( self ):
if ( self.showAlpha() ):
return QRect( ColorPickerWidget.BarWidth + 7, 1, self.width() - ColorPickerWidget.BarWidth - 8, self.height() - ColorPickerWidget.BarWidth - 8 )
else:
return QRect( ColorPickerWidget.BarWidth + 7, 1, self.width() - ColorPickerWidget.BarWidth - 8, self.height() - 2 )
def emitColorChanged( self, color ):
if ( not self.signalsBlocked() ):
self.colorChanged.emit( color )
def hue( self ):
return self._hue
def huePercent( self ):
return self._hue / 359.0
def hueRect( self ):
if ( self.showAlpha() ):
return QRect( 1, 1, ColorPickerWidget.BarWidth, self.height() - ColorPickerWidget.BarWidth - 8 )
else:
return QRect( 1, 1, ColorPickerWidget.BarWidth, self.height() - 2 )
def lightness( self ):
return self._lightness
def mousePressEvent( self, event ):
# edit the hue
r = self.hueRect()
if ( r.contains( event.pos() ) ):
self._editing = 'hue'
self.setHueFromPoint( event.pos() )
return QWidget.mousePressEvent( self, event )
# edit the alpha
r = self.alphaRect()
if ( r.contains( event.pos() ) ):
self._editing = 'alpha'
self.setAlphaFromPoint( event.pos() )
return QWidget.mousePressEvent( self, event )
# edit the color
r = self.colorRect()
if ( r.contains( event.pos() ) ):
self._editing = 'color'
self.setSaturationAndLightnessFromPoint( event.pos() )
return QWidget.mousePressEvent( self, event )
return QWidget.mousePressEvent( self, event )
def mouseMoveEvent( self, event ):
# edit the color
if ( self._editing == 'color' ):
self.setSaturationAndLightnessFromPoint( event.pos() )
# edit the alpha
elif ( self._editing == 'alpha' ):
self.setAlphaFromPoint( event.pos() )
# edit the hue
elif ( self._editing == 'hue' ):
self.setHueFromPoint( event.pos() )
return QWidget.mouseMoveEvent( self, event )
def mouseReleaseEvent( self, event ):
if ( self._editing and not self.signalsBlocked() ):
self.editingFinished.emit()
self._editing = None
return QWidget.mouseReleaseEvent( self,event )
def paintEvent( self, event ):
painter = QPainter()
painter.begin( self )
painter.setPen( Qt.black )
# create the hue rect
hrect = self.hueRect()
# create the hue gradient
grad = QLinearGradient()
grad.setStart( 0, hrect.top() )
grad.setFinalStop( 0, hrect.bottom() )
for i in range( 10 ):
perc = i / 10.0
grad.setColorAt( perc, QColor.fromHsv( perc * 360, 255, 255 ) )
grad.setColorAt( 1.0, QColor.fromHsv( 359, 255, 255 ) )
painter.setBrush( grad )
painter.drawRect( hrect )
# create the hue line
y = (hrect.y() + 2) + self.huePercent() * (hrect.height() - 3)
pen = painter.pen()
pen.setColor( Qt.white )
pen.setWidth( 2 )
painter.setPen( pen )
painter.drawLine( hrect.left() + 2, y, hrect.right(), y )
painter.setPen( Qt.black )
# create the alpha rect
if ( self.showAlpha() ):
arect = self.alphaRect()
# create the alpha gradient
grad = QLinearGradient()
grad.setStart( arect.left(), 0 )
grad.setFinalStop( arect.right(), 0 )
grad.setColorAt( 0.0, QColor( Qt.black ) )
grad.setColorAt( 1.0, QColor( Qt.white ) )
painter.setBrush( grad )
painter.drawRect( arect )
# create the alpha line
x = (arect.x() + 2) + (1 - self.alphaPercent()) * (arect.width() - 3)
pen = painter.pen()
pen.setColor( Qt.yellow )
pen.setWidth( 2 )
painter.setPen( pen )
painter.drawLine( x, arect.top() + 2, x, arect.bottom() )
painter.setPen( Qt.black )
crect = self.colorRect()
# create the color scale gradient
grad = QLinearGradient()
grad.setStart( crect.left(), 0 )
grad.setFinalStop( crect.right(), 0 )
grad.setColorAt( 0.0, QColor( 255, 255, 255, self.alpha() ) )
grad.setColorAt( 1.0, QColor.fromHsv( self.hue(), 255, 255, self.alpha() ) )
painter.setBrush( grad )
painter.drawRect( crect )
# create the grayscale gradient
grad = QLinearGradient()
grad.setStart( 0, crect.top() )
grad.setFinalStop( 0, crect.bottom() )
grad.setColorAt( 0.0, QColor( 0, 0, 0, 0 ) )
grad.setColorAt( 1.0, QColor( 0, 0, 0, self.alpha() ) )
painter.setBrush( grad )
painter.drawRect( crect )
# create the color location
x = crect.x() + (self.saturation() / 255.0) * crect.width()
y = crect.y() + (1 - (self.lightness() / 255.0)) * crect.height()
painter.setPen( Qt.white )
painter.setBrush( QColor.fromHsv( self.hue(), self.saturation(), self.lightness(), 50) )
painter.setClipRect( crect )
painter.setRenderHint( QPainter.Antialiasing )
painter.drawEllipse( QPoint( x, y ), 5, 5 )
painter.end()
def saturation( self ):
return self._saturation
def setAlphaFromPoint( self, point ):
rect = self.alphaRect()
rmin = rect.left()
rmax = rect.right()
ex = point.x()
if ( ex < rmin ):
self._alpha = 255
elif ( rmax < ex ):
self._alpha = 0
else:
self._alpha = (1 - (float( ex - rmin ) / float( rmax - rmin ))) * 255
self.emitColorChanged( self.color() )
self.repaint()
@pyqtSlot(QColor)
def setColor( self, color ):
self._alpha = color.alpha()
self._hue = color.hue()
self._lightness = color.value()
self._saturation = color.saturation()
self.repaint()
def setHueFromPoint( self, point ):
rect = self.hueRect()
rmin = rect.top()
rmax = rect.bottom()
ey = point.y()
if ( ey < rmin ):
self._hue = 0
elif ( rmax < ey ):
self._hue = 359
else:
self._hue = (float( ey - rmin ) / float( rmax - rmin )) * 359
self.emitColorChanged( self.color() )
self.repaint()
def setSaturationAndLightnessFromPoint( self, point ):
rect = self.colorRect()
x = point.x()
y = point.y()
# normalize the x position
if ( x < rect.x() ):
x = 0
elif ( rect.right() < x ):
x = rect.width()
else:
x -= rect.x()
# normalize the y position
if ( y < rect.y() ):
y = 0
elif ( rect.bottom() < y ):
y = rect.height()
else:
y -= rect.y()
self._saturation = ( x / float(rect.width()) ) * 255
self._lightness = (1 - ( y / float(rect.height()) )) * 255
self.emitColorChanged( self.color() )
self.repaint()
@pyqtSlot(bool)
def setShowAlpha( self, state ):
self._showAlpha = state
def showAlpha( self ):
return self._showAlpha
pyShowAlpha = pyqtProperty( 'bool', showAlpha, setShowAlpha )
class ColorPickerButton( QPushButton ):
colorPicked = pyqtSignal( QColor )
colorChanged = pyqtSignal( QColor )
def __init__( self, parent ):
QPushButton.__init__( self, parent )
self._cancelled = False
self._originalColor = None
self._color = QColor( 'black' )
self._colorPickerWidget = ColorPickerWidget( self )
self._colorPickerWidget.setWindowFlags( Qt.Popup )
self._colorPickerWidget.hide()
self._colorPickerWidget.installEventFilter( self )
self._colorPickerWidget.resize( 80, 80 )
self.refresh()
self._colorPickerWidget.colorChanged.connect( self.refresh )
self.clicked.connect( self.togglePopup )
def color( self ):
return self._color
def eventFilter( self, object, event ):
if ( event.type() == event.KeyPress ):
# cancel the change
if ( event.key() == Qt.Key_Escape ):
self._cancelled = True
self._colorPickerWidget.hide()
self.refresh()
# accept the color
elif ( event.key() in ( Qt.Key_Return, Qt.Key_Enter ) ):
color = self._colorPickerWidget.color()
self.setColor( color )
if ( not self.signalsBlocked() ):
self.colorPicked.emit( color )
self._colorPickerWidget.hide()
elif ( event.type() == event.Close ):
# accept the change
if ( not self._cancelled ):
color = self._colorPickerWidget.color()
self.setColor( color )
if ( not self.signalsBlocked() ):
self.colorPicked.emit( color )
return False
def refresh( self, color = None ):
if ( color == None ):
color = self.color()
palette = self.palette()
palette.setColor( palette.Button, color )
self.setPalette( palette )
def setColor( self, color ):
if ( color == self._color ):
return False
self._color = color
self.refresh()
if ( not self.signalsBlocked() ):
self.colorChanged.emit( color )
def togglePopup( self ):
if ( not self._colorPickerWidget.isVisible() ):
w = self.width()
if ( w < 120 ):
w = 120
self._cancelled = False
self._colorPickerWidget.resize( w, 120 )
self._colorPickerWidget.move( self.mapToGlobal( QPoint( 0, self.height() ) ) )
self._colorPickerWidget.setColor( self.color() )
self._colorPickerWidget.show()
if ( __name__ == '__main__' ):
from DTL.qt.QtGui import QVBoxLayout
from DTL.gui import Core, Dialog
dlg = Dialog()
dlg.setWindowTitle('Color Test')
layout = QVBoxLayout()
layout.addWidget( ColorPickerWidget(dlg) )
layout.addWidget( ColorPickerButton(dlg) )
dlg.setLayout(layout)
dlg.show()
Core.Start()
avg_line_length 31.6469 | max_line_length 181 | alphanum_fraction 0.560174
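A hypothetical sketch of how the ColorPickerButton above might be wired into an application; the import path and parent handling are assumptions, and the signal names come from the widget's own definitions.

```python
# Assumes the DTL package is installed and a Qt application is already running.
from DTL.qt.QtGui import QColor
from DTL.gui.widgets.colorpickerwidget import ColorPickerButton  # assumed module path

def on_color_picked(color: QColor):
    print("picked:", color.name())

button = ColorPickerButton(None)                 # parent widget omitted for the sketch
button.setColor(QColor('steelblue'))
button.colorPicked.connect(on_color_picked)      # emitted when the popup is accepted
button.colorChanged.connect(lambda c: print("changed:", c.name()))
```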
Row 5
hexsha ab9825a0d7618aa52766f1aeb328cf81680c4958 | size 47,888 | ext py | lang Python
max_stars: path sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2017_03_30/operations/_snapshots_operations.py | repo vincenttran-msft/azure-sdk-for-python | head 348b56f9f03eeb3f7b502eed51daf494ffff874d | licenses ["MIT"] | count 1 | events 2021-09-07T18:39:05.000Z – 2021-09-07T18:39:05.000Z
max_issues: path sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2017_03_30/operations/_snapshots_operations.py | repo vincenttran-msft/azure-sdk-for-python | head 348b56f9f03eeb3f7b502eed51daf494ffff874d | licenses ["MIT"] | count null | events null
max_forks: path sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2017_03_30/operations/_snapshots_operations.py | repo vincenttran-msft/azure-sdk-for-python | head 348b56f9f03eeb3f7b502eed51daf494ffff874d | licenses ["MIT"] | count 1 | events 2022-03-04T06:21:56.000Z – 2022-03-04T06:21:56.000Z
content:
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_create_or_update_request_initial(
subscription_id: str,
resource_group_name: str,
snapshot_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"snapshotName": _SERIALIZER.url("snapshot_name", snapshot_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_update_request_initial(
subscription_id: str,
resource_group_name: str,
snapshot_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"snapshotName": _SERIALIZER.url("snapshot_name", snapshot_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PATCH",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_get_request(
subscription_id: str,
resource_group_name: str,
snapshot_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"snapshotName": _SERIALIZER.url("snapshot_name", snapshot_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_delete_request_initial(
subscription_id: str,
resource_group_name: str,
snapshot_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"snapshotName": _SERIALIZER.url("snapshot_name", snapshot_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_by_resource_group_request(
subscription_id: str,
resource_group_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_request(
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/snapshots')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_grant_access_request_initial(
subscription_id: str,
resource_group_name: str,
snapshot_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}/beginGetAccess')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"snapshotName": _SERIALIZER.url("snapshot_name", snapshot_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_revoke_access_request_initial(
subscription_id: str,
resource_group_name: str,
snapshot_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}/endGetAccess')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"snapshotName": _SERIALIZER.url("snapshot_name", snapshot_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class SnapshotsOperations(object):
"""SnapshotsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2017_03_30.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _create_or_update_initial(
self,
resource_group_name: str,
snapshot_name: str,
snapshot: "_models.Snapshot",
**kwargs: Any
) -> "_models.Snapshot":
cls = kwargs.pop('cls', None) # type: ClsType["_models.Snapshot"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(snapshot, 'Snapshot')
request = build_create_or_update_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('Snapshot', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('Snapshot', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}'} # type: ignore
@distributed_trace
def begin_create_or_update(
self,
resource_group_name: str,
snapshot_name: str,
snapshot: "_models.Snapshot",
**kwargs: Any
) -> LROPoller["_models.Snapshot"]:
"""Creates or updates a snapshot.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param snapshot_name: The name of the snapshot that is being created. The name can't be changed
after the snapshot is created. Supported characters for the name are a-z, A-Z, 0-9 and _. The
max name length is 80 characters.
:type snapshot_name: str
:param snapshot: Snapshot object supplied in the body of the Put disk operation.
:type snapshot: ~azure.mgmt.compute.v2017_03_30.models.Snapshot
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either Snapshot or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2017_03_30.models.Snapshot]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.Snapshot"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
snapshot=snapshot,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('Snapshot', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}'} # type: ignore
def _update_initial(
self,
resource_group_name: str,
snapshot_name: str,
snapshot: "_models.SnapshotUpdate",
**kwargs: Any
) -> "_models.Snapshot":
cls = kwargs.pop('cls', None) # type: ClsType["_models.Snapshot"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(snapshot, 'SnapshotUpdate')
request = build_update_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
content_type=content_type,
json=_json,
template_url=self._update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('Snapshot', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('Snapshot', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}'} # type: ignore
@distributed_trace
def begin_update(
self,
resource_group_name: str,
snapshot_name: str,
snapshot: "_models.SnapshotUpdate",
**kwargs: Any
) -> LROPoller["_models.Snapshot"]:
"""Updates (patches) a snapshot.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param snapshot_name: The name of the snapshot that is being created. The name can't be changed
after the snapshot is created. Supported characters for the name are a-z, A-Z, 0-9 and _. The
max name length is 80 characters.
:type snapshot_name: str
:param snapshot: Snapshot object supplied in the body of the Patch snapshot operation.
:type snapshot: ~azure.mgmt.compute.v2017_03_30.models.SnapshotUpdate
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either Snapshot or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2017_03_30.models.Snapshot]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.Snapshot"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_initial(
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
snapshot=snapshot,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('Snapshot', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}'} # type: ignore
@distributed_trace
def get(
self,
resource_group_name: str,
snapshot_name: str,
**kwargs: Any
) -> "_models.Snapshot":
"""Gets information about a snapshot.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param snapshot_name: The name of the snapshot that is being created. The name can't be changed
after the snapshot is created. Supported characters for the name are a-z, A-Z, 0-9 and _. The
max name length is 80 characters.
:type snapshot_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Snapshot, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2017_03_30.models.Snapshot
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Snapshot"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('Snapshot', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}'} # type: ignore
def _delete_initial(
self,
resource_group_name: str,
snapshot_name: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}'} # type: ignore
@distributed_trace
def begin_delete(
self,
resource_group_name: str,
snapshot_name: str,
**kwargs: Any
) -> LROPoller["_models.OperationStatusResponse"]:
"""Deletes a snapshot.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param snapshot_name: The name of the snapshot that is being created. The name can't be changed
after the snapshot is created. Supported characters for the name are a-z, A-Z, 0-9 and _. The
max name length is 80 characters.
:type snapshot_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either OperationStatusResponse or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}'} # type: ignore
@distributed_trace
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any
) -> Iterable["_models.SnapshotList"]:
"""Lists snapshots under a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SnapshotList or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2017_03_30.models.SnapshotList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SnapshotList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_resource_group_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
template_url=self.list_by_resource_group.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_by_resource_group_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("SnapshotList", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots'} # type: ignore
@distributed_trace
def list(
self,
**kwargs: Any
) -> Iterable["_models.SnapshotList"]:
"""Lists snapshots under a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SnapshotList or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2017_03_30.models.SnapshotList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SnapshotList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("SnapshotList", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/snapshots'} # type: ignore
def _grant_access_initial(
self,
resource_group_name: str,
snapshot_name: str,
grant_access_data: "_models.GrantAccessData",
**kwargs: Any
) -> Optional["_models.AccessUri"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.AccessUri"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(grant_access_data, 'GrantAccessData')
request = build_grant_access_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
content_type=content_type,
json=_json,
template_url=self._grant_access_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('AccessUri', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_grant_access_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}/beginGetAccess'} # type: ignore
@distributed_trace
def begin_grant_access(
self,
resource_group_name: str,
snapshot_name: str,
grant_access_data: "_models.GrantAccessData",
**kwargs: Any
) -> LROPoller["_models.AccessUri"]:
"""Grants access to a snapshot.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param snapshot_name: The name of the snapshot that is being created. The name can't be changed
after the snapshot is created. Supported characters for the name are a-z, A-Z, 0-9 and _. The
max name length is 80 characters.
:type snapshot_name: str
:param grant_access_data: Access data object supplied in the body of the get snapshot access
operation.
:type grant_access_data: ~azure.mgmt.compute.v2017_03_30.models.GrantAccessData
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either AccessUri or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2017_03_30.models.AccessUri]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AccessUri"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._grant_access_initial(
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
grant_access_data=grant_access_data,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('AccessUri', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_grant_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}/beginGetAccess'} # type: ignore
def _revoke_access_initial(
self,
resource_group_name: str,
snapshot_name: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_revoke_access_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
template_url=self._revoke_access_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_revoke_access_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}/endGetAccess'} # type: ignore
@distributed_trace
def begin_revoke_access(
self,
resource_group_name: str,
snapshot_name: str,
**kwargs: Any
) -> LROPoller["_models.OperationStatusResponse"]:
"""Revokes access to a snapshot.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param snapshot_name: The name of the snapshot that is being created. The name can't be changed
after the snapshot is created. Supported characters for the name are a-z, A-Z, 0-9 and _. The
max name length is 80 characters.
:type snapshot_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either OperationStatusResponse or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._revoke_access_initial(
resource_group_name=resource_group_name,
snapshot_name=snapshot_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_revoke_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/snapshots/{snapshotName}/endGetAccess'} # type: ignore
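# ---------------------------------------------------------------------------
# Editor's hedged usage sketch (not part of the generated operations class).
# It shows how the list / begin_grant_access / begin_revoke_access operations
# defined above are typically driven through a ComputeManagementClient. The
# subscription, resource group and snapshot names are placeholders, and the
# exact client/profile wiring is an assumption for illustration only.
def _example_snapshot_access_flow():  # pragma: no cover - illustrative only
    from azure.identity import DefaultAzureCredential
    from azure.mgmt.compute import ComputeManagementClient
    from azure.mgmt.compute.v2017_03_30.models import GrantAccessData

    client = ComputeManagementClient(DefaultAzureCredential(), "<subscription-id>")

    # Paged listing: consumes the ItemPaged assembled by get_next/extract_data above.
    for snapshot in client.snapshots.list():
        print(snapshot.name)

    # Grant temporary read access, block on the LROPoller, then revoke it again.
    poller = client.snapshots.begin_grant_access(
        "<resource-group>",
        "<snapshot-name>",
        GrantAccessData(access="Read", duration_in_seconds=3600),
    )
    access_uri = poller.result()  # AccessUri carrying the SAS URL
    print(access_uri.access_sas)
    client.snapshots.begin_revoke_access("<resource-group>", "<snapshot-name>").result()
# ---------------------------------------------------------------------------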
| 42.871979
| 198
| 0.667432
|
6f117bed426f25777f0e49495842fc9f6a49f4ec
| 420
|
py
|
Python
|
src/dressup/__init__.py
|
paw-lu/dressup
|
d6b7971c1d1dd2e365974dda62e06eb5c65b85d2
|
[
"MIT"
] | 15
|
2020-05-23T20:47:47.000Z
|
2022-01-02T18:57:47.000Z
|
src/dressup/__init__.py
|
paw-lu/dressup
|
d6b7971c1d1dd2e365974dda62e06eb5c65b85d2
|
[
"MIT"
] | 154
|
2020-05-23T03:19:15.000Z
|
2021-09-10T03:21:21.000Z
|
src/dressup/__init__.py
|
pscosta5/dressup
|
d6b7971c1d1dd2e365974dda62e06eb5c65b85d2
|
[
"MIT"
] | 1
|
2021-04-13T16:11:13.000Z
|
2021-04-13T16:11:13.000Z
|
"""Dress up."""
from .converter import convert, show_all
try:
from importlib.metadata import version, PackageNotFoundError # type: ignore
except ImportError: # pragma: no cover
from importlib_metadata import version, PackageNotFoundError # type: ignore
try:
__version__ = version(__name__)
except PackageNotFoundError: # pragma: no cover
__version__ = "unknown"
__all__ = ["convert", "show_all"]
| 26.25
| 80
| 0.740476
|
a8ca4c0b625ea9ec1045355731708deaa480da1d
| 666
|
py
|
Python
|
bigquery/tests/integration/smoke_test.py
|
JensMadsen/gcloud-aio
|
0edf1f167a1f89470ff81a73ccf841c454e7bd0d
|
[
"MIT"
] | null | null | null |
bigquery/tests/integration/smoke_test.py
|
JensMadsen/gcloud-aio
|
0edf1f167a1f89470ff81a73ccf841c454e7bd0d
|
[
"MIT"
] | null | null | null |
bigquery/tests/integration/smoke_test.py
|
JensMadsen/gcloud-aio
|
0edf1f167a1f89470ff81a73ccf841c454e7bd0d
|
[
"MIT"
] | null | null | null |
import os
import uuid
import aiohttp
import pytest
from gcloud.aio.bigquery import make_stream_insert
@pytest.mark.asyncio
async def test_data_is_inserted():
project = os.environ['GCLOUD_PROJECT']
creds = os.environ['GOOGLE_APPLICATION_CREDENTIALS']
dataset_name = 'test'
table_name = 'test'
rows = [{'key': uuid.uuid4().hex, 'value': uuid.uuid4().hex}
for i in range(3)]
async with aiohttp.ClientSession() as session:
stream_insert = make_stream_insert(project, creds, dataset_name,
table_name, session=session)
result = await stream_insert(rows)
assert result
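# Editor's hedged sketch (not part of the test suite): the same stream-insert
# flow as the test above, runnable outside pytest with a plain asyncio loop.
# The environment variables and the 'test' dataset/table names simply mirror
# the test's own assumptions.
def insert_rows_example():  # pragma: no cover - illustrative only
    import asyncio

    async def _run():
        project = os.environ['GCLOUD_PROJECT']
        creds = os.environ['GOOGLE_APPLICATION_CREDENTIALS']
        rows = [{'key': uuid.uuid4().hex, 'value': uuid.uuid4().hex}]
        async with aiohttp.ClientSession() as session:
            stream_insert = make_stream_insert(project, creds, 'test', 'test',
                                               session=session)
            return await stream_insert(rows)

    return asyncio.get_event_loop().run_until_complete(_run())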
| 27.75
| 72
| 0.665165
|
46f57e8472be36b47102f4f4dcc7488e38be49f0
| 219
|
py
|
Python
|
pytest_drf/__init__.py
|
theY4Kman/pytest-drf
|
1c86e023ebe7353e89aa71a7fc3b15457b5b20bd
|
[
"MIT"
] | 58
|
2020-02-09T07:13:57.000Z
|
2021-12-06T10:00:15.000Z
|
pytest_drf/__init__.py
|
theY4Kman/pytest-drf
|
1c86e023ebe7353e89aa71a7fc3b15457b5b20bd
|
[
"MIT"
] | 10
|
2020-07-27T09:21:51.000Z
|
2021-09-11T20:14:45.000Z
|
pytest_drf/__init__.py
|
theY4Kman/pytest-drf
|
1c86e023ebe7353e89aa71a7fc3b15457b5b20bd
|
[
"MIT"
] | 5
|
2020-07-27T08:39:48.000Z
|
2021-12-26T07:08:55.000Z
|
import pkg_resources
__version__ = pkg_resources.get_distribution('pytest-drf').version
from .authentication import *
from .authorization import *
from .pagination import *
from .status import *
from .views import *
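# Editor's note (assumption, not from the package docs): the star imports above
# re-export pytest-drf's public helpers - view test base classes, HTTP-status,
# authentication/authorization and pagination mixins - so tests can import them
# directly from the top-level pytest_drf namespace.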
| 19.909091
| 66
| 0.789954
|
642a751589c0a2fb5d2c05e26b7317955fa62999
| 39,698
|
py
|
Python
|
qa/rpc-tests/segwit.py
|
gotaproblem/AuxPowCoin
|
eea038edf893677e85ca45eed047ff81092d3a0f
|
[
"MIT"
] | null | null | null |
qa/rpc-tests/segwit.py
|
gotaproblem/AuxPowCoin
|
eea038edf893677e85ca45eed047ff81092d3a0f
|
[
"MIT"
] | null | null | null |
qa/rpc-tests/segwit.py
|
gotaproblem/AuxPowCoin
|
eea038edf893677e85ca45eed047ff81092d3a0f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2016 The Apcoin Core developers
# Copyright (c) 2016-2019 The Mincoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test the SegWit changeover logic
#
from test_framework.test_framework import MincoinTestFramework
from test_framework.util import *
from test_framework.mininode import sha256, ripemd160, CTransaction, CTxIn, COutPoint, CTxOut
from test_framework.address import script_to_p2sh, key_to_p2pkh
from test_framework.script import CScript, OP_HASH160, OP_CHECKSIG, OP_0, hash160, OP_EQUAL, OP_DUP, OP_EQUALVERIFY, OP_1, OP_2, OP_CHECKMULTISIG, OP_TRUE
from io import BytesIO
from test_framework.mininode import ToHex, FromHex, COIN
NODE_0 = 0
NODE_1 = 1
NODE_2 = 2
WIT_V0 = 0
WIT_V1 = 1
def witness_script(version, pubkey):
if (version == 0):
pubkeyhash = bytes_to_hex_str(ripemd160(sha256(hex_str_to_bytes(pubkey))))
pkscript = "0014" + pubkeyhash
elif (version == 1):
# 1-of-1 multisig
scripthash = bytes_to_hex_str(sha256(hex_str_to_bytes("5121" + pubkey + "51ae")))
pkscript = "0020" + scripthash
else:
assert("Wrong version" == "0 or 1")
return pkscript
def addlength(script):
scriptlen = format(len(script)//2, 'x')
assert(len(scriptlen) == 2)
return scriptlen + script
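# Editor's worked example (comment only): for a compressed pubkey,
# witness_script(0, pubkey) yields "0014" + HASH160(pubkey), a 22-byte (0x16)
# P2WPKH program, so addlength() prepends "16"; witness_script(1, pubkey)
# yields "0020" + SHA256(1-of-1 multisig script), a 34-byte (0x22) program,
# so addlength() prepends "22". Note addlength() only supports scripts whose
# length fits in exactly two hex digits (16-255 bytes).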
def create_witnessprogram(version, node, utxo, pubkey, encode_p2sh, amount):
pkscript = witness_script(version, pubkey)
if (encode_p2sh):
p2sh_hash = bytes_to_hex_str(ripemd160(sha256(hex_str_to_bytes(pkscript))))
pkscript = "a914"+p2sh_hash+"87"
inputs = []
outputs = {}
inputs.append({ "txid" : utxo["txid"], "vout" : utxo["vout"]} )
DUMMY_P2SH = "2MySexEGVzZpRgNQ1JdjdP5bRETznm3roQ2" # P2SH of "OP_1 OP_DROP"
outputs[DUMMY_P2SH] = amount
tx_to_witness = node.createrawtransaction(inputs,outputs)
#replace dummy output with our own
tx_to_witness = tx_to_witness[0:110] + addlength(pkscript) + tx_to_witness[-8:]
return tx_to_witness
def send_to_witness(version, node, utxo, pubkey, encode_p2sh, amount, sign=True, insert_redeem_script=""):
tx_to_witness = create_witnessprogram(version, node, utxo, pubkey, encode_p2sh, amount)
if (sign):
signed = node.signrawtransaction(tx_to_witness)
assert("errors" not in signed or len(["errors"]) == 0)
return node.sendrawtransaction(signed["hex"])
else:
if (insert_redeem_script):
tx_to_witness = tx_to_witness[0:82] + addlength(insert_redeem_script) + tx_to_witness[84:]
return node.sendrawtransaction(tx_to_witness)
def getutxo(txid):
utxo = {}
utxo["vout"] = 0
utxo["txid"] = txid
return utxo
def find_unspent(node, min_value):
for utxo in node.listunspent():
if utxo['amount'] >= min_value:
return utxo
class SegWitTest(MincoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 3
def setup_network(self):
self.nodes = []
self.nodes.append(start_node(0, self.options.tmpdir, ["-logtimemicros", "-debug", "-walletprematurewitness", "-rpcserialversion=0"]))
self.nodes.append(start_node(1, self.options.tmpdir, ["-logtimemicros", "-debug", "-blockversion=4", "-promiscuousmempoolflags=517", "-prematurewitness", "-walletprematurewitness", "-rpcserialversion=1"]))
self.nodes.append(start_node(2, self.options.tmpdir, ["-logtimemicros", "-debug", "-blockversion=536870915", "-promiscuousmempoolflags=517", "-prematurewitness", "-walletprematurewitness"]))
connect_nodes(self.nodes[1], 0)
connect_nodes(self.nodes[2], 1)
connect_nodes(self.nodes[0], 2)
self.is_network_split = False
self.sync_all()
def success_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("499.998"), sign, redeem_script)
block = node.generate(1)
assert_equal(len(node.getblock(block[0])["tx"]), 2)
sync_blocks(self.nodes)
def skip_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("499.998"), sign, redeem_script)
block = node.generate(1)
assert_equal(len(node.getblock(block[0])["tx"]), 1)
sync_blocks(self.nodes)
def fail_accept(self, node, txid, sign, redeem_script=""):
try:
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
except JSONRPCException as exp:
assert(exp.error["code"] == -26)
else:
raise AssertionError("Tx should not have been accepted")
def fail_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("499.998"), sign, redeem_script)
try:
node.generate(1)
except JSONRPCException as exp:
assert(exp.error["code"] == -1)
else:
raise AssertionError("Created valid block when TestBlockValidity should have failed")
sync_blocks(self.nodes)
def run_test(self):
self.nodes[0].generate(161) #block 161
print("Verify sigops are counted in GBT with pre-BIP141 rules before the fork")
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
tmpl = self.nodes[0].getblocktemplate({})
assert(tmpl['sizelimit'] == 1000000)
assert('weightlimit' not in tmpl)
assert(tmpl['sigoplimit'] == 20000)
assert(tmpl['transactions'][0]['hash'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 2)
tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']})
assert(tmpl['sizelimit'] == 1000000)
assert('weightlimit' not in tmpl)
assert(tmpl['sigoplimit'] == 20000)
assert(tmpl['transactions'][0]['hash'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 2)
self.nodes[0].generate(1) #block 162
balance_presetup = self.nodes[0].getbalance()
self.pubkey = []
p2sh_ids = [] # p2sh_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE embedded in p2sh
wit_ids = [] # wit_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE via bare witness
for i in range(3):
newaddress = self.nodes[i].getnewaddress()
self.pubkey.append(self.nodes[i].validateaddress(newaddress)["pubkey"])
multiaddress = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]])
self.nodes[i].addwitnessaddress(newaddress)
self.nodes[i].addwitnessaddress(multiaddress)
p2sh_ids.append([])
wit_ids.append([])
for v in range(2):
p2sh_ids[i].append([])
wit_ids[i].append([])
for i in range(5):
for n in range(3):
for v in range(2):
wit_ids[n][v].append(send_to_witness(v, self.nodes[0], find_unspent(self.nodes[0], 500), self.pubkey[n], False, Decimal("499.999")))
p2sh_ids[n][v].append(send_to_witness(v, self.nodes[0], find_unspent(self.nodes[0], 500), self.pubkey[n], True, Decimal("499.999")))
self.nodes[0].generate(1) #block 163
sync_blocks(self.nodes)
# Make sure all nodes recognize the transactions as theirs
assert_equal(self.nodes[0].getbalance(), balance_presetup - 60*500 + 20*Decimal("499.999") + 500)
assert_equal(self.nodes[1].getbalance(), 20*Decimal("499.999"))
assert_equal(self.nodes[2].getbalance(), 20*Decimal("499.999"))
self.nodes[0].generate(260) #block 423
sync_blocks(self.nodes)
print("Verify default node can't accept any witness format txs before fork")
# unsigned, no scriptsig
self.fail_accept(self.nodes[0], wit_ids[NODE_0][WIT_V0][0], False)
self.fail_accept(self.nodes[0], wit_ids[NODE_0][WIT_V1][0], False)
self.fail_accept(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], False)
self.fail_accept(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], False)
# unsigned with redeem script
self.fail_accept(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], False, addlength(witness_script(0, self.pubkey[0])))
self.fail_accept(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], False, addlength(witness_script(1, self.pubkey[0])))
# signed
self.fail_accept(self.nodes[0], wit_ids[NODE_0][WIT_V0][0], True)
self.fail_accept(self.nodes[0], wit_ids[NODE_0][WIT_V1][0], True)
self.fail_accept(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], True)
self.fail_accept(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], True)
print("Verify witness txs are skipped for mining before the fork")
self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][0], True) #block 424
self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][0], True) #block 425
self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][0], True) #block 426
self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][0], True) #block 427
# TODO: An old node would see these txs without witnesses and be able to mine them
print("Verify unsigned bare witness txs in versionbits-setting blocks are valid before the fork")
self.success_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][1], False) #block 428
self.success_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][1], False) #block 429
print("Verify unsigned p2sh witness txs without a redeem script are invalid")
self.fail_accept(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][1], False)
self.fail_accept(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][1], False)
print("Verify unsigned p2sh witness txs with a redeem script in versionbits-settings blocks are valid before the fork")
self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][1], False, addlength(witness_script(0, self.pubkey[2]))) #block 430
self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][1], False, addlength(witness_script(1, self.pubkey[2]))) #block 431
print("Verify previous witness txs skipped for mining can now be mined")
assert_equal(len(self.nodes[2].getrawmempool()), 4)
block = self.nodes[2].generate(1) #block 432 (first block with new rules; 432 = 144 * 3)
sync_blocks(self.nodes)
assert_equal(len(self.nodes[2].getrawmempool()), 0)
segwit_tx_list = self.nodes[2].getblock(block[0])["tx"]
assert_equal(len(segwit_tx_list), 5)
print("Verify block and transaction serialization rpcs return differing serializations depending on rpc serialization flag")
assert(self.nodes[2].getblock(block[0], False) != self.nodes[0].getblock(block[0], False))
assert(self.nodes[1].getblock(block[0], False) == self.nodes[2].getblock(block[0], False))
for i in range(len(segwit_tx_list)):
tx = FromHex(CTransaction(), self.nodes[2].gettransaction(segwit_tx_list[i])["hex"])
assert(self.nodes[2].getrawtransaction(segwit_tx_list[i]) != self.nodes[0].getrawtransaction(segwit_tx_list[i]))
assert(self.nodes[1].getrawtransaction(segwit_tx_list[i], 0) == self.nodes[2].getrawtransaction(segwit_tx_list[i]))
assert(self.nodes[0].getrawtransaction(segwit_tx_list[i]) != self.nodes[2].gettransaction(segwit_tx_list[i])["hex"])
assert(self.nodes[1].getrawtransaction(segwit_tx_list[i]) == self.nodes[2].gettransaction(segwit_tx_list[i])["hex"])
assert(self.nodes[0].getrawtransaction(segwit_tx_list[i]) == bytes_to_hex_str(tx.serialize_without_witness()))
print("Verify witness txs without witness data are invalid after the fork")
self.fail_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][2], False)
self.fail_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][2], False)
self.fail_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][2], False, addlength(witness_script(0, self.pubkey[2])))
self.fail_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][2], False, addlength(witness_script(1, self.pubkey[2])))
print("Verify default node can now use witness txs")
self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V0][0], True) #block 432
self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V1][0], True) #block 433
self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], True) #block 434
self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], True) #block 435
print("Verify sigops are counted in GBT with BIP141 rules after the fork")
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']})
assert(tmpl['sizelimit'] >= 3999577) # actual maximum size is lower due to minimum mandatory non-witness data
assert(tmpl['weightlimit'] == 4000000)
assert(tmpl['sigoplimit'] == 80000)
assert(tmpl['transactions'][0]['txid'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 8)
self.nodes[0].generate(1) # Mine a block to clear the gbt cache
print("Non-segwit miners are able to use GBT response after activation.")
# Create a 3-tx chain: tx1 (non-segwit input, paying to a segwit output) ->
# tx2 (segwit input, paying to a non-segwit output) ->
# tx3 (non-segwit input, paying to a non-segwit output).
# tx1 is allowed to appear in the block, but no others.
txid1 = send_to_witness(1, self.nodes[0], find_unspent(self.nodes[0], 500), self.pubkey[0], False, Decimal("499.996"))
hex_tx = self.nodes[0].gettransaction(txid1)['hex']
tx = FromHex(CTransaction(), hex_tx)
assert(tx.wit.is_null()) # This should not be a segwit input
assert(txid1 in self.nodes[0].getrawmempool())
# Now create tx2, which will spend from txid1.
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(int(txid1, 16), 0), b''))
tx.vout.append(CTxOut(int(499.99*COIN), CScript([OP_TRUE])))
tx2_hex = self.nodes[0].signrawtransaction(ToHex(tx))['hex']
txid2 = self.nodes[0].sendrawtransaction(tx2_hex)
tx = FromHex(CTransaction(), tx2_hex)
assert(not tx.wit.is_null())
# Now create tx3, which will spend from txid2
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(int(txid2, 16), 0), b""))
tx.vout.append(CTxOut(int(499.95*COIN), CScript([OP_TRUE]))) # Huge fee
tx.calc_sha256()
txid3 = self.nodes[0].sendrawtransaction(ToHex(tx))
assert(tx.wit.is_null())
assert(txid3 in self.nodes[0].getrawmempool())
# Now try calling getblocktemplate() without segwit support.
template = self.nodes[0].getblocktemplate()
# Check that tx1 is the only transaction of the 3 in the template.
template_txids = [ t['txid'] for t in template['transactions'] ]
assert(txid2 not in template_txids and txid3 not in template_txids)
assert(txid1 in template_txids)
# Check that running with segwit support results in all 3 being included.
template = self.nodes[0].getblocktemplate({"rules": ["segwit"]})
template_txids = [ t['txid'] for t in template['transactions'] ]
assert(txid1 in template_txids)
assert(txid2 in template_txids)
assert(txid3 in template_txids)
# Mine a block to clear the gbt cache again.
self.nodes[0].generate(1)
print("Verify behaviour of importaddress, addwitnessaddress and listunspent")
# Some public keys to be used later
pubkeys = [
"0363D44AABD0F1699138239DF2F042C3282C0671CC7A76826A55C8203D90E39242", # cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb
"02D3E626B3E616FC8662B489C123349FECBFC611E778E5BE739B257EAE4721E5BF", # cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97
"04A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538A62F5BD8EC85C2477F39650BD391EA6250207065B2A81DA8B009FC891E898F0E", # 91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV
"02A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538", # cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd
"036722F784214129FEB9E8129D626324F3F6716555B603FFE8300BBCB882151228", # cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66
"0266A8396EE936BF6D99D17920DB21C6C7B1AB14C639D5CD72B300297E416FD2EC", # cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K
"0450A38BD7F0AC212FEBA77354A9B036A32E0F7C81FC4E0C5ADCA7C549C4505D2522458C2D9AE3CEFD684E039194B72C8A10F9CB9D4764AB26FCC2718D421D3B84", # 92h2XPssjBpsJN5CqSP7v9a7cf2kgDunBC6PDFwJHMACM1rrVBJ
]
# Import a compressed key and an uncompressed key, generate some multisig addresses
self.nodes[0].importprivkey("92e6XLo5jVAVwrQKPNTs93oQco8f8sDNBcpv73Dsrs397fQtFQn")
uncompressed_spendable_address = ["mvozP4UwyGD2mGZU4D2eMvMLPB9WkMmMQu"]
self.nodes[0].importprivkey("cNC8eQ5dg3mFAVePDX4ddmPYpPbw41r9bm2jd1nLJT77e6RrzTRR")
compressed_spendable_address = ["mmWQubrDomqpgSYekvsU7HWEVjLFHAakLe"]
assert ((self.nodes[0].validateaddress(uncompressed_spendable_address[0])['iscompressed'] == False))
assert ((self.nodes[0].validateaddress(compressed_spendable_address[0])['iscompressed'] == True))
self.nodes[0].importpubkey(pubkeys[0])
compressed_solvable_address = [key_to_p2pkh(pubkeys[0])]
self.nodes[0].importpubkey(pubkeys[1])
compressed_solvable_address.append(key_to_p2pkh(pubkeys[1]))
self.nodes[0].importpubkey(pubkeys[2])
uncompressed_solvable_address = [key_to_p2pkh(pubkeys[2])]
spendable_anytime = [] # These outputs should be seen anytime after importprivkey and addmultisigaddress
spendable_after_importaddress = [] # These outputs should be seen after importaddress
solvable_after_importaddress = [] # These outputs should be seen after importaddress but not spendable
unsolvable_after_importaddress = [] # These outputs should be unsolvable after importaddress
solvable_anytime = [] # These outputs should be solvable after importpubkey
unseen_anytime = [] # These outputs should never be seen
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]]))
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]]))
compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]]))
uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], uncompressed_solvable_address[0]]))
compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]]))
compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], compressed_solvable_address[1]]))
unknown_address = ["mtKKyoHabkk6e4ppT7NaM7THqPUt7AzPrT", "2NDP3jLWAFT8NDAiUa9qiE6oBt2awmMq7Dx"]
# Test multisig_without_privkey
# We have 2 public keys without private keys, use addmultisigaddress to add to wallet.
# Money sent to the P2SH of this multisig should only be seen after importaddress of the Base58 P2SH address.
multisig_without_privkey_address = self.nodes[0].addmultisigaddress(2, [pubkeys[3], pubkeys[4]])
script = CScript([OP_2, hex_str_to_bytes(pubkeys[3]), hex_str_to_bytes(pubkeys[4]), OP_2, OP_CHECKMULTISIG])
solvable_after_importaddress.append(CScript([OP_HASH160, hash160(script), OP_EQUAL]))
for i in compressed_spendable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# bare and p2sh multisig with compressed keys should always be spendable
spendable_anytime.extend([bare, p2sh])
# P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after direct importaddress
spendable_after_importaddress.extend([p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# normal P2PKH and P2PK with compressed keys should always be spendable
spendable_anytime.extend([p2pkh, p2pk])
# P2SH_P2PK, P2SH_P2PKH, and witness with compressed keys are spendable after direct importaddress
spendable_after_importaddress.extend([p2wpkh, p2sh_p2wpkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
for i in uncompressed_spendable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# bare and p2sh multisig with uncompressed keys should always be spendable
spendable_anytime.extend([bare, p2sh])
# P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
unseen_anytime.extend([p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# normal P2PKH and P2PK with uncompressed keys should always be spendable
spendable_anytime.extend([p2pkh, p2pk])
# P2SH_P2PK and P2SH_P2PKH are spendable after direct importaddress
spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh])
# witness with uncompressed keys are never seen
unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
for i in compressed_solvable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
# Multisig without private keys is not seen after addmultisigaddress, but is seen after importaddress
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
solvable_after_importaddress.extend([bare, p2sh, p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# normal P2PKH and P2PK with compressed keys should always be seen
solvable_anytime.extend([p2pkh, p2pk])
# P2SH_P2PK, P2SH_P2PKH, and witness with compressed keys are seen after direct importaddress
solvable_after_importaddress.extend([p2wpkh, p2sh_p2wpkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
for i in uncompressed_solvable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# Bare and P2SH uncompressed multisig without private keys are not seen after addmultisigaddress, but are seen after importaddress
solvable_after_importaddress.extend([bare, p2sh])
# P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
unseen_anytime.extend([p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# normal P2PKH and P2PK with uncompressed keys should always be seen
solvable_anytime.extend([p2pkh, p2pk])
# P2SH_P2PK, P2SH_P2PKH with uncompressed keys are seen after direct importaddress
solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh])
# witness with uncompressed keys are never seen
unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
op1 = CScript([OP_1])
op0 = CScript([OP_0])
# 2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe is the P2SH(P2PKH) version of mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V
unsolvable_address = ["mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V", "2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe", script_to_p2sh(op1), script_to_p2sh(op0)]
unsolvable_address_key = hex_str_to_bytes("02341AEC7587A51CDE5279E0630A531AEA2615A9F80B17E8D9376327BAEAA59E3D")
unsolvablep2pkh = CScript([OP_DUP, OP_HASH160, hash160(unsolvable_address_key), OP_EQUALVERIFY, OP_CHECKSIG])
unsolvablep2wshp2pkh = CScript([OP_0, sha256(unsolvablep2pkh)])
p2shop0 = CScript([OP_HASH160, hash160(op0), OP_EQUAL])
p2wshop1 = CScript([OP_0, sha256(op1)])
unsolvable_after_importaddress.append(unsolvablep2pkh)
unsolvable_after_importaddress.append(unsolvablep2wshp2pkh)
unsolvable_after_importaddress.append(op1) # OP_1 will be imported as script
unsolvable_after_importaddress.append(p2wshop1)
unseen_anytime.append(op0) # OP_0 will be imported as P2SH address with no script provided
unsolvable_after_importaddress.append(p2shop0)
spendable_txid = []
solvable_txid = []
spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime, 2))
solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime, 1))
self.mine_and_test_listunspent(spendable_after_importaddress + solvable_after_importaddress + unseen_anytime + unsolvable_after_importaddress, 0)
importlist = []
for i in compressed_spendable_address + uncompressed_spendable_address + compressed_solvable_address + uncompressed_solvable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
bare = hex_str_to_bytes(v['hex'])
importlist.append(bytes_to_hex_str(bare))
importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(bare)])))
else:
pubkey = hex_str_to_bytes(v['pubkey'])
p2pk = CScript([pubkey, OP_CHECKSIG])
p2pkh = CScript([OP_DUP, OP_HASH160, hash160(pubkey), OP_EQUALVERIFY, OP_CHECKSIG])
importlist.append(bytes_to_hex_str(p2pk))
importlist.append(bytes_to_hex_str(p2pkh))
importlist.append(bytes_to_hex_str(CScript([OP_0, hash160(pubkey)])))
importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pk)])))
importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pkh)])))
importlist.append(bytes_to_hex_str(unsolvablep2pkh))
importlist.append(bytes_to_hex_str(unsolvablep2wshp2pkh))
importlist.append(bytes_to_hex_str(op1))
importlist.append(bytes_to_hex_str(p2wshop1))
for i in importlist:
try:
self.nodes[0].importaddress(i,"",False,True)
except JSONRPCException as exp:
assert_equal(exp.error["message"], "The wallet already contains the private key for this address or script")
self.nodes[0].importaddress(script_to_p2sh(op0)) # import OP_0 as address only
self.nodes[0].importaddress(multisig_without_privkey_address) # Test multisig_without_privkey
spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2))
solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1))
self.mine_and_test_listunspent(unsolvable_after_importaddress, 1)
self.mine_and_test_listunspent(unseen_anytime, 0)
# addwitnessaddress should refuse to return a witness address if an uncompressed key is used or the address is
# not in the wallet
# note that no witness address should be returned by unsolvable addresses
# the multisig_without_privkey_address will fail because its keys were not added with importpubkey
for i in uncompressed_spendable_address + uncompressed_solvable_address + unknown_address + unsolvable_address + [multisig_without_privkey_address]:
try:
self.nodes[0].addwitnessaddress(i)
except JSONRPCException as exp:
assert_equal(exp.error["message"], "Public key or redeemscript not known to wallet, or the key is uncompressed")
else:
assert(False)
for i in compressed_spendable_address + compressed_solvable_address:
witaddress = self.nodes[0].addwitnessaddress(i)
# addwitnessaddress should return the same address if it is a known P2SH-witness address
assert_equal(witaddress, self.nodes[0].addwitnessaddress(witaddress))
spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2))
solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1))
self.mine_and_test_listunspent(unsolvable_after_importaddress, 1)
self.mine_and_test_listunspent(unseen_anytime, 0)
# Repeat some tests. This time we don't add witness scripts with importaddress
# Import a compressed key and an uncompressed key, generate some multisig addresses
self.nodes[0].importprivkey("927pw6RW8ZekycnXqBQ2JS5nPyo1yRfGNN8oq74HeddWSpafDJH")
uncompressed_spendable_address = ["mguN2vNSCEUh6rJaXoAVwY3YZwZvEmf5xi"]
self.nodes[0].importprivkey("cMcrXaaUC48ZKpcyydfFo8PxHAjpsYLhdsp6nmtB3E2ER9UUHWnw")
compressed_spendable_address = ["n1UNmpmbVUJ9ytXYXiurmGPQ3TRrXqPWKL"]
self.nodes[0].importpubkey(pubkeys[5])
compressed_solvable_address = [key_to_p2pkh(pubkeys[5])]
self.nodes[0].importpubkey(pubkeys[6])
uncompressed_solvable_address = [key_to_p2pkh(pubkeys[6])]
spendable_after_addwitnessaddress = []  # These outputs should be seen after addwitnessaddress
solvable_after_addwitnessaddress = []  # These outputs should be seen after addwitnessaddress but not spendable
unseen_anytime = [] # These outputs should never be seen
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]]))
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]]))
compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]]))
uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], uncompressed_solvable_address[0]]))
compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]]))
premature_witaddress = []
for i in compressed_spendable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after addwitnessaddress
spendable_after_addwitnessaddress.extend([p2wsh, p2sh_p2wsh])
premature_witaddress.append(script_to_p2sh(p2wsh))
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# P2WPKH, P2SH_P2WPKH are spendable after addwitnessaddress
spendable_after_addwitnessaddress.extend([p2wpkh, p2sh_p2wpkh])
premature_witaddress.append(script_to_p2sh(p2wpkh))
for i in uncompressed_spendable_address + uncompressed_solvable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
unseen_anytime.extend([p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# P2WPKH, P2SH_P2WPKH with uncompressed keys are never seen
unseen_anytime.extend([p2wpkh, p2sh_p2wpkh])
for i in compressed_solvable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
# P2WSH and P2SH(P2WSH) multisig without private keys are seen after addwitnessaddress
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
solvable_after_addwitnessaddress.extend([p2wsh, p2sh_p2wsh])
premature_witaddress.append(script_to_p2sh(p2wsh))
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# P2WPKH and P2SH(P2WPKH) with compressed keys are seen after addwitnessaddress
solvable_after_addwitnessaddress.extend([p2wpkh, p2sh_p2wpkh])
premature_witaddress.append(script_to_p2sh(p2wpkh))
self.mine_and_test_listunspent(spendable_after_addwitnessaddress + solvable_after_addwitnessaddress + unseen_anytime, 0)
# addwitnessaddress should refuse to return a witness address if an uncompressed key is used
# note that a multisig address returned by addmultisigaddress is not solvable until it is added with importaddress
# premature_witaddress are not accepted until the script is added with addwitnessaddress first
for i in uncompressed_spendable_address + uncompressed_solvable_address + premature_witaddress + [compressed_solvable_address[1]]:
try:
self.nodes[0].addwitnessaddress(i)
except JSONRPCException as exp:
assert_equal(exp.error["message"], "Public key or redeemscript not known to wallet, or the key is uncompressed")
else:
assert(False)
# after importaddress it should pass addwitnessaddress
v = self.nodes[0].validateaddress(compressed_solvable_address[1])
self.nodes[0].importaddress(v['hex'],"",False,True)
for i in compressed_spendable_address + compressed_solvable_address + premature_witaddress:
witaddress = self.nodes[0].addwitnessaddress(i)
assert_equal(witaddress, self.nodes[0].addwitnessaddress(witaddress))
spendable_txid.append(self.mine_and_test_listunspent(spendable_after_addwitnessaddress, 2))
solvable_txid.append(self.mine_and_test_listunspent(solvable_after_addwitnessaddress, 1))
self.mine_and_test_listunspent(unseen_anytime, 0)
# Check that spendable outputs are really spendable
self.create_and_mine_tx_from_txids(spendable_txid)
# import all the private keys so solvable addresses become spendable
self.nodes[0].importprivkey("cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb")
self.nodes[0].importprivkey("cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97")
self.nodes[0].importprivkey("91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV")
self.nodes[0].importprivkey("cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd")
self.nodes[0].importprivkey("cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66")
self.nodes[0].importprivkey("cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K")
self.create_and_mine_tx_from_txids(solvable_txid)
def mine_and_test_listunspent(self, script_list, ismine):
utxo = find_unspent(self.nodes[0], 50)
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(int('0x'+utxo['txid'],0), utxo['vout'])))
for i in script_list:
tx.vout.append(CTxOut(10000000, i))
tx.rehash()
signresults = self.nodes[0].signrawtransaction(bytes_to_hex_str(tx.serialize_without_witness()))['hex']
txid = self.nodes[0].sendrawtransaction(signresults, True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
watchcount = 0
spendcount = 0
for i in self.nodes[0].listunspent():
if (i['txid'] == txid):
watchcount += 1
if (i['spendable'] == True):
spendcount += 1
if (ismine == 2):
assert_equal(spendcount, len(script_list))
elif (ismine == 1):
assert_equal(watchcount, len(script_list))
assert_equal(spendcount, 0)
else:
assert_equal(watchcount, 0)
return txid
def p2sh_address_to_script(self,v):
bare = CScript(hex_str_to_bytes(v['hex']))
p2sh = CScript(hex_str_to_bytes(v['scriptPubKey']))
p2wsh = CScript([OP_0, sha256(bare)])
p2sh_p2wsh = CScript([OP_HASH160, hash160(p2wsh), OP_EQUAL])
return([bare, p2sh, p2wsh, p2sh_p2wsh])
def p2pkh_address_to_script(self,v):
pubkey = hex_str_to_bytes(v['pubkey'])
p2wpkh = CScript([OP_0, hash160(pubkey)])
p2sh_p2wpkh = CScript([OP_HASH160, hash160(p2wpkh), OP_EQUAL])
p2pk = CScript([pubkey, OP_CHECKSIG])
p2pkh = CScript(hex_str_to_bytes(v['scriptPubKey']))
p2sh_p2pk = CScript([OP_HASH160, hash160(p2pk), OP_EQUAL])
p2sh_p2pkh = CScript([OP_HASH160, hash160(p2pkh), OP_EQUAL])
p2wsh_p2pk = CScript([OP_0, sha256(p2pk)])
p2wsh_p2pkh = CScript([OP_0, sha256(p2pkh)])
p2sh_p2wsh_p2pk = CScript([OP_HASH160, hash160(p2wsh_p2pk), OP_EQUAL])
p2sh_p2wsh_p2pkh = CScript([OP_HASH160, hash160(p2wsh_p2pkh), OP_EQUAL])
return [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]
def create_and_mine_tx_from_txids(self, txids, success = True):
tx = CTransaction()
for i in txids:
txtmp = CTransaction()
txraw = self.nodes[0].getrawtransaction(i)
f = BytesIO(hex_str_to_bytes(txraw))
txtmp.deserialize(f)
for j in range(len(txtmp.vout)):
tx.vin.append(CTxIn(COutPoint(int('0x'+i,0), j)))
tx.vout.append(CTxOut(0, CScript()))
tx.rehash()
signresults = self.nodes[0].signrawtransaction(bytes_to_hex_str(tx.serialize_without_witness()))['hex']
self.nodes[0].sendrawtransaction(signresults, True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
if __name__ == '__main__':
SegWitTest().main()
| 60.057489
| 213
| 0.687793
|
f71431e15f97613abc12e56b17caf9d892de3bd9
| 1,359
|
py
|
Python
|
setup.py
|
butla/bravado-falcon
|
2c377db486150a6e0b93a4fb5970be9cf3e769d0
|
[
"MIT"
] | 2
|
2017-01-16T07:51:35.000Z
|
2020-02-17T21:44:13.000Z
|
setup.py
|
butla/bravado-falcon
|
2c377db486150a6e0b93a4fb5970be9cf3e769d0
|
[
"MIT"
] | null | null | null |
setup.py
|
butla/bravado-falcon
|
2c377db486150a6e0b93a4fb5970be9cf3e769d0
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os.path
from setuptools import setup
project_name = 'bravado-falcon'
version = '0.1.0'
setup_dir = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(setup_dir, 'requirements.txt')) as req_file:
requirements = [lib.split('==')[0] for lib in req_file.readlines()]
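# Editor's note: the split above strips version pins, so a line like
# "some-dependency==1.2.3" (illustrative name) contributes just
# "some-dependency" to install_requires rather than a hard pin.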
with open(os.path.join(setup_dir, 'README.rst')) as readme_file:
readme = readme_file.read()
setup(
name=project_name,
version=version,
description='Integration of Falcon API unit tests with Bravado.',
long_description=readme,
author='Michał Bultrowicz',
author_email='michal.bultrowicz@gmail.com',
url='https://github.com/butla/bravado-falcon',
packages=[
project_name.replace('-', '_'),
],
package_dir={project_name: project_name},
include_package_data=True,
install_requires=requirements,
license="MIT",
keywords='falcon bravado test',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
],
)
| 32.357143
| 71
| 0.65195
|
88907f64eccf9178bc72de9e7da73e817e5bdaf4
| 25,887
|
py
|
Python
|
test/bindery/test_mutation.py
|
zepheira/amara
|
d3ffe07d6e2266b34d72b012a82d572c8edbf1e7
|
[
"Apache-2.0"
] | 6
|
2015-01-30T03:50:36.000Z
|
2022-03-20T16:09:58.000Z
|
test/bindery/test_mutation.py
|
zepheira/amara
|
d3ffe07d6e2266b34d72b012a82d572c8edbf1e7
|
[
"Apache-2.0"
] | 2
|
2015-02-04T17:18:47.000Z
|
2019-09-27T23:39:52.000Z
|
test/bindery/test_mutation.py
|
zepheira/amara
|
d3ffe07d6e2266b34d72b012a82d572c8edbf1e7
|
[
"Apache-2.0"
] | 6
|
2015-02-04T16:16:18.000Z
|
2019-10-30T20:07:48.000Z
|
# Testing new amara.tree API
# Based on Amara1.x Tests mutation.py
import unittest
import amara
from amara import tree, bindery
from amara.lib import treecompare
from xml.dom import Node
XMLDECL = '<?xml version="1.0" encoding="UTF-8"?>\n'
ATTRIBUTE_NODE = tree.attribute.xml_type
def test_simple_attr_update1():
EXPECTED = """<a x="2"/>"""
doc = bindery.parse('<a x="1"/>')
doc.a.x = unicode(int(doc.a.x)+1)
treecompare.check_xml(doc.xml_encode(), XMLDECL+EXPECTED)
return
def test_simple_attr_update2():
EXPECTED = """<a xmlns="urn:bogus:x" x="2"/>"""
doc = bindery.parse('<a xmlns="urn:bogus:x" x="1"/>')
doc.a.x = unicode(int(doc.a.x)+1)
treecompare.check_xml(doc.xml_encode(), XMLDECL+EXPECTED)
return
def test_simple_attr_update3():
EXPECTED = """<n:a xmlns:n="urn:bogus:x" x="2"/>"""
doc = bindery.parse('<n:a xmlns:n="urn:bogus:x" x="1"/>')
doc.a.x = unicode(int(doc.a.x)+1)
treecompare.check_xml(doc.xml_encode(), XMLDECL+EXPECTED)
return
def test_simple_elem_and_attr_addition1():
EXPECTED = """<spam><eggs eggs="1"/><eggs eggs="2"/></spam>"""
doc = bindery.nodes.entity_base()
doc.xml_append(doc.xml_element_factory(None, u'spam'))
doc.spam.xml_append_fragment('<eggs eggs="1"/>')
doc.spam.xml_append_fragment('<eggs eggs="2"/>')
treecompare.check_xml(doc.xml_encode(), XMLDECL+EXPECTED)
result = [ k for k in doc.spam.eggs if 'eggs' in dir(k) ]
assert len(result) == 2, result
#unittest.TestCase.assertEquals([k for k in dir(doc.spam.eggs) if 'eggs' in k], [])
return
#XXX The rest are in old unittest style. Probably best to add new test cases above in nose test style
import cStringIO
output = cStringIO.StringIO()
class TestBasicMods(unittest.TestCase):
#def setUp(self):
# return
def compare_output(self, doc, expected):
"""
Auxiliary method for testing output purposes.
"""
#output = cStringIO.StringIO()
#xml_print(doc, stream=output)
return self.assertEqual(doc.xml_encode(), expected)
def test_basic_tree_create_doc1(self):
EXPECTED = "<A/>"
doc = bindery.nodes.entity_base()
A = doc.xml_element_factory(None, u'A')
doc.xml_append(A)
self.assertEqual(len(doc.xml_select(u'//A')), 1)
self.assertEqual(len(doc.xml_children[0]), 1)
self.assertEqual(len(doc.xml_children[0].xml_children), 0)
self.assertEqual(len(doc.A.xml_children), 0)
self.compare_output(doc, XMLDECL+EXPECTED)
#self.assert_(isinstance(doc.xbel.title, unicode))
#self.assertRaises(AttributeError, binding.xbel.folder.)
return
def test_basic_tree_create_doc2(self):
EXPECTED = '<A xmlns:ns="urn:bogus" ns:a="b"/>'
doc = bindery.nodes.entity_base()
A = doc.xml_element_factory(None, u'A')
doc.xml_append(A)
doc.A.xmlns_attributes[u'ns']= u'urn:bogus'
doc.A.xml_attributes[u'ns:a'] = u'b'
self.assertEqual(len(list(doc.A)), 1)
self.assertEqual(len(doc.A.xml_children), 0)
self.compare_output(doc, XMLDECL+EXPECTED)
#self.assert_(isinstance(doc.xbel.title, unicode))
#self.assertRaises(AttributeError, binding.xbel.folder.)
return
def test_basic_tree_create_doc3(self):
EXPECTED = '<A a="b"/>'
#Namespace-free attr creation, abbreviated
doc = bindery.nodes.entity_base()
A = doc.xml_element_factory(None, u'A')
doc.xml_append(A)
doc.A.xml_attributes[u'a'] = u'b'
self.assertEqual(len(list(doc.A)), 1)
self.assertEqual(len(doc.A.xml_children), 0)
self.compare_output(doc, XMLDECL+EXPECTED)
#Namespace-free attr creation, full
#self.assert_(isinstance(doc.xbel.title, unicode))
#self.assertRaises(AttributeError, binding.xbel.folder.)
return
def test_basic_tree_create_doc4(self):
doc = bindery.nodes.entity_base()
A = doc.xml_element_factory(None, u'A')
doc.xml_append(A)
B = doc.xml_element_factory(None, u'B')
doc.A.xml_append(B)
doc.A.B.xmlns_attributes[u'ns']= u'urn:bogus'
doc.A.B.xml_attributes[u'ns:a'] = u'b'
doc.A.B.xml_append(doc.xml_text_factory(u"One"))
#doc.A.B.xmlns_attributes[u'ns']= u'urn:bogus' ??
#doc.A.b.xml_attributes[u'ns:a'] = u'b'
self.assertEqual(len(list(doc.A)), 1)
self.assertEqual(len(doc.A.xml_children), 1)
EXPECTED = '<A><B xmlns:ns="urn:bogus" ns:a="b">One</B></A>'
self.compare_output(doc, XMLDECL+EXPECTED)
#self.assert_(isinstance(doc.xbel.title, unicode))
#self.assertRaises(AttributeError, binding.xbel.folder.)
return
def test_basic_create_doc4(self):
EXPECTED = '<A><B a="b">One</B></A>'
#Namespace-free attr creation, abbreviated
doc = bindery.nodes.entity_base()
A = doc.xml_element_factory(None, u'A')
doc.xml_append(A)
B = doc.xml_element_factory(None, u'B')
doc.A.xml_append(B)
doc.A.B.xml_attributes[u'a'] = u'b'
doc.A.B.xml_append(doc.xml_text_factory(u"One"))
self.assertEqual(len(list(doc.A)), 1)
self.assertEqual(len(doc.A.xml_children), 1)
self.compare_output(doc, XMLDECL+EXPECTED)
return
def test_basic_create_doc5(self):
EXPECTED = '<A a="b"/>'
doc = bindery.nodes.entity_base()
A = doc.xml_element_factory(None, u'A')
doc.xml_append(A)
doc.A.xml_attributes[u'a'] = u"b"
self.assertEqual(len(list(doc.A)), 1)
self.assertEqual(len(doc.A.xml_children), 0)
self.compare_output(doc, XMLDECL+EXPECTED)
#self.assert_(isinstance(doc.xbel.title, unicode))
#self.assertRaises(AttributeError, binding.xbel.folder.)
return
def test_basic_create_doc6(self):
EXPECTED = '<A xmlns:ns="urn:bogus" ns:a="b"/>'
doc = bindery.nodes.entity_base()
A = doc.xml_element_factory(None, u'A')
doc.xml_append(A)
doc.A.xmlns_attributes[u'ns']= u'urn:bogus'
doc.A.xml_attributes[u'ns:a'] = u'b'
self.assertEqual(len(list(doc.A)), 1)
self.assertEqual(len(doc.A.xml_children), 0)
self.compare_output(doc, XMLDECL+EXPECTED)
#self.assert_(isinstance(doc.xbel.title, unicode))
#self.assertRaises(AttributeError, binding.xbel.folder.)
return
#def testCreateDoc2(self):
#from amara.writers import outputparameters, xmlwriter
#doc = bindery.nodes.entity_base()
#A = doc.xml_element_factory(None, u'A')
#doc.xml_append(A)
#doc.A.xml_append(doc.xml_element_factory(None, u'A'))
#doc.A.A.xml_append(doc.xml_text_factory(u"One"))
#doc.A.xml_append(doc.xml_element_factory(None, u'B'))
#doc.A.B.xml_append(doc.xml_text_factory(u"Two"))
#self.assertEqual(len(list(doc.A)), 1)
#self.assertEqual(len(list(doc.A.A)), 1)
#self.assertEqual(len(list(doc.A.B)), 1)
#self.assertEqual(unicode(doc.A.A), u"One")
#self.assertEqual(unicode(doc.A.B), u"Two")
#EXPECTED = "<A><A>One</A><B>Two</B></A>"
#self.compare_output(doc, XMLDECL+EXPECTED)
#EXPECTED = "<A>\n <A>One</A>\n <B>Two</B>\n</A>"
#op = outputparameters.outputparameters()
#op.indent = u"yes"
#stream = cStringIO.StringIO()
#w = xmlwriter.xmlwriter(op, stream)
#doc.xml(writer=w)
#self.assertEqual(stream.getvalue(), XMLDECL+EXPECTED)
###PrettyPrint(doc)
##self.assert_(isinstance(doc.xbel.title, unicode))
##self.assertRaises(AttributeError, binding.xbel.folder.)
#return
def testCreateDocNs1(self):
EXPECTED = '<A xmlns="urn:bogus"/>'
doc = bindery.nodes.entity_base()
A = doc.xml_element_factory(u'urn:bogus', u'A')
doc.xml_append(A)
self.assertEqual(len(list(doc.A)), 1)
self.assertEqual(len(doc.A.xml_children), 0)
self.compare_output(doc, XMLDECL+EXPECTED)
#self.assert_(isinstance(doc.xbel.title, unicode))
#self.assertRaises(AttributeError, binding.xbel.folder.)
return
def testTemplate1(self):
EXPECTED = '<A><B a="b">One</B></A>'
doc = bindery.nodes.entity_base()
A = doc.xml_element_factory(None, u'A')
doc.xml_append(A)
doc.A.xml_append_fragment('<B a="b">One</B>')
self.assertEqual(len(list(doc.A)), 1)
self.assertEqual(len(doc.A.xml_children), 1)
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testTemplate2(self):
EXPECTED = '<A><B xmlns:ns="urn:bogus" ns:a="b">One</B></A>'
doc = bindery.nodes.entity_base()
A = doc.xml_element_factory(None, u'A')
doc.xml_append(A)
doc.A.xml_append_fragment('<B xmlns:ns="urn:bogus" ns:a="b">One</B>')
self.assertEqual(len(list(doc.A)), 1)
self.assertEqual(len(doc.A.xml_children), 1)
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testTemplate3(self):
EXPECTED = '<A xmlns:ns="urn:bogus" ns:a="b"><B>One</B></A>'
doc = bindery.nodes.entity_base()
A = doc.xml_element_factory(None, u'A')
doc.xml_append(A)
doc.A.xmlns_attributes[u'ns']= u'urn:bogus'
doc.A.xml_attributes[u'ns:a'] = u'b'
doc.A.xml_append_fragment('<B>One</B>')
self.assertEqual(len(list(doc.A)), 1)
self.assertEqual(len(doc.A.xml_children), 1)
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testTemplate4(self):
EXPECTED = '<A><B xmlns:ns="urn:bogus" ns:a="b">One</B></A>'
doc = bindery.nodes.entity_base()
A = doc.xml_element_factory(None, u'A')
doc.xml_append(A)
doc.A.xml_append(bindery.parse('<B xmlns:ns="urn:bogus" ns:a="b">One</B>').xml_children[0])
self.assertEqual(len(list(doc.A)), 1)
self.assertEqual(len(doc.A.xml_children), 1)
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testTemplate5(self):
EXPECTED = u'<A><B>\u2203</B></A>'.encode('utf-8')
doc = bindery.nodes.entity_base()
A = doc.xml_element_factory(None, u'A')
doc.xml_append(A)
doc.A.xml_append(bindery.parse(u'<B>\u2203</B>'.encode('utf-8')).xml_children[0])
self.assertEqual(len(list(doc.A)), 1)
self.assertEqual(len(doc.A.xml_children), 1)
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testTemplate6(self):
EXPECTED = u'<A><B>\u00AB\u00BB</B></A>'.encode('utf-8')
doc = bindery.nodes.entity_base()
A = doc.xml_element_factory(None, u'A')
doc.xml_append(A)
doc.A.xml_append_fragment(u'<B>\u00AB\u00BB</B>'.encode('utf-8'))
self.assertEqual(len(list(doc.A)), 1)
self.assertEqual(len(doc.A.xml_children), 1)
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testCreateDocType1(self):
EXPECTED = '<!DOCTYPE xsa PUBLIC "-//LM Garshol//DTD XML Software Autoupdate 1.0//EN//XML" "http://www.garshol.priv.no/download/xsa/xsa.dtd">\n<xsa/>'
doc = bindery.nodes.entity_base()
doc.xml_system_id = u"http://www.garshol.priv.no/download/xsa/xsa.dtd"
doc.xml_public_id = u"-//LM Garshol//DTD XML Software Autoupdate 1.0//EN//XML"
doc.xml_append(doc.xml_element_factory(None, u'xsa'))
self.assertEqual(len(list(doc.xsa)), 1)
self.assertEqual(len(doc.xsa.xml_children), 0)
self.compare_output(doc, XMLDECL+EXPECTED)
#PrettyPrint(doc)
#self.assert_(isinstance(doc.xbel.title, unicode))
#self.assertRaises(AttributeError, binding.xbel.folder.)
return
#def testCreateDocType2(self): ???
#EXPECTED = '<!DOCTYPE xsa PUBLIC "-//LM Garshol//DTD XML Software Autoupdate 1.0//EN//XML" "http://www.garshol.priv.no/download/xsa/xsa.dtd">\n<xsa/>'
#doc = bindery.nodes.entity_base()
#doc.xml_system_id = u"http://www.garshol.priv.no/download/xsa/xsa.dtd"
#doc.xml_public_id = u"-//LM Garshol//DTD XML Software Autoupdate 1.0//EN//XML"
## ???
##doc = amara.create_document(
##u"xsa",
##pubid=u"-//LM Garshol//DTD XML Software Autoupdate 1.0//EN//XML",
##sysid=u"http://www.garshol.priv.no/download/xsa/xsa.dtd"
##)
#doc.xml_append(doc.xml_element_factory(None, u'xsa'))
#self.assertEqual(len(list(doc.xsa)), 1)
#self.assertEqual(len(doc.xsa.xml_children), 0)
#self.compare_output(doc, XMLDECL+EXPECTED)
## ??? self.assertEqual(doc.xml(indent=u'yes'), XMLDECL+EXPECTED)
##PrettyPrint(doc)
##self.assert_(isinstance(doc.xbel.title, unicode))
##self.assertRaises(AttributeError, binding.xbel.folder.)
#return
#def testCreateDocType4(self):
#EXPECTED = '<!DOCTYPE xsa PUBLIC "-//LM Garshol//DTD XML Software Autoupdate 1.0//EN//XML" "http://www.garshol.priv.no/download/xsa/xsa.dtd">\n<xsa/>'
#op = OutputParameters()
#op.indent = u'yes'
#op.doctypeSystem = u"http://www.garshol.priv.no/download/xsa/xsa.dtd"
#op.doctypePublic = u"-//LM Garshol//DTD XML Software Autoupdate 1.0//EN//XML"
#stream = cStringIO.StringIO()
#w = XmlWriter(op, stream)
#doc = amara.create_document(u"xsa")
#self.assertEqual(len(list(doc.xsa)), 1)
#self.assertEqual(len(doc.xsa.xml_children), 0)
#doc.xml(writer=w)
#self.compare_output(doc, XMLDECL+EXPECTED)
##PrettyPrint(doc)
##self.assert_(isinstance(doc.xbel.title, unicode))
##self.assertRaises(AttributeError, binding.xbel.folder.)
#return
def testReplace(self):
EXPECTED = '<A><B id="1">One</B><B id="2">Two</B></A>'
DOC = EXPECTED
doc = bindery.parse(DOC)
del doc.A.B[1]
e2 = doc.xml_element_factory(None, u'B')
e2.xml_attributes[u'id'] = u"2"
e2.xml_append(doc.xml_text_factory(u'Two'))
doc.A.xml_append(e2)
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testRepeatEdits1(self):
EXPECTED = '<A><B a="b">One</B></A>'
doc = bindery.nodes.entity_base()
doc.xml_append(doc.xml_element_factory(None, u'A'))
e1 = doc.xml_element_factory(None, u'B')
e1.xml_attributes[u'a'] = u"b"
e1.xml_append(doc.xml_text_factory(u'One'))
doc.A.xml_append(e1)
self.assertEqual(len(list(doc.A)), 1)
self.assertEqual(len(doc.A.xml_children), 1)
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testSetChildElement1(self):
DOC = "<a><b>spam</b></a>"
EXPECTED = '<a><b>eggs</b></a>'
doc = bindery.parse(DOC)
doc.a.b = u"eggs"
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testSetChildElement2(self):
DOC = "<a><b>spam</b></a>"
EXPECTED = '<a><b>eggs</b></a>'
doc = bindery.parse(DOC)
doc.a.b[0] = u"eggs"
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testSetChildElement3(self):
DOC = "<a><b>spam</b><b>spam</b></a>"
EXPECTED = '<a><b>eggs</b><b>spam</b></a>'
doc = bindery.parse(DOC)
doc.a.b = u"eggs"
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testSetChildElement4(self):
DOC = "<a><b>spam</b><b>spam</b></a>"
EXPECTED = '<a><b>eggs</b><b>spam</b></a>'
doc = bindery.parse(DOC)
doc.a.b[0] = u"eggs"
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testSetChildElement5(self):
DOC = "<a><b>spam</b><b>spam</b></a>"
EXPECTED = '<a><b>spam</b><b>eggs</b></a>'
doc = bindery.parse(DOC)
doc.a.b[1] = u"eggs"
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testSetChildElement6(self):
DOC = "<a><b>spam</b><b>spam</b></a>"
doc = bindery.parse(DOC)
def edit():
doc.a.b[2] = u"eggs"
self.assertRaises(IndexError, edit)
return
def testDelChildElement1(self):
DOC = "<a><b>spam</b><b>eggs</b></a>"
EXPECTED = '<a><b>eggs</b></a>'
doc = bindery.parse(DOC)
del doc.a.b
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testDelChildElement2(self):
DOC = "<a><b>spam</b><b>eggs</b></a>"
EXPECTED = '<a><b>eggs</b></a>'
doc = bindery.parse(DOC)
del doc.a.b[0]
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testDelChildElement3(self):
DOC = "<a><b>spam</b><b>eggs</b></a>"
EXPECTED = '<a><b>spam</b></a>'
doc = bindery.parse(DOC)
del doc.a.b[1]
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testDelChildElement4(self):
DOC = "<a><b>spam</b><b>spam</b></a>"
doc = bindery.parse(DOC)
def edit():
del doc.a.b[2]
self.assertRaises(IndexError, edit)
return
def testDelChildElement5(self):
DOC = "<a><b>spam</b><b>eggs</b></a>"
EXPECTED = '<a><b>eggs</b></a>'
doc = bindery.parse(DOC)
del doc.a[u'b']
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testDelChildElement6(self):
DOC = "<a><b>spam</b><b>eggs</b></a>"
EXPECTED = '<a><b>eggs</b></a>'
doc = bindery.parse(DOC)
del doc.a[u'b'][0]
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testDelChildElement7(self):
DOC = "<a><b>spam</b><b>eggs</b></a>"
EXPECTED = '<a><b>spam</b></a>'
doc = bindery.parse(DOC)
del doc.a[u'b'][1]
self.compare_output(doc, XMLDECL+EXPECTED)
return
#def testDelChildElementWithClash1(self):
#DOC = '<a-1 b-1=""><b-1/></a-1>'
#EXPECTED = '<a-1 b-1=""/>'
#doc = bindery.parse(DOC)
#E = Node.ELEMENT_NODE
#del doc[E, None, u'a-1'][E, None, u'b-1']
#self.compare_output(doc, XMLDECL+EXPECTED)
#return
#def testDelAttributeWithClash1(self):
#DOC = '<a-1 b-1=""><b-1/></a-1>'
#EXPECTED = '<a-1><b-1/></a-1>'
#doc = bindery.parse(DOC)
#E = Node.ELEMENT_NODE
#A = Node.ATTRIBUTE_NODE
#del doc[E, None, u'a-1'][A, None, u'b-1']
#self.compare_output(doc, XMLDECL+EXPECTED)
#return
def testDelAttribute1(self):
DOC = '<a b="spam"><b>spam</b></a>'
EXPECTED = '<a><b>spam</b></a>'
doc = bindery.parse(DOC)
del doc.a.b
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testSetAttribute1(self):
DOC = '<a b="spam"></a>'
EXPECTED = '<a b="eggs"/>'
doc = bindery.parse(DOC)
doc.a.xml_attributes[u'b'] = u"eggs"
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testSetAttribute2(self):
DOC = '<a b="spam"><b>spam</b></a>'
EXPECTED = '<a b="eggs"><b>spam</b></a>'
doc = bindery.parse(DOC)
doc.a.xml_attributes[u'b'] = u"eggs"
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testSetAttribute3(self):
from xml.dom import Node
DOC = '<a b="spam"><b>spam</b></a>'
EXPECTED = '<a b="eggs"><b>spam</b></a>'
doc = bindery.parse(DOC)
        doc.a[Node.ATTRIBUTE_NODE, None, u'b'] = u'eggs'
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testSetAttribute4(self):
DOC = '<a><b>spam</b></a>'
EXPECTED = '<a><b xml:lang="en">spam</b></a>'
doc = bindery.parse(DOC)
# doc.a.b.xml_set_attribute((u"xml:lang"), u"en")
doc.a.b.xml_attributes[u'xml:lang'] = u'en'
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testSetAttribute5(self):
DOC = '<a><b>spam</b></a>'
EXPECTED = '<a xmlns:ns="urn:bogus" ns:foo="bar"><b>spam</b></a>'
doc = bindery.parse(DOC)
doc.a.xmlns_attributes[u'ns']= u'urn:bogus'
doc.a.xml_attributes[u'ns:foo'] = u'bar'
self.compare_output(doc, XMLDECL+EXPECTED)
return
#def testSetAttribute6(self):
#### Do we need this test now ?
#DOC = '<a><b>spam</b></a>'
#EXPECTED = '<a xmlns:ns="urn:bogus" ns:foo="bar"><b>spam</b></a>'
#doc = amara.parse(DOC, prefixes={u'ns': u'urn:bogus'})
#doc.a.xml_set_attribute((u"foo", u"urn:bogus"), u"bar")
#self.compare_output(doc, XMLDECL+EXPECTED)
#return
def testSetAttribute7(self):
DOC = '<a><b>spam</b></a>'
EXPECTED = '<a foo="bar"><b>spam</b></a>'
doc = bindery.parse(DOC)
doc.a.xml_attributes[u"foo"] = u"bar"
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testInsertAfter1(self):
DOC = "<a><b>spam</b></a>"
EXPECTED = '<a><b>spam</b><b>eggs</b></a>'
doc = bindery.parse(DOC)
new = doc.xml_element_factory(None, u'b')
new.xml_append(doc.xml_text_factory(u'eggs'))
doc.a.xml_insert(1, new)
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testInsertAfter2(self):
DOC = "<a><b>spam</b></a>"
EXPECTED = '<a><b>spam</b><c>eggs</c></a>'
doc = bindery.parse(DOC)
new = doc.xml_element_factory(None, u'c')
new.xml_append(doc.xml_text_factory(u'eggs'))
doc.a.xml_insert(doc.a.xml_index(doc.a.b)+1, new)
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testInsertAfter3(self):
DOC = "<a><b>spam</b><c>ham</c><c>pork</c></a>"
EXPECTED = "<a><b>spam</b><c>eggs</c><c>ham</c><c>pork</c></a>"
doc = bindery.parse(DOC)
new = doc.xml_element_factory(None, u'c')
new.xml_append(doc.xml_text_factory(u'eggs'))
doc.a.xml_insert(doc.a.xml_index(doc.a.b) +1, new)
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testInsertBefore1(self):
DOC = "<a><b>eggs</b></a>"
EXPECTED = '<a><b>spam</b><b>eggs</b></a>'
doc = bindery.parse(DOC)
new = doc.xml_element_factory(None, u'b')
new.xml_append(doc.xml_text_factory(u'spam'))
doc.a.xml_insert(0, new)
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testSetNamespace1(self):
#EXPECTED = '<A><B id="1">One</B><B id="2">Two</B></A>'
EXPECTED = '<ns:A xmlns:ns="urn:bogus"><ns:B/></ns:A>'
DOC = '<A><B/></A>'
doc = bindery.parse(DOC)
doc.A.xml_namespaces[u'ns'] = u'urn:bogus'
self.assertEqual(doc.A.xml_name, (None, u'A'))
self.assertEqual(doc.A.B.xml_name, (None, u'B'))
doc.A.xml_namespace = u'urn:bogus'
doc.xml_fixup()
#print [ n for n in dir(doc) if not n.startswith('__') and not n.startswith('xml') ]
self.assertEqual(doc.A_.xml_name, (u'urn:bogus', u'A'))
doc.A_.B.xml_namespace = u'urn:bogus'
doc.A_.xml_fixup()
self.assertEqual(doc.A_.B_.xml_name, (u'urn:bogus', u'B'))
self.compare_output(doc, XMLDECL+EXPECTED)
return
def testSetNamespace2(self):
#EXPECTED = '<A><B id="1">One</B><B id="2">Two</B></A>'
EXPECTED = '<A xmlns="urn:bogus"><B/></A>'
DOC = '<A><B/></A>'
doc = bindery.parse(DOC)
doc.A.xml_namespaces[None] = u'urn:bogus'
self.assertEqual(doc.A.xml_name, (None, u'A'))
self.assertEqual(doc.A.B.xml_name, (None, u'B'))
doc.A.xml_namespace = u'urn:bogus'
doc.xml_fixup()
self.assertEqual(doc.A_.xml_name, (u'urn:bogus', u'A'))
doc.A_.B.xml_namespace = u'urn:bogus'
doc.A_.xml_fixup()
self.assertEqual(doc.A_.B_.xml_name, (u'urn:bogus', u'B'))
self.compare_output(doc, XMLDECL+EXPECTED)
return
MONTY = """\
<?xml version="1.0" encoding="utf-8"?>
<monty>
<python spam="eggs">
What do you mean "bleh"
</python>
<python ministry="abuse">
But I was looking for argument
</python>
</monty>
"""
class TestTransforms(unittest.TestCase):
def compare_output(self, doc, expected):
"""
        Auxiliary method for testing output purposes
"""
return self.assertEqual(doc.xml_encode(), expected)
def test_deep_copy_entity(self):
#FIXME really goes in manual.py, since it's based on a manual example
EXPECTED1 = '<python spam="eggs">\n What do you mean "bleh"\n </python>'
EXPECTED2 = '<python spam="abcd">\n What do you mean "bleh"\n </python>'
import copy
doc = bindery.parse(MONTY)
doc2 = copy.deepcopy(doc)
doc2.monty.python.xml_attributes[u'spam'] = u"abcd"
self.compare_output(doc.monty.python, EXPECTED1)
self.compare_output(doc2.monty.python, EXPECTED2)
return
def test_deep_copy_element(self):
#FIXME really goes in manual.py, since it's based on a manual example
EXPECTED1 = '<python spam="eggs">\n What do you mean "bleh"\n </python>'
EXPECTED2 = '<python spam="abcd">\n What do you mean "bleh"\n </python>'
import copy
doc = bindery.parse(MONTY)
doc2 = bindery.nodes.entity_base()
root_elem = copy.deepcopy(doc.xml_first_child)
doc2.xml_append(root_elem)
doc2.monty.python.xml_attributes[u'spam'] = u"abcd"
self.compare_output(doc.monty.python, EXPECTED1)
self.compare_output(doc2.monty.python, EXPECTED2)
return
if __name__ == '__main__':
raise SystemExit("use nosetests")
| 37.463097
| 159
| 0.591069
|
00315b78b065aae4730702dff26be7d8bd48da19
| 314
|
py
|
Python
|
v1.0.0.test/toontown/uberdog/DataStoreGlobals.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 4
|
2019-07-01T15:46:43.000Z
|
2021-07-23T16:26:48.000Z
|
v1.0.0.test/toontown/uberdog/DataStoreGlobals.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 1
|
2019-06-29T03:40:05.000Z
|
2021-06-13T01:15:16.000Z
|
v1.0.0.test/toontown/uberdog/DataStoreGlobals.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 4
|
2019-07-28T21:18:46.000Z
|
2021-02-25T06:37:25.000Z
|
from toontown.uberdog.ScavengerHuntDataStore import *
from toontown.uberdog.DataStore import *
SH = 1
GEN = 2
TYPES = {SH: (ScavengerHuntDataStore,), GEN: (
DataStore,)}
def getStoreClass(type):
storeClass = TYPES.get(type, None)
if storeClass:
return storeClass[0]
else:
return
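# Illustrative usage sketch (an added note, not part of the original module):
# the integer constants select a store class, and unknown types fall through
# to None.
#
#   getStoreClass(SH)   # -> ScavengerHuntDataStore
#   getStoreClass(GEN)  # -> DataStore
#   getStoreClass(99)   # -> None (no entry in TYPES)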
| 24.153846
| 53
| 0.681529
|
c1ad30526e2e618337819f9c9654101e90cf7d42
| 21,984
|
py
|
Python
|
tests/generic_views/test_base.py
|
beshrkayali/django
|
84633905273fc916e3d17883810d9969c03f73c2
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 7
|
2020-01-13T18:26:41.000Z
|
2021-04-20T04:22:26.000Z
|
tests/generic_views/test_base.py
|
beshrkayali/django
|
84633905273fc916e3d17883810d9969c03f73c2
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
tests/generic_views/test_base.py
|
beshrkayali/django
|
84633905273fc916e3d17883810d9969c03f73c2
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 4
|
2019-11-07T01:22:16.000Z
|
2020-09-16T22:02:16.000Z
|
import time
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpResponse
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.utils import require_jinja2
from django.urls import resolve
from django.views.generic import RedirectView, TemplateView, View
from . import views
class SimpleView(View):
"""
A simple view with a docstring.
"""
def get(self, request):
return HttpResponse('This is a simple view')
class SimplePostView(SimpleView):
post = SimpleView.get
class PostOnlyView(View):
def post(self, request):
return HttpResponse('This view only accepts POST')
class CustomizableView(SimpleView):
parameter = {}
def decorator(view):
view.is_decorated = True
return view
class DecoratedDispatchView(SimpleView):
@decorator
def dispatch(self, request, *args, **kwargs):
return super().dispatch(request, *args, **kwargs)
class AboutTemplateView(TemplateView):
def get(self, request):
return self.render_to_response({})
def get_template_names(self):
return ['generic_views/about.html']
class AboutTemplateAttributeView(TemplateView):
template_name = 'generic_views/about.html'
def get(self, request):
return self.render_to_response(context={})
class InstanceView(View):
def get(self, request):
return self
class ViewTest(SimpleTestCase):
rf = RequestFactory()
def _assert_simple(self, response):
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'This is a simple view')
def test_no_init_kwargs(self):
"""
A view can't be accidentally instantiated before deployment
"""
msg = 'This method is available only on the class, not on instances.'
with self.assertRaisesMessage(AttributeError, msg):
SimpleView(key='value').as_view()
def test_no_init_args(self):
"""
A view can't be accidentally instantiated before deployment
"""
msg = 'as_view() takes 1 positional argument but 2 were given'
with self.assertRaisesMessage(TypeError, msg):
SimpleView.as_view('value')
def test_pathological_http_method(self):
"""
        The edge case of an HTTP request that spoofs an existing method name is caught.
"""
self.assertEqual(SimpleView.as_view()(
self.rf.get('/', REQUEST_METHOD='DISPATCH')
).status_code, 405)
def test_get_only(self):
"""
        Test that a view which only allows GET doesn't allow other methods.
"""
self._assert_simple(SimpleView.as_view()(self.rf.get('/')))
self.assertEqual(SimpleView.as_view()(self.rf.post('/')).status_code, 405)
self.assertEqual(SimpleView.as_view()(
self.rf.get('/', REQUEST_METHOD='FAKE')
).status_code, 405)
def test_get_and_head(self):
"""
        Test that a view which supplies a GET method also responds correctly to HEAD.
"""
self._assert_simple(SimpleView.as_view()(self.rf.get('/')))
response = SimpleView.as_view()(self.rf.head('/'))
self.assertEqual(response.status_code, 200)
def test_setup_get_and_head(self):
view_instance = SimpleView()
self.assertFalse(hasattr(view_instance, 'head'))
view_instance.setup(self.rf.get('/'))
self.assertTrue(hasattr(view_instance, 'head'))
self.assertEqual(view_instance.head, view_instance.get)
def test_head_no_get(self):
"""
        Test that a view which supplies no GET method responds to HEAD with HTTP 405.
"""
response = PostOnlyView.as_view()(self.rf.head('/'))
self.assertEqual(response.status_code, 405)
def test_get_and_post(self):
"""
        Test a view which allows both GET and POST.
"""
self._assert_simple(SimplePostView.as_view()(self.rf.get('/')))
self._assert_simple(SimplePostView.as_view()(self.rf.post('/')))
self.assertEqual(SimplePostView.as_view()(
self.rf.get('/', REQUEST_METHOD='FAKE')
).status_code, 405)
def test_invalid_keyword_argument(self):
"""
View arguments must be predefined on the class and can't
        be named like an HTTP method.
"""
msg = (
"You tried to pass in the %s method name as a keyword argument "
"to SimpleView(). Don't do that."
)
# Check each of the allowed method names
for method in SimpleView.http_method_names:
with self.assertRaisesMessage(TypeError, msg % method):
SimpleView.as_view(**{method: 'value'})
# Check the case view argument is ok if predefined on the class...
CustomizableView.as_view(parameter="value")
# ...but raises errors otherwise.
msg = (
"CustomizableView() received an invalid keyword 'foobar'. "
"as_view only accepts arguments that are already attributes of "
"the class."
)
with self.assertRaisesMessage(TypeError, msg):
CustomizableView.as_view(foobar="value")
def test_calling_more_than_once(self):
"""
Test a view can only be called once.
"""
request = self.rf.get('/')
view = InstanceView.as_view()
self.assertNotEqual(view(request), view(request))
def test_class_attributes(self):
"""
The callable returned from as_view() has proper
docstring, name and module.
"""
self.assertEqual(SimpleView.__doc__, SimpleView.as_view().__doc__)
self.assertEqual(SimpleView.__name__, SimpleView.as_view().__name__)
self.assertEqual(SimpleView.__module__, SimpleView.as_view().__module__)
def test_dispatch_decoration(self):
"""
Attributes set by decorators on the dispatch method
are also present on the closure.
"""
self.assertTrue(DecoratedDispatchView.as_view().is_decorated)
def test_options(self):
"""
Views respond to HTTP OPTIONS requests with an Allow header
appropriate for the methods implemented by the view class.
"""
request = self.rf.options('/')
view = SimpleView.as_view()
response = view(request)
self.assertEqual(200, response.status_code)
self.assertTrue(response['Allow'])
def test_options_for_get_view(self):
"""
A view implementing GET allows GET and HEAD.
"""
request = self.rf.options('/')
view = SimpleView.as_view()
response = view(request)
self._assert_allows(response, 'GET', 'HEAD')
def test_options_for_get_and_post_view(self):
"""
A view implementing GET and POST allows GET, HEAD, and POST.
"""
request = self.rf.options('/')
view = SimplePostView.as_view()
response = view(request)
self._assert_allows(response, 'GET', 'HEAD', 'POST')
def test_options_for_post_view(self):
"""
A view implementing POST allows POST.
"""
request = self.rf.options('/')
view = PostOnlyView.as_view()
response = view(request)
self._assert_allows(response, 'POST')
def _assert_allows(self, response, *expected_methods):
"Assert allowed HTTP methods reported in the Allow response header"
response_allows = set(response['Allow'].split(', '))
self.assertEqual(set(expected_methods + ('OPTIONS',)), response_allows)
def test_args_kwargs_request_on_self(self):
"""
Test a view only has args, kwargs & request once `as_view`
has been called.
"""
bare_view = InstanceView()
view = InstanceView.as_view()(self.rf.get('/'))
for attribute in ('args', 'kwargs', 'request'):
self.assertNotIn(attribute, dir(bare_view))
self.assertIn(attribute, dir(view))
def test_overridden_setup(self):
class SetAttributeMixin:
def setup(self, request, *args, **kwargs):
self.attr = True
super().setup(request, *args, **kwargs)
class CheckSetupView(SetAttributeMixin, SimpleView):
def dispatch(self, request, *args, **kwargs):
assert hasattr(self, 'attr')
return super().dispatch(request, *args, **kwargs)
response = CheckSetupView.as_view()(self.rf.get('/'))
self.assertEqual(response.status_code, 200)
def test_not_calling_parent_setup_error(self):
class TestView(View):
def setup(self, request, *args, **kwargs):
pass # Not calling super().setup()
msg = (
"TestView instance has no 'request' attribute. Did you override "
"setup() and forget to call super()?"
)
with self.assertRaisesMessage(AttributeError, msg):
TestView.as_view()(self.rf.get('/'))
def test_setup_adds_args_kwargs_request(self):
request = self.rf.get('/')
args = ('arg 1', 'arg 2')
kwargs = {'kwarg_1': 1, 'kwarg_2': 'year'}
view = View()
view.setup(request, *args, **kwargs)
self.assertEqual(request, view.request)
self.assertEqual(args, view.args)
self.assertEqual(kwargs, view.kwargs)
def test_direct_instantiation(self):
"""
It should be possible to use the view by directly instantiating it
without going through .as_view() (#21564).
"""
view = PostOnlyView()
response = view.dispatch(self.rf.head('/'))
self.assertEqual(response.status_code, 405)
@override_settings(ROOT_URLCONF='generic_views.urls')
class TemplateViewTest(SimpleTestCase):
rf = RequestFactory()
def _assert_about(self, response):
response.render()
self.assertContains(response, '<h1>About</h1>')
def test_get(self):
"""
Test a view that simply renders a template on GET
"""
self._assert_about(AboutTemplateView.as_view()(self.rf.get('/about/')))
def test_head(self):
"""
Test a TemplateView responds correctly to HEAD
"""
response = AboutTemplateView.as_view()(self.rf.head('/about/'))
self.assertEqual(response.status_code, 200)
def test_get_template_attribute(self):
"""
Test a view that renders a template on GET with the template name as
an attribute on the class.
"""
self._assert_about(AboutTemplateAttributeView.as_view()(self.rf.get('/about/')))
def test_get_generic_template(self):
"""
Test a completely generic view that renders a template on GET
with the template name as an argument at instantiation.
"""
self._assert_about(TemplateView.as_view(template_name='generic_views/about.html')(self.rf.get('/about/')))
def test_template_name_required(self):
"""
A template view must provide a template name.
"""
msg = (
"TemplateResponseMixin requires either a definition of "
"'template_name' or an implementation of 'get_template_names()'"
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get('/template/no_template/')
@require_jinja2
def test_template_engine(self):
"""
A template view may provide a template engine.
"""
request = self.rf.get('/using/')
view = TemplateView.as_view(template_name='generic_views/using.html')
self.assertEqual(view(request).render().content, b'DTL\n')
view = TemplateView.as_view(template_name='generic_views/using.html', template_engine='django')
self.assertEqual(view(request).render().content, b'DTL\n')
view = TemplateView.as_view(template_name='generic_views/using.html', template_engine='jinja2')
self.assertEqual(view(request).render().content, b'Jinja2\n')
def test_template_params(self):
"""
A generic template view passes kwargs as context.
"""
response = self.client.get('/template/simple/bar/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['foo'], 'bar')
self.assertIsInstance(response.context['view'], View)
def test_extra_template_params(self):
"""
A template view can be customized to return extra context.
"""
response = self.client.get('/template/custom/bar/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['foo'], 'bar')
self.assertEqual(response.context['key'], 'value')
self.assertIsInstance(response.context['view'], View)
def test_cached_views(self):
"""
A template view can be cached
"""
response = self.client.get('/template/cached/bar/')
self.assertEqual(response.status_code, 200)
time.sleep(1.0)
response2 = self.client.get('/template/cached/bar/')
self.assertEqual(response2.status_code, 200)
self.assertEqual(response.content, response2.content)
time.sleep(2.0)
# Let the cache expire and test again
response2 = self.client.get('/template/cached/bar/')
self.assertEqual(response2.status_code, 200)
self.assertNotEqual(response.content, response2.content)
def test_content_type(self):
response = self.client.get('/template/content_type/')
self.assertEqual(response['Content-Type'], 'text/plain')
def test_resolve_view(self):
match = resolve('/template/content_type/')
self.assertIs(match.func.view_class, TemplateView)
self.assertEqual(match.func.view_initkwargs['content_type'], 'text/plain')
def test_resolve_login_required_view(self):
match = resolve('/template/login_required/')
self.assertIs(match.func.view_class, TemplateView)
def test_extra_context(self):
response = self.client.get('/template/extra_context/')
self.assertEqual(response.context['title'], 'Title')
@override_settings(ROOT_URLCONF='generic_views.urls')
class RedirectViewTest(SimpleTestCase):
rf = RequestFactory()
def test_no_url(self):
"Without any configuration, returns HTTP 410 GONE"
response = RedirectView.as_view()(self.rf.get('/foo/'))
self.assertEqual(response.status_code, 410)
def test_default_redirect(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.get('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_permanent_redirect(self):
"Permanent redirects are an option"
response = RedirectView.as_view(url='/bar/', permanent=True)(self.rf.get('/foo/'))
self.assertEqual(response.status_code, 301)
self.assertEqual(response.url, '/bar/')
def test_temporary_redirect(self):
"Temporary redirects are an option"
response = RedirectView.as_view(url='/bar/', permanent=False)(self.rf.get('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_include_args(self):
"GET arguments can be included in the redirected URL"
response = RedirectView.as_view(url='/bar/')(self.rf.get('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
response = RedirectView.as_view(url='/bar/', query_string=True)(self.rf.get('/foo/?pork=spam'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/?pork=spam')
def test_include_urlencoded_args(self):
"GET arguments can be URL-encoded when included in the redirected URL"
response = RedirectView.as_view(url='/bar/', query_string=True)(
self.rf.get('/foo/?unicode=%E2%9C%93'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/?unicode=%E2%9C%93')
def test_parameter_substitution(self):
"Redirection URLs can be parameterized"
response = RedirectView.as_view(url='/bar/%(object_id)d/')(self.rf.get('/foo/42/'), object_id=42)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/42/')
def test_named_url_pattern(self):
"Named pattern parameter should reverse to the matching pattern"
response = RedirectView.as_view(pattern_name='artist_detail')(self.rf.get('/foo/'), pk=1)
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], '/detail/artist/1/')
def test_named_url_pattern_using_args(self):
response = RedirectView.as_view(pattern_name='artist_detail')(self.rf.get('/foo/'), 1)
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], '/detail/artist/1/')
def test_redirect_POST(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.post('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_HEAD(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.head('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_OPTIONS(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.options('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_PUT(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.put('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_PATCH(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.patch('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_DELETE(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.delete('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_when_meta_contains_no_query_string(self):
"regression for #16705"
# we can't use self.rf.get because it always sets QUERY_STRING
response = RedirectView.as_view(url='/bar/')(self.rf.request(PATH_INFO='/foo/'))
self.assertEqual(response.status_code, 302)
def test_direct_instantiation(self):
"""
It should be possible to use the view without going through .as_view()
(#21564).
"""
view = RedirectView()
response = view.dispatch(self.rf.head('/foo/'))
self.assertEqual(response.status_code, 410)
class GetContextDataTest(SimpleTestCase):
def test_get_context_data_super(self):
test_view = views.CustomContextView()
context = test_view.get_context_data(kwarg_test='kwarg_value')
        # the test_name key is inserted by the test class's parent
self.assertIn('test_name', context)
self.assertEqual(context['kwarg_test'], 'kwarg_value')
self.assertEqual(context['custom_key'], 'custom_value')
# test that kwarg overrides values assigned higher up
context = test_view.get_context_data(test_name='test_value')
self.assertEqual(context['test_name'], 'test_value')
def test_object_at_custom_name_in_context_data(self):
        # Checks 'pony' key presence in the dict returned by get_context_data()
test_view = views.CustomSingleObjectView()
test_view.context_object_name = 'pony'
context = test_view.get_context_data()
self.assertEqual(context['pony'], test_view.object)
def test_object_in_get_context_data(self):
        # Checks 'object' key presence in the dict returned by get_context_data() (#20234)
test_view = views.CustomSingleObjectView()
context = test_view.get_context_data()
self.assertEqual(context['object'], test_view.object)
class UseMultipleObjectMixinTest(SimpleTestCase):
rf = RequestFactory()
def test_use_queryset_from_view(self):
test_view = views.CustomMultipleObjectMixinView()
test_view.get(self.rf.get('/'))
# Don't pass queryset as argument
context = test_view.get_context_data()
self.assertEqual(context['object_list'], test_view.queryset)
def test_overwrite_queryset(self):
test_view = views.CustomMultipleObjectMixinView()
test_view.get(self.rf.get('/'))
queryset = [{'name': 'Lennon'}, {'name': 'Ono'}]
self.assertNotEqual(test_view.queryset, queryset)
# Overwrite the view's queryset with queryset from kwarg
context = test_view.get_context_data(object_list=queryset)
self.assertEqual(context['object_list'], queryset)
class SingleObjectTemplateResponseMixinTest(SimpleTestCase):
def test_template_mixin_without_template(self):
"""
        We want to make sure that if you use a template mixin, but forget the
template, it still tells you it's ImproperlyConfigured instead of
TemplateDoesNotExist.
"""
view = views.TemplateResponseWithoutTemplate()
msg = (
"TemplateResponseMixin requires either a definition of "
"'template_name' or an implementation of 'get_template_names()'"
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
view.get_template_names()
| 37.451448
| 114
| 0.651701
|
51208f54a587faf62fdf7b8bd8f30b3536af5401
| 194
|
py
|
Python
|
Serial_IO/raspberry/serialio.py
|
albertoft/arduino-stuff
|
27f624a9732252cff9482f4af9446bd55ec778c2
|
[
"MIT"
] | null | null | null |
Serial_IO/raspberry/serialio.py
|
albertoft/arduino-stuff
|
27f624a9732252cff9482f4af9446bd55ec778c2
|
[
"MIT"
] | null | null | null |
Serial_IO/raspberry/serialio.py
|
albertoft/arduino-stuff
|
27f624a9732252cff9482f4af9446bd55ec778c2
|
[
"MIT"
] | null | null | null |
import serial
ser = serial.Serial('/dev/ttyACM0',9600)
s = [0,1]
while True:
read_serial = ser.readline()
#s[0] = str(int (ser.readline(),16))
#print s[0]
print read_serial
ser.write("A")
| 17.636364
| 40
| 0.659794
|
4090f3a5796916ee7d00b4dc12f728f36159a8ef
| 6,608
|
py
|
Python
|
src/models/networks/fcn8_vgg16.py
|
IssamLaradji/looc
|
50a05b9bf2d36cd8770add8cc65f9bab1ad45841
|
[
"Apache-2.0"
] | 9
|
2020-10-27T15:02:53.000Z
|
2022-01-19T06:33:15.000Z
|
src/models/networks/fcn8_vgg16.py
|
IssamLaradji/looc
|
50a05b9bf2d36cd8770add8cc65f9bab1ad45841
|
[
"Apache-2.0"
] | 1
|
2021-01-12T13:24:07.000Z
|
2021-01-13T16:20:11.000Z
|
src/models/networks/fcn8_vgg16.py
|
IssamLaradji/looc
|
50a05b9bf2d36cd8770add8cc65f9bab1ad45841
|
[
"Apache-2.0"
] | 2
|
2020-10-28T12:43:50.000Z
|
2021-03-09T03:19:32.000Z
|
import torch.nn as nn
import torchvision
import torch
from skimage import morphology as morph
import numpy as np
import torch.utils.model_zoo as model_zoo
#----------- LC-FCN8
class FCN8VGG16(nn.Module):
def __init__(self, n_classes):
super().__init__()
self.n_classes = n_classes
# PREDEFINE LAYERS
self.pool = nn.MaxPool2d(kernel_size=2, stride=2, ceil_mode=True)
self.relu = nn.ReLU(inplace=True)
# VGG16 PART
self.conv1_1 = conv3x3(3, 64, stride=1, padding=100)
self.conv1_2 = conv3x3(64, 64)
self.conv2_1 = conv3x3(64, 128)
self.conv2_2 = conv3x3(128, 128)
self.conv3_1 = conv3x3(128, 256)
self.conv3_2 = conv3x3(256, 256)
self.conv3_3 = conv3x3(256, 256)
self.conv4_1 = conv3x3(256, 512)
self.conv4_2 = conv3x3(512, 512)
self.conv4_3 = conv3x3(512, 512)
self.conv5_1 = conv3x3(512, 512)
self.conv5_2 = conv3x3(512, 512)
self.conv5_3 = conv3x3(512, 512)
self.fc6 = nn.Conv2d(512, 4096, kernel_size=7, stride=1, padding=0)
self.dropout_f6 = nn.Dropout()
self.fc7 = nn.Conv2d(4096, 4096, kernel_size=1, stride=1, padding=0)
self.dropout_f7 = nn.Dropout()
# SEMANTIC SEGMENTAION PART
self.scoring_layer = nn.Conv2d(4096, self.n_classes, kernel_size=1,
stride=1, padding=0)
self.upscore2 = nn.ConvTranspose2d(self.n_classes, self.n_classes,
kernel_size=4, stride=2, bias=False)
self.upscore_pool4 = nn.ConvTranspose2d(self.n_classes, self.n_classes,
kernel_size=4, stride=2, bias=False)
self.upscore8 = nn.ConvTranspose2d(self.n_classes, self.n_classes,
kernel_size=16, stride=8, bias=False)
        # Initialize weights
self.scoring_layer.weight.data.zero_()
self.scoring_layer.bias.data.zero_()
self.score_pool3 = nn.Conv2d(256, self.n_classes, kernel_size=1)
self.score_pool4 = nn.Conv2d(512, self.n_classes, kernel_size=1)
self.score_pool3.weight.data.zero_()
self.score_pool3.bias.data.zero_()
self.score_pool4.weight.data.zero_()
self.score_pool4.bias.data.zero_()
self.upscore2.weight.data.copy_(get_upsampling_weight(self.n_classes, self.n_classes, 4))
self.upscore_pool4.weight.data.copy_(get_upsampling_weight(self.n_classes, self.n_classes, 4))
self.upscore8.weight.data.copy_(get_upsampling_weight(self.n_classes, self.n_classes, 16))
# Pretrained layers
pth_url = 'https://download.pytorch.org/models/vgg16-397923af.pth' # download from model zoo
state_dict = model_zoo.load_url(pth_url)
layer_names = [layer_name for layer_name in state_dict]
counter = 0
for p in self.parameters():
if counter < 26: # conv1_1 to pool5
p.data = state_dict[ layer_names[counter] ]
elif counter == 26: # fc6 weight
p.data = state_dict[ layer_names[counter] ].view(4096, 512, 7, 7)
elif counter == 27: # fc6 bias
p.data = state_dict[ layer_names[counter] ]
elif counter == 28: # fc7 weight
p.data = state_dict[ layer_names[counter] ].view(4096, 4096, 1, 1)
elif counter == 29: # fc7 bias
p.data = state_dict[ layer_names[counter] ]
counter += 1
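        # (Added note: self.parameters() yields parameters in registration order,
        # which lines up with the conv weights/biases of torchvision's VGG16
        # state_dict for the first 26 entries; the fc6/fc7 weights are reshaped
        # from their fully connected shape into 7x7 and 1x1 conv kernels, the
        # usual "convolutionalized" classifier trick used by FCN.)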
def forward(self, x):
n,c,h,w = x.size()
# VGG16 PART
conv1_1 = self.relu( self.conv1_1(x) )
conv1_2 = self.relu( self.conv1_2(conv1_1) )
pool1 = self.pool(conv1_2)
conv2_1 = self.relu( self.conv2_1(pool1) )
conv2_2 = self.relu( self.conv2_2(conv2_1) )
pool2 = self.pool(conv2_2)
# pool2 = self.eprop(pool2)
conv3_1 = self.relu( self.conv3_1(pool2) )
conv3_2 = self.relu( self.conv3_2(conv3_1) )
conv3_3 = self.relu( self.conv3_3(conv3_2) )
pool3 = self.pool(conv3_3)
conv4_1 = self.relu( self.conv4_1(pool3) )
conv4_2 = self.relu( self.conv4_2(conv4_1) )
conv4_3 = self.relu( self.conv4_3(conv4_2) )
pool4 = self.pool(conv4_3)
conv5_1 = self.relu( self.conv5_1(pool4) )
conv5_2 = self.relu( self.conv5_2(conv5_1) )
conv5_3 = self.relu( self.conv5_3(conv5_2) )
pool5 = self.pool(conv5_3)
fc6 = self.dropout_f6( self.relu( self.fc6(pool5) ) )
fc7 = self.dropout_f7( self.relu( self.fc7(fc6) ) )
# SEMANTIC SEGMENTATION PART
# first
scores = self.scoring_layer( fc7 )
upscore2 = self.upscore2(scores)
# second
score_pool4 = self.score_pool4(pool4)
score_pool4c = score_pool4[:, :, 5:5+upscore2.size(2),
5:5+upscore2.size(3)]
upscore_pool4 = self.upscore_pool4(score_pool4c + upscore2)
# third
score_pool3 = self.score_pool3(pool3)
score_pool3c = score_pool3[:, :, 9:9+upscore_pool4.size(2),
9:9+upscore_pool4.size(3)]
output = self.upscore8(score_pool3c + upscore_pool4)
return output[:, :, 31: (31 + h), 31: (31 + w)].contiguous()
# ===========================================================
# helpers
def get_upsampling_weight(in_channels, out_channels, kernel_size):
"""Make a 2D bilinear kernel suitable for upsampling"""
factor = (kernel_size + 1) // 2
if kernel_size % 2 == 1:
center = factor - 1
else:
center = factor - 0.5
og = np.ogrid[:kernel_size, :kernel_size]
filt = (1 - abs(og[0] - center) / factor) * \
(1 - abs(og[1] - center) / factor)
weight = np.zeros((in_channels, out_channels, kernel_size, kernel_size),
dtype=np.float64)
weight[range(in_channels), range(out_channels), :, :] = filt
return torch.from_numpy(weight).float()
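# Worked example (added sketch, not in the original source): for kernel_size=4,
# factor = 2 and center = 1.5, so the 1-D weights are [0.25, 0.75, 0.75, 0.25]
# and `filt` is their outer product (corners 0.0625, edges 0.1875, center
# 0.5625). This is the kernel copied into upscore2/upscore_pool4 above;
# upscore8 uses the analogous kernel_size=16 version for stride-8 upsampling.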
def conv3x3(in_planes, out_planes, stride=1, padding=1):
"3x3 convolution with padding"
return nn.Conv2d(in_planes, out_planes, kernel_size=(3,3), stride=(stride,stride),
padding=(padding,padding))
def conv1x1(in_planes, out_planes, stride=1):
"1x1 convolution with padding"
return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride,
padding=0)
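# Minimal usage sketch (an assumption, not part of the original file): runs a
# dummy forward pass; note that constructing FCN8VGG16 downloads the VGG16
# weights from the torchvision model zoo.
if __name__ == '__main__':
    model = FCN8VGG16(n_classes=2)
    x = torch.randn(1, 3, 224, 224)
    with torch.no_grad():
        out = model(x)
    print(out.shape)  # expected: torch.Size([1, 2, 224, 224])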
| 40.292683
| 102
| 0.585654
|
2bd0da440b084e4a512d930bf20abe42e48fcd89
| 1,765
|
py
|
Python
|
seer/seer_pb2_grpc.py
|
cshenton/seer-python
|
72ff88edf4148c2b2a13deb8e1ad984647124874
|
[
"Apache-2.0"
] | 2
|
2019-05-22T21:36:01.000Z
|
2020-01-16T12:23:45.000Z
|
seer/seer_pb2_grpc.py
|
cshenton/seer-python
|
72ff88edf4148c2b2a13deb8e1ad984647124874
|
[
"Apache-2.0"
] | null | null | null |
seer/seer_pb2_grpc.py
|
cshenton/seer-python
|
72ff88edf4148c2b2a13deb8e1ad984647124874
|
[
"Apache-2.0"
] | 1
|
2020-01-14T23:53:19.000Z
|
2020-01-14T23:53:19.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
from __future__ import absolute_import
import grpc
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from . import seer_pb2 as seer__pb2
class SeerStub(object):
"""The seer service
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.CreateStream = channel.unary_unary(
'/seer.Seer/CreateStream',
request_serializer=seer__pb2.CreateStreamRequest.SerializeToString,
response_deserializer=seer__pb2.Stream.FromString,
)
self.GetStream = channel.unary_unary(
'/seer.Seer/GetStream',
request_serializer=seer__pb2.GetStreamRequest.SerializeToString,
response_deserializer=seer__pb2.Stream.FromString,
)
self.UpdateStream = channel.unary_unary(
'/seer.Seer/UpdateStream',
request_serializer=seer__pb2.UpdateStreamRequest.SerializeToString,
response_deserializer=seer__pb2.Stream.FromString,
)
self.DeleteStream = channel.unary_unary(
'/seer.Seer/DeleteStream',
request_serializer=seer__pb2.DeleteStreamRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.ListStreams = channel.unary_unary(
'/seer.Seer/ListStreams',
request_serializer=seer__pb2.ListStreamsRequest.SerializeToString,
response_deserializer=seer__pb2.ListStreamsResponse.FromString,
)
self.GetForecast = channel.unary_unary(
'/seer.Seer/GetForecast',
request_serializer=seer__pb2.GetForecastRequest.SerializeToString,
response_deserializer=seer__pb2.Forecast.FromString,
)
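# Illustrative client sketch (an added assumption, not part of the generated
# code): a stub only needs a grpc.Channel; the address below is a placeholder
# and actually issuing an RPC requires a running Seer server.
if __name__ == '__main__':
    channel = grpc.insecure_channel('localhost:8080')
    stub = SeerStub(channel)
    # e.g. stub.ListStreams(seer__pb2.ListStreamsRequest()) against a live server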
| 35.3
| 82
| 0.730312
|
adacba3546ba31af3ce7d497979651ed982e587e
| 80
|
py
|
Python
|
app/env/lib/python3.7/sre_constants.py
|
siyaochen/Tier1Health
|
536591a7534bbb3fb27fe889bfed9de152ec1864
|
[
"MIT"
] | 4
|
2020-02-05T11:26:47.000Z
|
2021-05-26T07:48:46.000Z
|
app/env/lib/python3.7/sre_constants.py
|
siyaochen/Tier1Health
|
536591a7534bbb3fb27fe889bfed9de152ec1864
|
[
"MIT"
] | 10
|
2019-11-25T16:54:39.000Z
|
2022-02-10T08:29:51.000Z
|
frontend/env/lib/python3.7/sre_constants.py
|
US579/Seddit
|
116a676efd0fa31c8cc6fe4c723b739203d9428b
|
[
"MIT"
] | 7
|
2019-05-30T05:48:39.000Z
|
2019-06-27T12:26:54.000Z
|
/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/sre_constants.py
| 80
| 80
| 0.85
|
70834e0f3945437e22c7705674e11606f7687d89
| 1,225
|
py
|
Python
|
modules/fdlibm/doc/__ieee754_hypot.py
|
brycelelbach/nt2
|
73d7e8dd390fa4c8d251c6451acdae65def70e0b
|
[
"BSL-1.0"
] | 1
|
2022-03-24T03:35:10.000Z
|
2022-03-24T03:35:10.000Z
|
modules/fdlibm/doc/__ieee754_hypot.py
|
brycelelbach/nt2
|
73d7e8dd390fa4c8d251c6451acdae65def70e0b
|
[
"BSL-1.0"
] | null | null | null |
modules/fdlibm/doc/__ieee754_hypot.py
|
brycelelbach/nt2
|
73d7e8dd390fa4c8d251c6451acdae65def70e0b
|
[
"BSL-1.0"
] | null | null | null |
[ ## this file was manually modified by jt
{
'functor' : {
'arity' : '2',
'call_types' : [],
'ret_arity' : '0',
'rturn' : {
'default' : 'T',
},
'simd_types' : [],
'special' : ['fdlibm'],
'type_defs' : [],
'types' : ['real_'],
},
'info' : 'manually modified',
'unit' : {
'global_header' : {
'first_stamp' : 'created by jt the 03/03/2011',
'included' : ['#include <nt2/include/functions/hypot.hpp>'],
'notes' : [],
'stamp' : 'modified by jt the 03/03/2011',
},
'ranges' : {
'default' : [['T(-10)', 'T(10)'], ['T(-10)', 'T(10)']],
},
'specific_values' : {
},
'verif_test' : {
'property_call' : {
'default' : ['nt2::fdlibm::__ieee754_hypot(a0,a1)'],
},
'property_value' : {
'default' : ['nt2::hypot(a0,a1)'],
},
'simd' : {
},
'ulp_thresh' : {
'default' : ['1'],
},
},
},
},
]
| 27.840909
| 73
| 0.340408
|
d8f861eac13df0680c099c3e3d0d32b19cd531ad
| 9,071
|
py
|
Python
|
nnmethods/falconn/hyperplaneLSH/D1.py
|
gpapadis/ContinuousFilteringBenchmark
|
7121b81f3d0e9d62ef61abc30ca6866f8a56fe64
|
[
"Apache-2.0"
] | null | null | null |
nnmethods/falconn/hyperplaneLSH/D1.py
|
gpapadis/ContinuousFilteringBenchmark
|
7121b81f3d0e9d62ef61abc30ca6866f8a56fe64
|
[
"Apache-2.0"
] | null | null | null |
nnmethods/falconn/hyperplaneLSH/D1.py
|
gpapadis/ContinuousFilteringBenchmark
|
7121b81f3d0e9d62ef61abc30ca6866f8a56fe64
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import print_function
import numpy as np
import falconn
import timeit
import math
import psutil
if __name__ == '__main__':
datasetN = 1
column = 'Embedded Name'
datapart = 'A'
querypart = 'B'
main_dir = "/home/gap2/Documents/blockingNN/data/csvProfiles/falconn/"
for column in ['Embedded Name', 'Embedded Ag.Value']:
for iteration in range(0, 10):
print('Reading the dataset')
dataset = np.load(main_dir + 'D{}'.format(datasetN)+datapart+'-'+column+'.npy')
queries = np.load(main_dir + 'D{}'.format(datasetN)+querypart+'-'+column+'.npy')
IDdata = list(np.load(main_dir + 'D{}'.format(datasetN)+datapart+'ID-'+column+'.npy'))
IDqueries = list(np.load(main_dir + 'D{}'.format(datasetN)+querypart+'ID-'+column+'.npy'))
gt = np.load(main_dir + 'D{}'.format(datasetN)+'GT-'+column+'.npy')
print('Done')
# It's important not to use doubles, unless they are strictly necessary.
# If your dataset consists of doubles, convert it to floats using `astype`.
assert dataset.dtype == np.float32
            # Normalize all the lengths, since we care about the cosine similarity.
print('Normalizing the dataset')
dataset /= np.linalg.norm(dataset, axis=1).reshape(-1, 1)
queries /= np.linalg.norm(queries, axis=1).reshape(-1, 1)
print('Done')
# Perform linear scan using NumPy to get answers to the queries.
#print('Solving queries using linear scan')
#t1 = timeit.default_timer()
#answers = []
#for query in queries:
# answers.append(np.dot(dataset, query).argmax())
#t2 = timeit.default_timer()
#print('Done')
#print('Linear scan time: {} per query'.format((t2 - t1) / float(
# len(queries))))
# Center the dataset and the queries: this improves the performance of LSH quite a bit.
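            # (Added note, following the FALCONN examples: hyperplane hashes take the
            # sign of dot products with random directions through the origin, so
            # shifting the data so its mean sits at the origin tends to spread the
            # points more evenly across the hash buckets.)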
print('Centering the dataset and queries')
center = np.mean(dataset, axis=0)
dataset -= center
queries -= center
print('Done')
#params_cp = falconn.get_default_parameters(len(dataset), len(dataset[0]), falconn.DistanceFunction.EuclideanSquared, True)
#print('lsh family: ', params_cp.lsh_family)
#print('number of tables: ', params_cp.l)
#print('number of rotations: ', params_cp.num_rotations)
#print('number of hash functions: ', params_cp.k)
            # increasing the number of tables will improve the query time at the
            # cost of slower preprocessing and a larger memory footprint; feel free
            # to play with this number
outstring= main_dir + "Output/D{}_HP".format(datasetN)+column+datapart+querypart+".txt"
out = open(outstring, 'a')
out.write("LSH \t dist Funct \t num tables \t num hash funct \t num probes \t index time \t query time\t true matches \t all candidates \t all matches \t Index \t Query \t Indexmemory \t Searchmemory \t storage\n")
out.close()
for L in range(1,2,1):
for k in range(1,2):
if column == "Embedded Name":
number_of_tables = 13
number_of_functions = 14
number_of_probes = 33
storage = falconn.StorageHashTable.LinearProbingHashTable
if column == "Embedded Ag.Value":
number_of_tables = 13
number_of_functions = 15
number_of_probes = 297
storage = falconn.StorageHashTable.BitPackedFlatHashTable
t1 = timeit.default_timer()
#params_cp = falconn.get_default_parameters(len(dataset), len(dataset[0]), falconn.DistanceFunction.EuclideanSquared,False)
params_cp = falconn.LSHConstructionParameters()
params_cp.dimension = len(dataset[0])
params_cp.lsh_family = falconn.LSHFamily.Hyperplane
params_cp.distance_function = falconn.DistanceFunction.NegativeInnerProduct #EuclideanSquared #
params_cp.l = number_of_tables
# we set one rotation, since the data is dense enough,
# for sparse data set it to 2
params_cp.num_rotations = 1
params_cp.seed = 5721840
# we want to use all the available threads to set up
params_cp.num_setup_threads = 0
params_cp.storage_hash_table = storage
# we build 18-bit hashes so that each table has
                    # 2^18 bins; this is a good choice since 2^18 is of the same
# order of magnitude as the number of data points
#falconn.compute_number_of_hash_functions(num_hash_bits, params_cp)
params_cp.k = number_of_functions
print('Constructing the LSH table')
m1= psutil.Process().memory_info().rss / (1024 * 1024)
table = falconn.LSHIndex(params_cp)
table.setup(dataset)
m2 = psutil.Process().memory_info().rss / (1024 * 1024)
t2 = timeit.default_timer()
print('Done')
constructtime = t2-t1
indexm = m2-m1
print('Construction time: {}'.format(constructtime))
t1 = timeit.default_timer()
query_object = table.construct_query_object()
# find the smallest number of probes to achieve accuracy 0.9
# using the binary search
print('Choosing number of probes')
#number_of_probes = params_cp.l
def evaluate_number_of_probes(number_of_probes):
query_object.set_num_probes(number_of_probes)
score = 0
for i,j in gt:
if IDdata.index(i) in query_object.get_unique_candidates(queries[IDqueries.index(j)]):
score +=1
return float(score) / len(gt), score
#while True:
# accuracy, score = evaluate_number_of_probes(number_of_probes)
# print('{} -> {}'.format(number_of_probes, accuracy))
# if accuracy >= 0.9:
# break
# number_of_probes = number_of_probes * 2
#if number_of_probes > params_cp.l:
# left = number_of_probes // 2
# right = number_of_probes
# while right - left > 1:
# number_of_probes = (left + right) // 2
# accuracy, score = evaluate_number_of_probes(number_of_probes)
# print('{} -> {}'.format(number_of_probes, accuracy))
# if accuracy >= 0.9:
# right = number_of_probes
# else:
# left = number_of_probes
# number_of_probes = right
#print('Done')
#print('{} probes'.format(number_of_probes))
# final evaluation
query_object.set_num_probes(number_of_probes)
m1 = psutil.Process().memory_info().rss / (1024 * 1024)
finscore = 0
for i,j in gt:
if IDdata.index(i) in query_object.get_unique_candidates(queries[IDqueries.index(j)]):
finscore +=1
m2= psutil.Process().memory_info().rss / (1024 * 1024)
t2 = timeit.default_timer()
querym = m2-m1
querytime = t2-t1
print('Query time: {}'.format((querytime) / len(queries)))
print('Precision: {}'.format(float(finscore) / len(gt)))
#full number of candidates:
fullCandidates=0
for query in queries:
fullCandidates += len(query_object.get_unique_candidates(query))
#out.write("{} \t {}".format(num_hash_bits, number_of_tables))
out = open(outstring, 'a')
out.write("{} \t {} \t {} \t {} \t {} \t {} \t {} \t {} \t {} \t {} \t {} \t {} \t {} \t {} \t {} \n".format(params_cp.lsh_family, params_cp.distance_function, params_cp.l, params_cp.k, number_of_probes, constructtime, querytime, finscore, fullCandidates, len(gt), len(dataset), len(queries), indexm, querym, params_cp.storage_hash_table))
out.close()
| 51.248588
| 357
| 0.533017
|
bdfb34f523fef549cf42734a04511cae3fce7d83
| 51,884
|
py
|
Python
|
apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/crm/models/crm_lead.py
|
gtfarng/Odoo_migrade
|
9cc28fae4c379e407645248a29d22139925eafe7
|
[
"Apache-2.0"
] | 1
|
2019-12-19T01:53:13.000Z
|
2019-12-19T01:53:13.000Z
|
apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/crm/models/crm_lead.py
|
gtfarng/Odoo_migrade
|
9cc28fae4c379e407645248a29d22139925eafe7
|
[
"Apache-2.0"
] | null | null | null |
apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/crm/models/crm_lead.py
|
gtfarng/Odoo_migrade
|
9cc28fae4c379e407645248a29d22139925eafe7
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from datetime import datetime, timedelta, date
from dateutil.relativedelta import relativedelta
from odoo import api, fields, models, tools, SUPERUSER_ID
from odoo.tools.translate import _
from odoo.tools import email_re, email_split
from odoo.exceptions import UserError, AccessError
from odoo.addons.base.res.res_partner import FormatAddress
from . import crm_stage
_logger = logging.getLogger(__name__)
CRM_LEAD_FIELDS_TO_MERGE = [
'name',
'partner_id',
'campaign_id',
'company_id',
'country_id',
'team_id',
'state_id',
'stage_id',
'medium_id',
'source_id',
'user_id',
'title',
'city',
'contact_name',
'description',
'fax',
'mobile',
'partner_name',
'phone',
'probability',
'planned_revenue',
'street',
'street2',
'zip',
'create_date',
'date_action_last',
'date_action_next',
'email_from',
'email_cc',
'partner_name']
class Lead(FormatAddress, models.Model):
_name = "crm.lead"
_description = "Lead/Opportunity"
_order = "priority desc,date_action,id desc"
_inherit = ['mail.thread', 'ir.needaction_mixin', 'utm.mixin']
_mail_mass_mailing = _('Leads / Opportunities')
def _default_probability(self):
stage_id = self._default_stage_id()
if stage_id:
return self.env['crm.stage'].browse(stage_id).probability
return 10
def _default_stage_id(self):
team = self.env['crm.team'].sudo()._get_default_team_id(user_id=self.env.uid)
return self._stage_find(team_id=team.id, domain=[('fold', '=', False)]).id
name = fields.Char('Opportunity', required=True, index=True)
partner_id = fields.Many2one('res.partner', string='Customer', track_visibility='onchange', index=True,
help="Linked partner (optional). Usually created when converting the lead.")
active = fields.Boolean('Active', default=True)
date_action_last = fields.Datetime('Last Action', readonly=True)
date_action_next = fields.Datetime('Next Action', readonly=True)
email_from = fields.Char('Email', help="Email address of the contact", index=True)
team_id = fields.Many2one('crm.team', string='Sales Team', oldname='section_id', default=lambda self: self.env['crm.team'].sudo()._get_default_team_id(user_id=self.env.uid),
index=True, track_visibility='onchange', help='When sending mails, the default email address is taken from the sales team.')
kanban_state = fields.Selection([('grey', 'No next activity planned'), ('red', 'Next activity late'), ('green', 'Next activity is planned')],
string='Activity State', compute='_compute_kanban_state')
email_cc = fields.Text('Global CC', help="These email addresses will be added to the CC field of all inbound and outbound emails for this record before being sent. Separate multiple email addresses with a comma")
description = fields.Text('Notes')
create_date = fields.Datetime('Create Date', readonly=True)
write_date = fields.Datetime('Update Date', readonly=True)
tag_ids = fields.Many2many('crm.lead.tag', 'crm_lead_tag_rel', 'lead_id', 'tag_id', string='Tags', help="Classify and analyze your lead/opportunity categories like: Training, Service")
contact_name = fields.Char('Contact Name')
partner_name = fields.Char("Customer Name", index=True, help='The name of the future partner company that will be created while converting the lead into opportunity')
opt_out = fields.Boolean(string='Opt-Out', oldname='optout',
help="If opt-out is checked, this contact has refused to receive emails for mass mailing and marketing campaign. "
"Filter 'Available for Mass Mailing' allows users to filter the leads when performing mass mailing.")
type = fields.Selection([('lead', 'Lead'), ('opportunity', 'Opportunity')], index=True, required=True,
default=lambda self: 'lead' if self.env['res.users'].has_group('crm.group_use_lead') else 'opportunity',
help="Type is used to separate Leads and Opportunities")
priority = fields.Selection(crm_stage.AVAILABLE_PRIORITIES, string='Rating', index=True, default=crm_stage.AVAILABLE_PRIORITIES[0][0])
date_closed = fields.Datetime('Closed Date', readonly=True, copy=False)
stage_id = fields.Many2one('crm.stage', string='Stage', track_visibility='onchange', index=True,
domain="['|', ('team_id', '=', False), ('team_id', '=', team_id)]",
group_expand='_read_group_stage_ids', default=lambda self: self._default_stage_id())
user_id = fields.Many2one('res.users', string='Salesperson', index=True, track_visibility='onchange', default=lambda self: self.env.user)
referred = fields.Char('Referred By')
date_open = fields.Datetime('Assigned', readonly=True, default=lambda self: fields.Datetime.now())
day_open = fields.Float(compute='_compute_day_open', string='Days to Assign', store=True)
day_close = fields.Float(compute='_compute_day_close', string='Days to Close', store=True)
date_last_stage_update = fields.Datetime(string='Last Stage Update', index=True, default=fields.Datetime.now)
date_conversion = fields.Datetime('Conversion Date', readonly=True)
# Messaging and marketing
message_bounce = fields.Integer('Bounce', help="Counter of the number of bounced emails for this contact")
# Only used for type opportunity
probability = fields.Float('Probability', group_operator="avg", default=lambda self: self._default_probability())
planned_revenue = fields.Float('Expected Revenue', track_visibility='always')
date_deadline = fields.Date('Expected Closing', help="Estimate of the date on which the opportunity will be won.")
# CRM Actions
next_activity_id = fields.Many2one("crm.activity", string="Next Activity", index=True)
date_action = fields.Date('Next Activity Date', index=True)
title_action = fields.Char('Next Activity Summary')
color = fields.Integer('Color Index', default=0)
partner_address_name = fields.Char('Partner Contact Name', related='partner_id.name', readonly=True)
partner_address_email = fields.Char('Partner Contact Email', related='partner_id.email', readonly=True)
company_currency = fields.Many2one(string='Currency', related='company_id.currency_id', readonly=True, relation="res.currency")
user_email = fields.Char('User Email', related='user_id.email', readonly=True)
user_login = fields.Char('User Login', related='user_id.login', readonly=True)
# Fields for address, due to separation from crm and res.partner
street = fields.Char('Street')
street2 = fields.Char('Street2')
zip = fields.Char('Zip', change_default=True)
city = fields.Char('City')
state_id = fields.Many2one("res.country.state", string='State')
country_id = fields.Many2one('res.country', string='Country')
phone = fields.Char('Phone')
fax = fields.Char('Fax')
mobile = fields.Char('Mobile')
function = fields.Char('Job Position')
title = fields.Many2one('res.partner.title')
company_id = fields.Many2one('res.company', string='Company', index=True, default=lambda self: self.env.user.company_id.id)
meeting_count = fields.Integer('# Meetings', compute='_compute_meeting_count')
lost_reason = fields.Many2one('crm.lost.reason', string='Lost Reason', index=True, track_visibility='onchange')
_sql_constraints = [
('check_probability', 'check(probability >= 0 and probability <= 100)', 'The probability of closing the deal should be between 0% and 100%!')
]
@api.model
def _read_group_stage_ids(self, stages, domain, order):
# retrieve team_id from the context and write the domain
# - ('id', 'in', stages.ids): add columns that should be present
# - OR ('fold', '=', False): add default columns that are not folded
# - OR ('team_ids', '=', team_id), ('fold', '=', False) if team_id: add team columns that are not folded
team_id = self._context.get('default_team_id')
if team_id:
search_domain = ['|', ('id', 'in', stages.ids), '|', ('team_id', '=', False), ('team_id', '=', team_id)]
else:
search_domain = ['|', ('id', 'in', stages.ids), ('team_id', '=', False)]
# perform search
stage_ids = stages._search(search_domain, order=order, access_rights_uid=SUPERUSER_ID)
return stages.browse(stage_ids)
@api.multi
def _compute_kanban_state(self):
today = date.today()
for lead in self:
kanban_state = 'grey'
if lead.date_action:
lead_date = fields.Date.from_string(lead.date_action)
if lead_date >= today:
kanban_state = 'green'
else:
kanban_state = 'red'
lead.kanban_state = kanban_state
@api.depends('date_open')
def _compute_day_open(self):
""" Compute difference between create date and open date """
for lead in self.filtered(lambda l: l.date_open):
date_create = fields.Datetime.from_string(lead.create_date)
date_open = fields.Datetime.from_string(lead.date_open)
lead.day_open = abs((date_open - date_create).days)
@api.depends('date_closed')
def _compute_day_close(self):
""" Compute difference between current date and log date """
for lead in self.filtered(lambda l: l.date_closed):
date_create = fields.Datetime.from_string(lead.create_date)
date_close = fields.Datetime.from_string(lead.date_closed)
lead.day_close = abs((date_close - date_create).days)
@api.multi
def _compute_meeting_count(self):
meeting_data = self.env['calendar.event'].read_group([('opportunity_id', 'in', self.ids)], ['opportunity_id'], ['opportunity_id'])
mapped_data = {m['opportunity_id'][0]: m['opportunity_id_count'] for m in meeting_data}
for lead in self:
lead.meeting_count = mapped_data.get(lead.id, 0)
@api.model
def _onchange_stage_id_values(self, stage_id):
""" returns the new values when stage_id has changed """
if not stage_id:
return {}
stage = self.env['crm.stage'].browse(stage_id)
if stage.on_change:
return {'probability': stage.probability}
return {}
@api.onchange('stage_id')
def _onchange_stage_id(self):
values = self._onchange_stage_id_values(self.stage_id.id)
self.update(values)
def _onchange_partner_id_values(self, partner_id):
""" returns the new values when partner_id has changed """
if partner_id:
partner = self.env['res.partner'].browse(partner_id)
partner_name = partner.parent_id.name
if not partner_name and partner.is_company:
partner_name = partner.name
return {
'partner_name': partner_name,
'contact_name': partner.name if not partner.is_company else False,
'title': partner.title.id,
'street': partner.street,
'street2': partner.street2,
'city': partner.city,
'state_id': partner.state_id.id,
'country_id': partner.country_id.id,
'email_from': partner.email,
'phone': partner.phone,
'mobile': partner.mobile,
'fax': partner.fax,
'zip': partner.zip,
'function': partner.function,
}
return {}
@api.onchange('partner_id')
def _onchange_partner_id(self):
values = self._onchange_partner_id_values(self.partner_id.id if self.partner_id else False)
self.update(values)
@api.model
def _onchange_user_values(self, user_id):
""" returns new values when user_id has changed """
if user_id and self._context.get('team_id'):
team = self.env['crm.team'].browse(self._context['team_id'])
if user_id in team.member_ids.ids:
return {}
team_id = self.env['crm.team']._get_default_team_id(user_id=user_id)
return {'team_id': team_id}
@api.onchange('user_id')
def _onchange_user_id(self):
""" When changing the user, also set a team_id or restrict team id to the ones user_id is member of. """
values = self._onchange_user_values(self.user_id.id)
self.update(values)
@api.onchange('state_id')
def _onchange_state(self):
if self.state_id:
self.country_id = self.state_id.country_id.id
@api.onchange('next_activity_id')
def _onchange_next_activity_id(self):
values = {
'title_action': False,
'date_action': False,
}
if self.next_activity_id:
values['title_action'] = self.next_activity_id.description
if self.next_activity_id.days:
values['date_action'] = fields.Datetime.to_string(datetime.now() + timedelta(days=self.next_activity_id.days))
self.update(values)
# ----------------------------------------
# ORM override (CRUD, fields_view_get, ...)
# ----------------------------------------
@api.model
def create(self, vals):
# set up context used to find the lead's sales team which is needed
# to correctly set the default stage_id
context = dict(self._context or {})
if vals.get('type') and not self._context.get('default_type'):
context['default_type'] = vals.get('type')
if vals.get('team_id') and not self._context.get('default_team_id'):
context['default_team_id'] = vals.get('team_id')
if vals.get('user_id') and 'date_open' not in vals:
vals['date_open'] = fields.Datetime.now()
        # context: no_log, because the subtype already handles this
return super(Lead, self.with_context(context, mail_create_nolog=True)).create(vals)
@api.multi
def write(self, vals):
# stage change: update date_last_stage_update
if 'stage_id' in vals:
vals['date_last_stage_update'] = fields.Datetime.now()
if vals.get('user_id') and 'date_open' not in vals:
vals['date_open'] = fields.Datetime.now()
# stage change with new stage: update probability and date_closed
if vals.get('stage_id') and 'probability' not in vals:
vals.update(self._onchange_stage_id_values(vals.get('stage_id')))
        if vals.get('probability', 0) >= 100 or not vals.get('active', True):
vals['date_closed'] = fields.Datetime.now()
elif 'probability' in vals and vals['probability'] < 100:
vals['date_closed'] = False
return super(Lead, self).write(vals)
@api.multi
def copy(self, default=None):
self.ensure_one()
# set default value in context, if not already set (Put stage to 'new' stage)
context = dict(self._context)
context.setdefault('default_type', self.type)
context.setdefault('default_team_id', self.team_id.id)
# Set date_open to today if it is an opp
default = default or {}
default['date_open'] = fields.Datetime.now() if self.type == 'opportunity' else False
return super(Lead, self.with_context(context)).copy(default=default)
@api.model
def fields_view_get(self, view_id=None, view_type='form', toolbar=False, submenu=False):
if self._context.get('opportunity_id'):
opportunity = self.browse(self._context['opportunity_id'])
action = opportunity.get_formview_action()
if action.get('views') and any(view_id for view_id in action['views'] if view_id[1] == view_type):
view_id = next(view_id[0] for view_id in action['views'] if view_id[1] == view_type)
res = super(Lead, self).fields_view_get(view_id=view_id, view_type=view_type, toolbar=toolbar, submenu=submenu)
if view_type == 'form':
res['arch'] = self.fields_view_get_address(res['arch'])
return res
# ----------------------------------------
# Actions Methods
# ----------------------------------------
@api.multi
def action_set_lost(self):
""" Lost semantic: probability = 0, active = False """
return self.write({'probability': 0, 'active': False})
@api.multi
def action_set_active(self):
return self.write({'active': True})
@api.multi
def action_set_unactive(self):
return self.write({'active': False})
@api.multi
def action_set_won(self):
""" Won semantic: probability = 100 (active untouched) """
for lead in self:
stage_id = lead._stage_find(domain=[('probability', '=', 100.0), ('on_change', '=', True)])
lead.write({'stage_id': stage_id.id, 'probability': 100})
return True
@api.multi
def action_schedule_meeting(self):
""" Open meeting's calendar view to schedule meeting on current opportunity.
:return dict: dictionary value for created Meeting view
"""
self.ensure_one()
action = self.env.ref('calendar.action_calendar_event').read()[0]
partner_ids = self.env.user.partner_id.ids
if self.partner_id:
partner_ids.append(self.partner_id.id)
action['context'] = {
'search_default_opportunity_id': self.id if self.type == 'opportunity' else False,
'default_opportunity_id': self.id if self.type == 'opportunity' else False,
'default_partner_id': self.partner_id.id,
'default_partner_ids': partner_ids,
'default_team_id': self.team_id.id,
'default_name': self.name,
}
return action
@api.multi
def close_dialog(self):
return {'type': 'ir.actions.act_window_close'}
# ----------------------------------------
# Business Methods
# ----------------------------------------
def _stage_find(self, team_id=False, domain=None, order='sequence'):
""" Determine the stage of the current lead with its teams, the given domain and the given team_id
:param team_id
:param domain : base search domain for stage
:returns crm.stage recordset
"""
# collect all team_ids by adding given one, and the ones related to the current leads
team_ids = set()
if team_id:
team_ids.add(team_id)
for lead in self:
if lead.team_id:
team_ids.add(lead.team_id.id)
# generate the domain
if team_ids:
search_domain = ['|', ('team_id', '=', False), ('team_id', 'in', list(team_ids))]
else:
search_domain = [('team_id', '=', False)]
# AND with the domain in parameter
if domain:
search_domain += list(domain)
# perform search, return the first found
return self.env['crm.stage'].search(search_domain, order=order, limit=1)
@api.multi
def _merge_get_result_type(self):
""" Define the type of the result of the merge. If at least one of the
element to merge is an opp, the resulting new element will be an opp.
Otherwise it will be a lead.
We'll directly use a list of browse records instead of a list of ids
for performances' sake: it will spare a second browse of the
leads/opps.
:param list opps: list of browse records containing the leads/opps to process
:return string type: the type of the final element
"""
if any(record.type == 'opportunity' for record in self):
return 'opportunity'
return 'lead'
@api.multi
def _merge_data(self, fields):
""" Prepare lead/opp data into a dictionary for merging. Different types
of fields are processed in different ways:
- text: all the values are concatenated
- m2m and o2m: those fields aren't processed
                - m2o: the first non-null value prevails (the others are dropped)
- any other type of field: same as m2o
:param fields: list of fields to process
:return dict data: contains the merged values of the new opportunity
"""
# helpers
def _get_first_not_null(attr, opportunities):
for opp in opportunities:
val = opp[attr]
if val:
return val
return False
def _get_first_not_null_id(attr, opportunities):
res = _get_first_not_null(attr, opportunities)
return res.id if res else False
def _concat_all(attr, opportunities):
return '\n\n'.join(filter(None, (opp[attr] for opp in opportunities)))
# process the fields' values
data = {}
for field_name in fields:
field = self._fields.get(field_name)
if field is None:
continue
if field.type in ('many2many', 'one2many'):
continue
elif field.type == 'many2one':
data[field_name] = _get_first_not_null_id(field_name, self) # take the first not null
elif field.type == 'text':
                data[field_name] = _concat_all(field_name, self)  # concatenate this field across all opportunities
else:
data[field_name] = _get_first_not_null(field_name, self)
# define the resulting type ('lead' or 'opportunity')
data['type'] = self._merge_get_result_type()
return data
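    # Illustrative example (not part of the original module): for two hypothetical leads
    # where the first has description='Call back', partner_id=partner_a and the second has
    # description='Asked for a quote', partner_id=False, _merge_data(['description',
    # 'partner_id']) would return roughly
    #   {'description': 'Call back\n\nAsked for a quote',  # text fields concatenated
    #    'partner_id': partner_a.id,                       # first non-null m2o wins
    #    'type': 'opportunity' or 'lead'}                  # from _merge_get_result_type()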
@api.one
def _mail_body(self, fields):
""" generate the message body with the changed values
:param fields : list of fields to track
:returns the body of the message for the current crm.lead
"""
title = "%s : %s\n" % (_('Merged opportunity') if self.type == 'opportunity' else _('Merged lead'), self.name)
body = [title]
fields = self.env['ir.model.fields'].search([('name', 'in', fields or []), ('model_id.model', '=', self._name)])
for field in fields:
value = getattr(self, field.name, False)
if field.ttype == 'selection':
value = dict(field.get_values(self.env)).get(value, value)
elif field.ttype == 'many2one':
if value:
value = value.sudo().name_get()[0][1]
elif field.ttype == 'many2many':
if value:
value = ','.join(
val.name_get()[0][1]
for val in value.sudo()
)
body.append("%s: %s" % (field.field_description, value or ''))
return "<br/>".join(body + ['<br/>'])
@api.multi
def _merge_notify(self, opportunities):
""" Create a message gathering merged leads/opps informations. Using message_post, send a
message explaining which fields has been merged and their new value. `self` is the
resulting merge crm.lead record.
:param opportunities : recordset of merged crm.lead
:returns mail.message posted on resulting crm.lead
"""
# TODO JEM: mail template should be used instead of fix body, subject text
self.ensure_one()
# mail message's subject
result_type = opportunities._merge_get_result_type()
merge_message = _('Merged leads') if result_type == 'lead' else _('Merged opportunities')
subject = merge_message + ": " + ", ".join(opportunities.mapped('name'))
# message bodies
message_bodies = opportunities._mail_body(list(CRM_LEAD_FIELDS_TO_MERGE))
message_body = "\n\n".join(message_bodies)
return self.message_post(body=message_body, subject=subject)
@api.multi
def _merge_opportunity_history(self, opportunities):
""" Move mail.message from the given opportunities to the current one. `self` is the
crm.lead record destination for message of `opportunities`.
:param opportunities : recordset of crm.lead to move the messages
"""
self.ensure_one()
for opportunity in opportunities:
for message in opportunity.message_ids:
message.write({
'res_id': self.id,
'subject': _("From %s : %s") % (opportunity.name, message.subject)
})
return True
@api.multi
def _merge_opportunity_attachments(self, opportunities):
""" Move attachments of given opportunities to the current one `self`, and rename
the attachments having same name than native ones.
:param opportunities : recordset of merged crm.lead
"""
self.ensure_one()
# return attachments of opportunity
def _get_attachments(opportunity_id):
return self.env['ir.attachment'].search([('res_model', '=', self._name), ('res_id', '=', opportunity_id)])
first_attachments = _get_attachments(self.id)
# counter of all attachments to move. Used to make sure the name is different for all attachments
count = 1
for opportunity in opportunities:
attachments = _get_attachments(opportunity.id)
for attachment in attachments:
values = {'res_id': self.id}
for attachment_in_first in first_attachments:
if attachment.name == attachment_in_first.name:
values['name'] = "%s (%s)" % (attachment.name, count)
count += 1
attachment.write(values)
return True
@api.multi
def merge_dependences(self, opportunities):
""" Merge dependences (messages, attachments, ...). These dependences will be
transfered to `self`, the most important lead.
:param opportunities : recordset of opportunities to transfert. Does
not include `self`.
"""
self.ensure_one()
self._merge_notify(opportunities)
self._merge_opportunity_history(opportunities)
self._merge_opportunity_attachments(opportunities)
@api.multi
def merge_opportunity(self, user_id=False, team_id=False):
""" Merge opportunities in one. Different cases of merge:
- merge leads together = 1 new lead
- merge at least 1 opp with anything else (lead or opp) = 1 new opp
The resulting lead/opportunity will be the most important one (based on its confidence level)
updated with values from other opportunities to merge.
            :param user_id : the id of the salesperson. If not given, will be determined by `_merge_data`.
            :param team_id : the id of the sales team. If not given, will be determined by `_merge_data`.
            :return crm.lead record resulting from the merge
"""
if len(self.ids) <= 1:
raise UserError(_('Please select more than one element (lead or opportunity) from the list view.'))
# Sorting the leads/opps according to the confidence level of its stage, which relates to the probability of winning it
# The confidence level increases with the stage sequence, except when the stage probability is 0.0 (Lost cases)
# An Opportunity always has higher confidence level than a lead, unless its stage probability is 0.0
def opps_key(opportunity):
sequence = -1
if opportunity.stage_id.on_change:
sequence = opportunity.stage_id.sequence
return (sequence != -1 and opportunity.type == 'opportunity'), sequence, -opportunity.id
opportunities = self.sorted(key=opps_key, reverse=True)
# get SORTED recordset of head and tail, and complete list
opportunities_head = opportunities[0]
opportunities_tail = opportunities[1:]
# merge all the sorted opportunity. This means the value of
# the first (head opp) will be a priority.
merged_data = opportunities._merge_data(list(CRM_LEAD_FIELDS_TO_MERGE))
# force value for saleperson and sales team
if user_id:
merged_data['user_id'] = user_id
if team_id:
merged_data['team_id'] = team_id
# merge other data (mail.message, attachments, ...) from tail into head
opportunities_head.merge_dependences(opportunities_tail)
# check if the stage is in the stages of the sales team. If not, assign the stage with the lowest sequence
if merged_data.get('team_id'):
team_stage_ids = self.env['crm.stage'].search(['|', ('team_id', '=', merged_data['team_id']), ('team_id', '=', False)], order='sequence')
if merged_data.get('stage_id') not in team_stage_ids.ids:
merged_data['stage_id'] = team_stage_ids[0].id if team_stage_ids else False
# write merged data into first opportunity
opportunities_head.write(merged_data)
# delete tail opportunities
# we use the SUPERUSER to avoid access rights issues because as the user had the rights to see the records it should be safe to do so
opportunities_tail.sudo().unlink()
return opportunities_head
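    # Illustrative note (not part of the original module): opps_key ranks records so that
    # opportunities sitting in a stage with on_change set (a real pipeline stage) come
    # first, then higher stage sequences, with ties broken towards the lowest id. For a
    # hypothetical set [lead, opp_seq1, opp_seq3], sorted(..., reverse=True) yields
    # [opp_seq3, opp_seq1, lead], so the head record receiving the merged values is the
    # most advanced opportunity.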
@api.multi
def get_duplicated_leads(self, partner_id, include_lost=False):
""" Search for opportunities that have the same partner and that arent done or cancelled
:param partner_id : partner to search
"""
self.ensure_one()
email = self.partner_id.email or self.email_from
return self._get_duplicated_leads_by_emails(partner_id, email, include_lost=include_lost)
@api.model
def _get_duplicated_leads_by_emails(self, partner_id, email, include_lost=False):
""" Search for opportunities that have the same partner and that arent done or cancelled """
partner_match_domain = []
for email in set(email_split(email) + [email]):
partner_match_domain.append(('email_from', '=ilike', email))
if partner_id:
partner_match_domain.append(('partner_id', '=', partner_id))
partner_match_domain = ['|'] * (len(partner_match_domain) - 1) + partner_match_domain
if not partner_match_domain:
return []
domain = partner_match_domain
if not include_lost:
domain += ['&', ('active', '=', True), ('probability', '<', 100)]
return self.search(domain)
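    # Illustrative example (not part of the original module): a hypothetical call
    # _get_duplicated_leads_by_emails(partner_id=42, email='John <john@example.com>')
    # assembles, roughly, the prefix-notation domain
    #   ['|', '|',
    #    ('email_from', '=ilike', 'john@example.com'),
    #    ('email_from', '=ilike', 'John <john@example.com>'),
    #    ('partner_id', '=', 42),
    #    '&', ('active', '=', True), ('probability', '<', 100)]
    # i.e. an OR over the email variants and the partner, ANDed with "still open".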
@api.multi
def _convert_opportunity_data(self, customer, team_id=False):
""" Extract the data from a lead to create the opportunity
:param customer : res.partner record
:param team_id : identifier of the sales team to determine the stage
"""
if not team_id:
team_id = self.team_id.id if self.team_id else False
value = {
'planned_revenue': self.planned_revenue,
'probability': self.probability,
'name': self.name,
'partner_id': customer.id if customer else False,
'type': 'opportunity',
'date_open': fields.Datetime.now(),
'email_from': customer and customer.email or self.email_from,
'phone': customer and customer.phone or self.phone,
'date_conversion': fields.Datetime.now(),
}
if not self.stage_id:
stage = self._stage_find(team_id=team_id)
value['stage_id'] = stage.id
if stage:
value['probability'] = stage.probability
return value
@api.multi
def convert_opportunity(self, partner_id, user_ids=False, team_id=False):
customer = False
if partner_id:
customer = self.env['res.partner'].browse(partner_id)
for lead in self:
if not lead.active or lead.probability == 100:
continue
vals = lead._convert_opportunity_data(customer, team_id)
lead.write(vals)
if user_ids or team_id:
self.allocate_salesman(user_ids, team_id)
return True
@api.multi
def _lead_create_contact(self, name, is_company, parent_id=False):
""" extract data from lead to create a partner
            :param name : future name of the partner
:param is_company : True if the partner is a company
:param parent_id : id of the parent partner (False if no parent)
:returns res.partner record
"""
email_split = tools.email_split(self.email_from)
values = {
'name': name,
'user_id': self.env.context.get('default_user_id') or self.user_id.id,
'comment': self.description,
'team_id': self.team_id.id,
'parent_id': parent_id,
'phone': self.phone,
'mobile': self.mobile,
'email': email_split[0] if email_split else False,
'fax': self.fax,
'title': self.title.id,
'function': self.function,
'street': self.street,
'street2': self.street2,
'zip': self.zip,
'city': self.city,
'country_id': self.country_id.id,
'state_id': self.state_id.id,
'is_company': is_company,
'type': 'contact'
}
return self.env['res.partner'].create(values)
@api.multi
def _create_lead_partner(self):
""" Create a partner from lead data
:returns res.partner record
"""
contact_name = self.contact_name
if not contact_name:
contact_name = self.env['res.partner']._parse_partner_name(self.email_from)[0] if self.email_from else False
if self.partner_name:
partner_company = self._lead_create_contact(self.partner_name, True)
elif self.partner_id:
partner_company = self.partner_id
else:
partner_company = None
if contact_name:
return self._lead_create_contact(contact_name, False, partner_company.id if partner_company else False)
if partner_company:
return partner_company
return self._lead_create_contact(self.name, False)
@api.multi
def handle_partner_assignation(self, action='create', partner_id=False):
""" Handle partner assignation during a lead conversion.
if action is 'create', create new partner with contact and assign lead to new partner_id.
otherwise assign lead to the specified partner_id
:param list ids: leads/opportunities ids to process
:param string action: what has to be done regarding partners (create it, assign an existing one, or nothing)
:param int partner_id: partner to assign if any
:return dict: dictionary organized as followed: {lead_id: partner_assigned_id}
"""
partner_ids = {}
for lead in self:
if lead.partner_id:
partner_ids[lead.id] = lead.partner_id.id
continue
if action == 'create':
partner = lead._create_lead_partner()
partner_id = partner.id
partner.team_id = lead.team_id
if partner_id:
lead.partner_id = partner_id
partner_ids[lead.id] = partner_id
return partner_ids
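    # Illustrative example (not part of the original module): calling
    # leads.handle_partner_assignation(action='create') on two hypothetical leads with
    # ids 10 and 11, where lead 10 already points to partner 7, would return something
    # like {10: 7, 11: <id of the partner newly created from lead 11's contact data>}.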
@api.multi
def allocate_salesman(self, user_ids=None, team_id=False):
""" Assign salesmen and salesteam to a batch of leads. If there are more
leads than salesmen, these salesmen will be assigned in round-robin.
E.g.: 4 salesmen (S1, S2, S3, S4) for 6 leads (L1, L2, ... L6). They
            will be assigned as follows: L1 - S1, L2 - S2, L3 - S3, L4 - S4,
L5 - S1, L6 - S2.
:param list ids: leads/opportunities ids to process
:param list user_ids: salesmen to assign
:param int team_id: salesteam to assign
:return bool
"""
index = 0
for lead in self:
value = {}
if team_id:
value['team_id'] = team_id
if user_ids:
value['user_id'] = user_ids[index]
# Cycle through user_ids
index = (index + 1) % len(user_ids)
if value:
lead.write(value)
return True
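    # Minimal standalone sketch of the round-robin rule described above (illustrative
    # only, plain ids instead of recordsets):
    #
    #   def round_robin(lead_ids, user_ids):
    #       return {lead_id: user_ids[i % len(user_ids)]
    #               for i, lead_id in enumerate(lead_ids)}
    #
    #   round_robin([1, 2, 3, 4, 5, 6], ['S1', 'S2', 'S3', 'S4'])
    #   # -> {1: 'S1', 2: 'S2', 3: 'S3', 4: 'S4', 5: 'S1', 6: 'S2'}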
@api.multi
def redirect_opportunity_view(self):
self.ensure_one()
# Get opportunity views
form_view = self.env.ref('crm.crm_case_form_view_oppor')
tree_view = self.env.ref('crm.crm_case_tree_view_oppor')
return {
'name': _('Opportunity'),
'view_type': 'form',
'view_mode': 'tree, form',
'res_model': 'crm.lead',
'domain': [('type', '=', 'opportunity')],
'res_id': self.id,
'view_id': False,
'views': [
(form_view.id, 'form'),
(tree_view.id, 'tree'),
(False, 'kanban'),
(False, 'calendar'),
(False, 'graph')
],
'type': 'ir.actions.act_window',
'context': {'default_type': 'opportunity'}
}
@api.multi
def redirect_lead_view(self):
self.ensure_one()
# Get lead views
form_view = self.env.ref('crm.crm_case_form_view_leads')
tree_view = self.env.ref('crm.crm_case_tree_view_leads')
return {
'name': _('Lead'),
'view_type': 'form',
'view_mode': 'tree, form',
'res_model': 'crm.lead',
'domain': [('type', '=', 'lead')],
'res_id': self.id,
'view_id': False,
'views': [
(form_view.id, 'form'),
(tree_view.id, 'tree'),
(False, 'calendar'),
(False, 'graph')
],
'type': 'ir.actions.act_window',
}
@api.model
def get_empty_list_help(self, help):
if help:
alias_record = self.env.ref("crm.mail_alias_lead_info", raise_if_not_found=False)
if alias_record and alias_record.alias_domain and alias_record.alias_name:
email = '%s@%s' % (alias_record.alias_name, alias_record.alias_domain)
email_link = "<a href='mailto:%s'>%s</a>" % (email, email)
dynamic_help = _("""All email incoming to %s will automatically
                    create new opportunities. Update your business card, phone book, social media,...
Send an email right now and see it here.""") % (email_link,)
return '<p class="oe_view_nocontent_create">%s</p>%s<p>%s</p>' % (_('Click to add a new opportunity'), help, dynamic_help)
return super(Lead, self.with_context(
empty_list_help_model='crm.team',
empty_list_help_id=self._context.get('default_team_id', False),
empty_list_help_document_name=_("opportunities"),
)).get_empty_list_help(help)
@api.multi
def log_meeting(self, meeting_subject, meeting_date, duration):
if not duration:
duration = _('unknown')
else:
duration = str(duration)
meet_date = fields.Datetime.from_string(meeting_date)
meeting_usertime = fields.Datetime.to_string(fields.Datetime.context_timestamp(self, meet_date))
html_time = "<time datetime='%s+00:00'>%s</time>" % (meeting_date, meeting_usertime)
message = _("Meeting scheduled at '%s'<br> Subject: %s <br> Duration: %s hour(s)") % (html_time, meeting_subject, duration)
return self.message_post(body=message)
# ----------------------------------------
# Sales Team Dashboard
# ----------------------------------------
@api.model
def retrieve_sales_dashboard(self):
""" Fetch data to setup Sales Dashboard """
result = {
'meeting': {
'today': 0,
'next_7_days': 0,
},
'activity': {
'today': 0,
'overdue': 0,
'next_7_days': 0,
},
'closing': {
'today': 0,
'overdue': 0,
'next_7_days': 0,
},
'done': {
'this_month': 0,
'last_month': 0,
},
'won': {
'this_month': 0,
'last_month': 0,
},
'nb_opportunities': 0,
}
opportunities = self.search([('type', '=', 'opportunity'), ('user_id', '=', self._uid)])
for opp in opportunities:
# Expected closing
if opp.date_deadline:
date_deadline = fields.Date.from_string(opp.date_deadline)
if date_deadline == date.today():
result['closing']['today'] += 1
if date.today() <= date_deadline <= date.today() + timedelta(days=7):
result['closing']['next_7_days'] += 1
if date_deadline < date.today() and not opp.date_closed:
result['closing']['overdue'] += 1
# Next activities
if opp.next_activity_id and opp.date_action:
date_action = fields.Date.from_string(opp.date_action)
if date_action == date.today():
result['activity']['today'] += 1
if date.today() <= date_action <= date.today() + timedelta(days=7):
result['activity']['next_7_days'] += 1
if date_action < date.today():
result['activity']['overdue'] += 1
# Won in Opportunities
if opp.date_closed:
date_closed = fields.Date.from_string(opp.date_closed)
if date.today().replace(day=1) <= date_closed <= date.today():
if opp.planned_revenue:
result['won']['this_month'] += opp.planned_revenue
elif date.today() + relativedelta(months=-1, day=1) <= date_closed < date.today().replace(day=1):
if opp.planned_revenue:
result['won']['last_month'] += opp.planned_revenue
result['nb_opportunities'] = len(opportunities)
        # crm.activity has no direct link to leads, so join mail_message with crm_lead and crm_activity to retrieve the activities that were done.
self._cr.execute("""
SELECT
m.id,
m.subtype_id,
m.date,
l.user_id,
l.type
FROM mail_message M
LEFT JOIN crm_lead L ON (M.res_id = L.id)
INNER JOIN crm_activity A ON (M.subtype_id = A.subtype_id)
WHERE
(M.model = 'crm.lead') AND (L.user_id = %s) AND (L.type = 'opportunity')
""", (self._uid,))
activites_done = self._cr.dictfetchall()
for activity in activites_done:
if activity['date']:
date_act = fields.Date.from_string(activity['date'])
if date.today().replace(day=1) <= date_act <= date.today():
result['done']['this_month'] += 1
elif date.today() + relativedelta(months=-1, day=1) <= date_act < date.today().replace(day=1):
result['done']['last_month'] += 1
# Meetings
min_date = fields.Datetime.now()
max_date = fields.Datetime.to_string(datetime.now() + timedelta(days=8))
meetings_domain = [
('start', '>=', min_date),
('start', '<=', max_date),
('partner_ids', 'in', [self.env.user.partner_id.id])
]
meetings = self.env['calendar.event'].search(meetings_domain)
for meeting in meetings:
if meeting['start']:
start = datetime.strptime(meeting['start'], tools.DEFAULT_SERVER_DATETIME_FORMAT).date()
if start == date.today():
result['meeting']['today'] += 1
if date.today() <= start <= date.today() + timedelta(days=7):
result['meeting']['next_7_days'] += 1
result['done']['target'] = self.env.user.target_sales_done
result['won']['target'] = self.env.user.target_sales_won
result['currency_id'] = self.env.user.company_id.currency_id.id
return result
@api.model
def modify_target_sales_dashboard(self, target_name, target_value):
""" Update the user objectives (`target_sales_done`, target_sales_won`
and `target_sales_invoiced` fields).
:param target_name : part of the fields name to update
:param target_value : value of the field to update
"""
if target_name in ['won', 'done', 'invoiced']:
# bypass rights, since self.env.user is browsed as SUPERUSER_ID
self.env.user.write({'target_sales_' + target_name: target_value})
else:
raise UserError(_('This target does not exist.'))
# ----------------------------------------
# Mail Gateway
# ----------------------------------------
@api.multi
def _track_subtype(self, init_values):
self.ensure_one()
if 'stage_id' in init_values and self.probability == 100 and self.stage_id and self.stage_id.on_change:
return 'crm.mt_lead_won'
elif 'active' in init_values and self.probability == 0 and not self.active:
return 'crm.mt_lead_lost'
elif 'stage_id' in init_values and self.stage_id and self.stage_id.sequence <= 1:
return 'crm.mt_lead_create'
elif 'stage_id' in init_values:
return 'crm.mt_lead_stage'
return super(Lead, self)._track_subtype(init_values)
@api.multi
def _notification_recipients(self, message, groups):
""" Handle salesman recipients that can convert leads into opportunities
and set opportunities as won / lost. """
groups = super(Lead, self)._notification_recipients(message, groups)
self.ensure_one()
if self.type == 'lead':
convert_action = self._notification_link_helper('controller', controller='/lead/convert')
salesman_actions = [{'url': convert_action, 'title': _('Convert to opportunity')}]
else:
won_action = self._notification_link_helper('controller', controller='/lead/case_mark_won')
lost_action = self._notification_link_helper('controller', controller='/lead/case_mark_lost')
salesman_actions = [
{'url': won_action, 'title': _('Won')},
{'url': lost_action, 'title': _('Lost')}]
new_group = (
'group_sale_salesman', lambda partner: bool(partner.user_ids) and any(user.has_group('sales_team.group_sale_salesman') for user in partner.user_ids), {
'actions': salesman_actions,
})
return [new_group] + groups
@api.model
def message_get_reply_to(self, res_ids, default=None):
leads = self.sudo().browse(res_ids)
aliases = self.env['crm.team'].message_get_reply_to(leads.mapped('team_id').ids, default=default)
return {lead.id: aliases.get(lead.team_id.id or 0, False) for lead in leads}
@api.multi
def get_formview_id(self):
if self.type == 'opportunity':
view_id = self.env.ref('crm.crm_case_form_view_oppor').id
else:
view_id = super(Lead, self).get_formview_id()
return view_id
@api.multi
def message_get_suggested_recipients(self):
recipients = super(Lead, self).message_get_suggested_recipients()
try:
for lead in self:
if lead.partner_id:
lead._message_add_suggested_recipient(recipients, partner=lead.partner_id, reason=_('Customer'))
elif lead.email_from:
lead._message_add_suggested_recipient(recipients, email=lead.email_from, reason=_('Customer Email'))
        except AccessError: # no read access rights -> just ignore suggested recipients because this would imply modifying followers
pass
return recipients
@api.model
def message_new(self, msg_dict, custom_values=None):
""" Overrides mail_thread message_new that is called by the mailgateway
through message_process.
This override updates the document according to the email.
"""
# remove default author when going through the mail gateway. Indeed we
# do not want to explicitly set user_id to False; however we do not
# want the gateway user to be responsible if no other responsible is
# found.
self = self.with_context(default_user_id=False)
if custom_values is None:
custom_values = {}
defaults = {
'name': msg_dict.get('subject') or _("No Subject"),
'email_from': msg_dict.get('from'),
'email_cc': msg_dict.get('cc'),
'partner_id': msg_dict.get('author_id', False),
}
if msg_dict.get('author_id'):
defaults.update(self._onchange_partner_id_values(msg_dict.get('author_id')))
if msg_dict.get('priority') in dict(crm_stage.AVAILABLE_PRIORITIES):
defaults['priority'] = msg_dict.get('priority')
defaults.update(custom_values)
return super(Lead, self).message_new(msg_dict, custom_values=defaults)
@api.multi
def message_update(self, msg_dict, update_vals=None):
""" Overrides mail_thread message_update that is called by the mailgateway
through message_process.
This method updates the document according to the email.
"""
if update_vals is None:
update_vals = {}
if msg_dict.get('priority') in dict(crm_stage.AVAILABLE_PRIORITIES):
update_vals['priority'] = msg_dict.get('priority')
maps = {
'revenue': 'planned_revenue',
'probability': 'probability',
}
for line in msg_dict.get('body', '').split('\n'):
line = line.strip()
res = tools.command_re.match(line)
if res and maps.get(res.group(1).lower()):
key = maps.get(res.group(1).lower())
update_vals[key] = res.group(2).lower()
return super(Lead, self).message_update(msg_dict, update_vals=update_vals)
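    # Illustrative example (not part of the original module): an incoming reply whose
    # body contains command lines matched by tools.command_re, e.g.
    #   revenue: 45000
    #   probability: 60
    # would, through the `maps` table above, set planned_revenue and probability on the
    # lead when the mail gateway calls message_update().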
@api.multi
def message_partner_info_from_emails(self, emails, link_mail=False):
result = super(Lead, self).message_partner_info_from_emails(emails, link_mail=link_mail)
for partner_info in result:
if not partner_info.get('partner_id') and (self.partner_name or self.contact_name):
emails = email_re.findall(partner_info['full_name'] or '')
email = emails and emails[0] or ''
if email and self.email_from and email.lower() == self.email_from.lower():
partner_info['full_name'] = '%s <%s>' % (self.partner_name or self.contact_name, email)
break
return result
class Tag(models.Model):
_name = "crm.lead.tag"
_description = "Category of lead"
name = fields.Char('Name', required=True, translate=True)
color = fields.Integer('Color Index')
_sql_constraints = [
        ('name_uniq', 'unique (name)', "Tag name already exists!"),
]
class LostReason(models.Model):
_name = "crm.lost.reason"
    _description = 'Reason for losing leads'
name = fields.Char('Name', required=True, translate=True)
active = fields.Boolean('Active', default=True)
avg_line_length: 44.843561 | max_line_length: 216 | alphanum_fraction: 0.607952
hexsha: 2f2850990fdd492cc2988185c88b1dc74d903a6c | size: 1,932 | ext: py | lang: Python
repo_path: award/models.py | repo_name: amtesire/Project-Awards | repo_head_hexsha: 3728e3b5d9c1a89949101bef301fdd9b51d4fd7e | licenses: ["MIT"]
max_stars_count: null | max_issues_count: 3 (2021-03-19T04:49:07.000Z to 2021-06-10T22:10:24.000Z) | max_forks_count: null
from django.db import models
from tinymce.models import HTMLField
from django.contrib.auth.models import User
# Create your models here.
class Profile(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='profile')
profile_picture = models.ImageField(upload_to='images/')
bio = models.TextField(max_length=500)
contact = models.CharField(max_length=200)
def __str__(self):
return self.bio
def save_profile(self):
self.save()
def delete_profile(self):
self.delete()
class Project(models.Model):
title = models.CharField(max_length=155)
description = models.TextField(max_length=255)
photo = models.ImageField(upload_to='pics/')
user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="project")
link = models.URLField(max_length=200)
design = models.IntegerField(choices=list(zip(range(0,11), range(0,11))), default=0)
usability = models.IntegerField(choices=list(zip(range(0,11), range(0,11))), default=0)
content = models.IntegerField(choices=list(zip(range(0,11), range(0,11))), default=0)
vote_submissions = models.IntegerField(default=0)
def __str__(self):
return f'{self.title}'
def save_project(self):
self.save()
def delete_project(self):
self.delete()
@classmethod
def search_by_title(cls,search_term):
projects = cls.objects.filter(title__icontains=search_term)
return projects
@classmethod
def get_all_images(cls):
images=cls.objects.all().prefetch_related('comment_set')
return images
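    # Illustrative note (not part of the original app): list(zip(range(0, 11), range(0, 11)))
    # builds the choices [(0, 0), (1, 1), ..., (10, 10)], so each rating is an integer from
    # 0 to 10; a hypothetical overall score could be computed as
    #   (project.design + project.usability + project.content) / 3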
class Comment(models.Model):
posted_by=models.ForeignKey(User, on_delete=models.CASCADE,null=True)
comment_image=models.ForeignKey(Project,on_delete=models.CASCADE,null=True)
comment=models.CharField(max_length=20,null=True)
def __str__(self):
        return str(self.posted_by)
avg_line_length: 33.894737 | max_line_length: 91 | alphanum_fraction: 0.707039
hexsha: abddaf558baad3ac8303e1f4f8500e86d7a3fd38 | size: 245 | ext: py | lang: Python
repo_path: python/testData/quickFixes/PyUpdatePropertySignatureQuickFixTest/getter.py | repo_name: jnthn/intellij-community | repo_head_hexsha: 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | licenses: ["Apache-2.0"]
max_stars_count: 2 (2019-04-28T07:48:50.000Z to 2020-12-11T14:18:08.000Z) | max_issues_count: 173 (2018-07-05T13:59:39.000Z to 2018-08-09T01:12:03.000Z) | max_forks_count: 2 (2020-03-15T08:57:37.000Z to 2020-04-07T04:48:14.000Z)
class A(Aa):
@property
def <warning descr="Getter signature should be (self)">x<caret></warning>(self, r):
return ""
@x.setter
def <warning descr="Setter should not return a value">x</warning>(self, r):
return r
avg_line_length: 27.222222 | max_line_length: 87 | alphanum_fraction: 0.616327
hexsha: d9183a1ec34d47d7e98c6cf4705d900596681bad | size: 997 | ext: py | lang: Python
repo_path: config.py | repo_name: tonypiazza/flights-python | repo_head_hexsha: 4e09900cb5999ecaf486682ff4a75fd05c348430 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
"""
Configuration data for report apps.
"""
from yaml import load
class Config(object):
"""Loads application configuration from YAML file and makes it available as
properties
"""
instance = None
class __Config(object):
def __init__(self):
try:
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
from yaml import Loader, Dumper
with open('config.yaml', 'r') as f:
data = load(f, Loader=Loader)
self.airportPath = data['airportPath']
self.carrierPath = data['carrierPath']
self.flightPaths = data['flightPaths']
self.planePath = data['planePath']
def __str__(self):
return repr(self.instance)
def __init__(self):
if not Config.instance:
Config.instance = Config.__Config()
def __getattr__(self, name):
return getattr(self.instance, name)
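# Illustrative usage sketch (not part of the original file); assumes a config.yaml with
# the four keys read above exists in the working directory:
#   cfg = Config()
#   print(cfg.airportPath, cfg.flightPaths)
#   assert Config() is not cfg                    # each call builds a new wrapper...
#   assert Config().instance is cfg.instance      # ...sharing the same parsed __Config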
avg_line_length: 27.694444 | max_line_length: 79 | alphanum_fraction: 0.582748
hexsha: f32d908c1af39518477f6c9f7b35caa598c4bb6d | size: 9,994 | ext: py | lang: Python
repo_path: tests/test_full_library.py | repo_name: mobiusklein/autowrap | repo_head_hexsha: 6441782914bb91edd11adbb0de58de4d7ef3f1f0 | licenses: ["BSD-3-Clause"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
from __future__ import absolute_import, print_function
import os
import glob
import autowrap
import autowrap.Code
import autowrap.CodeGenerator
import autowrap.DeclResolver
import autowrap.Main
import autowrap.PXDParser
import autowrap.Utils
__license__ = """
Copyright (c) 2012-2014, Uwe Schmitt, ETH Zurich, all rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of the mineway GmbH nor the names of its contributors may be
used to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
test_files = os.path.join(os.path.dirname(__file__), "test_files", "full_lib")
template = """
from distutils.core import setup, Extension
import sys
import pprint
from Cython.Distutils import build_ext
ext = []
ext.append( Extension("moduleCD", sources = ['package/moduleCD.cpp'], language="c++",
include_dirs = %(include_dirs)r,
extra_compile_args = %(compile_args)r,
extra_link_args = %(link_args)r,
))
ext.append(Extension("moduleA", sources = ['package/moduleA.cpp'], language="c++",
include_dirs = %(include_dirs)r,
extra_compile_args = %(compile_args)r,
extra_link_args = %(link_args)r,
))
ext.append(Extension("moduleB", sources = ['package/moduleB.cpp'], language="c++",
include_dirs = %(include_dirs)r,
extra_compile_args = %(compile_args)r,
extra_link_args = %(link_args)r,
))
setup(
name="package",
version="0.0.1",
ext_modules = ext
)
"""
def compile_and_import(names, source_files, include_dirs=None, extra_files=[], **kws):
if include_dirs is None:
include_dirs = []
debug = kws.get("debug")
import os.path
import shutil
import tempfile
import subprocess
import sys
from importlib import import_module
compile_args = []
link_args = []
if sys.platform == "darwin":
compile_args += ["-stdlib=libc++"]
link_args += ["-stdlib=libc++"]
if sys.platform != "win32":
compile_args += ["-Wno-unused-but-set-variable"]
setup_code = template % locals()
if debug:
print("\n")
print("-" * 70)
print(setup_code)
print("-" * 70)
print("\n")
now = os.getcwd()
try:
sys.path.insert(0, now)
sys.path.insert(0, now+"/package")
with open("setup.py", "w") as fp:
fp.write(setup_code)
assert (
subprocess.Popen(
"%s setup.py build_ext --force --inplace" % sys.executable,
shell=True
).wait()
== 0
)
files = glob.iglob("*.so")
for file in files:
if os.path.isfile(file):
shutil.copy2(file, "./package/")
results = [import_module(name) for name in names]
finally:
        sys.path = sys.path[2:]  # drop the two entries inserted above
os.chdir(now)
print(results)
return results
def test_full_lib(tmpdir):
"""
Example with multi-file library and multi-file result.
This shows a full run through of a case where multiple class files (A, B,
C, D) with multiple classes in them (Aklass, A_second, etc.) need to be
wrapped, a total of 10 different entities over 8 header files (4 hpp and 4
pxd files). Autowrap will generate a .pxd and a .pyx file for each module.
We decided to wrap the library into three modules, A, B and CD to show the
    capability of autowrap to do that. Note that we have to perform multiple steps:
- Step 1: parse all header files *together* - all pxd files need to be
parsed together so that declarations are properly resolved.
- Step 2: Map the different parsed entities to the pxd files and the
desired modules, we use a master dict here that can be consumed
by autowrap and specifies which pxd files and which declarations
make up which module.
- Step 3: Generate Cython code for each module
- Step 4: Generate C++ code for each module (note that Step 3 has to be
completed first before we can start to generate C++ code)
- Step 5: Compile (run setup.py)
    Note that autowrap gives you full control over how many modules you want to
produce and which classes go into which modules. It automatically generates
correct cimport statements in each so that dependencies between the modules
are not an issue.
"""
curdir = os.getcwd()
workdir = tmpdir.strpath + "/package"
os.makedirs(workdir)
os.chdir(workdir)
open("__init__.py", "a").close()
try:
mnames = ["moduleA", "moduleB", "moduleCD"]
# Step 1: parse all header files
PY_NUM_THREADS = 1
pxd_files = ["A.pxd", "B.pxd", "C.pxd", "D.pxd"]
full_pxd_files = [os.path.join(test_files, f) for f in pxd_files]
decls, instance_map = autowrap.parse(
full_pxd_files, ".", num_processes=int(PY_NUM_THREADS)
)
assert len(decls) == 13, len(decls)
# Step 2: Perform mapping
pxd_decl_mapping = {}
for de in decls:
tmp = pxd_decl_mapping.get(de.cpp_decl.pxd_path, [])
tmp.append(de)
pxd_decl_mapping[de.cpp_decl.pxd_path] = tmp
masterDict = {}
masterDict[mnames[0]] = {
"decls": pxd_decl_mapping[full_pxd_files[0]],
"addons": [],
"files": [full_pxd_files[0]],
}
masterDict[mnames[1]] = {
"decls": pxd_decl_mapping[full_pxd_files[1]],
"addons": [],
"files": [full_pxd_files[1]],
}
masterDict[mnames[2]] = {
"decls": pxd_decl_mapping[full_pxd_files[2]]
+ pxd_decl_mapping[full_pxd_files[3]],
"addons": [],
"files": [full_pxd_files[2]] + [full_pxd_files[3]],
}
# Step 3: Generate Cython code
converters = []
for modname in mnames:
m_filename = "%s.pyx" % modname
cimports, manual_code = autowrap.Main.collect_manual_code(
masterDict[modname]["addons"]
)
autowrap.Main.register_converters(converters)
autowrap_include_dirs = autowrap.generate_code(
masterDict[modname]["decls"],
instance_map,
target=m_filename,
debug=False,
manual_code=manual_code,
extra_cimports=cimports,
include_boost=True,
all_decl=masterDict,
add_relative=True
)
masterDict[modname]["inc_dirs"] = autowrap_include_dirs
os.chdir("..")
# Step 4: Generate CPP code
for modname in mnames:
m_filename = "package/%s.pyx" % modname
autowrap_include_dirs = masterDict[modname]["inc_dirs"]
autowrap.Main.run_cython(
inc_dirs=autowrap_include_dirs, extra_opts=None, out=m_filename
)
# Step 5: Compile
all_pyx_files = ["package/%s.pyx" % modname for modname in mnames]
all_pxd_files = ["package/%s.pxd" % modname for modname in mnames]
include_dirs = masterDict[modname]["inc_dirs"]
moduleA, moduleB, moduleCD = compile_and_import(
mnames, all_pyx_files, include_dirs, extra_files=all_pxd_files
)
finally:
os.chdir(curdir)
Aobj = moduleA.Aalias(5)
Asecond = moduleA.A_second(8)
assert Asecond.i_ == 8
assert Aobj.i_ == 5
assert Aobj.KlassE is not None
assert Aobj.KlassE.A1 is not None
assert Aobj.KlassE.A2 is not None
assert Aobj.KlassE.A3 is not None
Bobj = moduleB.Bklass(5)
assert Bobj.i_ == 5 # access through A_second
Bobj.callA2()
assert Bobj.i_ == 6 # access through A_second
Bsecond = moduleB.B_second(8)
assert Bsecond.i_ == 8
Bsecond.processA(Aobj)
assert Bsecond.i_ == 15
assert Bobj.KlassE is not None
assert Bobj.KlassE.B1 is not None
assert Bobj.KlassE.B2 is not None
assert Bobj.KlassE.B3 is not None
assert Bobj.KlassKlass is not None
# there are two different ways to get Bklass::KlassKlass, either through a
# Bklass object or through the module
Bobj_kk = Bobj.KlassKlass()
Bobj_kk.k_ = 14
assert Bobj_kk.k_ == 14
Bobj_kk = moduleB.Bklass.KlassKlass()
Bobj_kk.k_ = 14
assert Bobj_kk.k_ == 14
# Check doc string
assert "Inherits from" in moduleB.Bklass.__doc__
assert "some doc!" in moduleB.Bklass.__doc__
assert len(moduleB.Bklass.__doc__) == 92, len(moduleB.Bklass.__doc__)
Bsecond = moduleB.B_second(8)
Dsecond = moduleCD.D_second(11)
assert Dsecond.i_ == 11
Dsecond.runB(Bsecond)
assert Dsecond.i_ == 8
avg_line_length: 33.092715 | max_line_length: 86 | alphanum_fraction: 0.642686
hexsha: f6a56e58b4d9a8a894e75116ff7f464cf1700ca5 | size: 5,041 | ext: py | lang: Python
repo_path: rl_sandbox/examples/pybullet/hopper/grac_experiment.py | repo_name: chanb/rl_sandbox_public | repo_head_hexsha: e55f954a29880f83a5b0c3358badda4d900f1564 | licenses: ["MIT"]
max_stars_count: 14 (2020-11-09T22:05:37.000Z to 2022-02-11T12:41:33.000Z) | max_issues_count: null | max_forks_count: null
import argparse
import numpy as np
import torch
import rl_sandbox.constants as c
import rl_sandbox.transforms.general_transforms as gt
from rl_sandbox.agents.random_agents import UniformContinuousAgent
from rl_sandbox.buffers.wrappers.torch_buffer import TorchBuffer
from rl_sandbox.envs.wrappers.action_repeat import ActionRepeatWrapper
from rl_sandbox.envs.wrappers.frame_stack import FrameStackWrapper
from rl_sandbox.train.train_grac import train_grac
from rl_sandbox.model_architectures.actor_critics.fully_connected_q_actor_critic import FullyConnectedGaussianQACSeparate, FullyConnectedGaussianCEMQAC, FullyConnectedGaussianQAC
from rl_sandbox.model_architectures.layers_definition import VALUE_BASED_LINEAR_LAYERS
# Parse command-line arguments for this run
parser = argparse.ArgumentParser()
parser.add_argument('--seed', type=int, required=True, help="Random seed")
args = parser.parse_args()
seed = args.seed
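# Illustrative invocation (not part of the original script), assuming the rl_sandbox
# package and a CUDA-capable device are available:
#   python grac_experiment.py --seed 0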
obs_dim = 15
action_dim = 3
min_action = -np.ones(action_dim)
max_action = np.ones(action_dim)
device = torch.device("cuda:0")
action_repeat = 1
num_frames = 1
memory_size = 1000000 // action_repeat
max_total_steps = 1000000 // action_repeat
experiment_setting = {
# Auxiliary Tasks
c.AUXILIARY_TASKS: {},
# Buffer
c.BUFFER_PREPROCESSING: gt.AsType(),
c.BUFFER_SETTING: {
c.KWARGS: {
c.MEMORY_SIZE: memory_size,
c.OBS_DIM: (obs_dim,),
c.H_STATE_DIM: (1,),
c.ACTION_DIM: (action_dim,),
c.REWARD_DIM: (1,),
c.INFOS: {c.MEAN: ((action_dim,), np.float32),
c.VARIANCE: ((action_dim,), np.float32),
c.ENTROPY: ((action_dim,), np.float32),
c.LOG_PROB: ((1,), np.float32),
c.VALUE: ((1,), np.float32),
c.DISCOUNTING: ((1,), np.float32)},
c.CHECKPOINT_INTERVAL: 0,
c.CHECKPOINT_PATH: None,
},
c.STORAGE_TYPE: c.RAM,
c.BUFFER_WRAPPERS: [
{
c.WRAPPER: TorchBuffer,
c.KWARGS: {},
},
],
},
# Environment
c.ACTION_DIM: action_dim,
c.CLIP_ACTION: True,
c.ENV_SETTING: {
c.ENV_BASE: {
c.ENV_NAME: "HopperBulletEnv-v0"
},
c.ENV_TYPE: c.GYM,
c.ENV_WRAPPERS: [
{
c.WRAPPER: ActionRepeatWrapper,
c.KWARGS: {
c.ACTION_REPEAT: action_repeat,
c.DISCOUNT_FACTOR: 1.,
c.ENABLE_DISCOUNTING: False,
}
},
{
c.WRAPPER: FrameStackWrapper,
c.KWARGS: {
c.NUM_FRAMES: num_frames,
}
}
]
},
c.MIN_ACTION: min_action,
c.MAX_ACTION: max_action,
c.OBS_DIM: obs_dim,
# Evaluation
c.EVALUATION_FREQUENCY: 5000,
c.EVALUATION_RENDER: False,
c.EVALUATION_RETURNS: [],
c.NUM_EVALUATION_EPISODES: 5,
# Exploration
c.EXPLORATION_STEPS: 1000,
c.EXPLORATION_STRATEGY: UniformContinuousAgent(min_action,
max_action,
np.random.RandomState(seed)),
# General
c.DEVICE: device,
c.SEED: seed,
# Load
c.LOAD_MODEL: False,
# Logging
c.PRINT_INTERVAL: 5000,
c.SAVE_INTERVAL: 1000000,
# Model
c.MODEL_SETTING: {
c.MODEL_ARCHITECTURE: FullyConnectedGaussianQAC,
c.KWARGS: {
c.OBS_DIM: obs_dim,
c.ACTION_DIM: action_dim,
c.SHARED_LAYERS: VALUE_BASED_LINEAR_LAYERS(in_dim=obs_dim),
c.DEVICE: device,
c.NORMALIZE_OBS: False,
c.NORMALIZE_VALUE: False,
},
},
c.OPTIMIZER_SETTING: {
c.POLICY: {
c.OPTIMIZER: torch.optim.Adam,
c.KWARGS: {
c.LR: 2e-4,
},
},
c.QS: {
c.OPTIMIZER: torch.optim.Adam,
c.KWARGS: {
c.LR: 3e-4,
},
},
},
c.EVALUATION_PREPROCESSING: gt.Identity(),
c.TRAIN_PREPROCESSING: gt.Identity(),
# GRAC
c.ACCUM_NUM_GRAD: 1,
c.ALPHA: 0.85,
c.BATCH_SIZE: 256,
c.BUFFER_WARMUP: 1000,
c.COV_NOISE_END: 0.01,
c.COV_NOISE_INIT: 0.05,
c.COV_NOISE_TAU: 0.95,
c.ELITE_SIZE: 5,
c.GAMMA: 0.99,
c.MAX_GRAD_NORM: 1e10,
c.NUM_GRADIENT_UPDATES: 1,
c.NUM_ITERS: 2,
c.NUM_PREFETCH: 1,
c.NUM_Q_UPDATES: 5,
c.POP_SIZE: 128,
c.REWARD_SCALING: 1.,
c.STEPS_BETWEEN_UPDATE: 2,
c.UPDATE_NUM: 0,
# Progress Tracking
c.CUM_EPISODE_LENGTHS: [0],
c.CURR_EPISODE: 1,
c.NUM_UPDATES: 0,
c.RETURNS: [],
# Save
c.SAVE_PATH: f"/u/chanb/experiments/pybullet/results/hopper/gt-grac/{seed}",
# train parameters
c.MAX_TOTAL_STEPS: max_total_steps,
c.TRAIN_RENDER: False,
}
train_grac(experiment_config=experiment_setting)
avg_line_length: 27.248649 | max_line_length: 178 | alphanum_fraction: 0.585201
hexsha: 6ccd00cb15e521f938181fe9bd3a18c9311641ec | size: 19,593 | ext: py | lang: Python
repo_path: sdk/python/pulumi_azure_native/network/v20190401/express_route_circuit.py | repo_name: pulumi-bot/pulumi-azure-native | repo_head_hexsha: f7b9490b5211544318e455e5cceafe47b628e12c | licenses: ["Apache-2.0"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['ExpressRouteCircuit']
class ExpressRouteCircuit(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
allow_classic_operations: Optional[pulumi.Input[bool]] = None,
authorizations: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ExpressRouteCircuitAuthorizationArgs']]]]] = None,
bandwidth_in_gbps: Optional[pulumi.Input[float]] = None,
circuit_name: Optional[pulumi.Input[str]] = None,
circuit_provisioning_state: Optional[pulumi.Input[str]] = None,
express_route_port: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
gateway_manager_etag: Optional[pulumi.Input[str]] = None,
global_reach_enabled: Optional[pulumi.Input[bool]] = None,
id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
peerings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ExpressRouteCircuitPeeringArgs']]]]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
service_key: Optional[pulumi.Input[str]] = None,
service_provider_notes: Optional[pulumi.Input[str]] = None,
service_provider_properties: Optional[pulumi.Input[pulumi.InputType['ExpressRouteCircuitServiceProviderPropertiesArgs']]] = None,
service_provider_provisioning_state: Optional[pulumi.Input[Union[str, 'ServiceProviderProvisioningState']]] = None,
sku: Optional[pulumi.Input[pulumi.InputType['ExpressRouteCircuitSkuArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
ExpressRouteCircuit resource.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] allow_classic_operations: Allow classic operations.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ExpressRouteCircuitAuthorizationArgs']]]] authorizations: The list of authorizations.
:param pulumi.Input[float] bandwidth_in_gbps: The bandwidth of the circuit when the circuit is provisioned on an ExpressRoutePort resource.
:param pulumi.Input[str] circuit_name: The name of the circuit.
:param pulumi.Input[str] circuit_provisioning_state: The CircuitProvisioningState state of the resource.
:param pulumi.Input[pulumi.InputType['SubResourceArgs']] express_route_port: The reference to the ExpressRoutePort resource when the circuit is provisioned on an ExpressRoutePort resource.
:param pulumi.Input[str] gateway_manager_etag: The GatewayManager Etag.
:param pulumi.Input[bool] global_reach_enabled: Flag denoting Global reach status.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ExpressRouteCircuitPeeringArgs']]]] peerings: The list of peerings.
:param pulumi.Input[str] provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] service_key: The ServiceKey.
:param pulumi.Input[str] service_provider_notes: The ServiceProviderNotes.
:param pulumi.Input[pulumi.InputType['ExpressRouteCircuitServiceProviderPropertiesArgs']] service_provider_properties: The ServiceProviderProperties.
:param pulumi.Input[Union[str, 'ServiceProviderProvisioningState']] service_provider_provisioning_state: The ServiceProviderProvisioningState state of the resource.
:param pulumi.Input[pulumi.InputType['ExpressRouteCircuitSkuArgs']] sku: The SKU.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['allow_classic_operations'] = allow_classic_operations
__props__['authorizations'] = authorizations
__props__['bandwidth_in_gbps'] = bandwidth_in_gbps
__props__['circuit_name'] = circuit_name
__props__['circuit_provisioning_state'] = circuit_provisioning_state
__props__['express_route_port'] = express_route_port
__props__['gateway_manager_etag'] = gateway_manager_etag
__props__['global_reach_enabled'] = global_reach_enabled
__props__['id'] = id
__props__['location'] = location
__props__['peerings'] = peerings
__props__['provisioning_state'] = provisioning_state
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['service_key'] = service_key
__props__['service_provider_notes'] = service_provider_notes
__props__['service_provider_properties'] = service_provider_properties
__props__['service_provider_provisioning_state'] = service_provider_provisioning_state
__props__['sku'] = sku
__props__['tags'] = tags
__props__['etag'] = None
__props__['name'] = None
__props__['stag'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network/v20190401:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/latest:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/latest:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20150501preview:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20150501preview:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20150615:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20150615:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20160330:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20160330:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20160601:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20160601:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20160901:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20160901:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20161201:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20161201:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20170301:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20170301:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20170601:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20170601:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20170801:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20170801:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20170901:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20170901:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20171001:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20171001:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20171101:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20171101:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20180101:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20180101:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20180201:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20180201:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20180401:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20180401:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20180601:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20180601:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20180701:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20180701:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20180801:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20180801:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20181001:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20181001:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20181101:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20181101:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20181201:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20181201:ExpressRouteCircuit"), 
pulumi.Alias(type_="azure-native:network/v20190201:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20190201:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20190601:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20190601:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20190701:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20190701:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20190801:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20190801:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20190901:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20190901:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20191101:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20191101:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20191201:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20191201:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20200301:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20200301:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20200401:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20200401:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20200501:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20200501:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20200601:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20200601:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20200701:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20200701:ExpressRouteCircuit"), pulumi.Alias(type_="azure-native:network/v20200801:ExpressRouteCircuit"), pulumi.Alias(type_="azure-nextgen:network/v20200801:ExpressRouteCircuit")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(ExpressRouteCircuit, __self__).__init__(
'azure-native:network/v20190401:ExpressRouteCircuit',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'ExpressRouteCircuit':
"""
Get an existing ExpressRouteCircuit resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["allow_classic_operations"] = None
__props__["authorizations"] = None
__props__["bandwidth_in_gbps"] = None
__props__["circuit_provisioning_state"] = None
__props__["etag"] = None
__props__["express_route_port"] = None
__props__["gateway_manager_etag"] = None
__props__["global_reach_enabled"] = None
__props__["location"] = None
__props__["name"] = None
__props__["peerings"] = None
__props__["provisioning_state"] = None
__props__["service_key"] = None
__props__["service_provider_notes"] = None
__props__["service_provider_properties"] = None
__props__["service_provider_provisioning_state"] = None
__props__["sku"] = None
__props__["stag"] = None
__props__["tags"] = None
__props__["type"] = None
return ExpressRouteCircuit(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="allowClassicOperations")
def allow_classic_operations(self) -> pulumi.Output[Optional[bool]]:
"""
Allow classic operations.
"""
return pulumi.get(self, "allow_classic_operations")
@property
@pulumi.getter
def authorizations(self) -> pulumi.Output[Optional[Sequence['outputs.ExpressRouteCircuitAuthorizationResponse']]]:
"""
The list of authorizations.
"""
return pulumi.get(self, "authorizations")
@property
@pulumi.getter(name="bandwidthInGbps")
def bandwidth_in_gbps(self) -> pulumi.Output[Optional[float]]:
"""
The bandwidth of the circuit when the circuit is provisioned on an ExpressRoutePort resource.
"""
return pulumi.get(self, "bandwidth_in_gbps")
@property
@pulumi.getter(name="circuitProvisioningState")
def circuit_provisioning_state(self) -> pulumi.Output[Optional[str]]:
"""
The CircuitProvisioningState state of the resource.
"""
return pulumi.get(self, "circuit_provisioning_state")
@property
@pulumi.getter
def etag(self) -> pulumi.Output[str]:
"""
Gets a unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="expressRoutePort")
def express_route_port(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
"""
The reference to the ExpressRoutePort resource when the circuit is provisioned on an ExpressRoutePort resource.
"""
return pulumi.get(self, "express_route_port")
@property
@pulumi.getter(name="gatewayManagerEtag")
def gateway_manager_etag(self) -> pulumi.Output[Optional[str]]:
"""
The GatewayManager Etag.
"""
return pulumi.get(self, "gateway_manager_etag")
@property
@pulumi.getter(name="globalReachEnabled")
def global_reach_enabled(self) -> pulumi.Output[Optional[bool]]:
"""
Flag denoting Global reach status.
"""
return pulumi.get(self, "global_reach_enabled")
@property
@pulumi.getter
def location(self) -> pulumi.Output[Optional[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def peerings(self) -> pulumi.Output[Optional[Sequence['outputs.ExpressRouteCircuitPeeringResponse']]]:
"""
The list of peerings.
"""
return pulumi.get(self, "peerings")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[Optional[str]]:
"""
Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="serviceKey")
def service_key(self) -> pulumi.Output[Optional[str]]:
"""
The ServiceKey.
"""
return pulumi.get(self, "service_key")
@property
@pulumi.getter(name="serviceProviderNotes")
def service_provider_notes(self) -> pulumi.Output[Optional[str]]:
"""
The ServiceProviderNotes.
"""
return pulumi.get(self, "service_provider_notes")
@property
@pulumi.getter(name="serviceProviderProperties")
def service_provider_properties(self) -> pulumi.Output[Optional['outputs.ExpressRouteCircuitServiceProviderPropertiesResponse']]:
"""
The ServiceProviderProperties.
"""
return pulumi.get(self, "service_provider_properties")
@property
@pulumi.getter(name="serviceProviderProvisioningState")
def service_provider_provisioning_state(self) -> pulumi.Output[Optional[str]]:
"""
The ServiceProviderProvisioningState state of the resource.
"""
return pulumi.get(self, "service_provider_provisioning_state")
@property
@pulumi.getter
def sku(self) -> pulumi.Output[Optional['outputs.ExpressRouteCircuitSkuResponse']]:
"""
The SKU.
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter
def stag(self) -> pulumi.Output[int]:
"""
The identifier of the circuit traffic. Outer tag for QinQ encapsulation.
"""
return pulumi.get(self, "stag")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
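# --- Hedged usage sketch (added; not part of the generated SDK) --------------
# A minimal example of how this resource class might be declared from a Pulumi
# program. All literal values below (resource group, location, SKU, provider
# name, peering location, bandwidth) are placeholders, and the dict inputs are
# assumed to be translated into the corresponding *Args types by the SDK.
def _example_express_route_circuit():
    return ExpressRouteCircuit(
        "exampleCircuit",
        resource_group_name="example-rg",
        location="westus",
        sku={
            "name": "Standard_MeteredData",
            "tier": "Standard",
            "family": "MeteredData",
        },
        service_provider_properties={
            "service_provider_name": "Equinix",
            "peering_location": "Silicon Valley",
            "bandwidth_in_mbps": 200,
        },
        tags={"environment": "example"},
    )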
| 60.847826
| 5,480
| 0.71107
|
4798166826ac2407c06e454ac3dc741489ab5e05
| 3,355
|
py
|
Python
|
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/_configuration.py
|
vbarbaresi/azure-sdk-for-python
|
397ba46c51d001ff89c66b170f5576cf8f49c05f
|
[
"MIT"
] | 8
|
2021-01-13T23:44:08.000Z
|
2021-03-17T10:13:36.000Z
|
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/_configuration.py
|
vbarbaresi/azure-sdk-for-python
|
397ba46c51d001ff89c66b170f5576cf8f49c05f
|
[
"MIT"
] | 2
|
2021-11-03T06:10:36.000Z
|
2021-12-01T06:29:39.000Z
|
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2015_06_15/_configuration.py
|
vbarbaresi/azure-sdk-for-python
|
397ba46c51d001ff89c66b170f5576cf8f49c05f
|
[
"MIT"
] | 1
|
2021-05-19T02:55:10.000Z
|
2021-05-19T02:55:10.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any
from azure.core.credentials import TokenCredential
VERSION = "unknown"
class StorageManagementConfiguration(Configuration):
"""Configuration for StorageManagement.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: Subscription credentials which uniquely identify the Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
:type subscription_id: str
"""
def __init__(
self,
credential, # type: "TokenCredential"
subscription_id, # type: str
**kwargs # type: Any
):
# type: (...) -> None
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
super(StorageManagementConfiguration, self).__init__(**kwargs)
self.credential = credential
self.subscription_id = subscription_id
self.api_version = "2015-06-15"
self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'mgmt-storage/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs # type: Any
):
# type: (...) -> None
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
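# --- Hedged usage sketch (added; not part of the generated SDK) --------------
# Minimal example of constructing this configuration. DefaultAzureCredential
# comes from the separate azure-identity package (assumed to be installed);
# the subscription id is a placeholder. Any azure.core TokenCredential works.
def _example_configuration():
    from azure.identity import DefaultAzureCredential

    config = StorageManagementConfiguration(
        credential=DefaultAzureCredential(),
        subscription_id="00000000-0000-0000-0000-000000000000",
    )
    return config.api_version  # "2015-06-15"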
| 47.253521
| 176
| 0.688823
|
68e9a433addd3d0010fb94c80de600640d3dabfe
| 18,405
|
py
|
Python
|
sfftkplus/unittests/_test_omero.py
|
emdb-empiar/sfftk-plus
|
7ceca24b78c540169bddb3fd433b4aed050f40ec
|
[
"Apache-2.0"
] | null | null | null |
sfftkplus/unittests/_test_omero.py
|
emdb-empiar/sfftk-plus
|
7ceca24b78c540169bddb3fd433b4aed050f40ec
|
[
"Apache-2.0"
] | null | null | null |
sfftkplus/unittests/_test_omero.py
|
emdb-empiar/sfftk-plus
|
7ceca24b78c540169bddb3fd433b4aed050f40ec
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_omero_wrapper.py
Unit tests for OMERO handlers
"""
from __future__ import division, print_function
import os
import unittest
from omero.gateway import _ImageWrapper
from omero.model import RectangleI # @UnresolvedImport
from . import TEST_DATA_PATH, _random_float, _random_integer
from ..core.parser import parse_args
from ..omero import handlers, primitives
from ..sffplus import get_image_ids
__author__ = 'Paul K. Korir, PhD'
__email__ = 'pkorir@ebi.ac.uk, paul.korir@gmail.com'
__date__ = '2016-06-13'
# utility functions
def colorInt(r, g, b, a):
"""Function that return a signed 32-bit integer from RGBA tuple.
0 <= r,g,b,a <= 1
"""
assert 0 <= r <= 1
assert 0 <= g <= 1
assert 0 <= b <= 1
assert 0 <= a <= 1
# the actual format is 'ARGB'!!!
rgba_int = (int(r * 255) << 24) + (int(g * 255) << 16) + (int(b * 255) << 8) + int(a * 255)
if rgba_int > 2147483647:
rgba_int = -2147483648 + rgba_int % 2147483648
return rgba_int
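# Added sanity-check sketch (not one of the original helpers): fully opaque
# red packs to 0xFF0000FF, which overflows the signed 32-bit range and wraps
# to -16776961 -- the kind of value OMERO stores for shape colours.
def _colorInt_example():
    assert colorInt(1, 0, 0, 1) == -16776961
    return colorInt(1, 0, 0, 1)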
# handlers
class TestHandlers_OMEROConnection(unittest.TestCase):
"""Test on using the opened connection"""
@classmethod
def setUpClass(cls):
cls.config_fn = os.path.join(TEST_DATA_PATH, 'configs', 'sffp.conf')
args, configs = parse_args('list -I --config-path {}'.format(cls.config_fn), use_shlex=True)
cls.connection = handlers.OMEROConnection(args, configs)
@classmethod
def tearDownClass(cls):
del cls.connection
def test_connect(self):
"""Test that we can connect"""
with self.connection:
connected = self.connection.connected
self.assertTrue(connected)
def test_images(self):
"""Test that we can get images"""
with self.connection:
images = [image for image in self.connection.images()]
self.assertGreater(len(images), 0)
def test_rois(self):
"""Test that we can get ROIs"""
with self.connection:
rois = [roi for roi in self.connection.rois()]
self.assertGreater(len(rois), 0)
def test_projects(self):
"""Test that we can get available projects"""
with self.connection:
projects = [project for project in self.connection.projects]
self.assertGreater(len(projects), 0)
def test_datasets(self):
"""test that we can get available datasets"""
with self.connection:
datasets = [dataset for dataset in self.connection.datasets()]
self.assertGreater(len(datasets), 0)
def test_get_image_by_id(self):
"""Test that we can get a single image by ID"""
with self.connection:
image = self.connection.getImage(101)
self.assertIsInstance(image, _ImageWrapper)
def test_get_rois_by_image_id(self):
"""Test that we can get all ROIs associated with an image"""
with self.connection:
rois = self.connection.getROIs(101)
self.assertGreater(len(rois), 0)
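# --- Hedged standalone sketch (not one of the original test cases) -----------
# How the connection handler exercised above might be used outside unittest.
# It needs the same test OMERO server and sffp.conf fixture as the tests;
# image id 101 is the fixture image used throughout this file.
def _example_connection_roundtrip():
    config_fn = os.path.join(TEST_DATA_PATH, 'configs', 'sffp.conf')
    args, configs = parse_args('list -I --config-path {}'.format(config_fn), use_shlex=True)
    connection = handlers.OMEROConnection(args, configs)
    with connection:
        image = connection.getImage(101)
        rois = connection.getROIs(101)
    return image, rois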
# class TestHandlers_OMEROROI(unittest.TestCase):
# def test_create(self):
# self.assertTrue(False)
#
# def test_addShape(self):
# self.assertTrue(False)
#
# def test_load_data(self):
# self.assertTrue(False)
class TestPrimitives_Shape(unittest.TestCase):
"""
Test generic Shape attributes with a Rectangle ROI object
FillColor
FontFamily
FontSize
FontStyle
StrokeColor
StrokeWidth
TheC
TheT
TheZ
TextValue
"""
@classmethod
def setUpClass(cls):
        # the connection args
cls.config_fn = os.path.join(TEST_DATA_PATH, 'configs', 'sffp.conf')
args, configs = parse_args('list -I --config-path {}'.format(cls.config_fn), use_shlex=True)
# the connection
cls.connection = handlers.OMEROConnection(args, configs)
# image
with cls.connection:
cls.image = cls.connection.getImage(101)
# the ROI
cls.roi = handlers.OMEROROI('my_roi', cls.image)
# vars
cls.theT = _random_integer()
cls.theZ = _random_integer()
cls.X = _random_integer()
cls.Y = _random_integer()
cls.Width = _random_integer()
cls.Height = _random_integer()
cls.TextValue = 'I am a rectangle'
cls.r, cls.g, cls.b = _random_float(), _random_float(), _random_float()
cls.fontSize = _random_integer(5, 15)
# create the shape with the above params
cls.rect = primitives.Rectangle(theT=cls.theT, theZ=cls.theZ, X=cls.X, Y=cls.Y, Width=cls.Width,
Height=cls.Height)
cls.rect.setFillColor(cls.r, cls.g, cls.b)
cls.rect.setFontFamily('serif')
cls.rect.setFontSize(cls.fontSize, 'POINT')
cls.rect.setFontStyle('italic')
cls.rect.setStrokeColor(cls.r, cls.g, cls.b)
cls.rect.setTextValue(cls.TextValue)
# add the shape to the ROI
cls.roi.addShape(cls.rect)
def test_added_shape(self):
self.assertTrue(isinstance(self.roi.getShape(0), RectangleI))
def test_rectangle_type(self):
self.assertTrue(isinstance(self.rect, RectangleI))
def test_theT(self):
self.assertEqual(self.rect.getTheT(), self.theT)
def test_theZ(self):
self.assertEqual(self.rect.getTheZ(), self.theZ)
def test_X(self):
self.assertEqual(self.rect.getX(), self.X)
def test_Y(self):
self.assertEqual(self.rect.getY(), self.Y)
def test_Width(self):
self.assertEqual(self.rect.getWidth(), self.Width)
def test_Height(self):
self.assertEqual(self.rect.getHeight(), self.Height)
def test_fillColor(self):
self.assertEqual(self.rect.getFillColor(), colorInt(self.r, self.g, self.b, 1))
def test_fontFamily(self):
self.assertEqual(self.rect.getFontFamily(), 'serif')
def test_fontSize(self):
self.assertEqual(self.rect.getFontSize(), self.fontSize)
def test_fontStyle(self):
self.assertEqual(self.rect.getFontStyle(), 'italic')
def test_strokeColor(self):
self.assertEqual(self.rect.getStrokeColor(), colorInt(self.r, self.g, self.b, 1))
def test_textValue(self):
self.assertEqual(self.rect.getTextValue(), self.TextValue)
class TestPrimitives_Line(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.X1 = _random_float()
cls.X2 = _random_float()
cls.Y1 = _random_float()
cls.Y2 = _random_float()
cls.line = primitives.Line(X1=cls.X1, X2=cls.X2, Y1=cls.Y1, Y2=cls.Y2)
def test_create(self):
"""Test that we can create a Line object"""
# assertions
self.assertEqual(round(self.line.getX1(), 6), round(self.X1, 6))
self.assertEqual(round(self.line.getX2(), 6), round(self.X2, 6))
self.assertEqual(round(self.line.getY1(), 6), round(self.Y1, 6))
self.assertEqual(round(self.line.getY2(), 6), round(self.Y2, 6))
def test_modify(self):
"""Test that we can modify params"""
X1 = _random_float()
X2 = _random_float()
Y1 = _random_float()
Y2 = _random_float()
self.line.setX1(X1)
self.line.setX2(X2)
self.line.setY1(Y1)
self.line.setY2(Y2)
# assertions
self.assertEqual(round(self.line.getX1(), 6), round(X1, 6))
self.assertEqual(round(self.line.getX2(), 6), round(X2, 6))
self.assertEqual(round(self.line.getY1(), 6), round(Y1, 6))
self.assertEqual(round(self.line.getY2(), 6), round(Y2, 6))
class TestPrimitives_Rectangle(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.X = _random_float() * _random_integer()
cls.Y = _random_float() * _random_integer()
cls.Width = _random_float() * _random_integer()
cls.Height = _random_float() * _random_integer()
cls.rectangle = primitives.Rectangle(X=cls.X, Y=cls.Y, Width=cls.Width, Height=cls.Height)
def test_create(self):
"""Test that we can create a Rectangle object"""
self.assertEqual(round(self.rectangle.getX(), 6), round(self.X, 6))
self.assertEqual(round(self.rectangle.getY(), 6), round(self.Y, 6))
self.assertEqual(round(self.rectangle.getWidth(), 6), round(self.Width, 6))
self.assertEqual(round(self.rectangle.getHeight(), 6), round(self.Height, 6))
def test_modify(self):
"""Test that we can modify params"""
X = _random_float() * _random_integer()
Y = _random_float() * _random_integer()
Width = _random_float() * _random_integer()
Height = _random_float() * _random_integer()
self.rectangle.setX(X)
self.rectangle.setY(Y)
self.rectangle.setWidth(Width)
self.rectangle.setHeight(Height)
# assertions
self.assertEqual(round(self.rectangle.getX(), 6), round(X, 6))
self.assertEqual(round(self.rectangle.getY(), 6), round(Y, 6))
self.assertEqual(round(self.rectangle.getWidth(), 6), round(Width, 6))
self.assertEqual(round(self.rectangle.getHeight(), 6), round(Height, 6))
class TestPrimitives_Mask(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.X = _random_float() * _random_integer()
cls.Y = _random_float() * _random_integer()
cls.Width = _random_float() * _random_integer()
cls.Height = _random_float() * _random_integer()
cls.mask = primitives.Mask(X=cls.X, Y=cls.Y, Width=cls.Width, Height=cls.Height)
def test_create(self):
"""Test that we can create a Mask object"""
self.assertEqual(round(self.mask.getX(), 6), round(self.X, 6))
self.assertEqual(round(self.mask.getY(), 6), round(self.Y, 6))
self.assertEqual(round(self.mask.getWidth(), 6), round(self.Width, 6))
self.assertEqual(round(self.mask.getHeight(), 6), round(self.Height, 6))
def test_modify(self):
"""Test that we can modify params"""
X = _random_float() * _random_integer()
Y = _random_float() * _random_integer()
Width = _random_float() * _random_integer()
Height = _random_float() * _random_integer()
self.mask.setX(X)
self.mask.setY(Y)
self.mask.setWidth(Width)
self.mask.setHeight(Height)
# assertions
self.assertEqual(round(self.mask.getX(), 6), round(X, 6))
self.assertEqual(round(self.mask.getY(), 6), round(Y, 6))
self.assertEqual(round(self.mask.getWidth(), 6), round(Width, 6))
self.assertEqual(round(self.mask.getHeight(), 6), round(Height, 6))
class TestPrimitives_Ellipse(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.X = _random_float() * _random_integer()
cls.Y = _random_float() * _random_integer()
cls.RadiusX = _random_float() * _random_integer()
cls.RadiusY = _random_float() * _random_integer()
cls.ellipse = primitives.Ellipse(X=cls.X, Y=cls.Y, RadiusX=cls.RadiusX, RadiusY=cls.RadiusY)
def test_create(self):
"""Test that we can create an Ellipse object"""
self.assertEqual(round(self.ellipse.getX(), 6), round(self.X, 6))
self.assertEqual(round(self.ellipse.getY(), 6), round(self.Y, 6))
self.assertEqual(round(self.ellipse.getRadiusX(), 6), round(self.RadiusX, 6))
self.assertEqual(round(self.ellipse.getRadiusY(), 6), round(self.RadiusY, 6))
def test_modify(self):
"""Test that we can modify params"""
X = _random_float() * _random_integer()
Y = _random_float() * _random_integer()
RadiusX = _random_float() * _random_integer()
RadiusY = _random_float() * _random_integer()
self.ellipse.setX(X)
self.ellipse.setY(Y)
self.ellipse.setRadiusX(RadiusX)
self.ellipse.setRadiusY(RadiusY)
# assertions
self.assertEqual(round(self.ellipse.getX(), 6), round(X, 6))
self.assertEqual(round(self.ellipse.getY(), 6), round(Y, 6))
self.assertEqual(round(self.ellipse.getRadiusX(), 6), round(RadiusX, 6))
self.assertEqual(round(self.ellipse.getRadiusY(), 6), round(RadiusY, 6))
class TestPrimitives_Point(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.X = _random_float() * _random_integer()
cls.Y = _random_float() * _random_integer()
cls.point = primitives.Point(X=cls.X, Y=cls.Y)
def test_create(self):
"""Test that we can create a Point object"""
self.assertEqual(round(self.point.getX(), 6), round(self.X, 6))
self.assertEqual(round(self.point.getY(), 6), round(self.Y, 6))
def test_modify(self):
"""Test that we can modify params"""
X = _random_float() * _random_integer()
Y = _random_float() * _random_integer()
self.point.setX(X)
self.point.setY(Y)
# assertions
self.assertEqual(round(self.point.getX(), 6), round(X, 6))
self.assertEqual(round(self.point.getY(), 6), round(Y, 6))
class TestPrimitives_Polygon(unittest.TestCase):
"""Tests both primitives.Polyline and primitives.Polygon"""
@classmethod
def setUpClass(cls):
cls.points = [(_random_float() * _random_integer(), _random_float() * _random_integer()) for _ in range(4)]
cls.polygon = primitives.Polygon()
def test_create(self):
"""Test that we can create a Polygon object"""
self.assertEqual(len(self.polygon.getPoints()), 0)
def test_modify(self):
"""Test that we can modify params"""
self.polygon.setPoints(self.points)
# assertions
self.assertEqual(len(self.polygon.getPoints()), 4)
def test_swapXY(self):
"""Test that swapping X and Y works"""
self.polygon.setPoints(self.points, swapXY=True)
swapped_points = map(lambda x: (x[1], x[0]), self.points)
self.assertCountEqual(map(lambda p: (round(p[0], 6), round(p[1], 6)), self.polygon.getPoints()),
map(lambda p: (round(p[0], 6), round(p[1], 6)), swapped_points))
def test_offsetXFrom(self):
"""Test that we can define an X offset for point values"""
X_offset = _random_integer()
self.polygon.setPoints(self.points, offsetXFrom=X_offset)
offsetX_points = map(lambda p: (X_offset - p[0], p[1]), self.points)
self.assertCountEqual(map(lambda p: (round(p[0], 6), round(p[1], 6)), self.polygon.getPoints()),
map(lambda p: (round(p[0], 6), round(p[1], 6)), offsetX_points))
def test_offsetYFrom(self):
"""Test that we can define an Y offset for point values"""
Y_offset = _random_integer()
self.polygon.setPoints(self.points, offsetYFrom=Y_offset)
offsetY_points = map(lambda p: (p[0], Y_offset - p[1]), self.points)
self.assertCountEqual(map(lambda p: (round(p[0], 6), round(p[1], 6)), self.polygon.getPoints()),
map(lambda p: (round(p[0], 6), round(p[1], 6)), offsetY_points))
class TestPrimitives_Label(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.X = _random_float() * _random_integer()
cls.Y = _random_float() * _random_integer()
cls.label_text = "Some very interesting label"
cls.label = primitives.Label(X=cls.X, Y=cls.Y, label=cls.label_text)
def test_create(self):
"""Test that we can create a Point object"""
self.assertEqual(round(self.label.getX(), 6), round(self.X, 6))
self.assertEqual(round(self.label.getY(), 6), round(self.Y, 6))
self.assertEqual(self.label.getTextValue(), self.label_text)
def test_modify(self):
"""Test that we can modify params"""
X = _random_float() * _random_integer()
Y = _random_float() * _random_integer()
label_text = "Another piece of label text"
self.label.setX(X)
self.label.setY(Y)
self.label.setTextValue(label_text)
# assertions
self.assertEqual(round(self.label.getX(), 6), round(X, 6))
self.assertEqual(round(self.label.getY(), 6), round(Y, 6))
self.assertEqual(self.label.getTextValue(), label_text)
class TestHandler_OMEROROI_attachROI(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.config_fn = os.path.join(TEST_DATA_PATH, 'configs', 'sffp.conf')
args, configs = parse_args('list -I --config-path {}'.format(cls.config_fn), use_shlex=True)
try:
cls.connection = handlers.OMEROConnection(args, configs)
except Exception as e:
raise e
@classmethod
def tearDownClass(cls):
del cls.connection
def test_attachRois(self):
"""Test that we can attach ROIs
Implicitly tests .saveRoi() method
"""
from ..formats.roi import ROISegmentation
args, configs = parse_args('attachroi --config-path {} file.roi'.format(self.config_fn), use_shlex=True)
roi_fn = os.path.join(TEST_DATA_PATH, 'roi', 'test_emd_1832.roi')
roi_seg = ROISegmentation(roi_fn)
image_ids = get_image_ids(roi_seg, args)
# delete rois first
        for image_id in image_ids.values():  # map() is lazy on Python 3, so iterate explicitly
            self.delete_rois(image_id)
rois_before_attach = 0
rois_after_attach = 0
with self.connection:
for roi in self.connection.rois(project='test', dataset='test_attach'):
rois_before_attach += len(roi[1])
self.assertEqual(rois_before_attach, 0)
for orientation in roi_seg.oriented_segments:
image_id = image_ids[orientation]
image = self.connection.getImage(image_id)
omero_rois = roi_seg.as_omero_rois(orientation, image, args)
self.connection.attachRois(omero_rois)
for roi in self.connection.rois(project='test', dataset='test_attach'):
rois_after_attach += len(roi[1])
self.assertGreater(rois_after_attach, 0)
# assertions
# delete rois last
        for image_id in image_ids.values():
            self.delete_rois(image_id)
def delete_rois(self, image_id):
"""Delete all ROIs"""
with self.connection:
rois = self.connection.getROIs(image_id)
for roi in rois:
self.connection.deleteRoi(roi.getId().getValue())
| 38.184647
| 115
| 0.631079
|
045d63a7e2c478b0563880a6a214024609ea249c
| 10,379
|
py
|
Python
|
Framework/storage.py
|
AnganMitra/federatedLearn
|
f5c0d22fd677fbe8d5b90e5e018825ad89d596e5
|
[
"MIT"
] | null | null | null |
Framework/storage.py
|
AnganMitra/federatedLearn
|
f5c0d22fd677fbe8d5b90e5e018825ad89d596e5
|
[
"MIT"
] | null | null | null |
Framework/storage.py
|
AnganMitra/federatedLearn
|
f5c0d22fd677fbe8d5b90e5e018825ad89d596e5
|
[
"MIT"
] | null | null | null |
import bz2
import functools
import glob
import json
import logging
import os
from pathlib import Path
import pickle
import sys
from itertools import takewhile
from types import SimpleNamespace
import numpy as np
import _pickle as cPickle
from context import get_context
def get_round_from_name(fname: str):
parts = fname.split("_")
return parts[1]
def round_equals(fname, round_no):
round_in_name = get_round_from_name(fname)
return round_in_name == round_no
def max_round(files):
return max([get_round_from_name(file) for file in files])
def get_last_globalweight_file(job_no, base_path="./weights"):
search = os.path.join(base_path, f"{job_no}_*gw.bz2")
files = glob.glob(search)
file_names = [os.path.basename(file) for file in files]
ordered = sorted(file_names, key=get_round_from_name, reverse=True)
return next(iter(ordered), None)
def get_last_localweight_files(job_no, base_path="./weights"):
search = os.path.join(base_path, f"{job_no}_*lw.bz2")
files = glob.glob(search)
file_names = [os.path.basename(file) for file in files]
ordered = sorted(file_names, key=get_round_from_name, reverse=True)
first_item = next(iter(ordered), None)
if first_item is None:
return None
required_round = get_round_from_name(first_item)
round_check = functools.partial(round_equals, round_no=required_round)
return list(takewhile(round_check, ordered))
def get_last_partition_localweight_file(job_no, partition, base_path="./weights"):
search = os.path.join(base_path, f"{job_no}_*{partition}_lw.bz2")
files = glob.glob(search)
file_names = [os.path.basename(file) for file in files]
ordered = sorted(file_names, key=get_round_from_name, reverse=True)
first_item = next(iter(ordered), None)
if first_item is None:
return None
required_round = get_round_from_name(first_item)
round_check = functools.partial(round_equals, round_no=required_round)
return list(takewhile(round_check, ordered))
def load_weights_from_file(file_path):
with bz2.BZ2File(file_path, "r") as file:
npzfile = np.load(file)
return list([npzfile[name] for name in npzfile.files])
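# --- Hedged round-trip sketch (added for illustration) -----------------------
# Shows the on-disk convention the helpers above rely on: a list of numpy
# arrays written with np.savez into a bz2 stream. The directory and file name
# are placeholders following the "{job}_{round}_{wt}.bz2" pattern.
def _example_weight_roundtrip(path="./weights/0_1_gw.bz2"):
    Path(os.path.dirname(path)).mkdir(parents=True, exist_ok=True)
    weights_out = [np.zeros((2, 2)), np.ones(3)]
    with bz2.BZ2File(path, "w") as file:
        np.savez(file, *weights_out)
    weights_in = load_weights_from_file(path)
    assert len(weights_in) == len(weights_out)
    return weights_in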
def get_null_weight_storage():
def null_func(*_):
return None
sn = SimpleNamespace()
sn.get_base_weights = null_func
sn.store_weights = null_func
sn.get_latest_weight_round = null_func
sn.ensure_default_weights = null_func
return sn
def get_disk_report_writer():
context = get_context()
base_path = context.get("report_dir", "./reports")
Path(base_path).mkdir(parents=True, exist_ok=True)
logger = logging.getLogger("ReporterDiskStorage")
base_store = get_disk_storage()
def store_report(metadata, report):
job_id = metadata.get("job_id", 0)
round_id = metadata.get("round_id", 0)
sender = metadata.get("nn", "unknown")
logger.info(
"Storing training report from %s, job, round %s %s",
sender,
job_id,
round_id,
)
logger.debug("Metadata %s", metadata)
logger.debug(report)
file_name = f"{job_id}_{round_id}_{sender}_report.json"
path = os.path.join(base_path, file_name)
try:
base_store.store_json(path, report)
except OSError:
logging.error("Error storing report %s", sys.exc_info())
return store_report
def get_default_weight_storage():
return get_disk_weight_storage()
def get_inmemory_weight_storage():
logger = logging.getLogger("MemoryWStore")
base_weights = None
weights = {}
rounds = set()
    def store_weights(metadata, new_weights):
        # note: the parameter must not shadow the enclosing `weights` dict
        job_id = metadata.get("job_id", 0)
        weight_type = metadata.get("wt", "lw")
        round_id = metadata.get("round_id", 0)
        rounds.add(round_id)
        key = f"{job_id}-{round_id}-{weight_type}"
        weight_store = weights.get(key, [])
        weight_store.append(new_weights)
        weights[key] = weight_store
def get_latest_weight_round(metadata):
job_id = metadata.get("job_id", 0)
round_id = max(rounds)
key = f"{job_id}-{round_id}-lw"
return weights.get(key, [])
    def ensure_default_weights(_, get_weights):
        nonlocal base_weights  # required so the assignment below rebinds the closure variable
        logger.debug("Checking for base weights")
        # job_id = metadata.get("job_id", 0)
        if base_weights is None:
            base_weights = get_weights()
sn = SimpleNamespace()
sn.get_base_weights = lambda: base_weights
sn.store_weights = store_weights
sn.get_latest_weight_round = get_latest_weight_round
sn.ensure_default_weights = ensure_default_weights
return sn
def get_disk_weight_storage(base_path="./weights"):
logger = logging.getLogger("DiskWStore")
Path(base_path).mkdir(parents=True, exist_ok=True)
def get_last_round():
context = get_context()
job_id = context.get("job_id", 0)
return get_last_global_round_number(job_id, base_path)
def store_weights(metadata, weights):
job_id = metadata.get("job_id", 0)
weight_type = metadata.get("wt", "lw")
round_id = metadata.get("round_id", -1)
partition_id = metadata.get("partition_id", 0)
logger.info(
"Storing weights %s %s %s %s", job_id, weight_type, round_id, partition_id
)
if round_id < 1:
last_round = int(get_last_global_round_number(job_id, base_path))
round_id = last_round + 1
if weight_type == "lw":
sender = metadata.get("sender", partition_id)
samples = metadata.get("samples", 1)
file_name = f"{job_id}_{round_id}_{samples}_{sender}_{weight_type}.bz2"
else:
file_name = f"{job_id}_{round_id}_{weight_type}.bz2"
print("Saving weights to file", file_name)
path = os.path.join(base_path, file_name)
with bz2.BZ2File(path, "w") as file:
np.savez(file, *weights)
def base_weights_exist(metadata):
job_id = metadata.get("job_id", 0)
dw_file_name = f"{job_id}_gwdf.bz2"
dw_file_path = os.path.join(base_path, dw_file_name)
exists = os.path.exists(dw_file_path)
if exists:
logger.debug("Base weights file %s exists", dw_file_name)
else:
logger.debug("Base weights file %s does not exist", dw_file_name)
return exists
def create_base_weights(metadata, get_weights):
job_id = metadata.get("job_id", 0)
dw_file_name = f"{job_id}_gwdf.bz2"
dw_file_path = os.path.join(base_path, dw_file_name)
if os.path.exists(dw_file_path):
logger.debug("Default weight file exists")
return
logger.debug("No default weights exist creating")
weights = get_weights()
with bz2.BZ2File(dw_file_path, "w") as file:
np.savez(file, *weights)
def ensure_default_weights(metadata, get_weights):
logger.debug("Checking for base weights")
if not base_weights_exist(metadata):
create_base_weights(metadata, get_weights)
def get_base_weights(metadata):
job_id = metadata.get("job_id", 0)
file_name = get_last_globalweight_file(job_id, base_path)
if file_name is not None:
logger.debug("Found base weights file %s", file_name)
print("Found base weights file", file_name)
file_path = os.path.join(base_path, file_name)
return load_weights_from_file(file_path)
dw_file_name = f"{job_id}_gwdf.bz2"
dw_file_path = os.path.join(base_path, dw_file_name)
if os.path.exists(dw_file_path):
logger.debug("Loading weight from default weight file")
print("Loading weights from file", dw_file_path)
return load_weights_from_file(dw_file_path)
logger.warning("Storage unable to find a base weight file for jobid %s", job_id)
return None
def get_latest_weight_round(metadata):
job_id = metadata.get("job_id", 0)
w_files = get_last_localweight_files(job_id, base_path)
if w_files is None:
return []
logger.debug(
"Found %s local weight files in weight round %s", len(w_files), w_files[0]
)
return [
load_weights_from_file(os.path.join(base_path, file)) for file in w_files
]
sn = SimpleNamespace()
sn.get_base_weights = get_base_weights
sn.store_weights = store_weights
sn.get_latest_weight_round = get_latest_weight_round
sn.ensure_default_weights = ensure_default_weights
sn.create_base_weights = create_base_weights
sn.base_weights_exist = base_weights_exist
sn.get_last_round_number = get_last_round
return sn
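# --- Hedged usage sketch (added for illustration) ----------------------------
# How the disk-backed store above might be driven for one round. The metadata
# keys mirror those read inside store_weights/get_base_weights; the job id,
# round id and weight arrays are placeholders.
def _example_disk_weight_store():
    store = get_disk_weight_storage("./weights")
    metadata = {"job_id": 0, "round_id": 1, "wt": "gw"}
    store.ensure_default_weights(metadata, lambda: [np.zeros((2, 2))])
    store.store_weights(metadata, [np.ones((2, 2))])
    return store.get_base_weights(metadata)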
def get_last_round_number(job_id, partition, base_path="./weights"):
last_files = get_last_partition_localweight_file(job_id, partition, base_path)
if last_files is None:
return -1
return get_round_from_name(last_files[0])
def get_last_global_round_number(job_id, base_path="./weights"):
last_file = get_last_globalweight_file(job_id, base_path)
if last_file is None:
return -1
return get_round_from_name(last_file)
def get_disk_storage():
sn = SimpleNamespace()
def store_pickled(path, data):
with open(path, "wb") as file:
pickle.dump(data, file)
def store_pickled_compress(path, data):
with bz2.BZ2File(path, "w") as file:
cPickle.dump(data, file)
def load_pickled(path):
with open(path, "rb") as file:
data = pickle.load(file)
return data
def load_pickled_compress(path):
with bz2.BZ2File(path, "rb") as file:
data = cPickle.load(file)
return data
def load_data(path, compressed=True):
if compressed:
return load_pickled_compress(path)
return load_pickled(path)
def store_data(path, data, compressed=True):
if compressed:
store_pickled_compress(path, data)
else:
store_pickled(path, data)
def store_json(path, data):
with open(path, "w") as file:
json.dump(data, file)
sn.store_data = store_data
sn.load_data = load_data
sn.store_json = store_json
return sn
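# --- Hedged usage sketch (added for illustration) ----------------------------
# Round-trips a small object through the pickled-and-compressed path exposed
# by get_disk_storage(); the file name is a placeholder.
def _example_disk_storage(path="./example_blob.bz2"):
    storage = get_disk_storage()
    storage.store_data(path, {"round": 1, "loss": 0.42})
    return storage.load_data(path)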
| 29.910663
| 88
| 0.661528
|
87301399bd0bfdb86e7f0d5debc2d8750c1a9b3a
| 5,951
|
py
|
Python
|
pymatgen/analysis/tests/test_molecule_structure_comparator.py
|
exenGT/pymatgen
|
a8ffb820ab8fc3f60251099e38c8888f45eae618
|
[
"MIT"
] | 1
|
2021-11-02T21:10:11.000Z
|
2021-11-02T21:10:11.000Z
|
pymatgen/analysis/tests/test_molecule_structure_comparator.py
|
exenGT/pymatgen
|
a8ffb820ab8fc3f60251099e38c8888f45eae618
|
[
"MIT"
] | 5
|
2018-08-07T23:00:23.000Z
|
2021-01-05T22:46:23.000Z
|
pymatgen/analysis/tests/test_molecule_structure_comparator.py
|
exenGT/pymatgen
|
a8ffb820ab8fc3f60251099e38c8888f45eae618
|
[
"MIT"
] | 6
|
2019-04-26T18:50:41.000Z
|
2020-03-29T17:58:34.000Z
|
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import os
import unittest
from unittest import TestCase
from pymatgen.analysis.molecule_structure_comparator import MoleculeStructureComparator
from pymatgen.core.structure import Molecule
from pymatgen.util.testing import PymatgenTest
__author__ = "xiaohuiqu"
test_dir = os.path.join(PymatgenTest.TEST_FILES_DIR, "molecules", "structural_change")
class TestMoleculeStructureComparator(TestCase):
def test_are_equal(self):
msc1 = MoleculeStructureComparator()
mol1 = Molecule.from_file(os.path.join(test_dir, "t1.xyz"))
mol2 = Molecule.from_file(os.path.join(test_dir, "t2.xyz"))
mol3 = Molecule.from_file(os.path.join(test_dir, "t3.xyz"))
self.assertFalse(msc1.are_equal(mol1, mol2))
self.assertTrue(msc1.are_equal(mol2, mol3))
thio1 = Molecule.from_file(os.path.join(test_dir, "thiophene1.xyz"))
thio2 = Molecule.from_file(os.path.join(test_dir, "thiophene2.xyz"))
# noinspection PyProtectedMember
msc2 = MoleculeStructureComparator(priority_bonds=msc1._get_bonds(thio1))
self.assertTrue(msc2.are_equal(thio1, thio2))
hal1 = Molecule.from_file(os.path.join(test_dir, "molecule_with_halogen_bonds_1.xyz"))
hal2 = Molecule.from_file(os.path.join(test_dir, "molecule_with_halogen_bonds_2.xyz"))
msc3 = MoleculeStructureComparator(priority_bonds=msc1._get_bonds(hal1))
self.assertTrue(msc3.are_equal(hal1, hal2))
def test_get_bonds(self):
mol1 = Molecule.from_file(os.path.join(test_dir, "t1.xyz"))
msc = MoleculeStructureComparator()
# noinspection PyProtectedMember
bonds = msc._get_bonds(mol1)
bonds_ref = [
(0, 1),
(0, 2),
(0, 3),
(0, 23),
(3, 4),
(3, 5),
(5, 6),
(5, 7),
(7, 8),
(7, 9),
(7, 21),
(9, 10),
(9, 11),
(9, 12),
(12, 13),
(12, 14),
(12, 15),
(15, 16),
(15, 17),
(15, 18),
(18, 19),
(18, 20),
(18, 21),
(21, 22),
(21, 23),
(23, 24),
(23, 25),
]
self.assertEqual(bonds, bonds_ref)
mol2 = Molecule.from_file(os.path.join(test_dir, "MgBH42.xyz"))
bonds = msc._get_bonds(mol2)
self.assertEqual(bonds, [(1, 3), (2, 3), (3, 4), (3, 5), (6, 8), (7, 8), (8, 9), (8, 10)])
msc = MoleculeStructureComparator(ignore_ionic_bond=False)
bonds = msc._get_bonds(mol2)
self.assertEqual(
bonds,
[
(0, 1),
(0, 2),
(0, 3),
(0, 5),
(0, 6),
(0, 7),
(0, 8),
(0, 9),
(1, 3),
(2, 3),
(3, 4),
(3, 5),
(6, 8),
(7, 8),
(8, 9),
(8, 10),
],
)
mol1 = Molecule.from_file(os.path.join(test_dir, "molecule_with_halogen_bonds_1.xyz"))
msc = MoleculeStructureComparator()
# noinspection PyProtectedMember
bonds = msc._get_bonds(mol1)
self.assertEqual(
bonds,
[
(0, 12),
(0, 13),
(0, 14),
(0, 15),
(1, 12),
(1, 16),
(1, 17),
(1, 18),
(2, 4),
(2, 11),
(2, 19),
(3, 5),
(3, 10),
(3, 20),
(4, 6),
(4, 10),
(5, 11),
(5, 12),
(6, 7),
(6, 8),
(6, 9),
],
)
def test_to_and_from_dict(self):
msc1 = MoleculeStructureComparator()
d1 = msc1.as_dict()
d2 = MoleculeStructureComparator.from_dict(d1).as_dict()
self.assertEqual(d1, d2)
thio1 = Molecule.from_file(os.path.join(test_dir, "thiophene1.xyz"))
# noinspection PyProtectedMember
msc2 = MoleculeStructureComparator(bond_length_cap=0.2, priority_bonds=msc1._get_bonds(thio1), priority_cap=0.5)
d1 = msc2.as_dict()
d2 = MoleculeStructureComparator.from_dict(d1).as_dict()
self.assertEqual(d1, d2)
# def test_structural_change_in_geom_opt(self):
# qcout_path = os.path.join(test_dir, "mol_1_3_bond.qcout")
# qcout = QcOutput(qcout_path)
# mol1 = qcout.data[0]["molecules"][0]
# mol2 = qcout.data[0]["molecules"][-1]
# priority_bonds = [[0, 1], [0, 2], [1, 3], [1, 4], [1, 7], [2, 5], [2, 6], [2, 8], [4, 6], [4, 10], [6, 9]]
# msc = MoleculeStructureComparator(priority_bonds=priority_bonds)
# self.assertTrue(msc.are_equal(mol1, mol2))
def test_get_13_bonds(self):
priority_bonds = [
[0, 1],
[0, 2],
[1, 3],
[1, 4],
[1, 7],
[2, 5],
[2, 6],
[2, 8],
[4, 6],
[4, 10],
[6, 9],
]
bonds_13 = MoleculeStructureComparator.get_13_bonds(priority_bonds)
ans = (
(0, 3),
(0, 4),
(0, 5),
(0, 6),
(0, 7),
(0, 8),
(1, 2),
(1, 6),
(1, 10),
(2, 4),
(2, 9),
(3, 4),
(3, 7),
(4, 7),
(4, 9),
(5, 6),
(5, 8),
(6, 8),
(6, 10),
)
self.assertEqual(bonds_13, tuple(ans))
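# --- Hedged standalone sketch (not one of the original test cases) -----------
# The comparator outside of unittest, using the same xyz fixtures as above, so
# it only runs where the pymatgen test data directory is available.
def _example_standalone_comparison():
    msc = MoleculeStructureComparator()
    mol_a = Molecule.from_file(os.path.join(test_dir, "t2.xyz"))
    mol_b = Molecule.from_file(os.path.join(test_dir, "t3.xyz"))
    return msc.are_equal(mol_a, mol_b)  # True for these two fixtures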
if __name__ == "__main__":
unittest.main()
| 30.994792
| 120
| 0.467821
|
57ca95e332751eddb715ef30a66ea9ad6a988d72
| 537
|
py
|
Python
|
xero_python/payrollau/models/employee_status.py
|
sromero84/xero-python
|
89558c0baa8080c3f522701eb1b94f909248dbd7
|
[
"MIT"
] | null | null | null |
xero_python/payrollau/models/employee_status.py
|
sromero84/xero-python
|
89558c0baa8080c3f522701eb1b94f909248dbd7
|
[
"MIT"
] | null | null | null |
xero_python/payrollau/models/employee_status.py
|
sromero84/xero-python
|
89558c0baa8080c3f522701eb1b94f909248dbd7
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Xero Payroll AU
This is the Xero Payroll API for orgs in Australia region. # noqa: E501
OpenAPI spec version: 2.3.4
Contact: api@xero.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
from enum import Enum
class EmployeeStatus(Enum):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
allowed enum values
"""
ACTIVE = "ACTIVE"
TERMINATED = "TERMINATED"
| 17.9
| 76
| 0.664804
|
04e993d2e7da08711b0e232a10b448320c022051
| 33,627
|
py
|
Python
|
main.py
|
duongminhhieu/-Racing-Betting
|
dd58e2878a798df57ae10d719d3256a6636dcf33
|
[
"MIT"
] | null | null | null |
main.py
|
duongminhhieu/-Racing-Betting
|
dd58e2878a798df57ae10d719d3256a6636dcf33
|
[
"MIT"
] | null | null | null |
main.py
|
duongminhhieu/-Racing-Betting
|
dd58e2878a798df57ae10d719d3256a6636dcf33
|
[
"MIT"
] | null | null | null |
import pygame, sys, random, os
from DinoRun import *
from pygame.locals import *
import time
sys.path.insert(0, '../../')
WINDOWWIDTH = 1280
WINDOWHEIGHT = 720
# Menu background images
BG_MENU_IMG = pygame.image.load('img/backgroundMenu.png')
BG_PLAY_IMG = pygame.image.load("img/BackGroundPlay.png")
BG_MENU_SetAV = pygame.image.load("img/GiaoDienChonSetNV.png")
BG_Betting = pygame.image.load("img/GiaoDienBietting.png")
#BG_HELP_IMG = pygame.image.load('img/')
#BG_SHOP_IMG = pygame.image.load('img/')
b = []  # images of the cars, shown on the results screen
a = []  # finishing order of the cars
# Button images
NutPlay = pygame.image.load("img/NutPlay1.png")
NutHelp = pygame.image.load("img/NutHelp.png")
NutMiniGame = pygame.image.load("img/mini.png")
NutShop = pygame.image.load("img/shop.png")
# Sounds
menu_sound = pygame.mixer.Sound("sound/Road Tripzzz - Ofshane (2).mp3")
menu_sound.set_volume(0.25)
minigame_sound = pygame.mixer.Sound("sound/Cuckoo Clock - Quincas Moreira.mp3")
minigame_sound.set_volume(0.25)
pygame.init()
FPS = 60
fpsClock = pygame.time.Clock()
DISPLAYSURF = pygame.display.set_mode((WINDOWWIDTH, WINDOWHEIGHT))
pygame.display.flip()
pygame.display.set_caption('RACING BETTING')
#======================================================================================
RED=(255,0,0)
GREEN=(0,255,0)
WINDOWWIDTH = 1500
WINDOWHEIGHT = 700
X_MARGIN = 80
LANEWIDTH = 60
CARWIDTH = 0
CARHEIGHT = 0
CARSPEED = 3
CARIMG10=pygame.image.load("img/dollar.png")
class Car1():
def __init__(self):
self.width = CARWIDTH
self.height = CARHEIGHT
self.x = 0
self.y = 312
self.speed = CARSPEED
self.surface = pygame.Surface((self.width, self.height))
self.surface.fill((255, 255, 255))
def draw(self):
DISPLAYSURF.blit(car_1, (int(self.x), int(self.y)))
global pos_now1
pos_now1 = self.x
if (pos_now1 > 1181):
a.append(1)
def update(self):
global pos_now1
pos_now1 = self.x
if self.x <= 1180:
if self.x +6>1180:
self.x += 8
global vt_1
vt_1 = 0
change = random.randint(1, 5)
if ((self.x > 100 * change) and (self.x < 100 * change + 5)):
self.x = 400
elif ((self.x > 105 * change) and (self.x < 105 * change + 5)):
pass
elif ((self.x > 125 * change) and (self.x < 125 * change + 100)):
self.x -= random.randint(0,3)
else:
self.x += random.randint(0,3)
else:
vt_1= 1
self.x = 1181
class Car2():
def __init__(self):
self.width = CARWIDTH
self.height = CARHEIGHT
self.x = 0
self.y = 388
self.speed = CARSPEED
self.surface = pygame.Surface((self.width, self.height))
self.surface.fill((255, 255, 255))
def draw(self):
DISPLAYSURF.blit(car_2, (int(self.x), int(self.y)))
global pos_now2
pos_now2 = self.x
if (pos_now2 > 1181):
a.append(2)
def update(self):
global pos_now2
pos_now2 = self.x
if self.x <= 1180:
if self.x +6>1180:
self.x += 8
global vt_2
vt_2 = 0
change = random.randint(1, 5)
if ((self.x > 100 * change) and (self.x < 100 * change + 5)):
self.x = 400
elif ((self.x > 105 * change) and (self.x < 105 * change + 5)):
pass
elif ((self.x > 125 * change) and (self.x < 125 * change + 100)):
self.x -= random.randint(0, 3)
else:
self.x += random.randint(0, 3)
else:
vt_2 = 2
self.x = 1181
vt2_x = self.x
class Car3():
def __init__(self):
self.width = CARWIDTH
self.height = CARHEIGHT
self.x = 0
self.y = 479
self.speed = CARSPEED
self.surface = pygame.Surface((self.width, self.height))
self.surface.fill((255, 255, 255))
def draw(self):
DISPLAYSURF.blit(car_3, (int(self.x), int(self.y)))
global pos_now3
pos_now3 = self.x
if (pos_now3 > 1181):
a.append(3)
def update(self):
global pos_now3
pos_now3 = self.x
if self.x <= 1180:
if self.x +6>1180:
self.x += 8
global vt_3
vt_3 = 0
change = random.randint(1, 5)
if ((self.x > 100 * change) and (self.x < 100 * change + 5)):
self.x = 400
elif ((self.x > 105 * change) and (self.x < 105 * change + 5)):
pass
elif ((self.x > 125 * change) and (self.x < 125 * change + 100)):
self.x -= random.randint(0, 3)
else:
self.x += random.randint(0, 3)
else:
vt_3 = 3
self.x = 1181
class Car4():
def __init__(self):
self.width = CARWIDTH
self.height = CARHEIGHT
self.x = 0
self.y = 564
self.speed = CARSPEED
self.surface = pygame.Surface((self.width, self.height))
self.surface.fill((255, 255, 255))
def draw(self):
DISPLAYSURF.blit(car_4, (int(self.x), int(self.y)))
global pos_now4
pos_now4 = self.x
if (pos_now4 > 1181):
a.append(4)
def update(self):
global pos_now4
pos_now4 = self.x
if self.x <= 1180:
if self.x +6>1180:
self.x += 8
global vt_4
vt_4 = 0
change = random.randint(1, 5)
if ((self.x > 100 * change) and (self.x < 100 * change + 5)):
self.x = 400
elif ((self.x > 105 * change) and (self.x < 105 * change + 5)):
pass
elif ((self.x > 125 * change) and (self.x < 125 * change + 100)):
self.x -= random.randint(0, 3)
else:
self.x += random.randint(0, 3)
else:
vt_4 = 4
self.x = 1181
class Car5():
def __init__(self):
self.width = CARWIDTH
self.height = CARHEIGHT
self.x = 0
self.y = 646
self.speed = CARSPEED
self.surface = pygame.Surface((self.width, self.height))
self.surface.fill((255, 255, 255))
def draw(self):
DISPLAYSURF.blit(car_5, (int(self.x), int(self.y)))
global pos_now5
pos_now5=self.x
if (pos_now5>1181):
a.append(5)
def update(self):
if self.x <= 1180:
if self.x +6>1180:
self.x += 8
global vt_5
vt_5 = 0
change = random.randint(1, 5)
if self.x==1280:
self.x+=2
if ((self.x > 100 * change) and (self.x < 100 * change + 5)):
self.x = 400
elif ((self.x > 105 * change) and (self.x < 105 * change + 5)):
pass
elif ((self.x > 125 * change) and (self.x < 125 * change + 100)):
self.x -= random.randint(0, 3)
else:
self.x += random.randint(0, 3)
else:
vt_5 = 5
self.x = 1181
def gamePlay(bg, car1, car2, car3, car4, car5):
tmp = 10
global coin, tienCuoc
car1.__init__()
car2.__init__()
car3.__init__()
car4.__init__()
car5.__init__()
bg.__init__()
bg.count_321()
running = True
while running:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
bg.draw()
car1.draw()
car1.update()
car2.draw()
car2.update()
car3.draw()
car3.update()
car4.draw()
car4.update()
car5.draw()
car5.update()
print(chon_xe)
print(a)
if (vt_1==1 and vt_2==2 and vt_3== 3 and vt_4==4 and vt_5==5):
if (chon_xe[0]== a[0]):
over_bg = pygame.image.load("img\giaodienWWin.png")
DISPLAYSURF.blit(over_bg, (0, 0))
if ( tmp == 10):
coin[0] += int(tienCuoc[0]) * 10
tmp += 10
else:
over_bg = pygame.image.load("img\giaodienOver.png")
DISPLAYSURF.blit(over_bg, (0, 0))
if (tmp == 10 ):
coin[0] -= int(tienCuoc[0])
tmp += 10
file_2 = open(coin_username_info, 'w')
file_2.write(str(coin[0]))
file_2.close()
for i in range(5):
if i == 0:
DISPLAYSURF.blit(b[a[i] - 1], (551, 245))
if i == 1:
DISPLAYSURF.blit(b[a[i] - 1], (406, 340))
if i == 2:
DISPLAYSURF.blit(b[a[i] - 1], (690, 377))
if i == 3:
DISPLAYSURF.blit(b[a[i] - 1], (274, 426))
if i == 4:
DISPLAYSURF.blit(b[a[i] - 1], (836, 460))
for event in pygame.event.get():
if event.type == pygame.KEYDOWN:
if event.key == K_ESCAPE:
a.clear()
b.clear()
menu_sound.stop()
MeNu()
pygame.display.update()
fpsClock.tick(FPS)
def start_the_game():
bg = Back_ground()
car1 = Car1()
car2 = Car2()
car3 = Car3()
car4 = Car4()
car5 = Car5()
gamePlay(bg, car1, car2, car3, car4, car5)
#######################################################
def drawCoin():  # draw the player's money
draw_text(str(coin[0]) + "$", "font/monofonto.ttf", 38, (255, 255, 255), SCREEN_WIDTH - 70, 170, "topright")
def draw_Race(race_img):  # draw the race track
    DISPLAYSURF.blit(race_img, (0, 0))
# draw the different screens
def HamGiaoDienSetNV():
while True:
mouse_x, mouse_y = pygame.mouse.get_pos()
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
            # Go back
if event.type == pygame.KEYDOWN:
if event.key == K_ESCAPE:
return
if event.type == pygame.MOUSEBUTTONDOWN:
                # Back button
if (event.button == 1) & (mouse_x >= 1173) & (mouse_x <= 1259) & (mouse_y >= 32) & (mouse_y <= 112):
return
                # tick button for the first car set
if (event.button == 1) & (mouse_x >= 1032) & (mouse_x <= 1090) & (mouse_y >= 270) & (mouse_y <= 328):
HamGiaoDienBetting(1)
if (event.button == 1) & (mouse_x >= 1032) & (mouse_x <= 1090) & (mouse_y >= 340) & (mouse_y <= 398):
HamGiaoDienBetting(2)
if (event.button == 1) & (mouse_x >= 1032) & (mouse_x <= 1090) & (mouse_y >= 417) & (mouse_y <= 478):
HamGiaoDienBetting(3)
if (event.button == 1) & (mouse_x >= 1032) & (mouse_x <= 1090) & (mouse_y >= 487) & (mouse_y <= 549):
HamGiaoDienBetting(4)
if (event.button == 1) & (mouse_x >= 1032) & (mouse_x <= 1090) & (mouse_y >= 566) & (mouse_y <= 624):
HamGiaoDienBetting(5)
DISPLAYSURF.blit(BG_MENU_SetAV, (0, 0))
        # Button hover effects
if (mouse_x >= 1173) & (mouse_x <= 1259) & (mouse_y >= 32) & (mouse_y <= 112):
DISPLAYSURF.blit(pygame.image.load("img/NutBack.png"), (0, 0))
if (mouse_x >= 1032) & (mouse_x <= 1090) & (mouse_y >= 270) & (mouse_y <= 328):
DISPLAYSURF.blit(pygame.image.load("img/NutTickSet1.png"), (0, 0))
if (mouse_x >= 1032) & (mouse_x <= 1090) & (mouse_y >= 340) & (mouse_y <= 398):
DISPLAYSURF.blit(pygame.image.load("img/NutTickSet2.png"), (0, 0))
if (mouse_x >= 1032) & (mouse_x <= 1090) & (mouse_y >= 417) & (mouse_y <= 478):
DISPLAYSURF.blit(pygame.image.load("img/NutTickSet3.png"), (0, 0))
if (mouse_x >= 1032) & (mouse_x <= 1090) & (mouse_y >= 487) & (mouse_y <= 549):
DISPLAYSURF.blit(pygame.image.load("img/NutTickSet4.png"), (0, 0))
if (mouse_x >= 1032) & (mouse_x <= 1090) & (mouse_y >= 566) & (mouse_y <= 624):
DISPLAYSURF.blit(pygame.image.load("img/NutTickSet5.png"), (0, 0))
pygame.display.update()
FONT = pygame.font.Font("font/monofonto.ttf", 32)
tienCuoc = [0]
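# InputBox: small pygame text-entry widget used for typing the stake; it toggles focus on
# click and re-renders its text surface after every key press while active.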
class InputBox:
def __init__(self, x, y, w, h, text= '0' ):
global tienCuoc
self.rect = pygame.Rect(x, y, w, h)
self.color = pygame.Color((0, 0, 0))
self.text = text
self.txt_surface = FONT.render(text, True, self.color)
self.active = False
def handle_event(self, event):
if event.type == pygame.MOUSEBUTTONDOWN:
# If the user clicked on the input_box rect.
if self.rect.collidepoint(event.pos):
# Toggle the active variable.
self.active = not self.active
else:
self.active = False
# Change the current color of the input box.
self.color = pygame.Color((255, 255, 255)) if self.active else pygame.Color((0, 0, 0))
if event.type == pygame.KEYDOWN:
if self.active:
if event.key == pygame.K_BACKSPACE:
self.text = self.text[:-1]
else:
self.text += event.unicode
# Re-render the text.
self.txt_surface = FONT.render(self.text, True, self.color)
def update(self):
# Resize the box if the text is too long.
width = max(200, self.txt_surface.get_width()+10)
self.rect.w = width
def draw(self, screen):
# Blit the text.
DISPLAYSURF.blit(self.txt_surface, (self.rect.x+5, self.rect.y+5))
# Blit the rect.
pygame.draw.rect(DISPLAYSURF, self.color, self.rect, 2)
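# HamGiaoDienBetting: betting screen for the chosen car set. It loads the five car images,
# lets the player tick one car and type a stake, and only starts the race when the stake is
# a positive amount no larger than the current coin balance.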
def HamGiaoDienBetting(set):
global chon_xe
chon_xe = [1]
global car_1, car_2, car_3, car_4, car_5, tienCuoc
if (set == 1):
car_1 = pygame.image.load("img/Set Xe/16.png")
car_2 = pygame.image.load("img/Set Xe/17.png")
car_3 = pygame.image.load("img/Set Xe/18.png")
car_4 = pygame.image.load("img/Set Xe/19.png")
car_5 = pygame.image.load("img/Set Xe/20.png")
elif (set == 2):
car_1 = pygame.image.load("img/Set Xe/15.png")
car_2 = pygame.image.load("img/Set Xe/13.png")
car_3 = pygame.image.load("img/Set Xe/11.png")
car_4 = pygame.image.load("img/Set Xe/12.png")
car_5 = pygame.image.load("img/Set Xe/14.png")
elif (set == 3):
car_1 = pygame.image.load("img/Set Xe/10.png")
car_2 = pygame.image.load("img/Set Xe/7.png")
car_3 = pygame.image.load("img/Set Xe/6.png")
car_4 = pygame.image.load("img/Set Xe/8.png")
car_5 = pygame.image.load("img/Set Xe/9.png")
elif (set == 4):
car_1 = pygame.image.load("img/Set Xe/5.png")
car_2 = pygame.image.load("img/Set Xe/3.png")
car_3 = pygame.image.load("img/Set Xe/1.png")
car_4 = pygame.image.load("img/Set Xe/2.png")
car_5 = pygame.image.load("img/Set Xe/4.png")
elif (set == 5):
car_1 = pygame.image.load("img/Set Xe/21.png")
car_2 = pygame.image.load("img/Set Xe/22.png")
car_3 = pygame.image.load("img/Set Xe/23.png")
car_4 = pygame.image.load("img/Set Xe/24.png")
car_5 = pygame.image.load("img/Set Xe/25.png")
b.append(car_1)
b.append(car_2)
b.append(car_3)
b.append(car_4)
b.append(car_5)
Nut1 = False
Nut2 = False
Nut3 = False
Nut4 = False
Nut5 = False
clock = pygame.time.Clock()
input_box = InputBox(680, 458, 140, 42) # create the stake input box
while True:
mouse_x, mouse_y = pygame.mouse.get_pos()
DISPLAYSURF.blit(BG_Betting, (0, 0))
draw_text(" Enter your stake: ", "font/monofonto.ttf", 38, (255, 255, 255), 680, 453, "topright")
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
input_box.handle_event(event)
# Go back
if event.type == pygame.KEYDOWN:
if event.key == K_ESCAPE:
return
if event.type == pygame.MOUSEBUTTONDOWN:
global choose_1, choose_2, choose_3, choose_4, choose_5
# Go back
if (event.button == 1) & (mouse_x >= 1173) & (mouse_x <= 1259) & (mouse_y >= 32) & (mouse_y <= 112):
return
# Select car 1
if (event.button == 1) & (mouse_x >= 334) & (mouse_x <= 396) & (mouse_y >= 347) & (mouse_y <= 407):
Nut1 = True
Nut2 = False
Nut3 = False
Nut4 = False
Nut5 = False
chon_xe[0] = 1
# Select car 2
if (event.button == 1) & (mouse_x >= 471) & (mouse_x <= 532) & (mouse_y >= 347) & (mouse_y <= 407):
Nut1 = False
Nut2 = True
Nut3 = False
Nut4 = False
Nut5 = False
chon_xe[0] = 2
# Select car 3
if (event.button == 1) & (mouse_x >= 606) & (mouse_x <= 668) & (mouse_y >= 347) & (mouse_y <= 407):
Nut1 = False
Nut2 = False
Nut3 = True
Nut4 = False
Nut5 = False
chon_xe[0] = 3
# Select car 4
if (event.button == 1) & (mouse_x >= 751) & (mouse_x <= 810) & (mouse_y >= 347) & (mouse_y <= 407):
Nut1 = False
Nut2 = False
Nut3 = False
Nut4 = True
Nut5 = False
chon_xe[0] = 4
if (event.button == 1) & (mouse_x >= 888) & (mouse_x <= 950) & (mouse_y >= 347) & (mouse_y <= 407):
Nut1 = False
Nut2 = False
Nut3 = False
Nut4 = False
Nut5 = True
chon_xe[0] = 5
if tienCuoc[0] == '':
print(tienCuoc[0])
elif (int(tienCuoc[0]) <= int(coin[0])) & (int(tienCuoc[0]) > 0) & (event.button == 1) & (mouse_x >= 570) & (mouse_x <= 754) & (mouse_y >= 540) & (mouse_y <= 607):
start_the_game()
# draw the selected character (car) set
if set == 1:
DISPLAYSURF.blit(pygame.image.load("img/Set 1.png"), (0, 0))
if set == 2:
DISPLAYSURF.blit(pygame.image.load("img/Set 2.png"), (0, 0))
if set == 3:
DISPLAYSURF.blit(pygame.image.load("img/Set 3.png"), (0, 0))
if set == 4:
DISPLAYSURF.blit(pygame.image.load("img/Set 4.png"), (0, 0))
if set == 5:
DISPLAYSURF.blit(pygame.image.load("img/Set 5.png"), (0, 0))
input_box.update()
# selection highlight effect
if Nut1 == True:
DISPLAYSURF.blit(pygame.image.load("img/NutTick1.png"), (0, 0))
elif Nut2 == True:
DISPLAYSURF.blit(pygame.image.load("img/NutTick2.png"), (0, 0))
elif Nut3 == True:
DISPLAYSURF.blit(pygame.image.load("img/NutTick3.png"), (0, 0))
elif Nut4 == True:
DISPLAYSURF.blit(pygame.image.load("img/NutTick4.png"), (0, 0))
elif Nut5 == True:
DISPLAYSURF.blit(pygame.image.load("img/NutTick5.png"), (0, 0))
# button hover effects
if (mouse_x >= 1173) & (mouse_x <= 1259) & (mouse_y >= 32) & (mouse_y <= 112):
DISPLAYSURF.blit(pygame.image.load("img/NutBack.png"), (0, 0))
if (mouse_x >= 334) & (mouse_x <= 396) & (mouse_y >= 347) & (mouse_y <= 407):
DISPLAYSURF.blit(pygame.image.load("img/NutTick1.png"), (0, 0))
if (mouse_x >= 471) & (mouse_x <= 532) & (mouse_y >= 347) & (mouse_y <= 407):
DISPLAYSURF.blit(pygame.image.load("img/NutTick2.png"), (0, 0))
if (mouse_x >= 606) & (mouse_x <= 668) & (mouse_y >= 347) & (mouse_y <= 407):
DISPLAYSURF.blit(pygame.image.load("img/NutTick3.png"), (0, 0))
if (mouse_x >= 751) & (mouse_x <= 810) & (mouse_y >= 347) & (mouse_y <= 407):
DISPLAYSURF.blit(pygame.image.load("img/NutTick4.png"), (0, 0))
if (mouse_x >= 888) & (mouse_x <= 950) & (mouse_y >= 347) & (mouse_y <= 407):
DISPLAYSURF.blit(pygame.image.load("img/NutTick5.png"), (0, 0))
if (mouse_x >= 570) & (mouse_x <= 754) & (mouse_y >= 540) & (mouse_y <= 607):
DISPLAYSURF.blit(pygame.image.load("img/NutStart.png"), (0, 0))
input_box.draw(DISPLAYSURF)
tienCuoc[0] = input_box.text
drawCoin()
pygame.display.flip()
pygame.display.update()
clock.tick(30)
#############################################################################
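# Back_ground: draws the selected map and shows the 3-2-1-GO! countdown before each race.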
class Back_ground():
def __init__(self):
self.x = 0
self.y = 0
self.img = map
#self.width = self.img.get_width()
#self.height = self.img.get_height()
def draw(self):
DISPLAYSURF.blit(self.img, (int(self.x), int(self.y)))
#DISPLAYSURF.blit(self.img, (int(self.x), int(self.y - self.height)))
def count_321(self):
count = 3
while count >= 0:
DISPLAYSURF.blit(self.img, (int(self.x), int(self.y)))
if count == 0:
message_display("GO!", 100, -70, (0, 255, 255), 1)
elif count == 3:
message_display(str(count), 100, -70, (255,0,0), 0.75)
elif count == 2:
message_display(str(count), 100, -70, (255, 255, 0), 0.75)
elif count == 1:
message_display(str(count), 100, -70, (0, 255, 0), 0.75)
count -= 1
fpsClock.tick(FPS)
def text_objects(text, font, color):
textSurface = font.render(text, True, color)
return textSurface, textSurface.get_rect()
def message_display(text, shift_x, shift_y, color, sleep_time):
largeText = pygame.font.SysFont('comicsansms', 72, True)
TextSurf, TextRect = text_objects(text, largeText, color)
TextRect.center = ((WINDOWWIDTH / 2 - shift_x), (WINDOWHEIGHT / 2 - shift_y))
DISPLAYSURF.blit(TextSurf, TextRect)
pygame.display.update()
time.sleep(sleep_time)
#############################################################################3
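# The screens below (Help, Shop, map selection, main menu) each run their own event loop,
# draw hover effects, and return when the Back button or ESC is pressed.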
def HamGiaoDienHelp():
while True:
mouse_x, mouse_y = pygame.mouse.get_pos()
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
if event.type == pygame.MOUSEBUTTONDOWN:
# Go back
if (event.button == 1) & (mouse_x >= 1173) & (mouse_x <= 1259) & (mouse_y >= 32) & (mouse_y <= 112):
return
DISPLAYSURF.blit(pygame.image.load("img/GiaodienHelp.png"), (0, 0))
if (mouse_x >= 1173) & (mouse_x <= 1259) & (mouse_y >= 32) & (mouse_y <= 112):
DISPLAYSURF.blit(pygame.image.load("img/NutBack.png"), (0, 0))
pygame.display.update()
def HamGiaoDienShop():
while True:
mouse_x, mouse_y = pygame.mouse.get_pos()
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
if event.type == pygame.MOUSEBUTTONDOWN:
# Go back
if (event.button == 1) & (mouse_x >= 1173) & (mouse_x <= 1259) & (mouse_y >= 32) & (mouse_y <= 112):
return
DISPLAYSURF.blit(pygame.image.load("img/GiaoDienShop.png"), (0, 0))
if (mouse_x >= 1173) & (mouse_x <= 1259) & (mouse_y >= 32) & (mouse_y <= 112):
DISPLAYSURF.blit(pygame.image.load("img/NutBack.png"), (0, 0))
pygame.display.update()
def HamGiaoDienPlay():
global map,car1,car2,car3,car4,car5
while True:
mouse_x, mouse_y = pygame.mouse.get_pos()
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
# Go back
if event.type == pygame.KEYDOWN:
if event.key == K_ESCAPE:
return
if event.type == pygame.MOUSEBUTTONDOWN:
# Go back
if (event.button == 1) & (mouse_x >= 1173) & (mouse_x <= 1259) & (mouse_y >= 32) & (mouse_y <= 112):
return
# choose a map
if (event.button == 1) & (mouse_x >= 0) & (mouse_x <= 415) & (mouse_y >= 460) & (mouse_y <= 690):
map=pygame.image.load("img/Map1.png")
HamGiaoDienSetNV()
if (event.button == 1) & (mouse_x >= 0) & (mouse_x <= 415) & (mouse_y >= 158) & (mouse_y <= 392):
map=pygame.image.load("img/Map5.png")
HamGiaoDienSetNV()
if (event.button == 1) & (mouse_x >= 428) & (mouse_x <= 847) & (mouse_y >= 289) & (mouse_y <= 527):
map=pygame.image.load("img/Map3.png")
HamGiaoDienSetNV()
if (event.button == 1) & (mouse_x >= 858) & (mouse_x <= 1280) & (mouse_y >= 151) & (mouse_y <= 392):
map=pygame.image.load("img/Map4.png")
HamGiaoDienSetNV()
if (event.button == 1) & (mouse_x >= 858) & (mouse_x <= 1280) & (mouse_y >= 455) & (mouse_y <= 720):
map=pygame.image.load("img/Map2.png")
HamGiaoDienSetNV()
DISPLAYSURF.blit(BG_PLAY_IMG, (0, 0)) # background shown after the Play button is pressed
# below: hover effects for the Play-screen buttons
if (mouse_x >= 1173) & (mouse_x <= 1259) & (mouse_y >= 32) & (mouse_y <= 112):
DISPLAYSURF.blit(pygame.image.load("img/NutBack.png"), (0, 0))
if (mouse_x >= 0) & (mouse_x <= 415) & (mouse_y >= 460) & (mouse_y <= 690):
DISPLAYSURF.blit(pygame.image.load("img/NutChonseMap1.png"), (0, 0))
if (mouse_x >= 0) & (mouse_x <= 415) & (mouse_y >= 158) & (mouse_y <= 392):
DISPLAYSURF.blit(pygame.image.load("img/NutChoseMap5.png"), (0, 0))
if (mouse_x >= 428) & (mouse_x <= 847) & (mouse_y >= 289) & (mouse_y <= 527):
DISPLAYSURF.blit(pygame.image.load("img/NutChoseMap3.png"), (0, 0))
if (mouse_x >= 858) & (mouse_x <= 1280) & (mouse_y >= 151) & (mouse_y <= 392):
DISPLAYSURF.blit(pygame.image.load("img/NutChoseMap4.png"), (0, 0))
if (mouse_x >= 858) & (mouse_x <= 1280) & (mouse_y >= 455) & (mouse_y <= 720):
DISPLAYSURF.blit(pygame.image.load("img/NutChoseMap2.png"), (0, 0))
pygame.display.update()
def MeNu():
menu_sound.play(-1) # start the menu music
global coin
while True:
mouse_x, mouse_y = pygame.mouse.get_pos()
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
if event.type == pygame.MOUSEBUTTONDOWN:
if (event.button == 1) & (coin[0] > 0) & (mouse_x >= 506) & (mouse_x <= 817) & (mouse_y >= 210) &( mouse_y <= 294): # enter Play
HamGiaoDienPlay()
if (event.button == 1) & (mouse_x >= 506) & (mouse_x <= 817) & (mouse_y >= 334) &( mouse_y <= 420):
HamGiaoDienShop()
if (event.button == 1) & (coin[0] == 0) & (mouse_x >= 506) & (mouse_x <= 817) & (mouse_y >= 454) &( mouse_y <= 537):
print("MiniGame")
menu_sound.stop()
minigame_sound.play()
start_game(coin)
file_2 = open(coin_username_info, 'w')
file_2.write(str(coin[0]))
file_2.close()
minigame_sound.stop()
menu_sound.play(-1)
if (event.button == 1) & (mouse_x >= 610) & (mouse_x <= 717) & (mouse_y >= 576) &( mouse_y <= 670):
HamGiaoDienHelp()
DISPLAYSURF.blit(BG_MENU_IMG, (0, 0))
drawCoin()
# button hover effects
if (mouse_x >= 506) & (mouse_x <= 817) & (mouse_y >= 210) & (mouse_y <= 294):
DISPLAYSURF.blit(NutPlay, (0, 0))
if (mouse_x >= 506) & (mouse_x <= 817) & (mouse_y >= 334) & (mouse_y <= 420):
DISPLAYSURF.blit(NutShop, (0, 0))
if (mouse_x >= 506) & (mouse_x <= 817) & (mouse_y >= 454) & (mouse_y <= 537):
DISPLAYSURF.blit(NutMiniGame, (0, 0))
if (mouse_x >= 610) & (mouse_x <= 717) & (mouse_y >= 576) & (mouse_y <= 670):
DISPLAYSURF.blit(NutHelp, (0, 0))
pygame.display.update()
def main():
MeNu()
#####################################
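# Tkinter login/registration front-end. Credentials and the per-user coin balance are kept
# in plain text files named after the username (no hashing), and a successful login starts
# the pygame menu via main().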
from tkinter import *
import os
def delete2():
screen3.destroy()
def delete3():
screen4.destroy()
def delete4():
screen5.destroy()
def login_sucess():
global screen3
screen3 = Toplevel(screen)
screen3.title("Success")
screen3.geometry("150x100")
Label(screen3, text="Login Sucess").pack()
Button(screen3, text="OK", command=delete2).pack()
def password_not_recognised():
global screen4
screen4 = Toplevel(screen)
screen4.title("Error")
screen4.geometry("150x100")
Label(screen4, text="Password Error").pack()
Button(screen4, text="OK", command=delete3).pack()
def user_not_found():
global screen5
screen5 = Toplevel(screen)
screen5.title("Error")
screen5.geometry("150x100")
Label(screen5, text="User Not Found").pack()
Button(screen5, text="OK", command=delete4).pack()
def register_user():
print("working")
global username_info
username_info = username.get()
password_info = password.get()
file = open(username_info, "w")
file.write(username_info + "\n")
file.write(password_info)
file.close()
global coin_username_info
coin_username_info =username_info+"_coin"
file_1 = open(coin_username_info, "w")
file_1.write(str(0))
file_1.close()  # close the coin file so the initial balance is flushed to disk
username_entry.delete(0, END)
password_entry.delete(0, END)
Label(screen1, text="Registration Sucess", fg="green", font=("calibri", 11)).pack()
def login_verify():
username1 = username_verify.get()
password1 = password_verify.get()
username_entry1.delete(0, END)
password_entry1.delete(0, END)
list_of_files = os.listdir()
if username1 in list_of_files:
file1 = open(username1, "r")
verify = file1.read().splitlines()
global coin_username_info
coin_username_info=username1+"_coin"
if password1 in verify:
global coin
coin=[]
file_1=open(coin_username_info,'r')
n=file_1.read()
coin.append(int(n))
if __name__ == '__main__':
main()
else:
password_not_recognised()
else:
user_not_found()
def register():
global screen1
screen1 = Toplevel(screen)
screen1.title("Register")
screen1.geometry("300x250")
global username
global password
global username_entry
global password_entry
username = StringVar()
password = StringVar()
Label(screen1, text="Please enter details below").pack()
Label(screen1, text="").pack()
Label(screen1, text="Username * ").pack()
username_entry = Entry(screen1, textvariable=username)
username_entry.pack()
Label(screen1, text="Password * ").pack()
password_entry = Entry(screen1, textvariable=password)
password_entry.pack()
Label(screen1, text="").pack()
Button(screen1, text="Register", width=10, height=1, command=register_user).pack()
def login():
global screen2
screen2 = Toplevel(screen)
screen2.title("Login")
screen2.geometry("300x250")
Label(screen2, text="Please enter details below to login").pack()
Label(screen2, text="").pack()
global username_verify
global password_verify
username_verify = StringVar()
password_verify = StringVar()
global username_entry1
global password_entry1
Label(screen2, text="Username * ").pack()
username_entry1 = Entry(screen2, textvariable=username_verify)
username_entry1.pack()
Label(screen2, text="").pack()
Label(screen2, text="Password * ").pack()
password_entry1 = Entry(screen2, textvariable=password_verify)
password_entry1.pack()
Label(screen2, text="").pack()
Button(screen2, text="Login", width=10, height=1, command=login_verify).pack()
def main_screen():
global screen
screen = Tk()
screen.geometry("300x250")
screen.title("Notes 1.0")
Label(text="Notes 1.0", bg="grey", width="300", height="2", font=("Calibri", 13)).pack()
Label(text="").pack()
Button(text="Login", height="2", width="30", command=login).pack()
Label(text="").pack()
Button(text="Register", height="2", width="30", command=register).pack()
screen.mainloop()
main_screen()
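# Entry point: the Tkinter login window is shown first; the pygame game only starts after a
# successful login (see login_verify above).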
'''
Group 3
Game: Car racing betting game
Supervisor (GVHD): VÕ HOÀNG QUÂN
33
Students who worked on the project:
Dương Minh Hiếu _ 20120473 (Dev, QA)
Trần Lê Hiếu _ 20120479 (Dev, Tester)
Nguyễn Tạ Huy Hoàng _ 20120482 (Dev, BA)
Ngô Phi Hùng _ 20120486 (Dev, PM)
Đỗ Đăng Huy _ 20120492 (Dev, Designer)
'''
| 35.621822 | 179 | 0.521278 |
423237a16d3612eb6f3d55360f2f4d7b41323de0 | 27,455 | py | Python | rasa/core/policies/ensemble.py | yang198876/rasa | 14db39f1facb324780553830d06986af8056dc26 | ["Apache-2.0"] | 1 | 2021-01-04T23:27:04.000Z | 2021-01-04T23:27:04.000Z | rasa/core/policies/ensemble.py | yang198876/rasa | 14db39f1facb324780553830d06986af8056dc26 | ["Apache-2.0"] | 60 | 2020-09-14T00:42:47.000Z | 2022-03-01T13:45:13.000Z | rasa/core/policies/ensemble.py | yang198876/rasa | 14db39f1facb324780553830d06986af8056dc26 | ["Apache-2.0"] | 1 | 2020-11-21T01:26:43.000Z | 2020-11-21T01:26:43.000Z |
import importlib
import json
import logging
import os
import sys
from collections import defaultdict
from datetime import datetime
from pathlib import Path
from typing import Text, Optional, Any, List, Dict, Tuple, NamedTuple, Union
import rasa.core
import rasa.core.training.training
from rasa.shared.exceptions import RasaException
import rasa.shared.utils.common
import rasa.shared.utils.io
import rasa.utils.io
from rasa.constants import MINIMUM_COMPATIBLE_VERSION
from rasa.shared.constants import (
DOCS_URL_RULES,
DOCS_URL_POLICIES,
DOCS_URL_MIGRATION_GUIDE,
DEFAULT_CONFIG_PATH,
)
from rasa.shared.core.constants import (
USER_INTENT_BACK,
USER_INTENT_RESTART,
ACTION_LISTEN_NAME,
ACTION_RESTART_NAME,
ACTION_BACK_NAME,
)
from rasa.shared.core.domain import InvalidDomain, Domain
from rasa.shared.core.events import ActionExecutionRejected, ActionExecuted
from rasa.core.exceptions import UnsupportedDialogueModelError
from rasa.core.featurizers.tracker_featurizers import MaxHistoryTrackerFeaturizer
from rasa.shared.nlu.interpreter import NaturalLanguageInterpreter, RegexInterpreter
from rasa.core.policies.policy import Policy, SupportedData
from rasa.core.policies.fallback import FallbackPolicy
from rasa.core.policies.memoization import MemoizationPolicy, AugmentedMemoizationPolicy
from rasa.core.policies.rule_policy import RulePolicy
from rasa.shared.core.trackers import DialogueStateTracker
from rasa.shared.core.generator import TrackerWithCachedStates
from rasa.core import registry
logger = logging.getLogger(__name__)
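# PolicyEnsemble: container for a list of core policies. It trains them together, persists
# and reloads them alongside a metadata.json (package versions, policy names, fingerprints),
# and validates the policy/domain configuration.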
class PolicyEnsemble:
versioned_packages = ["rasa", "tensorflow", "sklearn"]
def __init__(
self,
policies: List[Policy],
action_fingerprints: Optional[Dict[Any, Dict[Text, List]]] = None,
) -> None:
self.policies = policies
self.date_trained = None
self.action_fingerprints = action_fingerprints
self._check_priorities()
self._check_for_important_policies()
def _check_for_important_policies(self) -> None:
from rasa.core.policies.mapping_policy import MappingPolicy
if not any(
isinstance(policy, (MappingPolicy, RulePolicy)) for policy in self.policies
):
logger.info(
f"MappingPolicy not included in policy ensemble. Default intents "
f"'{USER_INTENT_RESTART} and {USER_INTENT_BACK} will not trigger "
f"actions '{ACTION_RESTART_NAME}' and '{ACTION_BACK_NAME}'."
)
@staticmethod
def check_domain_ensemble_compatibility(
ensemble: Optional["PolicyEnsemble"], domain: Optional[Domain]
) -> None:
"""Check for elements that only work with certain policy/domain combinations."""
from rasa.core.policies.mapping_policy import MappingPolicy
from rasa.core.policies.two_stage_fallback import TwoStageFallbackPolicy
policies_needing_validation = [
MappingPolicy,
TwoStageFallbackPolicy,
RulePolicy,
]
for policy in policies_needing_validation:
policy.validate_against_domain(ensemble, domain)
_check_policy_for_forms_available(domain, ensemble)
def _check_priorities(self) -> None:
"""Checks for duplicate policy priorities within PolicyEnsemble."""
priority_dict = defaultdict(list)
for p in self.policies:
priority_dict[p.priority].append(type(p).__name__)
for k, v in priority_dict.items():
if len(v) > 1:
rasa.shared.utils.io.raise_warning(
f"Found policies {v} with same priority {k} "
f"in PolicyEnsemble. When personalizing "
f"priorities, be sure to give all policies "
f"different priorities.",
docs=DOCS_URL_POLICIES,
)
def _policy_ensemble_contains_policy_with_rules_support(self) -> bool:
"""Determine whether the policy ensemble contains at least one policy
supporting rule-based data.
Returns:
Whether or not the policy ensemble contains at least one policy that
supports rule-based data.
"""
return any(
policy.supported_data()
in [SupportedData.RULE_DATA, SupportedData.ML_AND_RULE_DATA]
for policy in self.policies
)
@staticmethod
def _training_trackers_contain_rule_trackers(
training_trackers: List[DialogueStateTracker],
) -> bool:
"""Determine whether there are rule-based training trackers.
Args:
training_trackers: Trackers to inspect.
Returns:
Whether or not any of the supplied training trackers contain rule-based
data.
"""
return any(tracker.is_rule_tracker for tracker in training_trackers)
def _emit_rule_policy_warning(
self, training_trackers: List[DialogueStateTracker]
) -> None:
"""Emit `UserWarning`s about missing rule-based data."""
is_rules_consuming_policy_available = (
self._policy_ensemble_contains_policy_with_rules_support()
)
training_trackers_contain_rule_trackers = self._training_trackers_contain_rule_trackers(
training_trackers
)
if (
is_rules_consuming_policy_available
and not training_trackers_contain_rule_trackers
):
rasa.shared.utils.io.raise_warning(
f"Found a rule-based policy in your pipeline but "
f"no rule-based training data. Please add rule-based "
f"stories to your training data or "
f"remove the rule-based policy (`{RulePolicy.__name__}`) from your "
f"your pipeline.",
docs=DOCS_URL_RULES,
)
elif (
not is_rules_consuming_policy_available
and training_trackers_contain_rule_trackers
):
rasa.shared.utils.io.raise_warning(
f"Found rule-based training data but no policy supporting rule-based "
f"data. Please add `{RulePolicy.__name__}` or another rule-supporting "
f"policy to the `policies` section in `{DEFAULT_CONFIG_PATH}`.",
docs=DOCS_URL_RULES,
)
def train(
self,
training_trackers: List[TrackerWithCachedStates],
domain: Domain,
interpreter: NaturalLanguageInterpreter,
**kwargs: Any,
) -> None:
if training_trackers:
self._emit_rule_policy_warning(training_trackers)
for policy in self.policies:
trackers_to_train = SupportedData.trackers_for_policy(
policy, training_trackers
)
policy.train(
trackers_to_train, domain, interpreter=interpreter, **kwargs
)
self.action_fingerprints = rasa.core.training.training.create_action_fingerprints(
training_trackers, domain
)
else:
logger.info("Skipped training, because there are no training samples.")
self.date_trained = datetime.now().strftime("%Y%m%d-%H%M%S")
def probabilities_using_best_policy(
self,
tracker: DialogueStateTracker,
domain: Domain,
interpreter: NaturalLanguageInterpreter,
**kwargs: Any,
) -> Tuple[List[float], Optional[Text]]:
raise NotImplementedError
def _max_histories(self) -> List[Optional[int]]:
"""Return max history."""
max_histories = []
for p in self.policies:
if isinstance(p.featurizer, MaxHistoryTrackerFeaturizer):
max_histories.append(p.featurizer.max_history)
else:
max_histories.append(None)
return max_histories
def _add_package_version_info(self, metadata: Dict[Text, Any]) -> None:
"""Adds version info for self.versioned_packages to metadata."""
for package_name in self.versioned_packages:
try:
p = importlib.import_module(package_name)
v = p.__version__
metadata[package_name] = v
except ImportError:
pass
def _persist_metadata(self, path: Text) -> None:
"""Persists the domain specification to storage."""
# make sure the directory we persist exists
domain_spec_path = os.path.join(path, "metadata.json")
rasa.shared.utils.io.create_directory_for_file(domain_spec_path)
policy_names = [
rasa.shared.utils.common.module_path_from_instance(p) for p in self.policies
]
metadata = {
"action_fingerprints": self.action_fingerprints,
"python": ".".join([str(s) for s in sys.version_info[:3]]),
"max_histories": self._max_histories(),
"ensemble_name": self.__module__ + "." + self.__class__.__name__,
"policy_names": policy_names,
"trained_at": self.date_trained,
}
self._add_package_version_info(metadata)
rasa.shared.utils.io.dump_obj_as_json_to_file(domain_spec_path, metadata)
def persist(self, path: Union[Text, Path]) -> None:
"""Persists the policy to storage."""
self._persist_metadata(path)
for i, policy in enumerate(self.policies):
dir_name = "policy_{}_{}".format(i, type(policy).__name__)
policy_path = Path(path) / dir_name
policy.persist(policy_path)
@classmethod
def load_metadata(cls, path) -> Any:
metadata_path = os.path.join(path, "metadata.json")
metadata = json.loads(
rasa.shared.utils.io.read_file(os.path.abspath(metadata_path))
)
return metadata
@staticmethod
def ensure_model_compatibility(metadata, version_to_check=None) -> None:
from packaging import version
if version_to_check is None:
version_to_check = MINIMUM_COMPATIBLE_VERSION
model_version = metadata.get("rasa", "0.0.0")
if version.parse(model_version) < version.parse(version_to_check):
raise UnsupportedDialogueModelError(
"The model version is too old to be "
"loaded by this Rasa Core instance. "
"Either retrain the model, or run with "
"an older version. "
"Model version: {} Instance version: {} "
"Minimal compatible version: {}"
"".format(model_version, rasa.__version__, version_to_check),
model_version,
)
@classmethod
def _ensure_loaded_policy(cls, policy, policy_cls, policy_name: Text):
if policy is None:
raise Exception(f"Failed to load policy {policy_name}: load returned None")
elif not isinstance(policy, policy_cls):
raise Exception(
"Failed to load policy {}: "
"load returned object that is not instance of its own class"
"".format(policy_name)
)
@classmethod
def load(cls, path: Union[Text, Path]) -> "PolicyEnsemble":
"""Loads policy and domain specification from storage"""
metadata = cls.load_metadata(path)
cls.ensure_model_compatibility(metadata)
policies = []
for i, policy_name in enumerate(metadata["policy_names"]):
policy_cls = registry.policy_from_module_path(policy_name)
dir_name = f"policy_{i}_{policy_cls.__name__}"
policy_path = os.path.join(path, dir_name)
policy = policy_cls.load(policy_path)
cls._ensure_loaded_policy(policy, policy_cls, policy_name)
policies.append(policy)
ensemble_cls = rasa.shared.utils.common.class_from_module_path(
metadata["ensemble_name"]
)
fingerprints = metadata.get("action_fingerprints", {})
ensemble = ensemble_cls(policies, fingerprints)
return ensemble
@classmethod
def from_dict(cls, policy_configuration: Dict[Text, Any]) -> List[Policy]:
import copy
policies = policy_configuration.get("policies") or policy_configuration.get(
"policy"
)
if policies is None:
raise InvalidPolicyConfig(
"You didn't define any policies. "
"Please define them under 'policies:' "
"in your policy configuration file."
)
if len(policies) == 0:
raise InvalidPolicyConfig(
"The policy configuration file has to include at least one policy."
)
policies = copy.deepcopy(policies) # don't manipulate passed `Dict`
parsed_policies = []
for policy in policies:
if policy.get("featurizer"):
featurizer_func, featurizer_config = cls.get_featurizer_from_dict(
policy
)
if featurizer_config.get("state_featurizer"):
(
state_featurizer_func,
state_featurizer_config,
) = cls.get_state_featurizer_from_dict(featurizer_config)
# override featurizer's state_featurizer
# with real state_featurizer class
featurizer_config["state_featurizer"] = state_featurizer_func(
**state_featurizer_config
)
# override policy's featurizer with real featurizer class
policy["featurizer"] = featurizer_func(**featurizer_config)
policy_name = policy.pop("name")
try:
constr_func = registry.policy_from_module_path(policy_name)
try:
policy_object = constr_func(**policy)
except TypeError as e:
raise Exception(f"Could not initialize {policy_name}. {e}")
parsed_policies.append(policy_object)
except (ImportError, AttributeError):
raise InvalidPolicyConfig(
f"Module for policy '{policy_name}' could not "
f"be loaded. Please make sure the "
f"name is a valid policy."
)
cls._check_if_rule_policy_used_with_rule_like_policies(parsed_policies)
return parsed_policies
@classmethod
def get_featurizer_from_dict(cls, policy) -> Tuple[Any, Any]:
# policy can have only 1 featurizer
if len(policy["featurizer"]) > 1:
raise InvalidPolicyConfig(
f"Every policy can only have 1 featurizer "
f"but '{policy.get('name')}' "
f"uses {len(policy['featurizer'])} featurizers."
)
featurizer_config = policy["featurizer"][0]
featurizer_name = featurizer_config.pop("name")
featurizer_func = registry.featurizer_from_module_path(featurizer_name)
return featurizer_func, featurizer_config
@classmethod
def get_state_featurizer_from_dict(cls, featurizer_config) -> Tuple[Any, Any]:
# featurizer can have only 1 state featurizer
if len(featurizer_config["state_featurizer"]) > 1:
raise InvalidPolicyConfig(
f"Every featurizer can only have 1 state "
f"featurizer but one of the featurizers uses "
f"{len(featurizer_config['state_featurizer'])}."
)
state_featurizer_config = featurizer_config["state_featurizer"][0]
state_featurizer_name = state_featurizer_config.pop("name")
state_featurizer_func = registry.state_featurizer_from_module_path(
state_featurizer_name
)
return state_featurizer_func, state_featurizer_config
@staticmethod
def _check_if_rule_policy_used_with_rule_like_policies(
policies: List[Policy],
) -> None:
if not any(isinstance(policy, RulePolicy) for policy in policies):
return
from rasa.core.policies.mapping_policy import MappingPolicy
from rasa.core.policies.form_policy import FormPolicy
from rasa.core.policies.two_stage_fallback import TwoStageFallbackPolicy
policies_not_be_used_with_rule_policy = (
MappingPolicy,
FormPolicy,
FallbackPolicy,
TwoStageFallbackPolicy,
)
if any(
isinstance(policy, policies_not_be_used_with_rule_policy)
for policy in policies
):
rasa.shared.utils.io.raise_warning(
f"It is not recommended to use the '{RulePolicy.__name__}' with "
f"other policies which implement rule-like "
f"behavior. It is highly recommended to migrate all deprecated "
f"policies to use the '{RulePolicy.__name__}'. Note that the "
f"'{RulePolicy.__name__}' will supersede the predictions of the "
f"deprecated policies if the confidence levels of the predictions are "
f"equal.",
docs=DOCS_URL_MIGRATION_GUIDE,
)
class Prediction(NamedTuple):
"""Stores the probabilities and the priority of the prediction."""
probabilities: List[float]
priority: int
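# SimplePolicyEnsemble: default ensemble that picks the prediction with the highest
# confidence, breaking ties by policy priority, with special handling for form, mapping,
# memoization and fallback policies.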
class SimplePolicyEnsemble(PolicyEnsemble):
@staticmethod
def is_not_memo_policy(
policy_name: Text, max_confidence: Optional[float] = None
) -> bool:
is_memo = policy_name.endswith("_" + MemoizationPolicy.__name__)
is_augmented = policy_name.endswith("_" + AugmentedMemoizationPolicy.__name__)
# also check if confidence is 0; then it cannot be counted as a prediction
return not (is_memo or is_augmented) or max_confidence == 0.0
@staticmethod
def _is_not_mapping_policy(
policy_name: Text, max_confidence: Optional[float] = None
) -> bool:
from rasa.core.policies.mapping_policy import MappingPolicy
is_mapping = policy_name.endswith("_" + MappingPolicy.__name__)
# also check if confidence is 0; then it cannot be counted as a prediction
return not is_mapping or max_confidence == 0.0
@staticmethod
def _is_form_policy(policy_name: Text) -> bool:
from rasa.core.policies.form_policy import FormPolicy
return policy_name.endswith("_" + FormPolicy.__name__)
def _pick_best_policy(
self, predictions: Dict[Text, Prediction]
) -> Tuple[List[float], Optional[Text]]:
"""Picks the best policy prediction based on probabilities and policy priority.
Args:
predictions: the dictionary containing policy name as keys
and predictions as values
Returns:
best_probabilities: the list of probabilities for the next actions
best_policy_name: the name of the picked policy
"""
best_confidence = (-1, -1)
best_policy_name = None
# form and mapping policies are special:
# form should be above fallback
# mapping should be below fallback
# mapping is above form if it wins over fallback
# therefore form predictions are stored separately
form_confidence = None
form_policy_name = None
for policy_name, prediction in predictions.items():
confidence = (max(prediction.probabilities), prediction.priority)
if self._is_form_policy(policy_name):
# store form prediction separately
form_confidence = confidence
form_policy_name = policy_name
elif confidence > best_confidence:
# pick the best policy
best_confidence = confidence
best_policy_name = policy_name
if form_confidence is not None and self._is_not_mapping_policy(
best_policy_name, best_confidence[0]
):
# if mapping didn't win, check form policy predictions
if form_confidence > best_confidence:
best_policy_name = form_policy_name
return predictions[best_policy_name].probabilities, best_policy_name
def _best_policy_prediction(
self,
tracker: DialogueStateTracker,
domain: Domain,
interpreter: NaturalLanguageInterpreter,
) -> Tuple[List[float], Optional[Text]]:
"""Finds the best policy prediction.
Args:
tracker: the :class:`rasa.core.trackers.DialogueStateTracker`
domain: the :class:`rasa.shared.core.domain.Domain`
interpreter: Interpreter which may be used by the policies to create
additional features.
Returns:
probabilities: the list of probabilities for the next actions
policy_name: the name of the picked policy
"""
# find rejected action before running the policies
# because some of them might add events
rejected_action_name = None
last_action_event = next(
(
event
for event in reversed(tracker.events)
if isinstance(event, (ActionExecutionRejected, ActionExecuted))
),
None,
)
if len(tracker.events) > 0 and isinstance(
last_action_event, ActionExecutionRejected
):
rejected_action_name = last_action_event.action_name
predictions = {
f"policy_{i}_{type(p).__name__}": self._get_prediction(
p, tracker, domain, interpreter
)
for i, p in enumerate(self.policies)
}
if rejected_action_name:
logger.debug(
f"Execution of '{rejected_action_name}' was rejected. "
f"Setting its confidence to 0.0 in all predictions."
)
for prediction in predictions.values():
prediction.probabilities[
domain.index_for_action(rejected_action_name)
] = 0.0
return self._pick_best_policy(predictions)
@staticmethod
def _get_prediction(
policy: Policy,
tracker: DialogueStateTracker,
domain: Domain,
interpreter: NaturalLanguageInterpreter,
) -> Prediction:
number_of_arguments_in_rasa_1_0 = 2
arguments = rasa.shared.utils.common.arguments_of(
policy.predict_action_probabilities
)
if (
len(arguments) > number_of_arguments_in_rasa_1_0
and "interpreter" in arguments
):
probabilities = policy.predict_action_probabilities(
tracker, domain, interpreter
)
else:
rasa.shared.utils.io.raise_warning(
"The function `predict_action_probabilities` of "
"the `Policy` interface was changed to support "
"additional parameters. Please make sure to "
"adapt your custom `Policy` implementation.",
category=DeprecationWarning,
)
probabilities = policy.predict_action_probabilities(
tracker, domain, RegexInterpreter()
)
return Prediction(probabilities, policy.priority)
def _fallback_after_listen(
self, domain: Domain, probabilities: List[float], policy_name: Text
) -> Tuple[List[float], Text]:
"""Triggers fallback if `action_listen` is predicted after a user utterance.
This is done on the condition that:
- a fallback policy is present,
- there was just a user message and the predicted
action is action_listen by a policy
other than the MemoizationPolicy
Args:
domain: the :class:`rasa.shared.core.domain.Domain`
probabilities: the list of probabilities for the next actions
policy_name: the name of the picked policy
Returns:
probabilities: the list of probabilities for the next actions
policy_name: the name of the picked policy
"""
fallback_idx_policy = [
(i, p) for i, p in enumerate(self.policies) if isinstance(p, FallbackPolicy)
]
if fallback_idx_policy:
fallback_idx, fallback_policy = fallback_idx_policy[0]
logger.debug(
f"Action 'action_listen' was predicted after "
f"a user message using {policy_name}. Predicting "
f"fallback action: {fallback_policy.fallback_action_name}"
)
probabilities = fallback_policy.fallback_scores(domain)
policy_name = f"policy_{fallback_idx}_{type(fallback_policy).__name__}"
return probabilities, policy_name
def probabilities_using_best_policy(
self,
tracker: DialogueStateTracker,
domain: Domain,
interpreter: NaturalLanguageInterpreter,
**kwargs: Any,
) -> Tuple[List[float], Optional[Text]]:
"""Predicts the next action the bot should take after seeing the tracker.
Picks the best policy prediction based on probabilities and policy priority.
Triggers fallback if `action_listen` is predicted after a user utterance.
Args:
tracker: the :class:`rasa.core.trackers.DialogueStateTracker`
domain: the :class:`rasa.shared.core.domain.Domain`
interpreter: Interpreter which may be used by the policies to create
additional features.
Returns:
best_probabilities: the list of probabilities for the next actions
best_policy_name: the name of the picked policy
"""
probabilities, policy_name = self._best_policy_prediction(
tracker, domain, interpreter
)
if (
tracker.latest_action_name == ACTION_LISTEN_NAME
and probabilities is not None
and probabilities.index(max(probabilities))
== domain.index_for_action(ACTION_LISTEN_NAME)
and self.is_not_memo_policy(policy_name, max(probabilities))
):
probabilities, policy_name = self._fallback_after_listen(
domain, probabilities, policy_name
)
logger.debug(f"Predicted next action using {policy_name}")
return probabilities, policy_name
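# Module-level helper: raises InvalidDomain when the domain defines forms but the ensemble
# contains neither a FormPolicy nor a RulePolicy.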
def _check_policy_for_forms_available(
domain: Domain, ensemble: Optional["PolicyEnsemble"]
) -> None:
if not ensemble:
return
from rasa.core.policies.form_policy import FormPolicy
suited_policies_for_forms = (FormPolicy, RulePolicy)
has_policy_for_forms = ensemble is not None and any(
isinstance(policy, suited_policies_for_forms) for policy in ensemble.policies
)
if domain.form_names and not has_policy_for_forms:
raise InvalidDomain(
"You have defined a form action, but haven't added the "
"FormPolicy to your policy ensemble. Either remove all "
"forms from your domain or exclude the FormPolicy from your "
"policy configuration."
)
class InvalidPolicyConfig(RasaException):
"""Exception that can be raised when policy config is not valid."""
pass
| 37.868966 | 96 | 0.633691 |
8dbbc2dd665f1b60a391ff1fc62a57eff5fb2484 | 74,217 | py | Python | robot/Cumulus/resources/NPSP.py | sungysang/NPSP | 172c51f6c1508d0b6463bc8f45749d5013f4cd79 | ["BSD-3-Clause"] | null | null | null | robot/Cumulus/resources/NPSP.py | sungysang/NPSP | 172c51f6c1508d0b6463bc8f45749d5013f4cd79 | ["BSD-3-Clause"] | null | null | null | robot/Cumulus/resources/NPSP.py | sungysang/NPSP | 172c51f6c1508d0b6463bc8f45749d5013f4cd79 | ["BSD-3-Clause"] | null | null | null |
import logging
import warnings
import time
import random
import string
from datetime import datetime
from datetime import timedelta
from robot.libraries.BuiltIn import RobotNotRunningError
from selenium.common.exceptions import ElementNotInteractableException
from selenium.common.exceptions import StaleElementReferenceException
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoSuchWindowException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common.keys import Keys
from SeleniumLibrary.errors import ElementNotFound
from simple_salesforce import SalesforceMalformedRequest
from simple_salesforce import SalesforceResourceNotFound
from selenium.webdriver import ActionChains
from cumulusci.robotframework.utils import selenium_retry
from cumulusci.robotframework.utils import capture_screenshot_on_error
from email.mime import text
from cumulusci.tasks.apex.anon import AnonymousApexTask
from cumulusci.core.config import TaskConfig
from tasks.salesforce_robot_library_base import SalesforceRobotLibraryBase
from BaseObjects import BaseNPSPPage
from locators_48 import npsp_lex_locators as locators_48
from locators_47 import npsp_lex_locators as locators_47
locators_by_api_version = {
48.0: locators_48, # spring '20
47.0: locators_47, # winter '20
}
# will get populated in _init_locators
npsp_lex_locators = {}
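# NPSP: Robot Framework keyword library for the NPSP test suite. Locators are selected per
# Salesforce API version at init time and merged into the module-level npsp_lex_locators dict.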
@selenium_retry
class NPSP(BaseNPSPPage,SalesforceRobotLibraryBase):
ROBOT_LIBRARY_SCOPE = 'GLOBAL'
ROBOT_LIBRARY_VERSION = 1.0
def __init__(self, debug=False):
self.debug = debug
self.current_page = None
self._session_records = []
self.val=0
self.payment_list= []
# Turn off info logging of all http requests
logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.WARN)
self._init_locators()
def _init_locators(self):
try:
client = self.cumulusci.tooling
response = client._call_salesforce(
'GET', 'https://{}/services/data'.format(client.sf_instance))
self.latest_api_version = float(response.json()[-1]['version'])
if not self.latest_api_version in locators_by_api_version:
warnings.warn("Could not find locator library for API %d" % self.latest_api_version)
self.latest_api_version = max(locators_by_api_version.keys())
except RobotNotRunningError:
# We aren't part of a running test, likely because we are
# generating keyword documentation. If that's the case, assume
# the latest supported version
self.latest_api_version = max(locators_by_api_version.keys())
locators = locators_by_api_version[self.latest_api_version]
npsp_lex_locators.update(locators)
def get_namespace_prefix(self, name):
parts = name.split('__')
if parts[-1] == 'c':
parts = parts[:-1]
if len(parts) > 1:
return parts[0] + '__'
else:
return ''
def get_npsp_namespace_prefix(self):
if not hasattr(self.cumulusci, '_describe_result'):
self.cumulusci._describe_result = self.cumulusci.sf.describe()
objects = self.cumulusci._describe_result['sobjects']
level_object = [o for o in objects if o['label'] == 'Level'][0]
return self.get_namespace_prefix(level_object['name'])
def populate_campaign(self,loc,value):
"""This is a temporary keyword added to address difference in behaviour between summer19 and winter20 release"""
self.search_field_by_value(loc, value)
print(self.latest_api_version)
self.selenium.click_link(value)
def click_record_button(self, title):
""" Pass title of the button to click the buttons on the records edit page. Usually save and cancel are the buttons seen.
"""
locator = npsp_lex_locators['record']['button'].format(title)
self.selenium.set_focus_to_element(locator)
button = self.selenium.get_webelement(locator)
button.click()
time.sleep(5)
@capture_screenshot_on_error
def select_tab(self, title):
""" Switch between different tabs on a record page like Related, Details, News, Activity and Chatter
Pass title of the tab
"""
tab_found = False
locators = npsp_lex_locators["tabs"].values()
for i in locators:
locator = i.format(title)
if self.check_if_element_exists(locator):
print(locator)
buttons = self.selenium.get_webelements(locator)
for button in buttons:
print(button)
if button.is_displayed():
print("button displayed is {}".format(button))
self.salesforce._focus(button)
button.click()
time.sleep(5)
tab_found = True
break
assert tab_found, "tab not found"
def click_special_related_list_button(self, heading, button_title):
""" To Click on a related list button which would open up a new lightning page rather than a modal.
Pass the list name and button name"""
self.salesforce.load_related_list(heading)
b_found = False
locator = npsp_lex_locators["record"]["related"]["button"].format(
heading, button_title
)
buttons = self.selenium.driver.find_elements_by_xpath(locator)
for button in buttons:
if button.is_displayed():
self.selenium.driver.execute_script('arguments[0].click()', button)
b_found = True
break
assert b_found, "{} related list with button {} not found.".format(heading, button_title)
@capture_screenshot_on_error
def click_related_list_dd_button(self, heading, dd_title, button_title):
""" To Click on a related list dropdown button.
Pass the list name, dd name and button name"""
self.salesforce.load_related_list(heading)
locator = npsp_lex_locators["record"]["related"]["button"].format(heading, dd_title)
element = self.selenium.driver.find_element_by_xpath(locator)
self.selenium.driver.execute_script('arguments[0].click()', element)
time.sleep(1)
loc=npsp_lex_locators["record"]["related"]["dd-link"].format(button_title)
self.selenium.wait_until_element_is_visible(loc)
element = self.selenium.driver.find_element_by_xpath(loc)
self.selenium.driver.execute_script('arguments[0].click()', element)
@capture_screenshot_on_error
def click_flexipage_dropdown(self, title,value):
"""Click the lightning dropdown to open it and select value"""
locator = npsp_lex_locators['record']['flexipage-list'].format(title)
self.selenium.wait_until_page_contains_element(locator)
self.selenium.get_webelement(locator).click()
self.wait_for_locator('flexipage-popup')
option=npsp_lex_locators['span'].format(value)
self.selenium.click_element(option)
def open_date_picker(self, title):
locator = npsp_lex_locators['record']['list'].format(title)
self.selenium.set_focus_to_element(locator)
self.selenium.get_webelement(locator).click()
def pick_date(self, value):
"""To pick a date from the date picker"""
locator = npsp_lex_locators['record']['datepicker'].format(value)
self.selenium.set_focus_to_element(locator)
self.selenium.get_webelement(locator).click()
def change_month(self, value):
"""To pick month in the date picker"""
locator = npsp_lex_locators['record']['month_pick'].format(value)
self.selenium.set_focus_to_element(locator)
self.selenium.get_webelement(locator).click()
# def select_row(self,value):
# """To select a row on object page based on name and open the dropdown"""
# drop_down = npsp_lex_locators['locating_delete_dropdown'].format(value)
# self.selenium.get_webelement(drop_down).click()
# #self.selenium.get_webelement(drop_down).click()
# def select_row(self,value):
# """To select a row on object page based on name and open the dropdown"""
# locator = npsp_lex_locators['select_name'].format(value)
# self.selenium.set_focus_to_element(locator)
# drop_down = npsp_lex_locators['locating_delete_dropdown'].format(value)
# time.sleep(1)
# return drop_down
def select_row(self, value):
"""To select a row on object page based on name and open the dropdown"""
locators = npsp_lex_locators['name']
list_ele = self.selenium.get_webelements(locators)
for index, element in enumerate(list_ele):
if element.text == value:
drop_down = npsp_lex_locators['locate_dropdown'].format(index + 1)
self.selenium.get_webelement(drop_down).click()
self.selenium.wait_until_page_contains("Delete")
def select_related_row(self, value):
"""To select a row on object page based on name and open the dropdown"""
locators = npsp_lex_locators['related_name']
list_ele = self.selenium.get_webelements(locators)
for index, element in enumerate(list_ele):
if element.text == value:
drop_down = npsp_lex_locators['rel_loc_dd'].format(index + 1)
self.selenium.get_webelement(drop_down).click()
time.sleep(1)
# def select_row(self, value ):
# """To select a row on object page based on name and open the dropdown"""
# locators = npsp_lex_locators['name']
# list_ele = self.selenium.get_webelements(locators)
# index= 1
# for locator in list_ele:
# global index
# if locator.text != value:
# index = index+1
# else:
# drop_down = npsp_lex_locators['locate_dropdown'].format(index)
# self.selenium.get_webelement(drop_down).click()
# self.selenium.get_webelement(drop_down).click()
# def select_related_row(self, value ):
# """To select row from a related list based on name and open the dropdown"""
# locators = npsp_lex_locators['related_name']
# list_ele = self.selenium.get_webelements(locators)
# index= 1
# for locator in list_ele:
# global index
# if locator.text != value:
# index = index+1
# else:
# drop_down = npsp_lex_locators['rel_loc_dd'].format(index)
# self.selenium.get_webelement(drop_down).click()
# self.selenium.get_webelement(drop_down).click()
def click_id(self, title):
locator=npsp_lex_locators['aff_id'].format(title)
self.selenium.get_webelement(locator).click()
def select_object_dropdown(self):
locator=npsp_lex_locators['object_dd']
self.selenium.get_webelement(locator).click()
def check_status(self, acc_name):
aff_list = npsp_lex_locators['aff_status'].format(acc_name)
aff_list_text=self.selenium.get_webelement(aff_list).text
self.aff_id=npsp_lex_locators['aff_id'].format(acc_name)
self.aff_id_text=self.selenium.get_webelement(self.aff_id).text
return self.aff_id_text,aff_list_text
def get_id(self):
locator=npsp_lex_locators['click_aff_id'].format(self.aff_id_text)
self.selenium.get_webelement(locator).click()
# @capture_screenshot_on_error
# def navigate_to_and_validate_field_value(self, field,status,value,section=None):
# """If status is 'contains' then the specified value should be present in the field
# 'does not contain' then the specified value should not be present in the field
# """
# if section is not None:
# section="text:"+section
# self.selenium.scroll_element_into_view(section)
# list_found = False
# locators = npsp_lex_locators["confirm"].values()
# for i in locators:
# locator = i.format(field,value)
# if self.check_if_element_exists(locator):
# print(f"element exists {locator}")
# actual_value=self.selenium.get_webelement(locator).text
# print(f"actual value is {actual_value}")
# if status == "contains":
# assert value == actual_value, "Expected value to be {} but found {}".format(value, actual_value)
# elif status == "does not contain":
# assert value != actual_value, "Expected value {} and actual value {} should not match".format(value, actual_value)
# list_found = True
# break
#
# assert list_found, "locator not found"
@selenium_retry
@capture_screenshot_on_error
def navigate_to_and_validate_field_value(self, field,status,value,section=None):
"""If status is 'contains' then the specified value should be present in the field
'does not contain' then the specified value should not be present in the field
"""
if section is not None:
section="text:"+section
self.selenium.scroll_element_into_view(section)
list_found = False
locators = npsp_lex_locators["confirm"].values()
if status == "contains":
for i in locators:
print("inside for loop")
locator = i.format(field,value)
if self.check_if_element_exists(locator):
print(f"element exists {locator}")
actual_value=self.selenium.get_webelement(locator).text
print(f"actual value is {actual_value}")
assert value == actual_value, "Expected {} value to be {} but found {}".format(field,value, actual_value)
list_found=True
break
if status == "does not contain":
for i in locators:
locator = i.format(field,value)
if self.check_if_element_exists(locator):
print(f"locator is {locator}")
raise Exception(f"{field} should not contain value {value}")
list_found = True
assert list_found, "locator not found"
@capture_screenshot_on_error
def verify_record(self, name):
""" Checks for the record in the object page and returns true if found else returns false
"""
locator=npsp_lex_locators['account_list'].format(name)
self.selenium.wait_until_page_contains_element(locator, error="could not find "+name+" on the page")
def select_option(self, name):
"""selects various options in Contact>New opportunity page using name
"""
locator=npsp_lex_locators['dd_options'].format(name)
self.selenium.get_webelement(locator).click()
def verify_related_list_items(self,list_name,value):
"""Verifies a specified related list has specified value(doesn't work if the list is in table format)"""
self.salesforce.load_related_list(list_name)
locator=npsp_lex_locators['related_list_items'].format(list_name,value)
self.selenium.page_should_contain_element(locator)
def click_span_button(self,title):
"""clicks on the button under span tag"""
locator=npsp_lex_locators['span_button'].format(title)
self.selenium.get_webelement(locator).click()
def header_field_value(self,title,value):
"""Validates if the specified header field has specified value"""
locator= npsp_lex_locators['header_field_value'].format(title,value)
self.selenium.page_should_contain_element(locator)
def verify_header(self,value):
"""Validates header value"""
locator= npsp_lex_locators['header'].format(value)
self.selenium.page_should_contain_element(locator)
def verify_related_list(self,list_name,status,name):
"""If status is 'contains' then the specified related list should contain name
'does not contain' then the specified related list should not contain name"""
locator = self.salesforce.get_locator('record.related.link', list_name, name)
if status=="contains":
self.selenium.page_should_contain_element(locator)
elif status=="does not contain":
self.selenium.page_should_not_contain_element(locator)
def fill_address_form(self, **kwargs):
"""Validates if the affiliated contacts have the added contact details enter Y for positive case and N for negative case"""
for label, value in kwargs.items():
locator= npsp_lex_locators['manage_hh_page']['address'].format(label,value)
if label=="Street":
locator = locator+"textarea"
self.selenium.get_webelement(locator).send_keys(value)
else:
locator = locator+"input"
self.selenium.get_webelement(locator).send_keys(value)
def fill_bge_form(self, **kwargs):
for label, value in kwargs.items():
if label=="Batch Description" or label == "custom_textarea":
locator= npsp_lex_locators['bge']['field-text'].format(label,value)
self.selenium.click_element(locator)
self.salesforce._populate_field(locator, value)
else:
locator= npsp_lex_locators['bge']['field-input'].format(label,value)
self.selenium.click_element(locator)
self.salesforce._populate_field(locator, value)
def verify_address_details(self,field,value,**kwargs):
"""Validates if the details page address field has specified value
Field is the The address type field we are trying to match to the Expected address Map that is sent through Kwargs"""
locator= npsp_lex_locators['detail_page']['address'].format(field)
street, city, country = self.selenium.get_webelements(locator)
status = None
for key, value in kwargs.items():
if street.text == kwargs.get("street") and city.text == kwargs.get("city") and country.text == kwargs.get("country"):
status = "pass"
else:
status = "fail"
if value.lower() == "contains":
assert status == "pass", "Expected address {} , {}, {} does not match".format(street.text,city.text,country.text)
def validate_checkboxes(self,name,checkbox_title):
"""validates all 3 checkboxes for contact on manage hh page and returns locator for the checkbox thats required"""
locator=npsp_lex_locators['manage_hh_page']['mhh_checkbox'].format(name,"fauxCBInformal")
self.selenium.page_should_contain_element(locator)
locator=npsp_lex_locators['manage_hh_page']['mhh_checkbox'].format(name,"fauxCBFormal")
self.selenium.page_should_contain_element(locator)
locator=npsp_lex_locators['manage_hh_page']['mhh_checkbox'].format(name,"fauxCBExName")
self.selenium.page_should_contain_element(locator)
if checkbox_title == "Informal Greeting":
locator=npsp_lex_locators['manage_hh_page']['mhh_checkbox'].format(name,"fauxCBInformal")
elif checkbox_title == "Formal Greeting":
locator=npsp_lex_locators['manage_hh_page']['mhh_checkbox'].format(name,"fauxCBFormal")
elif checkbox_title.capitalize() == "Household Name":
locator=npsp_lex_locators['manage_hh_page']['mhh_checkbox'].format(name,"fauxCBExName")
return locator
def check_field_value(self, title, value):
"""checks value of a field in details page(section without header)"""
fv_found=False
locators = npsp_lex_locators['detail_page']["field-value"].values()
for i in locators:
locator = i.format(title,value)
if self.check_if_element_exists(locator):
self.selenium.page_should_contain_element(locator)
fv_found = True
break
assert fv_found, "{} with {} not found".format(title,value)
def click_managehh_button(self,title):
"""clicks on the new contact button on manage hh page"""
locator=npsp_lex_locators['manage_hh_page']['button'].format(title)
self.selenium.get_webelement(locator).click()
def click_managehh_link(self,title):
locator=npsp_lex_locators['manage_hh_page']['address_link'].format(title)
self.selenium.get_webelement(locator).click()
def set_checkbutton_to(self,title,status):
"""If status is 'checked' then checks the box if its not already checked. Prints a warning msg if already checked
'unchecked' then unchecks the box if its not already checked. Prints a warning msg if already unchecked
"""
cb_found=False
locators = npsp_lex_locators["checkbox"].values()
for i in locators:
locator = i.format(title)
if self.check_if_element_exists(locator):
checkbox=self.selenium.get_webelement(locator)
if (status == 'checked' and checkbox.is_selected() == False) or (status == 'unchecked' and checkbox.is_selected() == True):
self.selenium.click_element(checkbox)
else:
self.builtin.log("This checkbox is already in the expected status", "WARN")
cb_found = True
break
assert cb_found, "Checkbox not found"
def select_bge_checkbox(self,title):
locator=npsp_lex_locators['bge']['checkbox'].format(title)
self.selenium.get_webelement(locator).click()
def populate_modal_field(self, title, value):
locator=npsp_lex_locators['modal_field'].format(title,value)
self.salesforce._populate_field(locator, value)
def populate_field_with_id(self,id,value):
locator=npsp_lex_locators['id'].format(id)
if value == 'null':
field = self.selenium.get_webelement(locator)
self.salesforce._clear(field)
else :
self.salesforce._populate_field(locator, value)
@capture_screenshot_on_error
def validate_related_record_count(self,title,value):
self.select_tab("Related")
self.salesforce.load_related_list(title)
locator=npsp_lex_locators['record']['related']['check_occurrence'].format(title,value)
actual_value=self.selenium.get_webelement(locator).text
exp_value="("+value+")"
assert exp_value == actual_value, "Expected value to be {} but found {}".format(
exp_value, actual_value
)
def verify_occurence(self,title,value):
self.salesforce.load_related_list(title)
time.sleep(1)
locator=npsp_lex_locators['record']['related']['check_occurrence'].format(title,value)
actual_value=self.selenium.get_webelement(locator).text
exp_value="("+value+")"
assert exp_value == actual_value, "Expected value to be {} but found {}".format(
exp_value, actual_value
)
def check_record_related_item(self,title,value):
"""Verifies that the given value is displayed under the related list identified by title on a record view page"""
self.salesforce.load_related_list(title)
locator=npsp_lex_locators['record']['related']['item'].format(title,value)
self.selenium.wait_until_page_contains_element(locator)
actual_value=self.selenium.get_webelement(locator).text
assert value == actual_value, "Expected value to be {} but found {}".format(
value, actual_value
)
def select_related_dropdown(self,title):
"""Clicks on the dropdown next to Related List"""
locator=npsp_lex_locators['record']['related']['drop-down'].format(title)
self.selenium.get_webelement(locator).click()
def get_header_date_value(self,title):
"""Validates if the specified header field has specified value"""
locator= npsp_lex_locators['header_datepicker'].format(title)
date=self.selenium.get_webelement(locator).text
return date
def get_main_header(self):
header_found = False
locators = npsp_lex_locators["main-header"].values()
for locator in locators:
if self.check_if_element_exists(locator):
header = self.selenium.get_webelement(locator).text
header_found = True
return header
assert header_found, "Header with the provided locator not found"
def verify_contact_role(self,name,role):
"""verifies the contact role on opportunity page"""
locator=npsp_lex_locators['opportunity']['contact_role'].format(name,role)
self.selenium.page_should_contain_element(locator)
def select_relatedlist(self,title):
"""click on the related list to open it"""
locator=npsp_lex_locators['record']['related']['title'].format(title)
element = self.selenium.driver.find_element_by_xpath(locator)
self.selenium.driver.execute_script('arguments[0].click()', element)
def verify_related_list_field_values(self, listname=None, **kwargs):
"""verifies the values in the related list objects page"""
if listname is not None:
self.selenium.wait_until_page_contains(listname)
self.select_relatedlist(listname)
for name, value in kwargs.items():
locator= npsp_lex_locators['record']['related']['field_value'].format(name,value)
self.selenium.wait_until_page_contains_element(locator,error="Could not find the "+ name +" with value " + value + " on the page")
def verify_related_object_field_values(self, rel_object,**kwargs):
"""verifies the specified field,value pairs in the related object page (table format)"""
self.salesforce.load_related_list(rel_object)
self.select_relatedlist(rel_object)
for name, value in kwargs.items():
locator= npsp_lex_locators['object']['field-value'].format(name,value)
self.selenium.wait_until_page_contains_element(locator,error="Could not find the "+ name +" with value " + value + " on the page")
def page_contains_record(self,title):
"""Validates if the specified record is present on the page"""
locator= npsp_lex_locators['object']['record'].format(title)
self.selenium.wait_until_page_does_not_contain_element(locator)
def click_special_object_button(self, title):
"""Clicks a button in an object's actions but doesn't wait for a model to open"""
locator = npsp_lex_locators['object']['button'].format(title)
self.selenium.wait_until_element_is_visible(locator,error="Button "+ title +" not found on the page")
self.selenium.get_webelement(locator).click()
def check_related_list_values(self,list_name,*args):
"""Verifies the value of custom related list"""
self.salesforce.load_related_list(list_name)
for value in args:
locator = npsp_lex_locators['check_related_list_item'].format(list_name,value)
self.selenium.page_should_contain_element(locator)
def check_activity_tasks(self, *args):
"""verifies that the specified tasks are present under activity tab """
for value in args:
locator = npsp_lex_locators['engagement_plan']['tasks'].format(value)
self.selenium.page_should_contain_element(locator)
def select_app_launcher_link(self,title):
locator = npsp_lex_locators['app_launcher']['select-option'].format(title)
self.selenium.get_webelement(locator).click()
time.sleep(1)
def click_on_first_record(self):
"""selects first record of the page"""
locator = npsp_lex_locators['select_one_record']
self.selenium.get_webelement(locator).click()
time.sleep(1)
def select_search(self, index, value):
""""""
locator = npsp_lex_locators["click_search"].format(index)
loc_value = self.selenium.get_webelement(locator).send_keys(value)
loc = self.selenium.get_webelement(locator)
#loc.send_keys(Keys.TAB+ Keys.RETURN)
time.sleep(1)
def enter_gau(self, value):
id = "lksrch"
locator = npsp_lex_locators["id"].format(id)
loc = self.selenium.get_webelement(locator)
loc.send_keys(value)
self.selenium.get_webelement("//*[@title='Go!']").click()
time.sleep(1)
def click_save(self, page):
if page== "GAU":
id="j_id0:theForm:j_id9:j_id10:saveBTN"
locator = npsp_lex_locators["id"].format(id)
self.selenium.get_webelement(locator).click()
def enter_payment_schedule(self, *args):
"""Enter values into corresponding fields in Levels page"""
#if name == "Payments":
#id = ["paymentCount","intervals","intervalunits"]
id = ["paymentCount","vfForm:intervalnumber","intervalunits"]
for i in range(len(args)):
locator = npsp_lex_locators['id'].format(id[i])
loc = self.selenium.get_webelement(locator)
self.selenium.set_focus_to_element(locator)
self.selenium.select_from_list_by_label(loc,args[i])
time.sleep(2)
def verify_payment_split(self, amount, no_payments):
#loc = "//input[@value= '{}']"
input_loc = npsp_lex_locators['button']
values = int(amount)/int(no_payments)
values_1 = "{:0.2f}".format(values)
self.val = str(values_1)
input_field = input_loc.format(self.val)
list_payments = self.selenium.get_webelements(input_field)
self.t_loc=len(list_payments)
if self.t_loc == int(no_payments):
for i in list_payments:
self.selenium.page_should_contain_element(i)
actual_payments = str(self.t_loc)
else:
actual_payments = str(self.t_loc)
assert no_payments == actual_payments, "Expected {} number of payment but found {}".format(no_payments,actual_payments)
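    # Worked example (added for illustration): verify_payment_split("100", "4")
    # computes 100 / 4 and formats it as "25.00"; the keyword then expects
    # exactly four elements on the page carrying that per-payment value.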
def verify_date_split(self,date, no_payments, interval):
ddate=[]
mm, dd, yyyy = date.split("/")
mm, dd, yyyy = int(mm), int(dd), int(yyyy)
locator = npsp_lex_locators['payments']['date_loc'].format(date)
t_dates = self.selenium.get_webelement(locator)
self.selenium.page_should_contain_element(t_dates)
# for i in range(int(no_payments) + 1):
if mm <= 12:
date_list = [mm, dd, yyyy]
dates = list(map(str, date_list))
new_date = "/".join(dates)
mm = mm + int(interval)
dates = list(map(str, date_list))
#if new_date not in t_dates:
date_locator = npsp_lex_locators['payments']['date_loc'].format(new_date)
t_dates = self.selenium.get_webelement(date_locator)
self.selenium.page_should_contain_element(t_dates)
elif mm > 12:
yyyy = yyyy + 1
mm = (mm + int(interval))-(12+int(interval))
#return "pass"
# else:
# return "fail"
def click_viewall_related_list (self,title):
"""clicks on the View All link under the Related List"""
locator=npsp_lex_locators['record']['related']['viewall'].format(title)
element = self.selenium.driver.find_element_by_xpath(locator)
self.selenium.driver.execute_script('arguments[0].click()', element)
def click_button_with_value (self,title):
"""clicks on the button on the payments page"""
locator=npsp_lex_locators['button'].format(title)
self.selenium.get_webelement(locator).click()
def verify_details(self, **kwargs):
"""To verify no. of records with given same column values
key is value in a table column, value is expected count of rows with that value
"""
for key, value in kwargs.items():
locators = npsp_lex_locators['payments']['pays'].format(key)
list_ele = self.selenium.get_webelements(locators)
p_count=len(list_ele)
assert p_count == int(value), "Expected {} payment with status {} but found {}".format(value, key, p_count)
def verify_allocations(self,header, **kwargs):
"""To verify allocations, header is related list
key is value in 1st td element, value is value in 2nd element
"""
self.salesforce.load_related_list(header)
for key, value in kwargs.items():
locator = npsp_lex_locators['record']['related']['allocations'].format(header,key,value)
self.selenium.wait_until_page_contains_element(locator,error="Expected {} allocation of {} was not found".format(key,value))
# ele = self.selenium.get_webelement(locator).text
# assert ele == value, "Expected {} allocation to be {} but found {}".format(key,value,ele)
def verify_occurrence_payments(self,title,value=None):
""""""
locator=npsp_lex_locators['payments']['check_occurrence'].format(title)
occ_value=self.selenium.get_webelement(locator).text
return occ_value
def verify_payment(self):
locators=npsp_lex_locators['payments']['no_payments']
list_ele=self.selenium.get_webelements(locators)
l_no_payments = len(list_ele)
for element in list_ele:
payment_com=self.selenium.get_webelement(element).text
cc=payment_com.replace("$","")
if cc == str(self.val) and self.t_loc == l_no_payments :
result = 'pass'
else:
result = "fail"
assert result == 'pass', "Expected payment value not present."
def select_value_from_bge_dd(self, list_name,value):
list_found = False
locators = npsp_lex_locators["bge-lists"].values()
for i in locators:
locator = i.format(list_name)
if self.check_if_element_exists(locator):
loc=self.selenium.get_webelement(locator)
self.selenium.set_focus_to_element(locator)
self.selenium.select_from_list_by_label(loc,value)
list_found = True
break
assert list_found, "Dropdown with the provided locator not found"
def check_if_element_exists(self, xpath):
elements =self.selenium.get_element_count(xpath)
return True if elements > 0 else False
def select_multiple_values_from_list(self,list_name,*args):
"""Pass the list name and values to be selected from the dropdown. Please note that this doesn't unselect the existing values"""
locator = npsp_lex_locators['npsp_settings']['multi_list'].format(list_name)
loc = self.selenium.get_webelement(locator)
self.selenium.set_focus_to_element(locator)
self.selenium.select_from_list_by_label(loc,*args)
def choose_frame(self, value):
"""Returns the first displayed iframe on the page with the given name or title"""
locator = npsp_lex_locators['frame_new'].format(value,value)
frames = self.selenium.get_webelements(locator)
self.selenium.capture_page_screenshot()
print(f'list of frames {frames}')
for frame in frames:
print(f'inside for loop for {frame}')
self.selenium.capture_page_screenshot()
if frame.is_displayed():
try:
print("inside try")
self.selenium.select_frame(frame)
except NoSuchWindowException:
print("inside except")
self.builtin.log("caught NoSuchWindowException;trying gain..","WARN")
time.sleep(.5)
self.selenium.select_frame(frame)
return frame
raise Exception('unable to find visible iframe with title "{}"'.format(value))
@capture_screenshot_on_error
def select_frame_and_click_element(self,iframe,path, *args, **kwargs):
"""Waits for the iframe and Selects the first displayed frame with given name or title and scrolls to element identified by locator and clicks """
self.wait_for_locator('frame_new',iframe,iframe)
self.choose_frame(iframe)
loc = self.get_npsp_locator(path, *args, **kwargs)
self.selenium.wait_until_element_is_visible(loc, timeout=60)
self.selenium.scroll_element_into_view(loc)
self.selenium.click_element(loc)
def get_npsp_locator(self, path, *args, **kwargs):
""" Returns a rendered locator string from the npsp_lex_locators
dictionary. This can be useful if you want to use an element in
a different way than the built in keywords allow.
"""
locator = npsp_lex_locators
for key in path.split('.'):
locator = locator[key]
main_loc = locator.format(*args, **kwargs)
return main_loc
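    # Illustrative sketch (added comment): the path string is split on "." to
    # walk the npsp_lex_locators dictionary, and the template found there is
    # rendered with format(). For example, get_npsp_locator(
    # "record.related.link", "Payments", "Pledged") fills both placeholders of
    # the related-list link locator; the argument values here are illustrative.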
def wait_for_locator(self, path, *args, **kwargs):
"""Waits for 60 sec for the specified locator"""
main_loc = self.get_npsp_locator(path,*args, **kwargs)
self.selenium.wait_until_element_is_visible(main_loc, timeout=60)
def wait_for_locator_is_not_visible(self, path, *args, **kwargs):
"""Waits for 60 sec for the specified locator"""
main_loc = self.get_npsp_locator(path,*args, **kwargs)
self.selenium.wait_until_element_is_not_visible(main_loc, timeout=60)
@capture_screenshot_on_error
def wait_for_batch_to_complete(self, path, *args, **kwargs):
"""Checks every 15 secs for upto 3.5mins for batch with given status
"""
i = 0
locator = self.get_npsp_locator(path,*args, **kwargs)
while True:
i += 1
if i > 14:
self.selenium.capture_page_screenshot()
raise AssertionError(
"Timed out waiting for batch with locator {} to load.".format(locator)
)
else:
try:
self.selenium.wait_until_element_is_visible(locator)
break
except Exception:
time.sleep(15)
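    # Timing note (added comment): the loop above retries up to 14 times,
    # sleeping 15 seconds after each failed visibility check, which gives the
    # batch roughly 3.5 minutes to appear before an AssertionError is raised.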
@capture_screenshot_on_error
def wait_for_batch_to_process(self, batch,status):
"""Checks every 30 secs for upto 9mins for batch with given status
"""
i = 0
sec=0
expected = npsp_lex_locators['batch_status'].format(batch,status)
error = npsp_lex_locators['batch_status'].format(batch,"Errors")
while True:
i += 1
if i > 18:
self.selenium.capture_page_screenshot()
raise AssertionError("Timed out waiting for batch {} with status {} to load.".format(batch,status))
elif self.check_if_element_exists(error):
if status != "Errors":
raise AssertionError("Batch {} failed with Error".format(batch))
break
else:
try:
self.selenium.wait_until_element_is_visible(expected)
break
except Exception:
sec= sec+30
print("Batch processing is not finished with {} status in {} seconds".format(status,sec))
def get_npsp_settings_value(self,field_name):
locator = npsp_lex_locators['npsp_settings']['field_value'].format(field_name)
loc = self.selenium.get_webelement(locator).text
return loc
def verify_payment_details(self, numpayments):
"""Gets the payment details from the UI and compares with the expected number of payments"""
locator = "//tbody/tr/td[2]/span/span"
locs1 = self.selenium.get_webelements(locator)
locator2 = "//tbody/tr/td[3]/span/span"
locs2 = self.selenium.get_webelements(locator2)
for i, j in list(zip(locs1, locs2)):
#loc1_vaue = self.selenium.get_webelemt(i).text
#loc2_vaue = self.selenium.get_webelemt(j).text
if i.text == "Pledged" and j.text == "$100.00":
pass
else:
return "fail"
self.builtin.should_be_equal_as_strings(len(locs1), numpayments)
# def verify_opportunities(self, len_value):
# locator = "//tbody/tr[12]/th"
# s = self.selenium.get_webelement(locator).text
# #return s
# strip_list = s.split(" ")
# date = strip_list[-1]
# date = date.split("/")
# date = list(map(int, date))
# mm, dd, yyyy = date
# for _ in range(int(len_value)):
# if mm == 12:
# mm = 1
# yyyy = yyyy + 1
# date = [mm, dd, yyyy]
# date = list(map(str, date))
# date = "/".join(date)
# loctor_contains = "//tbody//a[contains(@title , '{}')]".format(date)
# self.selenium.page_should_contain_element(loctor_contains)
# else:
# mm = mm + 1
# date = [mm, dd, yyyy]
# date = list(map(str, date))
# date = "/".join(date)
# loctor_contains = "//tbody//a[contains(@title , '{}')]".format(date)
# self.selenium.page_should_contain_element(loctor_contains)
def click_object_manager_button(self,title):
"""clicks on the buttons in object manager"""
locator=npsp_lex_locators['object_manager']['button'].format(title)
self.selenium.get_webelement(locator).click()
def click_bge_button(self,text):
"""clicks on buttons for BGE"""
locator=npsp_lex_locators['bge']['button'].format(text)
time.sleep(1)
element = self.selenium.driver.find_element_by_xpath(locator)
self.selenium.driver.execute_script('arguments[0].click()', element)
def verify_title(self,title,value):
""""""
locator=npsp_lex_locators['bge']['title'].format(title,value)
actual_value=self.selenium.get_webelement(locator).text
assert value == actual_value, "Expected value to be {} but found {}".format(
value, actual_value
)
def page_scroll_to_locator(self, path, *args, **kwargs):
locator = self.get_npsp_locator(path, *args, **kwargs)
self.selenium.scroll_element_into_view(locator)
def get_bge_card_header(self,title):
"""Validates if the specific header field has specified value"""
locator= npsp_lex_locators['bge']['card-header'].format(title)
id=self.selenium.get_webelement(locator).text
return id
def click_bge_edit_button(self, title):
"""clicks the button in the table by using name mentioned in data-label"""
locator=npsp_lex_locators['bge']['edit_button'].format(title)
#self.selenium.get_webelement(locator).click()
self.selenium.click_button(locator)
def populate_bge_edit_field(self, title, value):
"""Clears the data in input field and enters the value specified """
locator=npsp_lex_locators['bge']['edit_field'].format(title)
field=self.salesforce._populate_field(locator, value)
def verify_row_count(self,value):
"""verifies if actual row count matches with expected value"""
locator=npsp_lex_locators['bge']['count']
actual_value=self.selenium.get_webelements(locator)
count=len(actual_value)
assert int(value) == count, "Expected rows to be {} but found {}".format(
value, count
)
def return_locator_value(self, path, *args, **kwargs):
"""Returns the value pointed by the specified locator"""
locator=self.get_npsp_locator(path, *args, **kwargs)
self.selenium.wait_until_page_contains_element(locator)
value=self.selenium.get_webelement(locator).text
return value
def return_list(self, path, *args, **kwargs):
"""Returns all the values pointed by the specified locator"""
locator=self.get_npsp_locator(path, *args, **kwargs)
values=self.selenium.get_webelements(locator)
return [i.text for i in values]
def select_bge_row(self, value):
"""To select a row on object page based on name and open the dropdown"""
locators = npsp_lex_locators['bge']['name']
list_ele = self.selenium.get_webelements(locators)
for index, element in enumerate(list_ele):
if element.text == value:
drop_down = npsp_lex_locators['bge']['locate_dropdown'].format(index+1)
self.selenium.click_element(drop_down)
time.sleep(1)
def click_link_with_text(self, text):
self.builtin.log("This test is using the 'Click link with text' workaround", "WARN")
locator = npsp_lex_locators['link-text'].format(text)
self.selenium.wait_until_page_contains_element(locator)
element = self.selenium.driver.find_element_by_xpath(locator)
self.selenium.driver.execute_script('arguments[0].click()', element)
def verify_expected_batch_values(self, batch_id,**kwargs):
"""To verify that the data in Data Import Batch matches expected value provide batch_id and the data u want to verify"""
ns=self.get_npsp_namespace_prefix()
table=ns + "DataImportBatch__c"
bge_batch=self.salesforce.salesforce_get(table,batch_id)
for key, value in kwargs.items():
label=ns + key
self.builtin.should_be_equal_as_strings(bge_batch[label], value)
def click_element_with_locator(self, path, *args, **kwargs):
"""Pass the locator and its values for the element you want to click """
locator=self.get_npsp_locator(path, *args, **kwargs)
self.selenium.click_element(locator)
def wait_for_record_to_update(self, id, value):
"""Waits for specified record header to be updated by checking every second for 10 times.
"""
i = 0
while True:
i += 1
if i > 10:
raise AssertionError(
"Timed out waiting for record name to be {} .".format(value)
)
self.salesforce.go_to_record_home(id)
try:
self.verify_header(value)
break
except Exception:
time.sleep(1)
def load_locator(self, locator):
"""Scrolls down until the specified locator is found.
"""
i = 0
while True:
i += 1
if i > 20:
raise AssertionError(
"Timed out waiting for locator {} to load.".format(locator)
)
self.selenium.execute_javascript("window.scrollBy(0, 100)")
self.wait_for_aura()
try:
self.selenium.get_webelement(locator)
break
except ElementNotFound:
time.sleep(0.2)
def select_multiple_values_from_duellist(self,path,list_name,section,*args):
"""Pass the list name and values to be selected from the dropdown. """
main_loc = npsp_lex_locators
for key in path.split('.'):
main_loc = main_loc[key]
for i in args:
locator = main_loc.format(list_name,section,i)
if args.index(i)==0:
self.selenium.click_element(locator)
else:
self.selenium.click_element(locator,'COMMAND')
def click_duellist_button(self, list_name,button):
list_found = False
locators = npsp_lex_locators["bge-duellist-btn"].values()
for i in locators:
locator = i.format(list_name,button)
if self.check_if_element_exists(locator):
loc=self.selenium.get_webelement(locator)
self.selenium.click_element(locator)
list_found = True
break
assert list_found, "Dropdown with the provided locator not found"
def verify_expected_values(self,ns_ind,obj_api,rec_id,**kwargs):
"""To verify that the data in database table match with expected value,
provide ns if object has namespace prefix otherwise nonns,
object api name, record_id and the data u want to verify"""
if(ns_ind=='ns'):
ns=self.get_npsp_namespace_prefix()
table=ns + obj_api
else:
table=obj_api
try :
rec=self.salesforce.salesforce_get(table,rec_id)
for key, value in kwargs.items():
print(f"executing {key}, {value} pair")
self.builtin.should_be_equal_as_strings(rec[key], value)
except Exception :
print("Retrying after exception")
time.sleep(10)
rec=self.salesforce.salesforce_get(table,rec_id)
for key, value in kwargs.items():
print(f"executing {key}, {value} pair")
self.builtin.should_be_equal_as_strings(rec[key], value)
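    # Usage sketch (added comment; the object and values are illustrative):
    # verify_expected_values("nonns", "Opportunity", opp_id, Amount="100.0",
    # StageName="Closed Won") fetches the record, retries once after 10 seconds
    # on failure, and compares each field as a string.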
def get_org_namespace_prefix(self):
if self.cumulusci.org.namespaced:
return "npsp__"
else:
return ""
@capture_screenshot_on_error
def click_first_matching_related_item_popup_link(self,heading,rel_status,link):
'''Clicks a link in the popup menu for first matching related list item.
heading specifies the name of the list,
        rel_status specifies the status or other field value to identify a particular item,
and link specifies the name of the link'''
self.salesforce.load_related_list(heading)
locator = npsp_lex_locators["record"]["related"]["link"].format(heading, rel_status)
mylist=self.selenium.get_webelements(locator)
title=mylist[0].text
print(f"title is {title}")
self.click_special_related_item_popup_link(heading, title, link)
def click_special_related_item_popup_link(self, heading, title, link):
"""Clicks a link in the popup menu for a related list item.
heading specifies the name of the list,
title specifies the name of the item,
and link specifies the name of the link
"""
self.salesforce.load_related_list(heading)
locator = npsp_lex_locators["record"]["related"]["popup_trigger"].format(heading, title)
self.selenium.wait_until_page_contains_element(locator)
self.salesforce._jsclick(locator)
locator = npsp_lex_locators["popup-link"].format(link)
self.salesforce._jsclick(locator)
self.salesforce.wait_until_loading_is_complete()
def verify_field_values(self,**kwargs):
"""Verifies values in the specified fields"""
for key, value in kwargs.items():
locator=npsp_lex_locators["field-value"].format(key)
res=self.selenium.get_webelement(locator).text
assert value == res, "Expected {} value to be {} but found {}".format(key,value,res)
def checkbox_status(self,cbx_name,status):
"""verifies if the specified checkbox is with expected status in readonly mode"""
locator=npsp_lex_locators["custom_settings"]["cbx_status"].format(cbx_name,status)
        self.selenium.page_should_contain_element(locator, message=f'{cbx_name} checkbox is supposed to be {status}')
def go_to_setup_page(self,page):
""" Navigates to the specified page in Salesforce Setup """
url = self.cumulusci.org.lightning_base_url
url = "{}/lightning/setup/{}/home".format(url,page)
self.selenium.go_to(url)
self.salesforce.wait_until_loading_is_complete()
def click_wrapper_related_list_button(self,heading,button_title):
"""Clicks a button in the heading of a related list when the related list is enclosed in wrapper.
Waits for a modal to open after clicking the button.
"""
locator = npsp_lex_locators["record"]["related"]["button"].format(heading, button_title)
element = self.selenium.driver.find_element_by_xpath(locator)
self.selenium.driver.execute_script('arguments[0].click()', element)
# def wait_until_url_contains(self,exp_text):
# """Waits for maximum of 90sec for current url to contain the exp_text"""
# self.builtin.log("This keyword can be removed once we support SeleniumLibrary 4.0.")
# url=self.selenium.get_location()
# i=0
# for i in range(10):
# if i == 9:
# raise AssertionError("Failed to find an url containing {} in 90 seconds".format(exp_text))
# if exp_text in url:
# break
# else:
# time.sleep(10)
# url=self.selenium.get_location()
# i += 1
@capture_screenshot_on_error
def change_view_to(self,view_name):
"""Selects a different view for the object records in listing page"""
locator=npsp_lex_locators['object_dd']
view=npsp_lex_locators['link'].format(view_name,view_name)
self.selenium.wait_until_page_contains("List Views")
self.selenium.get_webelement(locator).click()
element = self.selenium.driver.find_element_by_xpath(view)
self.selenium.driver.execute_script('arguments[0].click()', element)
self.selenium.wait_until_page_contains(view_name)
def search_field_by_value(self, fieldname, value):
""" Searches the field with the placeholder given by 'fieldname' for the given 'value'
"""
xpath = npsp_lex_locators["placeholder"].format(fieldname)
field = self.selenium.get_webelement(xpath)
self.selenium.clear_element_text(field)
field.send_keys(value)
time.sleep(2)
field.send_keys(Keys.ENTER)
def save_current_record_id_for_deletion(self,object_name):
"""Gets the current page record id and stores it for specified object
in order to delete record during suite teardown """
# self.pageobjects.current_page_should_be("Details",object_name)
id=self.salesforce.get_current_record_id()
self.salesforce.store_session_record(object_name,id)
return id
def verify_record_is_created_in_database(self,object_name,id):
"""Verifies that a record with specified id is saved
in specified object table in database and returns the record"""
record=self.salesforce.salesforce_get(object_name,id)
self.builtin.should_not_be_empty(record, msg="The database object {} with id {} is not in the database".format(object_name,id))
return record
@capture_screenshot_on_error
def select_value_from_dropdown(self,dropdown,value):
"""Select given value in the dropdown field"""
locator = npsp_lex_locators['record']['list'].format(dropdown)
self.selenium.scroll_element_into_view(locator)
self.selenium.get_webelement(locator).click()
self.wait_for_locator('popup')
self.selenium.click_link(value)
def edit_record(self):
"""Clicks on the edit button on record page for standard objects
and waits for the modal to open"""
self.salesforce.click_object_button("Edit")
self.salesforce.wait_until_modal_is_open()
def randomString(self,stringLength=10):
"""Generate a random string of fixed length """
letters = string.ascii_lowercase
return ''.join(random.choice(letters) for i in range(stringLength))
@capture_screenshot_on_error
def scroll_button_into_view_and_click_using_js(self, value):
"""Scrolls the button element into view and clicksthe button using JS """
xpath = npsp_lex_locators['button'].format(value)
self.selenium.wait_until_element_is_visible(xpath)
javascript = (
"window.document.evaluate("
f" '{xpath}', document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null"
").singleNodeValue.scrollIntoView(true)"
)
self.selenium.execute_javascript(javascript)
time.sleep(2)
self.npsp.click_button_with_value(value)
time.sleep(1)
def setupdata(self, name, contact_data=None, opportunity_data=None, account_data=None, payment_data=None, engagement_data=None,
recurringdonation_data=None, gau_data=None):
""" Creates an Account if account setup data is passed
Creates a contact if contact_data is passed
            Creates an opportunity for the contact if opportunity_data is provided
            Creates a contact and sets up an opportunity simultaneously if both
            contact_data and opportunity_data are specified
            Creates a contact and sets up an engagement plan when both contact and engagement plan information are provided
"""
# get the data variable, or an empty dictionary if not set
data = self.builtin.get_variable_value("${data}", {})
ns=self.get_npsp_namespace_prefix()
if account_data is not None:
# create the account based on the user input specified account type
acctname = self.randomString(10);
rt_id = self.salesforce.get_record_type_id("Account",account_data["Type"])
account_data.update( {'Name' : acctname,'RecordTypeId' : rt_id})
account_id = self.salesforce.salesforce_insert("Account", **account_data)
account = self.salesforce.salesforce_get("Account",account_id)
# save the account object to data dictionary
data[name] = account
if contact_data is not None:
# create the contact
firstname = self.randomString(10);
lastname = self.randomString(10);
contact_data.update( {'Firstname' : firstname,'Lastname' : lastname})
contact_id = self.salesforce.salesforce_insert("Contact", **contact_data)
contact = self.salesforce.salesforce_get("Contact",contact_id)
# save the contact object to data dictionary
data[name] = contact
if engagement_data is not None:
            # set up engagement template based on the user input specified and link the contact to the engagement template
engobjname = "Engagement_Plan_Template__c"
contactobjname = "Contact__c"
            # Formatting the object names with the namespace prefix
formattedengobjname = "{}{}".format(self.cumulusci.get_namespace_prefix(), engobjname)
formattedcontactobjname = "{}{}".format(self.cumulusci.get_namespace_prefix(), contactobjname)
engagement_id = self.salesforce.salesforce_insert(formattedengobjname, **engagement_data)
engagement = self.salesforce.salesforce_get(formattedengobjname,engagement_id)
# If the keyword is contact, link the contact to the engagement plan created
if name.lower() == 'contact':
testdata={}
testdata.update( {formattedcontactobjname : data[name]["Id"], formattedengobjname: engagement_id } )
self.salesforce.salesforce_insert(formattedengobjname, **testdata)
# save the engagement object to data dictionary
if name.lower() == 'contact':
data[f"{name}_engagement"] = engagement
else:
data[name] = engagement
# set a recurring donation for a contact
if recurringdonation_data is not None:
recurringdonation_data.update( {'npe03__Contact__c' : data[name]["Id"] } )
rd_id = self.salesforce.salesforce_insert("npe03__Recurring_Donation__c", **recurringdonation_data)
recurringdonation = self.salesforce.salesforce_get("npe03__Recurring_Donation__c",rd_id)
data[f"{name}_rd"] = recurringdonation
#set gau data
if gau_data is not None:
object_key = f"{ns}General_Accounting_Unit__c"
gauname = gau_data['Name']
random = self.randomString(10);
gau_data.update( {'name' : f"{random}{gauname}"} )
gau_id = self.salesforce.salesforce_insert(object_key, **gau_data)
gau = self.salesforce.salesforce_get(object_key,gau_id)
data[name] = gau
# set opportunity association with a contact or account
if opportunity_data is not None:
# create opportunity
rt_id = self.salesforce.get_record_type_id("Opportunity",opportunity_data["Type"])
# if user did not specify any date value add the default value
if 'CloseDate' not in opportunity_data:
date = datetime.now().strftime('%Y-%m-%d')
opportunity_data.update({'CloseDate' : date})
if 'npe01__Do_Not_Automatically_Create_Payment__c' not in opportunity_data:
Automatically_create_key = 'npe01__Do_Not_Automatically_Create_Payment__c'
Automatically_create_value = 'true'
opportunity_data.update({Automatically_create_key : Automatically_create_value})
if 'StageName' not in opportunity_data:
opportunity_data.update( {'StageName' : 'Closed Won'} )
if 'AccountId' not in opportunity_data:
opportunity_data.update( {'AccountId' : data[name]["AccountId"] } )
opportunity_data.update( {'RecordTypeId': rt_id } )
opportunity_id = self.salesforce.salesforce_insert("Opportunity", **opportunity_data)
opportunity = self.salesforce.salesforce_get("Opportunity",opportunity_id)
# save the opportunity
data[f"{name}_opportunity"] = opportunity
if payment_data is not None:
numdays = 30
i = 1
while i <= int(payment_data['NumPayments']):
payment_schedule_data = {}
numdays = numdays*2
scheduled_date = (datetime.now() + timedelta(days = numdays)).strftime('%Y-%m-%d')
payment_schedule_data.update( {'npe01__Opportunity__c' : data[f"{name}_opportunity"]["Id"] , 'npe01__Scheduled_Date__c' : scheduled_date, 'npe01__Payment_Amount__c' : payment_data['Amount'] } )
payment_id = self.salesforce.salesforce_insert("npe01__OppPayment__c", **payment_schedule_data)
i = i+1
self.builtin.set_suite_variable('${data}', data)
return data
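    # Usage sketch (added comment; all field values below are illustrative):
    # setupdata("contact",
    #           contact_data={"MailingCity": "Seattle"},
    #           opportunity_data={"Type": "Donation", "Amount": 100})
    # creates a contact with random first/last names, attaches an opportunity
    # (defaulting CloseDate to today and StageName to "Closed Won") to the
    # contact's account, and stores both records in the ${data} suite variable
    # under the keys "contact" and "contact_opportunity".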
def delete_record(self,value):
"""Select the row to be deleted on the listing page, click delete
and wait till the focus is back on the listings page."""
self.select_row(value)
self.selenium.click_link("Delete")
self.selenium.wait_until_location_contains("/list")
self.selenium.wait_until_page_does_not_contain(value)
@capture_screenshot_on_error
def populate_modal_form(self,**kwargs):
"""Populates modal form with the field-value pairs
supported keys are any input, textarea, lookup, checkbox, date and dropdown fields"""
for key, value in kwargs.items():
locator = npsp_lex_locators["modal-form"]["label"].format(key)
if self.check_if_element_exists(locator):
ele=self.selenium.get_webelements(locator)
for e in ele:
classname=e.get_attribute("class")
# print("key is {} and class is {}".format(key,classname))
if "Lookup" in classname and "readonly" not in classname:
self.salesforce.populate_lookup_field(key,value)
print("Executed populate lookup field for {}".format(key))
break
elif "Select" in classname and "readonly" not in classname:
self.select_value_from_dropdown(key,value)
print("Executed select value from dropdown for {}".format(key))
break
elif "Checkbox" in classname and "readonly" not in classname:
if value == "checked":
locator = npsp_lex_locators["checkbox"]["model-checkbox"].format(key)
self.selenium.get_webelement(locator).click()
break
elif "Date" in classname and "readonly" not in classname:
self.open_date_picker(key)
self.pick_date(value)
print("Executed open date picker and pick date for {}".format(key))
break
else:
try :
self.search_field_by_value(key,value)
print("Executed search field by value for {}".format(key))
except Exception :
try :
self.salesforce.populate_field(key,value)
print("Executed populate field for {}".format(key))
except Exception:
print ("class name for key {} did not match with field type supported by this keyword".format(key))
else:
raise Exception("Locator for {} is not found on the page".format(key))
def verify_toast_message(self,value):
"""Verifies that toast contains specified value"""
locator=npsp_lex_locators["toast-msg"]
self.selenium.wait_until_page_contains_element(locator)
msg=self.selenium.get_webelement(locator).text
if msg == value:
print("Toast message verified")
else:
raise Exception("Expected Toast message not found on page")
def edit_record_field_value(self,field,value):
"""Scrolls just a little below the field
Clicks on Edit icon next to field and enters a value into the field"""
scroll_loc=npsp_lex_locators["span_button"].format(field)
# To make sure the field we want to edit has rendered
# and is not obscured by the footer, scroll down a little below the element
self.selenium.scroll_element_into_view(scroll_loc)
self.selenium.execute_javascript("window.scrollBy(0,50)")
btn="Edit "+field
self.selenium.click_button(btn)
footer=npsp_lex_locators["record"]["footer"]
self.selenium.wait_until_page_contains_element(footer)
self.salesforce.populate_lookup_field(field,value)
@capture_screenshot_on_error
def edit_record_dropdown_value(self,field,value):
"""Scrolls just a little below the field
Clicks on Edit icon next to field and enters a value into the field"""
scroll_loc=npsp_lex_locators["span_button"].format(field)
# To make sure the field we want to edit has rendered
# and is not obscured by the footer, scroll down a little below the element
self.selenium.scroll_element_into_view(scroll_loc)
self.selenium.execute_javascript("window.scrollBy(0,50)")
btn="Edit "+field
self.selenium.click_button(btn)
footer=npsp_lex_locators["record"]["footer"]
self.selenium.wait_until_page_contains_element(footer)
time.sleep(2)
self.click_flexipage_dropdown(field, value)
def edit_record_checkbox(self,field,status):
"""Scrolls just a little below the field
Clicks on Edit icon next to field
checks if status is 'checked'
        unchecks if status is 'unchecked'"""
scroll_loc=npsp_lex_locators["span_button"].format(field)
# To make sure the field we want to edit has rendered
# and is not obscured by the footer, scroll down a little below the element
self.selenium.scroll_element_into_view(scroll_loc)
self.selenium.execute_javascript("window.scrollBy(0,50)")
btn="Edit "+field
self.selenium.click_button(btn)
footer=npsp_lex_locators["record"]["footer"]
self.selenium.wait_until_page_contains_element(footer)
self.set_checkbutton_to(field,status)
def save_record(self):
"""Saves record by clicking on footer button 'Save'"""
footer=npsp_lex_locators["record"]["footer"]
self.click_record_button("Save")
self.selenium.wait_until_page_does_not_contain_element(footer)
#Once the record is saved, scroll to top in order to be able to interact with elements above this
self.selenium.execute_javascript("window.scrollTo(0,0)")
def Delete_record_field_value(self,field,value):
"""Scrolls just a little below the field
Clicks on Edit icon next to field and delete the value by clicking on 'X'"""
scroll_loc=npsp_lex_locators["span_button"].format(field)
# To make sure the field we want to edit has rendered
# and is not obscured by the footer, scroll down a little below the element
self.selenium.scroll_element_into_view(scroll_loc)
self.selenium.execute_javascript("window.scrollBy(0,50)")
btn="Edit "+field
self.selenium.click_button(btn)
footer=npsp_lex_locators["record"]["footer"]
self.selenium.wait_until_page_contains_element(footer)
locator=npsp_lex_locators['delete_icon_record'].format(field,value)
self.selenium.get_webelement(locator).click()
def select_date_from_datepicker(self,field,value):
field_loc=npsp_lex_locators["bge"]["field-input"].format(field)
self.selenium.click_element(field_loc)
locator=npsp_lex_locators["bge"]["datepicker_open"].format(field)
self.selenium.wait_until_page_contains_element(locator)
self.click_bge_button(value)
self.selenium.wait_until_page_does_not_contain_element(locator,error="could not open datepicker")
def click_more_actions_button(self):
"""clicks on the more actions dropdown button in the actions container on record page"""
locator=npsp_lex_locators['link'].format("more actions","more actions")
self.salesforce._jsclick(locator)
@capture_screenshot_on_error
def click_related_table_item_link(self, heading, title):
"""Clicks a table header field link in the related list identified with the specified heading.
This keyword will automatically call `Wait until loading is complete`
"""
self.builtin.log("loading related list...", "DEBUG")
self.salesforce.load_related_list(heading)
locator = npsp_lex_locators["record"]["related"]["link"].format(heading, title)
self.builtin.log("clicking...", "DEBUG")
self.salesforce._jsclick(locator)
self.builtin.log("waiting...", "DEBUG")
self.salesforce.wait_until_loading_is_complete()
def click_actions_link(self,title):
"""Clicks on the link in the actions container on top right corner of the page using Javascript"""
locator=npsp_lex_locators["link-title"].format(title)
self.salesforce._jsclick(locator)
def click_more_activity_button(self):
"""Clicks on View More button on Activity tab of the record"""
locator = npsp_lex_locators["record"]["activity-button"].format('showMoreButton')
self.salesforce._jsclick(locator)
def click_button_with_title(self,title):
"""Clicks button identified by title using Javascript"""
locator = npsp_lex_locators["button-title"].format(title)
self.salesforce._jsclick(locator)
def click_show_more_actions_button(self,title):
"""Clicks on more actions dropdown and click the given title"""
locator=npsp_lex_locators['link-contains'].format("more actions")
self.selenium.click_element(locator)
self.selenium.wait_until_page_contains(title)
link_locator=npsp_lex_locators['custom_objects']['actions-link'].format(title,title)
self.selenium.click_link(link_locator)
def get_url_formatted_object_name(self,name):
"""Returns a map with BaseURl and the namespace formatted object name"""
out = {}
base_url = self.cumulusci.org.lightning_base_url
object_name = "{}{}".format(self.cumulusci.get_namespace_prefix(), name)
out['baseurl'] = base_url
out['objectname'] = object_name
return out
| 47.912847
| 213
| 0.634841
|
bb881737d6d15beaeadec52cec2f381d7b0b6478
| 40,324
|
py
|
Python
|
clif/python/gen.py
|
snu5mumr1k/clif
|
3a907dd7b0986f2b3306c88503d414f4d4f963ae
|
[
"Apache-2.0"
] | null | null | null |
clif/python/gen.py
|
snu5mumr1k/clif
|
3a907dd7b0986f2b3306c88503d414f4d4f963ae
|
[
"Apache-2.0"
] | null | null | null |
clif/python/gen.py
|
snu5mumr1k/clif
|
3a907dd7b0986f2b3306c88503d414f4d4f963ae
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generator helpers.
Produces pieces of generated code.
"""
from clif.python import astutils
from clif.python import postconv
from clif.python import slots
VERSION = '0.3' # CLIF generated API version. Pure informative.
PY3OUTPUT = None # Target Python3 on True, Py2 on False, None-don't care.
I = ' '
def TopologicalSortSimple(ideps):
"""Simple topological sort working on sequence of integer indices."""
# Returns permutation indices (list of integers).
# Using variable names `cons` for dependent, `prod` for dependency
# (consumer, producer) to increase readability.
# cons is implied by the index into ideps.
# prod is the element of ideps (integer or None).
# This implies that each cons can only have one or no prod.
# Example: ideps = [2, None, 1]
# Read as:
# 0 depends on 2
# 1 has no dependency
# 2 depends on 1
# Expected output permutation: [1, 2, 0]
# The output permutation guarantees that prod appears before cons.
# Recursive implementation, subject to maximum recursion limit
# (sys.getrecursionlimit(), usually 1000).
permutation = []
permutation_set = set()
def FollowDeps(root, cons):
"""Recursively follows dependencies."""
if cons in permutation_set:
return
prod = ideps[cons]
if prod is not None:
if prod < 0:
raise ValueError(
'Negative value in ideps: ideps[%s] = %s' % (cons, prod))
if prod >= len(ideps):
raise ValueError(
'Value in ideps exceeds its length: ideps[%s] = %s >= %s'
% (cons, prod, len(ideps)))
if prod == cons:
raise ValueError(
'Trivial cyclic dependency in ideps: ideps[%s] = %s'
% (cons, prod))
if prod == root:
raise ValueError(
'Cyclic dependency in ideps: following dependencies from'
' %s leads back to %s.' % (root, root))
FollowDeps(root, prod)
permutation.append(cons)
permutation_set.add(cons)
for cons in range(len(ideps)):
FollowDeps(cons, cons)
assert len(permutation) == len(ideps)
return permutation
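# Worked example (added for illustration), matching the docstring above:
#   TopologicalSortSimple([2, None, 1]) returns [1, 2, 0]; element 1 (no
#   dependency) comes first, then 2 (depends on 1), then 0 (depends on 2).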
def WriteTo(channel, lines):
for s in lines:
channel.write(s)
channel.write('\n')
def Headlines(src_file, hdr_files=(), sys_hdr_files=(), open_ns=None):
"""Generate header comment and #includes.
Args:
src_file: str - full name of the source file (C++ header)
hdr_files: [str] - additional c++ headers to #include "str"
If the first name is PYTHON, #include <Python.h>.
If str == PYOBJ, forward declare PyObject.
sys_hdr_files: set(str) - additional c++ headers to #include <str>
open_ns: str - emit namespace open_ns if not empty.
Yields:
source code lines
"""
yield '/' * 70
yield ('// This file was automatically generated by CLIF'
+ ('' if PY3OUTPUT is None else
' to run under Python %d' % (3 if PY3OUTPUT else 2)))
yield '// Version %s' % VERSION
yield '/' * 70
if src_file:
yield '// source: %s' % src_file
yield ''
python_h = False
if hdr_files[:1] == ['PYTHON']:
python_h = True
yield '#include <Python.h>'
del hdr_files[0]
for h in sys_hdr_files:
if h:
yield '#include <%s>' % h
for h in hdr_files:
if h == 'PYOBJ' and not python_h:
yield ''
yield '// Forward "declare" PyObject (instead of #include <Python.h>)'
yield 'struct _object; typedef _object PyObject;'
elif h:
yield '#include "%s"' % h
if open_ns:
yield ''
yield OpenNs(open_ns)
def OpenNs(namespace):
namespace = (namespace or 'clif').strip(':')
return ' '.join('namespace %s {' % ns for ns in namespace.split('::'))
def CloseNs(namespace):
namespace = (namespace or 'clif').strip(':')
return '} '*(1+namespace.count('::'))+' // namespace '+namespace
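# Illustrative sketch (added comment): OpenNs('clif::pkg') yields
# "namespace clif { namespace pkg {" and CloseNs('clif::pkg') yields
# "} }  // namespace clif::pkg", so the two helpers bracket generated code.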
def TypeConverters(type_namespace, types, *gen_cvt_args):
"""Generate type converters for types in type_namespace."""
type_namespace = type_namespace or 'clif'
yield ''
yield OpenNs(type_namespace)
if type_namespace != 'clif':
yield 'using namespace ::clif;'
yield 'using ::clif::Clif_PyObjAs;'
yield 'using ::clif::Clif_PyObjFrom;'
for t in types:
for s in t.GenConverters(*gen_cvt_args):
yield s
yield ''
yield CloseNs(type_namespace)
def _DefLine(pyname, cname, meth, doc):
if 'KEYWORD' in meth or 'NOARGS' in meth:
cname = '(PyCFunction)'+cname
if doc is None:
doc = 'nullptr'
else:
doc = 'C("%s")' % doc
return '{C("%s"), %s, %s, %s}' % (pyname, cname, meth, doc)
def _DefTable(ctype, cname, lines):
yield 'static %s %s[] = {' % (ctype, cname)
for p in lines:
yield I+_DefLine(*p)+','
yield I+'{}'
yield '};'
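# Example of the generated C++ (added for illustration): a methods tuple such
# as ('foo', 'wrapFoo', 'METH_VARARGS', 'Does foo.') is rendered by _DefLine as
#   {C("foo"), wrapFoo, METH_VARARGS, C("Does foo.")},
# and _DefTable wraps such entries in a static array terminated by an empty {}.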
class _MethodDef(object):
name = 'MethodsStaticAlloc'
def __call__(self, methods):
yield ''
for s in _DefTable('PyMethodDef', self.name, methods):
yield s
MethodDef = _MethodDef() # pylint: disable=invalid-name
class _GetSetDef(object):
# pylint: disable=missing-class-docstring
name = 'Properties'
def __call__(self, properties, enable_instance_dict):
props = properties
if enable_instance_dict:
props = [
('__dict__',
'pyclif_instance_dict_get',
'pyclif_instance_dict_set',
None)] + props
for s in _DefTable('PyGetSetDef', 'Properties', props):
yield s
GetSetDef = _GetSetDef() # pylint: disable=invalid-name
def _TypesInitInDependencyOrder(types_init, raise_if_reordering=False):
"""Yields type_init items in dependency order: base classes before derived."""
cppname_indices = {}
for index, (cppname, _, _, _) in enumerate(types_init):
cppname_indices[cppname] = index
assert len(cppname_indices) == len(types_init)
ideps = []
for cppname, _, wrapped_base, _ in types_init:
if wrapped_base is not None and wrapped_base not in cppname_indices:
# INDIRECT DETECTION. Considering current development plans, this code
# generator is not worth more effort detecting the issue in a more direct
# way. This is still far better than crashing with a KeyError, or failing
# at compile time.
raise NameError(
'A .clif file is missing a Python-style `from ... import` for a'
' base class declared in another header (go/pyclif#pyimport):'
' wrapped_derived=%s, wrapped_base=%s' % (cppname, wrapped_base))
ideps.append(
None if wrapped_base is None else
cppname_indices[wrapped_base])
permutation = TopologicalSortSimple(ideps)
if raise_if_reordering: # For development / debugging.
if list(sorted(permutation)) != permutation:
msg = [
'Derived class appearing before base in .clif file: %s'
% str(permutation)]
for cppname, _, wrapped_base, _ in types_init:
msg.append(' %s -> %s' % (cppname, wrapped_base))
raise RuntimeError('\n'.join(msg))
for index in permutation:
yield types_init[index]
def ReadyFunction(types_init):
"""Generate Ready() function to call PyType_Ready for wrapped types."""
yield ''
yield 'bool Ready() {'
have_modname = False
pybases = set()
last_pybase = ''
for cppname, base, wrapped_base, _ in _TypesInitInDependencyOrder(types_init):
yield I+'%s =' % cppname
yield I+'%s::_build_heap_type();' % cppname.rsplit('::', 1)[0]
if base:
fq_name, toplevel_fq_name = base
# |base| is a fully qualified Python name.
# The caller ensures we have only one Python base per each class.
if base == last_pybase:
yield I+'Py_INCREF(base_cls);'
else:
type_prefix = '' if pybases else 'PyObject* '
if toplevel_fq_name:
yield I+('%sbase_cls = ImportFQName("%s", "%s");' %
(type_prefix, fq_name, toplevel_fq_name))
else:
yield I+('%sbase_cls = ImportFQName("%s");' %
(type_prefix, fq_name))
if base not in pybases:
yield I+'if (base_cls == nullptr) return false;'
yield I+'if (!PyObject_TypeCheck(base_cls, &PyType_Type)) {'
yield I+I+'Py_DECREF(base_cls);'
yield I+I+(
'PyErr_SetString(PyExc_TypeError, "Base class %s is not a '
'new style class inheriting from object.");' % fq_name)
yield I+I+'return false;'
yield I+'}'
yield I+cppname + '->tp_base = %s(base_cls);' % _Cast('PyTypeObject')
if base not in pybases:
yield I+'// Check that base_cls is a *statically* allocated PyType.'
yield I+'if (%s->tp_base->tp_alloc == PyType_GenericAlloc) {' % cppname
yield I+I+'Py_DECREF(base_cls);'
yield I+I+('PyErr_SetString(PyExc_TypeError, "Base class %s is a'
' dynamic (Python defined) class.");' % fq_name)
yield I+I+'return false;'
yield I+'}'
last_pybase = base
pybases.add(base)
elif wrapped_base:
# base is Python wrapper type in a C++ class namespace defined locally.
yield I+'Py_INCREF(%s);' % wrapped_base
yield I+'%s->tp_base = %s;' % (cppname, wrapped_base)
yield I+'if (PyType_Ready(%s) < 0) return false;' % cppname
if not have_modname:
pystr = ('PyUnicode_FromString' if PY3OUTPUT else
'PyString_FromString')
yield I+'PyObject *modname = %s(ThisModuleName);' % pystr
yield I+'if (modname == nullptr) return false;'
have_modname = True
yield I+('PyObject_SetAttrString((PyObject *) %s, "__module__", modname);'
% cppname)
yield I+'Py_INCREF(%s); // For PyModule_AddObject to steal.' % cppname
yield I+'return true;'
yield '}'
def InitFunction(doc, meth_ref, init, dict_):
"""Generate a function to create the module and initialize it."""
if PY3OUTPUT:
yield ''
yield 'static struct PyModuleDef Module = {'
yield I+'PyModuleDef_HEAD_INIT,'
yield I+'ThisModuleName,'
yield I+'"%s", // module doc' % doc
yield I+'-1, // module keeps state in global variables'
yield I+meth_ref+','
yield I+'nullptr, // m_slots a.k.a. m_reload'
yield I+'nullptr, // m_traverse'
yield I+'ClearImportCache // m_clear'
yield '};'
yield ''
yield 'PyObject* Init() {'
if PY3OUTPUT:
yield I+'PyObject* module = PyModule_Create(&Module);'
else:
yield I+'PyObject* module = Py_InitModule3(%s, %s, "%s");' % (
'ThisModuleName', meth_ref, doc)
yield I+'if (!module) return nullptr;'
init_needs_err = False
for s in init:
assert ' return' not in s, 'use "goto err;" to handle errors'
if ' err;' in s: init_needs_err = True
yield I+s
for pair in dict_:
yield I+'if (PyModule_AddObject(module, "%s", %s) < 0) goto err;' % pair
yield I+'return module;'
if init_needs_err or dict_:
yield 'err:'
if PY3OUTPUT:
yield I+'Py_DECREF(module);'
yield I+'return nullptr;'
yield '}'
def PyModInitFunction(init_name='', modname='', ns='', py3=False):
"""Generate extension module init function."""
assert (init_name or modname) and not (init_name and modname) # xor
name = init_name or (('PyInit_' if py3 else 'init') + modname)
yield ''
yield 'PyMODINIT_FUNC %s(void) {' % name
if py3:
yield I+'if (!%s::Ready()) return nullptr;' % ns
yield I+'return %s::Init();' % ns
else:
yield I+'%s::Ready() &&' % ns
yield I+'%s::Init();' % ns
yield '}'
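# Illustrative output (added comment): PyModInitFunction(modname='example',
# ns='example_ns', py3=True) emits roughly:
#   PyMODINIT_FUNC PyInit_example(void) {
#     if (!example_ns::Ready()) return nullptr;
#     return example_ns::Init();
#   }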
def WrapperClassDef(name, ctype, cname, is_iter, has_iter, iter_ns,
enable_instance_dict):
"""Generate wrapper class."""
assert not (has_iter and is_iter)
yield ''
yield 'struct %s {' % name
yield I+'PyObject_HEAD'
if is_iter:
yield I+'iterator iter;'
else:
yield I+'::clif::Instance<%s> cpp;' % ctype
if enable_instance_dict:
yield I+'PyObject* instance_dict;'
yield '};'
if has_iter:
yield ''
yield 'namespace %s {' % iter_ns
yield 'typedef ::clif::Iterator<%s, %s> iterator;' % (cname, has_iter)
yield '}'
def VirtualOverriderClass(name, pyname, cname, cfqname, isabstract, idfunc,
pcfunc, vfuncs):
"""Generate a derived redirector class."""
yield ''
# Unfortunately the multiple-inheritance order here matters, probably caused
# by one or more improper `reinterpret_cast`s.
yield 'struct %s : %s, PyObjRef {' % (name, cname)
yield I+'using %s;' % cfqname
for f in vfuncs:
for s in _VirtualFunctionCall(
idfunc(f.name.cpp_name), f, pyname, isabstract, pcfunc):
yield s
yield '};'
def TypeObject(ht_qualname, tracked_slot_groups,
tp_slots, pyname, ctor, wname, fqclassname,
abstract, iterator, trivial_dtor, subst_cpp_ptr,
enable_instance_dict, cpp_has_ext_def_ctor):
"""Generate PyTypeObject methods and table.
Args:
ht_qualname: str - e.g. Struct or Outer.Inner
tracked_slot_groups: dict - from gen.GenSlots() call
tp_slots: dict - values for PyTypeObject slots
pyname: str - Python class name
ctor: str - (WRAPped/DEFault/None) type of generated ctor
wname: str - C++ wrapper class name
fqclassname: str - FQ C++ class (being wrapped) name
abstract: bool - wrapped C++ class is abstract
iterator: str - C++ iterator object if wrapping an __iter__ class else None
trivial_dtor: bool - if C++ destructor is trivial, no need to allow threads
subst_cpp_ptr: str - C++ "replacement" class (being wrapped) if any
enable_instance_dict: bool - add __dict__ to instance
cpp_has_ext_def_ctor: bool - if the C++ class has extended ctor
Yields:
Source code for PyTypeObject and tp_alloc / tp_init / tp_free methods.
"""
# NOTE: tracked_slot_groups['tp_slots'] and tp_group are similar but
# NOT identical. tp_group has additional customizations.
if ctor:
yield ''
yield '// %s __init__' % pyname
yield 'static int _ctor(PyObject* self, PyObject* args, PyObject* kw);'
if not iterator:
yield ''
yield '// %s __new__' % pyname
yield 'static PyObject* _new(PyTypeObject* type, Py_ssize_t nitems);'
tp_slots['tp_alloc'] = '_new'
tp_slots['tp_new'] = 'PyType_GenericNew'
yield ''
yield '// %s __del__' % pyname
# Use dtor for dynamic types (derived) to wind down malloc'ed C++ obj, so
# the C++ dtors are run.
tp_slots['tp_dealloc'] = '_dtor'
yield 'static void _dtor(PyObject* self) {'
if iterator or not trivial_dtor:
yield I+'Py_BEGIN_ALLOW_THREADS'
if iterator:
yield I+iterator+'.~iterator();'
else:
# Using ~Instance() leads to AddressSanitizer: heap-use-after-free.
yield I+'%s(self)->cpp.Destruct();' % _Cast(wname)
if iterator or not trivial_dtor:
yield I+'Py_END_ALLOW_THREADS'
if not iterator and enable_instance_dict:
yield I+'Py_CLEAR(%s(self)->instance_dict);' % _Cast(wname)
yield I+'Py_TYPE(self)->tp_free(self);'
yield '}'
if not iterator:
# Use delete for static types (not derived), allocated with _new.
tp_slots['tp_free'] = '_del'
yield ''
yield 'static void _del(void* self) {'
yield I+'delete %s(self);' % _Cast(wname)
yield '}'
tp_slots['tp_init'] = '_ctor' if ctor else 'Clif_PyType_Inconstructible'
tp_slots['tp_basicsize'] = 'sizeof(%s)' % wname
tp_slots['tp_itemsize'] = tp_slots['tp_version_tag'] = '0'
tp_slots['tp_dictoffset'] = tp_slots['tp_weaklistoffset'] = '0'
tp_slots['tp_flags'] = ' | '.join(tp_slots['tp_flags'])
if not tp_slots.get('tp_doc'):
tp_slots['tp_doc'] = '"CLIF wrapper for %s"' % fqclassname
wtype = '%s_Type' % wname
yield ''
yield 'PyTypeObject* %s = nullptr;' % wtype
yield ''
yield 'static PyTypeObject* _build_heap_type() {'
# http://third_party/pybind11/include/pybind11/detail/class.h?l=571&rcl=276599738
# was used as a reference for the code generated here.
yield I+'PyHeapTypeObject *heap_type ='
yield I+I+I+'(PyHeapTypeObject *) PyType_Type.tp_alloc(&PyType_Type, 0);'
yield I+'if (!heap_type)'
yield I+I+'return nullptr;'
if PY3OUTPUT:
    # ht_qualname requires Python >= 3.3 (always true for PyCLIF).
yield I+'heap_type->ht_qualname = (PyObject *) PyUnicode_FromString('
yield I+I+I+'"%s");' % ht_qualname
# Following the approach of pybind11 (ignoring the Python docs).
yield I+'Py_INCREF(heap_type->ht_qualname);'
yield I+'heap_type->ht_name = heap_type->ht_qualname;'
else:
yield I+'heap_type->ht_name = (PyObject *) PyString_FromString('
# Following the approach of pybind11 (ignoring the Python docs).
yield I+I+I+'"%s");' % ht_qualname
yield I+'PyTypeObject *ty = &heap_type->ht_type;'
yield I+'ty->tp_as_number = &heap_type->as_number;'
yield I+'ty->tp_as_sequence = &heap_type->as_sequence;'
yield I+'ty->tp_as_mapping = &heap_type->as_mapping;'
yield '#if PY_VERSION_HEX >= 0x03050000'
yield I+'ty->tp_as_async = &heap_type->as_async;'
yield '#endif'
for s in slots.GenTypeSlotsHeaptype(tracked_slot_groups, tp_slots, PY3OUTPUT):
yield s
if not iterator and enable_instance_dict:
yield I+'pyclif_instance_dict_enable(ty, offsetof(%s, instance_dict));' % (
wname)
yield I+'return ty;'
yield '}'
if ctor:
yield ''
yield 'static int _ctor(PyObject* self, PyObject* args, PyObject* kw) {'
if abstract:
yield I+'if (Py_TYPE(self) == %s) {' % wtype
yield I+I+'return Clif_PyType_Inconstructible(self, args, kw);'
yield I+'}'
cpp = '%s(self)->cpp' % _Cast(wname)
if ctor == 'DEF':
# Skip __init__ if it's a METH_NOARGS.
yield I+('if ((args && PyTuple_GET_SIZE(args) != 0) ||'
' (kw && PyDict_Size(kw) != 0)) {')
yield I+I+('PyErr_SetString(PyExc_TypeError, "%s takes no arguments");' %
pyname)
yield I+I+'return -1;'
yield I+'}'
# We have been lucky so far because NULL initialization of clif::Instance
# object is equivalent to constructing it with the default constructor.
# (NULL initialization happens in PyType_GenericAlloc).
# We don't have a place to call placement new. __init__ (and so _ctor) can
# be called many times and we have no way to ensure the previous object is
# destructed properly (it may be NULL or new initialized).
yield I+'%s = ::clif::MakeShared<%s>();' % (cpp,
subst_cpp_ptr or fqclassname)
if subst_cpp_ptr:
yield I+'%s->::clif::PyObjRef::Init(self);' % cpp
yield I+'return 0;'
else: # ctor is WRAP (holds 'wrapper name')
if cpp_has_ext_def_ctor:
yield I+('if ((args && PyTuple_GET_SIZE(args) != 0) ||'
' (kw && PyDict_Size(kw) != 0)) {')
yield I+I+(
'PyErr_SetString(PyExc_TypeError, "%s takes no arguments");' %
pyname)
yield I+I+'return -1;'
yield I+'}'
yield I+'PyObject* init = %s(self);' % ctor
else:
yield I+'PyObject* init = %s(self, args, kw);' % ctor
if subst_cpp_ptr:
yield I+'if (!init) return -1;'
yield I+'Py_DECREF(init);'
yield I+'%s->::clif::PyObjRef::Init(self);' % cpp
yield I+'return 0;'
else:
yield I+'Py_XDECREF(init);'
yield I+'return init? 0: -1;'
yield '}'
if not iterator:
yield ''
yield 'static PyObject* _new(PyTypeObject* type, Py_ssize_t nitems) {'
yield I+'DCHECK(nitems == 0);'
yield I+'%s* wobj = new %s;' % (wname, wname)
if enable_instance_dict:
yield I+'wobj->instance_dict = nullptr;'
yield I+'PyObject* self = %s(wobj);' % _Cast()
yield I+'return PyObject_Init(self, %s);' % wtype
yield '}'
def _CreateInputParameter(func_name, ast_param, arg, args):
"""Returns tuple of (bool, str) and appends to args."""
# First return value is bool check_nullptr.
# Second return value is a string to create C++ stack var named arg.
  # Side effect: args += arg getter.
ptype = ast_param.type
ctype = ptype.cpp_type
smartptr = (ctype.startswith('::std::unique_ptr') or
ctype.startswith('::std::shared_ptr'))
# std::function special case
if not ctype:
assert ptype.callable, 'Non-callable param has empty cpp_type'
if len(ptype.callable.returns) > 1:
raise ValueError('Callbacks may not have any output parameters, '
'%s param %s has %d' % (func_name, ast_param.name.native,
len(ptype.callable.returns)-1))
args.append('std::move(%s)' % arg)
return (
False,
'std::function<%s> %s;' % (
astutils.StdFuncParamStr(ptype.callable), arg))
# T*
if ptype.cpp_raw_pointer:
if ptype.cpp_toptr_conversion:
args.append(arg)
return (False, '%s %s;' % (ctype, arg))
t = ctype[:-1]
if ctype.endswith('*'):
if ptype.cpp_abstract:
if ptype.cpp_touniqptr_conversion:
args.append(arg+'.get()')
return (False, '::std::unique_ptr<%s> %s;' % (t, arg))
elif ptype.cpp_has_public_dtor:
# Create a copy on stack and pass its address.
if ptype.cpp_has_def_ctor:
args.append('&'+arg)
return (False, '%s %s;' % (t, arg))
else:
args.append('&%s.value()' % arg)
return (False, '::absl::optional<%s> %s;' % (t, arg))
raise TypeError("Can't convert %s to %s" % (ptype.lang_type, ctype))
if (smartptr or ptype.cpp_abstract) and not ptype.cpp_touniqptr_conversion:
raise TypeError('Can\'t create "%s" variable (C++ type %s) in function %s'
', no valid conversion defined'
% (ast_param.name.native, ctype, func_name))
# unique_ptr<T>, shared_ptr<T>
if smartptr:
args.append('std::move(%s)' % arg)
return (False, '%s %s;' % (ctype, arg))
# T, [const] T&
if ptype.cpp_toptr_conversion:
args.append('*'+arg)
return (True, '%s* %s;' % (ctype, arg))
if ptype.cpp_abstract: # for AbstractType &
args.append('*'+arg)
return (False, 'std::unique_ptr<%s> %s;' % (ctype, arg))
  # Create a copy on stack (even for T&, most cases should have to_T* conv).
if ptype.cpp_has_def_ctor:
args.append('std::move(%s)' % arg)
return (False, '%s %s;' % (ctype, arg))
else:
args.append(arg+'.value()')
return (False, '::absl::optional<%s> %s;' % (ctype, arg))
def FunctionCall(pyname, wrapper, doc, catch, call, postcall_init,
typepostconversion, func_ast, lineno, prepend_self=None):
"""Generate PyCFunction wrapper from AST.FuncDecl func_ast.
Args:
pyname: str - Python function name (may be special: ends with @)
wrapper: str - generated function name
doc: str - C++ signature
catch: bool - catch C++ exceptions
call: str | [str] - C++ command(s) to call the wrapped function
(without "(params);" part).
postcall_init: str - C++ command; to (re)set ret0.
typepostconversion: dict(pytype, index) to convert to pytype
func_ast: AST.FuncDecl protobuf
lineno: int - .clif line number where func_ast defined
prepend_self: AST.Param - Use self as 1st parameter.
Yields:
Source code for wrapped function.
Raises:
ValueError: for non-supported default arguments
"""
ctxmgr = pyname.endswith('@')
if ctxmgr:
ctxmgr = pyname
assert ctxmgr in ('__enter__@', '__exit__@'), (
'Invalid context manager name ' + pyname)
pyname = pyname.rstrip('@')
nret = len(func_ast.returns)
return_type = astutils.FuncReturnType(func_ast) # Can't use cpp_exact_type.
  # return_type is mangled to FQN and drops &; sadly it also drops const.
void_return_type = 'void' == return_type
# Has extra func parameters for output values.
xouts = nret > (0 if void_return_type else 1)
params = [] # C++ parameter names.
nargs = len(func_ast.params)
is_ternaryfunc_slot = pyname == '__call__'
yield ''
if func_ast.classmethod:
yield '// @classmethod ' + doc
arg0 = 'cls' # Extra protection that generated code does not use 'self'.
else:
yield '// ' + doc
arg0 = 'self'
needs_kw = nargs or is_ternaryfunc_slot
yield 'static PyObject* %s(PyObject* %s%s) {' % (
wrapper, arg0, ', PyObject* args, PyObject* kw' if needs_kw else '')
if is_ternaryfunc_slot and not nargs:
yield I+('if (!ensure_no_args_and_kw_args("%s", args, kw)) return nullptr;'
% pyname)
if prepend_self:
unused_check_nullptr, out = _CreateInputParameter(
pyname+' line %d' % lineno, prepend_self, 'arg0', params)
yield I+out
yield I+'if (!Clif_PyObjAs(self, &arg0)) return nullptr;'
minargs = sum(1 for p in func_ast.params if not p.default_value)
if nargs:
yield I+'PyObject* a[%d]%s;' % (nargs, '' if minargs == nargs else '{}')
yield I+'char* names[] = {'
for p in func_ast.params:
yield I+I+I+'C("%s"),' % p.name.native
yield I+I+I+'nullptr'
yield I+'};'
yield I+('if (!PyArg_ParseTupleAndKeywords(args, kw, "%s:%s", names, %s)) '
'return nullptr;' % ('O'*nargs if minargs == nargs else
'O'*minargs+'|'+'O'*(nargs-minargs), pyname,
', '.join('&a[%d]'%i for i in range(nargs))))
if minargs < nargs and not xouts:
yield I+'int nargs; // Find how many args actually passed in.'
yield I+'for (nargs = %d; nargs > %d; --nargs) {' % (nargs, minargs)
yield I+I+'if (a[nargs-1] != nullptr) break;'
yield I+'}'
# Convert input parameters from Python.
for i, p in enumerate(func_ast.params):
n = i+1
arg = 'arg%d' % n
check_nullptr, out = _CreateInputParameter(
pyname+' line %d' % lineno, p, arg, params)
yield I+out
return_arg_err = (
'return ArgError("{func_name}", names[{i}], "{ctype}", a[{i}]);'
).format(i=i, func_name=pyname, ctype=astutils.Type(p))
cvt = ('if (!Clif_PyObjAs(a[{i}], &{cvar}{postconv})) {return_arg_err}'
).format(i=i, cvar=arg, return_arg_err=return_arg_err,
# Add post conversion parameter for std::function.
postconv='' if p.type.cpp_type else ', {%s}' % ', '.join(
postconv.Initializer(t.type, typepostconversion)
for t in p.type.callable.params))
def YieldCheckNullptr(ii):
# pylint: disable=cell-var-from-loop
if check_nullptr:
yield ii+'if (%s == nullptr) {' % arg
yield ii+I+return_arg_err
yield ii+'}'
if i < minargs:
# Non-default parameter.
yield I+cvt
for s in YieldCheckNullptr(I):
yield s
else:
if xouts:
_I = '' # pylint: disable=invalid-name
else:
_I = I # pylint: disable=invalid-name
yield I+'if (nargs > %d) {' % i
      # Check if we're passed kw args, skipping some default C++ args.
      # In this case we must substitute the missing default args with default_value.
if (p.default_value == 'default' # Matcher could not find the default.
or 'inf' in p.default_value): # W/A for b/29437257
if xouts:
raise ValueError("Can't supply the default for C++ function"
' argument. Drop =default in def %s(%s).'
% (pyname, p.name.native))
if n < nargs:
yield I+I+('if (!a[{i}]) return DefaultArgMissedError('
'"{}", names[{i}]);'.format(pyname, i=i))
yield I+I+cvt
for s in YieldCheckNullptr(I+I):
yield s
elif (p.default_value and
params[-1].startswith('&') and p.type.cpp_raw_pointer):
# Special case for a pointer to an integral type param (like int*).
raise ValueError('A default for integral type pointer argument is '
' not supported. Drop =default in def %s(%s).'
% (pyname, p.name.native))
else:
# C-cast takes care of the case where |arg| is an enum value, while
# the matcher would return an integral literal. Using static_cast
# would be ideal, but its argument should be an expression, which a
# struct value like {1, 2, 3} is not.
yield _I+I+'if (!a[%d]) %s = (%s)%s;' % (i, arg, astutils.Type(p),
p.default_value)
yield _I+I+'else '+cvt
for s in YieldCheckNullptr(_I+I):
yield s
if not xouts:
yield I+'}'
# Create input parameters for extra return values.
for n, p in enumerate(func_ast.returns):
if n or void_return_type:
yield I+'%s ret%d{};' % (astutils.Type(p), n)
params.append('&ret%d' % n)
yield I+'// Call actual C++ method.'
if isinstance(call, list):
for s in call[:-1]:
yield I+s
call = call[-1]
if not func_ast.py_keep_gil:
if nargs:
yield I+'Py_INCREF(args);'
yield I+'Py_XINCREF(kw);'
yield I+'PyThreadState* _save;'
yield I+'Py_UNBLOCK_THREADS'
optional_ret0 = False
convert_ref_to_ptr = False
if (minargs < nargs or catch) and not void_return_type:
if catch and return_type.rstrip().endswith('&'):
convert_ref_to_ptr = True
idx = return_type.rindex('&')
return_type = return_type[:idx] + '*'
if func_ast.returns[0].type.cpp_has_def_ctor:
yield I+return_type+' ret0;'
else:
      # Using optional<> requires T to have T(x) and T::operator=(x) available.
      # While we only need t = x, implementing that would be a pain, so we skip it for now.
yield I+'::absl::optional<%s> ret0;' % return_type
optional_ret0 = True
if catch:
for s in _GenExceptionTry():
yield s
if minargs < nargs and not xouts:
if not void_return_type:
call = 'ret0 = '+call
yield I+'switch (nargs) {'
for n in range(minargs, nargs+1):
yield I+'case %d:' % n
if func_ast.is_extend_method and func_ast.constructor:
call_with_params = call % (func_ast.name.cpp_name,
astutils.TupleStr(params[:n]))
else:
num_params = n
# extended methods need to include `self` as the first parameter, but
# extended constructors do not.
if func_ast.is_extend_method:
num_params += 1
call_with_params = call + astutils.TupleStr(params[:num_params])
yield I+I+'%s; break;' % call_with_params
yield I+'}'
else:
if func_ast.is_extend_method and func_ast.constructor:
call = call % (func_ast.name.cpp_name, astutils.TupleStr(params))
else:
call += astutils.TupleStr(params)
_I = I if catch else '' # pylint: disable=invalid-name
if void_return_type:
yield _I+I+call+';'
elif catch:
if convert_ref_to_ptr:
yield _I+I+'ret0 = &'+call+';'
else:
yield _I+I+'ret0 = '+call+';'
else:
yield _I+I+return_type+' ret0 = '+call+';'
if catch:
for s in _GenExceptionCatch():
yield s
if postcall_init:
if void_return_type:
yield I+postcall_init
else:
yield I+'ret0'+postcall_init
if not func_ast.py_keep_gil:
yield I+'Py_BLOCK_THREADS'
if nargs:
yield I+'Py_DECREF(args);'
yield I+'Py_XDECREF(kw);'
if catch:
for s in _GenExceptionRaise():
yield s
if func_ast.postproc == '->self':
func_ast.postproc = ''
return_self = True
assert nret == 0, '-> self must have no other output parameters'
else:
return_self = False
ret = '*ret' if convert_ref_to_ptr else 'ret'
# If ctxmgr, force return self on enter, None on exit.
if nret > 1 or (func_ast.postproc or ctxmgr) and nret:
yield I+'// Convert return values to Python.'
yield I+'PyObject* p, * result_tuple = PyTuple_New(%d);' % nret
yield I+'if (result_tuple == nullptr) return nullptr;'
for i in range(nret):
yield I+'if ((p=Clif_PyObjFrom(std::move(%s%d), %s)) == nullptr) {' % (
ret, i,
postconv.Initializer(func_ast.returns[i].type, typepostconversion))
yield I+I+'Py_DECREF(result_tuple);'
yield I+I+'return nullptr;'
yield I+'}'
yield I+'PyTuple_SET_ITEM(result_tuple, %d, p);' % i
if func_ast.postproc:
yield I+'PyObject* pyproc = ImportFQName("%s");' % func_ast.postproc
yield I+'if (pyproc == nullptr) {'
yield I+I+'Py_DECREF(result_tuple);'
yield I+I+'return nullptr;'
yield I+'}'
yield I+'p = PyObject_CallObject(pyproc, result_tuple);'
yield I+'Py_DECREF(pyproc);'
yield I+'Py_CLEAR(result_tuple);'
if ctxmgr:
yield I+'if (p == nullptr) return nullptr;'
yield I+'Py_DECREF(p); // Not needed by the context manager.'
else:
yield I+'result_tuple = p;'
if ctxmgr == '__enter__@':
yield I+'Py_XDECREF(result_tuple);'
yield I+'Py_INCREF(self);'
yield I+'return self;'
elif ctxmgr == '__exit__@':
yield I+'Py_XDECREF(result_tuple);'
yield I+'Py_RETURN_NONE;'
else:
yield I+'return result_tuple;'
elif nret:
yield I+'return Clif_PyObjFrom(std::move(%s0%s), %s);' % (
ret, ('.value()' if optional_ret0 else ''),
'py::postconv::MarkedNonRaising' if func_ast.marked_non_raising else
postconv.Initializer(func_ast.returns[0].type, typepostconversion))
elif return_self or ctxmgr == '__enter__@':
yield I+'Py_INCREF(self);'
yield I+'return self;'
else:
yield I+'Py_RETURN_NONE;'
yield '}'
def _GenExceptionTry():
yield I+'PyObject* err_type = nullptr;'
yield I+'std::string err_msg{"C++ exception"};'
yield I+'try {'
def _GenExceptionCatch():
yield I+'} catch(const std::exception& e) {'
yield I+I+'err_type = PyExc_RuntimeError;'
yield I+I+'err_msg += std::string(": ") + e.what();'
yield I+'} catch (...) {'
yield I+I+'err_type = PyExc_RuntimeError;'
yield I+'}'
def _GenExceptionRaise():
yield I+'if (err_type) {'
yield I+I+'PyErr_SetString(err_type, err_msg.c_str());'
yield I+I+'return nullptr;'
yield I+'}'
def _VirtualFunctionCall(fname, f, pyname, abstract, postconvinit):
"""Generate virtual redirector call wrapper from AST.FuncDecl f."""
name = f.name.cpp_name
ret = astutils.FuncReturnType(f, true_cpp_type=True)
arg = astutils.FuncParamStr(f, 'a', true_cpp_type=True)
mod = ['']
if f.cpp_const_method: mod.append('const')
if f.cpp_noexcept: mod.append('noexcept')
yield ''
yield I+'%s %s%s%s override {' % (ret, fname, arg, ' '.join(mod))
params = astutils.TupleStr('std::move(a%i)' % i for i in range(
len(f.params) + len(f.returns) - (ret != 'void')))
yield I+I+'SafeAttr impl(self(), "%s");' % f.name.native
yield I+I+'if (impl.get()) {'
ret_st = 'return ' if ret != 'void' else ''
yield I+I+I+'%s::clif::callback::Func<%s>(impl.get(), {%s})%s;' % (
ret_st, ', '.join(
[ret] +
list(astutils.ExactTypeOrType(a) for a in f.params) +
list(astutils.FuncReturns(f))),
', '.join(postconvinit(a.type) for a in f.params), params)
yield I+I+'} else {'
if abstract:
# This is only called from C++. Since f has no info if it is pure virtual,
# we can't always generate the call, so we always fail in an abstract class.
yield I+I+I+('Py_FatalError("@virtual method %s.%s has no Python '
'implementation.");' % (pyname, f.name.native))
# In Python 2 Py_FatalError is not marked __attribute__((__noreturn__)),
# so to avoid -Wreturn-type warning add extra abort(). It does not hurt ;)
yield I+I+I+'abort();'
else:
yield I+I+I+ret_st + name + params + ';'
yield I+I+'}'
yield I+'}'
def CastAsCapsule(wrapped_cpp, pointer_name, wrapper):
yield ''
yield '// Implicit cast this as %s*' % pointer_name
yield 'static PyObject* %s(PyObject* self) {' % wrapper
yield I+'%s* p = ::clif::python::Get(%s);' % (pointer_name, wrapped_cpp)
yield I+'if (p == nullptr) return nullptr;'
yield I+('return PyCapsule_New(p, C("%s"), nullptr);') % pointer_name
yield '}'
class _NewIter(object):
"""Generate the new_iter function."""
name = 'new_iter'
def __call__(self, wrapped_iter, ns, wrapper, wrapper_type):
yield ''
yield 'PyObject* new_iter(PyObject* self) {'
yield I+'if (!ThisPtr(self)) return nullptr;'
yield I+'%s* it = PyObject_New(%s, %s);' % (wrapper, wrapper, wrapper_type)
yield I+'if (!it) return nullptr;'
yield I+'using std::equal_to; // Often a default template argument.'
yield I+'new(&it->iter) %siterator(MakeStdShared(%s));' % (ns, wrapped_iter)
yield I+'return %s(it);' % _Cast()
yield '}'
NewIter = _NewIter() # pylint: disable=invalid-name
class _IterNext(object):
"""Generate the iternext function."""
name = 'iternext'
def __call__(self, wrapped_iter, is_async, postconversion):
"""Generate tp_iternext method implementation."""
yield ''
yield 'PyObject* iternext(PyObject* self) {'
if is_async:
yield I+'PyThreadState* _save;'
yield I+'Py_UNBLOCK_THREADS'
yield I+'auto* v = %s.Next();' % wrapped_iter
if is_async:
yield I+'Py_BLOCK_THREADS'
yield I+'return v? Clif_PyObjFrom(*v, %s): nullptr;' % postconversion
yield '}'
IterNext = _IterNext() # pylint: disable=invalid-name
def FromFunctionDef(ctype, wdef, wname, flags, doc):
"""PyCFunc definition."""
assert ctype.startswith('std::function<'), repr(ctype)
return 'static PyMethodDef %s = %s;' % (wdef, _DefLine('', wname, flags, doc))
def VarGetter(name, cfunc, error, cvar, pc, is_extend=False):
"""Generate var getter."""
xdata = '' if cfunc else ', void* xdata'
yield ''
yield 'static PyObject* %s(PyObject* self%s) {' % (name, xdata)
if error and not is_extend:
yield I+error+'return nullptr;'
yield I+'return Clif_PyObjFrom(%s, %s);' % (cvar, pc)
yield '}'
def VarSetter(name, cfunc, error, cvar, v, csetter, as_str, is_extend=False):
"""Generate var setter.
Args:
name: setter function name
cfunc: (True/False) generate setter as a CFunction
error: C++ condition to return error if any
cvar: C var name to set new value to directly
v: VAR AST
csetter: C++ call expression to set var (without '(newvalue)') if any
as_str: Python str -> C str function (different for Py2/3)
is_extend: True for @extend properties in the .clif file.
Yields:
Source code for setter function.
"""
yield ''
if cfunc:
yield 'static PyObject* %s(PyObject* self, PyObject* value) {' % name
ret_error = 'return nullptr;'
ret_ok = 'Py_RETURN_NONE;'
else:
yield ('static int %s(PyObject* self, PyObject* value, void* xdata) {'
% name)
ret_error = 'return -1;'
ret_ok = 'return 0;'
yield I+'if (value == nullptr) {'
yield I+I+('PyErr_SetString(PyExc_TypeError, "Cannot delete the'
' %s attribute");' % v.name.native)
yield I+I+ret_error
yield I+'}'
if csetter:
# Workaround BUG "v.type.cpp_type not updated by Matcher", so get p[0].
yield I+'%s cval;' % v.cpp_set.params[0].type.cpp_type
yield I+'if (Clif_PyObjAs(value, &cval)) {'
if error:
yield I+I+error+ret_error
if is_extend:
yield I+I+csetter + '(*cpp, cval);'
else:
yield I+I+csetter + '(cval);'
yield I+I+ret_ok
yield I+'}'
if not csetter:
if error:
yield I+error+ret_error
yield I+'if (Clif_PyObjAs(value, &%s)) ' % cvar + ret_ok
yield I+'PyObject* s = PyObject_Repr(value);'
yield I+('PyErr_Format(PyExc_ValueError, "%s is not valid for {}:{}", s? {}'
'(s): "input");').format(v.name.native, v.type.lang_type, as_str)
yield I+'Py_XDECREF(s);'
yield I+ret_error
yield '}'
def _Cast(t='PyObject'):
assert not t.endswith('*')
return 'reinterpret_cast<%s*>' % t
| 37.615672
| 83
| 0.624293
|
50cc09666a7eec0b7576617478058e3aea3eeac2
| 3,639
|
py
|
Python
|
blockbuster/bb_api_request_processor.py
|
mattstibbs/blockbuster-server
|
cc66278405fcb02ebf07624e70220550ef1ad13b
|
[
"MIT"
] | null | null | null |
blockbuster/bb_api_request_processor.py
|
mattstibbs/blockbuster-server
|
cc66278405fcb02ebf07624e70220550ef1ad13b
|
[
"MIT"
] | 455
|
2015-02-02T21:29:35.000Z
|
2021-08-02T05:37:49.000Z
|
blockbuster/bb_api_request_processor.py
|
greysteil/blockbuster-server
|
475aa1f6da608f12c9c05607e3f302a21a712dfd
|
[
"MIT"
] | 2
|
2016-03-14T16:39:40.000Z
|
2018-03-08T12:03:33.000Z
|
import json
import bb_dbconnector_factory
import datetime
from json import JSONEncoder
import logging
import blockbuster.bb_command_processor
import blockbuster.bb_types
logger = logging.getLogger(__name__)
class DateEncoder(JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.date):
return obj.isoformat()
return JSONEncoder.default(self, obj)
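# Illustrative usage of DateEncoder (the date value is made up):
#   json.dumps({'when': datetime.date(2020, 1, 1)}, cls=DateEncoder)
#   -> '{"when": "2020-01-01"}'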
class APIRequestProcessor:
def __init__(self):
self.number = 1
# Method to handle a service status request through the API.
# Parameters: None
# Returns: The status of the BlockBuster service
def service_status_get(self):
status = bb_dbconnector_factory.DBConnectorInterfaceFactory().create().db_status_check()
logger.debug(status)
return status
# Method to handle a stats request through the API.
# Parameters: None
# Returns: Today's stats for the BlockBuster service
def service_stats_get(self):
stats = bb_dbconnector_factory.DBConnectorInterfaceFactory().create().db_stats_check()
logger.debug(stats)
return stats
# Method to handle a user status request through the API.
# Parameters: User Mobile Number
# Returns: JSON object containing list of blocks by user, and a list of blocks against user
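    # Illustrative shape of the returned object (contacts/registrations made up):
    #   {"blocking":  [{"contact": "07700900001", "registration": "AB12CDE"}],
    #    "blockedBy": [{"contact": "07700900002", "registration": "XY34ZZZ"}]}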
def status_get(self, requestermobile):
request = blockbuster.bb_types.APIRequestFactory().create()
request.requestormobile = requestermobile
request.servicenumber = 'API'
status = blockbuster.bb_command_processor.current_status(request)
print(status)
blocking = []
for b in status['blocking']:
block = {
"contact": b['blockee'],
"registration": b['blocked_reg']
}
blocking.append(block)
blockedBy = []
for b in status['blockedBy']:
block = {
"contact": b['blocker'],
"registration": b['blocked_reg']
}
blockedBy.append(block)
api_response = {
"blocking": blocking,
"blockedBy": blockedBy
}
return api_response
def cars_get(self, registration):
results = bb_dbconnector_factory.DBConnectorInterfaceFactory().create().api_registrations_get(registration)
if results is None:
return registration + " Not Found", 404
else:
return results
def cars_getall(self):
results = bb_dbconnector_factory.DBConnectorInterfaceFactory()\
.create().api_registrations_getall()
print(results)
if results is None:
return "Not Found", 404
else:
return results
def blocks_getall(self):
results = bb_dbconnector_factory.DBConnectorInterfaceFactory().create().api_blocks_getall()
if results is None:
return "Not Found", 404
else:
return results
def smslogs_get(self):
results = bb_dbconnector_factory.DBConnectorInterfaceFactory().create().api_smslogs_get()
if results is None:
return "Not Found", 404
else:
return results
def logs_get(self):
results = bb_dbconnector_factory.DBConnectorInterfaceFactory().create().api_logs_get()
if results is None:
return "Not Found", 404
else:
return results
def logsms_get(self):
        log_entries = bb_dbconnector_factory.DBConnectorInterfaceFactory().create().api_logsms_get()
        results = json.dumps(log_entries, cls=DateEncoder)
return results
| 30.07438
| 115
| 0.638912
|
435a6c5910bf1734e9978b958303ecff3a32b917
| 10,130
|
py
|
Python
|
lib/python3.8/site-packages/ansible_collections/community/general/plugins/modules/postgresql_slot.py
|
cjsteel/python3-venv-ansible-2.10.5
|
c95395c4cae844dc66fddde9b4343966f4b2ecd5
|
[
"Apache-1.1"
] | 1
|
2020-12-23T13:27:39.000Z
|
2020-12-23T13:27:39.000Z
|
.ansible/collections/ansible_collections/community/general/plugins/modules/database/postgresql/postgresql_slot.py
|
chronicc/proving-ground
|
3e392122a05fb8383a3700954baebb0df330e9e3
|
[
"MIT"
] | null | null | null |
.ansible/collections/ansible_collections/community/general/plugins/modules/database/postgresql/postgresql_slot.py
|
chronicc/proving-ground
|
3e392122a05fb8383a3700954baebb0df330e9e3
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, John Scalia (@jscalia), Andrew Klychkov (@Andersson007) <aaklychkov@mail.ru>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: postgresql_slot
short_description: Add or remove replication slots from a PostgreSQL database
description:
- Add or remove physical or logical replication slots from a PostgreSQL database.
options:
name:
description:
- Name of the replication slot to add or remove.
type: str
required: yes
aliases:
- slot_name
slot_type:
description:
- Slot type.
type: str
default: physical
choices: [ logical, physical ]
state:
description:
- The slot state.
- I(state=present) implies the slot must be present in the system.
    - I(state=absent) implies the slot must be removed from the system.
type: str
default: present
choices: [ absent, present ]
immediately_reserve:
description:
- Optional parameter that when C(yes) specifies that the LSN for this replication slot be reserved
immediately, otherwise the default, C(no), specifies that the LSN is reserved on the first connection
from a streaming replication client.
    - Available from PostgreSQL version 9.6.
    - Used only with I(slot_type=physical).
- Mutually exclusive with I(slot_type=logical).
type: bool
default: no
output_plugin:
description:
- All logical slots must indicate which output plugin decoder they're using.
- This parameter does not apply to physical slots.
- It will be ignored with I(slot_type=physical).
type: str
default: "test_decoding"
db:
description:
- Name of database to connect to.
type: str
aliases:
- login_db
session_role:
description:
- Switch to session_role after connecting.
The specified session_role must be a role that the current login_user is a member of.
- Permissions checking for SQL commands is carried out as though
the session_role were the one that had logged in originally.
type: str
trust_input:
description:
- If C(no), check the value of I(session_role) is potentially dangerous.
- It makes sense to use C(no) only when SQL injections via I(session_role) are possible.
type: bool
default: yes
version_added: '0.2.0'
notes:
- Physical replication slots were introduced to PostgreSQL with version 9.4,
while logical replication slots were added beginning with version 10.0.
seealso:
- name: PostgreSQL pg_replication_slots view reference
description: Complete reference of the PostgreSQL pg_replication_slots view.
link: https://www.postgresql.org/docs/current/view-pg-replication-slots.html
- name: PostgreSQL streaming replication protocol reference
description: Complete reference of the PostgreSQL streaming replication protocol documentation.
link: https://www.postgresql.org/docs/current/protocol-replication.html
- name: PostgreSQL logical replication protocol reference
description: Complete reference of the PostgreSQL logical replication protocol documentation.
link: https://www.postgresql.org/docs/current/protocol-logical-replication.html
author:
- John Scalia (@jscalia)
- Andrew Klychkov (@Andersson007)
- Thomas O'Donnell (@andytom)
extends_documentation_fragment:
- community.general.postgres
'''
EXAMPLES = r'''
- name: Create physical_one physical slot if it doesn't exist
become_user: postgres
community.general.postgresql_slot:
slot_name: physical_one
db: ansible
- name: Remove physical_one slot if it exists
become_user: postgres
community.general.postgresql_slot:
slot_name: physical_one
db: ansible
state: absent
- name: Create logical_one logical slot in the database acme if it doesn't exist
community.general.postgresql_slot:
name: logical_slot_one
slot_type: logical
state: present
output_plugin: custom_decoder_one
db: "acme"
- name: Remove logical_one slot if it exists from the cluster running on another host and non-standard port
community.general.postgresql_slot:
name: logical_one
login_host: mydatabase.example.org
port: 5433
login_user: ourSuperuser
login_password: thePassword
state: absent
'''
RETURN = r'''
name:
description: Name of the slot
returned: always
type: str
sample: "physical_one"
queries:
description: List of executed queries.
returned: always
type: str
sample: [ "SELECT pg_create_physical_replication_slot('physical_one', False, False)" ]
'''
try:
from psycopg2.extras import DictCursor
except ImportError:
# psycopg2 is checked by connect_to_db()
# from ansible.module_utils.postgres
pass
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils.database import (
check_input,
)
from ansible_collections.community.general.plugins.module_utils.postgres import (
connect_to_db,
exec_sql,
get_conn_params,
postgres_common_argument_spec,
)
# ===========================================
# PostgreSQL module specific support methods.
#
class PgSlot(object):
def __init__(self, module, cursor, name):
self.module = module
self.cursor = cursor
self.name = name
self.exists = False
self.kind = ''
self.__slot_exists()
self.changed = False
self.executed_queries = []
def create(self, kind='physical', immediately_reserve=False, output_plugin=False, just_check=False):
if self.exists:
if self.kind == kind:
return False
else:
self.module.warn("slot with name '%s' already exists "
"but has another type '%s'" % (self.name, self.kind))
return False
if just_check:
return None
if kind == 'physical':
            # Check the server version (immediately_reserve needs 9.6+):
if self.cursor.connection.server_version < 96000:
query = "SELECT pg_create_physical_replication_slot(%(name)s)"
else:
query = "SELECT pg_create_physical_replication_slot(%(name)s, %(i_reserve)s)"
self.changed = exec_sql(self, query,
query_params={'name': self.name, 'i_reserve': immediately_reserve},
return_bool=True)
elif kind == 'logical':
query = "SELECT pg_create_logical_replication_slot(%(name)s, %(o_plugin)s)"
self.changed = exec_sql(self, query,
query_params={'name': self.name, 'o_plugin': output_plugin}, return_bool=True)
def drop(self):
if not self.exists:
return False
query = "SELECT pg_drop_replication_slot(%(name)s)"
self.changed = exec_sql(self, query, query_params={'name': self.name}, return_bool=True)
def __slot_exists(self):
query = "SELECT slot_type FROM pg_replication_slots WHERE slot_name = %(name)s"
res = exec_sql(self, query, query_params={'name': self.name}, add_to_executed=False)
if res:
self.exists = True
self.kind = res[0][0]
# ===========================================
# Module execution.
#
def main():
argument_spec = postgres_common_argument_spec()
argument_spec.update(
db=dict(type="str", aliases=["login_db"]),
name=dict(type="str", required=True, aliases=["slot_name"]),
slot_type=dict(type="str", default="physical", choices=["logical", "physical"]),
immediately_reserve=dict(type="bool", default=False),
session_role=dict(type="str"),
output_plugin=dict(type="str", default="test_decoding"),
state=dict(type="str", default="present", choices=["absent", "present"]),
trust_input=dict(type="bool", default=True),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
name = module.params["name"]
slot_type = module.params["slot_type"]
immediately_reserve = module.params["immediately_reserve"]
state = module.params["state"]
output_plugin = module.params["output_plugin"]
if not module.params["trust_input"]:
check_input(module, module.params['session_role'])
if immediately_reserve and slot_type == 'logical':
module.fail_json(msg="Module parameters immediately_reserve and slot_type=logical are mutually exclusive")
# When slot_type is logical and parameter db is not passed,
# the default database will be used to create the slot and
# the user should know about this.
# When the slot type is physical,
# it doesn't matter which database will be used
# because physical slots are global objects.
if slot_type == 'logical':
warn_db_default = True
else:
warn_db_default = False
conn_params = get_conn_params(module, module.params, warn_db_default=warn_db_default)
db_connection = connect_to_db(module, conn_params, autocommit=True)
cursor = db_connection.cursor(cursor_factory=DictCursor)
##################################
# Create an object and do main job
pg_slot = PgSlot(module, cursor, name)
changed = False
if module.check_mode:
if state == "present":
if not pg_slot.exists:
changed = True
pg_slot.create(slot_type, immediately_reserve, output_plugin, just_check=True)
elif state == "absent":
if pg_slot.exists:
changed = True
else:
if state == "absent":
pg_slot.drop()
elif state == "present":
pg_slot.create(slot_type, immediately_reserve, output_plugin)
changed = pg_slot.changed
db_connection.close()
module.exit_json(changed=changed, name=name, queries=pg_slot.executed_queries)
if __name__ == '__main__':
main()
| 33.213115
| 114
| 0.675913
|
72240efab9807de3b7c0c8aab791092dbb0cd687
| 2,954
|
py
|
Python
|
ceilometer/tests/hardware/pollsters/base.py
|
orbitfp7/ceilometer
|
9905da14bbdf06f95e1e056c9ca0e18087214d0f
|
[
"Apache-2.0"
] | 2
|
2015-09-07T09:15:26.000Z
|
2015-09-30T02:13:23.000Z
|
ceilometer/tests/hardware/pollsters/base.py
|
orbitfp7/ceilometer
|
9905da14bbdf06f95e1e056c9ca0e18087214d0f
|
[
"Apache-2.0"
] | null | null | null |
ceilometer/tests/hardware/pollsters/base.py
|
orbitfp7/ceilometer
|
9905da14bbdf06f95e1e056c9ca0e18087214d0f
|
[
"Apache-2.0"
] | 1
|
2019-09-16T02:11:41.000Z
|
2019-09-16T02:11:41.000Z
|
#
# Copyright 2013 Intel Corp
#
# Authors: Lianhao Lu <lianhao.lu@intel.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
import mock
from ceilometer.central import manager
from ceilometer.hardware.inspector import base as inspector_base
from ceilometer.tests import base as test_base
class FakeInspector(inspector_base.Inspector):
net_metadata = dict(name='test.teest',
mac='001122334455',
ip='10.0.0.2',
speed=1000)
disk_metadata = dict(device='/dev/sda1', path='/')
DATA = {
'cpu.load.1min': (0.99, {}, {}),
'cpu.load.5min': (0.77, {}, {}),
'cpu.load.15min': (0.55, {}, {}),
'memory.total': (1000, {}, {}),
'memory.used': (90, {}, {}),
'network.incoming.bytes': (90, net_metadata, {}),
'network.outgoing.bytes': (80, net_metadata, {}),
'network.outgoing.errors': (1, net_metadata, {}),
'disk.size.total': (1000, disk_metadata, {}),
'disk.size.used': (90, disk_metadata, {}),
}
def inspect_generic(self, host, identifier, cache, extra_metadata=None):
yield self.DATA[identifier]
class TestPollsterBase(test_base.BaseTestCase):
def faux_get_inspector(url, namespace=None):
return FakeInspector()
def setUp(self):
super(TestPollsterBase, self).setUp()
self.hosts = ["test://test", "test://test2"]
self.useFixture(fixtures.MonkeyPatch(
'ceilometer.hardware.inspector.get_inspector',
self.faux_get_inspector))
@mock.patch('ceilometer.pipeline.setup_pipeline', mock.MagicMock())
def _check_get_samples(self, factory, name,
expected_value, expected_type, expected_unit=None):
mgr = manager.AgentManager()
pollster = factory()
cache = {}
samples = list(pollster.get_samples(mgr, cache, self.hosts))
self.assertTrue(samples)
self.assertIn(pollster.CACHE_KEY, cache)
for host in self.hosts:
self.assertIn(host, cache[pollster.CACHE_KEY])
self.assertEqual(set([name]),
set([s.name for s in samples]))
match = [s for s in samples if s.name == name]
self.assertEqual(expected_value, match[0].volume)
self.assertEqual(expected_type, match[0].type)
if expected_unit:
self.assertEqual(expected_unit, match[0].unit)
| 37.392405
| 78
| 0.635071
|
f529ac0281c085fa9895eeb4244662b5a6443c8c
| 55,820
|
py
|
Python
|
src/you_get/common.py
|
Abdo99ab/everything-downloader
|
bb66fa9427efa3002b9c336e2cc9d9bfe4fd5d27
|
[
"MIT"
] | 1
|
2019-09-27T11:14:05.000Z
|
2019-09-27T11:14:05.000Z
|
src/you_get/common.py
|
Abdo99ab/everything-downloader
|
bb66fa9427efa3002b9c336e2cc9d9bfe4fd5d27
|
[
"MIT"
] | null | null | null |
src/you_get/common.py
|
Abdo99ab/everything-downloader
|
bb66fa9427efa3002b9c336e2cc9d9bfe4fd5d27
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import io
import os
import re
import sys
import time
import json
import socket
import locale
import logging
import argparse
import ssl
from http import cookiejar
from importlib import import_module
from urllib import request, parse, error
from .version import __version__
from .util import log, term
from .util.git import get_version
from .util.strings import get_filename, unescape_html
from . import json_output as json_output_
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8')
SITES = {
'163': 'netease',
'56': 'w56',
'365yg': 'toutiao',
'acfun': 'acfun',
'archive': 'archive',
'baidu': 'baidu',
'bandcamp': 'bandcamp',
'baomihua': 'baomihua',
'bigthink': 'bigthink',
'bilibili': 'bilibili',
'cctv': 'cntv',
'cntv': 'cntv',
'cbs': 'cbs',
'coub': 'coub',
'dailymotion': 'dailymotion',
'douban': 'douban',
'douyin': 'douyin',
'douyu': 'douyutv',
'ehow': 'ehow',
'facebook': 'facebook',
'fc2': 'fc2video',
'flickr': 'flickr',
'freesound': 'freesound',
'fun': 'funshion',
'google': 'google',
'giphy': 'giphy',
'heavy-music': 'heavymusic',
'huomao': 'huomaotv',
'iask': 'sina',
'icourses': 'icourses',
'ifeng': 'ifeng',
'imgur': 'imgur',
'in': 'alive',
'infoq': 'infoq',
'instagram': 'instagram',
'interest': 'interest',
'iqilu': 'iqilu',
'iqiyi': 'iqiyi',
'ixigua': 'ixigua',
'isuntv': 'suntv',
'iwara': 'iwara',
'joy': 'joy',
'kankanews': 'bilibili',
'khanacademy': 'khan',
'ku6': 'ku6',
'kuaishou': 'kuaishou',
'kugou': 'kugou',
'kuwo': 'kuwo',
'le': 'le',
'letv': 'le',
'lizhi': 'lizhi',
'longzhu': 'longzhu',
'magisto': 'magisto',
'metacafe': 'metacafe',
'mgtv': 'mgtv',
'miomio': 'miomio',
'mixcloud': 'mixcloud',
'mtv81': 'mtv81',
'musicplayon': 'musicplayon',
'miaopai': 'yixia',
'naver': 'naver',
'7gogo': 'nanagogo',
'nicovideo': 'nicovideo',
'pinterest': 'pinterest',
'pixnet': 'pixnet',
'pptv': 'pptv',
'qingting': 'qingting',
'qq': 'qq',
'showroom-live': 'showroom',
'sina': 'sina',
'smgbb': 'bilibili',
'sohu': 'sohu',
'soundcloud': 'soundcloud',
'ted': 'ted',
'theplatform': 'theplatform',
'tiktok': 'tiktok',
'tucao': 'tucao',
'tudou': 'tudou',
'tumblr': 'tumblr',
'twimg': 'twitter',
'twitter': 'twitter',
'ucas': 'ucas',
'videomega': 'videomega',
'vidto': 'vidto',
'vimeo': 'vimeo',
'wanmen': 'wanmen',
'weibo': 'miaopai',
'veoh': 'veoh',
'vine': 'vine',
'vk': 'vk',
'xiami': 'xiami',
'xiaokaxiu': 'yixia',
'xiaojiadianvideo': 'fc2video',
'ximalaya': 'ximalaya',
'yinyuetai': 'yinyuetai',
'yizhibo': 'yizhibo',
'youku': 'youku',
'youtu': 'youtube',
'youtube': 'youtube',
'zhanqi': 'zhanqi',
'zhibo': 'zhibo',
'zhihu': 'zhihu',
}
dry_run = False
json_output = False
force = False
skip_existing_file_size_check = False
player = None
extractor_proxy = None
cookies = None
output_filename = None
auto_rename = False
insecure = False
fake_headers = {
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', # noqa
'Accept-Charset': 'UTF-8,*;q=0.5',
'Accept-Encoding': 'gzip,deflate,sdch',
'Accept-Language': 'en-US,en;q=0.8',
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:64.0) Gecko/20100101 Firefox/64.0', # noqa
}
if sys.stdout.isatty():
default_encoding = sys.stdout.encoding.lower()
else:
default_encoding = locale.getpreferredencoding().lower()
def rc4(key, data):
# all encryption algo should work on bytes
assert type(key) == type(data) and type(key) == type(b'')
state = list(range(256))
j = 0
for i in range(256):
j += state[i] + key[i % len(key)]
j &= 0xff
state[i], state[j] = state[j], state[i]
i = 0
j = 0
out_list = []
for char in data:
i += 1
i &= 0xff
j += state[i]
j &= 0xff
state[i], state[j] = state[j], state[i]
prn = state[(state[i] + state[j]) & 0xff]
out_list.append(char ^ prn)
return bytes(out_list)
def general_m3u8_extractor(url, headers={}):
m3u8_list = get_content(url, headers=headers).split('\n')
urls = []
for line in m3u8_list:
line = line.strip()
if line and not line.startswith('#'):
if line.startswith('http'):
urls.append(line)
else:
seg_url = parse.urljoin(url, line)
urls.append(seg_url)
return urls
def maybe_print(*s):
try:
print(*s)
except:
pass
def tr(s):
if default_encoding == 'utf-8':
return s
else:
return s
# return str(s.encode('utf-8'))[2:-1]
# DEPRECATED in favor of match1()
def r1(pattern, text):
m = re.search(pattern, text)
if m:
return m.group(1)
# DEPRECATED in favor of match1()
def r1_of(patterns, text):
for p in patterns:
x = r1(p, text)
if x:
return x
def match1(text, *patterns):
"""Scans through a string for substrings matched some patterns (first-subgroups only).
Args:
text: A string to be scanned.
patterns: Arbitrary number of regex patterns.
Returns:
When only one pattern is given, returns a string (None if no match found).
        When more than one pattern is given, returns a list of strings ([] if no match found).
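    Examples (illustrative):
        match1('id=42&t=7', r'id=(\d+)')              # -> '42'
        match1('id=42&t=7', r'id=(\d+)', r't=(\d+)')  # -> ['42', '7']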
"""
if len(patterns) == 1:
pattern = patterns[0]
match = re.search(pattern, text)
if match:
return match.group(1)
else:
return None
else:
ret = []
for pattern in patterns:
match = re.search(pattern, text)
if match:
ret.append(match.group(1))
return ret
def matchall(text, patterns):
"""Scans through a string for substrings matched some patterns.
Args:
text: A string to be scanned.
patterns: a list of regex pattern.
Returns:
        A list of all matches; empty if none found.
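    Example (illustrative):
        matchall('a=1 b=2', [r'a=(\d)', r'b=(\d)'])  # -> ['1', '2']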
"""
ret = []
for pattern in patterns:
match = re.findall(pattern, text)
ret += match
return ret
def launch_player(player, urls):
import subprocess
import shlex
if (sys.version_info >= (3, 3)):
import shutil
exefile = shlex.split(player)[0]
if shutil.which(exefile) is not None:
subprocess.call(shlex.split(player) + list(urls))
else:
log.wtf('[Failed] Cannot find player "%s"' % exefile)
else:
subprocess.call(shlex.split(player) + list(urls))
def parse_query_param(url, param):
"""Parses the query string of a URL and returns the value of a parameter.
Args:
url: A URL.
param: A string representing the name of the parameter.
Returns:
The value of the parameter.
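    Example (illustrative):
        parse_query_param('http://example.com/watch?v=abc123', 'v')  # -> 'abc123'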
"""
try:
return parse.parse_qs(parse.urlparse(url).query)[param][0]
except:
return None
def unicodize(text):
return re.sub(
r'\\u([0-9A-Fa-f][0-9A-Fa-f][0-9A-Fa-f][0-9A-Fa-f])',
lambda x: chr(int(x.group(0)[2:], 16)),
text
)
# DEPRECATED in favor of util.legitimize()
def escape_file_path(path):
path = path.replace('/', '-')
path = path.replace('\\', '-')
path = path.replace('*', '-')
path = path.replace('?', '-')
return path
def ungzip(data):
"""Decompresses data for Content-Encoding: gzip.
"""
from io import BytesIO
import gzip
buffer = BytesIO(data)
f = gzip.GzipFile(fileobj=buffer)
return f.read()
def undeflate(data):
"""Decompresses data for Content-Encoding: deflate.
(the zlib compression is used.)
"""
import zlib
decompressobj = zlib.decompressobj(-zlib.MAX_WBITS)
return decompressobj.decompress(data) + decompressobj.flush()
# DEPRECATED in favor of get_content()
def get_response(url, faker=False):
logging.debug('get_response: %s' % url)
# install cookies
if cookies:
opener = request.build_opener(request.HTTPCookieProcessor(cookies))
request.install_opener(opener)
if faker:
response = request.urlopen(
request.Request(url, headers=fake_headers), None
)
else:
response = request.urlopen(url)
data = response.read()
if response.info().get('Content-Encoding') == 'gzip':
data = ungzip(data)
elif response.info().get('Content-Encoding') == 'deflate':
data = undeflate(data)
response.data = data
return response
# DEPRECATED in favor of get_content()
def get_html(url, encoding=None, faker=False):
content = get_response(url, faker).data
return str(content, 'utf-8', 'ignore')
# DEPRECATED in favor of get_content()
def get_decoded_html(url, faker=False):
response = get_response(url, faker)
data = response.data
charset = r1(r'charset=([\w-]+)', response.headers['content-type'])
if charset:
return data.decode(charset, 'ignore')
else:
return data
def get_location(url, headers=None, get_method='HEAD'):
logging.debug('get_location: %s' % url)
if headers:
req = request.Request(url, headers=headers)
else:
req = request.Request(url)
req.get_method = lambda: get_method
res = urlopen_with_retry(req)
return res.geturl()
def urlopen_with_retry(*args, **kwargs):
retry_time = 3
for i in range(retry_time):
try:
if insecure:
# ignore ssl errors
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
return request.urlopen(*args, context=ctx, **kwargs)
else:
return request.urlopen(*args, **kwargs)
except socket.timeout as e:
logging.debug('request attempt %s timeout' % str(i + 1))
if i + 1 == retry_time:
raise e
# try to tackle youku CDN fails
except error.HTTPError as http_error:
logging.debug('HTTP Error with code{}'.format(http_error.code))
if i + 1 == retry_time:
raise http_error
def get_content(url, headers={}, decoded=True):
"""Gets the content of a URL via sending a HTTP GET request.
Args:
url: A URL.
headers: Request headers used by the client.
        decoded: Whether to decode the response body using UTF-8 or the charset specified in Content-Type.
Returns:
The content as a string.
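    Example (illustrative):
        html = get_content('https://example.com/', headers=fake_headers)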
"""
logging.debug('get_content: %s' % url)
req = request.Request(url, headers=headers)
if cookies:
cookies.add_cookie_header(req)
req.headers.update(req.unredirected_hdrs)
response = urlopen_with_retry(req)
data = response.read()
# Handle HTTP compression for gzip and deflate (zlib)
content_encoding = response.getheader('Content-Encoding')
if content_encoding == 'gzip':
data = ungzip(data)
elif content_encoding == 'deflate':
data = undeflate(data)
# Decode the response body
if decoded:
charset = match1(
response.getheader('Content-Type', ''), r'charset=([\w-]+)'
)
if charset is not None:
data = data.decode(charset, 'ignore')
else:
data = data.decode('utf-8', 'ignore')
return data
def post_content(url, headers={}, post_data={}, decoded=True, **kwargs):
"""Post the content of a URL via sending a HTTP POST request.
Args:
url: A URL.
headers: Request headers used by the client.
        decoded: Whether to decode the response body using UTF-8 or the charset specified in Content-Type.
Returns:
The content as a string.
"""
if kwargs.get('post_data_raw'):
logging.debug('post_content: %s\npost_data_raw: %s' % (url, kwargs['post_data_raw']))
else:
logging.debug('post_content: %s\npost_data: %s' % (url, post_data))
req = request.Request(url, headers=headers)
if cookies:
cookies.add_cookie_header(req)
req.headers.update(req.unredirected_hdrs)
if kwargs.get('post_data_raw'):
post_data_enc = bytes(kwargs['post_data_raw'], 'utf-8')
else:
post_data_enc = bytes(parse.urlencode(post_data), 'utf-8')
response = urlopen_with_retry(req, data=post_data_enc)
data = response.read()
# Handle HTTP compression for gzip and deflate (zlib)
content_encoding = response.getheader('Content-Encoding')
if content_encoding == 'gzip':
data = ungzip(data)
elif content_encoding == 'deflate':
data = undeflate(data)
# Decode the response body
if decoded:
charset = match1(
response.getheader('Content-Type'), r'charset=([\w-]+)'
)
if charset is not None:
data = data.decode(charset)
else:
data = data.decode('utf-8')
return data
def url_size(url, faker=False, headers={}):
if faker:
response = urlopen_with_retry(
request.Request(url, headers=fake_headers)
)
elif headers:
response = urlopen_with_retry(request.Request(url, headers=headers))
else:
response = urlopen_with_retry(url)
size = response.headers['content-length']
return int(size) if size is not None else float('inf')
def urls_size(urls, faker=False, headers={}):
return sum([url_size(url, faker=faker, headers=headers) for url in urls])
def get_head(url, headers=None, get_method='HEAD'):
logging.debug('get_head: %s' % url)
if headers:
req = request.Request(url, headers=headers)
else:
req = request.Request(url)
req.get_method = lambda: get_method
res = urlopen_with_retry(req)
return res.headers
def url_info(url, faker=False, headers={}):
logging.debug('url_info: %s' % url)
if faker:
response = urlopen_with_retry(
request.Request(url, headers=fake_headers)
)
elif headers:
response = urlopen_with_retry(request.Request(url, headers=headers))
else:
response = urlopen_with_retry(request.Request(url))
headers = response.headers
type = headers['content-type']
if type == 'image/jpg; charset=UTF-8' or type == 'image/jpg':
type = 'audio/mpeg' # fix for netease
mapping = {
'video/3gpp': '3gp',
'video/f4v': 'flv',
'video/mp4': 'mp4',
'video/MP2T': 'ts',
'video/quicktime': 'mov',
'video/webm': 'webm',
'video/x-flv': 'flv',
'video/x-ms-asf': 'asf',
'audio/mp4': 'mp4',
'audio/mpeg': 'mp3',
'audio/wav': 'wav',
'audio/x-wav': 'wav',
'audio/wave': 'wav',
'image/jpeg': 'jpg',
'image/png': 'png',
'image/gif': 'gif',
'application/pdf': 'pdf',
}
if type in mapping:
ext = mapping[type]
else:
type = None
if headers['content-disposition']:
try:
filename = parse.unquote(
r1(r'filename="?([^"]+)"?', headers['content-disposition'])
)
if len(filename.split('.')) > 1:
ext = filename.split('.')[-1]
else:
ext = None
except:
ext = None
else:
ext = None
if headers['transfer-encoding'] != 'chunked':
size = headers['content-length'] and int(headers['content-length'])
else:
size = None
return type, ext, size
def url_locations(urls, faker=False, headers={}):
locations = []
for url in urls:
logging.debug('url_locations: %s' % url)
if faker:
response = urlopen_with_retry(
request.Request(url, headers=fake_headers)
)
elif headers:
response = urlopen_with_retry(
request.Request(url, headers=headers)
)
else:
response = urlopen_with_retry(request.Request(url))
locations.append(response.url)
return locations
def url_save(
url, filepath, bar, refer=None, is_part=False, faker=False,
headers=None, timeout=None, **kwargs
):
tmp_headers = headers.copy() if headers is not None else {}
# When a referer specified with param refer,
# the key must be 'Referer' for the hack here
if refer is not None:
tmp_headers['Referer'] = refer
if type(url) is list:
file_size = urls_size(url, faker=faker, headers=tmp_headers)
is_chunked, urls = True, url
else:
file_size = url_size(url, faker=faker, headers=tmp_headers)
is_chunked, urls = False, [url]
continue_renameing = True
while continue_renameing:
continue_renameing = False
if os.path.exists(filepath):
if not force and (file_size == os.path.getsize(filepath) or skip_existing_file_size_check):
if not is_part:
if bar:
bar.done()
if skip_existing_file_size_check:
log.w(
'Skipping {} without checking size: file already exists'.format(
tr(os.path.basename(filepath))
)
)
else:
log.w(
'Skipping {}: file already exists'.format(
tr(os.path.basename(filepath))
)
)
else:
if bar:
bar.update_received(file_size)
return
else:
if not is_part:
if bar:
bar.done()
if not force and auto_rename:
path, ext = os.path.basename(filepath).rsplit('.', 1)
                        finder = re.compile(r' \([1-9]\d*?\)$')
if (finder.search(path) is None):
thisfile = path + ' (1).' + ext
else:
def numreturn(a):
return ' (' + str(int(a.group()[2:-1]) + 1) + ').'
thisfile = finder.sub(numreturn, path) + ext
filepath = os.path.join(os.path.dirname(filepath), thisfile)
print('Changing name to %s' % tr(os.path.basename(filepath)), '...')
continue_renameing = True
continue
if log.yes_or_no('File with this name already exists. Overwrite?'):
log.w('Overwriting %s ...' % tr(os.path.basename(filepath)))
else:
return
elif not os.path.exists(os.path.dirname(filepath)):
os.mkdir(os.path.dirname(filepath))
temp_filepath = filepath + '.download' if file_size != float('inf') \
else filepath
received = 0
if not force:
open_mode = 'ab'
if os.path.exists(temp_filepath):
received += os.path.getsize(temp_filepath)
if bar:
bar.update_received(os.path.getsize(temp_filepath))
else:
open_mode = 'wb'
for url in urls:
received_chunk = 0
if received < file_size:
if faker:
tmp_headers = fake_headers
'''
if parameter headers passed in, we have it copied as tmp_header
elif headers:
headers = headers
else:
headers = {}
'''
if received and not is_chunked: # only request a range when not chunked
tmp_headers['Range'] = 'bytes=' + str(received) + '-'
if refer:
tmp_headers['Referer'] = refer
if timeout:
response = urlopen_with_retry(
request.Request(url, headers=tmp_headers), timeout=timeout
)
else:
response = urlopen_with_retry(
request.Request(url, headers=tmp_headers)
)
try:
range_start = int(
response.headers[
'content-range'
][6:].split('/')[0].split('-')[0]
)
end_length = int(
response.headers['content-range'][6:].split('/')[1]
)
range_length = end_length - range_start
except:
content_length = response.headers['content-length']
range_length = int(content_length) if content_length is not None \
else float('inf')
if is_chunked: # always append if chunked
open_mode = 'ab'
elif file_size != received + range_length: # is it ever necessary?
received = 0
if bar:
bar.received = 0
open_mode = 'wb'
with open(temp_filepath, open_mode) as output:
while True:
buffer = None
try:
buffer = response.read(1024 * 256)
except socket.timeout:
pass
if not buffer:
if is_chunked and received_chunk == range_length:
break
elif not is_chunked and received == file_size: # Download finished
break
# Unexpected termination. Retry request
                        if not is_chunked:  # when not chunked, re-request the remaining byte range
tmp_headers['Range'] = 'bytes=' + str(received) + '-'
response = urlopen_with_retry(
request.Request(url, headers=tmp_headers)
)
continue
output.write(buffer)
received += len(buffer)
received_chunk += len(buffer)
if bar:
bar.update_received(len(buffer))
assert received == os.path.getsize(temp_filepath), '%s == %s == %s' % (
received, os.path.getsize(temp_filepath), temp_filepath
)
if os.access(filepath, os.W_OK):
# on Windows rename could fail if destination filepath exists
os.remove(filepath)
os.rename(temp_filepath, filepath)
class SimpleProgressBar:
term_size = term.get_terminal_size()[1]
def __init__(self, total_size, total_pieces=1):
self.displayed = False
self.total_size = total_size
self.total_pieces = total_pieces
self.current_piece = 1
self.received = 0
self.speed = ''
self.last_updated = time.time()
total_pieces_len = len(str(total_pieces))
# 38 is the size of all statically known size in self.bar
total_str = '%5s' % round(self.total_size / 1048576, 1)
total_str_width = max(len(total_str), 5)
self.bar_size = self.term_size - 28 - 2 * total_pieces_len \
- 2 * total_str_width
self.bar = '{:>4}%% ({:>%s}/%sMB) ├{:─<%s}┤[{:>%s}/{:>%s}] {}' % (
total_str_width, total_str, self.bar_size, total_pieces_len,
total_pieces_len
)
def update(self):
self.displayed = True
bar_size = self.bar_size
percent = round(self.received * 100 / self.total_size, 1)
if percent >= 100:
percent = 100
dots = bar_size * int(percent) // 100
plus = int(percent) - dots // bar_size * 100
if plus > 0.8:
plus = '█'
elif plus > 0.4:
plus = '>'
else:
plus = ''
bar = '█' * dots + plus
bar = self.bar.format(
percent, round(self.received / 1048576, 1), bar,
self.current_piece, self.total_pieces, self.speed
)
print('[DISPLAY]')
print(str(self.speed))
print(str(percent) + "%")
sys.stdout.write('\r' + bar)
sys.stdout.flush()
def update_received(self, n):
self.received += n
time_diff = time.time() - self.last_updated
bytes_ps = n / time_diff if time_diff else 0
if bytes_ps >= 1024 ** 3:
self.speed = '{:.2f} GB/s'.format(bytes_ps / 1024 ** 3)
elif bytes_ps >= 1024 ** 2:
self.speed = '{:.2f} MB/s'.format(bytes_ps / 1024 ** 2)
elif bytes_ps >= 1024:
self.speed = '{:.2f} kB/s'.format(bytes_ps / 1024)
else:
self.speed = '{:.2f} B/s'.format(bytes_ps)
self.last_updated = time.time()
self.update()
def update_piece(self, n):
self.current_piece = n
def done(self):
if self.displayed:
print()
self.displayed = False
class PiecesProgressBar:
def __init__(self, total_size, total_pieces=1):
self.displayed = False
self.total_size = total_size
self.total_pieces = total_pieces
self.current_piece = 1
self.received = 0
def update(self):
self.displayed = True
bar = '{0:>5}%[{1:<40}] {2}/{3}'.format(
'', '=' * 40, self.current_piece, self.total_pieces
)
sys.stdout.write('\r' + bar)
sys.stdout.flush()
def update_received(self, n):
self.received += n
self.update()
def update_piece(self, n):
self.current_piece = n
def done(self):
if self.displayed:
print()
self.displayed = False
class DummyProgressBar:
def __init__(self, *args):
pass
def update_received(self, n):
pass
def update_piece(self, n):
pass
def done(self):
pass
def get_output_filename(urls, title, ext, output_dir, merge, **kwargs):
# lame hack for the --output-filename option
global output_filename
if output_filename:
result = output_filename
if kwargs.get('part', -1) >= 0:
result = '%s[%02d]' % (result, kwargs.get('part'))
if ext:
result = '%s.%s' % (result, ext)
return result
merged_ext = ext
if (len(urls) > 1) and merge:
from .processor.ffmpeg import has_ffmpeg_installed
if ext in ['flv', 'f4v']:
if has_ffmpeg_installed():
merged_ext = 'mp4'
else:
merged_ext = 'flv'
elif ext == 'mp4':
merged_ext = 'mp4'
elif ext == 'ts':
if has_ffmpeg_installed():
merged_ext = 'mkv'
else:
merged_ext = 'ts'
result = title
if kwargs.get('part', -1) >= 0:
result = '%s[%02d]' % (result, kwargs.get('part'))
result = '%s.%s' % (result, merged_ext)
return result
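# Illustrative results of get_output_filename (added for clarity; example values):
# with title='clip', ext='mp4' and a single URL it yields 'clip.mp4'; with
# part=0 it yields 'clip[00].mp4'; for multi-part 'ts' input the merged
# extension may switch to 'mkv' when ffmpeg is available.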
def print_user_agent(faker=False):
urllib_default_user_agent = 'Python-urllib/%d.%d' % sys.version_info[:2]
user_agent = fake_headers['User-Agent'] if faker else urllib_default_user_agent
print('User Agent: %s' % user_agent)
def download_urls(
urls, title, ext, total_size, output_dir='.', refer=None, merge=True,
faker=False, headers={}, **kwargs
):
assert urls
if json_output:
json_output_.download_urls(
urls=urls, title=title, ext=ext, total_size=total_size,
refer=refer
)
return
if dry_run:
print_user_agent(faker=faker)
try:
print('Real URLs:\n%s' % '\n'.join(urls))
except:
print('Real URLs:\n%s' % '\n'.join([j for i in urls for j in i]))
return
if player:
launch_player(player, urls)
return
if not total_size:
try:
total_size = urls_size(urls, faker=faker, headers=headers)
except:
import traceback
traceback.print_exc(file=sys.stdout)
pass
title = tr(get_filename(title))
output_filename = get_output_filename(urls, title, ext, output_dir, merge)
output_filepath = os.path.join(output_dir, output_filename)
if total_size:
if not force and os.path.exists(output_filepath) and not auto_rename \
and (os.path.getsize(output_filepath) >= total_size * 0.9 \
or skip_existing_file_size_check):
if skip_existing_file_size_check:
log.w('Skipping %s without checking size: file already exists' % output_filepath)
else:
log.w('Skipping %s: file already exists' % output_filepath)
print()
return
bar = SimpleProgressBar(total_size, len(urls))
else:
bar = PiecesProgressBar(total_size, len(urls))
if len(urls) == 1:
url = urls[0]
print('Downloading %s ...' % tr(output_filename))
bar.update()
url_save(
url, output_filepath, bar, refer=refer, faker=faker,
headers=headers, **kwargs
)
bar.done()
lv = open(".everything-downloader-master/list_videos_file", "a")
lv.write(output_filename + "\n")
lv.close()
else:
parts = []
print('Downloading %s ...' % tr(output_filename))
bar.update()
for i, url in enumerate(urls):
output_filename_i = get_output_filename(urls, title, ext, output_dir, merge, part=i)
output_filepath_i = os.path.join(output_dir, output_filename_i)
parts.append(output_filepath_i)
# print 'Downloading %s [%s/%s]...' % (tr(filename), i + 1, len(urls))
bar.update_piece(i + 1)
url_save(
url, output_filepath_i, bar, refer=refer, is_part=True, faker=faker,
headers=headers, **kwargs
)
bar.done()
lv = open(".everything-downloader-master/list_videos_file", "a")
lv.write(output_filename + "\n")
lv.close()
if not merge:
print()
return
if 'av' in kwargs and kwargs['av']:
from .processor.ffmpeg import has_ffmpeg_installed
if has_ffmpeg_installed():
from .processor.ffmpeg import ffmpeg_concat_av
ret = ffmpeg_concat_av(parts, output_filepath, ext)
print('Merged into %s' % output_filename)
if ret == 0:
for part in parts:
os.remove(part)
elif ext in ['flv', 'f4v']:
try:
from .processor.ffmpeg import has_ffmpeg_installed
if has_ffmpeg_installed():
from .processor.ffmpeg import ffmpeg_concat_flv_to_mp4
ffmpeg_concat_flv_to_mp4(parts, output_filepath)
else:
from .processor.join_flv import concat_flv
concat_flv(parts, output_filepath)
print('Merged into %s' % output_filename)
except:
raise
else:
for part in parts:
os.remove(part)
elif ext == 'mp4':
try:
from .processor.ffmpeg import has_ffmpeg_installed
if has_ffmpeg_installed():
from .processor.ffmpeg import ffmpeg_concat_mp4_to_mp4
ffmpeg_concat_mp4_to_mp4(parts, output_filepath)
else:
from .processor.join_mp4 import concat_mp4
concat_mp4(parts, output_filepath)
print('Merged into %s' % output_filename)
except:
raise
else:
for part in parts:
os.remove(part)
elif ext == 'ts':
try:
from .processor.ffmpeg import has_ffmpeg_installed
if has_ffmpeg_installed():
from .processor.ffmpeg import ffmpeg_concat_ts_to_mkv
ffmpeg_concat_ts_to_mkv(parts, output_filepath)
else:
from .processor.join_ts import concat_ts
concat_ts(parts, output_filepath)
print('Merged into %s' % output_filename)
except:
raise
else:
for part in parts:
os.remove(part)
else:
print("Can't merge %s files" % ext)
print()
def download_rtmp_url(
url, title, ext, params={}, total_size=0, output_dir='.', refer=None,
merge=True, faker=False
):
assert url
if dry_run:
print_user_agent(faker=faker)
print('Real URL:\n%s\n' % [url])
if params.get('-y', False): # None or unset -> False
print('Real Playpath:\n%s\n' % [params.get('-y')])
return
if player:
from .processor.rtmpdump import play_rtmpdump_stream
play_rtmpdump_stream(player, url, params)
return
from .processor.rtmpdump import (
has_rtmpdump_installed, download_rtmpdump_stream
)
assert has_rtmpdump_installed(), 'RTMPDump not installed.'
download_rtmpdump_stream(url, title, ext, params, output_dir)
def download_url_ffmpeg(
url, title, ext, params={}, total_size=0, output_dir='.', refer=None,
merge=True, faker=False, stream=True
):
assert url
if dry_run:
print_user_agent(faker=faker)
print('Real URL:\n%s\n' % [url])
if params.get('-y', False): # None or unset ->False
print('Real Playpath:\n%s\n' % [params.get('-y')])
return
if player:
launch_player(player, [url])
return
from .processor.ffmpeg import has_ffmpeg_installed, ffmpeg_download_stream
assert has_ffmpeg_installed(), 'FFmpeg not installed.'
global output_filename
if output_filename:
dotPos = output_filename.rfind('.')
if dotPos > 0:
title = output_filename[:dotPos]
ext = output_filename[dotPos + 1:]
else:
title = output_filename
title = tr(get_filename(title))
ffmpeg_download_stream(url, title, ext, params, output_dir, stream=stream)
def playlist_not_supported(name):
def f(*args, **kwargs):
raise NotImplementedError('Playlist is not supported for ' + name)
return f
def print_info(site_info, title, type, size, **kwargs):
if json_output:
json_output_.print_info(
site_info=site_info, title=title, type=type, size=size
)
return
if type:
type = type.lower()
if type in ['3gp']:
type = 'video/3gpp'
elif type in ['asf', 'wmv']:
type = 'video/x-ms-asf'
elif type in ['flv', 'f4v']:
type = 'video/x-flv'
elif type in ['mkv']:
type = 'video/x-matroska'
elif type in ['mp3']:
type = 'audio/mpeg'
elif type in ['mp4']:
type = 'video/mp4'
elif type in ['mov']:
type = 'video/quicktime'
elif type in ['ts']:
type = 'video/MP2T'
elif type in ['webm']:
type = 'video/webm'
elif type in ['jpg']:
type = 'image/jpeg'
elif type in ['png']:
type = 'image/png'
elif type in ['gif']:
type = 'image/gif'
if type in ['video/3gpp']:
type_info = '3GPP multimedia file (%s)' % type
elif type in ['video/x-flv', 'video/f4v']:
type_info = 'Flash video (%s)' % type
elif type in ['video/mp4', 'video/x-m4v']:
type_info = 'MPEG-4 video (%s)' % type
elif type in ['video/MP2T']:
type_info = 'MPEG-2 transport stream (%s)' % type
elif type in ['video/webm']:
type_info = 'WebM video (%s)' % type
# elif type in ['video/ogg']:
# type_info = 'Ogg video (%s)' % type
elif type in ['video/quicktime']:
type_info = 'QuickTime video (%s)' % type
elif type in ['video/x-matroska']:
type_info = 'Matroska video (%s)' % type
# elif type in ['video/x-ms-wmv']:
# type_info = 'Windows Media video (%s)' % type
elif type in ['video/x-ms-asf']:
type_info = 'Advanced Systems Format (%s)' % type
# elif type in ['video/mpeg']:
# type_info = 'MPEG video (%s)' % type
elif type in ['audio/mp4', 'audio/m4a']:
type_info = 'MPEG-4 audio (%s)' % type
elif type in ['audio/mpeg']:
type_info = 'MP3 (%s)' % type
elif type in ['audio/wav', 'audio/wave', 'audio/x-wav']:
type_info = 'Waveform Audio File Format ({})'.format(type)
elif type in ['image/jpeg']:
type_info = 'JPEG Image (%s)' % type
elif type in ['image/png']:
type_info = 'Portable Network Graphics (%s)' % type
elif type in ['image/gif']:
type_info = 'Graphics Interchange Format (%s)' % type
elif type in ['m3u8']:
if 'm3u8_type' in kwargs:
if kwargs['m3u8_type'] == 'master':
type_info = 'M3U8 Master {}'.format(type)
else:
type_info = 'M3U8 Playlist {}'.format(type)
else:
type_info = 'Unknown type (%s)' % type
maybe_print('Site: ', site_info)
maybe_print('Title: ', unescape_html(tr(title)))
print('Type: ', type_info)
if type != 'm3u8':
print(
'Size: ', round(size / 1048576, 2),
'MiB (' + str(size) + ' Bytes)'
)
if type == 'm3u8' and 'm3u8_url' in kwargs:
print('M3U8 Url: {}'.format(kwargs['m3u8_url']))
print()
def mime_to_container(mime):
mapping = {
'video/3gpp': '3gp',
'video/mp4': 'mp4',
'video/webm': 'webm',
'video/x-flv': 'flv',
}
if mime in mapping:
return mapping[mime]
else:
return mime.split('/')[1]
def parse_host(host):
"""Parses host name and port number from a string.
"""
if re.match(r'^(\d+)$', host) is not None:
return ("0.0.0.0", int(host))
if re.match(r'^(\w+)://', host) is None:
host = "//" + host
o = parse.urlparse(host)
hostname = o.hostname or "0.0.0.0"
port = o.port or 0
return (hostname, port)
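# Illustrative usage of parse_host (added; not part of the original module):
#   parse_host('8000')              # -> ('0.0.0.0', 8000)
#   parse_host('example.com:8080')  # -> ('example.com', 8080)
#   parse_host('example.com')       # -> ('example.com', 0)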
def set_proxy(proxy):
proxy_handler = request.ProxyHandler({
'http': '%s:%s' % proxy,
'https': '%s:%s' % proxy,
})
opener = request.build_opener(proxy_handler)
request.install_opener(opener)
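# Note (added for clarity): set_proxy expects a (host, port) tuple rather than
# a 'host:port' string, e.g. set_proxy(('127.0.0.1', 8118)); the deprecated
# set_http_proxy further below takes a plain 'host:port' string instead.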
def unset_proxy():
proxy_handler = request.ProxyHandler({})
opener = request.build_opener(proxy_handler)
request.install_opener(opener)
# DEPRECATED in favor of set_proxy() and unset_proxy()
def set_http_proxy(proxy):
if proxy is None: # Use system default setting
proxy_support = request.ProxyHandler()
elif proxy == '': # Don't use any proxy
proxy_support = request.ProxyHandler({})
else: # Use proxy
proxy_support = request.ProxyHandler(
{'http': '%s' % proxy, 'https': '%s' % proxy}
)
opener = request.build_opener(proxy_support)
request.install_opener(opener)
def print_more_compatible(*args, **kwargs):
    """Overload the default print function, as Python < 3.3 does not support the 'flush' keyword.
    Although the function could be named print so that it overloads the builtin automatically,
    I'd rather give it a different name and only overload it when importing, to avoid confusion.
    """
    import builtins as __builtin__
# nothing happens on py3.3 and later
if sys.version_info[:2] >= (3, 3):
return __builtin__.print(*args, **kwargs)
# in lower pyver (e.g. 3.2.x), remove 'flush' keyword and flush it as requested
doFlush = kwargs.pop('flush', False)
ret = __builtin__.print(*args, **kwargs)
if doFlush:
kwargs.get('file', sys.stdout).flush()
return ret
def download_main(download, download_playlist, urls, playlist, **kwargs):
for url in urls:
if re.match(r'https?://', url) is None:
url = 'http://' + url
if playlist:
download_playlist(url, **kwargs)
else:
download(url, **kwargs)
def load_cookies(cookiefile):
global cookies
if cookiefile.endswith('.txt'):
# MozillaCookieJar treats prefix '#HttpOnly_' as comments incorrectly!
# do not use its load()
# see also:
# - https://docs.python.org/3/library/http.cookiejar.html#http.cookiejar.MozillaCookieJar
# - https://github.com/python/cpython/blob/4b219ce/Lib/http/cookiejar.py#L2014
# - https://curl.haxx.se/libcurl/c/CURLOPT_COOKIELIST.html#EXAMPLE
# cookies = cookiejar.MozillaCookieJar(cookiefile)
# cookies.load()
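        # Illustrative line layout parsed below (added for clarity): each cookie
        # line has seven tab-separated fields,
        #   domain, domain_specified, path, secure, expires, name, value
        # and lines prefixed with '#HttpOnly_' are real cookies, not comments.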
from http.cookiejar import Cookie
cookies = cookiejar.MozillaCookieJar()
now = time.time()
ignore_discard, ignore_expires = False, False
with open(cookiefile, 'r', encoding='utf-8') as f:
for line in f:
# last field may be absent, so keep any trailing tab
if line.endswith("\n"): line = line[:-1]
# skip comments and blank lines XXX what is $ for?
if (line.strip().startswith(("#", "$")) or
line.strip() == ""):
if not line.strip().startswith('#HttpOnly_'): # skip for #HttpOnly_
continue
domain, domain_specified, path, secure, expires, name, value = \
line.split("\t")
secure = (secure == "TRUE")
domain_specified = (domain_specified == "TRUE")
if name == "":
# cookies.txt regards 'Set-Cookie: foo' as a cookie
# with no name, whereas http.cookiejar regards it as a
# cookie with no value.
name = value
value = None
initial_dot = domain.startswith(".")
if not line.strip().startswith('#HttpOnly_'): # skip for #HttpOnly_
assert domain_specified == initial_dot
discard = False
if expires == "":
expires = None
discard = True
# assume path_specified is false
c = Cookie(0, name, value,
None, False,
domain, domain_specified, initial_dot,
path, False,
secure,
expires,
discard,
None,
None,
{})
if not ignore_discard and c.discard:
continue
if not ignore_expires and c.is_expired(now):
continue
cookies.set_cookie(c)
elif cookiefile.endswith(('.sqlite', '.sqlite3')):
import sqlite3, shutil, tempfile
temp_dir = tempfile.gettempdir()
temp_cookiefile = os.path.join(temp_dir, 'temp_cookiefile.sqlite')
shutil.copy2(cookiefile, temp_cookiefile)
cookies = cookiejar.MozillaCookieJar()
con = sqlite3.connect(temp_cookiefile)
cur = con.cursor()
cur.execute("""SELECT host, path, isSecure, expiry, name, value
FROM moz_cookies""")
for item in cur.fetchall():
c = cookiejar.Cookie(
0, item[4], item[5], None, False, item[0],
item[0].startswith('.'), item[0].startswith('.'),
item[1], False, item[2], item[3], item[3] == '', None,
None, {},
)
cookies.set_cookie(c)
else:
log.e('[error] unsupported cookies format')
# TODO: Chromium Cookies
# SELECT host_key, path, secure, expires_utc, name, encrypted_value
# FROM cookies
# http://n8henrie.com/2013/11/use-chromes-cookies-for-easier-downloading-with-python-requests/
def set_socks_proxy(proxy):
try:
import socks
socks_proxy_addrs = proxy.split(':')
socks.set_default_proxy(
socks.SOCKS5,
socks_proxy_addrs[0],
int(socks_proxy_addrs[1])
)
socket.socket = socks.socksocket
def getaddrinfo(*args):
return [
(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))
]
socket.getaddrinfo = getaddrinfo
except ImportError:
        log.w(
            'Error importing PySocks library, socks proxy ignored. '
            'In order to use a socks proxy, please install PySocks.'
        )
def script_main(download, download_playlist, **kwargs):
logging.basicConfig(format='[%(levelname)s] %(message)s')
def print_version():
version = get_version(
kwargs['repo_path'] if 'repo_path' in kwargs else __version__
)
log.i(
'version {}, a tiny downloader that scrapes the web.'.format(
version
)
)
parser = argparse.ArgumentParser(
prog='you-get',
usage='you-get [OPTION]... URL...',
description='A tiny downloader that scrapes the web',
add_help=False,
)
parser.add_argument(
'-V', '--version', action='store_true',
help='Print version and exit'
)
parser.add_argument(
'-h', '--help', action='store_true',
help='Print this help message and exit'
)
dry_run_grp = parser.add_argument_group(
'Dry-run options', '(no actual downloading)'
)
dry_run_grp = dry_run_grp.add_mutually_exclusive_group()
dry_run_grp.add_argument(
'-i', '--info', action='store_true', help='Print extracted information'
)
dry_run_grp.add_argument(
'-u', '--url', action='store_true',
help='Print extracted information with URLs'
)
dry_run_grp.add_argument(
'--json', action='store_true',
help='Print extracted URLs in JSON format'
)
download_grp = parser.add_argument_group('Download options')
download_grp.add_argument(
'-n', '--no-merge', action='store_true', default=False,
help='Do not merge video parts'
)
download_grp.add_argument(
'--no-caption', action='store_true',
help='Do not download captions (subtitles, lyrics, danmaku, ...)'
)
download_grp.add_argument(
'-f', '--force', action='store_true', default=False,
help='Force overwriting existing files'
)
download_grp.add_argument(
'--skip-existing-file-size-check', action='store_true', default=False,
help='Skip existing file without checking file size'
)
download_grp.add_argument(
'-F', '--format', metavar='STREAM_ID',
help='Set video format to STREAM_ID'
)
download_grp.add_argument(
'-O', '--output-filename', metavar='FILE', help='Set output filename'
)
download_grp.add_argument(
'-o', '--output-dir', metavar='DIR', default='.',
help='Set output directory'
)
download_grp.add_argument(
'-p', '--player', metavar='PLAYER',
help='Stream extracted URL to a PLAYER'
)
download_grp.add_argument(
'-c', '--cookies', metavar='COOKIES_FILE',
help='Load cookies.txt or cookies.sqlite'
)
download_grp.add_argument(
'-t', '--timeout', metavar='SECONDS', type=int, default=600,
help='Set socket timeout'
)
download_grp.add_argument(
'-d', '--debug', action='store_true',
help='Show traceback and other debug info'
)
download_grp.add_argument(
'-I', '--input-file', metavar='FILE', type=argparse.FileType('r'),
help='Read non-playlist URLs from FILE'
)
download_grp.add_argument(
'-P', '--password', help='Set video visit password to PASSWORD'
)
download_grp.add_argument(
'-l', '--playlist', action='store_true',
help='Prefer to download a playlist'
)
download_grp.add_argument(
'-a', '--auto-rename', action='store_true', default=False,
        help='Automatically rename files when names collide'
)
download_grp.add_argument(
'-k', '--insecure', action='store_true', default=False,
        help='Ignore SSL errors'
)
proxy_grp = parser.add_argument_group('Proxy options')
proxy_grp = proxy_grp.add_mutually_exclusive_group()
proxy_grp.add_argument(
'-x', '--http-proxy', metavar='HOST:PORT',
help='Use an HTTP proxy for downloading'
)
proxy_grp.add_argument(
'-y', '--extractor-proxy', metavar='HOST:PORT',
help='Use an HTTP proxy for extracting only'
)
proxy_grp.add_argument(
'--no-proxy', action='store_true', help='Never use a proxy'
)
proxy_grp.add_argument(
'-s', '--socks-proxy', metavar='HOST:PORT',
        help='Use a SOCKS5 proxy for downloading'
)
download_grp.add_argument('--stream', help=argparse.SUPPRESS)
download_grp.add_argument('--itag', help=argparse.SUPPRESS)
parser.add_argument('URL', nargs='*', help=argparse.SUPPRESS)
args = parser.parse_args()
if args.help:
print_version()
parser.print_help()
sys.exit()
if args.version:
print_version()
sys.exit()
if args.debug:
# Set level of root logger to DEBUG
logging.getLogger().setLevel(logging.DEBUG)
global force
global skip_existing_file_size_check
global dry_run
global json_output
global player
global extractor_proxy
global output_filename
global auto_rename
global insecure
output_filename = args.output_filename
extractor_proxy = args.extractor_proxy
info_only = args.info
if args.force:
force = True
if args.skip_existing_file_size_check:
skip_existing_file_size_check = True
if args.auto_rename:
auto_rename = True
if args.url:
dry_run = True
if args.json:
json_output = True
# to fix extractors not use VideoExtractor
dry_run = True
info_only = False
if args.cookies:
load_cookies(args.cookies)
caption = True
stream_id = args.format or args.stream or args.itag
if args.no_caption:
caption = False
if args.player:
player = args.player
caption = False
if args.insecure:
# ignore ssl
insecure = True
if args.no_proxy:
set_http_proxy('')
else:
set_http_proxy(args.http_proxy)
if args.socks_proxy:
set_socks_proxy(args.socks_proxy)
URLs = []
if args.input_file:
logging.debug('you are trying to load urls from %s', args.input_file)
if args.playlist:
log.e(
"reading playlist from a file is unsupported "
"and won't make your life easier"
)
sys.exit(2)
URLs.extend(args.input_file.read().splitlines())
args.input_file.close()
URLs.extend(args.URL)
if not URLs:
parser.print_help()
sys.exit()
socket.setdefaulttimeout(args.timeout)
try:
extra = {}
if extractor_proxy:
extra['extractor_proxy'] = extractor_proxy
if stream_id:
extra['stream_id'] = stream_id
download_main(
download, download_playlist,
URLs, args.playlist,
output_dir=args.output_dir, merge=not args.no_merge,
info_only=info_only, json_output=json_output, caption=caption,
password=args.password,
**extra
)
except KeyboardInterrupt:
if args.debug:
raise
else:
sys.exit(1)
except UnicodeEncodeError:
if args.debug:
raise
log.e(
'[error] oops, the current environment does not seem to support '
'Unicode.'
)
log.e('please set it to a UTF-8-aware locale first,')
log.e(
'so as to save the video (with some Unicode characters) correctly.'
)
log.e('you can do it like this:')
log.e(' (Windows) % chcp 65001 ')
log.e(' (Linux) $ LC_CTYPE=en_US.UTF-8')
sys.exit(1)
except Exception:
if not args.debug:
log.e('[error] oops, something went wrong.')
log.e(
'don\'t panic, c\'est la vie. please try the following steps:'
)
log.e(' (1) Rule out any network problem.')
log.e(' (2) Make sure you-get is up-to-date.')
log.e(' (3) Check if the issue is already known, on')
log.e(' https://github.com/soimort/you-get/wiki/Known-Bugs')
log.e(' https://github.com/soimort/you-get/issues')
log.e(' (4) Run the command with \'--debug\' option,')
log.e(' and report this issue with the full output.')
else:
print_version()
log.i(args)
raise
sys.exit(1)
def google_search(url):
keywords = r1(r'https?://(.*)', url)
url = 'https://www.google.com/search?tbm=vid&q=%s' % parse.quote(keywords)
page = get_content(url, headers=fake_headers)
videos = re.findall(
r'<a href="(https?://[^"]+)" onmousedown="[^"]+"><h3 class="[^"]*">([^<]+)<', page
)
vdurs = re.findall(r'<span class="vdur[^"]*">([^<]+)<', page)
durs = [r1(r'(\d+:\d+)', unescape_html(dur)) for dur in vdurs]
print('Google Videos search:')
for v in zip(videos, durs):
print('- video: {} [{}]'.format(
unescape_html(v[0][1]),
v[1] if v[1] else '?'
))
print('# you-get %s' % log.sprint(v[0][0], log.UNDERLINE))
print()
print('Best matched result:')
return (videos[0][0])
def url_to_module(url):
try:
video_host = r1(r'https?://([^/]+)/', url)
video_url = r1(r'https?://[^/]+(.*)', url)
assert video_host and video_url
except AssertionError:
url = google_search(url)
video_host = r1(r'https?://([^/]+)/', url)
video_url = r1(r'https?://[^/]+(.*)', url)
if video_host.endswith('.com.cn') or video_host.endswith('.ac.cn'):
video_host = video_host[:-3]
domain = r1(r'(\.[^.]+\.[^.]+)$', video_host) or video_host
assert domain, 'unsupported url: ' + url
# all non-ASCII code points must be quoted (percent-encoded UTF-8)
url = ''.join([ch if ord(ch) in range(128) else parse.quote(ch) for ch in url])
video_host = r1(r'https?://([^/]+)/', url)
video_url = r1(r'https?://[^/]+(.*)', url)
k = r1(r'([^.]+)', domain)
if k in SITES:
return (
import_module('.'.join(['you_get', 'extractors', SITES[k]])),
url
)
else:
try:
location = get_location(url) # t.co isn't happy with fake_headers
except:
location = get_location(url, headers=fake_headers)
if location and location != url and not location.startswith('/'):
return url_to_module(location)
else:
return import_module('you_get.extractors.universal'), url
def any_download(url, **kwargs):
m, url = url_to_module(url)
m.download(url, **kwargs)
def any_download_playlist(url, **kwargs):
m, url = url_to_module(url)
m.download_playlist(url, **kwargs)
def main(**kwargs):
script_main(any_download, any_download_playlist, **kwargs)
| 31.465614
| 105
| 0.557005
|
c83734468693fa499f20034e79fe4f727b363d5f
| 6,265
|
py
|
Python
|
hata/discord/activity/activity_custom.py
|
Multiface24111/hata
|
cd28f9ef158e347363669cc8d1d49db0ff41aba0
|
[
"0BSD"
] | 173
|
2019-06-14T20:25:00.000Z
|
2022-03-21T19:36:10.000Z
|
hata/discord/activity/activity_custom.py
|
Multiface24111/hata
|
cd28f9ef158e347363669cc8d1d49db0ff41aba0
|
[
"0BSD"
] | 52
|
2020-01-03T17:05:14.000Z
|
2022-03-31T11:39:50.000Z
|
hata/discord/activity/activity_custom.py
|
Multiface24111/hata
|
cd28f9ef158e347363669cc8d1d49db0ff41aba0
|
[
"0BSD"
] | 47
|
2019-11-09T08:46:45.000Z
|
2022-03-31T14:33:34.000Z
|
__all__ = ('ActivityCustom',)
from ...backend.export import include
from ..utils import DISCORD_EPOCH_START, unix_time_to_datetime, datetime_to_unix_time
from .activity_base import ActivityBase
from . import activity_types as ACTIVITY_TYPES
create_partial_emoji_from_data = include('create_partial_emoji_from_data')
class ActivityCustom(ActivityBase):
"""
Represents a Discord custom activity.
Attributes
----------
    created_at : `datetime`
        When the status was created. Defaults to the Discord epoch.
emoji : `None` or ``Emoji``
The emoji of the activity. If it has no emoji, then set as `None`.
state : `None` or `str`
        The activity's text under its emoji. Defaults to `None`.
Class Attributes
----------------
id : `int` = `0`
The activity's id.
type : `int` = `4`
The activity's type value.
"""
__slots__ = ('created_at', 'emoji', 'state', )
type = ACTIVITY_TYPES.custom
@classmethod
def from_data(cls, activity_data):
"""
Creates a new ``ActivityCustom`` instance from the given activity data.
Parameters
----------
activity_data : `dict` of (`str`, `Any`) items
Received activity data.
Returns
-------
self : ``ActivityCustom``
"""
self = object.__new__(cls)
self._update_attributes(activity_data)
return self
def __hash__(self):
"""Returns the activity's hash value."""
state = self.state
emoji = self.emoji
if (state is None):
if (emoji is None):
hash_ = 0
else:
hash_ = emoji.id
else:
hash_ = hash(state)
if (emoji is not None):
                hash_ ^= emoji.id
return hash_
@property
def name(self):
"""
Returns the activity's display text.
Returns
-------
name : `str`
"""
state = self.state
emoji = self.emoji
if (state is None):
if (emoji is None):
name = ''
else:
name = emoji.as_emoji
else:
if (emoji is None):
name = state
else:
name = f'{emoji.as_emoji} {state}'
return name
def _update_attributes(self, activity_data):
"""
        Updates the activity by overwriting its old attributes.
Parameters
----------
activity_data : `dict` of (`str`, `Any`) items
Data received from Discord.
"""
self.state = activity_data.get('state', None)
emoji_data = activity_data.get('emoji', None)
if emoji_data is None:
emoji = None
else:
emoji = create_partial_emoji_from_data(emoji_data)
self.emoji = emoji
created_at = activity_data.get('created_at', None)
if created_at is None:
created_at = DISCORD_EPOCH_START
else:
created_at = unix_time_to_datetime(created_at)
self.created_at = created_at
def _difference_update_attributes(self, activity_data):
"""
Updates the activity and returns the changes in a `dict` of (`attribute-name`, `old-value`) items.
Parameters
----------
activity_data : `dict` of (`str`, `Any`) items
Data received from Discord.
Returns
-------
changes : `dict` of (`str`, `Any`) items
            Every item in the returned dict is optional.
The returned items might be the following:
+---------------+-----------------------+
| key | value |
+===============+=======================+
| created_at | `datetime` |
+---------------+-----------------------+
| emoji | `None` or ``Emoji`` |
+---------------+-----------------------+
| state | `None` or `str` |
+---------------+-----------------------+
"""
old_attributes = {}
state = activity_data.get('state', None)
if self.state != state:
old_attributes['state'] = self.state
self.state = state
emoji_data = activity_data.get('emoji', None)
if emoji_data is None:
emoji = None
else:
emoji = create_partial_emoji_from_data(emoji_data)
if self.emoji != emoji:
old_attributes['emoji'] = self.emoji
self.emoji = emoji
created_at = activity_data.get('created_at', None)
if created_at is None:
created_at = DISCORD_EPOCH_START
else:
created_at = unix_time_to_datetime(created_at)
if self.created_at != created_at:
old_attributes['created_at'] = self.created_at
self.created_at = created_at
return old_attributes
def full_dict(self):
"""
Converts the whole activity to a dictionary.
Returns
-------
activity_data : `dict` of (`str`, `Any`) items
"""
activity_data = {
'name': 'Custom Status',
'id': 'custom',
}
emoji = self.emoji
if (emoji is not None):
emoji_data = {}
if emoji.is_custom_emoji():
emoji_data['name'] = emoji.name
emoji_data['id'] = emoji.id
if emoji.animated:
emoji_data['animated'] = True
else:
emoji_data['name'] = emoji.unicode
            activity_data['emoji'] = emoji_data
state = self.state
if (state is not None):
activity_data['state'] = state
created_at = self.created_at
if created_at != DISCORD_EPOCH_START:
activity_data['created_at'] = datetime_to_unix_time(created_at)
return activity_data
| 30.2657
| 106
| 0.494334
|
ec01d62249cdc9a474f276da2ca77fb9d773b581
| 12,157
|
py
|
Python
|
hypercluster/visualize.py
|
liliblu/autocluster
|
95180aaaa42927bfd7fadedb11fb5e577851e387
|
[
"BSD-3-Clause"
] | 20
|
2020-01-16T01:57:34.000Z
|
2021-12-09T01:51:47.000Z
|
hypercluster/visualize.py
|
liliblu/autocluster
|
95180aaaa42927bfd7fadedb11fb5e577851e387
|
[
"BSD-3-Clause"
] | 2
|
2021-02-03T16:29:09.000Z
|
2021-02-05T15:07:44.000Z
|
hypercluster/visualize.py
|
liliblu/autocluster
|
95180aaaa42927bfd7fadedb11fb5e577851e387
|
[
"BSD-3-Clause"
] | 1
|
2020-02-21T20:43:18.000Z
|
2020-02-21T20:43:18.000Z
|
from typing import List, Optional
import logging
from collections import Counter
from itertools import cycle
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns
from pandas import DataFrame
from scipy.cluster import hierarchy
from scipy.spatial.distance import pdist
from hypercluster.constants import param_delim
from hypercluster.utilities import convert_to_multiind, evaluate_one
matplotlib.rcParams["pdf.fonttype"] = 42
matplotlib.rcParams["ps.fonttype"] = 42
sns.set(font="arial", style="white", color_codes=True, font_scale=1.3)
matplotlib.rcParams.update({"savefig.bbox": "tight"})
cmap = sns.cubehelix_palette(
start=0,
rot=0.4,
gamma=1.0,
hue=0.82,
light=1,
dark=0,
reverse=False,
as_cmap=True
)
cmap.set_over('black')
cmap.set_under('white')
cmap.set_bad("#DAE0E6")
def zscore(df):
"""Row zscores a DataFrame, ignores np.nan
Args:
df (DataFrame): DataFrame to z-score
Returns (DataFrame):
Row-zscored DataFrame.
"""
return df.subtract(df.mean(axis=1), axis=0).divide(df.std(axis=1), axis=0)
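# Illustrative usage (added; not part of the original module):
#   import pandas as pd
#   df = pd.DataFrame({"s1": [1.0, 10.0], "s2": [2.0, 20.0], "s3": [3.0, 30.0]})
#   zscore(df)  # each row now has mean 0 and standard deviation 1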
def compute_order(
df,
dist_method: str = "euclidean",
cluster_method: str = "average"
):
"""Gives hierarchical clustering order for the rows of a DataFrame
Args:
df (DataFrame): DataFrame with rows to order.
        dist_method (str): Distance metric to pass to scipy.spatial.distance.pdist.
        cluster_method (str): Linkage method to pass to scipy.cluster.hierarchy.linkage.
Returns (pandas.Index):
Ordered row index.
"""
dist_mat = pdist(df, metric=dist_method)
link_mat = hierarchy.linkage(dist_mat, method=cluster_method)
return df.index[hierarchy.leaves_list(hierarchy.optimal_leaf_ordering(link_mat, dist_mat))]
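# Illustrative usage (added; not part of the original module):
#   import numpy as np, pandas as pd
#   df = pd.DataFrame(np.random.rand(5, 3), index=list("abcde"))
#   df = df.loc[compute_order(df)]  # rows reordered by hierarchical clustering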
def visualize_evaluations(
evaluations_df: DataFrame,
savefig: bool = False,
output_prefix: str = "evaluations",
**heatmap_kws
) -> List[matplotlib.axes.Axes]:
"""Makes a z-scored visualization of all evaluations.
Args:
evaluations_df (DataFrame): Evaluations dataframe from clustering.optimize_clustering
output_prefix (str): If saving a figure, file prefix to use.
savefig (bool): Whether to save a pdf
**heatmap_kws: Additional keyword arguments to pass to seaborn.heatmap.
Returns (List[matplotlib.axes.Axes]):
List of all matplotlib axes.
"""
clusterers = sorted(
list(set([i.split(param_delim, 1)[0] for i in evaluations_df.columns]))
)
width_ratios = [
dict(
Counter(
[i.split(param_delim, 1)[0] for i in evaluations_df.columns]
)
)[clus]
for clus in clusterers
]
evaluations_df = zscore(evaluations_df)
width = 0.18 * (len(evaluations_df.columns) + 2 + (0.01 * (len(clusterers) - 1)))
height = 0.22 * (len(evaluations_df))
fig, axs = plt.subplots(
figsize=(width, height),
nrows=1,
ncols=(len(clusterers) + 1),
gridspec_kw=dict(
width_ratios=width_ratios + [2],
wspace=0.01,
left=0,
right=1,
top=1,
bottom=0,
),
)
vmin = np.nanquantile(evaluations_df, 0.1)
vmax = np.nanquantile(evaluations_df, 0.9)
heatmap_kws['cmap'] = heatmap_kws.get('cmap', cmap)
heatmap_kws['vmin'] = heatmap_kws.get('vmin', vmin)
heatmap_kws['vmax'] = heatmap_kws.get('vmax', vmax)
for i, clus in enumerate(clusterers):
temp = convert_to_multiind(clus, evaluations_df)
ax = axs[i]
sns.heatmap(
temp,
ax=ax,
yticklabels=temp.index,
xticklabels=["-".join([str(i) for i in col]) for col in temp.columns],
cbar_ax=axs[-1],
cbar_kws=dict(label="z-score"),
**heatmap_kws
)
ax.set_ylabel("")
ax.set_title(clus)
ax.set_yticklabels([])
axs[0].set_ylabel("evaluation method")
axs[0].set_yticklabels(temp.index, rotation=0)
if savefig:
plt.savefig("%s.pdf" % output_prefix)
return axs
def visualize_pairwise(
df: DataFrame,
savefig: bool = False,
output_prefix: Optional[str] = None,
method: Optional[str] = None,
**heatmap_kws
) -> List[matplotlib.axes.Axes]:
"""Visualize symmetrical square DataFrames.
Args:
df (DataFrame): DataFrame to visualize.
savefig (bool): Whether to save a pdf.
output_prefix (str): If saving a pdf, file prefix to use.
method (str): Label for cbar, if relevant.
**heatmap_kws: Additional keywords to pass to `seaborn.heatmap`_
Returns (List[matplotlib.axes.Axes]):
List of matplotlib axes for figure.
.. _seaborn.heatmap:
https://seaborn.pydata.org/generated/seaborn.heatmap.html
"""
heatmap_kws = {**heatmap_kws}
vmin = np.nanquantile(df, 0.1)
vmax = np.nanquantile(df, 0.9)
heatmap_kws['cmap'] = heatmap_kws.get('cmap', cmap)
heatmap_kws['vmin'] = heatmap_kws.get('vmin', vmin)
heatmap_kws['vmax'] = heatmap_kws.get('vmax', vmax)
cbar_kws = heatmap_kws.get('cbar_kws', {})
cbar_kws['label'] = cbar_kws.get('label', method)
heatmap_kws['cbar_kws'] = cbar_kws
cbar_ratio = 2
wspace = 0.01
height = 0.18 * len(df)
width = 0.18 * (len(df.columns)+cbar_ratio+wspace)
fig, axs = plt.subplots(
figsize=(width, height),
nrows=1,
ncols=2,
gridspec_kw=dict(
width_ratios=[len(df.columns), cbar_ratio],
wspace=wspace,
left=0,
right=1,
top=1,
bottom=0,
)
)
try:
order = compute_order(df.fillna(df.median()))
except ValueError:
order = df.index
df = df.loc[order, order]
sns.heatmap(
df,
xticklabels=order,
yticklabels=order,
ax=axs[0],
cbar_ax=axs[1],
**heatmap_kws
)
if savefig:
if output_prefix is None:
output_prefix = "heatmap.pairwise"
plt.savefig('%s.pdf' % output_prefix)
return axs
def visualize_label_agreement(
labels: DataFrame,
method: Optional[str] = None,
savefig: bool = False,
output_prefix: Optional[str] = None,
**heatmap_kws
) -> List[matplotlib.axes.Axes]:
"""Visualize similarity between clustering results given an evaluation metric.
Args:
labels (DataFrame): Labels DataFrame, e.g. from optimize_clustering or \
AutoClusterer.labels_
method (str): Method with which to compare labels. Must be a metric like the ones in \
constants.need_ground_truth, which takes two sets of labels.
savefig (bool): Whether to save a pdf.
output_prefix (str): If saving a pdf, file prefix to use.
**heatmap_kws: Additional keywords to pass to `seaborn.heatmap`_
Returns (List[matplotlib.axes.Axes]):
List of matplotlib axes
.. _seaborn.heatmap:
https://seaborn.pydata.org/generated/seaborn.heatmap.html
"""
if savefig and output_prefix is None:
output_prefix = 'heatmap.labels.pairwise'
if method is None:
method = 'adjusted_rand_score'
labels = labels.astype(float).corr(
lambda x, y: evaluate_one(x, method=method, gold_standard=y)
)
return visualize_pairwise(labels, savefig, output_prefix, method=method, **heatmap_kws)
def visualize_sample_label_consistency(
labels: DataFrame,
savefig: bool = False,
output_prefix: Optional[str] = None,
**heatmap_kws
) -> List[matplotlib.axes.Axes]:
"""Visualize how often two samples are labeled in the same group across conditions. Interpret
with care--if you use more conditions for some type of clusterers, e.g. more n_clusters for
KMeans, those cluster more similarly across conditions than between clusterers. This means
that more agreement in labeling could be due to the choice of clusterers rather than true
similarity between samples.
Args:
labels (DataFrame): Labels DataFrame, e.g. from optimize_clustering or \
AutoClusterer.labels_
savefig (bool): Whether to save a pdf.
output_prefix (str): If saving a pdf, file prefix to use.
**heatmap_kws: Additional keywords to pass to `seaborn.heatmap`_
Returns (List[matplotlib.axes.Axes]):
List of matplotlib axes
.. _seaborn.heatmap:
https://seaborn.pydata.org/generated/seaborn.heatmap.html
"""
if savefig and output_prefix is None:
output_prefix = "heatmap.sample.pairwise"
#TODO change this to much faster matmult
labels = labels.transpose().astype(float).corr(lambda x, y: sum(
np.equal(x[((x != -1) | (y != -1))], y[((x != -1) | (y != -1))])
))
return visualize_pairwise(labels, savefig, output_prefix, method='# same label', **heatmap_kws)
def visualize_for_picking_labels(
evaluation_df: DataFrame,
method: Optional[str] = None,
savefig_prefix: Optional[str] = None
):
"""Generates graphs similar to a `scree graph`_ for PCA for each parameter and each clusterer.
Args:
evaluation_df (DataFrame): DataFrame of evaluations to visualize. Clusterer.evaluation_df.
method (str): Which metric to visualize.
savefig_prefix (str): If not None, save a figure with give prefix.
Returns:
matplotlib axes.
.. _scree graph:
https://en.wikipedia.org/wiki/Scree_plot
"""
if method is None:
method = "silhouette_score"
cluss_temp = list(set([i.split(param_delim, 1)[0] for i in evaluation_df.columns]))
# get figure dimensions
ncols = 0
cluss = []
for ploti, clus in enumerate(cluss_temp):
scores = convert_to_multiind(
clus, evaluation_df.loc[[method], :]
).transpose().dropna(how='any')
if len(scores) == 0:
logging.error(
'Score %s is missing for clusterer %s, skipping visualization' % (method, clus)
)
continue
indep = scores.index.to_frame().reset_index(drop=True)
try:
indep.astype(float)
        except (ValueError, AssertionError):
logging.error('Cannot convert %s data to floats, skipping visualization' % clus)
continue
cluss.append(clus)
if scores.index.nlevels > ncols:
ncols = scores.index.nlevels
if not cluss:
logging.error('No valid clusterers, cannot visualize. ')
return None
cluss.sort()
ybuff = np.abs(np.nanquantile(evaluation_df.loc[method], 0.05))
ylim = (evaluation_df.loc[method].min() - ybuff, evaluation_df.loc[method].max() + ybuff)
colors = cycle(sns.color_palette('twilight', n_colors=len(cluss) * ncols))
fig = plt.figure(figsize=(5 * (ncols), 5 * len(cluss)))
gs = plt.GridSpec(nrows=len(cluss), ncols=ncols, wspace=0.25, hspace=0.25)
for ploti, clus in enumerate(cluss):
scores = convert_to_multiind(
clus, evaluation_df.loc[[method], :]
).transpose().dropna(how='any')
indep = scores.index.to_frame().reset_index(drop=True)
for whcol, col in enumerate(indep.columns):
if whcol == 0:
saveax = plt.subplot(gs[ploti, whcol])
ax = saveax
ax.set_ylim(ylim)
ax.set_ylabel(clus)
else:
ax = plt.subplot(gs[ploti, whcol], sharey=saveax)
color = next(colors)
# plot eval results
sns.regplot(
indep[col],
scores[method].values,
color=color,
ax=ax,
logistic=True,
)
axs = fig.get_axes()
axs[0].set_title('%s results per parameter' % method, ha='left')
if savefig_prefix:
plt.savefig('%s.pdf' % savefig_prefix)
return axs
| 32.945799
| 100
| 0.619972
|
56d5a9046d32b5ac1fb6b02698455e7a16af9bce
| 1,287
|
py
|
Python
|
DataTrigraph.py
|
MustafaYalciner/KeystrokeIdNeuralNetwork
|
8ad334492e2aec845766863707f4c8212217427f
|
[
"MIT"
] | 1
|
2020-09-05T11:20:29.000Z
|
2020-09-05T11:20:29.000Z
|
DataTrigraph.py
|
MustafaYalciner/KeystrokeIdNeuralNetwork
|
8ad334492e2aec845766863707f4c8212217427f
|
[
"MIT"
] | null | null | null |
DataTrigraph.py
|
MustafaYalciner/KeystrokeIdNeuralNetwork
|
8ad334492e2aec845766863707f4c8212217427f
|
[
"MIT"
] | null | null | null |
import csv
class DataTrigraph:
if __name__ == '__main__':
PREV_KEY_INDEX = 2
USER_ID_INDEX = 16
print('Test')
all = []
input_file = open('data/featureset.csv')
with open('data/output.csv', 'w') as csvoutput:
writer = csv.writer(csvoutput, lineterminator='\n')
reader = csv.reader(input_file)
header = next(reader)
header.append('KEYCODE_TRI')
writer.writerow(header)
print(header)
previousRow = next(reader)
prevKeycodeInPrevRow = previousRow[PREV_KEY_INDEX]
userInPrevRow = previousRow[USER_ID_INDEX]
previousRow.append(previousRow[PREV_KEY_INDEX])
writer.writerow(previousRow)
print('keycode', prevKeycodeInPrevRow)
print('user', userInPrevRow)
for item in reader:
if item[USER_ID_INDEX] == userInPrevRow:
item.append(prevKeycodeInPrevRow)
else:
item.append(item[PREV_KEY_INDEX])
prevKeycodeInPrevRow = item[PREV_KEY_INDEX]
userInPrevRow = item[USER_ID_INDEX]
writer.writerow(item)
input_file.close()
print('success')
| 35.75
| 63
| 0.567211
|
7aaa77bcfff98bddb746cf3cda348ae316e63ebe
| 725
|
py
|
Python
|
scripts/f1_plot.py
|
debajyotidasgupta/Multilingual-Event-Extraction
|
1f6f03f6b3a8d4789669bf7303be3e3b69314956
|
[
"MIT"
] | null | null | null |
scripts/f1_plot.py
|
debajyotidasgupta/Multilingual-Event-Extraction
|
1f6f03f6b3a8d4789669bf7303be3e3b69314956
|
[
"MIT"
] | null | null | null |
scripts/f1_plot.py
|
debajyotidasgupta/Multilingual-Event-Extraction
|
1f6f03f6b3a8d4789669bf7303be3e3b69314956
|
[
"MIT"
] | null | null | null |
from sys import argv
import matplotlib.pyplot as plt
def calc_plot(fname):
with open(fname, "r") as f:
lines = f.readlines()
f1_scores = []
epoch_num = []
for l in lines:
if l.startswith("F1"):
f1_scores.append(float(l.strip().split(":")[1]))
elif l.startswith("Epoch"):
epoch_num.append(int(l.strip().split(":")[1]))
return f1_scores, epoch_num
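# Illustrative log lines parsed by calc_plot (added; values are made up):
#   Epoch: 1
#   F1: 0.4231
# Each "F1:" line contributes a score and each "Epoch:" line an epoch number.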
if __name__ == "__main__":
f1_beng, epoch_num = calc_plot(argv[1])
plt.plot(epoch_num, f1_beng, label="Bengali")
f1_hindi, epoch_num = calc_plot(argv[2])
plt.plot(epoch_num, f1_hindi, label="Hindi")
plt.xlabel("Epoch")
plt.ylabel("F1 score")
plt.legend()
plt.show()
| 22.65625
| 60
| 0.602759
|
328527d6e444cc5e6448cb7222f0fe2fe9beee52
| 2,003
|
py
|
Python
|
.history/my_classes/FirstClassFunctions/MapFilterZipList_20210706154936.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
.history/my_classes/FirstClassFunctions/MapFilterZipList_20210706154936.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
.history/my_classes/FirstClassFunctions/MapFilterZipList_20210706154936.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
"""Map Filter Zip List Comprehensions
Higher order functions
A function that takes a function as a parameter and/or returns a function as its return value
Example: sorted
map    \
        |
        |-- modern alternative -> list comprehensions and generator expressions
        |
filter /
The map function
map(func, *iterables)
*iterables -> a variable number of iterable objects
func -> some function that takes as many arguments as there are iterable objects passed in
map(func, *iterables) will then return an iterator that calculates the function applied to each element of the iterables
The iterator stops as soon as one of the iterables has been exhausted, so unequal-length iterables can be used
Examples
"""
l = [2, 3, 4]
def sq(x):
return x**2
list(map(sq, l))  # [4, 9, 16]
l1 = [1, 2, 3]
l2 = [10, 20, 30]
def add(x, y):
return x + y
list(map(add, l1, l2)) # [11, 22, 33]
"""The filter function
filter(func, iterable)
iterable -> a single iterable
func -> some function that takes a single argument
filter(func, iterable) will then return an iterator that contains all the elements of the iterable for which the function called on it is Truthy
If the function is None, it simply returns the elements of iterable that are Truthy
Examples
"""
l = [0, 1, 2, 3, 4] # 0 is Falsey, all the other numbers are Truthy
list(filter(None, l)) # [1, 2, 3, 4]
def is_even(n):
return n % 2 == 0
list(filter(is_even, l)) # [0, 2, 4]
list(filter(lambda n: n % 2 == 0, l)) # [0, 2, 4]
"""The zip function zip(*iterables) # this is not a high order function that takes multiple iterables and return one interable
[1, 2, 3, 4] zip
#--------> (1, 10), (2, 20), (3, 30), (4, 40)
(10, 20, 30, 40)
[1, 2, 3] zip
[10, 20, 30]     #--------> (1, 10, 'a'), (2, 20, 'b'), (3, 30, 'c')
['a', 'b', 'c']
"""
| 21.771739
| 144
| 0.597604
|
91c61063bb2f4d9030d75da028ac2bb463cf3cfc
| 6,154
|
py
|
Python
|
hack/monitor_prow.py
|
crombus/release
|
e205e65efd2a5e56ab2c2df8d91c10e059ceb294
|
[
"Apache-2.0"
] | null | null | null |
hack/monitor_prow.py
|
crombus/release
|
e205e65efd2a5e56ab2c2df8d91c10e059ceb294
|
[
"Apache-2.0"
] | null | null | null |
hack/monitor_prow.py
|
crombus/release
|
e205e65efd2a5e56ab2c2df8d91c10e059ceb294
|
[
"Apache-2.0"
] | null | null | null |
#!/bin/env python3
import glob
import multiprocessing.dummy as multiprocessing
import subprocess
import sys
import tempfile
import time
import json
import os
exec_cmd = lambda *cmd: subprocess.check_output(cmd).decode('utf-8')
RED = exec_cmd('tput', 'setaf', '1')
GREEN = exec_cmd('tput', 'setaf', '2')
YELLOW = exec_cmd('tput', 'setaf', '3')
BOLD = exec_cmd('tput', 'bold')
RESET = exec_cmd('tput', 'sgr0')
CLEAR = exec_cmd('tput', 'clear')
def run_oc(args):
command = ['oc', '--loglevel', '10', '--namespace', 'ci'] + args
try:
process = subprocess.run(command, capture_output=True, check=True)
except subprocess.CalledProcessError as exc:
print(exc.stderr.decode('utf-8'))
raise
return process.stdout.decode('utf-8')
def debug(msg):
if os.environ.get("DEBUG", "") == "true":
print(msg)
def main():
dcs = run_oc(['get', 'deployment', '--selector', 'app=prow', '--output', 'jsonpath={.items[*].metadata.name}']).split()
with tempfile.TemporaryDirectory() as log_dir:
fs = [(display, log_dir), *((highlight, log_dir, x) for x in dcs)]
with multiprocessing.Pool(len(fs)) as pool:
for _ in pool.imap_unordered(lambda x: x[0](*x[1:]), fs):
pass # a check for exceptions is implicit in the iteration
def display(log_dir):
logs = log_dir + '/*.log'
while True:
sys.stdout.write(CLEAR)
for log in sorted(glob.glob(logs)):
with open(log) as f:
if sys.stdout.write(f.read()):
sys.stdout.write('\n')
time.sleep(5)
def highlight(log_dir, dc):
warn = '"level":"warning"'
error = '"level":"error"'
fatal = '"level":"fatal"'
log = '{}/{}.log'.format(log_dir, dc)
while True:
debug("deployment/{}: gathering info".format(dc))
header = renderHeader(dc)
lines = []
log_lines = []
for pod in run_oc(['get', 'pods', '--selector', 'component={}'.format(dc), '--output', 'jsonpath={.items[*].metadata.name}']).split():
debug("deployment/{}: pod/{}: gathering info".format(dc, pod))
lines.extend(renderFlavor(pod, dc))
cmd = ['logs', '--since', '20m', 'pod/{}'.format(pod)]
if dc == 'deck-internal':
cmd += ['--container', 'deck']
if dc == 'boskos':
cmd += ['--container', 'boskos']
debug("deployment/{}: pod/{}: getting logs".format(dc, pod))
try:
for l in run_oc(cmd).splitlines():
if warn in l:
log_lines.append(YELLOW + l + RESET)
elif error in l or fatal in l:
log_lines.append(RED + l + RESET)
except subprocess.CalledProcessError:
debug("deployment/{}: pod/{}: getting logs failed".format(dc, pod))
if not log_lines and not lines:
header = "{} {}{}{}".format(header, GREEN, "OK", RESET)
with open(log, 'w') as f:
f.write('\n'.join([header, *lines, *log_lines[-5:]]))
time.sleep(60)
def renderHeader(dc):
debug("deployment/{}: rendering header".format(dc))
rawdc = json.loads(run_oc(['get', 'deployment/{}'.format(dc), '--output', 'json']))
spec = rawdc.get("spec", {})
status = rawdc.get("status", {})
desired = spec.get("replicas", 0)
current = status.get("replicas", 0)
updated = status.get("updatedReplicas", 0)
available = status.get("availableReplicas", 0)
version = "<unknown-version>"
containers = spec.get("template", {}).get("spec", {}).get("containers", [])
for container in containers:
if container.get("name") == dc:
image = container.get("image", "")
version = image.split(":")[-1]
headerColor = ''
if desired != current:
headerColor = RED
message = '{} at {} [{}/{}]'.format(dc, version, current, desired)
if updated != desired:
message += ' ({} stale replicas)'.format(desired - updated)
if available != desired:
message += ' ({} unavailable replicas)'.format(desired - available)
header = '{}{}{}:{}'.format(BOLD, headerColor, message, RESET)
debug("deployment/{}: got header {}".format(dc, header))
return header
def renderFlavor(pod, dc):
debug("deployment/{}: pod/{}: rendering flavor".format(dc, pod))
lines = []
raw = json.loads(run_oc(['get', 'pod/{}'.format(pod), '--output', 'json']))
status = raw.get("status", {})
phase = status.get("phase", "")
if phase != "Running":
reason = status.get("reason", "")
message = status.get("message", "")
color = YELLOW
if phase in ["Failed", "Unknown", "CrashLoopBackOff"]:
color = RED
lines.append(color + "pod {} is {}: {}, {}".format(pod, phase, reason, message))
for container in status.get("containerStatuses", []):
debug("pod/{}: handling status for container {}".format(pod, container.get("name", "")))
if container.get("name") == dc:
state = container.get("state", {})
if "running" not in state:
if "waiting" in state:
reason = state["waiting"].get("reason")
message = state["waiting"].get("message")
lines.append(YELLOW + "pod {} is waiting: {}".format(pod, reason) + RESET)
lines.append(YELLOW + "\t{}".format(message) + RESET)
if "terminated" in state:
reason = state["terminated"].get("reason")
message = state["terminated"].get("message")
lines.append(RED + "pod {} is terminated: {}".format(pod, reason) + RESET)
lines.append(RED + "\t{}".format(message) + RESET)
restartCount = container.get("restartCount", 0)
if restartCount != 0:
lines.append(RED + "pod {} has restarted {} times".format(pod, restartCount) + RESET)
debug("deployment/{}: pod/{}: got flavor {}".format(dc, pod, lines))
return lines
if __name__ == '__main__':
main()
| 38.949367
| 142
| 0.554111
|
7a9f7862a6acff5df5d853865a6341d61bedbda0
| 11,390
|
py
|
Python
|
pychron/experiment/experimentor.py
|
aelamspychron/pychron
|
ad87c22b0817c739c7823a24585053041ee339d5
|
[
"Apache-2.0"
] | 1
|
2019-02-27T21:57:44.000Z
|
2019-02-27T21:57:44.000Z
|
pychron/experiment/experimentor.py
|
aelamspychron/pychron
|
ad87c22b0817c739c7823a24585053041ee339d5
|
[
"Apache-2.0"
] | 20
|
2020-09-09T20:58:39.000Z
|
2021-10-05T17:48:37.000Z
|
pychron/experiment/experimentor.py
|
AGESLDEO/pychron
|
1a81e05d9fba43b797f335ceff6837c016633bcf
|
[
"Apache-2.0"
] | null | null | null |
# ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
from traits.api import Instance, List, on_trait_change, Bool, Event
from pychron.dvc.dvc_irradiationable import DVCIrradiationable
from pychron.experiment.experiment_executor import ExperimentExecutor
from pychron.experiment.factory import ExperimentFactory
from pychron.experiment.queue.experiment_queue import ExperimentQueue
class Experimentor(DVCIrradiationable):
experiment_factory = Instance(ExperimentFactory)
experiment_queue = Instance(ExperimentQueue)
executor = Instance(ExperimentExecutor)
experiment_queues = List
# stats = Instance(StatsGroup, ())
mode = None
# unique_executor_db = False
save_enabled = Bool
# ===========================================================================
# permissions
# ===========================================================================
# max_allowable_runs = 10000
# can_edit_scripts = True
# _last_ver_time = None
# _ver_timeout = 10
# ===========================================================================
# task events
# ===========================================================================
activate_editor_event = Event
save_event = Event
def prepare_destory(self):
if self.executor:
if self.executor.datahub:
self.executor.datahub.prepare_destroy()
if self.experiment_factory:
if self.experiment_factory.run_factory:
if self.experiment_factory.run_factory.datahub:
self.experiment_factory.run_factory.datahub.prepare_destroy()
def load(self):
self.experiment_factory.queue_factory.db_refresh_needed = True
self.experiment_factory.run_factory.db_refresh_needed = True
return True
def reset_run_generator(self):
if self.executor.is_alive():
self.debug('Queue modified. Reset run generator')
# self.executor.queue_modified = True
self.executor.set_queue_modified()
def refresh_executable(self, qs=None):
if qs is None:
qs = self.experiment_queues
if self.executor.is_alive():
qs = (self.executor.experiment_queue,)
self.executor.executable = all([ei.is_executable() for ei in qs])
self.debug('setting executable {}'.format(self.executor.executable))
def update_queues(self):
self._update_queues()
def update_info(self):
self._update()
# ===============================================================================
# info update
# ===============================================================================
def _get_all_automated_runs(self, qs=None):
if qs is None:
qs = self.experiment_queues
return [ai for ei in qs
for ai in ei.automated_runs
if ai.executable]
def _update(self, queues=None):
self.debug('update runs')
if queues is None:
queues = self.experiment_queues
queues = [qi for qi in queues if qi.is_updateable()]
if not queues:
return
self.debug('executor executable {}'.format(self.executor.executable))
self.debug('updating stats, ')
self.executor.stats.calculate()
self.refresh_executable(queues)
self._set_analysis_metadata()
self.debug('info updated')
for qi in queues:
qi.refresh_table_needed = True
def _set_analysis_metadata(self):
cache = dict()
db = self.get_database()
aruns = self._get_all_automated_runs()
with db.session_ctx():
for ai in aruns:
if ai.skip:
continue
ln = ai.labnumber
if ln == 'dg':
continue
# is run in cache
if ln not in cache:
info = db.get_identifier_info(ln)
self.debug('Info for {}={}'.format(ln, info))
if not info:
cache[ln] = dict(identifier_error=True)
else:
info['identifier_error'] = False
cache[ln] = info
ai.trait_set(**cache[ln])
def execute_queues(self, queues):
names = ','.join([e.name for e in queues])
self.debug('queues: n={}, names={}'.format(len(queues), names))
self.executor.trait_set(experiment_queues=queues, experiment_queue=queues[0])
return self.executor.execute()
def verify_database_connection(self, inform=True):
db = self.get_database()
if db is not None:
if db.connect(force=True):
return True
elif inform:
self.warning_dialog('No Database available')
def sync_queue(self, queue):
ms = queue.mass_spectrometer
ed = queue.extract_device
db = self.get_database()
with db.session_ctx():
next_pos = None
for i, ai in enumerate(queue.automated_runs):
if ai.skip or ai.is_special():
continue
kw = {'identifier': ai.identifier, 'position': ai.position,
'mass_spectrometer': ms.lower(),
'extract_device': ed}
if ai.is_step_heat():
kw['aliquot'] = ai.aliquot
kw['extract_value'] = ai.extract_value
self.debug('checking {}/{}. attr={}'.format(i, ai.runid, kw))
aa = db.get_analysis_by_attr(**kw)
if aa is None:
self.debug('----- not found')
if next_pos == ai:
i -= 1
break
elif not self.confirmation_dialog('Found analyses up to {}. '
'position={}, extract={}. '
'Continue searching?'.format(ai.runid, ai.extract_value,
ai.position)):
break
next_pos = queue.automated_runs[i + 1]
if i:
if i == len(queue.automated_runs) - 1:
self.information_dialog('All Analyses from this experiment have been run')
else:
queue.automated_runs = queue.automated_runs[i:]
else:
self.information_dialog('No Analyses from this experiment have been run')
# ===============================================================================
# handlers
# ===============================================================================
def _experiment_queue_changed(self, eq):
if eq:
self.experiment_factory.queue = eq
self.experiment_factory.sync_queue_meta()
self.experiment_factory.edit_enabled = True
else:
self.experiment_factory.edit_enabled = False
@on_trait_change('executor:experiment_queue')
def _activate_editor(self, eq):
self.activate_editor_event = id(eq)
@on_trait_change('experiment_queues[]')
def _update_queues(self):
qs = self.experiment_queues
self.executor.stats.experiment_queues = qs
@on_trait_change('experiment_factory:run_factory:changed')
def _queue_dirty(self):
self.experiment_queue.changed = True
@on_trait_change('experiment_queue:dclicked')
def _dclicked_changed(self, new):
self.experiment_factory.run_factory.edit_mode = True
self._set_factory_runs(self.experiment_queue.selected)
@on_trait_change('experiment_factory:run_factory:update_info_needed')
def _refresh3(self):
self.debug('update info needed fired')
self.update_info()
@on_trait_change('executor:queue_modified')
def _refresh5(self, new):
if new:
self.debug('queue modified fired')
self.update_info()
@on_trait_change('experiment_factory:run_factory:refresh_table_needed')
def _refresh4(self):
for qi in self.experiment_queues:
qi.refresh_table_needed = True
@on_trait_change('experiment_factory:save_button')
def _save_update(self):
self.save_event = True
self.update_info()
@on_trait_change('experiment_queue:refresh_info_needed')
def _handle_refresh(self):
self.update_info()
@on_trait_change('experiment_queue:selected')
def _selected_changed(self, new):
ef = self.experiment_factory
rf = ef.run_factory
rf.edit_mode = False
if new:
self._set_factory_runs(new)
# if self.executor.is_alive():
a = new[-1]
if not a.skip:
self.executor.stats.calculate_at(a, at_times=self.executor.is_alive())
# self.stats.calculate()
@on_trait_change('experiment_factory:queue_factory:delay_between_analyses')
def handle_delay_between_analyses(self, new):
if self.executor.is_alive():
self.executor.experiment_queue.delay_between_analyses = new
def _set_factory_runs(self, new):
ef = self.experiment_factory
rf = ef.run_factory
# print 'set runs'
# rf.special_labnumber = 'Special Labnumber'
rf.suppress_update = True
rf.set_selected_runs(new)
rf.suppress_update = False
def _executor_factory(self):
e = ExperimentExecutor(mode=self.mode,
application=self.application)
e.bind_preferences()
return e
# ===============================================================================
# defaults
# ===============================================================================
def _executor_default(self):
return self._executor_factory()
def _experiment_factory_default(self):
dms = 'Spectrometer'
if self.application:
p2 = 'pychron.spectrometer.base_spectrometer_manager.BaseSpectrometerManager'
spec = self.application.get_service(p2)
if spec:
dms = spec.name.capitalize()
e = ExperimentFactory(application=self.application,
dvc=self.dvc,
default_mass_spectrometer=dms)
return e
# ============= EOF =============================================
| 35.81761 | 110 | 0.544337 |
9a35bc3c287dc93e6d2d748af53a6f3ea68ab156 | 661 | py | Python | AZkar_BackEnd/manage.py | AbdelrahmanElMahdy/Azkar-HisnMuslim | cc73ffbee38b9bb6f8d17b04a86fbf62e8a83db3 | ["MIT"] | null | null | null | AZkar_BackEnd/manage.py | AbdelrahmanElMahdy/Azkar-HisnMuslim | cc73ffbee38b9bb6f8d17b04a86fbf62e8a83db3 | ["MIT"] | null | null | null | AZkar_BackEnd/manage.py | AbdelrahmanElMahdy/Azkar-HisnMuslim | cc73ffbee38b9bb6f8d17b04a86fbf62e8a83db3 | ["MIT"] | null | null | null |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'azkar.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 28.73913 | 73 | 0.677761 |
d75e8c905f6b99b744369abbdf85d79cff723241 | 1,683 | py | Python | jp.atcoder/typical90/typical90_z/26086653.py | kagemeka/atcoder-submissions | 91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e | ["MIT"] | 1 | 2022-02-09T03:06:25.000Z | 2022-02-09T03:06:25.000Z | jp.atcoder/typical90/typical90_z/26086653.py | kagemeka/atcoder-submissions | 91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e | ["MIT"] | 1 | 2022-02-05T22:53:18.000Z | 2022-02-09T01:29:30.000Z | jp.atcoder/typical90/typical90_z/26086653.py | kagemeka/atcoder-submissions | 91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e | ["MIT"] | null | null | null |
import sys
import typing
import numba as nb
import numpy as np
@nb.njit
def csgraph_to_undirected(g: np.ndarray) -> np.ndarray:
m = len(g)
g = np.vstack((g, g))
g[m:, :2] = g[m:, 1::-1]
return g
@nb.njit
def sort_csgraph(
n: int,
g: np.ndarray,
) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray]:
sort_idx = np.argsort(g[:, 0], kind='mergesort')
g = g[sort_idx]
edge_idx = np.searchsorted(g[:, 0], np.arange(n + 1))
original_idx = np.arange(len(g))[sort_idx]
return g, edge_idx, original_idx
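# Illustrative sketch of the two helpers above (assuming int64 edge arrays, as
# built in main() below): for the path graph 0-1-2,
# >>> g = np.array([[0, 1], [1, 2]], dtype=np.int64)
# >>> g = csgraph_to_undirected(g)          # [[0,1],[1,2],[1,0],[2,1]]
# >>> g, edge_idx, _ = sort_csgraph(3, g)   # edges grouped by source node
# >>> edge_idx                              # [0, 1, 3, 4]; neighbors of node u
# ...                                       # are rows edge_idx[u]:edge_idx[u+1]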
@nb.njit
def euler_tour(
g: np.ndarray,
edge_idx: np.ndarray,
root: int,
) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray]:
n = g[:, :2].max() + 1
parent = np.full(n, -1, np.int32)
depth = np.zeros(n, np.int32)
tour = np.empty(n * 2, np.int32)
st = [root]
for i in range(2 * n):
u = st.pop()
tour[i] = u
if u < 0: continue
st.append(-u - 1)
for v in g[edge_idx[u]:edge_idx[u + 1], 1][::-1]:
if v == parent[u]: continue
parent[v] = u
depth[v] = depth[u] + 1
st.append(v)
return tour, parent, depth
@nb.njit((nb.i8[:, :], ), cache=True)
def solve(ab: np.ndarray) -> typing.NoReturn:
n = len(ab) + 1
g = csgraph_to_undirected(ab)
g, edge_idx, _ = sort_csgraph(n, g)
_, _, depth = euler_tour(g, edge_idx, 0)
flg = np.count_nonzero(depth & 1) >= n // 2
res = np.flatnonzero(depth & 1 ^ ~flg) + 1
return res[:n // 2]
def main() -> typing.NoReturn:
n = int(input())
ab = np.array(
sys.stdin.read().split(),
dtype=np.int64,
).reshape(n - 1, 2) - 1
res = solve(ab)
print(*res)
main()
| 21.857143 | 56 | 0.561497 |
26ff2d87c35acdcadd0b8da9fd051afb082b895c | 11,194 | py | Python | tests/test_configuration.py | OnoArnaldo/py_staticpage_builder | 32e9557aaa241e3f78075dc71c6ea7327d7f6890 | ["MIT"] | 1 | 2020-11-30T07:31:38.000Z | 2020-11-30T07:31:38.000Z | tests/test_configuration.py | OnoArnaldo/py_staticpage_builder | 32e9557aaa241e3f78075dc71c6ea7327d7f6890 | ["MIT"] | 11 | 2020-12-19T15:47:06.000Z | 2021-11-02T23:53:22.000Z | tests/test_configuration.py | OnoArnaldo/py_staticpage_builder | 32e9557aaa241e3f78075dc71c6ea7327d7f6890 | ["MIT"] | null | null | null |
import os
import pytest
from pystaticpage import config
def test_config():
cfg = config.Config.from_dict({
'environment': 'prod',
'dirs': {
'sites': './dirs/sites',
'pages': './dirs/pages',
'templates': './dirs/templates',
'static': './dirs/static',
'cdn': './dirs/cdn',
'data': './dirs/data',
'sass': './dirs/sass',
},
'urls': {
'home': 'https://my-site.com',
'static': '/static',
'cdn': 'https://cdn.my-site.com/assets',
},
'builder': {
'clean_before_build': True,
'pages': {
'execute': True,
'only_index': True,
'skip_for_index': [
'help.html',
],
},
'static': {
'execute': True,
},
'minify': {
'execute': True,
'extensions': ['.css', '.js'],
'skip_files': [r'.*min\.\w+'],
},
'sass': {
'execute': True,
'output_style': 'nested',
'destination': 'static',
},
'gzip': {
'execute': True,
'extensions': ['.css', '.js'],
'skip_files': ['main.js'],
},
'cdn': {
'execute': True,
'service_name': "servname",
'region_name': "regname",
'bucket_name': "bucname",
'object_key_prefix': "keyprefix",
'endpoint': "https://the-url.com",
'aws_access_key': "the_key",
'aws_secret_access_key': "the_secret",
},
},
})
assert cfg.environment == 'prod'
assert cfg.dirs.sites == './dirs/sites'
assert cfg.dirs.pages == './dirs/pages'
assert cfg.dirs.templates == './dirs/templates'
assert cfg.dirs.static == './dirs/static'
assert cfg.dirs.cdn == './dirs/cdn'
assert cfg.dirs.data == './dirs/data'
assert cfg.dirs.sass == './dirs/sass'
assert cfg.urls.home == 'https://my-site.com'
assert cfg.urls.static == '/static'
assert cfg.urls.cdn == 'https://cdn.my-site.com/assets'
assert cfg.builder.clean_before_build
assert cfg.builder.pages.execute
assert cfg.builder.pages.only_index
assert cfg.builder.pages.skip_for_index == ['help.html']
assert cfg.builder.static.execute
assert cfg.builder.minify.execute
assert cfg.builder.minify.extensions == ['.css', '.js']
assert cfg.builder.minify.skip_files == [r'.*min\.\w+']
assert cfg.builder.sass.execute
assert cfg.builder.sass.output_style == 'nested'
assert cfg.builder.sass.destination == 'static'
assert cfg.builder.gzip.execute
assert cfg.builder.gzip.extensions == ['.css', '.js']
assert cfg.builder.gzip.skip_files == ['main.js']
assert cfg.builder.cdn.execute
assert cfg.builder.cdn.service_name == 'servname'
assert cfg.builder.cdn.region_name == 'regname'
assert cfg.builder.cdn.bucket_name == 'bucname'
assert cfg.builder.cdn.object_key_prefix == 'keyprefix'
assert cfg.builder.cdn.endpoint == 'https://the-url.com'
assert cfg.builder.cdn.aws_access_key == 'the_key'
assert cfg.builder.cdn.aws_secret_access_key == 'the_secret'
def assert_config(cfg):
    with pytest.raises(config.MissingConfigKey):
        config.Config.from_dict(cfg)
def test_config_mandatory():
assert_config({})
assert_config({'dirs': {'sites': './sites'}})
assert_config({'dirs': {'sites': './sites', 'pages': '/pages'}})
assert_config({'dirs': {
'sites': './sites', 'pages': '/pages', 'templates': '/templates'}
})
assert_config({'dirs': {
'sites': './sites', 'pages': '/pages', 'templates': '/templates',
'static': '/static'
}})
assert_config({'dirs': {
'sites': './sites', 'pages': '/pages', 'templates': '/templates',
'static': '/static', 'cdn': '/cdn'
}})
assert_config({'dirs': {
'sites': './sites', 'pages': '/pages', 'templates': '/templates',
'static': '/static', 'cdn': '/cdn', 'data': '/data'
}})
assert_config({'dirs': {
'sites': './sites', 'pages': '/pages', 'templates': '/templates',
'static': '/static', 'cdn': '/cdn', 'data': '/data', 'sass': '/sass'
}})
assert_config({
'dirs': {
'sites': './sites', 'pages': '/pages', 'templates': '/templates',
'static': '/static', 'cdn': '/cdn', 'data': '/data', 'sass': '/sass'
},
'urls': {
'home': '/'
}
})
assert_config({
'dirs': {
'sites': './sites', 'pages': '/pages', 'templates': '/templates',
'static': '/static', 'cdn': '/cdn', 'data': '/data', 'sass': '/sass'
},
'urls': {
'home': '/', 'static': '/static'
}
})
def test_default():
cfg = config.Config.from_dict({
'dirs': {
'sites': '/sites', 'pages': '/pages', 'templates': '/templates',
'static': '/static', 'cdn': '/cdn', 'data': '/data', 'sass': '/sass'
},
'urls': {
'home': '/', 'static': '/static', 'cdn': '/cdn'
}
})
assert cfg.environment == 'prod'
assert not cfg.builder.pages.execute
assert not cfg.builder.clean_before_build
assert cfg.builder.pages.only_index
assert cfg.builder.pages.skip_for_index == []
assert not cfg.builder.static.execute
assert not cfg.builder.minify.execute
assert cfg.builder.minify.extensions == []
assert cfg.builder.minify.skip_files == []
assert not cfg.builder.sass.execute
assert cfg.builder.sass.output_style == 'nested'
assert cfg.builder.sass.destination == 'static'
assert not cfg.builder.gzip.execute
assert cfg.builder.gzip.extensions == []
assert cfg.builder.gzip.skip_files == []
assert not cfg.builder.cdn.execute
assert cfg.builder.cdn.service_name == ''
assert cfg.builder.cdn.region_name == ''
assert cfg.builder.cdn.bucket_name == ''
assert cfg.builder.cdn.object_key_prefix == ''
assert cfg.builder.cdn.endpoint == ''
assert cfg.builder.cdn.aws_access_key == ''
assert cfg.builder.cdn.aws_secret_access_key == ''
def test_config_from_yaml():
cfg = config.Config.from_yaml(
'config:\n'
' environment: prod\n'
' dirs:\n'
' sites: ./dirs/sites\n'
' pages: ./dirs/pages\n'
' templates: ./dirs/templates\n'
' static: ./dirs/static\n'
' cdn: ./dirs/cdn\n'
' data: ./dirs/data\n'
' sass: ./dirs/sass\n'
' urls:\n'
' home: https://my-site.com\n'
' static: /static\n'
' cdn: https://cdn.my-site.com/assets\n'
' builder:\n'
' clean_before_build: True\n'
' pages:\n'
' execute: True\n'
' only_index: True\n'
' skip_for_index:\n'
' - help.html\n'
' static:\n'
' execute: True\n'
' minify:\n'
' execute: True\n'
' extensions: [.css, .js]\n'
' skip_files:\n'
r' - .*min\.\w+' '\n'
' sass:\n'
' execute: True\n'
' output_style: nested\n'
' destination: static\n'
' gzip:\n'
' execute: True\n'
' extensions: [.css, .js]\n'
' skip_files:\n'
' - main.js\n'
' cdn:\n'
' execute: True\n'
' service_name: servname\n'
' region_name: regname\n'
' bucket_name: bucname\n'
' object_key_prefix: keyprefix\n'
' endpoint: https://the-url.com\n'
' aws_access_key: the_key\n'
' aws_secret_access_key: the_secret'
)
assert cfg.environment == 'prod'
assert cfg.dirs.sites == './dirs/sites'
assert cfg.dirs.pages == './dirs/pages'
assert cfg.dirs.templates == './dirs/templates'
assert cfg.dirs.static == './dirs/static'
assert cfg.dirs.cdn == './dirs/cdn'
assert cfg.dirs.data == './dirs/data'
assert cfg.dirs.sass == './dirs/sass'
assert cfg.urls.home == 'https://my-site.com'
assert cfg.urls.static == '/static'
assert cfg.urls.cdn == 'https://cdn.my-site.com/assets'
assert cfg.builder.clean_before_build
assert cfg.builder.pages.execute
assert cfg.builder.pages.only_index
assert cfg.builder.pages.skip_for_index == ['help.html']
assert cfg.builder.static.execute
assert cfg.builder.minify.execute
assert cfg.builder.minify.extensions == ['.css', '.js']
assert cfg.builder.minify.skip_files == [r'.*min\.\w+']
assert cfg.builder.sass.execute
assert cfg.builder.sass.output_style == 'nested'
assert cfg.builder.sass.destination == 'static'
assert cfg.builder.gzip.execute
assert cfg.builder.gzip.extensions == ['.css', '.js']
assert cfg.builder.gzip.skip_files == ['main.js']
assert cfg.builder.cdn.execute
assert cfg.builder.cdn.service_name == 'servname'
assert cfg.builder.cdn.region_name == 'regname'
assert cfg.builder.cdn.bucket_name == 'bucname'
assert cfg.builder.cdn.object_key_prefix == 'keyprefix'
assert cfg.builder.cdn.endpoint == 'https://the-url.com'
assert cfg.builder.cdn.aws_access_key == 'the_key'
assert cfg.builder.cdn.aws_secret_access_key == 'the_secret'
def test_config_from_env():
os.environ['STATIC_ACCESS_KEY'] = 'access_key'
os.environ['STATIC_SECRET_KEY'] = 'secret_key'
cfg = config.Config.from_yaml(
'config:\n'
' environment: prod\n'
' dirs:\n'
' sites: ./dirs/sites\n'
' pages: ./dirs/pages\n'
' templates: ./dirs/templates\n'
' static: ./dirs/static\n'
' cdn: ./dirs/cdn\n'
' data: ./dirs/data\n'
' sass: ./dirs/sass\n'
' urls:\n'
' home: https://my-site.com\n'
' static: /static\n'
' cdn: https://cdn.my-site.com/assets\n'
' builder:\n'
' clean_before_build: True\n'
' cdn:\n'
' execute: True\n'
' service_name: servname\n'
' region_name: regname\n'
' bucket_name: bucname\n'
' object_key_prefix: keyprefix\n'
' endpoint: https://the-url.com\n'
' aws_access_key: "$ENV:STATIC_ACCESS_KEY"\n'
' aws_secret_access_key: "$ENV:STATIC_SECRET_KEY"'
)
assert cfg.builder.cdn.execute
assert cfg.builder.cdn.service_name == 'servname'
assert cfg.builder.cdn.region_name == 'regname'
assert cfg.builder.cdn.bucket_name == 'bucname'
assert cfg.builder.cdn.object_key_prefix == 'keyprefix'
assert cfg.builder.cdn.endpoint == 'https://the-url.com'
assert cfg.builder.cdn.aws_access_key == 'access_key'
assert cfg.builder.cdn.aws_secret_access_key == 'secret_key'
| 33.818731 | 80 | 0.541093 |
af7efbaddc3d4565e3e27f16fa4f9b9108a35af3 | 4,646 | py | Python | test/functional/mempool_reorg.py | BramandUn/PALLY1 | 036c473dddc7534c0979b159ca458e8a3a8a10cb | ["MIT"] | null | null | null | test/functional/mempool_reorg.py | BramandUn/PALLY1 | 036c473dddc7534c0979b159ca458e8a3a8a10cb | ["MIT"] | null | null | null | test/functional/mempool_reorg.py | BramandUn/PALLY1 | 036c473dddc7534c0979b159ca458e8a3a8a10cb | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool re-org scenarios.
Test re-org scenarios with a mempool that contains transactions
that spend (directly or indirectly) coinbase transactions.
"""
from test_framework.test_framework import PALLY1TestFramework
from test_framework.util import *
# Create one-input, one-output, no-fee transaction:
class MempoolCoinbaseTest(PALLY1TestFramework):
def set_test_params(self):
self.num_nodes = 2
self.extra_args = [["-checkmempool"]] * 2
alert_filename = None # Set by setup_network
def run_test(self):
# Start with a 200 block chain
assert_equal(self.nodes[0].getblockcount(), 200)
# Mine four blocks. After this, nodes[0] blocks
# 101, 102, and 103 are spend-able.
new_blocks = self.nodes[1].generate(4)
self.sync_all()
node0_address = self.nodes[0].getnewaddress()
node1_address = self.nodes[1].getnewaddress()
# Three scenarios for re-orging coinbase spends in the memory pool:
# 1. Direct coinbase spend : spend_101
# 2. Indirect (coinbase spend in chain, child in mempool) : spend_102 and spend_102_1
# 3. Indirect (coinbase and child both in chain) : spend_103 and spend_103_1
        # Use invalidateblock to make all of the above coinbase spends invalid (immature coinbase),
# and make sure the mempool code behaves correctly.
b = [ self.nodes[0].getblockhash(n) for n in range(101, 105) ]
coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
spend_101_raw = create_tx(self.nodes[0], coinbase_txids[1], node1_address, 49.99)
spend_102_raw = create_tx(self.nodes[0], coinbase_txids[2], node0_address, 49.99)
spend_103_raw = create_tx(self.nodes[0], coinbase_txids[3], node0_address, 49.99)
# Create a transaction which is time-locked to two blocks in the future
timelock_tx = self.nodes[0].createrawtransaction([{"txid": coinbase_txids[0], "vout": 0}], {node0_address: 49.99})
# Set the time lock
timelock_tx = timelock_tx.replace("ffffffff", "11111191", 1)
timelock_tx = timelock_tx[:-8] + hex(self.nodes[0].getblockcount() + 2)[2:] + "000000"
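        # The replace above rewrites the first input's sequence field so the
        # transaction is non-final and nLockTime is enforced; the last 8 hex
        # characters of a raw transaction are the 4-byte little-endian nLockTime,
        # so the low byte of (current height + 2) followed by "000000" locks the
        # transaction two blocks into the future (assuming, as here, that the
        # target height fits in a single byte).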
timelock_tx = self.nodes[0].signrawtransactionwithwallet(timelock_tx)["hex"]
# This will raise an exception because the timelock transaction is too immature to spend
assert_raises_rpc_error(-26, "non-final", self.nodes[0].sendrawtransaction, timelock_tx)
# Broadcast and mine spend_102 and 103:
spend_102_id = self.nodes[0].sendrawtransaction(spend_102_raw)
spend_103_id = self.nodes[0].sendrawtransaction(spend_103_raw)
self.nodes[0].generate(1)
# Time-locked transaction is still too immature to spend
assert_raises_rpc_error(-26,'non-final', self.nodes[0].sendrawtransaction, timelock_tx)
# Create 102_1 and 103_1:
spend_102_1_raw = create_tx(self.nodes[0], spend_102_id, node1_address, 49.98)
spend_103_1_raw = create_tx(self.nodes[0], spend_103_id, node1_address, 49.98)
# Broadcast and mine 103_1:
spend_103_1_id = self.nodes[0].sendrawtransaction(spend_103_1_raw)
last_block = self.nodes[0].generate(1)
# Time-locked transaction can now be spent
timelock_tx_id = self.nodes[0].sendrawtransaction(timelock_tx)
# ... now put spend_101 and spend_102_1 in memory pools:
spend_101_id = self.nodes[0].sendrawtransaction(spend_101_raw)
spend_102_1_id = self.nodes[0].sendrawtransaction(spend_102_1_raw)
self.sync_all()
assert_equal(set(self.nodes[0].getrawmempool()), {spend_101_id, spend_102_1_id, timelock_tx_id})
for node in self.nodes:
node.invalidateblock(last_block[0])
# Time-locked transaction is now too immature and has been removed from the mempool
# spend_103_1 has been re-orged out of the chain and is back in the mempool
assert_equal(set(self.nodes[0].getrawmempool()), {spend_101_id, spend_102_1_id, spend_103_1_id})
# Use invalidateblock to re-org back and make all those coinbase spends
# immature/invalid:
for node in self.nodes:
node.invalidateblock(new_blocks[0])
self.sync_all()
# mempool should be empty.
assert_equal(set(self.nodes[0].getrawmempool()), set())
if __name__ == '__main__':
MempoolCoinbaseTest().main()
| 47.408163 | 122 | 0.691778 |
9fd2017c09ee6f5d3ffa5dff2e8b530d2c284882 | 614 | py | Python | youtubesearchpython/__init__.py | a11cf0/youtube-search-python | 5be693423fb3f2d6a13d691183ed015275828338 | ["MIT"] | null | null | null | youtubesearchpython/__init__.py | a11cf0/youtube-search-python | 5be693423fb3f2d6a13d691183ed015275828338 | ["MIT"] | null | null | null | youtubesearchpython/__init__.py | a11cf0/youtube-search-python | 5be693423fb3f2d6a13d691183ed015275828338 | ["MIT"] | null | null | null |
from youtubesearchpython.search import Search, VideosSearch, ChannelsSearch, PlaylistsSearch, CustomSearch
from youtubesearchpython.extras import Video, Playlist, Suggestions
from youtubesearchpython.streamurlfetcher import StreamURLFetcher
from youtubesearchpython.internal.constants import *
__title__ = 'youtube-search-python'
__version__ = '1.4.3'
__author__ = 'alexmercerind'
__license__ = 'MIT'
''' Deprecated. Present for legacy support. '''
from youtubesearchpython.legacy import SearchVideos, SearchPlaylists
from youtubesearchpython.legacy import SearchVideos as searchYoutube
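# Example usage (illustrative sketch; the method names follow the package
# README and may differ between versions):
#
# >>> search = VideosSearch('NoCopyrightSounds', limit=2)
# >>> data = search.result()   # dict with a 'result' list of video entries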
| 38.375 | 106 | 0.807818 |
200b9a5e08841447d3b391ac00a841fe953f7a25 | 3,275 | py | Python | userDetails/userDetails/settings.py | mahesh-muttinti/Django-Sample-App | 65b8eddd7f7b7e4837a61f72543a52460aadd3b8 | ["Apache-2.0"] | null | null | null | userDetails/userDetails/settings.py | mahesh-muttinti/Django-Sample-App | 65b8eddd7f7b7e4837a61f72543a52460aadd3b8 | ["Apache-2.0"] | null | null | null | userDetails/userDetails/settings.py | mahesh-muttinti/Django-Sample-App | 65b8eddd7f7b7e4837a61f72543a52460aadd3b8 | ["Apache-2.0"] | null | null | null |
"""
Django settings for userDetails project.
Generated by 'django-admin startproject' using Django 3.1.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'w)qs34-7y7=nfi&t#u#+av(e5)gu*y^l&9a@+i6=#@8*um^uuh'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'getDetails.apps.GetdetailsConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'userDetails.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR,'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'userDetails.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
# STATICFILES_DIRS = (str(BASE_DIR.joinpath('static')),)
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
| 25.787402 | 91 | 0.698321 |
5485955331d22273f1276517368bd8f167b5ceb1 | 750 | py | Python | tests/test_EbClient.py | danilocgsilva/smarteb | 7c4cd6c18f37195e1749dffa713341560457cf55 | ["MIT"] | null | null | null | tests/test_EbClient.py | danilocgsilva/smarteb | 7c4cd6c18f37195e1749dffa713341560457cf55 | ["MIT"] | null | null | null | tests/test_EbClient.py | danilocgsilva/smarteb | 7c4cd6c18f37195e1749dffa713341560457cf55 | ["MIT"] | null | null | null |
import sys
import re
import tempfile
import unittest
sys.path.insert(1, "..")
from smart_eb.EbClient import EbClient
class test_EbClient(unittest.TestCase):
def setUp(self):
self.ebClient = EbClient()
def test_new_path_not_exists(self):
non_existent_path = '/path/does/not/exists'
with self.assertRaises(OSError):
self.ebClient.new(non_existent_path, "project_name")
    def test_new_missing_path(self):
with self.assertRaises(OSError):
self.ebClient.new("", "project_name")
def test_new_missing_name(self):
with self.assertRaises(TypeError):
self.ebClient.new(self.getTmpPathFolder())
def getTmpPathFolder(self):
return tempfile.gettempdir()
| 25.862069 | 64 | 0.688 |
54f869abb77fb7d44a751ad3143f492efaf6b93c | 390 | py | Python | hall_of_cards/cardsgame/admin.py | mrjmad/gnu_linux_mag_drf | c2be00ca2c37b23d35ab9e12dc8b68e727110682 | ["MIT"] | null | null | null | hall_of_cards/cardsgame/admin.py | mrjmad/gnu_linux_mag_drf | c2be00ca2c37b23d35ab9e12dc8b68e727110682 | ["MIT"] | null | null | null | hall_of_cards/cardsgame/admin.py | mrjmad/gnu_linux_mag_drf | c2be00ca2c37b23d35ab9e12dc8b68e727110682 | ["MIT"] | null | null | null |
from django.contrib import admin
from .models import CardType, Card
class CardTypeAdmin(admin.ModelAdmin):
list_display = ('name',)
class CardAdmin(admin.ModelAdmin):
list_display = ('name', 'card_type', 'mana_cost', 'life', 'damage', 'modified')
list_filter = ('card_type', 'mana_cost')
admin.site.register(CardType, CardTypeAdmin)
admin.site.register(Card, CardAdmin)
| 21.666667 | 83 | 0.725641 |
d6955565572fe48638e6814e8dae2b372ca2bd00 | 50,465 | py | Python | qutip/piqs.py | sunash2/qutip | 7a57bb0f576621590ecd0dd7d1d862bce57931ec | ["BSD-3-Clause"] | null | null | null | qutip/piqs.py | sunash2/qutip | 7a57bb0f576621590ecd0dd7d1d862bce57931ec | ["BSD-3-Clause"] | null | null | null | qutip/piqs.py | sunash2/qutip | 7a57bb0f576621590ecd0dd7d1d862bce57931ec | ["BSD-3-Clause"] | 1 | 2018-11-08T20:07:56.000Z | 2018-11-08T20:07:56.000Z |
# This file is part of QuTiP: Quantum Toolbox in Python.
#
# Copyright (c) 2011 and later, Paul D. Nation and Robert J. Johansson.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the QuTiP: Quantum Toolbox in Python nor the names
# of its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###############################################################################
"""Permutational Invariant Quantum Solver (PIQS)
This module calculates the Liouvillian for the dynamics of ensembles of
identical two-level systems (TLS) in the presence of local and collective
processes by exploiting permutational symmetry and using the Dicke basis.
"""
# Authors: Nathan Shammah, Shahnawaz Ahmed
# Contact: nathan.shammah@gmail.com, shahnawaz.ahmed95@gmail.com
from math import factorial
from decimal import Decimal
import numpy as np
from scipy.integrate import odeint
from scipy import constants
from scipy.sparse import dok_matrix, block_diag, lil_matrix
from qutip.solver import Options, Result
from qutip import (Qobj, spre, spost, tensor, identity, ket2dm,
vector_to_operator)
from qutip import sigmax, sigmay, sigmaz, sigmap, sigmam
from qutip.cy.piqs import Dicke as _Dicke
from qutip.cy.piqs import (jmm1_dictionary, _num_dicke_states,
_num_dicke_ladders, get_blocks, j_min,
m_vals, j_vals)
# Functions necessary to generate the Lindbladian/Liouvillian
def num_dicke_states(N):
"""Calculate the number of Dicke states.
Parameters
----------
N: int
The number of two-level systems.
Returns
-------
nds: int
The number of Dicke states.
"""
return _num_dicke_states(N)
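# Example (illustrative sketch): nds = (N/2 + 1)^2 for even N and
# (N + 1)(N + 3)/4 for odd N, e.g.
# >>> num_dicke_states(2)   # 4
# >>> num_dicke_states(3)   # 6
# >>> num_dicke_states(4)   # 9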
def num_dicke_ladders(N):
"""Calculate the total number of ladders in the Dicke space.
For a collection of N two-level systems it counts how many different
"j" exist or the number of blocks in the block-diagonal matrix.
Parameters
----------
N: int
The number of two-level systems.
Returns
-------
Nj: int
The number of Dicke ladders.
"""
return _num_dicke_ladders(N)
def num_tls(nds):
"""Calculate the number of two-level systems.
Parameters
----------
nds: int
The number of Dicke states.
Returns
-------
N: int
The number of two-level systems.
"""
if np.sqrt(nds).is_integer():
# N is even
N = 2*(np.sqrt(nds)-1)
else:
# N is odd
N = 2*(np.sqrt(nds + 1/4)-1)
return int(N)
def isdiagonal(mat):
"""
Check if the input matrix is diagonal.
Parameters
==========
mat: ndarray/Qobj
A 2D numpy array
Returns
=======
diag: bool
True/False depending on whether the input matrix is diagonal.
"""
if isinstance(mat, Qobj):
mat = mat.full()
return np.all(mat == np.diag(np.diagonal(mat)))
class Dicke(object):
"""The Dicke class which builds the Lindbladian and Liouvillian matrix.
Example
-------
>>> from piqs import Dicke, jspin
>>> N = 2
>>> jx, jy, jz = jspin(N)
>>> jp = jspin(N, "+")
>>> jm = jspin(N, "-")
>>> ensemble = Dicke(N, emission=1.)
>>> L = ensemble.liouvillian()
Parameters
----------
N: int
The number of two-level systems.
hamiltonian: :class: qutip.Qobj
A Hamiltonian in the Dicke basis.
The matrix dimensions are (nds, nds),
with nds being the number of Dicke states.
The Hamiltonian can be built with the operators
given by the `jspin` functions.
emission: float
Incoherent emission coefficient (also nonradiative emission).
default: 0.0
dephasing: float
Local dephasing coefficient.
default: 0.0
pumping: float
Incoherent pumping coefficient.
default: 0.0
collective_emission: float
        Collective (superradiant) emission coefficient.
default: 0.0
collective_pumping: float
Collective pumping coefficient.
default: 0.0
collective_dephasing: float
Collective dephasing coefficient.
default: 0.0
Attributes
----------
N: int
The number of two-level systems.
hamiltonian: :class: qutip.Qobj
A Hamiltonian in the Dicke basis.
The matrix dimensions are (nds, nds),
with nds being the number of Dicke states.
The Hamiltonian can be built with the operators given
by the `jspin` function in the "dicke" basis.
emission: float
Incoherent emission coefficient (also nonradiative emission).
default: 0.0
dephasing: float
Local dephasing coefficient.
default: 0.0
pumping: float
Incoherent pumping coefficient.
default: 0.0
collective_emission: float
        Collective (superradiant) emission coefficient.
default: 0.0
collective_dephasing: float
Collective dephasing coefficient.
default: 0.0
collective_pumping: float
Collective pumping coefficient.
default: 0.0
nds: int
The number of Dicke states.
dshape: tuple
The shape of the Hilbert space in the Dicke or uncoupled basis.
default: (nds, nds).
"""
def __init__(self, N, hamiltonian=None,
emission=0., dephasing=0., pumping=0.,
collective_emission=0., collective_dephasing=0.,
collective_pumping=0.):
self.N = N
self.hamiltonian = hamiltonian
self.emission = emission
self.dephasing = dephasing
self.pumping = pumping
self.collective_emission = collective_emission
self.collective_dephasing = collective_dephasing
self.collective_pumping = collective_pumping
self.nds = num_dicke_states(self.N)
self.dshape = (num_dicke_states(self.N), num_dicke_states(self.N))
def __repr__(self):
"""Print the current parameters of the system."""
string = []
string.append("N = {}".format(self.N))
string.append("Hilbert space dim = {}".format(self.dshape))
string.append("Number of Dicke states = {}".format(self.nds))
string.append("Liouvillian space dim = {}".format((self.nds**2,
self.nds**2)))
if self.emission != 0:
string.append("emission = {}".format(self.emission))
if self.dephasing != 0:
string.append("dephasing = {}".format(self.dephasing))
if self.pumping != 0:
string.append("pumping = {}".format(self.pumping))
if self.collective_emission != 0:
string.append(
"collective_emission = {}".format(self.collective_emission))
if self.collective_dephasing != 0:
string.append(
"collective_dephasing = {}".format(self.collective_dephasing))
if self.collective_pumping != 0:
string.append(
"collective_pumping = {}".format(self.collective_pumping))
return "\n".join(string)
def lindbladian(self):
"""Build the Lindbladian superoperator of the dissipative dynamics.
Returns
-------
lindbladian: :class: qutip.Qobj
The Lindbladian matrix as a `qutip.Qobj`.
"""
cythonized_dicke = _Dicke(int(self.N),
float(self.emission),
float(self.dephasing),
float(self.pumping),
float(self.collective_emission),
float(self.collective_dephasing),
float(self.collective_pumping))
return cythonized_dicke.lindbladian()
def liouvillian(self):
"""Build the total Liouvillian using the Dicke basis.
Returns
-------
liouv: :class: qutip.Qobj
The Liouvillian matrix for the system.
"""
lindblad = self.lindbladian()
if self.hamiltonian is None:
liouv = lindblad
else:
hamiltonian = self.hamiltonian
hamiltonian_superoperator = - 1j * \
spre(hamiltonian) + 1j * spost(hamiltonian)
liouv = lindblad + hamiltonian_superoperator
return liouv
def pisolve(self, initial_state, tlist, options=None):
"""
Solve for diagonal Hamiltonians and initial states faster.
Parameters
==========
initial_state: :class: qutip.Qobj
An initial state specified as a density matrix of
`qutip.Qbj` type.
tlist: ndarray
A 1D numpy array of list of timesteps to integrate
options: :class: qutip.solver.Options
The options for the solver.
Returns
=======
result: list
A dictionary of the type `qutip.solver.Result` which holds the
results of the evolution.
"""
        if not isdiagonal(initial_state):
msg = "`pisolve` requires a diagonal initial density matrix. "
msg += "In general construct the Liouvillian using "
msg += "`piqs.liouvillian` and use qutip.mesolve."
raise ValueError(msg)
        if self.hamiltonian and not isdiagonal(self.hamiltonian):
msg = "`pisolve` should only be used for diagonal Hamiltonians. "
msg += "Construct the Liouvillian using `piqs.liouvillian` and"
msg += " use `qutip.mesolve`."
raise ValueError(msg)
if initial_state.full().shape != self.dshape:
msg = "Initial density matrix should be diagonal."
raise ValueError(msg)
pim = Pim(self.N, self.emission, self.dephasing, self.pumping,
self.collective_emission, self.collective_pumping,
self.collective_dephasing)
result = pim.solve(initial_state, tlist, options=None)
return result
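    # Example (illustrative sketch): relaxation of a fully excited ensemble
    # under local emission, using only the diagonal Dicke states. `excited` is
    # the module-level helper defined further below; numpy is imported as np.
    # >>> ensemble = Dicke(N=4, emission=1.0)
    # >>> rho0 = excited(4)
    # >>> tlist = np.linspace(0, 5, 101)
    # >>> result = ensemble.pisolve(rho0, tlist)   # qutip.solver.Result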
def prune_eigenstates(self, liouvillian):
"""Remove spurious eigenvalues and eigenvectors of the Liouvillian.
Spurious means that the given eigenvector has elements outside of the
block-diagonal matrix.
Parameters
----------
liouvillian_eigenstates: list
A list with the eigenvalues and eigenvectors of the Liouvillian
including spurious ones.
Returns
-------
correct_eigenstates: list
The list with the correct eigenvalues and eigenvectors of the
Liouvillian.
"""
liouvillian_eigenstates = liouvillian.eigenstates()
N = self.N
block_mat = block_matrix(N)
nnz_tuple_bm = [(i, j) for i, j in zip(*block_mat.nonzero())]
# 0. Create a copy of the eigenvalues to approximate values
eig_val, eig_vec = liouvillian_eigenstates
tol = 10
eig_val_round = np.round(eig_val, tol)
# 2. Use 'block_matrix(N)' to remove eigenvectors with matrix
# elements
# outside of the block matrix.
forbidden_eig_index = []
for k in range(0, len(eig_vec)):
dm = vector_to_operator(eig_vec[k])
nnz_tuple = [(i, j) for i, j in zip(*dm.data.nonzero())]
for i in nnz_tuple:
if i not in nnz_tuple_bm:
if np.round(dm[i], tol) != 0:
forbidden_eig_index.append(k)
forbidden_eig_index = np.array(list(set(forbidden_eig_index)))
# 3. Remove the forbidden eigenvalues and eigenvectors.
correct_eig_val = np.delete(eig_val, forbidden_eig_index)
correct_eig_vec = np.delete(eig_vec, forbidden_eig_index)
correct_eigenstates = correct_eig_val, correct_eig_vec
return correct_eigenstates
def c_ops(self):
"""Build collapse operators in the full Hilbert space 2^N.
Returns
-------
c_ops_list: list
The list with the collapse operators in the 2^N Hilbert space.
"""
ce = self.collective_emission
cd = self.collective_dephasing
cp = self.collective_pumping
c_ops_list = collapse_uncoupled(N=self.N,
emission=self.emission,
dephasing=self.dephasing,
pumping=self.pumping,
collective_emission=ce,
collective_dephasing=cd,
collective_pumping=cp)
return c_ops_list
def coefficient_matrix(self):
"""Build coefficient matrix for ODE for a diagonal problem.
Returns
-------
M: ndarray
The matrix M of the coefficients for the ODE dp/dt = Mp.
p is the vector of the diagonal matrix elements
of the density matrix rho in the Dicke basis.
"""
diagonal_system = Pim(N=self.N,
emission=self.emission,
dephasing=self.dephasing,
pumping=self.pumping,
collective_emission=self.collective_emission,
collective_dephasing=self.collective_dephasing,
collective_pumping=self.collective_pumping)
coef_matrix = diagonal_system.coefficient_matrix()
return coef_matrix
# Utility functions for properties of the Dicke space
def energy_degeneracy(N, m):
"""Calculate the number of Dicke states with same energy.
The use of the `Decimals` class allows to explore N > 1000,
unlike the built-in function `scipy.special.binom`
Parameters
----------
N: int
The number of two-level systems.
m: float
Total spin z-axis projection eigenvalue.
This is proportional to the total energy.
Returns
-------
degeneracy: int
The energy degeneracy
"""
numerator = Decimal(factorial(N))
d1 = Decimal(factorial(N/2 + m))
d2 = Decimal(factorial(N/2 - m))
degeneracy = numerator/(d1 * d2)
return int(degeneracy)
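# Example (illustrative sketch): energy_degeneracy(N, m) counts the product
# states with N/2 + m excited spins, i.e. binomial(N, N/2 + m); for instance
# energy_degeneracy(4, 0) == 6 and energy_degeneracy(4, 2) == 1.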
def state_degeneracy(N, j):
"""Calculate the degeneracy of the Dicke state.
Each state :math:`|j, m\\rangle` includes D(N,j) irreducible
representations :math:`|j, m, \\alpha\\rangle`.
Uses Decimals to calculate higher numerator and denominators numbers.
Parameters
----------
N: int
The number of two-level systems.
j: float
Total spin eigenvalue (cooperativity).
Returns
-------
degeneracy: int
The state degeneracy.
"""
if j < 0:
raise ValueError("j value should be >= 0")
numerator = Decimal(factorial(N)) * Decimal(2*j + 1)
denominator_1 = Decimal(factorial(N/2 + j + 1))
denominator_2 = Decimal(factorial(N/2 - j))
degeneracy = numerator/(denominator_1 * denominator_2)
degeneracy = int(np.round(float(degeneracy)))
return degeneracy
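# Example (illustrative sketch): for N = 4, state_degeneracy(4, 2) == 1,
# state_degeneracy(4, 1) == 3 and state_degeneracy(4, 0) == 2; weighting each
# ladder by its 2j + 1 states recovers the product-space dimension,
# 1*5 + 3*3 + 2*1 == 16 == 2**4.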
def m_degeneracy(N, m):
"""Calculate the number of Dicke states :math:`|j, m\\rangle` with
same energy.
Parameters
----------
N: int
The number of two-level systems.
m: float
Total spin z-axis projection eigenvalue (proportional to the total
energy).
Returns
-------
degeneracy: int
The m-degeneracy.
"""
jvals = j_vals(N)
maxj = np.max(jvals)
if m < -maxj:
e = "m value is incorrect for this N."
e += " Minimum m value can be {}".format(-maxj)
raise ValueError(e)
degeneracy = N/2 + 1 - abs(m)
return int(degeneracy)
def ap(j, m):
"""Calculate the coefficient `ap` by applying J_+ |j, m>.
The action of ap is given by:
:math:`J_{+}|j, m\\rangle = A_{+}(j, m)|j, m+1\\rangle`
Parameters
----------
j, m: float
The value for j and m in the dicke basis |j,m>.
Returns
-------
a_plus: float
The value of :math:`a_{+}`.
"""
a_plus = np.sqrt((j-m) * (j+m+1))
return a_plus
def am(j, m):
"""Calculate the operator `am` used later.
The action of ap is given by: J_{-}|j, m> = A_{-}(jm)|j, m-1>
Parameters
----------
j: float
The value for j.
m: float
The value for m.
Returns
-------
a_minus: float
The value of :math:`a_{-}`.
"""
a_minus = np.sqrt((j+m) * (j-m+1))
return a_minus
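# Example (illustrative sketch): ladder coefficients on the j = 1 triplet,
# >>> ap(1, 0)   # J_+|1, 0> = sqrt(2)|1, 1>   -> 1.414...
# >>> ap(1, 1)   # top of the ladder           -> 0.0
# >>> am(1, 0)   # J_-|1, 0> = sqrt(2)|1, -1>  -> 1.414...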
def spin_algebra(N, op=None):
"""Create the list [sx, sy, sz] with the spin operators.
The operators are constructed for a collection of N two-level systems
(TLSs). Each element of the list, i.e., sx, is a vector of `qutip.Qobj`
    objects (spin matrices), as it contains the list of the SU(2) Pauli
matrices for the N TLSs. Each TLS operator sx[i], with i = 0, ..., (N-1),
is placed in a :math:`2^N`-dimensional Hilbert space.
Notes
-----
sx[i] is :math:`\\frac{\\sigma_x}{2}` in the composite Hilbert space.
Parameters
----------
N: int
The number of two-level systems.
Returns
-------
spin_operators: list or :class: qutip.Qobj
A list of `qutip.Qobj` operators - [sx, sy, sz] or the
requested operator.
"""
# 1. Define N TLS spin-1/2 matrices in the uncoupled basis
N = int(N)
sx = [0 for i in range(N)]
sy = [0 for i in range(N)]
sz = [0 for i in range(N)]
sp = [0 for i in range(N)]
sm = [0 for i in range(N)]
sx[0] = 0.5 * sigmax()
sy[0] = 0.5 * sigmay()
sz[0] = 0.5 * sigmaz()
sp[0] = sigmap()
sm[0] = sigmam()
# 2. Place operators in total Hilbert space
for k in range(N - 1):
sx[0] = tensor(sx[0], identity(2))
sy[0] = tensor(sy[0], identity(2))
sz[0] = tensor(sz[0], identity(2))
sp[0] = tensor(sp[0], identity(2))
sm[0] = tensor(sm[0], identity(2))
# 3. Cyclic sequence to create all N operators
a = [i for i in range(N)]
b = [[a[i - i2] for i in range(N)] for i2 in range(N)]
# 4. Create N operators
for i in range(1, N):
sx[i] = sx[0].permute(b[i])
sy[i] = sy[0].permute(b[i])
sz[i] = sz[0].permute(b[i])
sp[i] = sp[0].permute(b[i])
sm[i] = sm[0].permute(b[i])
spin_operators = [sx, sy, sz]
if not op:
return spin_operators
elif op == 'x':
return sx
elif op == 'y':
return sy
elif op == 'z':
return sz
elif op == '+':
return sp
elif op == '-':
return sm
else:
raise TypeError('Invalid type')
def _jspin_uncoupled(N, op=None):
"""Construct the the collective spin algebra in the uncoupled basis.
jx, jy, jz, jp, jm are constructed in the uncoupled basis of the
two-level system (TLS). Each collective operator is placed in a
Hilbert space of dimension 2^N.
Parameters
----------
N: int
The number of two-level systems.
op: str
The operator to return 'x','y','z','+','-'.
If no operator given, then output is the list of operators
for ['x','y','z',].
Returns
-------
collective_operators: list or :class: qutip.Qobj
A list of `qutip.Qobj` representing all the operators in
uncoupled" basis or a single operator requested.
"""
# 1. Define N TLS spin-1/2 matrices in the uncoupled basis
N = int(N)
sx, sy, sz = spin_algebra(N)
sp, sm = spin_algebra(N, "+"), spin_algebra(N, "-")
jx = sum(sx)
jy = sum(sy)
jz = sum(sz)
jp = sum(sp)
jm = sum(sm)
collective_operators = [jx, jy, jz]
if not op:
return collective_operators
elif op == 'x':
return jx
elif op == 'y':
return jy
elif op == 'z':
return jz
elif op == '+':
return jp
elif op == '-':
return jm
else:
raise TypeError('Invalid type')
def jspin(N, op=None, basis="dicke"):
"""
Calculate the list of collective operators of the total algebra.
The Dicke basis :math:`|j,m\\rangle\\langle j,m'|` is used by
default. Otherwise with "uncoupled" the operators are in a
:math:`2^N` space.
Parameters
----------
N: int
Number of two-level systems.
op: str
The operator to return 'x','y','z','+','-'.
If no operator given, then output is the list of operators
for ['x','y','z'].
basis: str
The basis of the operators - "dicke" or "uncoupled"
default: "dicke".
Returns
-------
j_alg: list or :class: qutip.Qobj
A list of `qutip.Qobj` representing all the operators in
the "dicke" or "uncoupled" basis or a single operator requested.
"""
if basis == "uncoupled":
return _jspin_uncoupled(N, op)
nds = num_dicke_states(N)
num_ladders = num_dicke_ladders(N)
jz_operator = dok_matrix((nds, nds))
jp_operator = dok_matrix((nds, nds))
jm_operator = dok_matrix((nds, nds))
s = 0
for k in range(0, num_ladders):
j = 0.5 * N - k
mmax = int(2*j + 1)
for i in range(0, mmax):
m = j-i
jz_operator[s, s] = m
if (s+1) in range(0, nds):
jp_operator[s, s+1] = ap(j, m-1)
if (s-1) in range(0, nds):
jm_operator[s, s-1] = am(j, m+1)
s = s+1
jx_operator = 1/2 * (jp_operator+jm_operator)
jy_operator = 1j/2 * (jm_operator-jp_operator)
jx = Qobj(jx_operator)
jy = Qobj(jy_operator)
jz = Qobj(jz_operator)
jp = Qobj(jp_operator)
jm = Qobj(jm_operator)
if not op:
return [jx, jy, jz]
if op == '+':
return jp
elif op == '-':
return jm
elif op == 'x':
return jx
elif op == 'y':
return jy
elif op == 'z':
return jz
else:
raise TypeError('Invalid type')
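# Example (illustrative sketch): for N = 2 the Dicke basis is ordered
# |1,1>, |1,0>, |1,-1>, |0,0>, so
# >>> jx, jy, jz = jspin(2)
# >>> jz.diag()                    # array([ 1.,  0., -1.,  0.])
# >>> jspin(2, "+").full()[0, 1]   # ap(1, 0) = sqrt(2)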
def collapse_uncoupled(N, emission=0., dephasing=0., pumping=0.,
collective_emission=0., collective_dephasing=0.,
collective_pumping=0.):
"""
Create the collapse operators (c_ops) of the Lindbladian in the
uncoupled basis
These operators are in the uncoupled basis of the two-level
system (TLS) SU(2) Pauli matrices.
Notes
-----
The collapse operator list can be given to `qutip.mesolve`.
Notice that the operators are placed in a Hilbert space of
dimension :math:`2^N`. Thus the method is suitable only for
small N (of the order of 10).
Parameters
----------
N: int
The number of two-level systems.
emission: float
Incoherent emission coefficient (also nonradiative emission).
default: 0.0
dephasing: float
Local dephasing coefficient.
default: 0.0
pumping: float
Incoherent pumping coefficient.
default: 0.0
collective_emission: float
        Collective (superradiant) emission coefficient.
default: 0.0
collective_pumping: float
Collective pumping coefficient.
default: 0.0
collective_dephasing: float
Collective dephasing coefficient.
default: 0.0
Returns
-------
c_ops: list
The list of collapse operators as `qutip.Qobj` for the system.
"""
N = int(N)
if N > 10:
msg = "N > 10. dim(H) = 2^N. "
msg += "Better use `piqs.lindbladian` to reduce Hilbert space "
msg += "dimension and exploit permutational symmetry."
raise Warning(msg)
[sx, sy, sz] = spin_algebra(N)
sp, sm = spin_algebra(N, "+"), spin_algebra(N, "-")
[jx, jy, jz] = jspin(N, basis="uncoupled")
jp, jm = (jspin(N, "+", basis = "uncoupled"),
jspin(N, "-", basis="uncoupled"))
c_ops = []
if emission != 0:
for i in range(0, N):
c_ops.append(np.sqrt(emission) * sm[i])
if dephasing != 0:
for i in range(0, N):
c_ops.append(np.sqrt(dephasing) * sz[i])
if pumping != 0:
for i in range(0, N):
c_ops.append(np.sqrt(pumping) * sp[i])
if collective_emission != 0:
c_ops.append(np.sqrt(collective_emission) * jm)
if collective_dephasing != 0:
c_ops.append(np.sqrt(collective_dephasing) * jz)
if collective_pumping != 0:
c_ops.append(np.sqrt(collective_pumping) * jp)
return c_ops
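# Example (illustrative sketch): the returned operators can be fed directly to
# `qutip.mesolve`; note the 2^N scaling, so this is only practical for small N.
# >>> from qutip import mesolve
# >>> N = 2
# >>> c_ops = collapse_uncoupled(N, emission=1.0)
# >>> jz = jspin(N, "z", basis="uncoupled")
# >>> rho0 = excited(N, basis="uncoupled")
# >>> result = mesolve(jz, rho0, np.linspace(0, 5, 51), c_ops, e_ops=[jz])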
# State definitions in the Dicke basis with an option for basis transformation
def dicke_basis(N, jmm1=None):
"""
Initialize the density matrix of a Dicke state for several (j, m, m1).
This function can be used to build arbitrary states in the Dicke basis
:math:`|j, m\\rangle \\langle j, m^{\\prime}|`. We create coefficients for each
(j, m, m1) value in the dictionary jmm1. The mapping for the (i, k)
index of the density matrix to the |j, m> values is given by the
cythonized function `jmm1_dictionary`. A density matrix is created from
the given dictionary of coefficients for each (j, m, m1).
Parameters
----------
N: int
The number of two-level systems.
jmm1: dict
A dictionary of {(j, m, m1): p} that gives a density p for the
(j, m, m1) matrix element.
Returns
-------
rho: :class: qutip.Qobj
The density matrix in the Dicke basis.
"""
if jmm1 is None:
msg = "Please specify the jmm1 values as a dictionary"
msg += "or use the `excited(N)` function to create an"
msg += "excited state where jmm1 = {(N/2, N/2, N/2): 1}"
raise AttributeError(msg)
nds = _num_dicke_states(N)
rho = np.zeros((nds, nds))
jmm1_dict = jmm1_dictionary(N)[1]
for key in jmm1:
i, k = jmm1_dict[key]
rho[i, k] = jmm1[key]
return Qobj(rho)
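# Example (illustrative sketch): an equal incoherent mixture of the fully
# excited state |1, 1> and the singlet |0, 0> for N = 2,
# >>> jmm1 = {(1, 1, 1): 0.5, (0, 0, 0): 0.5}
# >>> rho = dicke_basis(2, jmm1)   # diagonal (4, 4) density matrix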
def dicke(N, j, m):
"""
Generate a Dicke state as a pure density matrix in the Dicke basis.
For instance, the superradiant state given by
:math:`|j, m\\rangle = |1, 0\\rangle` for N = 2,
and the state is represented as a density matrix of size (nds, nds) or
(4, 4), with the (1, 1) element set to 1.
Parameters
----------
N: int
The number of two-level systems.
j: float
The eigenvalue j of the Dicke state (j, m).
m: float
The eigenvalue m of the Dicke state (j, m).
Returns
-------
rho: :class: qutip.Qobj
The density matrix.
"""
nds = num_dicke_states(N)
rho = np.zeros((nds, nds))
jmm1_dict = jmm1_dictionary(N)[1]
i, k = jmm1_dict[(j, m, m)]
rho[i, k] = 1.
return Qobj(rho)
# Uncoupled states in the full Hilbert space. These are returned with the
# choice of the keyword argument `basis="uncoupled"` in the state functions.
def _uncoupled_excited(N):
"""
Generate the density matrix of the excited Dicke state in the full
:math:`2^N` dimensional Hilbert space.
Parameters
----------
N: int
The number of two-level systems.
Returns
-------
psi0: :class: qutip.Qobj
The density matrix for the excited state in the uncoupled basis.
"""
N = int(N)
jz = jspin(N, "z", basis="uncoupled")
en, vn = jz.eigenstates()
psi0 = vn[2**N - 1]
return ket2dm(psi0)
def _uncoupled_superradiant(N):
"""
Generate the density matrix of a superradiant state in the full
:math:`2^N`-dimensional Hilbert space.
Parameters
----------
N: int
The number of two-level systems.
Returns
-------
psi0: :class: qutip.Qobj
The density matrix for the superradiant state in the full Hilbert
space.
"""
N = int(N)
jz = jspin(N, "z", basis="uncoupled")
en, vn = jz.eigenstates()
psi0 = vn[2**N - (N+1)]
return ket2dm(psi0)
def _uncoupled_ground(N):
"""
    Generate the density matrix of the ground state in the full
:math:`2^N`-dimensional Hilbert space.
Parameters
----------
N: int
The number of two-level systems.
Returns
-------
psi0: :class: qutip.Qobj
The density matrix for the ground state in the full Hilbert space.
"""
N = int(N)
jz = jspin(N, "z", basis="uncoupled")
en, vn = jz.eigenstates()
psi0 = vn[0]
return ket2dm(psi0)
def _uncoupled_ghz(N):
"""
Generate the density matrix of the GHZ state in the full 2^N
dimensional Hilbert space.
Parameters
----------
N: int
The number of two-level systems.
Returns
-------
ghz: :class: qutip.Qobj
The density matrix for the GHZ state in the full Hilbert space.
"""
N = int(N)
rho = np.zeros((2**N, 2**N))
rho[0, 0] = 1/2
rho[2**N - 1, 0] = 1/2
rho[0, 2**N - 1] = 1/2
rho[2**N - 1, 2**N - 1] = 1/2
spin_dim = [2 for i in range(0, N)]
spins_dims = list((spin_dim, spin_dim))
rho = Qobj(rho, dims=spins_dims)
return rho
def _uncoupled_css(N, a, b):
"""
Generate the density matrix of the CSS state in the full 2^N
dimensional Hilbert space.
The CSS states are non-entangled states given by
:math:`|a, b\\rangle = \\prod_i (a|1\\rangle_i + b|0\\rangle_i)`.
Parameters
----------
N: int
The number of two-level systems.
a: complex
        The coefficient of the :math:`|1_i\\rangle` state.
    b: complex
        The coefficient of the :math:`|0_i\\rangle` state.
Returns
-------
css: :class: qutip.Qobj
The density matrix for the CSS state in the full Hilbert space.
"""
N = int(N)
# 1. Define i_th factorized density matrix in the uncoupled basis
rho_i = np.zeros((2, 2), dtype=complex)
rho_i[0, 0] = a * np.conj(a)
rho_i[1, 1] = b * np.conj(b)
    # off-diagonal coherences of the pure single-spin state a|1> + b|0>
    rho_i[0, 1] = a * np.conj(b)
    rho_i[1, 0] = b * np.conj(a)
rho_i = Qobj(rho_i)
rho = [0 for i in range(N)]
rho[0] = rho_i
# 2. Place single-two-level-system density matrices in total Hilbert space
for k in range(N - 1):
rho[0] = tensor(rho[0], identity(2))
# 3. Cyclic sequence to create all N factorized density matrices
# |CSS>_i<CSS|_i
a = [i for i in range(N)]
b = [[a[i - i2] for i in range(N)] for i2 in range(N)]
# 4. Create all other N-1 factorized density matrices
# |+><+| = Prod_(i=1)^N |CSS>_i<CSS|_i
for i in range(1, N):
rho[i] = rho[0].permute(b[i])
identity_i = Qobj(np.eye(2**N), dims=rho[0].dims, shape=rho[0].shape)
rho_tot = identity_i
for i in range(0, N):
rho_tot = rho_tot * rho[i]
return rho_tot
def excited(N, basis="dicke"):
"""
Generate the density matrix for the excited state.
This state is given by (N/2, N/2) in the default Dicke basis. If the
argument `basis` is "uncoupled" then it generates the state in a
2**N dim Hilbert space.
Parameters
----------
N: int
The number of two-level systems.
basis: str
The basis to use. Either "dicke" or "uncoupled".
Returns
-------
state: :class: qutip.Qobj
The excited state density matrix in the requested basis.
"""
if basis == "uncoupled":
state = _uncoupled_excited(N)
return state
jmm1 = {(N/2, N/2, N/2): 1}
return dicke_basis(N, jmm1)
def superradiant(N, basis="dicke"):
"""
Generate the density matrix of the superradiant state.
This state is given by (N/2, 0) or (N/2, 0.5) in the Dicke basis.
If the argument `basis` is "uncoupled" then it generates the state
in a 2**N dim Hilbert space.
Parameters
----------
N: int
The number of two-level systems.
basis: str
The basis to use. Either "dicke" or "uncoupled".
Returns
-------
state: :class: qutip.Qobj
The superradiant state density matrix in the requested basis.
"""
if basis == "uncoupled":
state = _uncoupled_superradiant(N)
return state
if N % 2 == 0:
jmm1 = {(N/2, 0, 0): 1.}
return dicke_basis(N, jmm1)
else:
jmm1 = {(N/2, 0.5, 0.5): 1.}
return dicke_basis(N, jmm1)
def css(N, x=1/np.sqrt(2), y=1/np.sqrt(2),
basis="dicke", coordinates="cartesian"):
"""
Generate the density matrix of the Coherent Spin State (CSS).
It can be defined as,
:math:`|CSS \\rangle = \\prod_i^N(a|1\\rangle_i + b|0\\rangle_i)`
with :math:`a = sin(\\frac{\\theta}{2})`,
:math:`b = e^{i \\phi}\\cos(\\frac{\\theta}{2})`.
The default basis is that of Dicke space
:math:`|j, m\\rangle \\langle j, m'|`.
The default state is the symmetric CSS,
:math:`|CSS\\rangle = |+\\rangle`.
Parameters
----------
N: int
The number of two-level systems.
x, y: float
The coefficients of the CSS state.
basis: str
The basis to use. Either "dicke" or "uncoupled".
coordinates: str
Either "cartesian" or "polar". If polar then the coefficients
are constructed as sin(x/2), cos(x/2)e^(iy).
Returns
-------
rho: :class: qutip.Qobj
The CSS state density matrix.
"""
if coordinates == "polar":
a = np.cos(0.5 * x) * np.exp(1j * y)
b = np.sin(0.5 * x)
else:
a = x
b = y
if basis == "uncoupled":
return _uncoupled_css(N, a, b)
nds = num_dicke_states(N)
num_ladders = num_dicke_ladders(N)
rho = dok_matrix((nds, nds))
# loop in the allowed matrix elements
jmm1_dict = jmm1_dictionary(N)[1]
j = 0.5*N
mmax = int(2*j + 1)
for i in range(0, mmax):
m = j-i
psi_m = np.sqrt(float(energy_degeneracy(N, m))) * \
a**(N*0.5 + m) * b**(N*0.5 - m)
for i1 in range(0, mmax):
m1 = j - i1
row_column = jmm1_dict[(j, m, m1)]
psi_m1 = np.sqrt(float(energy_degeneracy(N, m1))) * \
np.conj(a)**(N*0.5 + m1) * np.conj(b)**(N*0.5 - m1)
rho[row_column] = psi_m*psi_m1
return Qobj(rho)
def ghz(N, basis="dicke"):
"""
Generate the density matrix of the GHZ state.
If the argument `basis` is "uncoupled" then it generates the state
in a :math:`2^N`-dimensional Hilbert space.
Parameters
----------
N: int
The number of two-level systems.
basis: str
The basis to use. Either "dicke" or "uncoupled".
Returns
-------
state: :class: qutip.Qobj
The GHZ state density matrix in the requested basis.
"""
if basis == "uncoupled":
return _uncoupled_ghz(N)
nds = _num_dicke_states(N)
rho = dok_matrix((nds, nds))
rho[0, 0] = 1/2
rho[N, N] = 1/2
rho[N, 0] = 1/2
rho[0, N] = 1/2
return Qobj(rho)
def ground(N, basis="dicke"):
"""
Generate the density matrix of the ground state.
This state is given by (N/2, -N/2) in the Dicke basis. If the argument
`basis` is "uncoupled" then it generates the state in a
:math:`2^N`-dimensional Hilbert space.
Parameters
----------
N: int
The number of two-level systems.
basis: str
The basis to use. Either "dicke" or "uncoupled"
Returns
-------
state: :class: qutip.Qobj
The ground state density matrix in the requested basis.
"""
if basis == "uncoupled":
state = _uncoupled_ground(N)
return state
nds = _num_dicke_states(N)
rho = dok_matrix((nds, nds))
rho[N, N] = 1
return Qobj(rho)
def identity_uncoupled(N):
"""
Generate the identity in a :math:`2^N`-dimensional Hilbert space.
The identity matrix is formed from the tensor product of N TLSs.
Parameters
----------
N: int
The number of two-level systems.
Returns
-------
identity: :class: qutip.Qobj
The identity matrix.
"""
N = int(N)
rho = np.zeros((2**N, 2**N))
for i in range(0, 2**N):
rho[i, i] = 1
spin_dim = [2 for i in range(0, N)]
spins_dims = list((spin_dim, spin_dim))
identity = Qobj(rho, dims=spins_dims)
return identity
def block_matrix(N):
"""Construct the block-diagonal matrix for the Dicke basis.
Parameters
----------
N: int
Number of two-level systems.
Returns
-------
block_matr: ndarray
A 2D block-diagonal matrix of ones with dimension (nds,nds),
where nds is the number of Dicke states for N two-level
systems.
"""
nds = _num_dicke_states(N)
# create a list with the sizes of the blocks, in order
blocks_dimensions = int(N/2 + 1 - 0.5*(N % 2))
blocks_list = [(2 * (i+1 * (N % 2)) + 1*((N+1) % 2))
for i in range(blocks_dimensions)]
blocks_list = np.flip(blocks_list, 0)
# create a list with each block matrix as element
square_blocks = []
k = 0
for i in blocks_list:
square_blocks.append(np.ones((i, i)))
k = k + 1
return block_diag(square_blocks)
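# Illustrative example (not part of the original module): for N = 4 the block
# sizes come out as [5, 3, 1], one block per j = 2, 1, 0 ladder, so
# block_matrix(4) is a 9 x 9 matrix with ones on the block diagonal
# (num_dicke_states(4) = 9).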
# ============================================================================
# Adding a faster version to make a Permutational Invariant matrix
# ============================================================================
def tau_column(tau, k, j):
"""
Determine the column index for the non-zero elements of the matrix for a
particular row `k` and the value of `j` from the Dicke space.
Parameters
----------
tau: str
The tau function to check for this `k` and `j`.
k: int
The row of the matrix M for which the non zero elements have
to be calculated.
j: float
The value of `j` for this row.
"""
# In the notes, we indexed from k = 1, here we do it from k = 0
k = k + 1
mapping = {"tau3": k-(2 * j + 3),
"tau2": k-1,
"tau4": k+(2 * j - 1),
"tau5": k-(2 * j + 2),
"tau1": k,
"tau6": k+(2 * j),
"tau7": k-(2 * j + 1),
"tau8": k+1,
"tau9": k+(2 * j + 1)}
# we need to decrement k again as indexing is from 0
return int(mapping[tau] - 1)
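# Illustrative sketch (not part of the original module): for row k = 5 and
# j = 2 the diagonal coupling stays in the same column, while tau9 lands
# 2j + 1 = 5 entries further down the flattened Dicke space:
#   tau_column("tau1", 5, 2)  # -> 5
#   tau_column("tau9", 5, 2)  # -> 10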
class Pim(object):
"""
The Permutation Invariant Matrix class.
Initialize the class with the parameters for generating a Permutation
Invariant matrix which evolves a given diagonal initial state `p` as:
dp/dt = Mp
Parameters
----------
N: int
The number of two-level systems.
emission: float
Incoherent emission coefficient (also nonradiative emission).
default: 0.0
dephasing: float
Local dephasing coefficient.
default: 0.0
pumping: float
Incoherent pumping coefficient.
default: 0.0
collective_emission: float
        Collective (superradiant) emission coefficient.
default: 0.0
collective_pumping: float
Collective pumping coefficient.
default: 0.0
collective_dephasing: float
Collective dephasing coefficient.
default: 0.0
Attributes
----------
N: int
The number of two-level systems.
emission: float
Incoherent emission coefficient (also nonradiative emission).
default: 0.0
dephasing: float
Local dephasing coefficient.
default: 0.0
pumping: float
Incoherent pumping coefficient.
default: 0.0
collective_emission: float
        Collective (superradiant) emission coefficient.
default: 0.0
collective_dephasing: float
Collective dephasing coefficient.
default: 0.0
collective_pumping: float
Collective pumping coefficient.
default: 0.0
M: dict
A nested dictionary of the structure {row: {col: val}} which holds
non zero elements of the matrix M
"""
def __init__(self, N, emission=0., dephasing=0, pumping=0,
collective_emission=0, collective_pumping=0,
collective_dephasing=0):
self.N = N
self.emission = emission
self.dephasing = dephasing
self.pumping = pumping
self.collective_pumping = collective_pumping
self.collective_dephasing = collective_dephasing
self.collective_emission = collective_emission
self.M = {}
def isdicke(self, dicke_row, dicke_col):
"""
Check if an element in a matrix is a valid element in the Dicke space.
        Dicke row: m value index. Dicke column: j value index.
The function returns True if the element exists in the Dicke space and
False otherwise.
Parameters
----------
dicke_row, dicke_col : int
Index of the element in Dicke space which needs to be checked
"""
rows = self.N + 1
cols = 0
if (self.N % 2) == 0:
cols = int(self.N/2 + 1)
else:
cols = int(self.N/2 + 1/2)
if (dicke_row > rows) or (dicke_row < 0):
return (False)
if (dicke_col > cols) or (dicke_col < 0):
return (False)
if (dicke_row < int(rows/2)) and (dicke_col > dicke_row):
return False
if (dicke_row >= int(rows/2)) and (rows - dicke_row <= dicke_col):
return False
else:
return True
def tau_valid(self, dicke_row, dicke_col):
"""
Find the Tau functions which are valid for this value of (dicke_row,
dicke_col) given the number of TLS. This calculates the valid tau
        values and returns a dictionary specifying the tau function name and
the value.
Parameters
----------
dicke_row, dicke_col : int
Index of the element in Dicke space which needs to be checked.
Returns
-------
taus: dict
A dictionary of key, val as {tau: value} consisting of the valid
taus for this row and column of the Dicke space element.
"""
tau_functions = [self.tau3, self.tau2, self.tau4,
self.tau5, self.tau1, self.tau6,
self.tau7, self.tau8, self.tau9]
N = self.N
if self.isdicke(dicke_row, dicke_col) is False:
return False
# The 3x3 sub matrix surrounding the Dicke space element to
# run the tau functions
indices = [(dicke_row + x, dicke_col + y) for x in range(-1, 2)
for y in range(-1, 2)]
taus = {}
for idx, tau in zip(indices, tau_functions):
if self.isdicke(idx[0], idx[1]):
j, m = self.calculate_j_m(idx[0], idx[1])
taus[tau.__name__] = tau(j, m)
return taus
def calculate_j_m(self, dicke_row, dicke_col):
"""
Get the value of j and m for the particular Dicke space element.
Parameters
----------
dicke_row, dicke_col: int
The row and column from the Dicke space matrix
Returns
-------
j, m: float
The j and m values.
"""
N = self.N
j = N/2 - dicke_col
m = N/2 - dicke_row
return(j, m)
def calculate_k(self, dicke_row, dicke_col):
"""
Get k value from the current row and column element in the Dicke space.
Parameters
----------
dicke_row, dicke_col: int
The row and column from the Dicke space matrix.
Returns
-------
k: int
The row index for the matrix M for given Dicke space
element.
"""
N = self.N
if dicke_row == 0:
k = dicke_col
else:
k = int(((dicke_col)/2) * (2 * (N + 1) - 2 * (dicke_col - 1)) +
(dicke_row - (dicke_col)))
return k
def coefficient_matrix(self):
"""
Generate the matrix M governing the dynamics for diagonal cases.
If the initial density matrix and the Hamiltonian is diagonal, the
evolution of the system is given by the simple ODE: dp/dt = Mp.
"""
N = self.N
nds = num_dicke_states(N)
rows = self.N + 1
cols = 0
sparse_M = lil_matrix((nds, nds), dtype=float)
if (self.N % 2) == 0:
cols = int(self.N/2 + 1)
else:
cols = int(self.N/2 + 1/2)
for (dicke_row, dicke_col) in np.ndindex(rows, cols):
if self.isdicke(dicke_row, dicke_col):
k = int(self.calculate_k(dicke_row, dicke_col))
row = {}
taus = self.tau_valid(dicke_row, dicke_col)
for tau in taus:
j, m = self.calculate_j_m(dicke_row, dicke_col)
current_col = tau_column(tau, k, j)
sparse_M[k, int(current_col)] = taus[tau]
return sparse_M.tocsr()
def solve(self, rho0, tlist, options=None):
"""
Solve the ODE for the evolution of diagonal states and Hamiltonians.
"""
if options is None:
options = Options()
output = Result()
output.solver = "pisolve"
output.times = tlist
output.states = []
output.states.append(Qobj(rho0))
rhs_generate = lambda y, tt, M: M.dot(y)
rho0_flat = np.diag(np.real(rho0.full()))
L = self.coefficient_matrix()
rho_t = odeint(rhs_generate, rho0_flat, tlist, args=(L,))
for r in rho_t[1:]:
diag = np.diag(r)
output.states.append(Qobj(diag))
return output
def tau1(self, j, m):
"""
Calculate coefficient matrix element relative to (j, m, m).
"""
yS = self.collective_emission
yL = self.emission
yD = self.dephasing
yP = self.pumping
yCP = self.collective_pumping
N = float(self.N)
spontaneous = yS * (1 + j - m) * (j + m)
losses = yL * (N/2 + m)
pump = yP * (N/2 - m)
collective_pump = yCP * (1 + j + m) * (j - m)
if j == 0:
dephase = yD * N/4
else:
dephase = yD * (N/4 - m**2 * ((1 + N/2)/(2 * j * (j+1))))
t1 = spontaneous + losses + pump + dephase + collective_pump
return -t1
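    # Worked example (illustrative, not in the original code): for the fully
    # excited state j = m = N/2 the dephasing bracket vanishes and tau1
    # reduces to -(yS + yL) * N, i.e. the escape rate from |N/2, N/2> is set
    # by the collective plus local emission rates alone.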
def tau2(self, j, m):
"""
Calculate coefficient matrix element relative to (j, m+1, m+1).
"""
yS = self.collective_emission
yL = self.emission
N = float(self.N)
spontaneous = yS * (1 + j - m) * (j + m)
losses = yL * (((N/2 + 1) * (j - m + 1) * (j + m))/(2 * j * (j+1)))
t2 = spontaneous + losses
return t2
def tau3(self, j, m):
"""
Calculate coefficient matrix element relative to (j+1, m+1, m+1).
"""
yL = self.emission
N = float(self.N)
num = (j + m - 1) * (j + m) * (j + 1 + N/2)
den = 2 * j * (2 * j + 1)
t3 = yL * (num/den)
return t3
def tau4(self, j, m):
"""
Calculate coefficient matrix element relative to (j-1, m+1, m+1).
"""
yL = self.emission
N = float(self.N)
num = (j - m + 1) * (j - m + 2) * (N/2 - j)
den = 2 * (j + 1) * (2 * j + 1)
t4 = yL * (num/den)
return t4
def tau5(self, j, m):
"""
Calculate coefficient matrix element relative to (j+1, m, m).
"""
yD = self.dephasing
N = float(self.N)
num = (j - m) * (j + m) * (j + 1 + N/2)
den = 2 * j * (2 * j + 1)
t5 = yD * (num/den)
return t5
def tau6(self, j, m):
"""
Calculate coefficient matrix element relative to (j-1, m, m).
"""
yD = self.dephasing
N = float(self.N)
num = (j - m + 1) * (j + m + 1) * (N/2 - j)
den = 2 * (j + 1) * (2 * j + 1)
t6 = yD * (num/den)
return t6
def tau7(self, j, m):
"""
Calculate coefficient matrix element relative to (j+1, m-1, m-1).
"""
yP = self.pumping
N = float(self.N)
num = (j - m - 1) * (j - m) * (j + 1 + N/2)
den = 2 * j * (2 * j + 1)
t7 = yP * (float(num)/den)
return t7
def tau8(self, j, m):
"""
Calculate coefficient matrix element relative to (j, m-1, m-1).
"""
yP = self.pumping
yCP = self.collective_pumping
N = float(self.N)
num = (1 + N/2) * (j-m) * (j + m + 1)
den = 2 * j * (j+1)
pump = yP * (float(num)/den)
collective_pump = yCP * (j-m) * (j+m+1)
t8 = pump + collective_pump
return t8
def tau9(self, j, m):
"""
Calculate coefficient matrix element relative to (j-1, m-1, m-1).
"""
yP = self.pumping
N = float(self.N)
num = (j+m+1) * (j+m+2) * (N/2 - j)
den = 2 * (j+1) * (2*j + 1)
t9 = yP * (float(num)/den)
return t9
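# Illustrative usage sketch (not part of the original module; assumes the
# `excited` helper above and a small N). The diagonal dynamics dp/dt = Mp can
# be driven directly through Pim:
#   pim = Pim(N=4, emission=1.0)
#   rho0 = excited(4)                      # start in the |N/2, N/2> Dicke state
#   tlist = np.linspace(0, 5, 100)
#   result = pim.solve(rho0, tlist)        # Result holding the diagonal states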
| 29.860947
| 83
| 0.567799
|
eeca48f8d405c4c6719d701b6fb9e636a27e2628
| 686
|
py
|
Python
|
Test software/serialComms.py
|
edsonmatsuda/Python-Serial-Monitor
|
471b9019538325d6dbfb52922a6f4596d87fd9ed
|
[
"MIT"
] | null | null | null |
Test software/serialComms.py
|
edsonmatsuda/Python-Serial-Monitor
|
471b9019538325d6dbfb52922a6f4596d87fd9ed
|
[
"MIT"
] | null | null | null |
Test software/serialComms.py
|
edsonmatsuda/Python-Serial-Monitor
|
471b9019538325d6dbfb52922a6f4596d87fd9ed
|
[
"MIT"
] | null | null | null |
import serial
serialPort = serial.Serial(port="COM7", baudrate=9600)
serialString = "" # Used to hold data coming over UART
while(1):
# Wait until there is data waiting in the serial buffer
if(serialPort.in_waiting > 0):
# Read data out of the buffer until a carriage return / new line is found
serialString = serialPort.readline()
# Print the contents of the serial data
print(serialString.decode('Ascii'))
        # Tell the device connected over the serial port that we received the data!
# The b at the beginning is used to indicate bytes!
serialPort.write(b"Thank you for sending data \r\n")
| 32.666667
| 83
| 0.657434
|
5c22f365b01aa807b41b9371a4f9167bb39516bb
| 15,289
|
py
|
Python
|
chia/clvm/spend_sim.py
|
hashgreen/chia-blockchain
|
b1acb5597ba242649d1dc97de7fd605148e33816
|
[
"Apache-2.0"
] | null | null | null |
chia/clvm/spend_sim.py
|
hashgreen/chia-blockchain
|
b1acb5597ba242649d1dc97de7fd605148e33816
|
[
"Apache-2.0"
] | null | null | null |
chia/clvm/spend_sim.py
|
hashgreen/chia-blockchain
|
b1acb5597ba242649d1dc97de7fd605148e33816
|
[
"Apache-2.0"
] | null | null | null |
import aiosqlite
from dataclasses import dataclass
from typing import Optional, List, Dict, Tuple, Any
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.program import Program, SerializedProgram
from chia.util.ints import uint64, uint32
from chia.util.hash import std_hash
from chia.util.errors import Err, ValidationError
from chia.util.db_wrapper import DBWrapper
from chia.util.streamable import Streamable, streamable
from chia.types.coin_record import CoinRecord
from chia.types.spend_bundle import SpendBundle
from chia.types.generator_types import BlockGenerator
from chia.types.mempool_inclusion_status import MempoolInclusionStatus
from chia.types.coin_spend import CoinSpend
from chia.full_node.bundle_tools import simple_solution_generator
from chia.full_node.mempool_manager import MempoolManager
from chia.full_node.coin_store import CoinStore
from chia.full_node.mempool_check_conditions import get_puzzle_and_solution_for_coin
from chia.consensus.constants import ConsensusConstants
from chia.consensus.default_constants import DEFAULT_CONSTANTS
from chia.consensus.coinbase import create_pool_coin, create_farmer_coin
from chia.consensus.block_rewards import calculate_pool_reward, calculate_base_farmer_reward
from chia.consensus.cost_calculator import NPCResult
"""
The purpose of this file is to provide a lightweight simulator for the testing of Chialisp smart contracts.
The Node object uses actual MempoolManager, Mempool and CoinStore objects, while substituting FullBlock and
BlockRecord objects for trimmed down versions.
There is also a provided NodeClient object which implements many of the methods from chia.rpc.full_node_rpc_client
and is designed so that you could test with it and then swap in a real rpc client that uses the same code you tested.
"""
@dataclass(frozen=True)
@streamable
class SimFullBlock(Streamable):
transactions_generator: Optional[BlockGenerator]
height: uint32 # Note that height is not on a regular FullBlock
@dataclass(frozen=True)
@streamable
class SimBlockRecord(Streamable):
reward_claims_incorporated: List[Coin]
height: uint32
prev_transaction_block_height: uint32
timestamp: uint64
is_transaction_block: bool
header_hash: bytes32
prev_transaction_block_hash: bytes32
@classmethod
def create(cls, rci: List[Coin], height: uint32, timestamp: uint64):
return cls(
rci,
height,
uint32(height - 1 if height > 0 else 0),
timestamp,
True,
std_hash(bytes(height)),
std_hash(std_hash(height)),
)
@dataclass(frozen=True)
@streamable
class SimStore(Streamable):
timestamp: uint64
block_height: uint32
block_records: List[SimBlockRecord]
blocks: List[SimFullBlock]
class SpendSim:
connection: aiosqlite.Connection
mempool_manager: MempoolManager
block_records: List[SimBlockRecord]
blocks: List[SimFullBlock]
timestamp: uint64
block_height: uint32
defaults: ConsensusConstants
@classmethod
async def create(cls, db_path=":memory:", defaults=DEFAULT_CONSTANTS):
self = cls()
self.connection = DBWrapper(await aiosqlite.connect(db_path))
coin_store = await CoinStore.create(self.connection)
self.mempool_manager = MempoolManager(coin_store, defaults)
self.defaults = defaults
# Load the next data if there is any
await self.connection.db.execute("CREATE TABLE IF NOT EXISTS block_data(data blob PRIMARY_KEY)")
cursor = await self.connection.db.execute("SELECT * from block_data")
row = await cursor.fetchone()
await cursor.close()
if row is not None:
store_data = SimStore.from_bytes(row[0])
self.timestamp = store_data.timestamp
self.block_height = store_data.block_height
self.block_records = store_data.block_records
self.blocks = store_data.blocks
else:
self.timestamp = 1
self.block_height = 0
self.block_records = []
self.blocks = []
return self
async def close(self):
c = await self.connection.db.execute("DELETE FROM block_data")
await c.close()
c = await self.connection.db.execute(
"INSERT INTO block_data VALUES(?)",
(bytes(SimStore(self.timestamp, self.block_height, self.block_records, self.blocks)),),
)
await c.close()
await self.connection.db.commit()
await self.connection.db.close()
async def new_peak(self):
await self.mempool_manager.new_peak(self.block_records[-1], [])
def new_coin_record(self, coin: Coin, coinbase=False) -> CoinRecord:
return CoinRecord(
coin,
uint32(self.block_height + 1),
uint32(0),
coinbase,
self.timestamp,
)
async def all_non_reward_coins(self) -> List[Coin]:
coins = set()
cursor = await self.mempool_manager.coin_store.coin_record_db.execute(
"SELECT * from coin_record WHERE coinbase=0 AND spent=0 ",
)
rows = await cursor.fetchall()
await cursor.close()
for row in rows:
coin = Coin(bytes32(bytes.fromhex(row[6])), bytes32(bytes.fromhex(row[5])), uint64.from_bytes(row[7]))
coins.add(coin)
return list(coins)
async def generate_transaction_generator(self, bundle: Optional[SpendBundle]) -> Optional[BlockGenerator]:
if bundle is None:
return None
return simple_solution_generator(bundle)
async def farm_block(self, puzzle_hash: bytes32 = bytes32(b"0" * 32)):
# Fees get calculated
fees = uint64(0)
if self.mempool_manager.mempool.spends:
for _, item in self.mempool_manager.mempool.spends.items():
fees = uint64(fees + item.spend_bundle.fees())
# Rewards get created
next_block_height: uint32 = uint32(self.block_height + 1) if len(self.block_records) > 0 else self.block_height
pool_coin: Coin = create_pool_coin(
next_block_height,
puzzle_hash,
calculate_pool_reward(next_block_height),
self.defaults.GENESIS_CHALLENGE,
)
farmer_coin: Coin = create_farmer_coin(
next_block_height,
puzzle_hash,
uint64(calculate_base_farmer_reward(next_block_height) + fees),
self.defaults.GENESIS_CHALLENGE,
)
await self.mempool_manager.coin_store._add_coin_records(
[self.new_coin_record(pool_coin, True), self.new_coin_record(farmer_coin, True)]
)
# Coin store gets updated
generator_bundle: Optional[SpendBundle] = None
return_additions: List[Coin] = []
return_removals: List[Coin] = []
if (len(self.block_records) > 0) and (self.mempool_manager.mempool.spends):
peak = self.mempool_manager.peak
if peak is not None:
result = await self.mempool_manager.create_bundle_from_mempool(peak.header_hash)
if result is not None:
bundle, additions, removals = result
generator_bundle = bundle
return_additions = additions
return_removals = removals
await self.mempool_manager.coin_store._add_coin_records(
[self.new_coin_record(addition) for addition in additions]
)
await self.mempool_manager.coin_store._set_spent(
[r.name() for r in removals], uint32(self.block_height + 1)
)
# SimBlockRecord is created
generator: Optional[BlockGenerator] = await self.generate_transaction_generator(generator_bundle)
self.block_records.append(
SimBlockRecord.create(
[pool_coin, farmer_coin],
next_block_height,
self.timestamp,
)
)
self.blocks.append(SimFullBlock(generator, next_block_height))
# block_height is incremented
self.block_height = next_block_height
# mempool is reset
await self.new_peak()
# return some debugging data
return return_additions, return_removals
def get_height(self) -> uint32:
return self.block_height
def pass_time(self, time: uint64):
self.timestamp = uint64(self.timestamp + time)
def pass_blocks(self, blocks: uint32):
self.block_height = uint32(self.block_height + blocks)
async def rewind(self, block_height: uint32):
new_br_list = list(filter(lambda br: br.height <= block_height, self.block_records))
new_block_list = list(filter(lambda block: block.height <= block_height, self.blocks))
self.block_records = new_br_list
self.blocks = new_block_list
await self.mempool_manager.coin_store.rollback_to_block(block_height)
self.mempool_manager.mempool.spends = {}
self.block_height = block_height
if new_br_list:
self.timestamp = new_br_list[-1].timestamp
else:
self.timestamp = uint64(1)
class SimClient:
def __init__(self, service):
self.service = service
async def push_tx(self, spend_bundle: SpendBundle) -> Tuple[MempoolInclusionStatus, Optional[Err]]:
try:
cost_result: NPCResult = await self.service.mempool_manager.pre_validate_spendbundle(
spend_bundle, None, spend_bundle.name()
)
except ValidationError as e:
return MempoolInclusionStatus.FAILED, e.code
cost, status, error = await self.service.mempool_manager.add_spendbundle(
spend_bundle, cost_result, spend_bundle.name()
)
return status, error
async def get_coin_record_by_name(self, name: bytes32) -> CoinRecord:
return await self.service.mempool_manager.coin_store.get_coin_record(name)
async def get_coin_records_by_parent_ids(
self,
parent_ids: List[bytes32],
start_height: Optional[int] = None,
end_height: Optional[int] = None,
include_spent_coins: bool = False,
) -> List[CoinRecord]:
kwargs: Dict[str, Any] = {"include_spent_coins": include_spent_coins, "parent_ids": parent_ids}
if start_height is not None:
kwargs["start_height"] = start_height
if end_height is not None:
kwargs["end_height"] = end_height
return await self.service.mempool_manager.coin_store.get_coin_records_by_parent_ids(**kwargs)
async def get_coin_records_by_puzzle_hash(
self,
puzzle_hash: bytes32,
include_spent_coins: bool = True,
start_height: Optional[int] = None,
end_height: Optional[int] = None,
) -> List[CoinRecord]:
kwargs: Dict[str, Any] = {"include_spent_coins": include_spent_coins, "puzzle_hash": puzzle_hash}
if start_height is not None:
kwargs["start_height"] = start_height
if end_height is not None:
kwargs["end_height"] = end_height
return await self.service.mempool_manager.coin_store.get_coin_records_by_puzzle_hash(**kwargs)
async def get_coin_records_by_puzzle_hashes(
self,
puzzle_hashes: List[bytes32],
include_spent_coins: bool = True,
start_height: Optional[int] = None,
end_height: Optional[int] = None,
) -> List[CoinRecord]:
kwargs: Dict[str, Any] = {"include_spent_coins": include_spent_coins, "puzzle_hashes": puzzle_hashes}
if start_height is not None:
kwargs["start_height"] = start_height
if end_height is not None:
kwargs["end_height"] = end_height
return await self.service.mempool_manager.coin_store.get_coin_records_by_puzzle_hashes(**kwargs)
async def get_block_record_by_height(self, height: uint32) -> SimBlockRecord:
return list(filter(lambda block: block.height == height, self.service.block_records))[0]
async def get_block_record(self, header_hash: bytes32) -> SimBlockRecord:
return list(filter(lambda block: block.header_hash == header_hash, self.service.block_records))[0]
async def get_block_records(self, start: uint32, end: uint32) -> List[SimBlockRecord]:
return list(filter(lambda block: (block.height >= start) and (block.height < end), self.service.block_records))
async def get_block(self, header_hash: bytes32) -> SimFullBlock:
selected_block: SimBlockRecord = list(
filter(lambda br: br.header_hash == header_hash, self.service.block_records)
)[0]
block_height: uint32 = selected_block.height
block: SimFullBlock = list(filter(lambda block: block.height == block_height, self.service.blocks))[0]
return block
async def get_all_block(self, start: uint32, end: uint32) -> List[SimFullBlock]:
return list(filter(lambda block: (block.height >= start) and (block.height < end), self.service.blocks))
async def get_additions_and_removals(self, header_hash: bytes32) -> Tuple[List[CoinRecord], List[CoinRecord]]:
selected_block: SimBlockRecord = list(
filter(lambda br: br.header_hash == header_hash, self.service.block_records)
)[0]
block_height: uint32 = selected_block.height
additions: List[CoinRecord] = await self.service.mempool_manager.coin_store.get_coins_added_at_height(
block_height
) # noqa
removals: List[CoinRecord] = await self.service.mempool_manager.coin_store.get_coins_removed_at_height(
block_height
) # noqa
return additions, removals
async def get_puzzle_and_solution(self, coin_id: bytes32, height: uint32) -> Optional[CoinSpend]:
generator = list(filter(lambda block: block.height == height, self.service.blocks))[0].transactions_generator
coin_record = await self.service.mempool_manager.coin_store.get_coin_record(coin_id)
error, puzzle, solution = get_puzzle_and_solution_for_coin(
generator,
coin_id,
self.service.defaults.MAX_BLOCK_COST_CLVM,
)
if error:
return None
else:
puzzle_ser: SerializedProgram = SerializedProgram.from_program(Program.to(puzzle))
solution_ser: SerializedProgram = SerializedProgram.from_program(Program.to(solution))
return CoinSpend(coin_record.coin, puzzle_ser, solution_ser)
async def get_all_mempool_tx_ids(self) -> List[bytes32]:
return list(self.service.mempool_manager.mempool.spends.keys())
async def get_all_mempool_items(self) -> Dict[bytes32, Dict]:
spends = {}
for tx_id, item in self.service.mempool_manager.mempool.spends.items():
spends[tx_id] = item
return spends
async def get_mempool_item_by_tx_id(self, tx_id: bytes32) -> Optional[Dict]:
item = self.service.mempool_manager.get_mempool_item(tx_id)
if item is None:
return None
else:
return item.__dict__
| 41.210243
| 119
| 0.680489
|
a4b9e9b1672cd0412069a1d01e0b31e9c88deabc
| 3,990
|
py
|
Python
|
oo/carro.py
|
WilliamCampolina/pythonbirds
|
5eec6a6159f44fdda5d210fe0cee8894b75cb04b
|
[
"MIT"
] | null | null | null |
oo/carro.py
|
WilliamCampolina/pythonbirds
|
5eec6a6159f44fdda5d210fe0cee8894b75cb04b
|
[
"MIT"
] | null | null | null |
oo/carro.py
|
WilliamCampolina/pythonbirds
|
5eec6a6159f44fdda5d210fe0cee8894b75cb04b
|
[
"MIT"
] | null | null | null |
"""
You must create a Carro (car) class that will have
two attributes composed of two other classes:
1) Motor (engine)
2) Direcao (steering)
The Motor has the responsibility of controlling the speed.
It offers the following attributes:
1) Data attribute velocidade (speed)
2) Method acelerar, which must increase the speed by one unit
3) Method frear, which must decrease the speed by two units
The Direcao has the responsibility of controlling the direction. It offers
the following attributes:
Direction value with the possible values: Norte, Sul, Leste, Oeste.
1) Method girar_a_direita (turn right)
2) Method girar_a_esquerda (turn left)
N
O L
S
Example
>>> # Testing Motor
>>> motor = Motor()
>>> motor.velocidade
0
>>> motor.acelerar()
>>> motor.velocidade
1
>>> motor.acelerar()
>>> motor.velocidade
2
>>> motor.acelerar()
>>> motor.velocidade
3
>>> motor.frear()
>>> motor.velocidade
1
>>> motor.frear()
>>> motor.velocidade
0
>>> # Testing Direcao
>>> direcao = Direcao()
>>> direcao.valor
'Norte'
>>> direcao.girar_a_direita()
>>> direcao.valor
'Leste'
>>> direcao.girar_a_direita()
>>> direcao.valor
'Sul'
>>> direcao.girar_a_direita()
>>> direcao.valor
'Oeste'
>>> direcao.girar_a_direita()
>>> direcao.valor
'Norte'
>>> direcao.girar_a_esquerda()
>>> direcao.valor
'Oeste'
>>> direcao.girar_a_esquerda()
>>> direcao.valor
'Sul'
>>> direcao.girar_a_esquerda()
>>> direcao.valor
'Leste'
>>> direcao.girar_a_esquerda()
>>> direcao.valor
'Norte'
>>> carro = Carro(direcao, motor)
>>> carro.calcular_velocidade()
0
>>> carro.acelerar()
>>> carro.calcular_velocidade()
1
>>> carro.acelerar()
>>> carro.calcular_velocidade()
2
>>> carro.frear()
>>> carro.calcular_velocidade()
0
>>> carro.calcular_direcao()
'Norte'
>>> carro.girar_a_direita()
>>> carro.calcular_direcao()
'Leste'
>>> carro.girar_a_esquerda()
>>> carro.calcular_direcao()
'Norte'
>>> carro.girar_a_esquerda()
>>> carro.calcular_direcao()
'Oeste'
"""
class Motor:
def __init__(self, velocidade=0):
self.velocidade = velocidade
def acelerar(self):
self.velocidade += 1
def frear(self):
self.velocidade -= 2
self.velocidade = max(0, self.velocidade)
# if self.velocidade == 1:
# self.velocidade = 0
# else:
# self.velocidade -= 2
NORTE = 'Norte'
SUL = 'Sul'
LESTE = 'Leste'
OESTE = 'Oeste'
class Direcao:
rotacao_a_direita_dct = {
NORTE: LESTE, LESTE: SUL, SUL: OESTE, OESTE: NORTE
}
rotacao_a_esquerda_dct = {
NORTE: OESTE, LESTE: NORTE, SUL: LESTE, OESTE: SUL
}
def __init__(self, valor=NORTE):
self.valor = valor
def girar_a_direita(self):
self.valor = self.rotacao_a_direita_dct[self.valor]
def girar_a_esquerda(self):
self.valor = self.rotacao_a_esquerda_dct[self.valor]
class Carro:
def __init__(self, direcao, motor):
self.direcao = direcao
self.motor = motor
def calcular_velocidade(self):
return self.motor.velocidade
def acelerar(self):
return self.motor.acelerar()
def frear(self):
return self.motor.frear()
def calcular_direcao(self):
return self.direcao.valor
def girar_a_direita(self):
return self.direcao.girar_a_direita()
def girar_a_esquerda(self):
return self.direcao.girar_a_esquerda()
# class Direcao:
#
# coordenadas = [NORTE, LESTE, SUL, OESTE]
# count = 0
#
# def __init__(self, valor=NORTE):
# self.valor = valor
#
# def girar_a_direita(self):
# self.count += 1
# self.count = 0 if self.count > 3 else self.count
# self.valor = self.coordenadas[self.count]
#
# def girar_a_esquerda(self):
# pass
| 22.41573
| 69
| 0.614286
|
a574b541d8082b0ca1cf282dd84950f76a23bce1
| 27
|
py
|
Python
|
src/euler_python_package/euler_python/medium/p392.py
|
wilsonify/euler
|
5214b776175e6d76a7c6d8915d0e062d189d9b79
|
[
"MIT"
] | null | null | null |
src/euler_python_package/euler_python/medium/p392.py
|
wilsonify/euler
|
5214b776175e6d76a7c6d8915d0e062d189d9b79
|
[
"MIT"
] | null | null | null |
src/euler_python_package/euler_python/medium/p392.py
|
wilsonify/euler
|
5214b776175e6d76a7c6d8915d0e062d189d9b79
|
[
"MIT"
] | null | null | null |
def problem392():
pass
| 9
| 17
| 0.62963
|
4c62b268927de9bab0c27fdec488d09d69b71354
| 2,349
|
py
|
Python
|
tests/sources/tools/simulation/human_model_generation/test_human_model_generation.py
|
makistsantekidis/opendr
|
07dee3b59d3487b9c5a93d6946317178a02c9890
|
[
"Apache-2.0"
] | 3
|
2021-06-24T01:54:25.000Z
|
2021-12-12T16:21:24.000Z
|
tests/sources/tools/simulation/human_model_generation/test_human_model_generation.py
|
makistsantekidis/opendr
|
07dee3b59d3487b9c5a93d6946317178a02c9890
|
[
"Apache-2.0"
] | 79
|
2021-06-23T10:40:10.000Z
|
2021-12-16T07:59:42.000Z
|
tests/sources/tools/simulation/human_model_generation/test_human_model_generation.py
|
makistsantekidis/opendr
|
07dee3b59d3487b9c5a93d6946317178a02c9890
|
[
"Apache-2.0"
] | 5
|
2021-07-04T07:38:50.000Z
|
2021-12-12T16:18:47.000Z
|
# Copyright 2020-2021 OpenDR European Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from opendr.engine.data import Image
import shutil
import os
from opendr.simulation.human_model_generation import PIFuGeneratorLearner
def rmdir(_dir):
try:
shutil.rmtree(_dir)
except OSError as e:
print("Error: %s - %s." % (e.filename, e.strerror))
class TestPIFuGeneratorLearner(unittest.TestCase):
@classmethod
def setUpClass(cls):
print("\n\n**********************************\nTEST PIFu Generator Learner\n"
"**********************************")
cls.temp_dir = os.path.join(os.environ['OPENDR_HOME'], "tests", "sources", "tools", "simulation",
"human_model_generation", "temp")
cls.learner = PIFuGeneratorLearner(device='cuda', checkpoint_dir=cls.temp_dir)
@classmethod
def tearDownClass(cls):
# Clean up downloaded files
rmdir(os.path.join(cls.temp_dir))
def test_infer(self):
img_rgb = Image.open(os.path.join(os.environ['OPENDR_HOME'], "projects", "simulation", "human_model_generation",
"demos", "imgs_input", "rgb", "result_0004.jpg"))
img_msk = Image.open(os.path.join(os.environ['OPENDR_HOME'], "projects", "simulation", "human_model_generation",
"demos", "imgs_input", "msk", "result_0004.jpg"))
model_3D = self.learner.infer(imgs_rgb=[img_rgb], imgs_msk=[img_msk], extract_pose=False)
        # The PIFu generator is expected to reconstruct a dense 3D mesh for this input image
self.assertGreater(model_3D.get_vertices().shape[0], 52260,
msg="The generated 3D must have more than 52260 vertices.")
if __name__ == "__main__":
unittest.main()
| 39.15
| 120
| 0.645807
|
8eb06f4e932c51a3b45380ef45943a0f8d134918
| 2,129
|
py
|
Python
|
myconnectome/qa/run_qap_func.py
|
poldrack/myconnectome
|
201f414b3165894d6fe0be0677c8a58f6d161948
|
[
"MIT"
] | 28
|
2015-04-02T16:43:14.000Z
|
2020-06-17T20:04:26.000Z
|
myconnectome/qa/run_qap_func.py
|
poldrack/myconnectome
|
201f414b3165894d6fe0be0677c8a58f6d161948
|
[
"MIT"
] | 11
|
2015-05-19T02:57:22.000Z
|
2017-03-17T17:36:16.000Z
|
myconnectome/qa/run_qap_func.py
|
poldrack/myconnectome
|
201f414b3165894d6fe0be0677c8a58f6d161948
|
[
"MIT"
] | 10
|
2015-05-21T17:01:26.000Z
|
2020-11-11T04:28:08.000Z
|
"""
run quality assurance measures on functional data
"""
import sys,glob
sys.path.append('/corral-repl/utexas/poldracklab/software_lonestar/quality-assessment-protocol')
import os
import numpy
from run_shell_cmd import run_shell_cmd
from compute_fd import compute_fd
from qap import load_func,load_image, load_mask, summary_mask, cnr,efc,fber,fwhm,artifacts,ghost_all,calc_mean_func,calc_dvars,mean_outlier_timepoints,mean_quality_timepoints
basedir='/corral-repl/utexas/poldracklab/data/selftracking/shared_dataset'
funcfiles=glob.glob(os.path.join(basedir,'sub*/BOLD/resting_run001/bold.nii.gz'))
funcdata={'subcode':[],'func_efc':[],'func_fber':[],'func_fwhm':[],'func_gsr':[],'func_dvars':[],'func_outlier':[],'func_quality':[],'func_mean_fd':[],'func_num_fd':[],'func_perc_fd':[]}
#for funcfile in funcfiles:
func_file=funcfiles[0]
if 1:
subcode=func_file.split('/')[7]
print 'processing',subcode
funcdata['subcode'].append(subcode)
mask_file=func_file.replace('.nii.gz','_brain_mask.nii.gz')
if not os.path.exists(mask_file):
cmd='bet %s %s -m -F'%(func_file,func_file.replace('.nii.gz','_brain'))
print cmd
run_shell_cmd(cmd)
func_data = load_func(func_file,mask_file)
mean_func_data = calc_mean_func(func_file)
func_mask = load_mask(mask_file)
func_efc = efc(func_data)
#func_fber = fber(func_data, func_mask)
#func_fwhm = fwhm(func_file, mask_file, out_vox=False)
print 'running ghost_all'
_,func_gsr,_=ghost_all(mean_func_data,func_mask)
print 'running calc_dvars'
func_dvars = calc_dvars(func_data, output_all=False)
print 'running mean_outlier_timepoints'
func_outlier = mean_outlier_timepoints(func_file, mask_file, out_fraction=True)
print 'running compute_fd'
motpars=numpy.loadtxt(func_file.replace('.nii.gz','_mcf.par'))
fd=compute_fd(motpars)
    sdf  # FIXME: stray token in the original script; raises a NameError when execution reaches this line
funcdata['mean_gm'].append(mean_gm)
funcdata['mean_wm'].append(mean_wm)
funcdata['std_bg'].append(std_bg)
funcdata['anat_efc'].append(anat_efc)
funcdata['anat_fber'].append(anat_fber)
funcdata['anat_fwhm'].append(anat_fwhm)
funcdata['anat_qi1'].append(anat_qi1)
| 35.483333
| 186
| 0.754345
|
31e96153e26258578217512b0c79d21d6a304678
| 14,418
|
py
|
Python
|
examples/laplace3d.py
|
xywei/volumential
|
07c6ca8c623acf24fb8deddf93baa1035234db58
|
[
"MIT"
] | 6
|
2021-05-21T23:57:15.000Z
|
2022-03-07T22:02:50.000Z
|
examples/laplace3d.py
|
inducer/volumential
|
290a5943d3f47958dcab6736bc2b758525471570
|
[
"MIT"
] | 2
|
2021-03-26T15:41:27.000Z
|
2021-03-26T15:42:21.000Z
|
examples/laplace3d.py
|
inducer/volumential
|
290a5943d3f47958dcab6736bc2b758525471570
|
[
"MIT"
] | 1
|
2021-05-21T21:23:39.000Z
|
2021-05-21T21:23:39.000Z
|
""" This example evaluates the volume potential over
[-0.5, 0.5]^3 (the bounding box set in the code) with the Laplace kernel.
"""
from __future__ import absolute_import, division, print_function
__copyright__ = "Copyright (C) 2017 - 2018 Xiaoyu Wei"
__license__ = """
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import logging
import numpy as np
import pyopencl as cl
import pymbolic as pmbl
import pymbolic.functions
from volumential.tools import ScalarFieldExpressionEvaluation as Eval
from functools import partial
verbose = True
logger = logging.getLogger(__name__)
if verbose:
logging.basicConfig(level=logging.INFO)
else:
logging.basicConfig(level=logging.CRITICAL)
def main():
print("*************************")
print("* Setting up...")
print("*************************")
dim = 3
# download precomputation results for the 3D Laplace kernel
download_table = True
table_filename = "nft_laplace3d.hdf5"
logger.info("Using table cache: " + table_filename)
q_order = 7 # quadrature order
n_levels = 5
use_multilevel_table = False
adaptive_mesh = False
n_refinement_loops = 100
refined_n_cells = 5e5
rratio_top = 0.2
rratio_bot = 0.5
dtype = np.float64
m_order = 10 # multipole order
force_direct_evaluation = False
logger.info("Multipole order = " + str(m_order))
logger.info("Quad order = " + str(q_order))
logger.info("N_levels = " + str(n_levels))
# a solution that is nearly zero at the boundary
# exp(-40) = 4.25e-18
alpha = 80
x = pmbl.var("x")
y = pmbl.var("y")
z = pmbl.var("z")
expp = pmbl.var("exp")
norm2 = x ** 2 + y ** 2 + z ** 2
source_expr = -(4 * alpha ** 2 * norm2 - 6 * alpha) * expp(-alpha * norm2)
solu_expr = expp(-alpha * norm2)
logger.info("Source expr: " + str(source_expr))
logger.info("Solu expr: " + str(solu_expr))
# bounding box
a = -0.5
b = 0.5
root_table_source_extent = 2
ctx = cl.create_some_context()
queue = cl.CommandQueue(ctx)
# logger.info("Summary of params: " + get_param_summary())
source_eval = Eval(dim, source_expr, [x, y, z])
# {{{ generate quad points
import volumential.meshgen as mg
# Show meshgen info
mg.greet()
mesh = mg.MeshGen3D(q_order, n_levels, a, b, queue=queue)
if not adaptive_mesh:
mesh.print_info()
q_points = mesh.get_q_points()
q_weights = mesh.get_q_weights()
else:
iloop = -1
while mesh.n_active_cells() < refined_n_cells:
iloop += 1
cell_centers = mesh.get_cell_centers()
cell_measures = mesh.get_cell_measures()
density_vals = source_eval(
queue,
np.array([[center[d] for center in cell_centers]
for d in range(dim)]))
crtr = np.abs(cell_measures * density_vals)
mesh.update_mesh(crtr, rratio_top, rratio_bot)
if iloop > n_refinement_loops:
print("Max number of refinement loops reached.")
break
mesh.print_info()
q_points = mesh.get_q_points()
q_weights = mesh.get_q_weights()
if 1:
try:
mesh.generate_gmsh("box_grid.msh")
except Exception as e:
print(e)
pass
legacy_msh_file = True
if legacy_msh_file:
import os
os.system("gmsh box_grid.msh convert_grid -")
assert len(q_points) == len(q_weights)
assert q_points.shape[1] == dim
q_points = np.ascontiguousarray(np.transpose(q_points))
from pytools.obj_array import make_obj_array
q_points = make_obj_array(
[cl.array.to_device(queue, q_points[i]) for i in range(dim)])
q_weights = cl.array.to_device(queue, q_weights)
# }}}
# {{{ discretize the source field
logger.info("discretizing source field")
source_vals = cl.array.to_device(
queue, source_eval(queue, np.array([coords.get() for coords in q_points]))
)
# particle_weigt = source_val * q_weight
# }}} End discretize the source field
# {{{ build tree and traversals
from boxtree.tools import AXIS_NAMES
axis_names = AXIS_NAMES[:dim]
from pytools import single_valued
coord_dtype = single_valued(coord.dtype for coord in q_points)
from boxtree.bounding_box import make_bounding_box_dtype
bbox_type, _ = make_bounding_box_dtype(ctx.devices[0], dim, coord_dtype)
bbox = np.empty(1, bbox_type)
for ax in axis_names:
bbox["min_" + ax] = a
bbox["max_" + ax] = b
# tune max_particles_in_box to reconstruct the mesh
# TODO: use points from FieldPlotter are used as target points for better
# visuals
print("building tree")
from boxtree import TreeBuilder
tb = TreeBuilder(ctx)
tree, _ = tb(
queue,
particles=q_points,
targets=q_points,
bbox=bbox,
max_particles_in_box=q_order ** 3 * 8 - 1,
kind="adaptive-level-restricted",
)
from boxtree.traversal import FMMTraversalBuilder
tg = FMMTraversalBuilder(ctx)
trav, _ = tg(queue, tree)
# }}} End build tree and traversals
# {{{ build near field potential table
from volumential.table_manager import NearFieldInteractionTableManager
import os
if download_table and (not os.path.isfile(table_filename)):
import json
with open("table_urls.json", 'r') as fp:
urls = json.load(fp)
print("Downloading table from %s" % urls['Laplace3D'])
import subprocess
subprocess.call(["wget", "-q", urls['Laplace3D'], table_filename])
tm = NearFieldInteractionTableManager(
table_filename, root_extent=root_table_source_extent,
queue=queue)
if use_multilevel_table:
logger.info("Using multilevel tables")
assert (
abs(
int((b - a)
/ root_table_source_extent) * root_table_source_extent
- (b - a))
< 1e-15)
nftable = []
for lev in range(0, tree.nlevels + 1):
print("Getting table at level", lev)
tb, _ = tm.get_table(
dim,
"Laplace",
q_order,
source_box_level=lev,
compute_method="DrosteSum",
queue=queue,
n_brick_quad_points=120,
adaptive_level=False,
use_symmetry=True,
alpha=0,
n_levels=1,
)
nftable.append(tb)
print("Using table list of length", len(nftable))
else:
logger.info("Using single level table")
force_recompute = False
# 15 levels are sufficient (the inner most brick is 1e-15**3 in volume)
nftable, _ = tm.get_table(
dim,
"Laplace",
q_order,
force_recompute=force_recompute,
compute_method="DrosteSum",
queue=queue,
n_brick_quad_points=120,
adaptive_level=False,
use_symmetry=True,
alpha=0,
n_levels=1,
)
# }}} End build near field potential table
# {{{ sumpy expansion for laplace kernel
from sumpy.expansion import DefaultExpansionFactory
from sumpy.kernel import LaplaceKernel
knl = LaplaceKernel(dim)
out_kernels = [knl]
expn_factory = DefaultExpansionFactory()
local_expn_class = expn_factory.get_local_expansion_class(knl)
mpole_expn_class = expn_factory.get_multipole_expansion_class(knl)
exclude_self = True
from volumential.expansion_wrangler_fpnd import (
FPNDExpansionWrangler,
FPNDExpansionWranglerCodeContainer)
wcc = FPNDExpansionWranglerCodeContainer(
ctx,
partial(mpole_expn_class, knl),
partial(local_expn_class, knl),
out_kernels,
exclude_self=exclude_self,
)
if exclude_self:
target_to_source = np.arange(tree.ntargets, dtype=np.int32)
self_extra_kwargs = {"target_to_source": target_to_source}
else:
self_extra_kwargs = {}
wrangler = FPNDExpansionWrangler(
code_container=wcc,
queue=queue,
tree=tree,
near_field_table=nftable,
dtype=dtype,
fmm_level_to_order=lambda kernel, kernel_args, tree, lev: m_order,
quad_order=q_order,
self_extra_kwargs=self_extra_kwargs,
)
# }}} End sumpy expansion for laplace kernel
print("*************************")
print("* Performing FMM ...")
print("*************************")
# {{{ conduct fmm computation
from volumential.volume_fmm import drive_volume_fmm
import time
queue.finish()
t0 = time.time()
pot, = drive_volume_fmm(
trav,
wrangler,
source_vals * q_weights,
source_vals,
direct_evaluation=force_direct_evaluation,
list1_only=False)
t1 = time.time()
print("Finished in %.2f seconds." % (t1 - t0))
print("(%e points per second)" % (
len(q_weights) / (t1 - t0)
))
# }}} End conduct fmm computation
print("*************************")
print("* Postprocessing ...")
print("*************************")
# {{{ postprocess and plot
# print(pot)
solu_eval = Eval(dim, solu_expr, [x, y, z])
# x = q_points[0].get()
# y = q_points[1].get()
# z = q_points[2].get()
test_x = np.array([0.0])
test_y = np.array([0.0])
test_z = np.array([0.0])
test_nodes = make_obj_array(
# get() first for CL compatibility issues
[
cl.array.to_device(queue, test_x),
cl.array.to_device(queue, test_y),
cl.array.to_device(queue, test_z),
]
)
from volumential.volume_fmm import interpolate_volume_potential
ze = solu_eval(queue, np.array([test_x, test_y, test_z]))
zs = interpolate_volume_potential(test_nodes, trav, wrangler, pot).get()
print_error = True
if print_error:
err = np.max(np.abs(ze - zs))
print("Error =", err)
# Boxtree
if 0:
import matplotlib.pyplot as plt
if dim == 2:
plt.plot(q_points[0].get(), q_points[1].get(), ".")
from boxtree.visualization import TreePlotter
plotter = TreePlotter(tree.get(queue=queue))
plotter.draw_tree(fill=False, edgecolor="black")
# plotter.draw_box_numbers()
plotter.set_bounding_box()
plt.gca().set_aspect("equal")
plt.draw()
plt.show()
# plt.savefig("tree.png")
# Direct p2p
if 0:
print("Performing P2P")
pot_direct, = drive_volume_fmm(
trav, wrangler, source_vals * q_weights,
source_vals, direct_evaluation=True)
zds = pot_direct.get()
zs = pot.get()
print("P2P-FMM diff =", np.max(np.abs(zs - zds)))
print("P2P Error =", np.max(np.abs(ze - zds)))
# Write vtk
if 0:
from meshmode.mesh.io import read_gmsh
modemesh = read_gmsh("box_grid.msh", force_ambient_dim=None)
from meshmode.discretization.poly_element import (
LegendreGaussLobattoTensorProductGroupFactory,
)
from meshmode.array_context import PyOpenCLArrayContext
from meshmode.discretization import Discretization
actx = PyOpenCLArrayContext(queue)
box_discr = Discretization(
actx, modemesh,
LegendreGaussLobattoTensorProductGroupFactory(q_order))
box_nodes_x = box_discr.nodes()[0].with_queue(queue).get()
box_nodes_y = box_discr.nodes()[1].with_queue(queue).get()
box_nodes_z = box_discr.nodes()[2].with_queue(queue).get()
box_nodes = make_obj_array(
# get() first for CL compatibility issues
[
cl.array.to_device(queue, box_nodes_x),
cl.array.to_device(queue, box_nodes_y),
cl.array.to_device(queue, box_nodes_z),
]
)
visual_order = 1
from meshmode.discretization.visualization import make_visualizer
vis = make_visualizer(queue, box_discr, visual_order)
from volumential.volume_fmm import interpolate_volume_potential
volume_potential = interpolate_volume_potential(
box_nodes, trav, wrangler, pot)
# qx = q_points[0].get()
# qy = q_points[1].get()
# qz = q_points[2].get()
exact_solution = cl.array.to_device(
queue, solu_eval(queue,
np.array([box_nodes_x, box_nodes_y, box_nodes_z])))
# clean up the mess
def clean_file(filename):
import os
try:
os.remove(filename)
except OSError:
pass
vtu_filename = "laplace3d.vtu"
clean_file(vtu_filename)
vis.write_vtk_file(
vtu_filename,
[
("VolPot", volume_potential),
# ("SrcDensity", source_density),
("ExactSol", exact_solution),
("Error", volume_potential - exact_solution),
],
)
print("Written file " + vtu_filename)
# }}} End postprocess and plot
if __name__ == '__main__':
main()
# vim: filetype=python.pyopencl:foldmethod=marker
| 28.951807
| 82
| 0.613192
|
f5e35bbd6b167d87ac817c84f6c0be5dfcdf7f93
| 803
|
py
|
Python
|
python/2020/october/oct-21/main.py
|
lineville/Daily-Coding-Problem
|
9088616669c5f183457c9238128f0d47b85097d9
|
[
"MIT"
] | null | null | null |
python/2020/october/oct-21/main.py
|
lineville/Daily-Coding-Problem
|
9088616669c5f183457c9238128f0d47b85097d9
|
[
"MIT"
] | 4
|
2021-08-09T00:30:32.000Z
|
2021-10-04T21:38:12.000Z
|
python/2020/october/oct-21/main.py
|
lineville/Daily-Coding-Problem
|
9088616669c5f183457c9238128f0d47b85097d9
|
[
"MIT"
] | null | null | null |
# * Daily Coding Problem October 21 2020
# * [Easy] -- Amazon
# * Write a function that takes a natural number as input and
# * returns the number of digits the input has.
# Constraint: don't use any loops.
# * This works but not clear if python is using loops under the hood to convert to string
def numberLength(n: int) -> int:
return len(str(n))
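# Alternative sketch (illustrative, not part of the original solution): the
# digit count can also be computed arithmetically, avoiding the string
# conversion; note that floating-point log10 can misbehave for very large n.
#   import math
#   def numberLengthMath(n: int) -> int:
#       return math.floor(math.log10(n)) + 1 if n > 0 else 1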
def main():
n = 12345
result = numberLength(n)
print(f"{n} has {result} digits")
# > python3 ./main.py
main()
# * ____________________________________TESTS____________________________________
# * To run tests:
# > pytest-3 ./main.py
def testNumberLength1():
assert numberLength(1) == 1
def testNumberLength2():
assert numberLength(12) == 2
def testNumberLength15():
assert numberLength(123456789876543) == 15
| 19.585366
| 89
| 0.697385
|
ad221f33daef8d05255ab18c997b27a52880895b
| 5,108
|
py
|
Python
|
RecoFramework/loginController.py
|
kuangzijian/RecoSys
|
765806ffbf6bed3bd6c599cc84217872dfa560ae
|
[
"MIT"
] | null | null | null |
RecoFramework/loginController.py
|
kuangzijian/RecoSys
|
765806ffbf6bed3bd6c599cc84217872dfa560ae
|
[
"MIT"
] | null | null | null |
RecoFramework/loginController.py
|
kuangzijian/RecoSys
|
765806ffbf6bed3bd6c599cc84217872dfa560ae
|
[
"MIT"
] | 1
|
2021-04-14T02:13:16.000Z
|
2021-04-14T02:13:16.000Z
|
from django.http import HttpResponse
from django.shortcuts import render,redirect
from django.views.decorators.csrf import csrf_exempt
#from django.db import models
import numpy as np
from lightfm import LightFM
from lightfm.data import Dataset
import sqlite3
from lightfm.evaluation import precision_at_k
from lightfm.evaluation import auc_score
from lightfm.cross_validation import random_train_test_split
from RecoFramework.models import UserInfo
import imdb
import json
def dict_factory(cursor, row):
d = {}
d[row[0]] = row[1:]
return d
def fetch_data():
# Create a SQL connection to our SQLite database
con = sqlite3.connect("db.sqlite3")
cur = con.cursor()
# The result of a "cursor.execute" can be iterated over by row
data = []
users = []
movies = []
for row in cur.execute('SELECT id FROM RecoFramework_userinfo;'):
users.append(row[0])
for row in cur.execute('SELECT movieId FROM RecoFramework_movies;'):
movies.append(row[0])
for row in cur.execute('SELECT userId, movieId, rating FROM RecoFramework_ratings WHERE rating = 5;'):
data.append(row)
dataset = Dataset()
#print("Loading dataset...")
dataset.fit(users, movies)
interactions, ratings = dataset.build_interactions(data)
# Be sure to close the connection
con.close()
train, test = random_train_test_split(interactions)
model = LightFM(loss='warp')
# train lightFM model using fit method
#print("Starting training the model...")
model.fit(train, epochs=30, num_threads=2)
user_dict = dataset._user_id_mapping
movie_dict = dataset._item_id_mapping
return model, ratings, user_dict, movie_dict, train, test
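# Note (illustrative, not in the original file): LightFM's WARP loss optimises
# ranking, and model.predict() works on the *internal* indices exposed by
# user_dict/movie_dict, so database ids must be translated through those
# mappings (as recommend_by_userid below does) before scoring.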
def recommend_by_userid(userid):
# fetch data movie data and trained model from our database
model, _, user_dict, movie_dict, train, test = fetch_data()
movie_indices = list(movie_dict.keys())
movie_ids = list(movie_dict.values())
known_movies = []
recommended_movies = []
# number of movies and users in our dataset
n_users, n_items = train.shape
user_index = user_dict[userid]
# list of movie indices that user likes
known = train.tocsr()[user_index].indices
for i, movie in enumerate(known):
known_movies.append(movie_indices[movie_ids.index(movie)])
# predicting the scores
#print('Predicting the scores')
scores = model.predict(user_index, np.arange(n_items))
# ranking them in non increasing order
top = np.argsort(-scores)
for i, movie in enumerate(top):
recommended_movies.append(movie_indices[movie_ids.index(movie)])
# display results
#print("User %s" % str(userid))
#print("------Known Choices:")
# for x in known_movies[:20]:
# print("%s" % x)
# print("------Recomended:")
# for x in recommended_movies[:10]:
# print("%s" % x)
con = sqlite3.connect("db.sqlite3")
con.row_factory = dict_factory
cur = con.cursor()
result = []
for mid in recommended_movies[:10]:
cur.execute("SELECT * FROM RecoFramework_movies WHERE movieId=?", (mid, ))
r = cur.fetchone()
result.append(r)
con.close()
return result
def mvid_l(recommended_movies):
imbdid_li = []
for item in recommended_movies:
imbdid_li.append(list(item.values())[0][0])
return imbdid_li
def get_ImgURL_from_ID_lst(id_lst):
d = []
access = imdb.IMDb()
for id in id_lst:
movie = access.get_movie(id)
d.append((movie['title'], movie['cover url']))
return d
def login_map(request):
return render(request, 'adminPage.html')
def login_success(request):
return render(request, 'sessionTest.html')
@csrf_exempt
def ccid_verify(request):
# try:
# UserInfo.objects.create(username='BQ', password='456', age=27)
# print('successful')
# except:
# print('failed')
# try:
# for i in range(941):
# UserInfo.objects.create(username='admin', password='123', age=18)
# print('successful')
# except:
# print('failed')
user_list_obj = UserInfo.objects.values()
ccid_list = []
for item in user_list_obj:
username = item['username']
ccid_list.append(username)
#print(ccid_list)
if request.method == "POST":
username = request.POST['username']
userid = UserInfo.objects.filter(username=username)
if username in ccid_list:
request.session['userid'] = userid # also cookie
userid = userid.values()[0]['id']
print(userid)
movieids = recommend_by_userid(userid)
mvidList = mvid_l(movieids)
print(movieids)
url_lis = json.dumps(get_ImgURL_from_ID_lst(mvidList))
#url_lis = json.dumps([(1,'bs'),(3,'shit'),(5,'fuck'),(7,'basu'),(9,'ma4')])
return render(request, '802project.html', {'urllist': url_lis})
else:
return render(request, 'adminPage.html',{'script': "alert", 'wrong': 'You have input wrong ccid, please re-input'})
| 29.188571
| 127
| 0.653876
|
707ad7c7c3cdcc649881572213d38c69e388ed13
| 502
|
py
|
Python
|
loan_appraiser/config/loan_appraiser.py
|
Rashidsalim/loan-appraiser
|
08b9ef32c02298b8745311299a9cc83a4a1084d2
|
[
"MIT"
] | null | null | null |
loan_appraiser/config/loan_appraiser.py
|
Rashidsalim/loan-appraiser
|
08b9ef32c02298b8745311299a9cc83a4a1084d2
|
[
"MIT"
] | null | null | null |
loan_appraiser/config/loan_appraiser.py
|
Rashidsalim/loan-appraiser
|
08b9ef32c02298b8745311299a9cc83a4a1084d2
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
from frappe import _
def get_data():
return [
{
"label": _("Appraiser"),
"icon": "octicon octicon-file-directory",
"items": [
{
"doctype": "doc",
"type": "doctype",
"name": "Customer",
"label": _("Customer"),
"description": _("Register Customers"),
}
]
}
]
| 25.1
| 59
| 0.39243
|
ce613bac5560bc71ee1477487d581f2a640fe12a
| 524
|
py
|
Python
|
src/fixture/festivalfixture.py
|
rangoski/gymkhana_portal
|
49d1c696beada33a504898a7bb8eef65ad509635
|
[
"MIT"
] | null | null | null |
src/fixture/festivalfixture.py
|
rangoski/gymkhana_portal
|
49d1c696beada33a504898a7bb8eef65ad509635
|
[
"MIT"
] | null | null | null |
src/fixture/festivalfixture.py
|
rangoski/gymkhana_portal
|
49d1c696beada33a504898a7bb8eef65ad509635
|
[
"MIT"
] | null | null | null |
import factory
import random
COLOUR = ["yellow", "black", "purple", "red", "orange", "green", '#084594', '#2171b5', '#4292c6', '#6baed6', '#9ecae1',
'#c6dbef', '#deebf7', '#f7fbff'
]
class FestivalFactory(factory.django.DjangoModelFactory):
class Meta:
model = 'main.Festival'
name = random.choice(['IGNS', 'VRCHS', 'SPNDN', 'NMBL'])
photo = factory.django.ImageField(color=random.choice(COLOUR))
about = factory.Faker('sentence', nb_words=30)
link = factory.Faker('url')
| 30.823529
| 119
| 0.622137
|
0c2592ec42acd08111188e5618e285dcfdec5666
| 10,240
|
py
|
Python
|
ppr-api/tests/unit/endpoints/test_financing_statement.py
|
bcgov/ppr-deprecated
|
c8925b6f6b0d7fb3f4e267dfe25650a1045ef2e3
|
[
"Apache-2.0"
] | 1
|
2019-11-15T19:07:25.000Z
|
2019-11-15T19:07:25.000Z
|
ppr-api/tests/unit/endpoints/test_financing_statement.py
|
bryan-gilbert/ppr
|
c8925b6f6b0d7fb3f4e267dfe25650a1045ef2e3
|
[
"Apache-2.0"
] | 6
|
2021-03-03T05:18:35.000Z
|
2022-02-10T21:55:45.000Z
|
ppr-api/tests/unit/endpoints/test_financing_statement.py
|
bcgov/ppr-deprecated
|
c8925b6f6b0d7fb3f4e267dfe25650a1045ef2e3
|
[
"Apache-2.0"
] | null | null | null |
import datetime
import datedelta
import fastapi
import pytest
import endpoints.financing_statement
import models.collateral
import models.financing_statement
import models.party
import schemas.financing_statement
from schemas.financing_statement import RegistrationType
from schemas.party import PartyType
def test_read_financing_statement_maps_model_to_schema():
base_reg_num = '123456A'
stub_fs = stub_financing_statement(base_reg_num)
repo = MockFinancingStatementRepository(stub_fs)
result = endpoints.financing_statement.read_financing_statement(base_reg_num, repo)
assert isinstance(result, schemas.financing_statement.FinancingStatement)
assert result.baseRegistrationNumber == base_reg_num
def test_read_financing_statement_not_found():
repo = MockFinancingStatementRepository(None)
try:
endpoints.financing_statement.read_financing_statement('987654Z', repo)
except fastapi.HTTPException as ex:
assert ex.status_code == 404
else:
pytest.fail('A Not Found error was expected')
def test_read_financing_statement_life_years_is_none_when_negative():
base_reg_num = '123456B'
stub_fs = stub_financing_statement(base_reg_num, years=-1)
repo = MockFinancingStatementRepository(stub_fs)
result = endpoints.financing_statement.read_financing_statement(base_reg_num, repo)
assert result.lifeYears is None
assert result.expiryDate == stub_fs.expiry_date # expiry_date is None in this case
def test_read_financing_statement_life_years_is_applied_when_positive():
base_reg_num = '123456B'
stub_fs = stub_financing_statement(base_reg_num, years=25)
repo = MockFinancingStatementRepository(stub_fs)
result = endpoints.financing_statement.read_financing_statement(base_reg_num, repo)
assert result.lifeYears == 25
assert result.expiryDate == stub_fs.expiry_date
def test_read_financing_statement_includes_registration_type_name():
base_reg_num = '123456C'
stub_fs = stub_financing_statement(base_reg_num, reg_type=RegistrationType.REPAIRERS_LIEN)
repo = MockFinancingStatementRepository(stub_fs)
result = endpoints.financing_statement.read_financing_statement(base_reg_num, repo)
assert result.type == RegistrationType.REPAIRERS_LIEN.name
def test_read_financing_statement_registration_date_taken_from_base_event():
base_reg_num = '123456C'
stub_fs = stub_financing_statement(base_reg_num)
stub_fs.last_updated = datetime.datetime.now() + datetime.timedelta(seconds=1)
repo = MockFinancingStatementRepository(stub_fs)
result = endpoints.financing_statement.read_financing_statement(base_reg_num, repo)
assert result.registrationDateTime == stub_fs.events[0].registration_date
def test_read_financing_statement_registration_is_none_when_base_event_not_present():
base_reg_num = '123456C'
stub_fs = stub_financing_statement(base_reg_num)
stub_fs.events = []
repo = MockFinancingStatementRepository(stub_fs)
result = endpoints.financing_statement.read_financing_statement(base_reg_num, repo)
assert result.registrationDateTime is None
def test_read_financing_statement_registering_party_name_should_be_included():
base_reg_num = '123456C'
reg_party = models.party.Party(type_code=PartyType.REGISTERING.value, base_registration_number=base_reg_num,
starting_registration_number=base_reg_num, first_name='Homer', middle_name='Jay',
last_name='Simpson')
stub_fs = stub_financing_statement(base_reg_num, parties=[reg_party])
repo = MockFinancingStatementRepository(stub_fs)
result = endpoints.financing_statement.read_financing_statement(base_reg_num, repo)
assert type(result.registeringParty) == schemas.party.Party
assert result.registeringParty.personName.first == 'Homer'
assert result.registeringParty.personName.middle == 'Jay'
assert result.registeringParty.personName.last == 'Simpson'
def test_read_financing_statement_registration_party_should_be_none_when_not_present():
base_reg_num = '123456C'
stub_fs = stub_financing_statement(base_reg_num, parties=[])
repo = MockFinancingStatementRepository(stub_fs)
result = endpoints.financing_statement.read_financing_statement(base_reg_num, repo)
assert result.registeringParty is None
def test_read_financing_statement_debtor_should_be_mapped_to_schema():
base_reg_num = '123456C'
debtor = models.party.Party(
type_code=PartyType.DEBTOR.value, base_registration_number=base_reg_num,
starting_registration_number=base_reg_num, first_name='Homer', middle_name='Jay', last_name='Simpson',
business_name='Mr. Plow', birthdate=datetime.date(1990, 6, 15)
)
stub_fs = stub_financing_statement(base_reg_num, parties=[debtor])
repo = MockFinancingStatementRepository(stub_fs)
result = endpoints.financing_statement.read_financing_statement(base_reg_num, repo)
assert len(result.debtors) == 1
debtor = result.debtors[0]
assert type(debtor) == schemas.party.Debtor
assert debtor.personName.first == 'Homer'
assert debtor.personName.middle == 'Jay'
assert debtor.personName.last == 'Simpson'
assert debtor.businessName == 'Mr. Plow'
assert debtor.birthdate == datetime.date(1990, 6, 15)
assert debtor.address is None
def test_read_financing_statement_debtor_address_should_be_mapped_to_schema():
base_reg_num = '123456C'
address_model = models.party.Address(line1='123 Fake Street', line2='Suite 100', city='Victoria', region='BC',
country='CA', postal_code='V1V 1V1')
debtor = models.party.Party(
type_code=PartyType.DEBTOR.value, address=address_model, first_name='Homer', last_name='Simpson',
base_registration_number=base_reg_num, starting_registration_number=base_reg_num
)
stub_fs = stub_financing_statement(base_reg_num, parties=[debtor])
repo = MockFinancingStatementRepository(stub_fs)
result = endpoints.financing_statement.read_financing_statement(base_reg_num, repo)
address_result = result.debtors[0].address
assert address_result.street == '123 Fake Street'
assert address_result.streetAdditional == 'Suite 100'
assert address_result.city == 'Victoria'
assert address_result.region == 'BC'
assert address_result.country == 'CA'
assert address_result.postalCode == 'V1V 1V1'
def test_read_financing_statement_general_collateral_should_be_included():
base_reg_num = '123456D'
collateral1 = models.collateral.GeneralCollateral(description='collateral description', index=1)
collateral2 = models.collateral.GeneralCollateral(description=' plus appended portion', index=2)
stub_fs = stub_financing_statement(base_reg_num, general_collateral=[collateral1, collateral2])
collateral1.start_event = stub_fs.get_base_event()
collateral2.start_event = stub_fs.get_base_event()
repo = MockFinancingStatementRepository(stub_fs)
result = endpoints.financing_statement.read_financing_statement(base_reg_num, repo)
assert len(result.generalCollateral) == 1
assert result.generalCollateral[0].description == 'collateral description plus appended portion'
def test_read_financing_statement_general_collateral_when_list_is_empty():
base_reg_num = '123456D'
stub_fs = stub_financing_statement(base_reg_num, general_collateral=[])
repo = MockFinancingStatementRepository(stub_fs)
result = endpoints.financing_statement.read_financing_statement(base_reg_num, repo)
assert len(result.generalCollateral) == 0
def test_read_financing_statement_vehicle_collateral_should_be_included():
base_reg_num = '123456E'
collateral1 = models.collateral.VehicleCollateral(type_code='BO', serial_number='5654768V2')
collateral2 = models.collateral.VehicleCollateral(type_code='MH', mhr_number='5678943')
stub_fs = stub_financing_statement(base_reg_num, vehicle_collateral=[collateral1, collateral2])
repo = MockFinancingStatementRepository(stub_fs)
result = endpoints.financing_statement.read_financing_statement(base_reg_num, repo)
assert len(result.vehicleCollateral) == 2
assert next(x for x in result.vehicleCollateral if x.serial == '5654768V2')
assert next(x for x in result.vehicleCollateral if x.manufacturedHomeRegNumber == '5678943')
def stub_financing_statement(base_reg_number: str, years: int = None, parties: list = None, general_collateral=[],
vehicle_collateral=[], reg_type: RegistrationType = RegistrationType.SECURITY_AGREEMENT):
if reg_type == RegistrationType.REPAIRERS_LIEN:
expiry = datetime.date.today() + datedelta.datedelta(days=180)
else:
years = -1 if years is None else years
expiry = datetime.date.today() + datedelta.datedelta(years=years) if years > 0 else None
parties = [models.party.Party(type_code=PartyType.REGISTERING.value, base_registration_number=base_reg_number,
starting_registration_number=base_reg_number, first_name='Fred',
last_name='Flintstone')] if parties is None else parties
return models.financing_statement.FinancingStatement(
registration_number=base_reg_number, registration_type_code=reg_type.value, status='A', discharged=False,
life_in_years=years, expiry_date=expiry, last_updated=datetime.datetime.now(),
events=[models.financing_statement.FinancingStatementEvent(
registration_number=base_reg_number, base_registration_number=base_reg_number,
registration_date=datetime.datetime.now(), starting_parties=parties
)],
parties=parties, general_collateral=general_collateral, vehicle_collateral=vehicle_collateral
)
class MockFinancingStatementRepository:
def __init__(self, financing_statement_result):
self.financing_statement = financing_statement_result
def get_financing_statement(self, base_registration_number: str):
if self.financing_statement:
assert base_registration_number == self.financing_statement.registration_number
return self.financing_statement
| 44.137931
| 118
| 0.772949
|
24303fc2bb03c5d83a46893d3ff74a2ec5baeb20
| 3,168
|
py
|
Python
|
src/programy/dialog/joiner/joiner.py
|
cen-ai/program-y
|
a753667638147544c54dbebd9f1c8f9ae7f2159e
|
[
"MIT"
] | 5
|
2018-08-21T00:13:45.000Z
|
2018-09-01T20:00:55.000Z
|
src/programy/dialog/joiner/joiner.py
|
cen-ai/program-y
|
a753667638147544c54dbebd9f1c8f9ae7f2159e
|
[
"MIT"
] | 1
|
2018-09-12T18:30:17.000Z
|
2018-09-12T18:30:17.000Z
|
src/programy/dialog/joiner/joiner.py
|
cen-ai/program-y
|
a753667638147544c54dbebd9f1c8f9ae7f2159e
|
[
"MIT"
] | 5
|
2018-08-21T00:08:36.000Z
|
2018-09-23T06:11:04.000Z
|
"""
Copyright (c) 2016-2018 Keith Sterling http://www.keithsterling.com
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from programy.utils.logging.ylogger import YLogger
from programy.config.bot.joiner import BotSentenceJoinerConfiguration
from programy.utils.classes.loader import ClassLoader
class SentenceJoiner(object):
def __init__(self, joiner_config):
assert (joiner_config is not None)
assert (isinstance(joiner_config, BotSentenceJoinerConfiguration))
self._configuration = joiner_config
def combine_answers(self, answers, srai):
final_sentences = []
for sentence in answers:
if sentence:
# Capitalise the start of each sentence
if sentence[0].isalpha():
sentence = sentence[0].upper() + sentence[1:]
# If it ends with a terminator, keep the terminator, otherwise add a full stop
if self.ends_with_terminator(sentence):
final_sentences.append(sentence)
else:
if srai is False:
final_sentences.append(sentence+self._configuration.terminator)
else:
final_sentences.append(sentence)
return " ".join([sentence for sentence in final_sentences])
def ends_with_terminator(self, sentence):
for ch in self._configuration.join_chars:
if sentence.endswith(ch):
return True
return False
@staticmethod
def initiate_sentence_joiner(joiner_config):
if joiner_config.classname is not None:
try:
YLogger.info(None, "Loading sentence joiner from class [%s]", joiner_config.classname)
joiner_class = ClassLoader.instantiate_class(joiner_config.classname)
sentence_joiner = joiner_class(joiner_config)
return sentence_joiner
except Exception as excep:
YLogger.exception(None, "Failed to initiate sentence joiner", excep)
else:
YLogger.warning(None, "No configuration setting for sentence joiner!")
return None
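# Illustrative behaviour of combine_answers (a hedged sketch; assumes a configuration
# whose terminator is "." and whose join_chars include ".", "!" and "?"):
#
#   joiner.combine_answers(["hello there", "how are you?"], srai=False)
#   # -> "Hello there. How are you?"
#   # Each sentence is capitalised, and a terminator is appended only when missing.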
| 44.619718
| 120
| 0.687816
|
f8725473e04403da65a6c157167a89021bda2762
| 966
|
py
|
Python
|
test_lab/server.py
|
Volodar/test_lab
|
829aef5792bd4ab7eaa61b48d8b6e249fb0e78f1
|
[
"MIT"
] | null | null | null |
test_lab/server.py
|
Volodar/test_lab
|
829aef5792bd4ab7eaa61b48d8b6e249fb0e78f1
|
[
"MIT"
] | null | null | null |
test_lab/server.py
|
Volodar/test_lab
|
829aef5792bd4ab7eaa61b48d8b6e249fb0e78f1
|
[
"MIT"
] | null | null | null |
from http.server import HTTPServer
from http.server import BaseHTTPRequestHandler
from .log import Log
class HttpServer(BaseHTTPRequestHandler):
request_handler_class = None
@staticmethod
def start(url, port, request_handler_class):
HttpServer.request_handler_class = request_handler_class
server = HTTPServer((url, port), HttpServer)
return server
def do_GET(self):
try:
self.send_response(200)
self.send_header('content-type', 'text/html')
self.end_headers()
s = self.path
payload = s
address = self.client_address
request_handler = HttpServer.request_handler_class(self)
request_handler.handle(address, payload)
except Exception as inst:
self.wfile.write("error({})".format(inst))
Log.error("error({})".format(inst))
    def send(self, message):
        if isinstance(message, str):
            message = message.encode()  # wfile expects bytes in Python 3
        self.wfile.write(message)
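# Minimal usage sketch (the handler class below is hypothetical; start() returns a
# plain http.server.HTTPServer, so serve_forever() drives the request loop):
#
#   class EchoHandler:
#       def __init__(self, request):
#           self.request = request                 # the HttpServer request instance
#       def handle(self, address, payload):
#           self.request.send('you asked for {}'.format(payload))
#
#   server = HttpServer.start('localhost', 8080, EchoHandler)
#   server.serve_forever()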
| 29.272727
| 68
| 0.640787
|
d5079bac50dbe08691fe493ed46da1a02716e3ad
| 897
|
py
|
Python
|
leetcode/187.py
|
windniw/just-for-fun
|
54e5c2be145f3848811bfd127f6a89545e921570
|
[
"Apache-2.0"
] | 1
|
2019-08-28T23:15:25.000Z
|
2019-08-28T23:15:25.000Z
|
leetcode/187.py
|
windniw/just-for-fun
|
54e5c2be145f3848811bfd127f6a89545e921570
|
[
"Apache-2.0"
] | null | null | null |
leetcode/187.py
|
windniw/just-for-fun
|
54e5c2be145f3848811bfd127f6a89545e921570
|
[
"Apache-2.0"
] | null | null | null |
"""
link: https://leetcode-cn.com/problems/repeated-dna-sequences
problem: given a string made up of A, C, G and T, find every length-10 substring that occurs more than once; note that repeated substrings may overlap.
solution: compressed search. Treat the four letters as 0b00, 0b01, 0b10 and 0b11, so any length-10 substring can be stored as a 20-bit integer. "& mask" clears the high bits
          and shifting plus "+" appends the new low bits; look each compressed value up in a set, and a repeated integer means a repeated substring.
"""
from typing import List
class Solution:
def findRepeatedDnaSequences(self, s: str) -> List[str]:
def f(x: str):
if x[0] == 'A': return 0
if x[0] == 'C': return 1
if x[0] == 'G': return 2
if x[0] == 'T': return 3
if len(s) < 10:
return []
m, mask, merge_sum = set(), 0xfffff, f(s[0])
res = set()
for i in range(1, len(s)):
merge_sum = (merge_sum << 2 & mask) + f(s[i])
if i >= 9:
if merge_sum in m:
res.add(s[i - 9:i + 1])
m.add(merge_sum)
return list(res)
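# Worked example (the standard LeetCode test case, as a hedged sanity check):
#   Solution().findRepeatedDnaSequences("AAAAACCCCCAAAAACCCCCCAAAAAGGGTTT")
#   -> ["AAAAACCCCC", "CCCCCAAAAA"]   (order may differ, since a set collects results)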
| 29.9
| 96
| 0.507246
|
5fd57c5579d7b05c320e7bbf61d29e4a5c210238
| 380
|
py
|
Python
|
Backend/core/users/signals.py
|
Extraordinary01/freshnesecom
|
e16047d7f8a8d771125c4656351bae2b4389a1a6
|
[
"MIT"
] | null | null | null |
Backend/core/users/signals.py
|
Extraordinary01/freshnesecom
|
e16047d7f8a8d771125c4656351bae2b4389a1a6
|
[
"MIT"
] | null | null | null |
Backend/core/users/signals.py
|
Extraordinary01/freshnesecom
|
e16047d7f8a8d771125c4656351bae2b4389a1a6
|
[
"MIT"
] | null | null | null |
from os import path
from django.conf import settings
from django.db.models.signals import pre_delete
from django.dispatch import receiver
from .models import User
@receiver(pre_delete, sender=User)
def delete_img_hook(sender, instance, using, **kwargs):
    if instance.img.path != path.join(settings.BASE_DIR, 'media', 'default_user.jpg'):
instance.img.delete(save=False)
| 31.666667
| 83
| 0.776316
|
505f8b5bdd95a6e4c8bbee16921665eabff84351
| 3,841
|
py
|
Python
|
code/generate_submission.py
|
kode-git/network-configuration-behaviour-evaluation
|
1c5fb273fbb5bb0e6e0fd2a04f1237cdcd7ff5a0
|
[
"Apache-2.0"
] | null | null | null |
code/generate_submission.py
|
kode-git/network-configuration-behaviour-evaluation
|
1c5fb273fbb5bb0e6e0fd2a04f1237cdcd7ff5a0
|
[
"Apache-2.0"
] | null | null | null |
code/generate_submission.py
|
kode-git/network-configuration-behaviour-evaluation
|
1c5fb273fbb5bb0e6e0fd2a04f1237cdcd7ff5a0
|
[
"Apache-2.0"
] | null | null | null |
import tensorflow as tf
import configparser
import pandas as pd
import numpy as np
from read_dataset import input_fn
from routenet_model import model_fn
###################
# Input variables #
###################
# Path to the test dataset root directory
test_dataset_directory = "../data/sample_data/test"
# path to the configuration file (set correctly the paths to the trained model within this file - i.e., "logs" variable)
config_file_path = '../code/config.ini'
# The filename of the output compressed CSV file (in ZIP format)
output_filename = 'submission_file'
def generate_upload_csv(test_dir, model_dir, filename, config):
"""Generates, compresses (in ZIP) and saves a Pandas Dataframe in CSV format with the predicted delays.
Args:
test_dir (string): Path of the test dataset root directory.
model_dir (string): Directory of the trained model.
filename (string): The filename of the compressed CSV file.
config (configparser): Config file containing the different configurations
and hyperparameters.
"""
    # IMPORTANT NOTE! In order to compress the data, pandas needs a simple filename for the output file, without the path or the extension.
# (i.e., "submission_file", not "./home/dataset/submission_file.zip")
if '/' in filename:
print("---WARNING---")
print("---Filename must be a simple filename, it should not include a path--- Use \"submission_file\" instead of \"./home/dataset/submission_file.zip\"")
print("GENERATING DELAY LABELS WITH THE TRAINED MODEL...")
########################
# Generate predictions #
########################
# Create the estimator loading the model
estimator = tf.estimator.Estimator(
model_fn=model_fn,
model_dir=model_dir,
params=config
)
# Generate the dataset and make the predictions
pred_results = estimator.predict(input_fn=lambda: input_fn(test_dir, repeat=False, shuffle=False))
# Collect the predictions
pred = np.array([pred['predictions'] for pred in pred_results])
###################
# Denormalization #
###################
# If you have applied any normalization, please denormalize the predicted values here
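    # e.g. (hypothetical constants, only relevant if such scaling was applied during training):
    # pred = pred * delay_std + delay_mean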
####################
# Prepare the data #
####################
print("RESHAPING THE DATA...")
# Prepare the data as it should be in the CSV file (each line contains the 342 src-dst delays of a sample)
# The network of the test dataset has in total 342 src-dst paths (19 sources x 18 destinations = 342 src-dst pairs)
pred = pred.reshape(int(pred.shape[0] / 342), 342)
print("CHECKING CSV format...")
if pred.shape != (50000, 342):
print("--- WARNING ---")
print("--- The format of the CSV file is not correct. It must have 50,000 lines with 342 values each one---")
print("It has currently the following lines and and elements: " +str(pred.shape))
print("SAVING CSV FILE COMPRESSED IN ZIP...")
df = pd.DataFrame(pred)
# The CSV file will be directly compressed in ZIP
compression_options = dict(method='zip', archive_name=f'{filename}.csv')
# The CSV file uses ";" as separator between values
df.to_csv(f'{filename}.zip', header=False, index=False, sep=";", compression=compression_options)
# MAIN
# Loading the Configuration file with the model hyperparameters
config = configparser.ConfigParser()
config._interpolation = configparser.ExtendedInterpolation()
config.read(config_file_path)
generate_upload_csv(test_dataset_directory,
# It loads the last saved model within the "logs" directory of the config.ini file
config['DIRECTORIES']['logs'],
output_filename,
config._sections)
| 40.861702
| 161
| 0.661546
|
6bd8d4f2f9525ab0057b3baa80c7e5ee667f337a
| 3,257
|
py
|
Python
|
src/distdl/nn/__init__.py
|
philippwitte/distdl
|
e77e0c355d617def88b4acfcc12f0e92e9fb2fe5
|
[
"BSD-2-Clause"
] | null | null | null |
src/distdl/nn/__init__.py
|
philippwitte/distdl
|
e77e0c355d617def88b4acfcc12f0e92e9fb2fe5
|
[
"BSD-2-Clause"
] | null | null | null |
src/distdl/nn/__init__.py
|
philippwitte/distdl
|
e77e0c355d617def88b4acfcc12f0e92e9fb2fe5
|
[
"BSD-2-Clause"
] | null | null | null |
import distdl.nn.loss # noqa: F401
from . import mixins # noqa: F401
from .all_sum_reduce import AllSumReduce # noqa: F401
from .batchnorm import DistributedBatchNorm # noqa: F401
from .broadcast import Broadcast # noqa: F401
from .conv import DistributedConv1d # noqa: F401
from .conv import DistributedConv2d # noqa: F401
from .conv import DistributedConv3d # noqa: F401
from .conv_channel import DistributedChannelConv1d # noqa: F401
from .conv_channel import DistributedChannelConv2d # noqa: F401
from .conv_channel import DistributedChannelConv3d # noqa: F401
from .conv_feature import DistributedFeatureConv1d # noqa: F401
from .conv_feature import DistributedFeatureConv2d # noqa: F401
from .conv_feature import DistributedFeatureConv3d # noqa: F401
from .conv_general import DistributedGeneralConv1d # noqa: F401
from .conv_general import DistributedGeneralConv2d # noqa: F401
from .conv_general import DistributedGeneralConv3d # noqa: F401
from .halo_exchange import HaloExchange # noqa: F401
from .interpolate import Interpolate # noqa: F401
from .linear import DistributedLinear # noqa: F401
from .loss import DistributedBCELoss # noqa: F401
from .loss import DistributedBCEWithLogitsLoss # noqa: F401
from .loss import DistributedKLDivLoss # noqa: F401
from .loss import DistributedL1Loss # noqa: F401
from .loss import DistributedMSELoss # noqa: F401
from .loss import DistributedPoissonNLLLoss # noqa: F401
from .module import Module # noqa: F401
from .pooling import DistributedAvgPool1d # noqa: F401
from .pooling import DistributedAvgPool2d # noqa: F401
from .pooling import DistributedAvgPool3d # noqa: F401
from .pooling import DistributedMaxPool1d # noqa: F401
from .pooling import DistributedMaxPool2d # noqa: F401
from .pooling import DistributedMaxPool3d # noqa: F401
from .repartition import Repartition # noqa: F401
from .sum_reduce import SumReduce # noqa: F401
from .transpose import DistributedTranspose # noqa: F401
from .upsampling import DistributedUpsample # noqa: F401
__all__ = ["AllSumReduce",
"Broadcast",
"DistributedBatchNorm",
"DistributedConv1d",
"DistributedConv2d",
"DistributedConv3d",
"DistributedChannelConv1d",
"DistributedChannelConv2d",
"DistributedChannelConv3d",
"DistributedFeatureConv1d",
"DistributedFeatureConv2d",
"DistributedFeatureConv3d",
"DistributedGeneralConv1d",
"DistributedGeneralConv2d",
"DistributedGeneralConv3d",
"HaloExchange",
"DistributedLinear",
"DistributedL1Loss",
"DistributedMSELoss",
"DistributedPoissonNLLLoss",
"DistributedBCELoss",
"DistributedBCEWithLogitsLoss",
"DistributedKLDivLoss",
"Module",
"DistributedAvgPool1d",
"DistributedAvgPool2d",
"DistributedAvgPool3d",
"DistributedMaxPool1d",
"DistributedMaxPool2d",
"DistributedMaxPool3d",
"Repartition",
"SumReduce",
"DistributedTranspose",
"Interpolate",
"DistributedUpsample",
"loss",
]
| 42.298701
| 64
| 0.707092
|
5ac1b585ef063a893c876462f8dc9a3a101862cf
| 15,618
|
py
|
Python
|
data/scannetv2_inst.py
|
PatrickFeng/PointGroup
|
7b7d51a2edc013328adea6d5facee271fb732166
|
[
"Apache-2.0"
] | 2
|
2020-09-11T12:36:54.000Z
|
2020-11-07T04:10:38.000Z
|
data/scannetv2_inst.py
|
PatrickFeng/PointGroup
|
7b7d51a2edc013328adea6d5facee271fb732166
|
[
"Apache-2.0"
] | null | null | null |
data/scannetv2_inst.py
|
PatrickFeng/PointGroup
|
7b7d51a2edc013328adea6d5facee271fb732166
|
[
"Apache-2.0"
] | 1
|
2021-05-18T02:46:20.000Z
|
2021-05-18T02:46:20.000Z
|
'''
ScanNet v2 Dataloader (Modified from SparseConvNet Dataloader)
Written by Li Jiang
'''
import os, sys, glob, math, numpy as np
import scipy.ndimage
import scipy.interpolate
import torch
from torch.utils.data import DataLoader
sys.path.append('../')
from util.config import cfg
from util.log import logger
from lib.pointgroup_ops.functions import pointgroup_ops
class Dataset:
def __init__(self, test=False):
self.data_root = cfg.data_root
self.dataset = cfg.dataset
self.filename_suffix = cfg.filename_suffix
self.batch_size = cfg.batch_size
self.train_workers = cfg.train_workers
self.val_workers = cfg.train_workers
self.full_scale = cfg.full_scale
self.scale = cfg.scale
self.max_npoint = cfg.max_npoint
self.mode = cfg.mode
if test:
self.test_split = cfg.split # val or test
self.test_workers = cfg.test_workers
cfg.batch_size = 1
def trainLoader(self):
self.train_files = sorted(glob.glob(os.path.join(self.data_root, self.dataset, 'train', '*' + self.filename_suffix)))
#self.train_files = [torch.load(i) for i in train_file_names]
logger.info('Training samples: {}'.format(len(self.train_files)))
train_set = list(range(len(self.train_files)))
self.train_data_loader = DataLoader(train_set, batch_size=self.batch_size, collate_fn=lambda x: x, num_workers=self.train_workers,
shuffle=True, sampler=None, drop_last=True, pin_memory=True)
def valLoader(self):
val_file_names = sorted(glob.glob(os.path.join(self.data_root, self.dataset, 'val', '*' + self.filename_suffix)))
self.val_files = [torch.load(i) for i in val_file_names]
logger.info('Validation samples: {}'.format(len(self.val_files)))
val_set = list(range(len(self.val_files)))
self.val_data_loader = DataLoader(val_set, batch_size=self.batch_size, collate_fn=self.valMerge, num_workers=self.val_workers,
shuffle=False, drop_last=False, pin_memory=True)
def testLoader(self):
self.test_file_names = sorted(glob.glob(os.path.join(self.data_root, self.dataset, self.test_split, '*' + self.filename_suffix)))
self.test_files = [torch.load(i) for i in self.test_file_names]
logger.info('Testing samples ({}): {}'.format(self.test_split, len(self.test_files)))
test_set = list(np.arange(len(self.test_files)))
self.test_data_loader = DataLoader(test_set, batch_size=1, collate_fn=self.testMerge, num_workers=self.test_workers,
shuffle=False, drop_last=False, pin_memory=True)
#Elastic distortion
def elastic(self, x, gran, mag):
blur0 = np.ones((3, 1, 1)).astype('float32') / 3
blur1 = np.ones((1, 3, 1)).astype('float32') / 3
blur2 = np.ones((1, 1, 3)).astype('float32') / 3
bb = np.abs(x).max(0).astype(np.int32)//gran + 3
noise = [np.random.randn(bb[0], bb[1], bb[2]).astype('float32') for _ in range(3)]
noise = [scipy.ndimage.filters.convolve(n, blur0, mode='constant', cval=0) for n in noise]
noise = [scipy.ndimage.filters.convolve(n, blur1, mode='constant', cval=0) for n in noise]
noise = [scipy.ndimage.filters.convolve(n, blur2, mode='constant', cval=0) for n in noise]
noise = [scipy.ndimage.filters.convolve(n, blur0, mode='constant', cval=0) for n in noise]
noise = [scipy.ndimage.filters.convolve(n, blur1, mode='constant', cval=0) for n in noise]
noise = [scipy.ndimage.filters.convolve(n, blur2, mode='constant', cval=0) for n in noise]
ax = [np.linspace(-(b-1)*gran, (b-1)*gran, b) for b in bb]
interp = [scipy.interpolate.RegularGridInterpolator(ax, n, bounds_error=0, fill_value=0) for n in noise]
def g(x_):
return np.hstack([i(x_)[:,None] for i in interp])
return x + g(x) * mag
def getInstanceInfo(self, xyz, instance_label):
'''
:param xyz: (n, 3)
:param instance_label: (n), int, (0~nInst-1, -100)
:return: instance_num, dict
'''
instance_info = np.ones((xyz.shape[0], 9), dtype=np.float32) * -100.0 # (n, 9), float, (cx, cy, cz, minx, miny, minz, maxx, maxy, maxz)
instance_pointnum = [] # (nInst), int
instance_num = int(instance_label.max()) + 1
for i_ in range(instance_num):
inst_idx_i = np.where(instance_label == i_)
### instance_info
xyz_i = xyz[inst_idx_i]
min_xyz_i = xyz_i.min(0)
max_xyz_i = xyz_i.max(0)
mean_xyz_i = xyz_i.mean(0)
instance_info_i = instance_info[inst_idx_i]
instance_info_i[:, 0:3] = mean_xyz_i
instance_info_i[:, 3:6] = min_xyz_i
instance_info_i[:, 6:9] = max_xyz_i
instance_info[inst_idx_i] = instance_info_i
### instance_pointnum
instance_pointnum.append(inst_idx_i[0].size)
return instance_num, {"instance_info": instance_info, "instance_pointnum": instance_pointnum}
def dataAugment(self, xyz, jitter=False, flip=False, rot=False):
m = np.eye(3)
if jitter:
m += np.random.randn(3, 3) * 0.1
if flip:
m[0][0] *= np.random.randint(0, 2) * 2 - 1 # flip x randomly
if rot:
theta = np.random.rand() * 2 * math.pi
m = np.matmul(m, [[math.cos(theta), math.sin(theta), 0], [-math.sin(theta), math.cos(theta), 0], [0, 0, 1]]) # rotation
return np.matmul(xyz, m)
def crop(self, xyz):
'''
:param xyz: (n, 3) >= 0
'''
xyz_offset = xyz.copy()
valid_idxs = (xyz_offset.min(1) >= 0)
assert valid_idxs.sum() == xyz.shape[0]
full_scale = np.array([self.full_scale[1]] * 3)
room_range = xyz.max(0) - xyz.min(0)
while (valid_idxs.sum() > self.max_npoint):
offset = np.clip(full_scale - room_range + 0.001, None, 0) * np.random.rand(3)
xyz_offset = xyz + offset
valid_idxs = (xyz_offset.min(1) >= 0) * ((xyz_offset < full_scale).sum(1) == 3)
full_scale[:2] -= 32
return xyz_offset, valid_idxs
def getCroppedInstLabel(self, instance_label, valid_idxs):
instance_label = instance_label[valid_idxs]
j = 0
while (j < instance_label.max()):
if (len(np.where(instance_label == j)[0]) == 0):
instance_label[instance_label == instance_label.max()] = j
j += 1
return instance_label
def trainMerge(self, id):
locs = []
locs_float = []
feats = []
labels = []
instance_labels = []
instance_infos = [] # (N, 9)
instance_pointnum = [] # (total_nInst), int
batch_offsets = [0]
total_inst_num = 0
for i, idx in enumerate(id):
xyz_origin, rgb, label, instance_label = torch.load(self.train_files[idx])
### jitter / flip x / rotation
xyz_middle = self.dataAugment(xyz_origin, True, True, True)
### scale
xyz = xyz_middle * self.scale
### elastic
xyz = self.elastic(xyz, 6 * self.scale // 50, 40 * self.scale / 50)
xyz = self.elastic(xyz, 20 * self.scale // 50, 160 * self.scale / 50)
### offset
xyz -= xyz.min(0)
### crop
xyz, valid_idxs = self.crop(xyz)
xyz_middle = xyz_middle[valid_idxs]
xyz = xyz[valid_idxs]
rgb = rgb[valid_idxs]
label = label[valid_idxs]
instance_label = self.getCroppedInstLabel(instance_label, valid_idxs)
### get instance information
inst_num, inst_infos = self.getInstanceInfo(xyz_middle, instance_label.astype(np.int32))
inst_info = inst_infos["instance_info"] # (n, 9), (cx, cy, cz, minx, miny, minz, maxx, maxy, maxz)
inst_pointnum = inst_infos["instance_pointnum"] # (nInst), list
instance_label[np.where(instance_label != -100)] += total_inst_num
total_inst_num += inst_num
### merge the scene to the batch
batch_offsets.append(batch_offsets[-1] + xyz.shape[0])
locs.append(torch.cat([torch.LongTensor(xyz.shape[0], 1).fill_(i), torch.from_numpy(xyz).long()], 1))
locs_float.append(torch.from_numpy(xyz_middle))
feats.append(torch.from_numpy(rgb) + torch.randn(3) * 0.1)
labels.append(torch.from_numpy(label))
instance_labels.append(torch.from_numpy(instance_label))
instance_infos.append(torch.from_numpy(inst_info))
instance_pointnum.extend(inst_pointnum)
        ### merge all the scenes in the batch
batch_offsets = torch.tensor(batch_offsets, dtype=torch.int) # int (B+1)
locs = torch.cat(locs, 0) # long (N, 1 + 3), the batch item idx is put in locs[:, 0]
locs_float = torch.cat(locs_float, 0).to(torch.float32) # float (N, 3)
feats = torch.cat(feats, 0) # float (N, C)
labels = torch.cat(labels, 0).long() # long (N)
instance_labels = torch.cat(instance_labels, 0).long() # long (N)
instance_infos = torch.cat(instance_infos, 0).to(torch.float32) # float (N, 9) (meanxyz, minxyz, maxxyz)
instance_pointnum = torch.tensor(instance_pointnum, dtype=torch.int) # int (total_nInst)
spatial_shape = np.clip((locs.max(0)[0][1:] + 1).numpy(), self.full_scale[0], None) # long (3)
### voxelize
voxel_locs, p2v_map, v2p_map = pointgroup_ops.voxelization_idx(locs, self.batch_size, self.mode)
return {'locs': locs, 'voxel_locs': voxel_locs, 'p2v_map': p2v_map, 'v2p_map': v2p_map,
'locs_float': locs_float, 'feats': feats, 'labels': labels, 'instance_labels': instance_labels,
'instance_info': instance_infos, 'instance_pointnum': instance_pointnum,
'id': id, 'offsets': batch_offsets, 'spatial_shape': spatial_shape}
def valMerge(self, id):
locs = []
locs_float = []
feats = []
labels = []
instance_labels = []
instance_infos = [] # (N, 9)
instance_pointnum = [] # (total_nInst), int
batch_offsets = [0]
total_inst_num = 0
for i, idx in enumerate(id):
xyz_origin, rgb, label, instance_label = self.val_files[idx]
### flip x / rotation
xyz_middle = self.dataAugment(xyz_origin, False, True, True)
### scale
xyz = xyz_middle * self.scale
### offset
xyz -= xyz.min(0)
### crop
xyz, valid_idxs = self.crop(xyz)
xyz_middle = xyz_middle[valid_idxs]
xyz = xyz[valid_idxs]
rgb = rgb[valid_idxs]
label = label[valid_idxs]
instance_label = self.getCroppedInstLabel(instance_label, valid_idxs)
### get instance information
inst_num, inst_infos = self.getInstanceInfo(xyz_middle, instance_label.astype(np.int32))
inst_info = inst_infos["instance_info"] # (n, 9), (cx, cy, cz, minx, miny, minz, maxx, maxy, maxz)
inst_pointnum = inst_infos["instance_pointnum"] # (nInst), list
instance_label[np.where(instance_label != -100)] += total_inst_num
total_inst_num += inst_num
### merge the scene to the batch
batch_offsets.append(batch_offsets[-1] + xyz.shape[0])
locs.append(torch.cat([torch.LongTensor(xyz.shape[0], 1).fill_(i), torch.from_numpy(xyz).long()], 1))
locs_float.append(torch.from_numpy(xyz_middle))
feats.append(torch.from_numpy(rgb))
labels.append(torch.from_numpy(label))
instance_labels.append(torch.from_numpy(instance_label))
instance_infos.append(torch.from_numpy(inst_info))
instance_pointnum.extend(inst_pointnum)
### merge all the scenes in the batch
batch_offsets = torch.tensor(batch_offsets, dtype=torch.int) # int (B+1)
locs = torch.cat(locs, 0) # long (N, 1 + 3), the batch item idx is put in locs[:, 0]
locs_float = torch.cat(locs_float, 0).to(torch.float32) # float (N, 3)
feats = torch.cat(feats, 0) # float (N, C)
labels = torch.cat(labels, 0).long() # long (N)
instance_labels = torch.cat(instance_labels, 0).long() # long (N)
instance_infos = torch.cat(instance_infos, 0).to(torch.float32) # float (N, 9) (meanxyz, minxyz, maxxyz)
instance_pointnum = torch.tensor(instance_pointnum, dtype=torch.int) # int (total_nInst)
spatial_shape = np.clip((locs.max(0)[0][1:] + 1).numpy(), self.full_scale[0], None) # long (3)
### voxelize
voxel_locs, p2v_map, v2p_map = pointgroup_ops.voxelization_idx(locs, self.batch_size, self.mode)
return {'locs': locs, 'voxel_locs': voxel_locs, 'p2v_map': p2v_map, 'v2p_map': v2p_map,
'locs_float': locs_float, 'feats': feats, 'labels': labels, 'instance_labels': instance_labels,
'instance_info': instance_infos, 'instance_pointnum': instance_pointnum,
'id': id, 'offsets': batch_offsets, 'spatial_shape': spatial_shape}
def testMerge(self, id):
locs = []
locs_float = []
feats = []
batch_offsets = [0]
for i, idx in enumerate(id):
if self.test_split == 'val':
xyz_origin, rgb, label, instance_label = self.test_files[idx]
elif self.test_split == 'test':
xyz_origin, rgb = self.test_files[idx]
else:
print("Wrong test split: {}!".format(self.test_split))
exit(0)
### flip x / rotation
xyz_middle = self.dataAugment(xyz_origin, False, True, True)
### scale
xyz = xyz_middle * self.scale
### offset
xyz -= xyz.min(0)
### merge the scene to the batch
batch_offsets.append(batch_offsets[-1] + xyz.shape[0])
locs.append(torch.cat([torch.LongTensor(xyz.shape[0], 1).fill_(i), torch.from_numpy(xyz).long()], 1))
locs_float.append(torch.from_numpy(xyz_middle))
feats.append(torch.from_numpy(rgb))
### merge all the scenes in the batch
batch_offsets = torch.tensor(batch_offsets, dtype=torch.int) # int (B+1)
locs = torch.cat(locs, 0) # long (N, 1 + 3), the batch item idx is put in locs[:, 0]
locs_float = torch.cat(locs_float, 0).to(torch.float32) # float (N, 3)
feats = torch.cat(feats, 0) # float (N, C)
spatial_shape = np.clip((locs.max(0)[0][1:] + 1).numpy(), self.full_scale[0], None) # long (3)
### voxelize
voxel_locs, p2v_map, v2p_map = pointgroup_ops.voxelization_idx(locs, self.batch_size, self.mode)
return {'locs': locs, 'voxel_locs': voxel_locs, 'p2v_map': p2v_map, 'v2p_map': v2p_map,
'locs_float': locs_float, 'feats': feats,
'id': id, 'offsets': batch_offsets, 'spatial_shape': spatial_shape}
| 42.555858
| 145
| 0.587079
|
6d7087b8adf57fd7c706498b1f5336e24ca35c9d
| 26
|
py
|
Python
|
drupal/drupalorg/tests/plugins/authorization/__init__.py
|
csadsl/poc_exp
|
e3146262e7403f19f49ee2db56338fa3f8e119c9
|
[
"MIT"
] | 11
|
2020-05-30T13:53:49.000Z
|
2021-03-17T03:20:59.000Z
|
drupal/drupalorg/tests/plugins/authorization/__init__.py
|
csadsl/poc_exp
|
e3146262e7403f19f49ee2db56338fa3f8e119c9
|
[
"MIT"
] | 6
|
2020-05-13T03:25:18.000Z
|
2020-07-21T06:24:16.000Z
|
drupal/drupalorg/tests/plugins/authorization/__init__.py
|
csadsl/poc_exp
|
e3146262e7403f19f49ee2db56338fa3f8e119c9
|
[
"MIT"
] | 6
|
2020-05-30T13:53:51.000Z
|
2020-12-01T21:44:26.000Z
|
__author__ = 'christophe'
| 13
| 25
| 0.769231
|
d286d53a2c01f87ac14977b82805f74c4cda3020
| 1,139
|
py
|
Python
|
qiskit/chemistry/components/initial_states/__init__.py
|
stefan-woerner/aqua
|
12e1b867e254977d9c5992612a7919d8fe016cb4
|
[
"Apache-2.0"
] | 504
|
2018-12-15T16:34:03.000Z
|
2022-03-26T11:24:53.000Z
|
qiskit/chemistry/components/initial_states/__init__.py
|
stefan-woerner/aqua
|
12e1b867e254977d9c5992612a7919d8fe016cb4
|
[
"Apache-2.0"
] | 746
|
2018-12-16T16:44:42.000Z
|
2021-07-10T16:59:43.000Z
|
qiskit/chemistry/components/initial_states/__init__.py
|
stefan-woerner/aqua
|
12e1b867e254977d9c5992612a7919d8fe016cb4
|
[
"Apache-2.0"
] | 421
|
2018-12-22T14:49:00.000Z
|
2022-03-04T09:47:07.000Z
|
# This code is part of Qiskit.
#
# (C) Copyright IBM 2018, 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Initial States (:mod:`qiskit.chemistry.components.initial_states`)
==================================================================
These are chemistry-specific Aqua Initial States that inherit from
Aqua :class:`~qiskit.aqua.components.initial_states.InitialState`.
As they rely on chemistry specific knowledge and/or functions they live here rather than in Aqua.
.. currentmodule:: qiskit.chemistry.components.initial_states
Initial States
==============
.. autosummary::
:toctree: ../stubs/
:nosignatures:
HartreeFock
VSCF
"""
from .hartree_fock import HartreeFock
from .vscf import VSCF
__all__ = ['HartreeFock', 'VSCF']
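# Illustrative construction (hedged: the argument names and values below are assumptions
# and the signature varies across qiskit-aqua releases; check the installed version):
#
#   init_state = HartreeFock(num_orbitals=4, num_particles=2,
#                            qubit_mapping='parity', two_qubit_reduction=True)
#   circuit = init_state.construct_circuit(mode='circuit')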
| 29.973684
| 97
| 0.707638
|
acc9eaf459f525e01ea7a02ec051d7e1e3d6b65a
| 1,592
|
py
|
Python
|
examples/using_steem_offline.py
|
dpays/dpay-cli
|
dfa80898e1faea2cee92ebec6fe04873381bd40f
|
[
"MIT"
] | null | null | null |
examples/using_steem_offline.py
|
dpays/dpay-cli
|
dfa80898e1faea2cee92ebec6fe04873381bd40f
|
[
"MIT"
] | null | null | null |
examples/using_steem_offline.py
|
dpays/dpay-cli
|
dfa80898e1faea2cee92ebec6fe04873381bd40f
|
[
"MIT"
] | null | null | null |
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import sys
from datetime import datetime, timedelta
import time
import io
import logging
from dpaycli.blockchain import Blockchain
from dpaycli.block import Block
from dpaycli.account import Account
from dpaycli.amount import Amount
from dpaycli.witness import Witness
from dpayclibase import operations
from dpaycli.transactionbuilder import TransactionBuilder
from dpaycligraphenebase.account import PasswordKey, PrivateKey, PublicKey
from dpaycli.dpay import DPay
from dpaycli.utils import parse_time, formatTimedelta
from dpaycliapi.exceptions import NumRetriesReached
from dpaycli.nodelist import NodeList
from dpayclibase.transactions import getBlockParams
log = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
# example wif
wif = "5KQwrPbwdL6PhXujxW37FSSQZ1JiwsST4cqQzDeyXtP79zkvFD3"
if __name__ == "__main__":
stm_online = DPay()
ref_block_num, ref_block_prefix = getBlockParams(stm_online)
print("ref_block_num %d - ref_block_prefix %d" % (ref_block_num, ref_block_prefix))
stm = DPay(offline=True)
op = operations.Transfer({'from': 'dpayclibot',
'to': 'holger80',
'amount': "0.001 BBD",
'memo': ""})
tb = TransactionBuilder(dpay_instance=stm)
tb.appendOps([op])
tb.appendWif(wif)
tb.constructTx(ref_block_num=ref_block_num, ref_block_prefix=ref_block_prefix)
tx = tb.sign(reconstruct_tx=False)
print(tx.json())
| 33.166667
| 87
| 0.755653
|
a1f03b250043218f706cbf6d0c66bcf09a407b59
| 4,846
|
py
|
Python
|
qiskit/transpiler/passes/__init__.py
|
irajput/qiskit-terra
|
e9f06e457bfb67afe1b36b6b9b0b315eb1a24800
|
[
"Apache-2.0"
] | 1
|
2021-07-06T09:07:47.000Z
|
2021-07-06T09:07:47.000Z
|
qiskit/transpiler/passes/__init__.py
|
evercodes/qiskit-terra
|
649fec2cd1644c43eabc39b0a588c0a9347a2b50
|
[
"Apache-2.0"
] | null | null | null |
qiskit/transpiler/passes/__init__.py
|
evercodes/qiskit-terra
|
649fec2cd1644c43eabc39b0a588c0a9347a2b50
|
[
"Apache-2.0"
] | null | null | null |
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2018.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
===================================================
Transpiler Passes (:mod:`qiskit.transpiler.passes`)
===================================================
.. currentmodule:: qiskit.transpiler.passes
Layout Selection (Placement)
============================
.. autosummary::
:toctree: ../stubs/
SetLayout
TrivialLayout
DenseLayout
NoiseAdaptiveLayout
SabreLayout
CSPLayout
ApplyLayout
Layout2qDistance
EnlargeWithAncilla
FullAncillaAllocation
Routing
=======
.. autosummary::
:toctree: ../stubs/
BasicSwap
LookaheadSwap
StochasticSwap
SabreSwap
BIPMapping
Basis Change
============
.. autosummary::
:toctree: ../stubs/
Unroller
Unroll3qOrMore
Decompose
UnrollCustomDefinitions
BasisTranslator
Optimizations
=============
.. autosummary::
:toctree: ../stubs/
Optimize1qGates
Optimize1qGatesDecomposition
Collect2qBlocks
ConsolidateBlocks
CXCancellation
CommutationAnalysis
CommutativeCancellation
RemoveDiagonalGatesBeforeMeasure
RemoveResetInZeroState
CrosstalkAdaptiveSchedule
TemplateOptimization
Scheduling
=============
.. autosummary::
:toctree: ../stubs/
ALAPSchedule
ASAPSchedule
RZXCalibrationBuilder
AlignMeasures
ValidatePulseGates
Circuit Analysis
================
.. autosummary::
:toctree: ../stubs/
Width
Depth
Size
CountOps
CountOpsLongestPath
NumTensorFactors
DAGLongestPath
Synthesis
=============
.. autosummary::
:toctree: ../stubs/
UnitarySynthesis
Additional Passes
=================
.. autosummary::
:toctree: ../stubs/
CheckMap
CheckCXDirection
CheckGateDirection
CXDirection
GateDirection
MergeAdjacentBarriers
RemoveBarriers
BarrierBeforeFinalMeasurements
RemoveFinalMeasurements
DAGFixedPoint
FixedPoint
"""
# layout selection (placement)
from .layout import SetLayout
from .layout import TrivialLayout
from .layout import DenseLayout
from .layout import NoiseAdaptiveLayout
from .layout import SabreLayout
from .layout import CSPLayout
from .layout import ApplyLayout
from .layout import Layout2qDistance
from .layout import EnlargeWithAncilla
from .layout import FullAncillaAllocation
# routing
from .routing import BasicSwap
from .routing import LayoutTransformation
from .routing import LookaheadSwap
from .routing import StochasticSwap
from .routing import SabreSwap
from .routing import BIPMapping
# basis change
from .basis import Decompose
from .basis import Unroller
from .basis import UnrollCustomDefinitions
from .basis import Unroll3qOrMore
from .basis import BasisTranslator
# optimization
from .optimization import Optimize1qGates
from .optimization import Optimize1qGatesDecomposition
from .optimization import Collect2qBlocks
from .optimization import ConsolidateBlocks
from .optimization import CommutationAnalysis
from .optimization import CommutativeCancellation
from .optimization import CXCancellation
from .optimization import OptimizeSwapBeforeMeasure
from .optimization import RemoveResetInZeroState
from .optimization import RemoveDiagonalGatesBeforeMeasure
from .optimization import CrosstalkAdaptiveSchedule
from .optimization import HoareOptimizer
from .optimization import TemplateOptimization
# circuit analysis
from .analysis import ResourceEstimation
from .analysis import Depth
from .analysis import Size
from .analysis import Width
from .analysis import CountOps
from .analysis import CountOpsLongestPath
from .analysis import NumTensorFactors
from .analysis import DAGLongestPath
# synthesis
from .synthesis import UnitarySynthesis
# circuit scheduling
from .scheduling import ALAPSchedule
from .scheduling import ASAPSchedule
from .scheduling import RZXCalibrationBuilder
from .scheduling import TimeUnitConversion
from .scheduling import AlignMeasures
from .scheduling import ValidatePulseGates
# additional utility passes
from .utils import CheckMap
from .utils import CheckCXDirection # Deprecated
from .utils import CXDirection # Deprecated
from .utils import CheckGateDirection
from .utils import GateDirection
from .utils import BarrierBeforeFinalMeasurements
from .utils import RemoveFinalMeasurements
from .utils import MergeAdjacentBarriers
from .utils import DAGFixedPoint
from .utils import FixedPoint
from .utils import Error
from .utils import RemoveBarriers
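# Typical way these passes are driven (a minimal sketch using the standard PassManager):
#
#   from qiskit import QuantumCircuit
#   from qiskit.transpiler import PassManager
#
#   circuit = QuantumCircuit(2)
#   circuit.h(0)
#   circuit.cx(0, 1)
#   pm = PassManager([Unroller(['u', 'cx'])])   # rewrite gates into the requested basis
#   unrolled = pm.run(circuit)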
| 23.298077
| 77
| 0.767437
|
18f3d13029756dbeeda99717e11bd23df3fa716e
| 385
|
py
|
Python
|
pred/wsgi.py
|
zhenyu1311/hdb4213
|
168fbdc999525abd31a3d54bef3b2c38c22afb8f
|
[
"Apache-2.0"
] | null | null | null |
pred/wsgi.py
|
zhenyu1311/hdb4213
|
168fbdc999525abd31a3d54bef3b2c38c22afb8f
|
[
"Apache-2.0"
] | null | null | null |
pred/wsgi.py
|
zhenyu1311/hdb4213
|
168fbdc999525abd31a3d54bef3b2c38c22afb8f
|
[
"Apache-2.0"
] | null | null | null |
"""
WSGI config for pred project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/4.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pred.settings')
application = get_wsgi_application()
| 22.647059
| 78
| 0.781818
|
bec5e194d7749bcc76fa2a90593aff0fb181dc6e
| 383
|
py
|
Python
|
bruteforce.py
|
ipsarros/dolphinnpy
|
e55df7c66b91c06bc908e48947a72f6703eb2a15
|
[
"BSD-2-Clause"
] | 13
|
2017-03-20T14:44:06.000Z
|
2021-09-29T01:54:31.000Z
|
bruteforce.py
|
iemiris/DolphinnPy
|
ce9ff09ebdb90e15fc5faee9c9e9d01f067773c0
|
[
"BSD-2-Clause"
] | null | null | null |
bruteforce.py
|
iemiris/DolphinnPy
|
ce9ff09ebdb90e15fc5faee9c9e9d01f067773c0
|
[
"BSD-2-Clause"
] | 4
|
2018-06-24T15:43:41.000Z
|
2021-09-29T01:54:33.000Z
|
import sys
sys.path.append('/usr/local/lib/python3.4/dist-packages/')
import numpy as np
def bruteforce(P, Q):
    # Exact nearest-neighbour search by linear scan (Euclidean distance): for each
    # query q, keep the index of the closest point in P seen so far.
    solQ = []
    for q in Q:
        md = np.linalg.norm(P[0] - q)
        mi = 0
        for i in range(len(P)):
            d = np.linalg.norm(np.subtract(P[i], q))
            if d < md:
                md = d
                mi = i
        solQ.append(mi)
return solQ
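# Example with random data (illustrative only): one nearest-neighbour index per query.
#
#   P = np.random.rand(1000, 64)
#   Q = np.random.rand(5, 64)
#   print(bruteforce(P, Q))   # e.g. [412, 87, 903, 15, 660] -- five indices into P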
| 23.9375
| 58
| 0.548303
|
b9caa4dc64e663a87c3d503791c2acf82fa1f20c
| 226
|
py
|
Python
|
tests/conftest.py
|
LeviBorodenko/dgcnn
|
760c5dd1b795bcacba50ed3a50cc3615472cd885
|
[
"MIT"
] | 22
|
2020-01-26T18:59:48.000Z
|
2022-03-07T07:08:00.000Z
|
tests/conftest.py
|
LeviBorodenko/dgcnn
|
760c5dd1b795bcacba50ed3a50cc3615472cd885
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
LeviBorodenko/dgcnn
|
760c5dd1b795bcacba50ed3a50cc3615472cd885
|
[
"MIT"
] | 3
|
2020-01-27T10:01:53.000Z
|
2020-06-29T14:21:22.000Z
|
# -*- coding: utf-8 -*-
"""
Dummy conftest.py for dgcnn.
If you don't know what this is for, just leave it empty.
Read more about conftest.py under:
https://pytest.org/latest/plugins.html
"""
# import pytest
| 20.545455
| 60
| 0.646018
|
845e297a0607ebb909dfaf2d63f7edeb64ba3205
| 546
|
py
|
Python
|
Trakttv.bundle/Contents/Libraries/Shared/plex_database/models/account.py
|
disrupted/Trakttv.bundle
|
24712216c71f3b22fd58cb5dd89dad5bb798ed60
|
[
"RSA-MD"
] | 1,346
|
2015-01-01T14:52:24.000Z
|
2022-03-28T12:50:48.000Z
|
Trakttv.bundle/Contents/Libraries/Shared/plex_database/models/account.py
|
alcroito/Plex-Trakt-Scrobbler
|
4f83fb0860dcb91f860d7c11bc7df568913c82a6
|
[
"RSA-MD"
] | 474
|
2015-01-01T10:27:46.000Z
|
2022-03-21T12:26:16.000Z
|
Trakttv.bundle/Contents/Libraries/Shared/plex_database/models/account.py
|
alcroito/Plex-Trakt-Scrobbler
|
4f83fb0860dcb91f860d7c11bc7df568913c82a6
|
[
"RSA-MD"
] | 191
|
2015-01-02T18:27:22.000Z
|
2022-03-29T10:49:48.000Z
|
from plex_database.core import db
from peewee import *
class Account(Model):
class Meta:
database = db
db_table = 'accounts'
name = CharField(null=True)
hashed_password = CharField(null=True)
salt = CharField(null=True)
created_at = DateTimeField(null=True)
updated_at = DateTimeField(null=True)
default_audio_language = CharField(null=True)
default_subtitle_language = CharField(null=True)
auto_select_subtitle = BooleanField(null=True)
auto_select_audio = BooleanField(null=True)
| 22.75
| 52
| 0.716117
|
58b5849af0ab600c7d51f89f765d1ddf510aa8a2
| 13,807
|
py
|
Python
|
tests/agent_features/test_notice_error.py
|
newrelic/newrelic-python-agen
|
4f292ec1219c0daffc5721a7b3a245b97d0f83ba
|
[
"Apache-2.0"
] | 92
|
2020-06-12T17:53:23.000Z
|
2022-03-01T11:13:21.000Z
|
tests/agent_features/test_notice_error.py
|
newrelic/newrelic-python-agen
|
4f292ec1219c0daffc5721a7b3a245b97d0f83ba
|
[
"Apache-2.0"
] | 347
|
2020-07-10T00:10:19.000Z
|
2022-03-31T17:58:56.000Z
|
tests/agent_features/test_notice_error.py
|
newrelic/newrelic-python-agen
|
4f292ec1219c0daffc5721a7b3a245b97d0f83ba
|
[
"Apache-2.0"
] | 58
|
2020-06-17T13:51:57.000Z
|
2022-03-06T14:26:53.000Z
|
# Copyright 2010 New Relic, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from testing_support.fixtures import (
core_application_stats_engine_error,
error_is_saved,
override_application_settings,
reset_core_stats_engine,
validate_application_error_event_count,
validate_application_error_trace_count,
validate_application_errors,
validate_transaction_error_event_count,
validate_transaction_error_trace_count,
validate_transaction_errors,
)
from newrelic.api.application import application_instance as application
from newrelic.api.application import application_settings
from newrelic.api.background_task import background_task
from newrelic.api.settings import STRIP_EXCEPTION_MESSAGE
from newrelic.api.time_trace import notice_error
from newrelic.common.object_names import callable_name
_runtime_error_name = callable_name(RuntimeError)
_type_error_name = callable_name(TypeError)
# =============== Test errors during a transaction ===============
_test_notice_error_sys_exc_info = [(_runtime_error_name, "one")]
@validate_transaction_errors(errors=_test_notice_error_sys_exc_info)
@background_task()
def test_notice_error_sys_exc_info():
try:
raise RuntimeError("one")
except RuntimeError:
notice_error(sys.exc_info())
_test_notice_error_no_exc_info = [(_runtime_error_name, "one")]
@validate_transaction_errors(errors=_test_notice_error_no_exc_info)
@background_task()
def test_notice_error_no_exc_info():
try:
raise RuntimeError("one")
except RuntimeError:
notice_error()
_test_notice_error_custom_params = [(_runtime_error_name, "one")]
@validate_transaction_errors(errors=_test_notice_error_custom_params, required_params=[("key", "value")])
@background_task()
def test_notice_error_custom_params():
try:
raise RuntimeError("one")
except RuntimeError:
notice_error(sys.exc_info(), attributes={"key": "value"})
_test_notice_error_multiple_different_type = [(_runtime_error_name, "one"), (_type_error_name, "two")]
@validate_transaction_errors(errors=_test_notice_error_multiple_different_type)
@background_task()
def test_notice_error_multiple_different_type():
try:
raise RuntimeError("one")
except RuntimeError:
notice_error()
try:
raise TypeError("two")
except TypeError:
notice_error()
_test_notice_error_multiple_same_type = [(_runtime_error_name, "one"), (_runtime_error_name, "two")]
@validate_transaction_errors(errors=_test_notice_error_multiple_same_type)
@background_task()
def test_notice_error_multiple_same_type():
try:
raise RuntimeError("one")
except RuntimeError:
notice_error()
try:
raise RuntimeError("two")
except RuntimeError:
notice_error()
# =============== Test errors outside a transaction ===============
_test_application_exception = [(_runtime_error_name, "one")]
@reset_core_stats_engine()
@validate_application_errors(errors=_test_application_exception)
def test_application_exception():
try:
raise RuntimeError("one")
except RuntimeError:
application_instance = application()
notice_error(application=application_instance)
_test_application_exception_sys_exc_info = [(_runtime_error_name, "one")]
@reset_core_stats_engine()
@validate_application_errors(errors=_test_application_exception_sys_exc_info)
def test_application_exception_sys_exec_info():
try:
raise RuntimeError("one")
except RuntimeError:
application_instance = application()
notice_error(sys.exc_info(), application=application_instance)
_test_application_exception_custom_params = [(_runtime_error_name, "one")]
@reset_core_stats_engine()
@validate_application_errors(errors=_test_application_exception_custom_params, required_params=[("key", "value")])
def test_application_exception_custom_params():
try:
raise RuntimeError("one")
except RuntimeError:
application_instance = application()
notice_error(attributes={"key": "value"}, application=application_instance)
_test_application_exception_multiple = [(_runtime_error_name, "one"), (_runtime_error_name, "one")]
@reset_core_stats_engine()
@validate_application_errors(errors=_test_application_exception_multiple)
@background_task()
def test_application_exception_multiple():
"""Exceptions submitted straight to the stats engine doesn't check for
duplicates
"""
application_instance = application()
try:
raise RuntimeError("one")
except RuntimeError:
notice_error(application=application_instance)
try:
raise RuntimeError("one")
except RuntimeError:
notice_error(application=application_instance)
# =============== Test exception message stripping/whitelisting ===============
_test_notice_error_strip_message_disabled = [(_runtime_error_name, "one")]
_strip_message_disabled_settings = {
"strip_exception_messages.enabled": False,
}
@validate_transaction_errors(errors=_test_notice_error_strip_message_disabled)
@override_application_settings(_strip_message_disabled_settings)
@background_task()
def test_notice_error_strip_message_disabled():
settings = application_settings()
assert not settings.strip_exception_messages.enabled
try:
raise RuntimeError("one")
except RuntimeError:
notice_error()
class ErrorOne(Exception):
message = "error one message"
_error_one_name = callable_name(ErrorOne)
@override_application_settings(_strip_message_disabled_settings)
def test_notice_error_strip_message_disabled_outside_transaction():
settings = application_settings()
assert not settings.strip_exception_messages.enabled
try:
assert not error_is_saved(ErrorOne)
raise ErrorOne(ErrorOne.message)
except ErrorOne:
application_instance = application()
application_instance.notice_error()
my_error = core_application_stats_engine_error(_error_one_name)
assert my_error.message == ErrorOne.message
_test_notice_error_strip_message_enabled = [(_runtime_error_name, STRIP_EXCEPTION_MESSAGE)]
_strip_message_enabled_settings = {
"strip_exception_messages.enabled": True,
}
@validate_transaction_errors(errors=_test_notice_error_strip_message_enabled)
@override_application_settings(_strip_message_enabled_settings)
@background_task()
def test_notice_error_strip_message_enabled():
settings = application_settings()
assert settings.strip_exception_messages.enabled
try:
raise RuntimeError("message not displayed")
except RuntimeError:
notice_error()
class ErrorTwo(Exception):
message = "error two message"
_error_two_name = callable_name(ErrorTwo)
@override_application_settings(_strip_message_enabled_settings)
def test_notice_error_strip_message_enabled_outside_transaction():
settings = application_settings()
assert settings.strip_exception_messages.enabled
try:
assert not error_is_saved(ErrorTwo)
raise ErrorTwo(ErrorTwo.message)
except ErrorTwo:
application_instance = application()
application_instance.notice_error()
my_error = core_application_stats_engine_error(_error_two_name)
assert my_error.message == STRIP_EXCEPTION_MESSAGE
_test_notice_error_strip_message_in_whitelist = [(_runtime_error_name, "original error message")]
_strip_message_in_whitelist_settings = {
"strip_exception_messages.enabled": True,
"strip_exception_messages.whitelist": [_runtime_error_name],
}
@validate_transaction_errors(errors=_test_notice_error_strip_message_in_whitelist)
@override_application_settings(_strip_message_in_whitelist_settings)
@background_task()
def test_notice_error_strip_message_in_whitelist():
settings = application_settings()
assert settings.strip_exception_messages.enabled
assert _runtime_error_name in settings.strip_exception_messages.whitelist
try:
raise RuntimeError("original error message")
except RuntimeError:
notice_error()
class ErrorThree(Exception):
message = "error three message"
_error_three_name = callable_name(ErrorThree)
_strip_message_in_whitelist_settings_outside_transaction = {
"strip_exception_messages.enabled": True,
"strip_exception_messages.whitelist": [_error_three_name],
}
@override_application_settings(_strip_message_in_whitelist_settings_outside_transaction)
def test_notice_error_strip_message_in_whitelist_outside_transaction():
settings = application_settings()
assert settings.strip_exception_messages.enabled
assert _error_three_name in settings.strip_exception_messages.whitelist
try:
assert not error_is_saved(ErrorThree)
raise ErrorThree(ErrorThree.message)
except ErrorThree:
application_instance = application()
application_instance.notice_error()
my_error = core_application_stats_engine_error(_error_three_name)
assert my_error.message == ErrorThree.message
_test_notice_error_strip_message_not_in_whitelist = [(_runtime_error_name, STRIP_EXCEPTION_MESSAGE)]
_strip_message_not_in_whitelist_settings = {
"strip_exception_messages.enabled": True,
"strip_exception_messages.whitelist": ["FooError", "BarError"],
}
@validate_transaction_errors(errors=_test_notice_error_strip_message_not_in_whitelist)
@override_application_settings(_strip_message_not_in_whitelist_settings)
@background_task()
def test_notice_error_strip_message_not_in_whitelist():
settings = application_settings()
assert settings.strip_exception_messages.enabled
assert _runtime_error_name not in settings.strip_exception_messages.whitelist
try:
raise RuntimeError("message not displayed")
except RuntimeError:
notice_error()
class ErrorFour(Exception):
message = "error four message"
_error_four_name = callable_name(ErrorFour)
_strip_message_not_in_whitelist_settings_outside_transaction = {
"strip_exception_messages.enabled": True,
"strip_exception_messages.whitelist": ["ValueError", "BarError"],
}
@override_application_settings(_strip_message_not_in_whitelist_settings_outside_transaction)
def test_notice_error_strip_message_not_in_whitelist_outside_transaction():
settings = application_settings()
assert settings.strip_exception_messages.enabled
assert _error_four_name not in settings.strip_exception_messages.whitelist
try:
assert not error_is_saved(ErrorFour)
raise ErrorFour(ErrorFour.message)
except ErrorFour:
application_instance = application()
application_instance.notice_error()
my_error = core_application_stats_engine_error(_error_four_name)
assert my_error.message == STRIP_EXCEPTION_MESSAGE
# =============== Test exception limits ===============
def _raise_errors(num_errors, application=None):
for i in range(num_errors):
try:
raise RuntimeError("error" + str(i))
except RuntimeError:
notice_error(application=application)
_errors_per_transaction_limit = 5
_num_errors_transaction = 6
_errors_per_harvest_limit = 20
_num_errors_app = 26
_error_event_limit = 25
@override_application_settings({"agent_limits.errors_per_transaction": _errors_per_transaction_limit})
@validate_transaction_error_trace_count(_errors_per_transaction_limit)
@background_task()
def test_transaction_error_trace_limit():
_raise_errors(_num_errors_transaction)
@override_application_settings({"agent_limits.errors_per_harvest": _errors_per_harvest_limit})
@reset_core_stats_engine()
@validate_application_error_trace_count(_errors_per_harvest_limit)
def test_application_error_trace_limit():
_raise_errors(_num_errors_app, application())
# The limit for errors on transactions is shared for traces and errors
@override_application_settings(
{
"agent_limits.errors_per_transaction": _errors_per_transaction_limit,
"error_collector.max_event_samples_stored": _error_event_limit,
}
)
@validate_transaction_error_event_count(_errors_per_transaction_limit)
@background_task()
def test_transaction_error_event_limit():
_raise_errors(_num_errors_transaction)
# The harvest limit for error traces doesn't affect events
@override_application_settings(
{
"agent_limits.errors_per_harvest": _errors_per_harvest_limit,
"event_harvest_config.harvest_limits.error_event_data": _error_event_limit,
}
)
@reset_core_stats_engine()
@validate_application_error_event_count(_error_event_limit)
def test_application_error_event_limit():
_raise_errors(_num_errors_app, application())
# =============== Test params is not a dict ===============
@reset_core_stats_engine()
@validate_transaction_error_trace_count(num_errors=1)
@background_task()
def test_transaction_notice_error_params_not_a_dict():
try:
raise RuntimeError()
except RuntimeError:
notice_error(sys.exc_info(), attributes=[1, 2, 3])
@reset_core_stats_engine()
@validate_application_error_trace_count(num_errors=1)
def test_application_notice_error_params_not_a_dict():
try:
raise RuntimeError()
except RuntimeError:
notice_error(sys.exc_info(), attributes=[1, 2, 3], application=application())
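# --- Illustrative sketch (not part of the original test module) ---
# Every test above follows the same shape: raise inside try/except, then report
# the active exception with notice_error(), optionally attaching a dict of
# custom attributes and an explicit application object. A minimal, hedged
# sketch of that pattern, reusing the notice_error/application helpers this
# module already imports; the attribute values are made up for illustration.
def _example_notice_error_pattern():
    try:
        raise RuntimeError("example")
    except RuntimeError:
        notice_error(attributes={"key": "value"}, application=application())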
| 30.888143
| 114
| 0.781343
|
5b864f6e503b54f333e9f41820a6fe556de69bb1
| 12,478
|
py
|
Python
|
qa/rpc-tests/p2p-acceptblock.py
|
gobytecoin/gobyte-dev
|
3378282545a42d93c6fb9c38e57fc4ef005491c4
|
[
"MIT"
] | 2
|
2018-10-15T14:54:38.000Z
|
2019-10-31T23:31:29.000Z
|
qa/rpc-tests/p2p-acceptblock.py
|
gobytecoin/gobyte-dev
|
3378282545a42d93c6fb9c38e57fc4ef005491c4
|
[
"MIT"
] | 1
|
2018-05-20T18:10:57.000Z
|
2018-05-20T18:10:57.000Z
|
qa/rpc-tests/p2p-acceptblock.py
|
gobytecoin/gobyte-dev
|
3378282545a42d93c6fb9c38e57fc4ef005491c4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import time
from test_framework.blocktools import create_block, create_coinbase
'''
AcceptBlockTest -- test processing of unrequested blocks.
Since behavior differs when receiving unrequested blocks from whitelisted peers
versus non-whitelisted peers, this tests the behavior of both (effectively two
separate tests running in parallel).
Setup: two nodes, node0 and node1, not connected to each other. Node0 does not
whitelist localhost, but node1 does. They will each be on their own chain for
this test.
We have one NodeConn connection to each, test_node and white_node respectively.
The test:
1. Generate one block on each node, to leave IBD.
2. Mine a new block on each tip, and deliver to each node from node's peer.
The tip should advance.
3. Mine a block that forks the previous block, and deliver to each node from
corresponding peer.
Node0 should not process this block (just accept the header), because it is
unrequested and doesn't have more work than the tip.
Node1 should process because this is coming from a whitelisted peer.
4. Send another block that builds on the forking block.
Node0 should process this block but be stuck on the shorter chain, because
it's missing an intermediate block.
Node1 should reorg to this longer chain.
4b. Send 288 more blocks on the longer chain.
Node0 should process all but the last block (too far ahead in height).
Send all headers to Node1, and then send the last block in that chain.
Node1 should accept the block because it's coming from a whitelisted peer.
5. Send a duplicate of the block in #3 to Node0.
Node0 should not process the block because it is unrequested, and stay on
the shorter chain.
6. Send Node0 an inv for the height 3 block produced in #4 above.
   Node0 should figure out that the peer has the missing height 2 block and send a
getdata.
7. Send Node0 the missing block again.
Node0 should process and the tip should advance.
'''
# TestNode: bare-bones "peer". Used mostly as a conduit for a test to send
# p2p messages to a node, generating the messages in the main testing logic.
class TestNode(NodeConnCB):
def __init__(self):
NodeConnCB.__init__(self)
self.connection = None
self.ping_counter = 1
self.last_pong = msg_pong()
def add_connection(self, conn):
self.connection = conn
# Track the last getdata message we receive (used in the test)
def on_getdata(self, conn, message):
self.last_getdata = message
# Spin until verack message is received from the node.
# We use this to signal that our test can begin. This
# is called from the testing thread, so it needs to acquire
# the global lock.
def wait_for_verack(self):
while True:
with mininode_lock:
if self.verack_received:
return
time.sleep(0.05)
# Wrapper for the NodeConn's send_message function
def send_message(self, message):
self.connection.send_message(message)
def on_pong(self, conn, message):
self.last_pong = message
# Sync up with the node after delivery of a block
def sync_with_ping(self, timeout=30):
self.connection.send_message(msg_ping(nonce=self.ping_counter))
received_pong = False
sleep_time = 0.05
while not received_pong and timeout > 0:
time.sleep(sleep_time)
timeout -= sleep_time
with mininode_lock:
if self.last_pong.nonce == self.ping_counter:
received_pong = True
self.ping_counter += 1
return received_pong
class AcceptBlockTest(BitcoinTestFramework):
def add_options(self, parser):
parser.add_option("--testbinary", dest="testbinary",
default=os.getenv("GBXD", "gobyted"),
help="bitcoind binary to test")
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 2
def setup_network(self):
# Node0 will be used to test behavior of processing unrequested blocks
# from peers which are not whitelisted, while Node1 will be used for
# the whitelisted case.
self.nodes = []
self.nodes.append(start_node(0, self.options.tmpdir, ["-debug"],
binary=self.options.testbinary))
self.nodes.append(start_node(1, self.options.tmpdir,
["-debug", "-whitelist=127.0.0.1"],
binary=self.options.testbinary))
def run_test(self):
# Setup the p2p connections and start up the network thread.
test_node = TestNode() # connects to node0 (not whitelisted)
white_node = TestNode() # connects to node1 (whitelisted)
connections = []
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node))
connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], white_node))
test_node.add_connection(connections[0])
white_node.add_connection(connections[1])
NetworkThread().start() # Start up network handling in another thread
# Test logic begins here
test_node.wait_for_verack()
white_node.wait_for_verack()
# 1. Have both nodes mine a block (leave IBD)
[ n.generate(1) for n in self.nodes ]
tips = [ int("0x" + n.getbestblockhash(), 0) for n in self.nodes ]
# 2. Send one block that builds on each tip.
# This should be accepted.
blocks_h2 = [] # the height 2 blocks on each node's chain
block_time = get_mocktime() + 1
for i in range(2):
blocks_h2.append(create_block(tips[i], create_coinbase(2), block_time + 1))
blocks_h2[i].solve()
block_time += 1
test_node.send_message(msg_block(blocks_h2[0]))
white_node.send_message(msg_block(blocks_h2[1]))
[ x.sync_with_ping() for x in [test_node, white_node] ]
assert_equal(self.nodes[0].getblockcount(), 2)
assert_equal(self.nodes[1].getblockcount(), 2)
print("First height 2 block accepted by both nodes")
# 3. Send another block that builds on the original tip.
blocks_h2f = [] # Blocks at height 2 that fork off the main chain
for i in range(2):
blocks_h2f.append(create_block(tips[i], create_coinbase(2), blocks_h2[i].nTime+1))
blocks_h2f[i].solve()
test_node.send_message(msg_block(blocks_h2f[0]))
white_node.send_message(msg_block(blocks_h2f[1]))
[ x.sync_with_ping() for x in [test_node, white_node] ]
for x in self.nodes[0].getchaintips():
if x['hash'] == blocks_h2f[0].hash:
assert_equal(x['status'], "headers-only")
for x in self.nodes[1].getchaintips():
if x['hash'] == blocks_h2f[1].hash:
assert_equal(x['status'], "valid-headers")
print("Second height 2 block accepted only from whitelisted peer")
# 4. Now send another block that builds on the forking chain.
blocks_h3 = []
for i in range(2):
blocks_h3.append(create_block(blocks_h2f[i].sha256, create_coinbase(3), blocks_h2f[i].nTime+1))
blocks_h3[i].solve()
test_node.send_message(msg_block(blocks_h3[0]))
white_node.send_message(msg_block(blocks_h3[1]))
[ x.sync_with_ping() for x in [test_node, white_node] ]
# Since the earlier block was not processed by node0, the new block
# can't be fully validated.
for x in self.nodes[0].getchaintips():
if x['hash'] == blocks_h3[0].hash:
assert_equal(x['status'], "headers-only")
# But this block should be accepted by node0 since it has more work.
try:
self.nodes[0].getblock(blocks_h3[0].hash)
print("Unrequested more-work block accepted from non-whitelisted peer")
except:
raise AssertionError("Unrequested more work block was not processed")
# Node1 should have accepted and reorged.
assert_equal(self.nodes[1].getblockcount(), 3)
print("Successfully reorged to length 3 chain from whitelisted peer")
# 4b. Now mine 288 more blocks and deliver; all should be processed but
# the last (height-too-high) on node0. Node1 should process the tip if
# we give it the headers chain leading to the tip.
tips = blocks_h3
headers_message = msg_headers()
all_blocks = [] # node0's blocks
for j in range(2):
for i in range(288):
next_block = create_block(tips[j].sha256, create_coinbase(i + 4), tips[j].nTime+1)
next_block.solve()
if j==0:
test_node.send_message(msg_block(next_block))
all_blocks.append(next_block)
else:
headers_message.headers.append(CBlockHeader(next_block))
tips[j] = next_block
set_mocktime(get_mocktime() + 2)
set_node_times(self.nodes, get_mocktime())
for x in all_blocks:
try:
self.nodes[0].getblock(x.hash)
if x == all_blocks[287]:
raise AssertionError("Unrequested block too far-ahead should have been ignored")
except:
if x == all_blocks[287]:
print("Unrequested block too far-ahead not processed")
else:
raise AssertionError("Unrequested block with more work should have been accepted")
headers_message.headers.pop() # Ensure the last block is unrequested
white_node.send_message(headers_message) # Send headers leading to tip
white_node.send_message(msg_block(tips[1])) # Now deliver the tip
try:
white_node.sync_with_ping()
self.nodes[1].getblock(tips[1].hash)
print("Unrequested block far ahead of tip accepted from whitelisted peer")
except:
raise AssertionError("Unrequested block from whitelisted peer not accepted")
# 5. Test handling of unrequested block on the node that didn't process
# Should still not be processed (even though it has a child that has more
# work).
test_node.send_message(msg_block(blocks_h2f[0]))
# Here, if the sleep is too short, the test could falsely succeed (if the
# node hasn't processed the block by the time the sleep returns, and then
# the node processes it and incorrectly advances the tip).
# But this would be caught later on, when we verify that an inv triggers
# a getdata request for this block.
test_node.sync_with_ping()
assert_equal(self.nodes[0].getblockcount(), 2)
print("Unrequested block that would complete more-work chain was ignored")
# 6. Try to get node to request the missing block.
# Poke the node with an inv for block at height 3 and see if that
# triggers a getdata on block 2 (it should if block 2 is missing).
with mininode_lock:
# Clear state so we can check the getdata request
test_node.last_getdata = None
test_node.send_message(msg_inv([CInv(2, blocks_h3[0].sha256)]))
test_node.sync_with_ping()
with mininode_lock:
getdata = test_node.last_getdata
# Check that the getdata includes the right block
assert_equal(getdata.inv[0].hash, blocks_h2f[0].sha256)
print("Inv at tip triggered getdata for unprocessed block")
# 7. Send the missing block for the third time (now it is requested)
test_node.send_message(msg_block(blocks_h2f[0]))
test_node.sync_with_ping()
assert_equal(self.nodes[0].getblockcount(), 290)
print("Successfully reorged to longer chain from non-whitelisted peer")
[ c.disconnect_node() for c in connections ]
if __name__ == '__main__':
AcceptBlockTest().main()
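# --- Illustrative sketch (not part of the original test) ---
# Steps 2-4 of run_test() all build blocks by hand: take a parent hash, attach
# a coinbase for the desired height, bump the timestamp, and solve the proof of
# work. A hedged sketch of that recipe, reusing create_block/create_coinbase
# from test_framework.blocktools as imported above; the argument names here are
# illustrative only.
def _example_build_child_block(parent_hash, parent_time, height):
    block = create_block(parent_hash, create_coinbase(height), parent_time + 1)
    block.solve()  # grind the nonce until the header satisfies the target
    return block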
| 42.587031
| 107
| 0.651066
|
fab3fb249e642ab6880b29b77dfc3442f571b961
| 2,214
|
py
|
Python
|
examples/demo_actor/demo_actor/demo_actor_client.py
|
wmeints/python-sdk
|
33e7d48ffbed9b175ff095d18247f328e990d4ca
|
[
"MIT"
] | 1
|
2021-04-05T11:29:16.000Z
|
2021-04-05T11:29:16.000Z
|
examples/demo_actor/demo_actor/demo_actor_client.py
|
Chemdevil/python-sdk
|
b62d3b75478359a637d1e3ae162a9d21c8029bf6
|
[
"MIT"
] | 12
|
2021-09-14T05:13:26.000Z
|
2022-03-25T05:16:24.000Z
|
examples/demo_actor/demo_actor/demo_actor_client.py
|
Chemdevil/python-sdk
|
b62d3b75478359a637d1e3ae162a9d21c8029bf6
|
[
"MIT"
] | 1
|
2022-02-23T12:44:51.000Z
|
2022-02-23T12:44:51.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) Microsoft Corporation and Dapr Contributors.
# Licensed under the MIT License.
import asyncio
from dapr.actor import ActorProxy, ActorId
from demo_actor_interface import DemoActorInterface
async def main():
# Create proxy client
proxy = ActorProxy.create('DemoActor', ActorId('1'), DemoActorInterface)
# -----------------------------------------------
# Actor invocation demo
# -----------------------------------------------
# non-remoting actor invocation
print("call actor method via proxy.invoke_method()", flush=True)
rtn_bytes = await proxy.invoke_method("GetMyData")
print(rtn_bytes, flush=True)
# RPC style using python duck-typing
print("call actor method using rpc style", flush=True)
rtn_obj = await proxy.GetMyData()
print(rtn_obj, flush=True)
# -----------------------------------------------
# Actor state management demo
# -----------------------------------------------
# Invoke SetMyData actor method to save the state
print("call SetMyData actor method to save the state", flush=True)
await proxy.SetMyData({'data': 'new_data'})
# Invoke GetMyData actor method to get the state
print("call GetMyData actor method to get the state", flush=True)
rtn_obj = await proxy.GetMyData()
print(rtn_obj, flush=True)
# -----------------------------------------------
# Actor reminder demo
# -----------------------------------------------
# Invoke SetReminder actor method to set actor reminder
print("Register reminder", flush=True)
await proxy.SetReminder(True)
# -----------------------------------------------
# Actor timer demo
# -----------------------------------------------
# Invoke SetTimer to set actor timer
print("Register timer", flush=True)
await proxy.SetTimer(True)
    # Wait for 30 seconds to see the reminder and timer being triggered
print("waiting for 30 seconds", flush=True)
await asyncio.sleep(30)
# Stop reminder and timer
print("stop reminder", flush=True)
await proxy.SetReminder(False)
print("stop timer", flush=True)
await proxy.SetTimer(False)
asyncio.run(main())
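# --- Illustrative sketch (not part of the original example) ---
# The two invocation styles shown in main() are interchangeable:
# invoke_method() sends the raw method name and returns bytes, while the
# duck-typed call goes through the typed DemoActorInterface and returns a
# deserialized object. A hedged helper illustrating both, assuming the same
# kind of proxy object that main() creates.
async def _example_get_my_data(proxy):
    raw_bytes = await proxy.invoke_method("GetMyData")  # non-remoting style
    typed_obj = await proxy.GetMyData()                  # RPC / duck-typing style
    return raw_bytes, typed_obj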
| 34.59375
| 76
| 0.573622
|
7b24c60032cdaf1d93f7b02765661b724cb5f895
| 1,721
|
py
|
Python
|
week6_EDA_streamlit_flask_webscraping/day2_ds_presentation_flask_individual_project/flask_class/flask_essential/main_flask.py
|
paleomau/MGOL_BOOTCAMP
|
8c2b018f49fd12a255ea6f323141260d04d4421d
|
[
"MIT"
] | null | null | null |
week6_EDA_streamlit_flask_webscraping/day2_ds_presentation_flask_individual_project/flask_class/flask_essential/main_flask.py
|
paleomau/MGOL_BOOTCAMP
|
8c2b018f49fd12a255ea6f323141260d04d4421d
|
[
"MIT"
] | null | null | null |
week6_EDA_streamlit_flask_webscraping/day2_ds_presentation_flask_individual_project/flask_class/flask_essential/main_flask.py
|
paleomau/MGOL_BOOTCAMP
|
8c2b018f49fd12a255ea6f323141260d04d4421d
|
[
"MIT"
] | null | null | null |
from flask import Flask, request, render_template
from utils.functions import read_json
import os
# Mandatory
app = Flask(__name__) # __name__ --> __main__
# ---------- Flask functions ----------
@app.route("/") # @ --> esto representa el decorador de la función
def home():
""" Default path """
return app.send_static_file('greet.html')
@app.route("/greet")
def greet():
username = request.args.get('name')
return render_template('index.html', name=username)
@app.route("/info")
def create_json():
    return 'You must access the "/give_me_id" endpoint, passing the "password" parameter with the correct password'
@app.route('/give_me_id', methods=['GET'])
def give_id():
x = request.args['password']
if x == "12345":
return request.args
else:
return "No es la contraseña correcta"
# ---------- Other functions ----------
def main():
print("---------STARTING PROCESS---------")
print(__file__)
# Get the settings fullpath
# \\ --> WINDOWS
# / --> UNIX
    # For both: os.sep
settings_file = os.path.dirname(__file__) + os.sep + "settings.json"
print(settings_file)
# Load json from file
json_readed = read_json(fullpath=settings_file)
# Load variables from jsons
SERVER_RUNNING = json_readed["server_running"]
print("SERVER_RUNNING", SERVER_RUNNING)
if SERVER_RUNNING:
DEBUG = json_readed["debug"]
HOST = json_readed["host"]
PORT_NUM = json_readed["port"]
app.run(debug=DEBUG, host=HOST, port=PORT_NUM)
else:
print("Server settings.json doesn't allow to start server. " +
"Please, allow it to run it.")
if __name__ == "__main__":
main()
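# --- Illustrative sketch (not part of the original module) ---
# The /greet and /give_me_id endpoints above read their inputs from the query
# string. A hedged client-side example using the third-party "requests"
# package; the base URL here is an assumption (the real host/port come from
# settings.json).
def _example_client_calls(base_url="http://localhost:5000"):
    import requests
    greet = requests.get(base_url + "/greet", params={"name": "Ada"})
    ident = requests.get(base_url + "/give_me_id", params={"password": "12345"})
    return greet.text, ident.text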
| 29.169492
| 118
| 0.63161
|
2f94aefb9a601baa7f6ecba7e2b09a34dea845f8
| 1,946
|
py
|
Python
|
cohesity_management_sdk/models/type_vault_stats_info_enum.py
|
chandrashekar-cohesity/management-sdk-python
|
9e6ec99e8a288005804b808c4e9b19fd204e3a8b
|
[
"Apache-2.0"
] | 1
|
2021-01-07T20:36:22.000Z
|
2021-01-07T20:36:22.000Z
|
cohesity_management_sdk/models/type_vault_stats_info_enum.py
|
chandrashekar-cohesity/management-sdk-python
|
9e6ec99e8a288005804b808c4e9b19fd204e3a8b
|
[
"Apache-2.0"
] | null | null | null |
cohesity_management_sdk/models/type_vault_stats_info_enum.py
|
chandrashekar-cohesity/management-sdk-python
|
9e6ec99e8a288005804b808c4e9b19fd204e3a8b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2019 Cohesity Inc.
class TypeVaultStatsInfoEnum(object):
"""Implementation of the 'Type_VaultStatsInfo' enum.
Specifies the Vault type.
Attributes:
KNEARLINE: TODO: type description here.
KGLACIER: TODO: type description here.
KS3: TODO: type description here.
KAZURESTANDARD: TODO: type description here.
KS3COMPATIBLE: TODO: type description here.
KQSTARTAPE: TODO: type description here.
KGOOGLESTANDARD: TODO: type description here.
KGOOGLEDRA: TODO: type description here.
KAMAZONS3STANDARDIA: TODO: type description here.
KAWSGOVCLOUD: TODO: type description here.
KNAS: TODO: type description here.
KCOLDLINE: TODO: type description here.
KAZUREGOVCLOUD: TODO: type description here.
KAZUREARCHIVE: TODO: type description here.
KAZURE: TODO: type description here.
KGOOGLE: TODO: type description here.
KAMAZON: TODO: type description here.
KORACLE: TODO: type description here.
KORACLETIERSTANDARD: TODO: type description here.
KORACLETIERARCHIVE: TODO: type description here.
KAMAZONC2S: TODO: type description here.
"""
KNEARLINE = 'kNearline'
KGLACIER = 'kGlacier'
KS3 = 'kS3'
KAZURESTANDARD = 'kAzureStandard'
KS3COMPATIBLE = 'kS3Compatible'
KQSTARTAPE = 'kQStarTape'
KGOOGLESTANDARD = 'kGoogleStandard'
KGOOGLEDRA = 'kGoogleDRA'
KAMAZONS3STANDARDIA = 'kAmazonS3StandardIA'
KAWS_GO_VCLOUD = 'kAWSGovCloud'
KNAS = 'kNAS'
KCOLDLINE = 'kColdline'
K_AZURE_GO_VCLOUD = 'kAzureGovCloud'
KAZUREARCHIVE = 'kAzureArchive'
KAZURE = 'kAzure'
KGOOGLE = 'kGoogle'
KAMAZON = 'kAmazon'
KORACLE = 'kOracle'
KORACLETIERSTANDARD = 'kOracleTierStandard'
KORACLETIERARCHIVE = 'kOracleTierArchive'
KAMAZONC2S = 'kAmazonC2S'
| 25.272727
| 57
| 0.672662
|
ea5f2e17759eaafaeb91243175e0f7e8a873fd94
| 999
|
py
|
Python
|
neptune/new/constants.py
|
janbolle/neptune-client
|
33b1876b361d9a7184f557d7bd6e016cb08bd59f
|
[
"Apache-2.0"
] | null | null | null |
neptune/new/constants.py
|
janbolle/neptune-client
|
33b1876b361d9a7184f557d7bd6e016cb08bd59f
|
[
"Apache-2.0"
] | null | null | null |
neptune/new/constants.py
|
janbolle/neptune-client
|
33b1876b361d9a7184f557d7bd6e016cb08bd59f
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright (c) 2020, Neptune Labs Sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Constants used by Neptune"""
ANONYMOUS = 'ANONYMOUS'
ANONYMOUS_API_TOKEN = 'eyJhcGlfYWRkcmVzcyI6Imh0dHBzOi8vdWkubmVwdHVuZS5haSIsImFwaV91cmwiOiJodHRwczovL3VpLm5lcHR1bmUuYW' \
'kiLCJhcGlfa2V5IjoiYjcwNmJjOGYtNzZmOS00YzJlLTkzOWQtNGJhMDM2ZjkzMmU0In0='
NEPTUNE_RUNS_DIRECTORY = '.neptune'
OFFLINE_DIRECTORY = 'offline'
ASYNC_DIRECTORY = 'async'
OFFLINE_NAME_PREFIX = 'offline/'
| 33.3
| 120
| 0.767768
|
9f68cb95cc87b832d92f67acef699a4436b307a3
| 4,086
|
py
|
Python
|
thunderpush/messenger.py
|
uthunderbird/thunderpush
|
e2b311510221f2cd332136cb08b64851b201468f
|
[
"BSD-3-Clause"
] | null | null | null |
thunderpush/messenger.py
|
uthunderbird/thunderpush
|
e2b311510221f2cd332136cb08b64851b201468f
|
[
"BSD-3-Clause"
] | null | null | null |
thunderpush/messenger.py
|
uthunderbird/thunderpush
|
e2b311510221f2cd332136cb08b64851b201468f
|
[
"BSD-3-Clause"
] | null | null | null |
import logging
import re
logger = logging.getLogger()
class Messenger(object):
"""
Handles dispatching messages to Channels and Users
for given client.
"""
def __init__(self, apikey, apisecret, *args, **kwargs):
self.apikey = apikey
self.apisecret = apisecret
self.users = {}
self.channels = {}
self.user_count = 0
@staticmethod
def is_valid_channel_name(name):
        return re.match(r"^[a-zA-Z0-9_\-\=\@\,\.\;]{1,64}$", name) is not None
def send_to_channel(self, channel, message):
"""
Sends a message to given channel.
Returns a count of messages sent.
"""
data = {'payload': message, 'channel': channel}
users = self.get_users_in_channel(channel)
return self._send_to_users(users, data)
def send_to_user(self, userid, message):
"""
Sends a message to given user.
Returns a count of messages sent.
"""
data = {'payload': message}
users = self.users.get(userid, [])
return self._send_to_users(users, data)
def _send_to_users(self, users, message):
if users:
users[0].broadcast(users, message)
return len(users)
def register_user(self, user):
self.users.setdefault(user.userid, []).append(user)
def subscribe_user_to_channel(self, user, channel):
if self.is_valid_channel_name(channel):
self.channels.setdefault(channel, []).append(user)
logger.debug("User %s subscribed to %s." % (user.userid, channel,))
logger.debug("User count in %s: %d." %
(channel, self.get_channel_user_count(channel)))
else:
logger.debug("Invalid channel name %s." % channel)
def unsubscribe_user_from_channel(self, user, channel):
try:
self.channels[channel].remove(user)
# free up the memory used by empty channel index
            if not self.channels[channel]:
del self.channels[channel]
logger.debug("%s unsubscribed from %s." % (user.userid, channel,))
logger.debug("User count in %s: %d." %
(channel, self.get_channel_user_count(channel)))
except KeyError:
logger.debug("Channel %s not found." % (channel,))
except ValueError:
logger.debug("User %s not found in %s." % (user.userid, channel,))
def unregister_user(self, user):
channels_to_free = []
names = self.channels.iterkeys() \
if hasattr(self.channels, 'iterkeys') \
else self.channels.keys()
for name in names:
try:
self.channels[name].remove(user)
# as we can't delete keys from the dict as we are iterating
# over it, we do it outside of this loop
if not self.channels[name]:
channels_to_free.append(name)
except ValueError:
pass
# free up the memory used by empty channel index
for channel in channels_to_free:
del self.channels[channel]
self.users[user.userid].remove(user)
# free up the memory used by empty user index
if not self.users[user.userid]:
del self.users[user.userid]
def force_disconnect_user(self, userid):
handlers = self.users.get(userid, [])
for handler in handlers:
handler.force_disconnect()
def get_user_count(self):
return len(self.users)
def get_connections_count(self):
return sum(len(connections) for connections in self.users.values())
def is_user_online(self, userid):
return bool(self.users.get(userid, 0))
def get_channel_user_count(self, channel):
return len(self.get_users_in_channel(channel))
def get_users_in_channel(self, channel):
return self.channels.get(channel, [])
def destroy(self):
for userid in self.users.keys():
self.force_disconnect_user(userid)
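# --- Illustrative sketch (not part of the original module) ---
# Messenger never touches sockets itself: it only keeps userid -> handlers and
# channel -> handlers indexes and asks a handler object to broadcast(). A
# hedged, self-contained usage example with a dummy handler standing in for a
# real connection handler.
class _FakeHandler(object):
    def __init__(self, userid):
        self.userid = userid
        self.sent = []
    def broadcast(self, handlers, message):
        for handler in handlers:
            handler.sent.append(message)
    def force_disconnect(self):
        pass
def _example_messenger_flow():
    messenger = Messenger("apikey", "apisecret")
    alice = _FakeHandler("alice")
    messenger.register_user(alice)
    messenger.subscribe_user_to_channel(alice, "news")
    sent_count = messenger.send_to_channel("news", {"text": "hello"})  # returns 1
    messenger.destroy()
    return sent_count, alice.sent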
| 31.19084
| 79
| 0.595448
|
445980a15a928b6cd06afc36e4b18260ef49c7f3
| 679
|
py
|
Python
|
repo_controller_manager/__init__.py
|
grahamhayes/repo-controller-manager
|
8e38c2531ce23c8f10d29494e956ec9e350a761c
|
[
"Apache-2.0"
] | null | null | null |
repo_controller_manager/__init__.py
|
grahamhayes/repo-controller-manager
|
8e38c2531ce23c8f10d29494e956ec9e350a761c
|
[
"Apache-2.0"
] | null | null | null |
repo_controller_manager/__init__.py
|
grahamhayes/repo-controller-manager
|
8e38c2531ce23c8f10d29494e956ec9e350a761c
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo(
'repo-controller-manager').version_string()
| 33.95
| 75
| 0.749632
|
7299aaaa6e7d4b8f4ffe0d017c35a2b930fcd2fb
| 151
|
py
|
Python
|
learning_logs/admin.py
|
princ3raj/learning-log
|
c396bc23509f0adc589966ec688c3388a051d006
|
[
"Apache-2.0"
] | null | null | null |
learning_logs/admin.py
|
princ3raj/learning-log
|
c396bc23509f0adc589966ec688c3388a051d006
|
[
"Apache-2.0"
] | 3
|
2021-03-30T14:15:20.000Z
|
2021-09-22T19:31:57.000Z
|
learning_logs/admin.py
|
princ3raj/learning-log
|
c396bc23509f0adc589966ec688c3388a051d006
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin
from .models import Topic,Entry
admin.site.register(Topic)
admin.site.register(Entry)
# Register your models here.
| 16.777778
| 32
| 0.794702
|
a8e12fd1630739d4104d72fe8e039e3e113c0e8a
| 1,969
|
py
|
Python
|
communities/tests.py
|
powerblossom/workcloud
|
fd943220366ebeadfa90c59fc395f84a734b5686
|
[
"MIT"
] | 1
|
2019-10-18T05:57:13.000Z
|
2019-10-18T05:57:13.000Z
|
communities/tests.py
|
powerblossom/workcloud
|
fd943220366ebeadfa90c59fc395f84a734b5686
|
[
"MIT"
] | 11
|
2019-12-02T13:59:22.000Z
|
2021-04-24T08:52:19.000Z
|
communities/tests.py
|
powerblossom/workcloud
|
fd943220366ebeadfa90c59fc395f84a734b5686
|
[
"MIT"
] | null | null | null |
from core.testcase import TestCase as CoreTestCase
from . import models
class TestCase(CoreTestCase):
def create_option(
self,
is_active=True,
permission_read='all',
permission_write='all',
permission_reply='all'
):
self.option = models.Option.objects.create(
is_active=is_active,
permission_read=permission_read,
permission_write=permission_write,
permission_reply=permission_reply
)
return self.option
def create_forum(
self,
name='illegallysmolcats',
title='Illegally Small Cats',
description='Why so small',
option=None
):
if not option:
option = self.create_option()
self.forum = models.Forum.objects.create(
name=name,
title=title,
description=description,
option=option
)
self.forum.managers.add(self.user)
return self.forum
def create_thread(
self,
forum=None,
user=None,
name=None,
title='Hello',
content='Kitty'
):
if not forum:
forum = self.forum
if not user and not name:
user = self.user
self.thread = models.Thread.objects.create(
forum=forum,
user=user,
name=name,
title=title,
content=content
)
return self.thread
def create_reply(
self,
thread=None,
reply_id=0,
user=None,
name=None,
content='Meow'
):
if not thread:
thread = self.thread
if not user and not name:
user = self.user
self.reply = models.Reply.objects.create(
thread=thread,
reply_id=reply_id,
user=user,
name=name,
content=content
)
return self.reply
| 23.440476
| 51
| 0.530726
|
f6ce66818fb6a248a656d831fec82ec19f4e9871
| 2,604
|
py
|
Python
|
ceilometer/publisher/meter_publish.py
|
CiscoSystems/ceilometer
|
a9267fd94e7854afa0720d761fbe75d946e7167d
|
[
"Apache-2.0"
] | null | null | null |
ceilometer/publisher/meter_publish.py
|
CiscoSystems/ceilometer
|
a9267fd94e7854afa0720d761fbe75d946e7167d
|
[
"Apache-2.0"
] | null | null | null |
ceilometer/publisher/meter_publish.py
|
CiscoSystems/ceilometer
|
a9267fd94e7854afa0720d761fbe75d946e7167d
|
[
"Apache-2.0"
] | null | null | null |
# -*- encoding: utf-8 -*-
#
# Copyright © 2012 New Dream Network, LLC (DreamHost)
#
# Author: Doug Hellmann <doug.hellmann@dreamhost.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Publish a counter using the preferred RPC mechanism.
"""
import itertools
from oslo.config import cfg
from ceilometer.collector import meter as meter_api
from ceilometer.openstack.common import log
from ceilometer.openstack.common import rpc
from ceilometer import publisher
LOG = log.getLogger(__name__)
PUBLISH_OPTS = [
cfg.StrOpt('metering_topic',
default='metering',
help='the topic ceilometer uses for metering messages',
),
]
def register_opts(config):
"""Register the options for publishing metering messages.
"""
config.register_opts(PUBLISH_OPTS)
register_opts(cfg.CONF)
class MeterPublisher(publisher.PublisherBase):
def publish_counters(self, context, counters, source):
"""Send a metering message for publishing
:param context: Execution context from the service or RPC call
:param counter: Counter from pipeline after transformation
:param source: counter source
"""
meters = [
meter_api.meter_message_from_counter(counter,
cfg.CONF.metering_secret,
source)
for counter in counters
]
topic = cfg.CONF.metering_topic
msg = {
'method': 'record_metering_data',
'version': '1.0',
'args': {'data': meters},
}
LOG.debug('PUBLISH: %s', str(msg))
rpc.cast(context, topic, msg)
for meter_name, meter_list in itertools.groupby(
sorted(meters, key=lambda m: m['counter_name']),
lambda m: m['counter_name']):
msg = {
'method': 'record_metering_data',
'version': '1.0',
'args': {'data': list(meter_list)},
}
rpc.cast(context, topic + '.' + meter_name, msg)
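# --- Illustrative sketch (not part of the original module) ---
# publish_counters() above fans messages out twice: once on the shared metering
# topic and once per counter name on "<topic>.<name>". The per-name split
# relies on sorting before itertools.groupby. A hedged, standalone sketch of
# that grouping step using plain dicts in place of real meter messages.
def _example_group_by_counter_name(meters):
    grouped = {}
    for name, meter_list in itertools.groupby(
            sorted(meters, key=lambda m: m['counter_name']),
            lambda m: m['counter_name']):
        grouped[name] = list(meter_list)
    return grouped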
| 31
| 75
| 0.625576
|
03ff9b94985ddc2c068fdd82a1303d33a35ce72f
| 3,460
|
py
|
Python
|
checkSpeed.py
|
sunsetmountain/raspberrySpeedtest
|
1b3d18deabec30779a06e11fc5a00fbf86dfce41
|
[
"Apache-2.0"
] | null | null | null |
checkSpeed.py
|
sunsetmountain/raspberrySpeedtest
|
1b3d18deabec30779a06e11fc5a00fbf86dfce41
|
[
"Apache-2.0"
] | null | null | null |
checkSpeed.py
|
sunsetmountain/raspberrySpeedtest
|
1b3d18deabec30779a06e11fc5a00fbf86dfce41
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
import os
import re
import subprocess
import json
import time
import socket
import datetime
import calendar
from tendo import singleton
me = singleton.SingleInstance() # will sys.exit(-1) if another instance of this script is already running
dataDir = "/home/pi/raspberrySpeedtest/speedtestResults"
def mkEpoch(inputDatestamp, inputTimestamp):
inputDatestamp = inputDatestamp.replace("/", "-")
inputStr = inputDatestamp + " " + inputTimestamp
datetimeObj = datetime.datetime.strptime(inputStr, "%Y-%m-%d %H:%M:%S")
epochVal = calendar.timegm(datetimeObj.timetuple())
epochString = str(epochVal)
return epochString
def list2obj(timestamp, currentDate, currentTime, ping, download, upload, ssid, freq, signal, bitrate, hostname):
outputObj = {}
outputObj["timestamp"] = timestamp
outputObj["collectiondate"] = currentDate
outputObj["collectiontime"] = currentTime
outputObj["ping"] = ping
outputObj["download"] = download
outputObj["upload"] = upload
outputObj["ssid"] = ssid
outputObj["frequency"] = freq
outputObj["signal"] = signal
outputObj["bitrate"] = bitrate
outputObj["hostname"] = hostname
return outputObj
def main():
while True:
currentDate = datetime.datetime.utcnow().strftime('%Y-%m-%d')
currentTime = datetime.datetime.utcnow().strftime('%H:%M:%S')
timestamp = str(currentDate) + " " + str(currentTime)
#Run the speed test
response = subprocess.Popen('speedtest-cli --simple', shell=True, stdout=subprocess.PIPE).stdout.read()
#Collect speed test results
ping = re.findall('Ping:\s(.*?)\s', response, re.MULTILINE)
download = re.findall('Download:\s(.*?)\s', response, re.MULTILINE)
upload = re.findall('Upload:\s(.*?)\s', response, re.MULTILINE)
ping[0] = ping[0].replace(',', '.')
download[0] = download[0].replace(',', '.')
upload[0] = upload[0].replace(',', '.')
#Collect wifi information
wifiResponse = subprocess.Popen('iw dev wlan0 link', shell=True, stdout=subprocess.PIPE).stdout.read()
if wifiResponse[0:14] == "Not connected.":
ssid = ["lan0"]
freq = ["0"]
signal= ["0"]
bitrate = ["0"]
else:
ssid = re.findall('SSID:\s(.*?)\s', wifiResponse, re.MULTILINE)
freq = re.findall('freq:\s(.*?)\s', wifiResponse, re.MULTILINE)
signal = re.findall('signal:\s(.*?)\s', wifiResponse, re.MULTILINE)
bitrate = re.findall('tx bitrate:\s(.*?)\s', wifiResponse, re.MULTILINE)
ssid[0] = ssid[0].replace(',', '.')
freq[0] = freq[0].replace(',', '.')
signal[0] = signal[0].replace(',', '.')
bitrate[0] = bitrate[0].replace(',', '.')
#Determine the hostname of this computer
hostname = subprocess.Popen('hostname', shell=True, stdout=subprocess.PIPE).stdout.read().rstrip()
tmpObj = {}
tmpObj = list2obj(timestamp, currentDate, currentTime, ping[0], download[0], upload[0], ssid[0], freq[0], signal[0], bitrate[0], hostname)
#Create a unique filename to write to
filename = hostname + "-" + mkEpoch(str(currentDate), str(currentTime))
filePath = dataDir + "/" + filename + ".json"
#Make sure there is a directory to put the results into
try:
os.mkdir(dataDir)
except OSError:
pass
else:
print ("Successfully created the directory %s " % dataDir)
#Write the results to a JSON file
try:
open(filePath, "wb").write(json.dumps(tmpObj))
print "Successfully wrote results to " + filePath
except:
print "Error writing results..."
pass
time.sleep(3600)
if __name__ == '__main__':
main()
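# --- Illustrative sketch (not part of the original script) ---
# The regular expressions in main() pull the three numbers out of the
# `speedtest-cli --simple` output. A hedged, standalone example of that parsing
# step on a canned response string:
#   _example_parse_speedtest("Ping: 18.5 ms\nDownload: 94.1 Mbit/s\nUpload: 11.2 Mbit/s\n")
# would return ('18.5', '94.1', '11.2'); the sample values are made up.
def _example_parse_speedtest(response):
    ping = re.findall('Ping:\s(.*?)\s', response, re.MULTILINE)
    download = re.findall('Download:\s(.*?)\s', response, re.MULTILINE)
    upload = re.findall('Upload:\s(.*?)\s', response, re.MULTILINE)
    return ping[0], download[0], upload[0]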
| 32.641509
| 139
| 0.687572
|
07b1069f2cb5224a37c1b8ff1129b124ce02279f
| 5,086
|
py
|
Python
|
test/functional/rpc_whitelist.py
|
syglee7/zenacoin-ver2
|
90079b95bdf0ea2b7fce644c56d2a9626526e5e4
|
[
"MIT"
] | null | null | null |
test/functional/rpc_whitelist.py
|
syglee7/zenacoin-ver2
|
90079b95bdf0ea2b7fce644c56d2a9626526e5e4
|
[
"MIT"
] | null | null | null |
test/functional/rpc_whitelist.py
|
syglee7/zenacoin-ver2
|
90079b95bdf0ea2b7fce644c56d2a9626526e5e4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2017-2019 The Zenacoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
A test for RPC users with restricted permissions
"""
from test_framework.test_framework import ZenacoinTestFramework
import os
from test_framework.util import (
get_datadir_path,
assert_equal,
str_to_b64str
)
import http.client
import urllib.parse
def rpccall(node, user, method):
url = urllib.parse.urlparse(node.url)
headers = {"Authorization": "Basic " + str_to_b64str('{}:{}'.format(user[0], user[3]))}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "' + method + '"}', headers)
resp = conn.getresponse()
conn.close()
return resp
class RPCWhitelistTest(ZenacoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def setup_chain(self):
super().setup_chain()
# 0 => Username
# 1 => Password (Hashed)
# 2 => Permissions
# 3 => Password Plaintext
self.users = [
["user1", "50358aa884c841648e0700b073c32b2e$b73e95fff0748cc0b517859d2ca47d9bac1aa78231f3e48fa9222b612bd2083e", "getbestblockhash,getblockcount,", "12345"],
["user2", "8650ba41296f62092377a38547f361de$4620db7ba063ef4e2f7249853e9f3c5c3592a9619a759e3e6f1c63f2e22f1d21", "getblockcount", "54321"]
]
# For exceptions
self.strange_users = [
# Test empty
["strangedude", "62d67dffec03836edd698314f1b2be62$c2fb4be29bb0e3646298661123cf2d8629640979cabc268ef05ea613ab54068d", ":", "s7R4nG3R7H1nGZ"],
["strangedude2", "575c012c7fe4b1e83b9d809412da3ef7$09f448d0acfc19924dd62ecb96004d3c2d4b91f471030dfe43c6ea64a8f658c1", "", "s7R4nG3R7H1nGZ"],
# Test trailing comma
["strangedude3", "23189c561b5975a56f4cf94030495d61$3a2f6aac26351e2257428550a553c4c1979594e36675bbd3db692442387728c0", ":getblockcount,", "s7R4nG3R7H1nGZ"],
# Test overwrite
["strangedude4", "990c895760a70df83949e8278665e19a$8f0906f20431ff24cb9e7f5b5041e4943bdf2a5c02a19ef4960dcf45e72cde1c", ":getblockcount, getbestblockhash", "s7R4nG3R7H1nGZ"],
["strangedude4", "990c895760a70df83949e8278665e19a$8f0906f20431ff24cb9e7f5b5041e4943bdf2a5c02a19ef4960dcf45e72cde1c", ":getblockcount", "s7R4nG3R7H1nGZ"],
# Testing the same permission twice
["strangedude5", "d12c6e962d47a454f962eb41225e6ec8$2dd39635b155536d3c1a2e95d05feff87d5ba55f2d5ff975e6e997a836b717c9", ":getblockcount,getblockcount", "s7R4nG3R7H1nGZ"]
]
# These commands shouldn't be allowed for any user to test failures
self.never_allowed = ["getnetworkinfo"]
with open(os.path.join(get_datadir_path(self.options.tmpdir, 0), "zenacoin.conf"), 'a', encoding='utf8') as f:
f.write("\nrpcwhitelistdefault=0\n")
for user in self.users:
f.write("rpcauth=" + user[0] + ":" + user[1] + "\n")
f.write("rpcwhitelist=" + user[0] + ":" + user[2] + "\n")
# Special cases
for strangedude in self.strange_users:
f.write("rpcauth=" + strangedude[0] + ":" + strangedude[1] + "\n")
f.write("rpcwhitelist=" + strangedude[0] + strangedude[2] + "\n")
def run_test(self):
for user in self.users:
permissions = user[2].replace(" ", "").split(",")
# Pop all empty items
i = 0
while i < len(permissions):
if permissions[i] == '':
permissions.pop(i)
                else:
                    i += 1
for permission in permissions:
self.log.info("[" + user[0] + "]: Testing a permitted permission (" + permission + ")")
assert_equal(200, rpccall(self.nodes[0], user, permission).status)
for permission in self.never_allowed:
self.log.info("[" + user[0] + "]: Testing a non permitted permission (" + permission + ")")
assert_equal(403, rpccall(self.nodes[0], user, permission).status)
# Now test the strange users
for permission in self.never_allowed:
self.log.info("Strange test 1")
assert_equal(403, rpccall(self.nodes[0], self.strange_users[0], permission).status)
for permission in self.never_allowed:
self.log.info("Strange test 2")
assert_equal(403, rpccall(self.nodes[0], self.strange_users[1], permission).status)
self.log.info("Strange test 3")
assert_equal(200, rpccall(self.nodes[0], self.strange_users[2], "getblockcount").status)
self.log.info("Strange test 4")
assert_equal(403, rpccall(self.nodes[0], self.strange_users[3], "getbestblockhash").status)
self.log.info("Strange test 5")
assert_equal(200, rpccall(self.nodes[0], self.strange_users[4], "getblockcount").status)
if __name__ == "__main__":
RPCWhitelistTest().main()
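# --- Illustrative sketch (not part of the original test) ---
# rpccall() above authenticates with HTTP Basic auth built from the plaintext
# password (user[3]); the hashed form (user[1]) only ever appears in
# zenacoin.conf as an rpcauth= line. A hedged sketch of the same header
# construction using only the standard library instead of str_to_b64str.
def _example_basic_auth_header(username, password):
    import base64
    token = base64.b64encode('{}:{}'.format(username, password).encode('utf-8'))
    return {"Authorization": "Basic " + token.decode('ascii')}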
| 50.356436
| 184
| 0.653755
|
e4b60b2d5d762bd4defd24f8994f6fbf90207821
| 1,178
|
py
|
Python
|
common/src/stack/command/stack/commands/unload/attrfile/__init__.py
|
khanfluence/stacki-cumulus-switch
|
df54afb20f6ea6a3a136b3c09b30df54ea79ffcc
|
[
"BSD-3-Clause"
] | null | null | null |
common/src/stack/command/stack/commands/unload/attrfile/__init__.py
|
khanfluence/stacki-cumulus-switch
|
df54afb20f6ea6a3a136b3c09b30df54ea79ffcc
|
[
"BSD-3-Clause"
] | null | null | null |
common/src/stack/command/stack/commands/unload/attrfile/__init__.py
|
khanfluence/stacki-cumulus-switch
|
df54afb20f6ea6a3a136b3c09b30df54ea79ffcc
|
[
"BSD-3-Clause"
] | null | null | null |
# @copyright@
# Copyright (c) 2006 - 2018 Teradata
# All rights reserved. Stacki(r) v5.x stacki.com
# https://github.com/Teradata/stacki/blob/master/LICENSE.txt
# @copyright@
import os.path
import stack.commands
from stack.exception import CommandError
class Command(stack.commands.unload.command):
"""
Unload (remove) attributes from the database
<param type='string' name='file' optional='0'>
The file that contains the attribute data to be removed from the
database.
</param>
<param type='string' name='processor'>
The processor used to parse the file and to remove the data into the
database. Default: default.
</param>
<example cmd='unload attrfile file=attrs.csv'>
Remove all the attributes in file named attrs.csv and use the default
processor.
</example>
<related>load attrfile</related>
"""
def run(self, params, args):
filename, processor = self.fillParams([
('file', None, True),
('processor', 'default')
])
if not os.path.exists(filename):
raise CommandError(self, 'file "%s" does not exist' % filename)
self.attrs = {}
self.runImplementation('unload_%s' % processor, (filename, ))
self.runPlugins(self.attrs)
| 24.541667
| 70
| 0.713922
|
2096ecd7c077630215c040ca5e09cc47c29b5b69
| 176
|
py
|
Python
|
findthebot/__init__.py
|
MarcoGiancarli/findthebot
|
a162df4168800a2ba245da6dbd6bdd0f85f75dfa
|
[
"MIT"
] | null | null | null |
findthebot/__init__.py
|
MarcoGiancarli/findthebot
|
a162df4168800a2ba245da6dbd6bdd0f85f75dfa
|
[
"MIT"
] | null | null | null |
findthebot/__init__.py
|
MarcoGiancarli/findthebot
|
a162df4168800a2ba245da6dbd6bdd0f85f75dfa
|
[
"MIT"
] | null | null | null |
from flask import Flask
from flask_socketio import SocketIO
app = Flask(__name__)
app.config['SECRET_KEY'] = 'notsosecret'
socketio = SocketIO(app)
import findthebot.views
| 16
| 40
| 0.784091
|
c5c76090808bd12331a699fa285d9eef4ad1eb03
| 1,833
|
py
|
Python
|
structural_patterns/facade_pattern.py
|
foo290/software-design-patterns
|
4a6ee660acf64f94278054bdcce78a4d4caf3da1
|
[
"MIT"
] | null | null | null |
structural_patterns/facade_pattern.py
|
foo290/software-design-patterns
|
4a6ee660acf64f94278054bdcce78a4d4caf3da1
|
[
"MIT"
] | null | null | null |
structural_patterns/facade_pattern.py
|
foo290/software-design-patterns
|
4a6ee660acf64f94278054bdcce78a4d4caf3da1
|
[
"MIT"
] | null | null | null |
class Task1:
def __init__(self):
self.name = self.__class__.__name__
self.state = "Running"
def mark_complete(self):
print(f"Marking {self.name} complete.")
self.state = "Complete"
class Task2:
def __init__(self):
self.name = self.__class__.__name__
self.state = "Running"
def mark_complete(self):
print(f"Marking {self.name} complete.")
self.state = "Complete"
class Task3:
def __init__(self):
self.name = self.__class__.__name__
self.state = "Running"
def mark_complete(self):
print(f"Marking {self.name} complete.")
self.state = "Complete"
class TaskAdapter:
_initialized = False
def __init__(self, task, **kwargs):
super().__init__()
self.task = task
for key, val in kwargs.items():
func = getattr(self.task, val)
self.__setattr__(key, func)
self._initialized = True
def __getattr__(self, item):
return getattr(self.task, item)
def __setattr__(self, key, value):
if not self._initialized:
super().__setattr__(key, value)
else:
setattr(self.task, key, value)
class TaskFacade:
task_adapters = None
@classmethod
def create_tasks(cls):
print("Initializing tasks...")
cls.task_adapters = [
TaskAdapter(Task1(), complete='mark_complete'),
TaskAdapter(Task2(), complete='mark_complete'),
TaskAdapter(Task3(), complete='mark_complete')
]
@classmethod
def mark_all_complete(cls):
print("Marking all tasks as complete.")
for adapter in cls.task_adapters:
adapter.mark_complete()
if __name__ == '__main__':
TaskFacade.create_tasks()
TaskFacade.mark_all_complete()
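# --- Illustrative sketch (not part of the original module) ---
# The adapter exposes the wrapped task in two ways: names passed as keyword
# arguments become aliases bound on the adapter itself (here 'complete'), and
# every other attribute read or write falls through __getattr__/__setattr__ to
# the task. A hedged usage example:
def _example_adapter_forwarding():
    adapter = TaskAdapter(Task1(), complete='mark_complete')
    adapter.complete()                    # alias installed in __init__
    assert adapter.state == "Complete"    # read forwarded to the wrapped task
    adapter.state = "Running"             # write forwarded to the wrapped task
    return adapter.task.state             # "Running"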
| 24.44
| 59
| 0.60611
|
088fb044bdeeb70fffb234ee68f2da0056ccfa99
| 64,031
|
py
|
Python
|
flink-ai-flow/lib/airflow/airflow/models/baseoperator.py
|
shanshanpt/flink-ai-extended
|
c9f4a980ac229188a2bc09558952f7e0085bda70
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
flink-ai-flow/lib/airflow/airflow/models/baseoperator.py
|
shanshanpt/flink-ai-extended
|
c9f4a980ac229188a2bc09558952f7e0085bda70
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
flink-ai-flow/lib/airflow/airflow/models/baseoperator.py
|
shanshanpt/flink-ai-extended
|
c9f4a980ac229188a2bc09558952f7e0085bda70
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Base operator for all operators."""
import abc
import copy
import functools
import logging
import sys
import warnings
from abc import ABCMeta, abstractmethod
from datetime import datetime, timedelta
from typing import (
TYPE_CHECKING,
Any,
Callable,
ClassVar,
Dict,
FrozenSet,
Iterable,
List,
Optional,
Sequence,
Set,
Tuple,
Type,
Union,
)
import attr
import jinja2
from cached_property import cached_property
from dateutil.relativedelta import relativedelta
from sqlalchemy.orm import Session
from airflow.configuration import conf
from airflow.exceptions import AirflowException
from airflow.executors.scheduling_action import SchedulingAction
from airflow.lineage import apply_lineage, prepare_lineage
from airflow.models.base import Operator
from airflow.models.eventhandler import EventHandler
from airflow.models.pool import Pool
from airflow.models.taskinstance import Context, TaskInstance, clear_task_instances
from airflow.models.taskmixin import TaskMixin
from airflow.models.taskstate import TaskState
from airflow.models.xcom import XCOM_RETURN_KEY
from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
from airflow.ti_deps.deps.not_in_retry_period_dep import NotInRetryPeriodDep
from airflow.ti_deps.deps.not_previously_skipped_dep import NotPreviouslySkippedDep
from airflow.ti_deps.deps.prev_dagrun_dep import PrevDagrunDep
from airflow.ti_deps.deps.trigger_rule_dep import TriggerRuleDep
from airflow.utils import timezone
from airflow.utils.decorators import apply_defaults
from airflow.utils.helpers import validate_key
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.operator_resources import Resources
from airflow.utils.session import provide_session
from airflow.utils.trigger_rule import TriggerRule
from airflow.utils.weight_rule import WeightRule
from notification_service.base_notification import UNDEFINED_EVENT_TYPE, BaseEvent
if TYPE_CHECKING:
from airflow.utils.task_group import TaskGroup # pylint: disable=cyclic-import
ScheduleInterval = Union[str, timedelta, relativedelta]
TaskStateChangeCallback = Callable[[Context], None]
class EventMetHandler(object):
"""
    EventMetHandler: processes an event message and decides whether the operator has met its scheduling condition.
"""
def handle_event(self,
event: BaseEvent,
ti: TaskInstance,
ts: TaskState,
session=None) -> SchedulingAction:
"""
        :param event: the event accepted by the scheduler.
        :param ti: the task instance.
        :param ts: TaskState that stores the task instance state.
        :param session: the database connection session.
        :return: the SchedulingAction to take for the task instance.
"""
pass
class DefaultEventMetHandler(EventMetHandler):
def handle_event(self, event: BaseEvent, ti: TaskInstance, ts: TaskState, session=None):
return SchedulingAction.START
class BaseOperatorMeta(abc.ABCMeta):
"""Base metaclass of BaseOperator."""
def __call__(cls, *args, **kwargs):
"""
Called when you call BaseOperator(). In this way we are able to perform an action
after initializing an operator no matter where the ``super().__init__`` is called
        (before or after assignment of new attributes in a custom operator).
"""
obj: BaseOperator = type.__call__(cls, *args, **kwargs)
# Here we set upstream task defined by XComArgs passed to template fields of the operator
obj.set_xcomargs_dependencies()
# Mark instance as instantiated https://docs.python.org/3/tutorial/classes.html#private-variables
obj._BaseOperator__instantiated = True
return obj
class EventOperator(LoggingMixin, Operator):
"""
    The EventOperator can subscribe to the events that it is interested in. It also has
    an EventHandler to handle those events.
"""
def __init__(self, event_handler: EventHandler = None):
"""
:param event_handler: EventHandler to handle subscribed events.
:type event_handler: EventHandler
"""
super().__init__()
self._subscribed_events: Set[Tuple[str, str, str, str]] = set()
self._events_handler: EventHandler = event_handler
def subscribe_event(self, event_key: str,
event_type: str = UNDEFINED_EVENT_TYPE,
event_namespace: str = 'default',
from_task_id: str = None):
"""
        Subscribe to the events with the given event_key and event_type. The event_handler will only handle
        events that the operator subscribes to. event_type is optional; if it is not specified, the operator
        subscribes to all types of events with that key.
:param event_key: the key of the event to subscribe to.
:type event_key: str
:param event_namespace: namespace of the event to subscribe to, default namespace is default
:type event_namespace: str
:param event_type: the type of the event to subscribe to.
:type event_type: str
        :param from_task_id: the id of the task that sends the event
:type from_task_id: str
:return: None
"""
self._subscribed_events.add((event_namespace, event_key, event_type, from_task_id))
def get_subscribed_events(self) -> Set[Tuple[str, str, str, str]]:
"""
:return: the set of events that the operator subscribes to.
        :rtype: Set[Tuple[str, str, str, str]]
"""
return self._subscribed_events
def get_events_handler(self) -> EventHandler:
"""
:return: the event_handler of the operator.
:rtype: EventHandler
"""
return self._events_handler
def set_events_handler(self, events_handler: EventHandler) -> None:
"""
        :param events_handler: the event_handler to set on the operator.
        :type events_handler: EventHandler
"""
self._events_handler = events_handler
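# --- Illustrative sketch (not part of the original module) ---
# A hedged example of the subscription API above: "some_operator" stands for
# any concrete operator instance and the event names are made up. Leaving
# from_task_id as None means the subscription does not restrict which task
# sent the event.
def _example_subscribe(some_operator):
    some_operator.subscribe_event(event_key='model_ready',
                                  event_type='MODEL_GENERATED',
                                  event_namespace='default',
                                  from_task_id=None)
    return some_operator.get_subscribed_events()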
# pylint: disable=too-many-instance-attributes,too-many-public-methods
@functools.total_ordering
class BaseOperator(EventOperator, LoggingMixin, TaskMixin, metaclass=BaseOperatorMeta):
"""
Abstract base class for all operators. Since operators create objects that
become nodes in the dag, BaseOperator contains many recursive methods for
dag crawling behavior. To derive this class, you are expected to override
the constructor as well as the 'execute' method.
Operators derived from this class should perform or trigger certain tasks
synchronously (wait for completion). Example of operators could be an
operator that runs a Pig job (PigOperator), a sensor operator that
waits for a partition to land in Hive (HiveSensorOperator), or one that
moves data from Hive to MySQL (Hive2MySqlOperator). Instances of these
operators (tasks) target specific operations, running specific scripts,
functions or data transfers.
This class is abstract and shouldn't be instantiated. Instantiating a
class derived from this one results in the creation of a task object,
which ultimately becomes a node in DAG objects. Task dependencies should
be set by using the set_upstream and/or set_downstream methods.
:param task_id: a unique, meaningful id for the task
:type task_id: str
:param owner: the owner of the task, using the unix username is recommended
:type owner: str
:param email: the 'to' email address(es) used in email alerts. This can be a
single email or multiple ones. Multiple addresses can be specified as a
comma or semi-colon separated string or by passing a list of strings.
:type email: str or list[str]
:param email_on_retry: Indicates whether email alerts should be sent when a
task is retried
:type email_on_retry: bool
:param email_on_failure: Indicates whether email alerts should be sent when
a task failed
:type email_on_failure: bool
:param retries: the number of retries that should be performed before
failing the task
:type retries: int
:param retry_delay: delay between retries
:type retry_delay: datetime.timedelta
:param retry_exponential_backoff: allow progressive longer waits between
retries by using exponential backoff algorithm on retry delay (delay
will be converted into seconds)
:type retry_exponential_backoff: bool
:param max_retry_delay: maximum delay interval between retries
:type max_retry_delay: datetime.timedelta
:param start_date: The ``start_date`` for the task, determines
the ``execution_date`` for the first task instance. The best practice
is to have the start_date rounded
to your DAG's ``schedule_interval``. Daily jobs have their start_date
some day at 00:00:00, hourly jobs have their start_date at 00:00
of a specific hour. Note that Airflow simply looks at the latest
``execution_date`` and adds the ``schedule_interval`` to determine
the next ``execution_date``. It is also very important
to note that different tasks' dependencies
need to line up in time. If task A depends on task B and their
start_date are offset in a way that their execution_date don't line
up, A's dependencies will never be met. If you are looking to delay
a task, for example running a daily task at 2AM, look into the
``TimeSensor`` and ``TimeDeltaSensor``. We advise against using
dynamic ``start_date`` and recommend using fixed ones. Read the
FAQ entry about start_date for more information.
:type start_date: datetime.datetime
:param end_date: if specified, the scheduler won't go beyond this date
:type end_date: datetime.datetime
:param depends_on_past: when set to true, task instances will run
sequentially and only if the previous instance has succeeded or has been skipped.
The task instance for the start_date is allowed to run.
:type depends_on_past: bool
:param wait_for_downstream: when set to true, an instance of task
X will wait for tasks immediately downstream of the previous instance
of task X to finish successfully or be skipped before it runs. This is useful if the
different instances of a task X alter the same asset, and this asset
is used by tasks downstream of task X. Note that depends_on_past
is forced to True wherever wait_for_downstream is used. Also note that
only tasks *immediately* downstream of the previous task instance are waited
for; the statuses of any tasks further downstream are ignored.
:type wait_for_downstream: bool
:param dag: a reference to the dag the task is attached to (if any)
:type dag: airflow.models.DAG
    :param priority_weight: priority weight of this task against other tasks.
This allows the executor to trigger higher priority tasks before
others when things get backed up. Set priority_weight as a higher
number for more important tasks.
:type priority_weight: int
:param weight_rule: weighting method used for the effective total
priority weight of the task. Options are:
``{ downstream | upstream | absolute }`` default is ``downstream``
When set to ``downstream`` the effective weight of the task is the
aggregate sum of all downstream descendants. As a result, upstream
tasks will have higher weight and will be scheduled more aggressively
when using positive weight values. This is useful when you have
multiple dag run instances and desire to have all upstream tasks to
complete for all runs before each dag can continue processing
downstream tasks. When set to ``upstream`` the effective weight is the
aggregate sum of all upstream ancestors. This is the opposite where
downstream tasks have higher weight and will be scheduled more
aggressively when using positive weight values. This is useful when you
have multiple dag run instances and prefer to have each dag complete
before starting upstream tasks of other dags. When set to
``absolute``, the effective weight is the exact ``priority_weight``
specified without additional weighting. You may want to do this when
you know exactly what priority weight each task should have.
        Additionally, when set to ``absolute``, there is a bonus effect of
        significantly speeding up the task creation process for very large
        DAGs. Options can be set as string or using the constants defined in
the static class ``airflow.utils.WeightRule``
:type weight_rule: str
:param queue: which queue to target when running this job. Not
        all executors implement queue management; the CeleryExecutor
does support targeting specific queues.
:type queue: str
:param pool: the slot pool this task should run in, slot pools are a
way to limit concurrency for certain tasks
:type pool: str
:param pool_slots: the number of pool slots this task should use (>= 1)
Values less than 1 are not allowed.
:type pool_slots: int
:param sla: time by which the job is expected to succeed. Note that
this represents the ``timedelta`` after the period is closed. For
example if you set an SLA of 1 hour, the scheduler would send an email
soon after 1:00AM on the ``2016-01-02`` if the ``2016-01-01`` instance
has not succeeded yet.
The scheduler pays special attention for jobs with an SLA and
sends alert
emails for sla misses. SLA misses are also recorded in the database
for future reference. All tasks that share the same SLA time
get bundled in a single email, sent soon after that time. SLA
        notifications are sent once and only once for each task instance.
:type sla: datetime.timedelta
:param execution_timeout: max time allowed for the execution of
this task instance, if it goes beyond it will raise and fail.
:type execution_timeout: datetime.timedelta
:param on_failure_callback: a function to be called when a task instance
        of this task fails. A context dictionary is passed as a single
        parameter to this function. Context contains references to objects
        related to the task instance and is documented under the macros
section of the API.
:type on_failure_callback: TaskStateChangeCallback
:param on_execute_callback: much like the ``on_failure_callback`` except
that it is executed right before the task is executed.
:type on_execute_callback: TaskStateChangeCallback
:param on_retry_callback: much like the ``on_failure_callback`` except
that it is executed when retries occur.
:type on_retry_callback: TaskStateChangeCallback
:param on_success_callback: much like the ``on_failure_callback`` except
that it is executed when the task succeeds.
:type on_success_callback: TaskStateChangeCallback
:param trigger_rule: defines the rule by which dependencies are applied
for the task to get triggered. Options are:
``{ all_success | all_failed | all_done | one_success |
one_failed | none_failed | none_failed_or_skipped | none_skipped | dummy}``
default is ``all_success``. Options can be set as string or
using the constants defined in the static class
``airflow.utils.TriggerRule``
:type trigger_rule: str
:param resources: A map of resource parameter names (the argument names of the
Resources constructor) to their values.
:type resources: dict
:param run_as_user: unix username to impersonate while running the task
:type run_as_user: str
:param task_concurrency: When set, a task will be able to limit the concurrent
runs across execution_dates
:type task_concurrency: int
:param executor_config: Additional task-level configuration parameters that are
interpreted by a specific executor. Parameters are namespaced by the name of
executor.
**Example**: to run this task in a specific docker container through
the KubernetesExecutor ::
MyOperator(...,
executor_config={
"KubernetesExecutor":
{"image": "myCustomDockerImage"}
}
)
:type executor_config: dict
:param do_xcom_push: if True, an XCom is pushed containing the Operator's
result
:type do_xcom_push: bool
"""
# For derived classes to define which fields will get jinjaified
template_fields: Iterable[str] = ()
# Defines which files extensions to look for in the templated fields
template_ext: Iterable[str] = ()
# Template field renderers indicating type of the field, for example sql, json, bash
template_fields_renderers: Dict[str, str] = {}
# Defines the color in the UI
ui_color = '#fff' # type: str
ui_fgcolor = '#000' # type: str
pool = "" # type: str
# base list which includes all the attrs that don't need deep copy.
_base_operator_shallow_copy_attrs: Tuple[str, ...] = (
'user_defined_macros',
'user_defined_filters',
'params',
'_log',
)
# each operator should override this class attr for shallow copy attrs.
shallow_copy_attrs: Tuple[str, ...] = ()
# Defines the operator level extra links
operator_extra_links: Iterable['BaseOperatorLink'] = ()
# The _serialized_fields are lazily loaded when get_serialized_fields() method is called
__serialized_fields: Optional[FrozenSet[str]] = None
_comps = {
'task_id',
'dag_id',
'owner',
'email',
'email_on_retry',
'retry_delay',
'retry_exponential_backoff',
'max_retry_delay',
'start_date',
'depends_on_past',
'wait_for_downstream',
'priority_weight',
'sla',
'execution_timeout',
'on_execute_callback',
'on_failure_callback',
'on_success_callback',
'on_retry_callback',
'do_xcom_push',
}
# Defines if the operator supports lineage without manual definitions
supports_lineage = False
# If True then the class constructor was called
__instantiated = False
# Set to True before calling execute method
_lock_for_execution = False
# pylint: disable=too-many-arguments,too-many-locals, too-many-statements
@apply_defaults
def __init__(
self,
task_id: str,
owner: str = conf.get('operators', 'DEFAULT_OWNER'),
email: Optional[Union[str, Iterable[str]]] = None,
email_on_retry: bool = conf.getboolean('email', 'default_email_on_retry', fallback=True),
email_on_failure: bool = conf.getboolean('email', 'default_email_on_failure', fallback=True),
retries: Optional[int] = conf.getint('core', 'default_task_retries', fallback=0),
retry_delay: timedelta = timedelta(seconds=300),
retry_exponential_backoff: bool = False,
        max_retry_delay: Optional[timedelta] = None,
start_date: Optional[datetime] = None,
end_date: Optional[datetime] = None,
depends_on_past: bool = False,
wait_for_downstream: bool = False,
dag=None,
params: Optional[Dict] = None,
default_args: Optional[Dict] = None, # pylint: disable=unused-argument
priority_weight: int = 1,
weight_rule: str = WeightRule.DOWNSTREAM,
queue: str = conf.get('celery', 'default_queue'),
pool: Optional[str] = None,
pool_slots: int = 1,
sla: Optional[timedelta] = None,
execution_timeout: Optional[timedelta] = None,
on_execute_callback: Optional[TaskStateChangeCallback] = None,
on_failure_callback: Optional[TaskStateChangeCallback] = None,
on_success_callback: Optional[TaskStateChangeCallback] = None,
on_retry_callback: Optional[TaskStateChangeCallback] = None,
trigger_rule: str = TriggerRule.ALL_SUCCESS,
resources: Optional[Dict] = None,
run_as_user: Optional[str] = None,
task_concurrency: Optional[int] = None,
executor_config: Optional[Dict] = None,
do_xcom_push: bool = True,
inlets: Optional[Any] = None,
outlets: Optional[Any] = None,
task_group: Optional["TaskGroup"] = None,
event_handler: Optional[EventHandler] = None,
**kwargs,
):
from airflow.models.dag import DagContext
from airflow.utils.task_group import TaskGroupContext
super().__init__(event_handler)
if kwargs:
if not conf.getboolean('operators', 'ALLOW_ILLEGAL_ARGUMENTS'):
raise AirflowException(
"Invalid arguments were passed to {c} (task_id: {t}). Invalid "
"arguments were:\n**kwargs: {k}".format(c=self.__class__.__name__, k=kwargs, t=task_id),
)
warnings.warn(
'Invalid arguments were passed to {c} (task_id: {t}). '
'Support for passing such arguments will be dropped in '
'future. Invalid arguments were:'
'\n**kwargs: {k}'.format(c=self.__class__.__name__, k=kwargs, t=task_id),
category=PendingDeprecationWarning,
stacklevel=3,
)
validate_key(task_id)
self.task_id = task_id
self.label = task_id
task_group = task_group or TaskGroupContext.get_current_task_group(dag)
if task_group:
self.task_id = task_group.child_id(task_id)
task_group.add(self)
self.owner = owner
self.email = email
self.email_on_retry = email_on_retry
self.email_on_failure = email_on_failure
self.start_date = start_date
if start_date and not isinstance(start_date, datetime):
self.log.warning("start_date for %s isn't datetime.datetime", self)
elif start_date:
self.start_date = timezone.convert_to_utc(start_date)
self.end_date = end_date
if end_date:
self.end_date = timezone.convert_to_utc(end_date)
if not TriggerRule.is_valid(trigger_rule):
raise AirflowException(
"The trigger_rule must be one of {all_triggers},"
"'{d}.{t}'; received '{tr}'.".format(
all_triggers=TriggerRule.all_triggers(),
d=dag.dag_id if dag else "",
t=task_id,
tr=trigger_rule,
)
)
self.trigger_rule = trigger_rule
self.depends_on_past = depends_on_past
self.wait_for_downstream = wait_for_downstream
if wait_for_downstream:
self.depends_on_past = True
self.retries = retries
self.queue = queue
self.pool = Pool.DEFAULT_POOL_NAME if pool is None else pool
self.pool_slots = pool_slots
if self.pool_slots < 1:
raise AirflowException(f"pool slots for {self.task_id} in dag {dag.dag_id} cannot be less than 1")
self.sla = sla
self.execution_timeout = execution_timeout
self.on_execute_callback = on_execute_callback
self.on_failure_callback = on_failure_callback
self.on_success_callback = on_success_callback
self.on_retry_callback = on_retry_callback
if isinstance(retry_delay, timedelta):
self.retry_delay = retry_delay
else:
self.log.debug("Retry_delay isn't timedelta object, assuming secs")
self.retry_delay = timedelta(seconds=retry_delay) # noqa
self.retry_exponential_backoff = retry_exponential_backoff
self.max_retry_delay = max_retry_delay
self.params = params or {} # Available in templates!
self.priority_weight = priority_weight
if not WeightRule.is_valid(weight_rule):
raise AirflowException(
"The weight_rule must be one of {all_weight_rules},"
"'{d}.{t}'; received '{tr}'.".format(
all_weight_rules=WeightRule.all_weight_rules,
d=dag.dag_id if dag else "",
t=task_id,
tr=weight_rule,
)
)
self.weight_rule = weight_rule
self.resources: Optional[Resources] = Resources(**resources) if resources else None
self.run_as_user = run_as_user
self.task_concurrency = task_concurrency
self.executor_config = executor_config or {}
self.do_xcom_push = do_xcom_push
# Private attributes
self._upstream_task_ids: Set[str] = set()
self._downstream_task_ids: Set[str] = set()
self._dag = None
self.dag = dag or DagContext.get_current_dag()
# subdag parameter is only set for SubDagOperator.
# Setting it to None by default as other Operators do not have that field
from airflow.models.dag import DAG
self.subdag: Optional[DAG] = None
self._log = logging.getLogger("airflow.task.operators")
# Lineage
self.inlets: List = []
self.outlets: List = []
self._inlets: List = []
self._outlets: List = []
if inlets:
self._inlets = (
inlets
if isinstance(inlets, list)
else [
inlets,
]
)
if outlets:
self._outlets = (
outlets
if isinstance(outlets, list)
else [
outlets,
]
)
def __eq__(self, other):
if type(self) is type(other) and self.task_id == other.task_id:
return all(self.__dict__.get(c, None) == other.__dict__.get(c, None) for c in self._comps)
return False
def __ne__(self, other):
return not self == other
def __hash__(self):
hash_components = [type(self)]
for component in self._comps:
val = getattr(self, component, None)
try:
hash(val)
hash_components.append(val)
except TypeError:
hash_components.append(repr(val))
return hash(tuple(hash_components))
# including lineage information
def __or__(self, other):
"""
Called for [This Operator] | [Operator], The inlets of other
will be set to pickup the outlets from this operator. Other will
be set as a downstream task of this operator.
"""
if isinstance(other, BaseOperator):
if not self._outlets and not self.supports_lineage:
raise ValueError("No outlets defined for this operator")
other.add_inlets([self.task_id])
self.set_downstream(other)
else:
raise TypeError(f"Right hand side ({other}) is not an Operator")
return self
# /Composing Operators ---------------------------------------------
def __gt__(self, other):
"""
Called for [Operator] > [Outlet], so that if other is an attr annotated object
it is set as an outlet of this Operator.
"""
if not isinstance(other, Iterable):
other = [other]
for obj in other:
if not attr.has(obj):
raise TypeError(f"Left hand side ({obj}) is not an outlet")
self.add_outlets(other)
return self
def __lt__(self, other):
"""
Called for [Inlet] > [Operator] or [Operator] < [Inlet], so that if other is
an attr annotated object it is set as an inlet to this operator
"""
if not isinstance(other, Iterable):
other = [other]
for obj in other:
if not attr.has(obj):
raise TypeError(f"{obj} cannot be an inlet")
self.add_inlets(other)
return self
def __setattr__(self, key, value):
super().__setattr__(key, value)
if self._lock_for_execution:
# Skip any custom behaviour during execute
return
if self.__instantiated and key in self.template_fields:
# Resolve upstreams set by assigning an XComArg after initializing
# an operator, example:
# op = BashOperator()
# op.bash_command = "sleep 1"
self.set_xcomargs_dependencies()
def add_inlets(self, inlets: Iterable[Any]):
"""Sets inlets to this operator"""
self._inlets.extend(inlets)
def add_outlets(self, outlets: Iterable[Any]):
"""Defines the outlets of this operator"""
self._outlets.extend(outlets)
def get_inlet_defs(self):
""":return: list of inlets defined for this operator"""
return self._inlets
def get_outlet_defs(self):
""":return: list of outlets defined for this operator"""
return self._outlets
@property
def dag(self) -> Any:
"""Returns the Operator's DAG if set, otherwise raises an error"""
if self.has_dag():
return self._dag
else:
raise AirflowException(f'Operator {self} has not been assigned to a DAG yet')
@dag.setter
def dag(self, dag: Any):
"""
Operators can be assigned to one DAG, one time. Repeat assignments to
that same DAG are ok.
"""
from airflow.models.dag import DAG
if dag is None:
self._dag = None
return
if not isinstance(dag, DAG):
raise TypeError(f'Expected DAG; received {dag.__class__.__name__}')
elif self.has_dag() and self.dag is not dag:
raise AirflowException(f"The DAG assigned to {self} can not be changed.")
elif self.task_id not in dag.task_dict:
dag.add_task(self)
elif self.task_id in dag.task_dict and dag.task_dict[self.task_id] is not self:
dag.add_task(self)
self._dag = dag
def has_dag(self):
"""Returns True if the Operator has been assigned to a DAG."""
return getattr(self, '_dag', None) is not None
@property
def dag_id(self) -> str:
"""Returns dag id if it has one or an adhoc + owner"""
if self.has_dag():
return self.dag.dag_id
else:
return 'adhoc_' + self.owner
deps: Iterable[BaseTIDep] = frozenset(
{
NotInRetryPeriodDep(),
PrevDagrunDep(),
TriggerRuleDep(),
NotPreviouslySkippedDep(),
}
)
"""
Returns the set of dependencies for the operator. These differ from execution
context dependencies in that they are specific to tasks and can be
extended/overridden by subclasses.
"""
def prepare_for_execution(self) -> "BaseOperator":
"""
Lock task for execution to disable custom action in __setattr__ and
returns a copy of the task
"""
other = copy.copy(self)
other._lock_for_execution = True # pylint: disable=protected-access
return other
def set_xcomargs_dependencies(self) -> None:
"""
Resolves upstream dependencies of a task. In this way passing an ``XComArg``
as value for a template field will result in creating upstream relation between
two tasks.
**Example**: ::
with DAG(...):
generate_content = GenerateContentOperator(task_id="generate_content")
send_email = EmailOperator(..., html_content=generate_content.output)
# This is equivalent to
with DAG(...):
generate_content = GenerateContentOperator(task_id="generate_content")
send_email = EmailOperator(
..., html_content="{{ task_instance.xcom_pull('generate_content') }}"
)
generate_content >> send_email
"""
from airflow.models.xcom_arg import XComArg
def apply_set_upstream(arg: Any): # noqa
if isinstance(arg, XComArg):
self.set_upstream(arg.operator)
elif isinstance(arg, (tuple, set, list)):
for elem in arg:
apply_set_upstream(elem)
elif isinstance(arg, dict):
for elem in arg.values():
apply_set_upstream(elem)
elif hasattr(arg, "template_fields"):
for elem in arg.template_fields:
apply_set_upstream(elem)
for field in self.template_fields:
if hasattr(self, field):
arg = getattr(self, field)
apply_set_upstream(arg)
@property
def priority_weight_total(self) -> int:
"""
        Total priority weight for the task. It might include all upstream or downstream tasks,
        depending on the weight rule.
- WeightRule.ABSOLUTE - only own weight
- WeightRule.DOWNSTREAM - adds priority weight of all downstream tasks
- WeightRule.UPSTREAM - adds priority weight of all upstream tasks
"""
if self.weight_rule == WeightRule.ABSOLUTE:
return self.priority_weight
elif self.weight_rule == WeightRule.DOWNSTREAM:
upstream = False
elif self.weight_rule == WeightRule.UPSTREAM:
upstream = True
else:
upstream = False
if not self._dag:
return self.priority_weight
from airflow.models.dag import DAG
dag: DAG = self._dag
return self.priority_weight + sum(
map(
lambda task_id: dag.task_dict[task_id].priority_weight,
self.get_flat_relative_ids(upstream=upstream),
)
)
@cached_property
def operator_extra_link_dict(self) -> Dict[str, Any]:
"""Returns dictionary of all extra links for the operator"""
op_extra_links_from_plugin: Dict[str, Any] = {}
from airflow import plugins_manager
plugins_manager.initialize_extra_operators_links_plugins()
if plugins_manager.operator_extra_links is None:
raise AirflowException("Can't load operators")
for ope in plugins_manager.operator_extra_links:
if ope.operators and self.__class__ in ope.operators:
op_extra_links_from_plugin.update({ope.name: ope})
operator_extra_links_all = {link.name: link for link in self.operator_extra_links}
# Extra links defined in Plugins overrides operator links defined in operator
operator_extra_links_all.update(op_extra_links_from_plugin)
return operator_extra_links_all
@cached_property
def global_operator_extra_link_dict(self) -> Dict[str, Any]:
"""Returns dictionary of all global extra links"""
from airflow import plugins_manager
plugins_manager.initialize_extra_operators_links_plugins()
if plugins_manager.global_operator_extra_links is None:
raise AirflowException("Can't load operators")
return {link.name: link for link in plugins_manager.global_operator_extra_links}
@prepare_lineage
def pre_execute(self, context: Any):
"""This hook is triggered right before self.execute() is called."""
def execute(self, context: Any):
"""
This is the main method to derive when creating an operator.
Context is the same dictionary used as when rendering jinja templates.
Refer to get_template_context for more context.
"""
raise NotImplementedError()
@apply_lineage
def post_execute(self, context: Any, result: Any = None):
"""
This hook is triggered right after self.execute() is called.
It is passed the execution context and any results returned by the
operator.
"""
def on_kill(self) -> None:
"""
Override this method to cleanup subprocesses when a task instance
gets killed. Any use of the threading, subprocess or multiprocessing
module within an operator needs to be cleaned up or it will leave
ghost processes behind.
"""
def __deepcopy__(self, memo):
"""
Hack sorting double chained task lists by task_id to avoid hitting
max_depth on deepcopy operations.
"""
sys.setrecursionlimit(5000) # TODO fix this in a better way
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
shallow_copy = (
cls.shallow_copy_attrs + cls._base_operator_shallow_copy_attrs
) # pylint: disable=protected-access
for k, v in self.__dict__.items():
if k not in shallow_copy:
setattr(result, k, copy.deepcopy(v, memo)) # noqa
else:
setattr(result, k, copy.copy(v))
return result
def __getstate__(self):
state = dict(self.__dict__)
del state['_log']
return state
def __setstate__(self, state):
self.__dict__ = state # pylint: disable=attribute-defined-outside-init
self._log = logging.getLogger("airflow.task.operators")
def render_template_fields(self, context: Dict, jinja_env: Optional[jinja2.Environment] = None) -> None:
"""
Template all attributes listed in template_fields. Note this operation is irreversible.
:param context: Dict with values to apply on content
:type context: dict
:param jinja_env: Jinja environment
:type jinja_env: jinja2.Environment
"""
if not jinja_env:
jinja_env = self.get_template_env()
self._do_render_template_fields(self, self.template_fields, context, jinja_env, set())
def _do_render_template_fields(
self,
parent: Any,
template_fields: Iterable[str],
context: Dict,
jinja_env: jinja2.Environment,
seen_oids: Set,
) -> None:
for attr_name in template_fields:
content = getattr(parent, attr_name)
if content:
rendered_content = self.render_template(content, context, jinja_env, seen_oids)
setattr(parent, attr_name, rendered_content)
def render_template( # pylint: disable=too-many-return-statements
self,
content: Any,
context: Dict,
jinja_env: Optional[jinja2.Environment] = None,
seen_oids: Optional[Set] = None,
) -> Any:
"""
Render a templated string. The content can be a collection holding multiple templated strings and will
be templated recursively.
:param content: Content to template. Only strings can be templated (may be inside collection).
:type content: Any
:param context: Dict with values to apply on templated content
:type context: dict
:param jinja_env: Jinja environment. Can be provided to avoid re-creating Jinja environments during
recursion.
:type jinja_env: jinja2.Environment
:param seen_oids: template fields already rendered (to avoid RecursionError on circular dependencies)
:type seen_oids: set
:return: Templated content
"""
if not jinja_env:
jinja_env = self.get_template_env()
# Imported here to avoid circular dependency
from airflow.models.dagparam import DagParam
from airflow.models.xcom_arg import XComArg
if isinstance(content, str):
if any(content.endswith(ext) for ext in self.template_ext):
# Content contains a filepath
return jinja_env.get_template(content).render(**context)
else:
return jinja_env.from_string(content).render(**context)
elif isinstance(content, (XComArg, DagParam)):
return content.resolve(context)
if isinstance(content, tuple):
if type(content) is not tuple: # pylint: disable=unidiomatic-typecheck
# Special case for named tuples
return content.__class__(
*(self.render_template(element, context, jinja_env) for element in content) # noqa
)
else:
return tuple(self.render_template(element, context, jinja_env) for element in content)
elif isinstance(content, list):
return [self.render_template(element, context, jinja_env) for element in content]
elif isinstance(content, dict):
return {key: self.render_template(value, context, jinja_env) for key, value in content.items()}
elif isinstance(content, set):
return {self.render_template(element, context, jinja_env) for element in content}
else:
if seen_oids is None:
seen_oids = set()
self._render_nested_template_fields(content, context, jinja_env, seen_oids)
return content
def _render_nested_template_fields(
self, content: Any, context: Dict, jinja_env: jinja2.Environment, seen_oids: Set
) -> None:
if id(content) not in seen_oids:
seen_oids.add(id(content))
try:
nested_template_fields = content.template_fields
except AttributeError:
# content has no inner template fields
return
self._do_render_template_fields(content, nested_template_fields, context, jinja_env, seen_oids)
def get_template_env(self) -> jinja2.Environment:
"""Fetch a Jinja template environment from the DAG or instantiate empty environment if no DAG."""
return self.dag.get_template_env() if self.has_dag() else jinja2.Environment(cache_size=0) # noqa
def prepare_template(self) -> None:
"""
Hook that is triggered after the templated fields get replaced
by their content. If you need your operator to alter the
content of the file before the template is rendered,
it should override this method to do so.
"""
def resolve_template_files(self) -> None:
"""Getting the content of files for template_field / template_ext"""
if self.template_ext: # pylint: disable=too-many-nested-blocks
for field in self.template_fields:
content = getattr(self, field, None)
if content is None: # pylint: disable=no-else-continue
continue
elif isinstance(content, str) and any(content.endswith(ext) for ext in self.template_ext):
env = self.get_template_env()
try:
setattr(self, field, env.loader.get_source(env, content)[0])
except Exception as e: # pylint: disable=broad-except
self.log.exception(e)
elif isinstance(content, list):
env = self.dag.get_template_env()
for i in range(len(content)): # pylint: disable=consider-using-enumerate
if isinstance(content[i], str) and any(
content[i].endswith(ext) for ext in self.template_ext
):
try:
content[i] = env.loader.get_source(env, content[i])[0]
except Exception as e: # pylint: disable=broad-except
self.log.exception(e)
self.prepare_template()
@property
def upstream_list(self) -> List["BaseOperator"]:
"""@property: list of tasks directly upstream"""
return [self.dag.get_task(tid) for tid in self._upstream_task_ids]
@property
def upstream_task_ids(self) -> Set[str]:
"""@property: set of ids of tasks directly upstream"""
return self._upstream_task_ids
@property
def downstream_list(self) -> List["BaseOperator"]:
"""@property: list of tasks directly downstream"""
return [self.dag.get_task(tid) for tid in self._downstream_task_ids]
@property
def downstream_task_ids(self) -> Set[str]:
"""@property: set of ids of tasks directly downstream"""
return self._downstream_task_ids
@provide_session
def clear(
self,
start_date: Optional[datetime] = None,
end_date: Optional[datetime] = None,
upstream: bool = False,
downstream: bool = False,
session: Session = None,
):
"""
Clears the state of task instances associated with the task, following
the parameters specified.
"""
qry = session.query(TaskInstance).filter(TaskInstance.dag_id == self.dag_id)
if start_date:
qry = qry.filter(TaskInstance.execution_date >= start_date)
if end_date:
qry = qry.filter(TaskInstance.execution_date <= end_date)
tasks = [self.task_id]
if upstream:
tasks += [t.task_id for t in self.get_flat_relatives(upstream=True)]
if downstream:
tasks += [t.task_id for t in self.get_flat_relatives(upstream=False)]
qry = qry.filter(TaskInstance.task_id.in_(tasks))
results = qry.all()
count = len(results)
clear_task_instances(results, session, dag=self.dag)
session.commit()
return count
@provide_session
def get_task_instances(
self,
start_date: Optional[datetime] = None,
end_date: Optional[datetime] = None,
session: Session = None,
) -> List[TaskInstance]:
"""
        Get the list of task instances related to this task for a specific date
        range.
"""
end_date = end_date or timezone.utcnow()
return (
session.query(TaskInstance)
.filter(TaskInstance.dag_id == self.dag_id)
.filter(TaskInstance.task_id == self.task_id)
.filter(TaskInstance.execution_date >= start_date)
.filter(TaskInstance.execution_date <= end_date)
.order_by(TaskInstance.execution_date)
.all()
)
def get_flat_relative_ids(
self,
upstream: bool = False,
found_descendants: Optional[Set[str]] = None,
) -> Set[str]:
"""Get a flat set of relatives' ids, either upstream or downstream."""
if not self._dag:
return set()
if not found_descendants:
found_descendants = set()
relative_ids = self.get_direct_relative_ids(upstream)
for relative_id in relative_ids:
if relative_id not in found_descendants:
found_descendants.add(relative_id)
relative_task = self._dag.task_dict[relative_id]
relative_task.get_flat_relative_ids(upstream, found_descendants)
return found_descendants
def get_flat_relatives(self, upstream: bool = False):
"""Get a flat list of relatives, either upstream or downstream."""
if not self._dag:
return set()
from airflow.models.dag import DAG
dag: DAG = self._dag
return list(map(lambda task_id: dag.task_dict[task_id], self.get_flat_relative_ids(upstream)))
def run(
self,
start_date: Optional[datetime] = None,
end_date: Optional[datetime] = None,
ignore_first_depends_on_past: bool = True,
ignore_ti_state: bool = False,
mark_success: bool = False,
) -> None:
"""Run a set of task instances for a date range."""
start_date = start_date or self.start_date
end_date = end_date or self.end_date or timezone.utcnow()
for execution_date in self.dag.date_range(start_date, end_date=end_date):
TaskInstance(self, execution_date).run(
mark_success=mark_success,
ignore_depends_on_past=(execution_date == start_date and ignore_first_depends_on_past),
ignore_ti_state=ignore_ti_state,
)
def dry_run(self) -> None:
"""Performs dry run for the operator - just render template fields."""
self.log.info('Dry run')
for field in self.template_fields:
content = getattr(self, field)
if content and isinstance(content, str):
self.log.info('Rendering template for %s', field)
self.log.info(content)
def get_direct_relative_ids(self, upstream: bool = False) -> Set[str]:
"""
Get set of the direct relative ids to the current task, upstream or
downstream.
"""
if upstream:
return self._upstream_task_ids
else:
return self._downstream_task_ids
def get_direct_relatives(self, upstream: bool = False) -> List["BaseOperator"]:
"""
Get list of the direct relatives to the current task, upstream or
downstream.
"""
if upstream:
return self.upstream_list
else:
return self.downstream_list
def __repr__(self):
return "<Task({self.task_type}): {self.task_id}>".format(self=self)
@property
def task_type(self) -> str:
"""@property: type of the task"""
return self.__class__.__name__
def add_only_new(self, item_set: Set[str], item: str) -> None:
"""Adds only new items to item set"""
if item in item_set:
self.log.warning('Dependency %s, %s already registered', self, item)
else:
item_set.add(item)
@property
def roots(self) -> List["BaseOperator"]:
"""Required by TaskMixin"""
return [self]
@property
def leaves(self) -> List["BaseOperator"]:
"""Required by TaskMixin"""
return [self]
def _set_relatives(
self,
task_or_task_list: Union[TaskMixin, Sequence[TaskMixin]],
upstream: bool = False,
) -> None:
"""Sets relatives for the task or task list."""
if not isinstance(task_or_task_list, Sequence):
task_or_task_list = [task_or_task_list]
task_list: List["BaseOperator"] = []
for task_object in task_or_task_list:
task_object.update_relative(self, not upstream)
relatives = task_object.leaves if upstream else task_object.roots
task_list.extend(relatives)
for task in task_list:
if not isinstance(task, BaseOperator):
raise AirflowException(
"Relationships can only be set between "
"Operators; received {}".format(task.__class__.__name__)
)
# relationships can only be set if the tasks share a single DAG. Tasks
# without a DAG are assigned to that DAG.
dags = {
task._dag.dag_id: task._dag # type: ignore # pylint: disable=protected-access,no-member
for task in self.roots + task_list
if task.has_dag() # pylint: disable=no-member
}
if len(dags) > 1:
raise AirflowException(
f'Tried to set relationships between tasks in more than one DAG: {dags.values()}'
)
elif len(dags) == 1:
dag = dags.popitem()[1]
else:
raise AirflowException(
"Tried to create relationships between tasks that don't have "
"DAGs yet. Set the DAG for at least one "
"task and try again: {}".format([self] + task_list)
)
if dag and not self.has_dag():
# If this task does not yet have a dag, add it to the same dag as the other task and
# put it in the dag's root TaskGroup.
self.dag = dag
self.dag.task_group.add(self)
for task in task_list:
if dag and not task.has_dag():
# If the other task does not yet have a dag, add it to the same dag as this task and
# put it in the dag's root TaskGroup.
task.dag = dag
task.dag.task_group.add(task)
if upstream:
task.add_only_new(task.get_direct_relative_ids(upstream=False), self.task_id)
self.add_only_new(self._upstream_task_ids, task.task_id)
else:
self.add_only_new(self._downstream_task_ids, task.task_id)
task.add_only_new(task.get_direct_relative_ids(upstream=True), self.task_id)
def set_downstream(self, task_or_task_list: Union[TaskMixin, Sequence[TaskMixin]]) -> None:
"""
Set a task or a task list to be directly downstream from the current
task. Required by TaskMixin.
"""
self._set_relatives(task_or_task_list, upstream=False)
def set_upstream(self, task_or_task_list: Union[TaskMixin, Sequence[TaskMixin]]) -> None:
"""
Set a task or a task list to be directly upstream from the current
task. Required by TaskMixin.
"""
self._set_relatives(task_or_task_list, upstream=True)
@property
def output(self):
"""Returns reference to XCom pushed by current operator"""
from airflow.models.xcom_arg import XComArg
return XComArg(operator=self)
@staticmethod
def xcom_push(
context: Any,
key: str,
value: Any,
execution_date: Optional[datetime] = None,
) -> None:
"""
Make an XCom available for tasks to pull.
:param context: Execution Context Dictionary
:type: Any
:param key: A key for the XCom
:type key: str
:param value: A value for the XCom. The value is pickled and stored
in the database.
:type value: any pickleable object
:param execution_date: if provided, the XCom will not be visible until
this date. This can be used, for example, to send a message to a
task on a future date without it being immediately visible.
:type execution_date: datetime
"""
context['ti'].xcom_push(key=key, value=value, execution_date=execution_date)
@staticmethod
def xcom_pull(
context: Any,
task_ids: Optional[List[str]] = None,
dag_id: Optional[str] = None,
key: str = XCOM_RETURN_KEY,
include_prior_dates: Optional[bool] = None,
) -> Any:
"""
Pull XComs that optionally meet certain criteria.
The default value for `key` limits the search to XComs
that were returned by other tasks (as opposed to those that were pushed
manually). To remove this filter, pass key=None (or any desired value).
If a single task_id string is provided, the result is the value of the
most recent matching XCom from that task_id. If multiple task_ids are
provided, a tuple of matching values is returned. None is returned
whenever no matches are found.
:param context: Execution Context Dictionary
:type: Any
:param key: A key for the XCom. If provided, only XComs with matching
keys will be returned. The default key is 'return_value', also
available as a constant XCOM_RETURN_KEY. This key is automatically
given to XComs returned by tasks (as opposed to being pushed
manually). To remove the filter, pass key=None.
:type key: str
:param task_ids: Only XComs from tasks with matching ids will be
pulled. Can pass None to remove the filter.
:type task_ids: str or iterable of strings (representing task_ids)
:param dag_id: If provided, only pulls XComs from this DAG.
If None (default), the DAG of the calling task is used.
:type dag_id: str
:param include_prior_dates: If False, only XComs from the current
execution_date are returned. If True, XComs from previous dates
are returned as well.
:type include_prior_dates: bool
"""
return context['ti'].xcom_pull(
key=key, task_ids=task_ids, dag_id=dag_id, include_prior_dates=include_prior_dates
)
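    # Illustrative sketch (editor's addition): how a custom operator's execute()
    # might use the static xcom_push/xcom_pull helpers above; the task id and key
    # are hypothetical.
    #
    #     def execute(self, context):
    #         values = self.xcom_pull(context, task_ids=['extract'])  # tuple, one entry per task_id
    #         self.xcom_push(context, key='row_count', value=42)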
@cached_property
def extra_links(self) -> List[str]:
"""@property: extra links for the task"""
return list(
set(self.operator_extra_link_dict.keys()).union(self.global_operator_extra_link_dict.keys())
)
def get_extra_links(self, dttm: datetime, link_name: str) -> Optional[Dict[str, Any]]:
"""
For an operator, gets the URL that the external links specified in
`extra_links` should point to.
:raise ValueError: The error message of a ValueError will be passed on through to
            the frontend to show up as a tooltip on the disabled link
:param dttm: The datetime parsed execution date for the URL being searched for
:param link_name: The name of the link we're looking for the URL for. Should be
one of the options specified in `extra_links`
:return: A URL
"""
if link_name in self.operator_extra_link_dict:
return self.operator_extra_link_dict[link_name].get_link(self, dttm)
elif link_name in self.global_operator_extra_link_dict:
return self.global_operator_extra_link_dict[link_name].get_link(self, dttm)
else:
return None
@classmethod
def get_serialized_fields(cls):
"""Stringified DAGs and operators contain exactly these fields."""
if not cls.__serialized_fields:
from airflow.models.dag import DagContext
# make sure the following dummy task is not added to current active
# dag in context, otherwise, it will result in
# `RuntimeError: dictionary changed size during iteration`
# Exception in SerializedDAG.serialize_dag() call.
DagContext.push_context_managed_dag(None)
cls.__serialized_fields = frozenset(
vars(BaseOperator(task_id='test')).keys()
- {
'inlets',
'outlets',
'_upstream_task_ids',
'default_args',
'dag',
'_dag',
'_BaseOperator__instantiated',
}
| {
'_task_type',
'subdag',
'ui_color',
'ui_fgcolor',
'template_fields',
'template_fields_renderers',
}
)
DagContext.pop_context_managed_dag()
return cls.__serialized_fields
def is_smart_sensor_compatible(self):
"""Return if this operator can use smart service. Default False."""
return False
@property
def inherits_from_dummy_operator(self):
"""Used to determine if an Operator is inherited from DummyOperator"""
        # This looks like `isinstance(self, DummyOperator)` would work, but this also
# needs to cope when `self` is a Serialized instance of a DummyOperator or one
# of its sub-classes (which don't inherit from anything but BaseOperator).
return getattr(self, '_is_dummy', False)
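# Illustrative sketch (editor's addition): the minimal surface a concrete operator is
# expected to provide -- a constructor plus execute() -- with template_fields marking
# which attributes get jinja-templated; the operator name and field are hypothetical.
#
#     class HelloOperator(BaseOperator):
#         template_fields = ('name',)
#
#         @apply_defaults
#         def __init__(self, name: str, **kwargs):
#             super().__init__(**kwargs)
#             self.name = name
#
#         def execute(self, context):
#             message = f"Hello {self.name}"
#             self.log.info(message)
#             return message  # pushed as an XCom because do_xcom_push defaults to True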
def chain(*tasks: Union[BaseOperator, Sequence[BaseOperator]]):
r"""
Given a number of tasks, builds a dependency chain.
    Supports a mix of airflow.models.BaseOperator and List[airflow.models.BaseOperator].
    If you want to chain two List[airflow.models.BaseOperator] together, you have to
    make sure they have the same length.
.. code-block:: python
chain(t1, [t2, t3], [t4, t5], t6)
is equivalent to::
          / -> t2 -> t4 \
        t1                -> t6
          \ -> t3 -> t5 /
.. code-block:: python
t1.set_downstream(t2)
t1.set_downstream(t3)
t2.set_downstream(t4)
t3.set_downstream(t5)
t4.set_downstream(t6)
t5.set_downstream(t6)
:param tasks: List of tasks or List[airflow.models.BaseOperator] to set dependencies
:type tasks: List[airflow.models.BaseOperator] or airflow.models.BaseOperator
"""
for index, up_task in enumerate(tasks[:-1]):
down_task = tasks[index + 1]
if isinstance(up_task, BaseOperator):
up_task.set_downstream(down_task)
continue
if isinstance(down_task, BaseOperator):
down_task.set_upstream(up_task)
continue
if not isinstance(up_task, Sequence) or not isinstance(down_task, Sequence):
raise TypeError(
'Chain not supported between instances of {up_type} and {down_type}'.format(
up_type=type(up_task), down_type=type(down_task)
)
)
up_task_list = up_task
down_task_list = down_task
if len(up_task_list) != len(down_task_list):
raise AirflowException(
                f'Chain not supported for iterables of different lengths; '
                f'got {len(up_task_list)} and {len(down_task_list)}'
)
for up_t, down_t in zip(up_task_list, down_task_list):
up_t.set_downstream(down_t)
def cross_downstream(
from_tasks: Sequence[BaseOperator], to_tasks: Union[BaseOperator, Sequence[BaseOperator]]
):
r"""
Set downstream dependencies for all tasks in from_tasks to all tasks in to_tasks.
.. code-block:: python
cross_downstream(from_tasks=[t1, t2, t3], to_tasks=[t4, t5, t6])
is equivalent to::
        t1 ---> t4
           \ /
        t2 -X -> t5
           / \
        t3 ---> t6
.. code-block:: python
t1.set_downstream(t4)
t1.set_downstream(t5)
t1.set_downstream(t6)
t2.set_downstream(t4)
t2.set_downstream(t5)
t2.set_downstream(t6)
t3.set_downstream(t4)
t3.set_downstream(t5)
t3.set_downstream(t6)
:param from_tasks: List of tasks to start from.
:type from_tasks: List[airflow.models.BaseOperator]
:param to_tasks: List of tasks to set as downstream dependencies.
:type to_tasks: List[airflow.models.BaseOperator]
"""
for task in from_tasks:
task.set_downstream(to_tasks)
@attr.s(auto_attribs=True)
class BaseOperatorLink(metaclass=ABCMeta):
"""Abstract base class that defines how we get an operator link."""
operators: ClassVar[List[Type[BaseOperator]]] = []
"""
This property will be used by Airflow Plugins to find the Operators to which you want
to assign this Operator Link
:return: List of Operator classes used by task for which you want to create extra link
"""
@property
@abstractmethod
def name(self) -> str:
"""
Name of the link. This will be the button name on the task UI.
:return: link name
"""
@abstractmethod
def get_link(self, operator: BaseOperator, dttm: datetime) -> str:
"""
Link to external system.
:param operator: airflow operator
:param dttm: datetime
:return: link to external system
"""
avg_line_length: 39.994379 | max_line_length: 118 | alphanum_fraction: 0.639643

hexsha: caf3c1b94f14214ca730cd89c280f151dc2164ee | size: 1,702 | ext: py | lang: Python
max_stars_repo_path: pycon/tests/factories.py | max_stars_repo_name: stevepiercy/pycon | max_stars_repo_head_hexsha: e3bb94206622264b492744d3257a1c0357d7a267 | max_stars_repo_licenses: ["BSD-3-Clause"] | max_stars_count: 1 | max_stars_repo_stars_event_min_datetime: 2016-06-12T08:14:05.000Z | max_stars_repo_stars_event_max_datetime: 2016-06-12T08:14:05.000Z
max_issues_repo_path: pycon/tests/factories.py | max_issues_repo_name: alex/pycon | max_issues_repo_head_hexsha: d1437a9f2ac1ec4f4fd5ad41ef3a7fe06958b52b | max_issues_repo_licenses: ["BSD-3-Clause"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: pycon/tests/factories.py | max_forks_repo_name: alex/pycon | max_forks_repo_head_hexsha: d1437a9f2ac1ec4f4fd5ad41ef3a7fe06958b52b | max_forks_repo_licenses: ["BSD-3-Clause"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
import random
import factory
import factory.django
import factory.fuzzy
from django.contrib.auth import models as auth
from pycon.models import PyConProposalCategory, PyConProposal, \
PyConTalkProposal, PyConTutorialProposal
from symposion.proposals.tests.factories import ProposalKindFactory, \
ProposalBaseFactory
class UserFactory(factory.django.DjangoModelFactory):
FACTORY_FOR = auth.User
username = factory.fuzzy.FuzzyText()
first_name = factory.fuzzy.FuzzyText()
last_name = factory.fuzzy.FuzzyText()
email = factory.Sequence(lambda n: 'user{}@example.com'.format(n))
class PyConProposalCategoryFactory(factory.django.DjangoModelFactory):
FACTORY_FOR = PyConProposalCategory
class PyConProposalFactory(ProposalBaseFactory):
FACTORY_FOR = PyConProposal
ABSTRACT_FACTORY = True
category = factory.SubFactory(PyConProposalCategoryFactory)
audience_level = factory.LazyAttribute(lambda a: random.choice([1, 2, 3]))
class PyConTalkProposalFactory(PyConProposalFactory):
FACTORY_FOR = PyConTalkProposal
duration = 0
kind = factory.SubFactory(ProposalKindFactory,
name="talk",
slug="talk")
outline = "outline"
audience = "audience"
perceived_value = "perceived_value"
class PyConTutorialProposalFactory(PyConProposalFactory):
FACTORY_FOR = PyConTutorialProposal
kind = factory.SubFactory(ProposalKindFactory,
name="tutorial",
slug="tutorial")
domain_level = 1
outline = "outline"
more_info = "more info"
audience = "audience"
perceived_value = "perceived_value"
avg_line_length: 27.451613 | max_line_length: 78 | alphanum_fraction: 0.712103