hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
70e52df4dc9953cd6ed3848558c135eb3f637b16 | 1,984 | py | Python | nbpkg/pkginspect/nbpkgdescr.py | kiaderouiche/nbpkgquery | 17c996398fad922276c2dc250392959e7bfc31f0 | [
"MIT"
] | 1 | 2017-05-27T13:30:41.000Z | 2017-05-27T13:30:41.000Z | nbpkg/pkginspect/nbpkgdescr.py | kiaderouiche/nbpkgquery | 17c996398fad922276c2dc250392959e7bfc31f0 | [
"MIT"
] | null | null | null | nbpkg/pkginspect/nbpkgdescr.py | kiaderouiche/nbpkgquery | 17c996398fad922276c2dc250392959e7bfc31f0 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
'''
nbpkg defspec
'''
# Gzip magic number of a NetBSD binary package file, as raw bytes.
NBPKG_MAGIC_NUMBER = b'\x1f\x8b'
# The same gzip magic expressed as an octal-escaped text string.
NBPKG_HEADER_MAGIC_NUMBER = '\037\213'
# Valid range of NBPKGINFO_* tag numbers.
NBPKGINFO_MIN_NUMBER = 1000
NBPKGINFO_MAX_NUMBER = 1146
# data types definition
NBPKG_DATA_TYPE_NULL = 0
NBPKG_DATA_TYPE_CHAR = 1
NBPKG_DATA_TYPE_INT8 = 2
NBPKG_DATA_TYPE_INT16 = 3
NBPKG_DATA_TYPE_INT32 = 4
NBPKG_DATA_TYPE_INT64 = 5
NBPKG_DATA_TYPE_STRING = 6
NBPKG_DATA_TYPE_BIN = 7
NBPKG_DATA_TYPE_STRING_ARRAY = 8
NBPKG_DATA_TYPE_I18NSTRING_TYPE = 9
# Tuple of all recognised data types.
# NOTE(review): NBPKG_DATA_TYPE_I18NSTRING_TYPE is missing from this
# tuple -- confirm whether that omission is intentional.
NBPKG_DATA_TYPES = (NBPKG_DATA_TYPE_NULL,
                    NBPKG_DATA_TYPE_CHAR,
                    NBPKG_DATA_TYPE_INT8,
                    NBPKG_DATA_TYPE_INT16,
                    NBPKG_DATA_TYPE_INT32,
                    NBPKG_DATA_TYPE_INT64,
                    NBPKG_DATA_TYPE_STRING,
                    NBPKG_DATA_TYPE_BIN,
                    NBPKG_DATA_TYPE_STRING_ARRAY,)
# Package-info tag numbers.
# NOTE(review): many tags below share the value 1000 (DISTNAME, PKGNAME,
# CATEGORY, MAINTAINER, COMMENT, LICENSE, OS_VERSION, SIZE_PKG) and two
# pairs share 1001/1005 -- as written they cannot be told apart by value.
# Verify these numbers against the real pkg_install tag numbering.
NBPKGINFO_DISTNAME = 1000
NBPKGINFO_PKGNAME = 1000
NBPKGINFO_CATEGORY = 1000
NBPKGINFO_MAINTAINER = 1000
NBPKGINFO_HOMEPAGE = 1020
NBPKGINFO_COMMENT = 1000
NBPKGINFO_LICENSE = 1000
NBPKGINFO_VERSION = 1001
NBPKGINFO_RELEASE = 1002
NBPKGINFO_DESCRIPTION = 1005
NBPKGINFO_LONG_DESCRIPTION = 1005
NBPKGINFO_OS_VERSION = 1000
NBPKGINFO_COPYRIGHT = 1014
NBPKGINFO_SIZE_PKG = 1000
NBPKGINFO_MACHINE_ARCH = 1022
# Tags exposed as a tuple (DESCRIPTION and COPYRIGHT are not listed).
NBPKGINFOS = (
    NBPKGINFO_DISTNAME,
    NBPKGINFO_PKGNAME,
    NBPKGINFO_CATEGORY,
    NBPKGINFO_MAINTAINER,
    NBPKGINFO_HOMEPAGE,
    NBPKGINFO_COMMENT,
    NBPKGINFO_LICENSE,
    NBPKGINFO_VERSION,
    NBPKGINFO_RELEASE,
    NBPKGINFO_LONG_DESCRIPTION,
    NBPKGINFO_OS_VERSION,
    NBPKGINFO_SIZE_PKG,
    NBPKGINFO_MACHINE_ARCH,
    )
# Map of well-known metadata member names inside a package archive.
# NOTE(review): the ``dict()`` assignment below is immediately overwritten
# by the literal that follows -- it is redundant and could be removed.
NBPKG_HEADER_BASIC_FILES = dict()
NBPKG_HEADER_BASIC_FILES = {
    'NBPKG_BUILD_INFO':'+BUILD_INFO',
    'NBPKG_BUILD_VERSION':'+BUILD_VERSION',
    'NBPKG_COMMENT':'+COMMENT',
    'NBPKG_CONTENTS':'+CONTENTS',
    'NBPKG_DESC':'+DESC',
    'NBPKG_SIZE_ALL':'+SIZE_ALL',
    'NBPKG_SIZE_PKG':'+SIZE_PKG',
}
| 25.435897 | 48 | 0.703629 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 281 | 0.141633 |
70e5936d22684ae5af27f5e2945439d629f68c79 | 15,699 | py | Python | utils/speakerid.py | JaejinCho/espnet | a52bdebb08558b63df23564d6e67dfcba8a41d78 | [
"Apache-2.0"
] | 4 | 2020-10-28T00:34:21.000Z | 2021-08-02T05:43:59.000Z | utils/speakerid.py | JaejinCho/espnet_spkidtts | a52bdebb08558b63df23564d6e67dfcba8a41d78 | [
"Apache-2.0"
] | null | null | null | utils/speakerid.py | JaejinCho/espnet_spkidtts | a52bdebb08558b63df23564d6e67dfcba8a41d78 | [
"Apache-2.0"
] | null | null | null | import torch
import torch.nn as nn
import math
import torch.utils.model_zoo as model_zoo
import numpy as np
from scipy.fftpack import dct, idct
from scipy import linalg as la
import torch.nn.functional as F
import logging
def conv3x3(in_planes, out_planes, stride=1):
    """Build a bias-free 3x3 2-D convolution; padding=1 keeps H/W when stride=1."""
    conv = nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=3,
        stride=stride,
        padding=1,
        bias=False,
    )
    return conv
class BasicBlock(nn.Module):
    """Two-convolution residual block (the pre-bottleneck ResNet variant)."""
    expansion = 1
    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(BasicBlock, self).__init__()
        # First 3x3 conv may stride to down-sample spatially.
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=3,
                               stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        # Second 3x3 conv always keeps the spatial size.
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
                               stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.downsample = downsample
        self.stride = stride
    def forward(self, x):
        # Shortcut branch: identity, or a projection when shapes differ.
        shortcut = x if self.downsample is None else self.downsample(x)
        y = self.relu(self.bn1(self.conv1(x)))
        y = self.bn2(self.conv2(y))
        y = y + shortcut
        return self.relu(y)
class ResNet(nn.Module):
    '''Thin ResNet front-end mapping a (batch, time, freq) feature map to a
    (batch, time', 128) sequence of frame-level embeddings.

    :param: int AvgPool2d_fre_ksize: kernel size of the final AvgPool2d along
        the frequency axis.  Layers 1-4 down-sample frequency by a factor of
        8, so for an 80-dim fbank input this should be 80/8 = 10.  The
        default of 3 comes from Nanxin's original code, which used 23-dim
        mfcc/filter-bank features (ceil(23/8) = 3).

    NOTE(review): the original docstring called this kwarg ``AvgPool2d_fre``
    but the code reads ``kwargs['AvgPool2d_fre_ksize']``.
    '''
    def __init__(self, block, layers, **kwargs):
        self.inplanes = 16
        super(ResNet, self).__init__()
        # Input is treated as a 1-channel "image": (batch, 1, time, freq).
        self.conv1 = nn.Conv2d(1, 16, kernel_size=7, stride=1, padding=3,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(16)
        self.relu = nn.ReLU(inplace=True)
        #self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        # Four residual stages; stages 2-4 halve both time and frequency.
        self.layer1 = self._make_layer(block, 16, layers[0])
        self.layer2 = self._make_layer(block, 32, layers[1], stride=2)
        self.layer3 = self._make_layer(block, 64, layers[2], stride=2)
        self.layer4 = self._make_layer(block, 128, layers[3], stride=2)
        # Average-pool only along frequency (kernel (1, k)); time is kept.
        if 'AvgPool2d_fre_ksize' in kwargs:
            self.avgpool = nn.AvgPool2d((1, kwargs['AvgPool2d_fre_ksize']))
        else:
            self.avgpool = nn.AvgPool2d((1, 3)) # Nanxin's original code
        #self.fc = nn.Linear(512 * block.expansion, num_classes)
        # He initialisation for convs, unit-gamma / zero-beta for BatchNorm.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
    def _make_layer(self, block, planes, blocks, stride=1):
        # Project the shortcut with a 1x1 conv when stride or width changes.
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion),
            )
        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample))
        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes))
        return nn.Sequential(*layers)
    def forward(self, x):
        # (batch, time, freq) -> (batch, 1, time, freq) for Conv2d.
        x = x.view(x.size(0), 1, x.size(1), x.size(2))
        #print(x.shape)
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        #print(x.shape)
        #x = self.maxpool(x)
        x = self.layer1(x)
        #print(x.shape)
        x = self.layer2(x)
        #print(x.shape)
        x = self.layer3(x)
        #print(x.shape)
        x = self.layer4(x)
        #print(x.shape)
        x = self.avgpool(x)
        #print(x.shape)
        # Drop the pooled frequency axis, return (batch, time', channels).
        x = x.view(x.size(0), x.size(1), x.size(2)).permute(0, 2, 1)
        return x
def resnet34(**kwargs):
    """Build a ResNet-34 feature extractor (3-4-6-3 BasicBlock layout).

    Keyword arguments are forwarded to ``ResNet`` (e.g.
    ``AvgPool2d_fre_ksize``).
    """
    return ResNet(BasicBlock, [3, 4, 6, 3], **kwargs)
class LDE(nn.Module):
    """Learnable Dictionary Encoding / attentive pooling layer.

    Pools a (batch, time, input_dim) sequence of frame features into one
    fixed-size utterance vector using ``D`` learned dictionary components.

    Args:
        D: number of dictionary components.
        input_dim: per-frame feature dimension.
        with_bias: add a learnable per-component bias to assignment logits.
        distance_type: 'norm' (L2 distance) or 'sqr' (squared L2).
        network_type: 'att' or 'lde' (selects the softmax form below).
        pooling: 'mean', or 'mean+std' (concatenates a second-moment term).
        regularization: unsupported; must be None.
    """
    def __init__(self, D, input_dim, with_bias=False, distance_type='norm', network_type='att', pooling='mean', regularization=None):
        super(LDE, self).__init__()
        # Dictionary of D component centres, uniformly initialised in [-1, 1].
        self.dic = nn.Parameter(torch.randn(D, input_dim))
        nn.init.uniform_(self.dic.data, -1, 1)
        # Per-component scaling of the (negative) distance logits.
        self.wei = nn.Parameter(torch.ones(D))
        if with_bias:
            self.bias = nn.Parameter(torch.zeros(D))
        else:
            self.bias = 0
        assert distance_type == 'norm' or distance_type == 'sqr'
        if distance_type == 'norm':
            self.dis = lambda x: torch.norm(x, p=2, dim=-1)
        else:
            self.dis = lambda x: torch.sum(x**2, dim=-1)
        assert network_type == 'att' or network_type == 'lde'
        # NOTE(review): 'att' normalises over dim=-2 (time) while 'lde'
        # normalises over dim=-1 (components), and 'lde' squares the
        # weights -- confirm this asymmetry is intentional.
        if network_type == 'att':
            self.norm = lambda x: F.softmax(-self.dis(x) * self.wei + self.bias, dim = -2)
        else:
            self.norm = lambda x: F.softmax(-self.dis(x) * (self.wei ** 2) + self.bias, dim = -1)
        assert pooling == 'mean' or pooling == 'mean+std'
        self.pool = pooling
        if regularization is None:
            self.reg = None
        else:
            raise NotImplementedError()
    def forward(self, x):
        # r: residual of every frame against every dictionary component,
        # shape (batch, time, D, input_dim).
        r = x.view(x.size(0), x.size(1), 1, x.size(2)) - self.dic
        # w: soft assignment weights, re-normalised over time; the epsilon
        # guards against division by zero for degenerate assignments.
        w = self.norm(r).view(r.size(0), r.size(1), r.size(2), 1)
        w = w / (torch.sum(w, dim=1, keepdim=True) + 1e-9) #batch_size, timesteps, component
        if self.pool == 'mean':
            x = torch.sum(w * r, dim=1)
        else:
            # Weighted mean plus the root of the weighted second moment.
            x1 = torch.sum(w * r, dim=1)
            x2 = torch.sqrt(torch.sum(w * r ** 2, dim=1)+1e-8)
            x = torch.cat([x1, x2], dim=-1)
        return x.view(x.size(0), -1)
# Model
class E2E_speakerid(nn.Module):
    """ResNet + LDE speaker-identification network.

    Produces utterance-level speaker embeddings (via ``predict``) and, in
    ``forward``, either plain log-softmax speaker posteriors or the
    ``(cos_theta, phi_theta)`` pair consumed by ``AngleLoss`` when
    ``asoftmax`` is enabled.

    Args:
        input_dim: input feature dimension (e.g. number of fbank bins).
        output_dim: number of speaker classes.
        Q: unused here; kept for the commented-out liftering front-end.
        D: number of LDE dictionary components.
        hidden_dim: size of the embedding bottleneck (fc1 output).
        asoftmax: if True, ``forward`` returns A-softmax logits.
        resnet_AvgPool2d_fre_ksize: frequency kernel of ResNet's final pool.
    """
    def __init__(self, input_dim, output_dim, Q, D, hidden_dim=128, distance_type='norm', network_type='att', pooling='mean', regularization=None, asoftmax=False, resnet_AvgPool2d_fre_ksize = 3):
        super(E2E_speakerid, self).__init__()
        #self.lift = nn.Parameter(torch.from_numpy(1./_make_liftering(input_dim, Q)), requires_grad=False)
        #self.dct = nn.Parameter(torch.from_numpy(_make_dct(input_dim, input_dim, inv=True, normalize=True)))
        self.res = resnet34(AvgPool2d_fre_ksize = resnet_AvgPool2d_fre_ksize)
        # ResNet emits 128-dim frame features, pooled over time by LDE.
        self.pool = LDE(D, 128, distance_type=distance_type, network_type=network_type, pooling=pooling, regularization=regularization, with_bias=False)
        # 'mean+std' pooling doubles the pooled feature size.
        if pooling=='mean':
            self.fc1 = nn.Linear(128*D, hidden_dim)
        if pooling=='mean+std':
            self.fc1 = nn.Linear(256*D, hidden_dim)
        self.bn1 = nn.BatchNorm1d(hidden_dim)
        self.fc2 = nn.Linear(hidden_dim, output_dim)
        logging.info("num speakers (spkid module's output dim): {}".format(output_dim))
        self.asoftmax = asoftmax
    def forward(self, x):
        #x = x * self.lift
        #x = F.linear(x, self.dct)
        x = self.res(x)
        x = self.pool(x)
        x = self.fc1(x)
        x_emb = self.bn1(x)
        if self.asoftmax:
            # A-softmax (SphereFace-style) head; the classifier weights are
            # length-normalised via renorm before computing cosines.
            w = torch.transpose(self.fc2.weight, 0, 1) # size=(F,Classnum) F=in_features Classnum=out_features
            ww = w.renorm(2,1,1e-5).mul(1e5)
            xlen = x_emb.pow(2).sum(1).pow(0.5) # size=B
            wlen = ww.pow(2).sum(0).pow(0.5) # size=Classnum
            cos_theta = x_emb.mm(ww) # size=(B,Classnum)
            cos_theta = cos_theta / xlen.view(-1,1) / wlen.view(1,-1)
            cos_theta = cos_theta.clamp(-1,1)
            # cos(2*theta) = 2*cos^2(theta) - 1, i.e. the margin m=2 case.
            self.mlambda = lambda x: 2*x**2-1
            cos_m_theta = self.mlambda(cos_theta)
            # NOTE(review): torch.cuda.FloatTensor forces CUDA here, so this
            # asoftmax path fails on CPU-only runs -- verify intended.
            theta = torch.cuda.FloatTensor(cos_theta.data.acos())
            # k counts which half-pi segment theta falls in, making
            # phi(theta) monotonically decreasing over [0, pi].
            k = (2*theta/3.14159265).floor()
            n_one = k*0.0 - 1
            phi_theta = (n_one**k) * cos_m_theta - 2*k
            # Re-apply the embedding norm so logits are |x| * cos / phi.
            cos_theta = cos_theta * xlen.view(-1,1)
            phi_theta = phi_theta * xlen.view(-1,1)
            return x_emb, (cos_theta, phi_theta)
        else:
            x_emb = F.relu(x_emb)
            x_emb = self.fc2(x_emb)
            return F.log_softmax(x_emb, dim=-1)
    def predict(self, x):
        """Return the bottleneck (fc1 + bn1) speaker embedding for features x."""
        #x = x * self.lift
        #x = F.linear(x, self.dct)
        x = self.res(x)
        x = self.pool(x)
        if type(x) is tuple:
            x = x[0]
        x = self.fc1(x)
        x = self.bn1(x)
        return x
# Loss
class AngleLoss(nn.Module):
    """A-Softmax (SphereFace-style) angular-margin loss.

    Consumes the ``(cos_theta, phi_theta)`` pair produced by
    ``E2E_speakerid.forward`` when ``asoftmax`` is enabled, annealing from
    plain softmax toward the margin-augmented logits as training proceeds.

    Args:
        gamma: focal-loss focusing parameter; 0 disables focusing.
    """
    def __init__(self, gamma=0):
        super(AngleLoss, self).__init__()
        self.gamma = gamma
        self.it = 0  # iteration counter driving the lambda annealing schedule
        self.LambdaMin = 5.0
        self.LambdaMax = 1500.0
        self.lamb = 1500.0
    def forward(self, input, target):
        """Compute the mean annealed angular-margin loss over the batch.

        Args:
            input: tuple ``(cos_theta, phi_theta)``, each of size (B, Classnum).
            target: integer class labels, size (B,) or (B, 1).

        Returns:
            Scalar loss tensor (mean over the batch).
        """
        self.it += 1
        cos_theta, phi_theta = input
        target = target.view(-1, 1)  # size=(B,1)
        # One-hot boolean mask of the target class for each row.
        # FIX: the original built a float tensor and called .byte();
        # indexing with byte tensors is deprecated in modern PyTorch, so a
        # bool mask is created directly instead.
        index = torch.zeros_like(cos_theta, dtype=torch.bool)
        index.scatter_(1, target.data.view(-1, 1), True)
        # Anneal lamb from LambdaMax down toward LambdaMin over iterations.
        self.lamb = max(self.LambdaMin, self.LambdaMax / (1 + 0.01 * self.it))
        # Blend the target-class logit between cos(theta) and the
        # margin-penalised phi(theta); non-target logits are untouched.
        output = cos_theta * 1.0  # size=(B,Classnum); copy keeps the graph
        output[index] -= cos_theta[index] * (1.0 + 0) / (1 + self.lamb)
        output[index] += phi_theta[index] * (1.0 + 0) / (1 + self.lamb)
        # Negative log-likelihood of the target class ...
        logpt = F.log_softmax(output, dim=-1)
        logpt = logpt.gather(1, target)
        logpt = logpt.view(-1)
        pt = logpt.exp().detach()
        # ... with focal-loss weighting, averaged over the batch.
        loss = -1 * (1 - pt) ** self.gamma * logpt
        loss = loss.mean()
        return loss
# decoding: generate speaker embeddings from a trained model
import json
import os
from espnet.utils.deterministic_utils import set_deterministic_pytorch
from espnet.asr.asr_utils import get_model_conf
from espnet.utils.dynamic_import import dynamic_import
from espnet.nets.tts_interface import TTSInterface
from espnet.asr.asr_utils import torch_load
from utils.speakerid_kaldi_io import read_mat_scp
from espnet.utils.io_utils import LoadInputsAndTargets
from espnet.utils.training.batchfy import make_batchset
from espnet.utils.training.iterators import ToggleableShufflingSerialIterator
from chainer.datasets import TransformDataset
from espnet.tts.pytorch_backend.tts import CustomConverter
def decode(args):
    """Generate speaker embeddings with a trained E2E-TTS model.

    Rebuilds the network described by the saved training configuration,
    restores its weights, and writes one embedding per utterance listed in
    ``args.feat_scp`` to ``args.out_file``.
    """
    set_deterministic_pytorch(args)
    # Recover the training-time configuration so the model can be rebuilt.
    idim, odim, train_args = get_model_conf(args.model, args.model_conf)
    # Log every CLI argument for reproducibility.
    for name in sorted(vars(args)):
        logging.info('args: ' + name + ': ' + str(vars(args)[name]))
    # Instantiate the network class named in the training config.
    model_class = dynamic_import(train_args.model_module)
    model = model_class(idim, odim, train_args)
    assert isinstance(model, TTSInterface)
    logging.info(model)
    # Restore trained weights and switch to inference mode.
    logging.info('reading model parameters from ' + args.model)
    torch_load(args.model, model)
    model.eval()
    # Place the model on GPU when requested, otherwise CPU.
    device = torch.device("cuda" if args.ngpu > 0 else "cpu")
    model = model.to(device)
    # Emit embeddings from the speaker-id sub-network.
    SequenceGenerator(model.resnet_spkid, device, args.feat_scp, args.out_file)
def SequenceGenerator(model, device, feat_scp, out_file):
    """Write a Kaldi-style text archive of speaker embeddings.

    Args:
        model: speaker-id module exposing ``predict`` (e.g. ``E2E_speakerid``).
        device: torch device on which inference runs.
        feat_scp: path to a Kaldi ``feats.scp`` mapping utt-id -> feature matrix.
        out_file: output text file; one ``<utt-id> [ v1 v2 ... ]`` line per utt.
    """
    # FIX: use a ``with`` block so the output file is closed even when
    # prediction raises (the original leaked the handle on error).
    with open(out_file, 'w') as f, torch.no_grad():
        for lab, x in read_mat_scp(feat_scp):
            # Add a batch dimension and move the features to the device.
            feats = torch.from_numpy(np.array([x], dtype=np.float32)).to(device)
            y_pred = model.predict(feats).cpu().data.numpy().flatten()
            f.write(lab + ' [ ' + ' '.join(map(str, y_pred.tolist())) + ' ]\n')
#def classification_acc(model, ):
# pred = spkid_out[0].max(1, keepdim=True)[1] # JJ (TODO) : currently values are ordered in a same way for both cos_theta and logp (Just following Nanxin's suggestion but need to check it)
# correct = += pred.eq(spklabs.view_as(pred)).sum().item()
# spkid_acc = correct/float(spklabs.shape[0])
# return
def eval_spkidclassification(args):
    """Evaluate speaker-classification accuracy of a trained E2E-TTS model.

    Rebuilds the model from its saved training configuration, iterates over
    the utterances described in ``args.json``, and logs the fraction of
    correctly classified speakers.
    """
    set_deterministic_pytorch(args)
    # read training config
    idim, odim, train_args = get_model_conf(args.model, args.model_conf)
    # show arguments
    for key in sorted(vars(args).keys()):
        logging.info('args: ' + key + ': ' + str(vars(args)[key]))
    # define model
    model_class = dynamic_import(train_args.model_module)
    model = model_class(idim, odim, train_args)
    assert isinstance(model, TTSInterface)
    logging.info(model)
    # load trained model parameters
    logging.info('reading model parameters from ' + args.model)
    torch_load(args.model, model)
    model.eval()
    # set torch device
    device = torch.device("cuda" if args.ngpu > 0 else "cpu")
    model = model.to(device)
    # read json data
    with open(args.json, 'rb') as f:
        valid_json = json.load(f)['utts']
    # define iteratior
    # Loader mirrors the training-time preprocessing unless a preprocess
    # config is given explicitly on the command line.
    load_cv = LoadInputsAndTargets(
        mode='tts', sort_in_input_length=False,
        use_speaker_embedding=train_args.use_speaker_embedding,
        train_spkid_extractor=train_args.train_spkid_extractor,
        preprocess_conf=train_args.preprocess_conf
        if args.preprocess_conf is None else args.preprocess_conf,
        preprocess_args={'train': False}  # Switch the mode of preprocessing
    )
    ### JJ - added
    ## iterator related
    # Batching reuses the training-time limits; swap_io=True because in TTS
    # mode inputs/outputs are transposed relative to ASR.
    valid_batchset = make_batchset(valid_json, args.batch_size,
                                   train_args.maxlen_in, train_args.maxlen_out, train_args.minibatches,
                                   batch_sort_key=train_args.batch_sort_key,
                                   min_batch_size=train_args.ngpu if train_args.ngpu > 1 else 1,
                                   count=train_args.batch_count,
                                   batch_bins=train_args.batch_bins,
                                   batch_frames_in=train_args.batch_frames_in,
                                   batch_frames_out=train_args.batch_frames_out,
                                   batch_frames_inout=train_args.batch_frames_inout,
                                   swap_io=True, iaxis=0, oaxis=0)
    valid_iter = ToggleableShufflingSerialIterator(
        TransformDataset(valid_batchset, load_cv),
        batch_size=1, repeat=False, shuffle=False)
    ## converter
    converter = CustomConverter()
    ### JJ - added end
    with torch.no_grad():
        total_correct = 0
        total_samples = 0
        for batch in valid_iter:
            # convert to torch tensor
            x = converter(batch, device)
            # With spk_clss_eval_on='on' the model returns a
            # (num_correct, num_samples) pair instead of a loss.
            if isinstance(x, tuple):
                num_correct, num_samples = model(*x, spk_clss_eval_on='on')
            else:
                num_correct, num_samples = model(**x, spk_clss_eval_on='on')
            total_correct += num_correct
            total_samples += num_samples
    # Sanity check: every utterance in the batchset must have been scored.
    actual_samples = sum(list(map(len,valid_batchset)))
    assert total_samples == actual_samples, "total samples: {} & actual samples: {}".format(total_samples, actual_samples)
    logging.warning("Classification accuracy on {} utterances: {}".format(total_samples, total_correct/float(total_samples)))
    #return total_correct, total_samples
| 39.946565 | 195 | 0.607873 | 9,451 | 0.602013 | 0 | 0 | 0 | 0 | 0 | 0 | 3,015 | 0.19205 |
70e60e00a82583d2b84159dd671f305b0d950716 | 642 | py | Python | import_data.py | plug8955/python-mysql-excel | 96bc11104fc2c488c8825b2c19a086a50f50153f | [
"MIT"
] | null | null | null | import_data.py | plug8955/python-mysql-excel | 96bc11104fc2c488c8825b2c19a086a50f50153f | [
"MIT"
] | null | null | null | import_data.py | plug8955/python-mysql-excel | 96bc11104fc2c488c8825b2c19a086a50f50153f | [
"MIT"
] | 1 | 2022-02-14T12:41:19.000Z | 2022-02-14T12:41:19.000Z | from openpyxl import load_workbook
import mysql.connector
# Excel
workbook = load_workbook('imported.xlsx')
sheet = workbook.active
values = []
for row in sheet.iter_rows(min_row=2, values_only=True):
print(row)
values.append(row)
# Database
db = mysql.connector.connect(
host='localhost',
port=3306,
user='root',
password='Admin@123',
database='Plug_want_to_buy'
)
curses = db.cursor()
sql = '''
insert into products (title, price, is_necessary)
values (%s, %s, %s);
'''
curses.executemany(sql, values)
db.commit()
print('เพิ่มข้อมูลจำนวน ' + str(curses.rowcount) + ' แถว')
curses.close()
db.close() | 19.454545 | 58 | 0.680685 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 227 | 0.333824 |
70e6640bc54f7fa74215cf2923affbdce6cc10d3 | 7,492 | py | Python | pytorch_pfn_extras/nn/modules/lazy.py | kmaehashi/pytorch-pfn-extras | 70b5db0dad8a8e342cc231e8a18c6f32ce250d1c | [
"MIT"
] | null | null | null | pytorch_pfn_extras/nn/modules/lazy.py | kmaehashi/pytorch-pfn-extras | 70b5db0dad8a8e342cc231e8a18c6f32ce250d1c | [
"MIT"
] | null | null | null | pytorch_pfn_extras/nn/modules/lazy.py | kmaehashi/pytorch-pfn-extras | 70b5db0dad8a8e342cc231e8a18c6f32ce250d1c | [
"MIT"
] | null | null | null | # mypy: ignore-errors
import inspect
from typing import Tuple
import warnings
import torch
class LazyInitializationMixin:
    """A mixin for modules that lazily initialize buffers and parameters.
    Unlike regular modules, subclasses of this module can initialize
    buffers and parameters outside of the constructor (``__init__``).
    This allows you to, for example, initialize parameters in ``forward``
    method to determine the shape of the weight based on the initial input.
    Be sure to run "dummy" forward once to initialize all parameters that
    should be trained, before passing ``module.parameters()`` to an optimizer;
    otherwise weights initialized after ``module.parameters()`` (e.g., in
    ``forward`` function) will never be trained.
    Note that lazy modules cannot validate if the shape is correct during
    deserialization. Also note that the initial weights may become different
    from the original (non-lazy) module even if the random seed is manually
    configured, as the order of initialization is different from the original
    one; especially, ``module.cuda()`` may cause the initialization to run on
    a GPU.
    The default value of lazy buffers and parameters are ``torch.Tensor([])``
    and ``UninitializedParameter()``, respectively.
    """
    # Subclasses must override these fields and list names of all buffers /
    # parameters that will be initialized lazily.
    lazy_buffer_names: Tuple[str, ...] = ()
    lazy_parameter_names: Tuple[str, ...] = ()
    def __init__(self, *args, **kwargs):
        # Guard flag: properties must not report readiness before the lazy
        # placeholders below have been registered.
        self._lazy_ready = False
        super().__init__(*args, **kwargs)
        # Register placeholders: an empty tensor for each lazy buffer and a
        # sentinel parameter subclass for each lazy parameter.
        for name in self.lazy_buffer_names:
            self.register_buffer(name, torch.Tensor([]))
        for name in self.lazy_parameter_names:
            self.register_parameter(name, UninitializedParameter())
        # Reconcile lazy state with incoming checkpoints on load.
        self._register_load_state_dict_pre_hook(self._lazy_load_hook)
        self._lazy_ready = True
    @property
    def lazy_parmeters_determined(self):
        """Returns if all lazy parameters are determined.
        Subclasses can perform parameters initialization after all lazy
        parameters are determined. Note that this may be called during
        ``__init__``.

        NOTE(review): "parmeters" is a typo, but the name is public API and
        is therefore kept as-is.
        """
        return self._lazy_ready and all([
            not isinstance(getattr(self, x), UninitializedParameter)
            for x in self.lazy_parameter_names])
    def state_dict(self, *args, **kwargs):
        """Returns a dictionary containing a whole state of the module.
        This function overrides the default behavior to exclude uninitialized
        parameter from serialization. This is needed because we need to
        discriminate lazy parameters (``UninitializedParameter()`) and
        initialized empty parameters (``torch.nn.Parameter(torch.Tensor())``)
        during deserialization.
        See comments of ``_lazy_load_hook`` for details.
        """
        destination = super().state_dict(*args, **kwargs)
        # Drop still-uninitialized lazy parameters from the serialized state.
        for name in self.lazy_parameter_names:
            if isinstance(getattr(self, name), UninitializedParameter):
                del destination[name]
        return destination
    def _lazy_load_hook(
            self, state_dict, prefix, local_metadata, strict,
            missing_keys, unexpected_keys, error_msgs):
        """load_state_dict pre-hook function for lazy buffers and parameters.
        The purpose of this hook is to check the current state and/or
        ``state_dict`` being loaded and ensure that both are states
        are properly initialized.
        See comment in ``torch.nn.Module._register_load_state_dict_pre_hook``
        for the details of the hook specification.
        """
        for name in self.lazy_buffer_names:
            key = prefix + name
            # A lazy buffer is "uninitialized" while its shape is still (0,).
            module_initialized = getattr(self, name).shape != (0,)
            state_initialized = state_dict[key].shape != (0,)
            if module_initialized and not state_initialized:
                raise RuntimeError(
                    'Can\'t load non-initialized buffers in already '
                    'initialized modules')
            elif not module_initialized and state_initialized:
                # Here we need to avoid a tensor size mismatch
                # this is a regular tensor without a materialize
                # method, so we can just resize for the load logic to copy
                # the contents later to the correct device the module
                # was moved to
                getattr(self, name).resize_(state_dict[key].size())
        for name in self.lazy_parameter_names:
            # The parameter does not exist in the loaded ``state_dict`` if the
            # original module was serialized before initializing lazy
            # parameters (see comments of ``state_dict``).
            key = prefix + name
            module_initialized = not isinstance(
                getattr(self, name), UninitializedParameter)
            state_initialized = key in state_dict
            if module_initialized and not state_initialized:
                raise RuntimeError(
                    'Can\'t load uninitialized parameters in already '
                    'initialized modules')
            elif not module_initialized and state_initialized:
                # Allocate this parameter to match the checkpointed shape.
                getattr(self, name).materialize(state_dict[key].shape)
            elif key not in state_dict and not module_initialized:
                # Both sides lazy: keep the sentinel so strict loading passes.
                param = UninitializedParameter()
                state_dict[key] = param
class UninitializedParameter(torch.nn.Parameter):
    """Sentinel parameter marking a lazily-initialized weight.

    Instances replace themselves with a plain ``torch.nn.Parameter`` when
    ``materialize`` is called (see ``LazyInitializationMixin``).
    """
    def __repr__(self):
        return 'Uninitialized lazy parameter'
    def share_memory_(self):
        # Sharing memory of a placeholder makes no sense; force the user to
        # run a forward pass first so real storage exists.
        raise RuntimeError(
            'Can\'t share memory on an unitialized parameter. '
            'Run forward to initialize the network before calling '
            '`module.share_memory()`.')
    @property
    def is_leaf(self):
        # Hacky workaround to detect use of uninitialized lazy parameters.
        # This overrides ``is_leaf`` attribute which should always be ``True``
        # for parameters; optimizers check for this attribute and raise an
        # error if non-leaf tensors are detected.
        frame = inspect.currentframe()
        if frame.f_back.f_globals['__package__'].startswith('torch.optim'):
            warnings.warn('''
    Use of uninitialized lazy parameter in Optimizer has been detected.
    Maybe you forgot to run forward before passing `module.parameters()` to the optimizer?''') # NOQA
        return True
    def materialize(self, shape, device=None, dtype=None):
        r"""Create a Parameter with the same properties of the uninitialized
        one. Given a shape, it materializes a parameter in the same device
        and with the same `dtype` as the current one or the specified ones in
        the arguments.
        Args:
            shape : (tuple): the shape for the materialized tensor.
            device (:class:`torch.device`): the desired device of the
                parameters
                and buffers in this module. Optional.
            dtype (:class:`torch.dtype`): the desired floating point type of
                the floating point parameters and buffers in this module.
                Optional.
        """
        if device is None:
            device = self.data.device
        if dtype is None:
            dtype = self.data.dtype
        # Allocate real (uninitialized-value) storage and demote this object
        # to a plain Parameter so optimizer checks pass from now on.
        self.data = torch.empty(shape, device=device, dtype=dtype)
        self.__class__ = torch.nn.Parameter
| 43.306358 | 102 | 0.654031 | 7,393 | 0.986786 | 0 | 0 | 1,098 | 0.146556 | 0 | 0 | 4,188 | 0.558996 |
70e6d36077cbded389cedfadb9bbd0925029daf4 | 7,286 | py | Python | tensorflow/python/keras/distribute/mnist_multi_worker.py | 6paklata/tensorflow | d6464431256192d2cf1c9b20271792309af7a354 | [
"Apache-2.0"
] | 2 | 2019-05-08T10:02:57.000Z | 2019-05-08T10:02:59.000Z | tensorflow/python/keras/distribute/mnist_multi_worker.py | gurkangokdemir/tensorflow | 2e6446f8a9be31c59080b643d04f9b463c4201cf | [
"Apache-2.0"
] | null | null | null | tensorflow/python/keras/distribute/mnist_multi_worker.py | gurkangokdemir/tensorflow | 2e6446f8a9be31c59080b643d04f9b463c4201cf | [
"Apache-2.0"
] | 2 | 2020-03-25T12:52:20.000Z | 2020-08-11T09:31:43.000Z | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""An example training a Keras Model using MirroredStrategy and native APIs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
# pylint: disable=g-direct-tensorflow-import
from tensorflow.python import keras
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.distribute import collective_all_reduce_strategy as collective_strategy
from tensorflow.python.distribute import multi_worker_util
from tensorflow.python.distribute.cluster_resolver import TFConfigClusterResolver
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.keras import backend
from tensorflow.python.keras import utils
from tensorflow.python.keras.datasets import mnist
from tensorflow.python.keras.optimizer_v2 import rmsprop
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import app
from tensorflow.python.platform import tf_logging as logging
NUM_CLASSES = 10
flags.DEFINE_boolean(name='enable_eager', default=False, help='Enable eager?')
flags.DEFINE_enum('distribution_strategy', None, ['multi_worker_mirrored'],
'The Distribution Strategy to use.')
flags.DEFINE_string('model_dir', None, 'Directory for TensorBoard/Checkpoint.')
# TODO(rchao): Use multi_worker_util.maybe_shard_dataset() once that is provided
# there.
def maybe_shard_dataset(dataset):
"""Shard the dataset if running in multi-node environment."""
cluster_resolver = TFConfigClusterResolver()
cluster_spec = cluster_resolver.cluster_spec().as_dict()
if cluster_spec:
dataset = dataset.shard(
multi_worker_util.worker_count(cluster_spec,
cluster_resolver.task_type),
multi_worker_util.id_in_cluster(
cluster_spec, cluster_resolver.task_type, cluster_resolver.task_id))
return dataset
def get_data_shape():
# input image dimensions
img_rows, img_cols = 28, 28
if backend.image_data_format() == 'channels_first':
return 1, img_rows, img_cols
else:
return img_rows, img_cols, 1
def get_input_datasets(use_bfloat16=False):
"""Downloads the MNIST dataset and creates train and eval dataset objects.
Args:
use_bfloat16: Boolean to determine if input should be cast to bfloat16
Returns:
Train dataset and eval dataset. The dataset doesn't include batch dim.
"""
cast_dtype = dtypes.bfloat16 if use_bfloat16 else dtypes.float32
# the data, split between train and test sets
(x_train, y_train), (x_test, y_test) = mnist.load_data()
train_data_shape = (x_train.shape[0],) + get_data_shape()
test_data_shape = (x_test.shape[0],) + get_data_shape()
if backend.image_data_format() == 'channels_first':
x_train = x_train.reshape(train_data_shape)
x_test = x_test.reshape(test_data_shape)
else:
x_train = x_train.reshape(train_data_shape)
x_test = x_test.reshape(test_data_shape)
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
# convert class vectors to binary class matrices
y_train = utils.to_categorical(y_train, NUM_CLASSES)
y_test = utils.to_categorical(y_test, NUM_CLASSES)
# train dataset
train_ds = dataset_ops.Dataset.from_tensor_slices((x_train, y_train))
# TODO(rchao): Remove maybe_shard_dataset() once auto-sharding is done.
train_ds = maybe_shard_dataset(train_ds)
train_ds = train_ds.repeat()
train_ds = train_ds.map(lambda x, y: (math_ops.cast(x, cast_dtype), y))
train_ds = train_ds.batch(64, drop_remainder=True)
# eval dataset
eval_ds = dataset_ops.Dataset.from_tensor_slices((x_test, y_test))
# TODO(rchao): Remove maybe_shard_dataset() once auto-sharding is done.
eval_ds = maybe_shard_dataset(eval_ds)
eval_ds = eval_ds.repeat()
eval_ds = eval_ds.map(lambda x, y: (math_ops.cast(x, cast_dtype), y))
eval_ds = eval_ds.batch(64, drop_remainder=True)
return train_ds, eval_ds
def get_model(index=0):
"""Builds a Sequential CNN model to recognize MNIST digits.
Args:
index: The worker index. Defaults to 0.
Returns:
a CNN Keras model used for MNIST
"""
# Define a CNN model to recognize MNIST digits.
model = keras.models.Sequential()
model.add(
keras.layers.Conv2D(
32,
kernel_size=(3, 3),
activation='relu',
input_shape=get_data_shape()))
model.add(keras.layers.Conv2D(64, (3, 3), activation='relu'))
model.add(keras.layers.MaxPooling2D(pool_size=(2, 2)))
model.add(keras.layers.Dropout(0.25, name='dropout_worker%s_first' % index))
model.add(keras.layers.Flatten())
model.add(keras.layers.Dense(128, activation='relu'))
model.add(keras.layers.Dropout(0.5, name='dropout_worker%s_second' % index))
model.add(keras.layers.Dense(NUM_CLASSES, activation='softmax'))
return model
def main(_):
  """Trains and evaluates a CNN on MNIST under MultiWorkerMirroredStrategy,
  logging metrics and TensorBoard summaries to FLAGS.model_dir.
  """
  if flags.FLAGS.enable_eager:
    ops.enable_eager_execution()
    logging.info('Eager execution enabled for MNIST Multi-Worker.')
  else:
    logging.info('Eager execution not enabled for MNIST Multi-Worker.')
  # Build the train and eval datasets from the MNIST data.
  train_ds, eval_ds = get_input_datasets()
  if flags.FLAGS.distribution_strategy == 'multi_worker_mirrored':
    # MultiWorkerMirroredStrategy for multi-worker distributed MNIST training.
    strategy = collective_strategy.CollectiveAllReduceStrategy()
  else:
    # Any other strategy flag is rejected outright.
    raise ValueError('Only `multi_worker_mirrored` is supported strategy '
                     'in Keras MNIST example at this time. Strategy passed '
                     'in is %s' % flags.FLAGS.distribution_strategy)
  # Create and compile the model under Distribution strategy scope.
  # `fit`, `evaluate` and `predict` will be distributed based on the strategy
  # model was compiled with.
  with strategy.scope():
    model = get_model()
    optimizer = rmsprop.RMSProp(learning_rate=0.001)
    model.compile(
        loss=keras.losses.categorical_crossentropy,
        optimizer=optimizer,
        metrics=['accuracy'])
  # Train the model with the train dataset.
  tensorboard_callback = keras.callbacks.TensorBoard(
      log_dir=flags.FLAGS.model_dir)
  model.fit(
      x=train_ds,
      epochs=20,
      # NOTE(review): 468 steps of batch 64 covers ~30k examples per epoch —
      # confirm this is the intended per-worker epoch size.
      steps_per_epoch=468,
      callbacks=[tensorboard_callback])
  # Evaluate the model with the eval dataset.
  score = model.evaluate(eval_ds, steps=10, verbose=0)
  logging.info('Test loss:{}'.format(score[0]))
  logging.info('Test accuracy:{}'.format(score[1]))
if __name__ == '__main__':
  logging.set_verbosity(logging.INFO)
  app.run()
| 36.79798 | 94 | 0.734971 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,663 | 0.365495 |
70e79659cfe4792e70e9f358e5ca24377633a170 | 12,404 | py | Python | src/misc/decorator.py | JunManYuanLong/PyComs | 5df8b35048e5db4a8edf53e6703ef6d2ffd15cc6 | [
"MIT"
] | null | null | null | src/misc/decorator.py | JunManYuanLong/PyComs | 5df8b35048e5db4a8edf53e6703ef6d2ffd15cc6 | [
"MIT"
] | null | null | null | src/misc/decorator.py | JunManYuanLong/PyComs | 5df8b35048e5db4a8edf53e6703ef6d2ffd15cc6 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
from copy import deepcopy
from functools import wraps
from flask import request
from src.misc.render import json_detail_render
from config.settings import YML_JSON, logger
import datetime,json
def transfer(column):
    """Decorator factory: flatten a list of ORM rows into plain dicts.

    ``column`` is a ``|``-separated field spec; the first character of each
    entry selects the field's empty default:
        ? key/string   ! string   @ list   # dict   $ bool
    The decorated function must return a list of row objects whose
    attribute names match the spec's (prefix-stripped) names.
    """
    def dec(func):
        @wraps(func)
        def _(*args, **kwargs):
            # Empty default per type-prefix character.
            tmap = {
                '?': "",
                '!': "",
                '@': [],
                '#': {},
                '$': False,
            }
            result = func(*args, **kwargs)
            if not isinstance(result, list):
                # Fixed: raising a plain string is not a valid exception.
                raise TypeError('should be a list')
            cols = [i.strip() for i in column.split('|')]
            pure_cols = map(lambda x: x[1:], cols)
            template = {col[1:]: tmap.get(col[0]) for col in cols}
            # Kept for its side effect: IndexError if no '?' key column.
            key_col = filter(lambda x: '?' in x, cols)[0][1:]
            tdata = [{item: getattr(res, item) for item in pure_cols} for res in result]
            data = []
            for d in tdata:
                # Fill a fresh copy of the template from this row's values.
                tpl = deepcopy(template)
                for k, v in d.iteritems():
                    if isinstance(tpl[k], basestring) and v:
                        tpl[k] = v
                    elif isinstance(tpl[k], list) and v:
                        tlist = deepcopy(tpl[k])
                        tlist.append(v)
                        tpl[k] = tlist
                    elif isinstance(tpl[k], dict) and v:
                        tdict = deepcopy(tpl[k])
                        tdict.update(v)
                        tpl[k] = tdict
                    elif isinstance(tpl[k], bool):
                        tpl[k] = bool(v)
                data.append(tpl)
            return data
        return _
    return dec
def transfer2json(column):
    """Decorator factory: merge a list of ORM rows into JSON-ready dicts,
    grouping rows that share the value of the '?' key column.

    Type-prefix characters in the ``|``-separated spec:
        ? : key
        ! : string
        @ : list
        # : dict
        $ : bool
        & : tuple
        ~ : JSON-encoded list (decoded with json.loads)
    """
    def dec(func):
        @wraps(func)
        def _(*args, **kwargs):
            # Empty default per type-prefix character.
            tmap = {
                '?': "",
                '!': "",
                '@': [],
                '#': {},
                '$': False,
                '&': (),
                '~': ['~'],
            }
            result = func(*args, **kwargs)
            if not isinstance(result, list):
                # Fixed: raising a plain string is not a valid exception.
                raise TypeError('should be a list')
            cols = [i.strip() for i in column.split('|')]
            # Column names with their type-prefix character stripped.
            pure_cols = map(lambda x: x[1:], cols)
            # Template dict mapping each column to its empty default.
            template = {col[1:]: tmap.get(col[0]) for col in cols}
            key_col = filter(lambda x: '?' in x, cols)[0][1:]
            # Raw rows as plain dicts of column -> DB value.
            tdata = [{item: getattr(res, item) for item in pure_cols} for res in result]
            data = []
            for d in tdata:
                fu = [i for i in data if i.get(key_col) == d.get(key_col)]
                if len(fu) == 0:
                    # First row for this key: fill a fresh template.
                    tpl = deepcopy(template)
                    for k, v in d.iteritems():
                        if isinstance(tpl[k], basestring) and v != None:
                            tpl[k] = v
                        elif tpl[k] == ['~']:
                            tjlist = json.loads(v) if v else []
                            tpl[k] = tjlist
                        elif isinstance(tpl[k], list) and v:
                            tlist = deepcopy(tpl[k])
                            tlist.append(v)
                            tpl[k] = tlist
                        elif isinstance(tpl[k], dict) and v:
                            tdict = deepcopy(tpl[k])
                            tdict.update(v)
                            tpl[k] = tdict
                        elif isinstance(tpl[k], bool):
                            tpl[k] = bool(v)
                        elif isinstance(tpl[k], tuple) and v:
                            tlist = deepcopy(tpl[k])
                            tmp = []
                            tmp.append(v)
                            tlist += tuple(tmp)
                            tpl[k] = tlist
                    data.append(tpl)
                else:
                    # Row with an already-seen key: merge into the match.
                    fu = fu[0]
                    for k, v in d.iteritems():
                        if isinstance(fu[k], basestring) and v:
                            fu[k] = v
                        elif isinstance(fu[k], list) and v:
                            tlist = deepcopy(fu[k])
                            tlist.append(v)
                            # De-duplicate while preserving insertion order.
                            fu[k] = list(set(tlist))
                            fu[k].sort(key=tlist.index)
                        elif isinstance(fu[k], dict) and v:
                            tdict = deepcopy(fu[k])
                            tdict.update(v)
                            fu[k] = tdict
                        elif isinstance(fu[k], bool):
                            # Fixed: previously mutated the stale `tpl` from
                            # the other branch instead of the matched `fu`.
                            fu[k] = bool(v)
                        elif isinstance(fu[k], tuple) and v:
                            tlist = deepcopy(fu[k])
                            tmp = []
                            tmp.append(v)
                            tlist += tuple(tmp)
                            fu[k] = tuple(tlist)
            return data
        return _
    return dec
def transfer2jsonwithoutset(column, ispagination=False):
    """Decorator factory: merge ORM rows into dicts keyed by the '?' column.

    Unlike ``transfer2json`` this keeps a dict keyed by the key column (no
    linear scans over already-merged rows) and can pass through a
    pagination count when ``ispagination`` is True.

    Type-prefix characters in the ``|``-separated spec:
        ? : key
        ! : string
        @ : list
        # : dict
        $ : bool
        & : tuple
        ~ : JSON-encoded list (decoded with json.loads)
    """
    def dec(func):
        @wraps(func)
        def _(*args, **kwargs):
            # Empty default per type-prefix character.
            t_map = {
                '?': "key",
                '!': "",
                '@': [],
                '#': {},
                '$': False,
                '&': (),
                '~': ['~'],
            }
            count = 0
            if ispagination:
                results, count = func(*args, **kwargs)
            else:
                results = func(*args, **kwargs)
            if not isinstance(results, list):
                raise TypeError('should be a list')
            # Raw prefixed column specs.
            origin_keys = [key.strip() for key in column.split('|')]
            # Map each bare column name to its empty default.
            result_map = {
                key.strip()[1:]: t_map.get(key.strip()[0])
                for key in column.split('|')
            }
            # The '?'-prefixed column is the grouping key.
            key_of_data = list(filter(lambda x: '?' in x, origin_keys))[0][1:]
            # Rows as plain dicts of column -> DB value.
            results_dict = [{item: getattr(result, item) for item in result_map.keys()} for result in results]
            data = {}
            for result in results_dict:
                if result.get(key_of_data) in data.keys():
                    # Key already seen: merge this row into the stored dict.
                    temp = data.get(result.get(key_of_data))
                    for key, value in result_map.items():
                        data_value = result.get(key)
                        if isinstance(value, str) and data_value:
                            temp[key] = data_value
                        elif isinstance(value, list) and data_value:
                            t_list = deepcopy(temp[key])
                            t_list.append(data_value)
                            temp[key] = list(t_list)
                        elif isinstance(value, dict) and data_value:
                            tdict = deepcopy(temp[key])
                            tdict.update(data_value)
                            temp[key] = tdict
                        elif isinstance(value, bool):
                            temp[key] = bool(data_value)
                        elif isinstance(value, tuple) and data_value:
                            t_list = deepcopy(temp[key])
                            tmp = [data_value]
                            t_list += tuple(tmp)
                            temp[key] = tuple(t_list)
                else:
                    # First row for this key: fill a fresh template.
                    temp = deepcopy(result_map)
                    for key, value in result_map.items():
                        data_value = result.get(key)
                        if isinstance(value, str) and data_value is not None:
                            temp[key] = data_value
                        elif value == ['~']:
                            tjlist = json.loads(data_value) if data_value else []
                            temp[key] = tjlist
                        elif isinstance(value, list) and data_value:
                            temp[key] = [data_value]
                        elif isinstance(value, dict) and data_value:
                            # Fixed: `{data_value}` built a *set* literal (and
                            # raised TypeError for unhashable dicts); take a
                            # shallow dict copy instead.
                            temp[key] = dict(data_value)
                        elif isinstance(value, bool):
                            temp[key] = bool(data_value)
                        elif isinstance(value, tuple) and data_value:
                            temp[key] = tuple([data_value])
                    data.update({result.get(key_of_data): temp})
            if ispagination:
                return list(data.values()), count
            else:
                return list(data.values())
        return _
    return dec
def slicejson(settings):
    """Decorator factory: zip parallel name/value lists into dicts.

    Each entry of ``settings`` is 'target|name_key|value_key|names_src|values_src'.
    For every record returned by the wrapped function, the two source lists
    are removed and replaced by ``target``: a list of
    {name_key: n, value_key: v} pairs.
    """
    def _merge(records):
        for spec in settings:
            target, name_key, value_key, names_src, values_src = spec.split('|')
            for record in records:
                pairs = [
                    {name_key: n, value_key: v}
                    for n, v in zip(record.get(names_src), record.get(values_src))
                ]
                del record[names_src]
                del record[values_src]
                record[target] = pairs
        return records
    def wrapper(func):
        @wraps(func)
        def _(*args, **kwargs):
            return _merge(func(*args, **kwargs))
        return _
    return wrapper
def validation(validate_name = None):
    """Decorator factory: validate ``request.json`` against the YAML-defined
    schema named by ``validate_name`` (format ``"<HTTP-METHOD>:<schema-name>"``).

    On the first failed rule the view is short-circuited and a
    ``json_detail_render`` error response is returned (201 missing field,
    202 length violation, 203 type mismatch).
    """
    def validate_required(key, value):
        # 201: the field is absent from the request body.
        request_value = request.json.get(key)
        expect_value = value  # NOTE(review): expect_value is unused here
        if request_value is None:
            return False, json_detail_render(201, [], "{} is required".format(key))
        return True, 1
    def validate_min_length(key, value):
        # 202: present but shorter than the configured minimum.
        request_value = request.json.get(key)
        expect_value = value
        if request_value is not None and len(request_value) < expect_value:
            return False, json_detail_render(202, [], "{} min length is {}".format(key, value))
        return True, 1
    def validate_max_length(key, value):
        # 202: present but longer than the configured maximum.
        request_value = request.json.get(key)
        expect_value = value
        if request_value is not None and len(request_value) > expect_value:
            return False, json_detail_render(202, [], "{} max length is {}".format(key, value))
        return True, 1
    def validate_type(key, value):
        # 203: present but not an instance of the named type.
        ttype_dict = {
            'list': list,
            'basestring': basestring,
            'dict': dict,
            'int': int,
            'bool': bool,
        }
        request_value = request.json.get(key)
        expect_value = value
        if request_value is not None and not isinstance(request_value, ttype_dict.get(value)):
            return False, json_detail_render(203, [], "{} should be a {}".format(key, value))
        return True, 1
    # Rule name (as it appears in the YAML schema) -> validator function.
    KEY_FUNC_MAP = {
        'required': validate_required,
        'min_length': validate_min_length,
        'max_length': validate_max_length,
        'type': validate_type,
    }
    def wrapper(func):
        @wraps(func)
        def _(*args, **kwargs):
            protocol, vname = validate_name.split(':')
            # Only validate when the request method matches the schema's method.
            if request.method == protocol:
                all_json = YML_JSON
                validate_json = deepcopy(all_json.get(vname))
                # 'returnvalue' describes the response, not an input field.
                del validate_json['returnvalue']
                for item, settings in validate_json.items():
                    for key, value in settings.items():
                        f = KEY_FUNC_MAP.get(key)
                        ret = f(item, value)
                        if not ret[0]:
                            # First failure wins: return the error response.
                            return ret[1]
            return func(*args, **kwargs)
        return _
    return wrapper
| 36.482353 | 128 | 0.411238 | 0 | 0 | 0 | 0 | 9,442 | 0.755602 | 0 | 0 | 768 | 0.06146 |
70e99c7d827bb0c5ae82899785cbc3e22a7d8060 | 3,303 | py | Python | modules/tools/open_space_visualization/open_space_roi_visualizer.py | jzjonah/apollo | bc534789dc0548bf2d27f8d72fe255d5c5e4f951 | [
"Apache-2.0"
] | 22,688 | 2017-07-04T23:17:19.000Z | 2022-03-31T18:56:48.000Z | modules/tools/open_space_visualization/open_space_roi_visualizer.py | Songjiarui3313/apollo | df9113ae656e28e5374db32529d68e59455058a0 | [
"Apache-2.0"
] | 4,804 | 2017-07-04T22:30:12.000Z | 2022-03-31T12:58:21.000Z | modules/tools/open_space_visualization/open_space_roi_visualizer.py | Songjiarui3313/apollo | df9113ae656e28e5374db32529d68e59455058a0 | [
"Apache-2.0"
] | 9,985 | 2017-07-04T22:01:17.000Z | 2022-03-31T14:18:16.000Z | #!/usr/bin/env python3
###############################################################################
# Copyright 2018 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# @file to run it, change the modules/common/configs/config_gflags.cc to use sunnyvale_with_two_offices
from open_space_roi_interface import *
import matplotlib.pyplot as plt
# initialize object
# NOTE(review): this rebinds the class name to its instance, so the class
# cannot be instantiated again afterwards.
open_space_roi = open_space_roi()
lane_id = "11564dup1_1_-1"
parking_id = "11543"
# Fixed-size ctypes buffers that ROITest() fills in-place.
num_output_buffer = 50
unrotated_roi_boundary_x = (c_double * num_output_buffer)()
roi_boundary_x = (c_double * num_output_buffer)()
parking_spot_x = (c_double * num_output_buffer)()
unrotated_roi_boundary_y = (c_double * num_output_buffer)()
roi_boundary_y = (c_double * num_output_buffer)()
parking_spot_y = (c_double * num_output_buffer)()
end_pose = (c_double * num_output_buffer)()
xy_boundary = (c_double * num_output_buffer)()
origin_pose = (c_double * num_output_buffer)()
# Run the ROI computation; a falsy return signals failure (script continues
# regardless and will plot whatever is in the buffers).
if not open_space_roi.ROITest(lane_id, parking_id,
                              unrotated_roi_boundary_x, unrotated_roi_boundary_y, roi_boundary_x, roi_boundary_y,
                              parking_spot_x, parking_spot_y, end_pose,
                              xy_boundary, origin_pose):
    print("open_space_roi fail")
# Copy the ctypes buffers into plain Python lists for printing/plotting.
result_unrotated_roi_boundary_x = []
result_unrotated_roi_boundary_y = []
result_roi_boundary_x = []
result_roi_boundary_y = []
result_parking_spot_x = []
result_parking_spot_y = []
result_end_pose = []
result_xy_boundary = []
result_origin_pose = []
print("vertices of obstacles")
for i in range(0, 10):
    result_unrotated_roi_boundary_x.append(float(unrotated_roi_boundary_x[i]))
    result_unrotated_roi_boundary_y.append(float(unrotated_roi_boundary_y[i]))
    result_roi_boundary_x.append(float(roi_boundary_x[i]))
    result_roi_boundary_y.append(float(roi_boundary_y[i]))
    print(str(float(roi_boundary_x[i])))
    print(str(float(roi_boundary_y[i])))
print("parking spot")
for i in range(0, 4):
    result_parking_spot_x.append(float(parking_spot_x[i]))
    result_parking_spot_y.append(float(parking_spot_y[i]))
print("end_pose in x,y,phi,v")
for i in range(0, 4):
    print(str(float(end_pose[i])))
print("xy_boundary in xmin xmax ymin ymax")
for i in range(0, 4):
    print(str(float(xy_boundary[i])))
print("origin_pose")
for i in range(0, 2):
    print(str(float(origin_pose[i])))
# Two stacked scatter plots: unrotated boundary + parking spot on top,
# rotated ROI boundary below, with equal axis scaling.
fig = plt.figure()
ax1 = fig.add_subplot(211)
ax1.scatter(result_unrotated_roi_boundary_x, result_unrotated_roi_boundary_y)
ax1.scatter(result_parking_spot_x, result_parking_spot_y)
ax2 = fig.add_subplot(212)
ax2.scatter(result_roi_boundary_x, result_roi_boundary_y)
plt.gca().set_aspect('equal', adjustable='box')
plt.show()
| 37.534091 | 113 | 0.719043 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,054 | 0.319104 |
70ea7b65b45e04018fd11eba650c698a19bfac5d | 556 | py | Python | build.py | MolGL/MolGL | e83b3a1edc83844368a8515b1324498aed418c7a | [
"Apache-2.0"
] | 2 | 2016-11-15T17:06:55.000Z | 2017-02-16T20:31:02.000Z | build.py | MolGL/MolGL | e83b3a1edc83844368a8515b1324498aed418c7a | [
"Apache-2.0"
] | 8 | 2016-11-04T05:51:51.000Z | 2016-11-18T06:44:30.000Z | build.py | MolGL/MolGL | e83b3a1edc83844368a8515b1324498aed418c7a | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# ————————————————————————————————————————————————————————————————————————————
# Copyright © 2014 - 2016, Sequømics Research, All rights reserved.
# Copyright © 2014 - 2016, Sequømics Corporation. All rights reserved.
# ————————————————————————————————————————————————————————————————————————————
# Licensed under the Apache License (the "License, version 2.0");
# you may not use this file except in compliance with the License.
# ————————————————————————————————————————————————————————————————————————————
| 50.545455 | 78 | 0.413669 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,006 | 0.990157 |
70eb8ef9456f695c6b3ab84cff23848ef3f71a4b | 7,767 | py | Python | adapted_network.py | Hong-Ming/Adaptive_Network_Slimming | 17f2e56d7bc41070d02799845af3bc353a76d766 | [
"MIT"
] | null | null | null | adapted_network.py | Hong-Ming/Adaptive_Network_Slimming | 17f2e56d7bc41070d02799845af3bc353a76d766 | [
"MIT"
] | null | null | null | adapted_network.py | Hong-Ming/Adaptive_Network_Slimming | 17f2e56d7bc41070d02799845af3bc353a76d766 | [
"MIT"
] | null | null | null |
import sys
sys.path.append('./train_model')
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision
import torchvision.transforms as transforms
import numpy as np
import os
import argparse
# CLI: -net selects the pretrained ResNet variant; --nonuniform enables a
# per-layer (linearly increasing) pruning rate instead of a flat 0.5.
parser = argparse.ArgumentParser(description='Adaptive Network Slimming')
parser.add_argument('-net', type=str, help='pretrained pkl file')
parser.add_argument('--nonuniform', action='store_true', help='set non-uniform pruning rate')
args = parser.parse_args()
# from models import *
# Per-channel mean/std normalization applied to the CIFAR-10 test images.
transform_test = transforms.Compose(
    [transforms.ToTensor(),
     transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))])
testset = torchvision.datasets.CIFAR10(root='./cifar10',train=False,download=True,transform=transform_test)
testloader = torch.utils.data.DataLoader(testset,batch_size=128,shuffle=False)
classes = ('plane','car','bird','cat','deer','dog','frog','horse','ship','truck')
input_shape = (3,32,32)
# START/END bound the per-layer pruning rates; netnum is the layer count
# used to size the rate schedule (one rate per prunable conv layer).
if args.net == "resnet18":
    START = 0.2
    END = 0.81
    netnum = 18
elif args.net == "resnet34":
    START = 0.2
    END = 0.81
    netnum = 34
elif args.net == "resnet50":
    START = 0.2
    END = 0.8
    netnum = 50
elif args.net == "resnet101":
    START = 0.2
    END = 0.8
    netnum = 101
elif args.net == "resnet152":
    START = 0.21
    END = 0.79
    netnum = 152
if args.nonuniform:
    # Linearly increasing rates from START to END across netnum-1 layers;
    # the fully-connected layer is pruned at the END rate.
    PRUNE_RATE = np.arange(START,END,(END-START)/(netnum-1))
    FC_PRUNE_RATE = END
    Model_Name = "ResNet" + str(netnum) + " (Non-uniform Pruning Rate)"
else:
    # Flat 50% pruning everywhere.
    PRUNE_RATE = np.zeros([netnum-1,1]) + 0.5
    FC_PRUNE_RATE = 0.5
    Model_Name = "ResNet" + str(netnum) + " (Uniform Pruning Rate)"
# -------------- Load Pretrained Model---------------
File_Name = "./model_pkl/" + args.net + ".pkl"
net = torch.load(File_Name, map_location= "cpu")
def RunData():
    """Run the CIFAR-10 test set through `net` on GPU; return accuracy in %."""
    correct = 0
    total = 0
    with torch.no_grad():
        net.eval()
        net.cuda()
        for (x,y) in testloader:
            xa = x.cuda()
            ya = y.cuda()  # NOTE(review): ya is unused; labels compared on CPU below
            out = net(xa)
            _,predicted = torch.max(out.data,1)
            total += y.size(0)
            correct += (predicted.cpu() == y).sum()
        net.cpu()
    Accuracy = 100*correct.cpu().numpy()/total
    return Accuracy
def RunData2():
    """Same evaluation as RunData, but first sets track_running_stats=False
    on every BatchNorm2d layer; used to re-score the net after pruning.
    """
    correct = 0
    total = 0
    # Disable running-statistics tracking on all BatchNorm layers.
    for _,layer in net.named_modules():
        if isinstance(layer,nn.BatchNorm2d):
            layer.track_running_stats=False
    with torch.no_grad():
        net.eval()
        net.cuda()
        for (x,y) in testloader:
            xa = x.cuda()
            ya = y.cuda()
            out = net(xa)
            _,predicted = torch.max(out.data,1)
            total += y.size(0)
            correct += (predicted.cpu() == y).sum()
        net.cpu()
    Accuracy = 100*correct.cpu().numpy()/total
    return Accuracy
def prune_filter(layer, PRUNE_RATE):
    """Build a 0/1 mask zeroing whole conv filters with small L1 mass.

    A filter is zeroed when the sum of its absolute weights falls below the
    value at rank floor(PRUNE_RATE * num_filters) of the sorted sums.
    """
    filter_mass = np.sum(np.abs(layer), axis=(1, 2, 3))
    threshold = np.sort(filter_mass)[int(np.floor(PRUNE_RATE * len(filter_mass)))]
    mask = np.ones(layer.shape)
    # Broadcasting zeroes every weight of each below-threshold filter.
    mask[filter_mass < threshold] = 0
    return mask
def prune_weight(layer, PRUNE_RATE):
    """Build a 0/1 mask zeroing individual weights with small magnitude.

    A weight is zeroed when its absolute value falls below the value at
    rank floor(PRUNE_RATE * layer.size) of the sorted magnitudes.

    Improved: the per-element Python loop over the first two axes is
    replaced with a vectorized comparison, which is also rank-agnostic
    (the original assumed a 2-D weight matrix).
    """
    magnitudes = np.sort(np.abs(layer).flatten())
    threshold = magnitudes[int(np.floor(PRUNE_RATE * len(magnitudes)))]
    mask = np.ones(layer.shape)
    mask[np.abs(layer) < threshold] = 0
    return mask
def Calculate_flop():
    """Estimate total FLOPs of `net`, skipping pruned (all-zero) filters/weights.

    Conv layers ('shortcut' convs excluded): each output position costs a
    dot product of length in_ch*kh*kw = n multiplies + (n-1) adds, counted
    only for filters that still have nonzero weights.
    Linear layers: 2 FLOPs (multiply + add) per surviving weight.
    """
    FLOP = 0
    shape = input_shape[1]  # running spatial size of the feature map
    for name,layer in net.named_modules():
        if isinstance(layer,nn.Conv2d) and 'shortcut' not in name:
            filter_data = layer.weight.data.numpy()
            # Filters whose weights are all zero were pruned away.
            skip = sum(np.sum(abs(filter_data),axis = (1,2,3)) == 0)
            filter_shape = layer.weight.data.numpy().shape
            padding = layer.padding
            stride = layer.stride
            n = filter_shape[1] * filter_shape[2] * filter_shape[3] # vector length
            fpn = n + (n - 1) # n multiplication, n-1 addition
            # Standard conv output-size formula; also advances `shape`.
            step_x = np.floor(((shape - filter_shape[2] + 2 * padding[0]) / stride[0]) + 1)
            shape = step_x
            step = step_x**2
            fpf = fpn*step
            FLOP += fpf*(filter_shape[0] - skip)
        elif isinstance(layer,nn.Linear):
            filter_data = layer.weight.data.numpy()
            # Individually-zeroed weights were pruned away.
            skip = sum(sum(filter_data == 0))
            filter_shape = layer.weight.data.numpy().shape
            FLOP += 2 * (filter_shape[0] * filter_shape[1] - skip)
    return FLOP
# Baseline accuracy and FLOPs before any pruning.
ACC_before = RunData()
print("Model Name: " + Model_Name)
print("Accuracy : " + str(ACC_before) + "%")
FLOP_before = Calculate_flop()
if FLOP_before / 1e9 > 1: # for Giga Flops
    print("FLOP : %4.2f GFLOP" % (FLOP_before / 1e9))
else:
    print("FLOP : %4.2f MFLOP" % (FLOP_before / 1e6))
print(" ")
print(" Start Pruning ")
print("---------------------------------------------------")
print("|Layer| FLOP |#Filter or #Weight|Pruning |Filter|")
print("| No. | Save | before/after | Type | Size |")
print("|-----|--------|------------------|--------|------|")
# pruning
TOTAL_WEIGHT = 0
PRUNE_WEIGHT = 0
i = 0
for parname,layer in net.named_modules():
    if isinstance(layer,nn.Conv2d) and 'shortcut' not in parname:
        # Filter-level pruning for conv layers (shortcut convs untouched).
        par = layer.weight.data.numpy()
        par_size = par.shape
        mask = prune_filter(par,PRUNE_RATE[i])
        par = (par * mask)
        print("| %3i" % (i+1), "|"+
              " %5.2f" % float((1-(np.count_nonzero(mask)/mask.size)) * 100) + "% |"+
              " %4i" % int((mask.size-np.count_nonzero(mask))/(par_size[1]*par_size[2]*par_size[2])),"/",
              "%4i" % int(mask.size/(par_size[1]*par_size[2]*par_size[2])) + " | Filter |"+
              " %1ix%1i |" % (par_size[2], par_size[3]))
        TOTAL_WEIGHT = TOTAL_WEIGHT + (mask.size/(par_size[1]))
        PRUNE_WEIGHT = PRUNE_WEIGHT + ((mask.size-np.count_nonzero(mask))/(par_size[1]))
        i = i + 1
        # Write the masked weights back into the network.
        layer.weight.data = torch.from_numpy(par).type(torch.FloatTensor)
    elif isinstance(layer,nn.Linear):
        # Weight-level pruning for fully-connected layers.
        par = layer.weight.data.numpy()
        par_size = par.shape
        mask = prune_weight(par,FC_PRUNE_RATE)
        par = (par * mask)
        print("| %3i" % (i+1), "|"+
              " %5.2f" % float((1-(np.count_nonzero(mask)/mask.size)) * 100) + "% |"+
              " %5i" % int(mask.size-np.count_nonzero(mask)),"/",
              "%5i" % int(mask.size) + " | Weight |" + " none |")
        TOTAL_WEIGHT = TOTAL_WEIGHT + (mask.size)
        PRUNE_WEIGHT = PRUNE_WEIGHT + (mask.size-np.count_nonzero(mask))
        i = i + 1
        layer.weight.data = torch.from_numpy(par).type(torch.FloatTensor)
print("---------------------------------------------------")
# Re-evaluate (with BatchNorm running stats frozen) and recount FLOPs.
ACC_after = RunData2()
FLOP_after = Calculate_flop()
print(" ")
print(" After Pruning ")
print("Accuracy : " + str(ACC_before) + "% -> " + str(ACC_after) + "%")
if FLOP_after / 1e9 > 1: # for Giga Flops
    if FLOP_before / 1e9 > 1: # for Giga Flops
        print("FLOP : %4.2f GFLOP" % (FLOP_before / 1e9) + " -> %4.2f GFLOP" % (FLOP_after / 1e9))
    else:
        print("FLOP : %4.2f MFLOP" % (FLOP_before / 1e6) + " -> %4.2f GFLOP" % (FLOP_after / 1e9))
else:
    if FLOP_before / 1e9 > 1: # for Giga Flops
        print("FLOP : %4.2f GFLOP" % (FLOP_before / 1e9) + " -> %4.2f MFLOP" % (FLOP_after / 1e6))
    else:
        print("FLOP : %4.2f MFLOP" % (FLOP_before / 1e6) + " -> %4.2f MFLOP" % (FLOP_after / 1e6))
print("FLOP save: %5.2f" % (100*(FLOP_before - FLOP_after)/FLOP_before),"%") | 35.958333 | 108 | 0.557744 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,454 | 0.187202 |
70ecb14bd53988ce2d1a7a1d376ec3db5fa8d456 | 2,370 | py | Python | mcetl/user/access.py | materials-commons/pymcetl | b4311ba50bb35bc36527b9d313a91778f9550a92 | [
"MIT"
] | null | null | null | mcetl/user/access.py | materials-commons/pymcetl | b4311ba50bb35bc36527b9d313a91778f9550a92 | [
"MIT"
] | null | null | null | mcetl/user/access.py | materials-commons/pymcetl | b4311ba50bb35bc36527b9d313a91778f9550a92 | [
"MIT"
] | null | null | null | from flask import request
from ..utils.mcexceptions import AccessNotAllowedException
from . import apikeydb
from ..database.DB import DbConnection
_user_access_matrix = {}
_admins = []
def check(user, owner, project_id="Unknown"):
    """Raise AccessNotAllowedException unless *user* may access the project."""
    if not allowed(user, owner, project_id):
        raise AccessNotAllowedException(project_id)
def reset():
    """Clear both module-level caches (per-project access lists and admins)."""
    global _admins
    _user_access_matrix.clear()
    _admins = []
def _user_in_owner_group(user, project_id):
    """True when *user* is an admin or in the project's cached access list."""
    if is_administrator(user):
        return True
    elif project_id not in _user_access_matrix:
        # Seed an empty cache entry, then recurse so the next branch loads it.
        # NOTE(review): the recursive call's return value is discarded; it
        # appears to run only for its _load_user side effect — confirm.
        _user_access_matrix[project_id] = []
        _user_in_owner_group(user, project_id)
    elif user not in _user_access_matrix[project_id]:
        # Cache miss for this user: refresh the project's list from the DB.
        _load_user(project_id)
    return _access_allowed(user, project_id)
def is_administrator(user):
    """Return True when *user* is a site administrator (lazy-loads the cache)."""
    if not _admins:
        load_admins()
    return user in _admins
def get_admins():
    """Return the cached admin id list, loading it lazily on first use."""
    if not _admins:
        load_admins()
    return _admins
def load_admins():
    """Populate the module-level _admins cache from the users table."""
    global _admins
    conn = DbConnection().connection()
    r = DbConnection().interface()
    # All users flagged admin=True, via the 'admin' secondary index.
    admin_group = list(r.table('users').get_all(True, index='admin')
                       .run(conn))
    if not admin_group:
        _admins = []
    else:
        for u in admin_group:
            _admins.append(u['id'])
def _load_user(project_id):
    """Refresh the cached list of user ids with access to *project_id*."""
    conn = DbConnection().connection()
    r = DbConnection().interface()
    users = list(r.table('access')
                 .get_all(project_id, index='project_id')
                 .pluck('user_id')
                 .run(conn))
    # Replace (not merge) the project's cache entry.
    _user_access_matrix[project_id] = []
    for u in users:
        _user_access_matrix[project_id].append(u['user_id'])
def _access_allowed(user, project_id):
    """True when *user* appears in the cached access list for the project."""
    return user in _user_access_matrix[project_id]
def remove_user(user, project_id):
    """Drop *user* from the project's cached access list (no-op if absent).

    Raises KeyError if the project has never been cached.
    """
    if user in _user_access_matrix[project_id]:
        # Fixed: the cached value is a list, and list.pop() takes an index —
        # .pop(user, None) raised TypeError. Use remove() for by-value delete.
        _user_access_matrix[project_id].remove(user)
def get_apiuser():
    """Resolve the requesting user from the 'apikey' query parameter."""
    apikey = request.args.get('apikey')
    api_user = apikeydb.apikey_user(apikey)
    return api_user
def get_user():
    """Alias for get_apiuser(); the current user always comes from the API key."""
    return get_apiuser()
def allowed(user, owner, project_id):
    """True when *user* owns the project or belongs to its access group."""
    return user == owner or _user_in_owner_group(user, project_id)
| 23.465347 | 68 | 0.654008 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 73 | 0.030802 |
70ecc3bb253029a64b550829a901f26d30203690 | 31 | py | Python | arxivtimes_indicator/server/__init__.py | chakki-works/arXivTimesIndicator | 501413571dc0024b5a9d0bc2e9f17f805de92690 | [
"Apache-2.0"
] | 34 | 2017-08-02T07:01:13.000Z | 2019-01-06T10:35:57.000Z | arxivtimes_indicator/server/__init__.py | arXivTimes/arXivTimesIndicator | 501413571dc0024b5a9d0bc2e9f17f805de92690 | [
"Apache-2.0"
] | 4 | 2017-08-09T06:47:26.000Z | 2017-10-13T01:47:01.000Z | arxivtimes_indicator/server/__init__.py | arXivTimes/arXivTimesIndicator | 501413571dc0024b5a9d0bc2e9f17f805de92690 | [
"Apache-2.0"
] | 2 | 2020-10-21T12:31:02.000Z | 2021-11-05T05:26:15.000Z | from .server import Application | 31 | 31 | 0.870968 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
70eea58422fe48ef43948fed2ecd47c6729bad81 | 5,056 | py | Python | national-rail/stations_and_services_scraper/config/aws_config.py | weizhi-luo/commute | a1d3e412df11971bb26db6316cf43bda29274dea | [
"MIT"
] | null | null | null | national-rail/stations_and_services_scraper/config/aws_config.py | weizhi-luo/commute | a1d3e412df11971bb26db6316cf43bda29274dea | [
"MIT"
] | null | null | null | national-rail/stations_and_services_scraper/config/aws_config.py | weizhi-luo/commute | a1d3e412df11971bb26db6316cf43bda29274dea | [
"MIT"
] | null | null | null | """Represent AWS config settings"""
import json
from typing import Iterable, Mapping
import boto3
from datetime import datetime, timedelta
from data_model import OriginAndCallingPointNames
from .config import ConfigSettings
class AwsAppConfigSettings(ConfigSettings):
    """Represent a collection of config settings on AWS.

    Wraps three AWS AppConfig profiles of the 'stations_and_services_scraper'
    application (PROD environment) plus the Darwin API token fetched from
    Secrets Manager.
    """
    def __init__(self):
        """Create an instance of `AwsAppConfigSettings`"""
        # Each session polls its AppConfig profile at most every 1800 seconds.
        self._origins_and_calling_points_config_session =\
            AwsAppConfigSession('stations_and_services_scraper', 'PROD',
                                'origins_and_calling_points', 1800)
        self._stations_crs_codes_config_session =\
            AwsAppConfigSession('stations_and_services_scraper', 'PROD',
                                'station_name_crs_code_mapping', 1800)
        self._darwin_config_session =\
            AwsAppConfigSession('stations_and_services_scraper', 'PROD',
                                'darwin', 1800)
        # Darwin API token comes from Secrets Manager, not AppConfig.
        self._darwin_token =\
            json.loads(
                get_secret('darwin/token', 'eu-west-2')['SecretString']
            )['darwin_token']
    def get_services_origin_and_calling_point_names(self)\
            -> Iterable[OriginAndCallingPointNames]:
        """Get a collection of services' origin and calling point names.

        :return: A collection of instances of `OriginAndCallingPointNames`
        """
        setting = json.loads(
            self._origins_and_calling_points_config_session.get_config())
        return [OriginAndCallingPointNames(**s) for s in setting]
    def get_data_access_config(self) -> Mapping:
        """Get config for setting up data access.

        :return: Data access configuration (station-name/CRS mapping, Darwin
            WSDL URL, and the Darwin API token)
        """
        return {
            'station_name_crs_code_mapping':
                self._stations_crs_codes_config_session.get_config(),
            'wsdl':
                json.loads(self._darwin_config_session.get_config())['wsdl'],
            'token': self._darwin_token
        }
    def get_data_publisher_config(self) -> Mapping:
        """Get config for setting up data publish.

        :return: Data publish configuration (currently empty)
        """
        return {}
class AwsAppConfigSession:
    """Represent an AWS AppConfig session with interval-based re-polling."""
    def __init__(self, application_identifier: str, environment: str,
                 profile_identifier: str, poll_interval_seconds: int):
        """Create an instance of `AwsAppConfigSession`

        :param application_identifier: Identifier of application
            related to the config
        :param environment: Environment of the config
        :param profile_identifier: Identifier of the config
        :param poll_interval_seconds: interval in seconds to pull config
        """
        self._application_identifier = application_identifier
        self._environment = environment
        self._profile_identifier = profile_identifier
        self._poll_interval_seconds = poll_interval_seconds
        # Lazily initialized: no AWS call is made until get_config().
        self._config = None
        self._config_token = None
    def get_config(self) -> str:
        """Get and return config, starting a session or re-polling as needed."""
        if self._config is None or self._config_token is None:
            self._start_session_and_get_latest_config()
        elif self._is_poll_interval_passed():
            self._get_latest_config()
        return self._config
    def _start_session_and_get_latest_config(self):
        # First-use path: open the AppConfig session, then fetch once.
        self._start_session()
        self._get_latest_config()
    def _start_session(self):
        self._client = boto3.client('appconfigdata')
        response = self._client.start_configuration_session(
            ApplicationIdentifier=self._application_identifier,
            EnvironmentIdentifier=self._environment,
            ConfigurationProfileIdentifier=self._profile_identifier,
            RequiredMinimumPollIntervalInSeconds=self._poll_interval_seconds
        )
        self._config_token = response['InitialConfigurationToken']
    def _get_latest_config(self):
        response = self._client.get_latest_configuration(
            ConfigurationToken=self._config_token)
        # The service rotates the token and may adjust the poll interval.
        self._config_token = response['NextPollConfigurationToken']
        self._poll_interval_seconds =\
            int(response['NextPollIntervalInSeconds'])
        self._last_get_latest_config_time = datetime.utcnow()
        self._update_config(response)
    def _update_config(self, response):
        config_bytes = response['Configuration'].read()
        # An empty payload means "unchanged"; keep the cached config.
        if config_bytes != b'':
            self._config = config_bytes.decode('utf-8')
    def _is_poll_interval_passed(self):
        config_expiry_time = self._last_get_latest_config_time\
            + timedelta(seconds=self._poll_interval_seconds)
        return config_expiry_time < datetime.utcnow()
def get_secret(secret_name: str, region_name: str) -> dict:
    """Fetch a secret value from AWS Secrets Manager in the given region."""
    secrets_client = boto3.session.Session().client(
        service_name='secretsmanager', region_name=region_name)
    return secrets_client.get_secret_value(SecretId=secret_name)
| 39.811024 | 77 | 0.668315 | 4,555 | 0.90091 | 0 | 0 | 0 | 0 | 0 | 0 | 1,343 | 0.265625 |
70ef08ce99b2902cbf44d027970fd4e38a0d4f18 | 712 | py | Python | src/tests/test_loading.py | danmysak/ipa-parser | bb4f5fc1a8f95ef87793d2ffd79430a9a0ffbeaf | [
"MIT"
] | null | null | null | src/tests/test_loading.py | danmysak/ipa-parser | bb4f5fc1a8f95ef87793d2ffd79430a9a0ffbeaf | [
"MIT"
] | null | null | null | src/tests/test_loading.py | danmysak/ipa-parser | bb4f5fc1a8f95ef87793d2ffd79430a9a0ffbeaf | [
"MIT"
] | null | null | null | from timeit import timeit
from unittest import TestCase
from ..ipaparser import IPA, load
__all__ = [
'TestLoading',
]
FACTOR = 10.0


def is_much_larger(a: float, b: float) -> bool:
    """True when *a* exceeds *b* by more than a factor of FACTOR."""
    return a > FACTOR * b


def are_roughly_equal(a: float, b: float) -> bool:
    """True when neither value is more than FACTOR times the other."""
    return not (is_much_larger(a, b) or is_much_larger(b, a))
class TestLoading(TestCase):
    """Timing-based sanity checks for ipaparser's eager-load behaviour."""
    def test_loading_time(self) -> None:
        # load() should dominate the cost; parses after it should be cheap.
        loading_time = timeit(load, number=1)
        first_parse = timeit(lambda: IPA('/abc/'), number=1)
        second_parse = timeit(lambda: IPA('/def/'), number=1)
        # Loading must be much slower than a single parse...
        self.assertTrue(is_much_larger(loading_time, first_parse))
        # ...and successive parses should cost about the same as each other.
        self.assertTrue(are_roughly_equal(first_parse, second_parse))
| 25.428571 | 69 | 0.688202 | 375 | 0.526685 | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 0.037921 |
70ef510161655a6820dfbc438830cb203101745d | 142 | py | Python | src/v2/log.py | Strangemother/project-conceptnet-graphing | 3fd1b3188088090c71c95b1a660770482123ce22 | [
"MIT"
] | null | null | null | src/v2/log.py | Strangemother/project-conceptnet-graphing | 3fd1b3188088090c71c95b1a660770482123ce22 | [
"MIT"
] | null | null | null | src/v2/log.py | Strangemother/project-conceptnet-graphing | 3fd1b3188088090c71c95b1a660770482123ce22 | [
"MIT"
] | null | null | null | import logging
logging.basicConfig(level=logging.DEBUG)
def log(*a):
    """Log all positional arguments as one space-joined INFO message."""
    logging.info(' '.join(map(str, a)))


# logging.warn is a deprecated alias; bind the supported spelling instead.
warn = logging.warning
| 14.2 | 41 | 0.647887 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.021127 |
70efcdc28513f6b7a89674d1c7aad0fab1e49771 | 511 | py | Python | Newbies/namedtuple.py | Fernal73/LearnPython3 | 5288017c0dbf95633b84f1e6324f00dec6982d36 | [
"MIT"
] | 1 | 2021-12-17T11:03:13.000Z | 2021-12-17T11:03:13.000Z | Newbies/namedtuple.py | Fernal73/LearnPython3 | 5288017c0dbf95633b84f1e6324f00dec6982d36 | [
"MIT"
] | 1 | 2020-02-05T00:14:43.000Z | 2020-02-06T09:22:49.000Z | Newbies/namedtuple.py | Fernal73/LearnPython3 | 5288017c0dbf95633b84f1e6324f00dec6982d36 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""Named tuple example."""
from collections import namedtuple
Car = namedtuple('Car', 'color mileage')
# Our new "Car" class works as expected:
MY_CAR = Car('red', 3812.4)
print(MY_CAR.color)
print(MY_CAR.mileage)
# We get a nice string repr for free:
print(MY_CAR)
try:
MY_CAR.color = 'blue'
except AttributeError as inst:
print(type(inst)) # the exception instance
print(inst.args) # arguments stored in .args
print(inst)
finally:
print("Into finally")
| 22.217391 | 53 | 0.690802 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 221 | 0.432485 |
70efd770aa4920709c5661af3d95528fefd02946 | 6,750 | py | Python | tests/components/blebox/test_config_flow.py | pcaston/core | e74d946cef7a9d4e232ae9e0ba150d18018cfe33 | [
"Apache-2.0"
] | 1 | 2021-07-08T20:09:55.000Z | 2021-07-08T20:09:55.000Z | tests/components/blebox/test_config_flow.py | pcaston/core | e74d946cef7a9d4e232ae9e0ba150d18018cfe33 | [
"Apache-2.0"
] | 47 | 2021-02-21T23:43:07.000Z | 2022-03-31T06:07:10.000Z | tests/components/blebox/test_config_flow.py | OpenPeerPower/core | f673dfac9f2d0c48fa30af37b0a99df9dd6640ee | [
"Apache-2.0"
] | null | null | null | """Test Open Peer Power config flow for BleBox devices."""
from unittest.mock import DEFAULT, AsyncMock, PropertyMock, patch
import blebox_uniapi
import pytest
from openpeerpower import config_entries, data_entry_flow
from openpeerpower.components.blebox import config_flow
from openpeerpower.setup import async_setup_component
from .conftest import mock_config, mock_only_feature, setup_product_mock
def create_valid_feature_mock(path="openpeerpower.components.blebox.Products"):
    """Return a valid, complete BleBox cover feature mock.

    ``path`` selects which Products reference gets patched (the integration
    module by default, or the config-flow module for user-flow tests).
    """
    feature = mock_only_feature(
        blebox_uniapi.cover.Cover,
        unique_id="BleBox-gateBox-1afe34db9437-0.position",
        full_name="gateBox-0.position",
        device_class="gate",
        state=0,
        async_update=AsyncMock(),
        current=None,
    )
    product = setup_product_mock("covers", [feature], path)
    # Fill in the product metadata the config flow reads when creating an entry.
    type(product).name = PropertyMock(return_value="My gate controller")
    type(product).model = PropertyMock(return_value="gateController")
    type(product).type = PropertyMock(return_value="gateBox")
    type(product).brand = PropertyMock(return_value="BleBox")
    type(product).firmware_version = PropertyMock(return_value="1.23")
    type(product).unique_id = PropertyMock(return_value="abcd0123ef5678")
    return feature
@pytest.fixture(name="valid_feature_mock")
def valid_feature_mock_fixture():
    """Return a valid BleBox feature mock patched into the integration module."""
    return create_valid_feature_mock()
@pytest.fixture(name="flow_feature_mock")
def flow_feature_mock_fixture():
    """Return a BleBox feature mock patched into the config-flow module."""
    return create_valid_feature_mock(
        "openpeerpower.components.blebox.config_flow.Products"
    )
async def test_flow_works(opp, valid_feature_mock, flow_feature_mock):
    """Test the happy path: user form is shown, then an entry is created."""
    result = await opp.config_entries.flow.async_init(
        config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    # First step with no data must present the user form.
    assert result["type"] == "form"
    assert result["step_id"] == "user"
    result = await opp.config_entries.flow.async_init(
        config_flow.DOMAIN,
        context={"source": config_entries.SOURCE_USER},
        data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80},
    )
    # Supplying host/port must create an entry titled after the product name.
    assert result["type"] == "create_entry"
    assert result["title"] == "My gate controller"
    assert result["data"] == {
        config_flow.CONF_HOST: "172.2.3.4",
        config_flow.CONF_PORT: 80,
    }
@pytest.fixture(name="product_class_mock")
def product_class_mock_fixture():
    """Yield a patcher for the Products class used by the config flow."""
    path = "openpeerpower.components.blebox.config_flow.Products"
    patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True)
    yield patcher
async def test_flow_with_connection_failure(opp, product_class_mock):
    """Test that a connection error surfaces as the cannot_connect form error."""
    with product_class_mock as products_class:
        products_class.async_from_host = AsyncMock(
            side_effect=blebox_uniapi.error.ConnectionError
        )
        result = await opp.config_entries.flow.async_init(
            config_flow.DOMAIN,
            context={"source": config_entries.SOURCE_USER},
            data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80},
        )
        assert result["errors"] == {"base": "cannot_connect"}
async def test_flow_with_api_failure(opp, product_class_mock):
    """Test that a generic library error surfaces as cannot_connect."""
    with product_class_mock as products_class:
        products_class.async_from_host = AsyncMock(
            side_effect=blebox_uniapi.error.Error
        )
        result = await opp.config_entries.flow.async_init(
            config_flow.DOMAIN,
            context={"source": config_entries.SOURCE_USER},
            data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80},
        )
        assert result["errors"] == {"base": "cannot_connect"}
async def test_flow_with_unknown_failure(opp, product_class_mock):
    """Test that an unexpected exception surfaces as the unknown form error."""
    with product_class_mock as products_class:
        products_class.async_from_host = AsyncMock(side_effect=RuntimeError)
        result = await opp.config_entries.flow.async_init(
            config_flow.DOMAIN,
            context={"source": config_entries.SOURCE_USER},
            data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80},
        )
        assert result["errors"] == {"base": "unknown"}
async def test_flow_with_unsupported_version(opp, product_class_mock):
    """Test that an unsupported device firmware surfaces as unsupported_version."""
    with product_class_mock as products_class:
        products_class.async_from_host = AsyncMock(
            side_effect=blebox_uniapi.error.UnsupportedBoxVersion
        )
        result = await opp.config_entries.flow.async_init(
            config_flow.DOMAIN,
            context={"source": config_entries.SOURCE_USER},
            data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80},
        )
        assert result["errors"] == {"base": "unsupported_version"}
async def test_async_setup(opp):
    """Test async_setup (for coverage)."""
    assert await async_setup_component(opp, "blebox", {"host": "172.2.3.4"})
    await opp.async_block_till_done()
async def test_already_configured(opp, valid_feature_mock):
    """Test that the same device address cannot be added twice."""
    config = mock_config("172.2.3.4")
    config.add_to_opp(opp)
    await opp.config_entries.async_setup(config.entry_id)
    await opp.async_block_till_done()
    # Starting a second flow for the same host must abort.
    result = await opp.config_entries.flow.async_init(
        config_flow.DOMAIN,
        context={"source": config_entries.SOURCE_USER},
        data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80},
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "address_already_configured"
async def test_async_setup_entry(opp, valid_feature_mock):
    """Test async_setup_entry (for coverage)."""
    config = mock_config()
    config.add_to_opp(opp)
    assert await opp.config_entries.async_setup(config.entry_id)
    await opp.async_block_till_done()
    assert opp.config_entries.async_entries() == [config]
    assert config.state is config_entries.ConfigEntryState.LOADED
async def test_async_remove_entry(opp, valid_feature_mock):
    """Test async_remove_entry (for coverage)."""
    config = mock_config()
    config.add_to_opp(opp)
    assert await opp.config_entries.async_setup(config.entry_id)
    await opp.async_block_till_done()
    assert await opp.config_entries.async_remove(config.entry_id)
    await opp.async_block_till_done()
    # After removal the entry registry is empty and the entry is unloaded.
    assert opp.config_entries.async_entries() == []
    assert config.state is config_entries.ConfigEntryState.NOT_LOADED
| 34.974093 | 81 | 0.707556 | 0 | 0 | 232 | 0.03437 | 672 | 0.099556 | 4,729 | 0.700593 | 1,343 | 0.198963 |
70f08443e2af2ec2ef573ac21c7eed5de4719665 | 3,521 | py | Python | train.py | turbohiro/IDCard_detection | c7c4949bdbe00cb759a41bf8d7c6f4ddfe704213 | [
"MIT"
] | null | null | null | train.py | turbohiro/IDCard_detection | c7c4949bdbe00cb759a41bf8d7c6f4ddfe704213 | [
"MIT"
] | null | null | null | train.py | turbohiro/IDCard_detection | c7c4949bdbe00cb759a41bf8d7c6f4ddfe704213 | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
import os
import cv2
import glob
import seaborn as sns
from PIL import Image
import glob
import tensorflow as tf
import model
# Pin training to GPU 0 (must be set before TensorFlow initializes CUDA).
os.environ['CUDA_VISIBLE_DEVICES']='0'
# Root of the ID-card sample dataset; logs/checkpoints are written under it.
dataDir = '/data/jupyter/libin713/sample_IDCard'
def read_and_decode(filename,batch_size): # read the training TFRecords file
    """Build a TF1 input pipeline that yields batches of (image, label).

    Returns two graph tensors: an image batch of shape
    [batch_size, 500, 500, 3] scaled to [-0.5, 0.5], and an int32 label
    batch of shape [batch_size].
    """
    filename_queue = tf.train.string_input_producer([filename]) # create a filename queue
    reader = tf.TFRecordReader()
    _, serialized_example = reader.read(filename_queue) # returns (key, serialized record)
    features = tf.parse_single_example(serialized_example,
                                       features={
                                           'label': tf.FixedLenFeature([], tf.int64),
                                           'img_raw': tf.FixedLenFeature([], tf.string),
                                       }) # extract the image data and label
    img = tf.decode_raw(features['img_raw'], tf.uint8)
    img = tf.reshape(img, [500, 500, 3]) # reshape into a 500x500 3-channel image
    img = tf.cast(img, tf.float32) * (1. / 255) - 0.5 # normalize pixels to [-0.5, 0.5]
    label = tf.cast(features['label'], tf.int32) # emit the label tensor
    img_batch,label_batch = tf.train.batch([img,label],batch_size = batch_size,num_threads = 64,capacity = 2000)
    return img_batch,tf.reshape(label_batch,[batch_size])
N_CLASSES = 3  # number of ID-card classes to classify
IMG_W = 500 # resize the image, if the input image is too large, training will be very slow.
IMG_H = 500
BATCH_SIZE = 16
CAPACITY = 2000  # input queue capacity
MAX_STEP = 10000 # with current parameters, it is suggested to use MAX_STEP>10k
learning_rate = 0.0001 # with current parameters, it is suggested to use learning rate<0.0001
def run_training1():
    """Train the model on the TFRecords dataset using the TF1 queue-runner API.

    Logs loss/accuracy every 50 steps, writes summaries for TensorBoard,
    and checkpoints every 2000 steps (and at the final step) under
    ``dataDir/logs/recordstrain/``.
    """
    logs_train_dir = dataDir + '/logs/recordstrain/'
    #
    # train, train_label = input_data.get_files(train_dir)
    tfrecords_file = 'Idcard_train.tfrecords'
    train_batch, train_label_batch = read_and_decode(tfrecords_file, batch_size=BATCH_SIZE)
    train_batch = tf.cast(train_batch,dtype=tf.float32)
    train_label_batch = tf.cast(train_label_batch,dtype=tf.int64)
    # Graph construction: forward pass, loss, optimizer, and accuracy metric.
    train_logits = model.inference(train_batch, BATCH_SIZE, N_CLASSES)
    train_loss = model.losses(train_logits, train_label_batch)
    train_op = model.trainning(train_loss, learning_rate)
    train__acc = model.evaluation(train_logits, train_label_batch)
    summary_op = tf.summary.merge_all()
    sess = tf.Session()
    train_writer = tf.summary.FileWriter(logs_train_dir, sess.graph)
    saver = tf.train.Saver()
    sess.run(tf.global_variables_initializer())
    # Queue runners feed the input pipeline built in read_and_decode.
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)
    try:
        for step in np.arange(MAX_STEP):
            if coord.should_stop():
                break
            _, tra_loss, tra_acc = sess.run([train_op, train_loss, train__acc])
            if step % 50 == 0:
                print('Step %d, train loss = %.2f, train accuracy = %.2f%%' %(step, tra_loss, tra_acc*100.0))
                summary_str = sess.run(summary_op)
                train_writer.add_summary(summary_str, step)
            if step % 2000 == 0 or (step + 1) == MAX_STEP:
                checkpoint_path = os.path.join(logs_train_dir, 'model.ckpt')
                saver.save(sess, checkpoint_path, global_step=step)
    except tf.errors.OutOfRangeError:
        print('Done training -- epoch limit reached')
    finally:
        # Always stop the queue threads, even on error.
        coord.request_stop()
    coord.join(threads)
    sess.close()
# Kick off training when the module is executed.
run_training1()
| 39.561798 | 112 | 0.648963 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 717 | 0.19878 |
70f12857a34e97687f3b8515a3ecdf1b8631fbee | 779 | py | Python | zeus/brewery/models.py | sdivakarrajesh/Zeus | 7a6ddd3d0375f3a2f131f6fa46539faafbd73766 | [
"MIT"
] | null | null | null | zeus/brewery/models.py | sdivakarrajesh/Zeus | 7a6ddd3d0375f3a2f131f6fa46539faafbd73766 | [
"MIT"
] | 5 | 2021-03-19T01:10:37.000Z | 2021-09-22T18:47:10.000Z | zeus/brewery/models.py | sdivakarrajesh/Zeus | 7a6ddd3d0375f3a2f131f6fa46539faafbd73766 | [
"MIT"
] | null | null | null | from django.db import models
# Create your models here.
class DrinkType(models.Model):
    """Category label that drinks can be tagged with."""

    # Timestamps are auto-managed by Django on insert/update.
    created = models.DateTimeField(auto_now_add=True, blank=True, null=True)
    updated = models.DateTimeField(auto_now=True, blank=True, null=True)
    title = models.CharField(max_length=100, null=True, blank=True)

    def __str__(self):
        # Fall back to '' so admin/listing views never render None.
        return self.title or ''
class Drink(models.Model):
    """A drink with an optional image URL and zero or more DrinkType tags."""

    created = models.DateTimeField(auto_now_add=True, blank=True, null=True)
    updated = models.DateTimeField(auto_now=True, blank=True, null=True)
    name = models.CharField(max_length=300, null=True, blank=True)
    image = models.URLField(blank=True, null=True)
    drink_type = models.ManyToManyField(DrinkType, blank=True)

    def __str__(self):
        # Fall back to '' so admin/listing views never render None.
        return self.name or ''
70f1ae00dfc2656288e9dc5e008f66a0f5c57eeb | 18,127 | py | Python | src/IntraCodec.py | Joao-Nogueira-gh/video-compressin | 92429f85333d16b0e192532e520653dd5e8936e3 | [
"MIT"
] | null | null | null | src/IntraCodec.py | Joao-Nogueira-gh/video-compressin | 92429f85333d16b0e192532e520653dd5e8936e3 | [
"MIT"
] | null | null | null | src/IntraCodec.py | Joao-Nogueira-gh/video-compressin | 92429f85333d16b0e192532e520653dd5e8936e3 | [
"MIT"
] | 1 | 2020-01-01T07:20:04.000Z | 2020-01-01T07:20:04.000Z | ## @class IntraCodec
# Module designed for encoding and decoding YUV videos using the intra-frame method
# That is considering adjacent pixels in the same frame and encoding their errors
# @author Tiago Melo 89005
# @author João Nogueira 89262
import numpy as np
import math
from Golomb import *
from Bitstream import *
class IntraCodec:

    ## Initialization function
    # @param[in] filename Path of the file to read
    # @param[in] encoded A flag used to indicate if the video in the given path was encoded by this same class
    # @param[in] limitFrames Optional parameter to limit the number of frames considered
    # Initializing and setting up some useful parameters and flags
    def __init__(self, filename, encoded=False, limitFrames=None):
        self.vid = filename
        self.encoding='utf-8'
        # Array of arrays containing each frame's components
        self.frameY=[]
        self.frameV=[]
        self.frameU=[]
        self.encoded=False
        self.quantizationStep=None
        self.colorSpace=None
        # Ignore uint8 overflow warnings during prediction arithmetic.
        np.seterr(over='ignore')
        # Calls the appropriate reader on initialization.
        if not encoded:
            self.read_video()
        else:
            self.encoded=True
            self.read_encoded_video(limitFrames=limitFrames)

    ## read_video function
    # Reads YUV video information from file, storing all its data in our structures, calculating different components lengths and shapes
    def read_video(self):
        f=open(self.vid,"rb")
        c=1
        for line in f:
            # Processing header (first line of the YUV file).
            if c==1:
                line=line.decode(self.encoding)
                self.header=line.strip()
                self.handleHeader()
            # Rest of the video: read one frame's worth of Y, U and V bytes.
            # NOTE(review): iterating the file by line while also calling
            # f.read() relies on the frame payload containing no newlines
            # mid-read — TODO confirm against the input format.
            if c>=2:
                frameY=f.read(self.yLength)
                frameU=f.read(self.uLength)
                frameV=f.read(self.vLength)
                y=np.frombuffer(frameY, dtype=np.uint8)
                u=np.frombuffer(frameU, dtype=np.uint8)
                v=np.frombuffer(frameV, dtype=np.uint8)
                y=y.reshape(self.shape)
                u=u.reshape(self.other_shape)
                v=v.reshape(self.other_shape)
                self.frameY+=[y]
                self.frameU+=[u]
                self.frameV+=[v]
            c+=1
        self.TotalFrames=len(self.frameY)
        f.close()

    ## read_encoded_video function
    # @param[in] limitFrames Optional parameter to limit the number of frames to be decoded
    # Reads video information (encoded by this class) from file
    # Starts by decoding and interpreting the header, followed by decoding of all the pixel errors and recreating the original pixel based on the predictor that was used
    def read_encoded_video(self,limitFrames=None):
        bs=BitStream(self.vid,'READ')
        # Header is length-prefixed: first byte is the header length in chars.
        headerlen=bs.read_n_bits(8)
        chars=[]
        for i in range(0,headerlen*8):
            chars.append(str(bs._readbit()))
        res=''.join(chars)
        self.header=self.decode_binary_string(res)
        # Parse width/height/color space/Golomb parameter from the header.
        self.handleHeader()
        g=Golomb(self.golombParam)
        # Number of remainder bits per Golomb codeword (M assumed power of 2).
        bitsResto=int(math.log(self.golombParam,2))
        if limitFrames==None:
            l=self.TotalFrames
        else:
            l=limitFrames
        #
        self.frameY=[None]*l
        self.frameU=[None]*l
        self.frameV=[None]*l
        #
        for frame in range(0,l):
            print('decoding frame',frame)
            y=np.zeros(shape=self.shape,dtype=np.uint8)
            u=np.zeros(shape=self.other_shape,dtype=np.uint8)
            v=np.zeros(shape=self.other_shape,dtype=np.uint8)
            for line in range(0, self.height):
                for column in range(0,self.width):
                    # Decode the (Y,U,V) error triple, then reconstruct the
                    # pixel by adding it to the JPEG-LS prediction.
                    pixel=self.decodeWithBitstream(3,bs,g,bitsResto)
                    a=self.getYUVPixel(frame,line,column-1, resized=False)
                    c=self.getYUVPixel(frame,line-1,column-1, resized=False)
                    b=self.getYUVPixel(frame,line-1,column, resized=False)
                    x=self.predict(a,c,b)
                    pixel=self.sum(x,pixel)
                    pixel=tuple(pixel)
                    # NOTE(review): this reuses the name `l`, clobbering the
                    # frame-count variable above; harmless only because the
                    # range() was already evaluated.
                    l,c=self.adjustCoord(line,column)
                    y[line,column]=pixel[0]
                    u[l,c]=pixel[1]
                    v[l,c]=pixel[2]
            #
            self.frameY[frame]=y
            self.frameU[frame]=u
            self.frameV[frame]=v
            # NOTE(review): the frame was already stored by index above; the
            # three appends below add a duplicate of every frame, leaving the
            # lists twice as long as expected — likely a bug, confirm before
            # relying on len(frameY) anywhere.
            self.frameY+=[y]
            self.frameU+=[u]
            self.frameV+=[v]
        #
        bs.close()

    ## handleHeader function
    # Interpreting the header of the file, containing width, height, frames per second and color space, assigning them to class variables
    # This header can also contain other parameters added while encoding, such as the parameter for Golomb and the quantization steps used for lossy coding
    def handleHeader(self):
        print(self.header)
        fields=self.header.split(" ")
        for field in fields:
            c=field[0]
            if c=='W':
                self.width=int(field[1:])
            elif c=='H':
                self.height=int(field[1:])
            elif c=='F':
                # NOTE(review): only the first two digits are parsed, so a
                # 3-digit fps (e.g. 120) would be truncated — TODO confirm.
                self.fps=int(field[1:3])
            elif c=='C':
                self.colorSpace=int(field[1:])
            elif c=='G':
                self.golombParam=int(field[-1:])
                self.encoded=True
            elif c=='z':
                self.TotalFrames=int(field[1:])
            elif c=='q':
                # Quantization steps come as "qY:U:V".
                qlist=field[1:]
                qsteps=qlist.split(':')
                self.quantizationStep=[int(qsteps[0]),int(qsteps[1]),int(qsteps[2])]
        self.computeShape()
        print('width=',self.width, 'height=',self.height, self.fps, self.colorSpace, self.frameLength)
        if self.encoded:
            print('g=',self.golombParam, 'totalframes=',self.TotalFrames)
            if self.quantizationStep!=None:
                print('q=',self.quantizationStep)

    ## adjustCoord function
    # @param[in] line Line where the pixel is located
    # @param[in] column Column where the pixel is located
    # @param[out] line Adjusted line number
    # @param[out] column Adjusted column number
    # Adjusts given line and column considering the different array shapes in different color spaces
    # Useful when assigning new values to a certain pixel position
    def adjustCoord(self,line,column):
        if self.colorSpace=='4:2:2':
            # Chroma is subsampled horizontally by 2.
            c=math.floor((column/2))
            return line,c
        elif self.colorSpace=='4:2:0':
            # Chroma is subsampled by 2 in both directions.
            c=math.floor((column/2))
            l=math.floor((line/2))
            return l,c
        else:
            return line,column

    ## computeShape function
    # Calculating array shapes for YUV components based on the color space
    def computeShape(self):
        if self.colorSpace==444:
            self.colorSpace='4:4:4'
            self.frameLength=int(self.width*self.height*3)
            self.yLength=self.uLength=self.vLength=int(self.frameLength/3)
            self.shape = (int(self.height), self.width)
            self.other_shape = (int(self.height), self.width)
        elif self.colorSpace==422:
            self.colorSpace='4:2:2'
            self.frameLength=int(self.width*self.height*2)
            self.yLength=int(self.frameLength/2)
            self.vLength=self.uLength=int(self.frameLength/4)
            self.shape = (int(self.height), self.width)
            self.other_shape = (int(self.height), int(self.width/2))
        else:
            self.colorSpace='4:2:0'
            self.frameLength=int(self.width*self.height*3/2)
            self.yLength=int(self.frameLength*(2/3))
            self.uLength=self.vLength=int(self.frameLength*(1/6))
            self.shape = (int(self.height), self.width)
            self.other_shape = (int(self.height/2), int(self.width/2))

    ## getYUVPixel function
    # @param[in] frame Number of the frame from which to read the pixel from
    # @param[in] line Line in which the pixel is located
    # @param[in] column Column in which the pixel is located
    # @param[in] resized A flag used to indicate if the arrays have been resized or not
    # @param[out] p The pixel tuple in YUV format
    # Returns 0,0,0 for non existent pixels, useful for the Codecs
    # Adjust line and column numbers based on the color space (and array shapes)
    def getYUVPixel(self, frame, line, column, resized):
        yf=self.frameY[frame]
        uf=self.frameU[frame]
        vf=self.frameV[frame]
        if resized==False:
            if self.colorSpace=='4:2:2':
                c=math.floor((column/2))
                if line<0 or column<0 or c<0:
                    return 0,0,0
                p=yf[line,column], uf[line,c], vf[line,c]
            elif self.colorSpace=='4:2:0':
                c=math.floor((column/2))
                l=math.floor((line/2))
                if line<0 or column<0 or c<0 or l<0:
                    return 0,0,0
                p=yf[line,column], uf[l,c], vf[l,c]
            else:
                if line<0 or column<0:
                    return 0,0,0
                p=yf[line,column], uf[line,column], vf[line,column]
        else:
            if line<0 or column<0:
                return 0,0,0
            p=yf[line,column], uf[line,column], vf[line,column]
        return p

    ## updateYUVPixel function
    # @param[in] compNumb Number of the pixel component to be changed (0=Y,1=U,2=V)
    # @param[in] frame Number of the frame where the pixel is located
    # @param[in] line Line in which the pixel is located
    # @param[in] column Column in which the pixel is located
    # @param[in] value New value of the pixel's component
    # Used for avoiding error propagation in lossy coding
    def updateYUVPixel(self,compNumb,frame,line,column,value):
        l,c=self.adjustCoord(line,column)
        if compNumb==0:
            rf=self.frameY[frame]
            # Frames come from np.frombuffer and are read-only by default.
            rf.setflags(write=1)
            rf[line,column]=value
        elif compNumb==1:
            rf=self.frameU[frame]
            rf.setflags(write=1)
            rf[l,c]=value
        else:
            rf=self.frameV[frame]
            rf.setflags(write=1)
            rf[l,c]=value

    ## encode_video function
    # @param[in] filename Path of file to write with the encoded video information
    # @param[in] golombparam Golomb's parameter M (factor)
    # @param[in] q Optional parameter for specifying each components quantization steps for lossy coding
    # @param[in] limitFrames Optional parameter for limiting number of frames to encode
    # Starts by encoding the header, passing additional parameters such as the Golomb factor
    # Proceeds to encode each pixel, by calculating each component's error according to the predictor function
    def encode_video(self, filename, golombparam, q=None, limitFrames=None):
        if limitFrames==None:
            l=self.TotalFrames
        else:
            l=limitFrames
        g=Golomb(golombparam)
        bs=BitStream(filename,'WRITE')
        # Header is length-prefixed (one byte) so the decoder knows how many
        # characters to read back.
        header='ENCODED '+self.header+' Golomb'+str(golombparam)+' z'+str(self.TotalFrames)
        if q!=None:
            header+=' q'+str(q[0])+':'+str(q[1])+':'+str(q[2])
            self.quantizationStep=q
        headerlen=len(header)
        bs.write_n_bits(headerlen,8)
        bs.writeTxt(header)
        for frame in range(0,l):
            print('encoding frame',frame)
            for line in range(0,self.height):
                for column in range(0,self.width):
                    # Encode the prediction error of each pixel against the
                    # JPEG-LS predictor of its three causal neighbours.
                    p=self.getYUVPixel(frame,line,column, resized=False)
                    a=self.getYUVPixel(frame,line,column-1, resized=False)
                    c=self.getYUVPixel(frame,line-1,column-1, resized=False)
                    b=self.getYUVPixel(frame,line-1,column, resized=False)
                    x=self.predict(a,c,b)
                    erro=self.diff(p,x)
                    self.encodeWithBitstream(erro,bs,g,pixel=p,frame=frame,line=line,column=column)
        bs.close()

    ## predict function
    # @param[in] a Adjacent pixel in position (line,col-1)
    # @param[in] c Adjacent pixel in position (line-1,col-1)
    # @param[in] b Adjacent pixel in position (line-1,col)
    # @param[out] ret Most similar pixel
    # The returned pixel is calculated using the JPEG-LS non-linear predictor formula
    def predict(self,a,c,b):
        y=[int(a[0]),int(c[0]),int(b[0])]
        u=[int(a[1]),int(c[1]),int(b[1])]
        v=[int(a[2]),int(c[2]),int(b[2])]
        l=[y]+[u]+[v]
        ret=[]
        for component in l:
            # component = [a, c, b]; classic LOCO-I median edge detector.
            if component[1]>=max(component[0],component[2]):
                x=min(component[0],component[2])
            elif component[1]<=min(component[0],component[2]):
                x=max(component[0],component[2])
            else:
                x=component[0]+component[2]-component[1]
            ret.append(x)
        return ret

    ## diff function
    # @param[in] p First pixel
    # @param[in] x Second pixel
    # @param[out] r Pixel result of the difference between the two pixels
    # Calculates the result pixel by calculating the difference between each yuv component
    def diff(self,p,x):
        ey=int(p[0])-int(x[0])
        eu=int(p[1])-int(x[1])
        ev=int(p[2])-int(x[2])
        return(ey,eu,ev)

    ## sum function
    # @param[in] p First pixel
    # @param[in] x Second pixel
    # @param[out] r Pixel result of the sum between the two pixels
    # Calculates the result pixel by calculating the sum between each yuv component
    def sum(self,p,x):
        ey=p[0]+x[0]
        eu=p[1]+x[1]
        ev=p[2]+x[2]
        return(ey,eu,ev)

    ## printPixels function
    # Function for printing pixels, useful during development
    def printPixels(self):
        l=self.TotalFrames
        l=1
        h=self.height
        #h=20
        w=self.width
        #w=20
        for frame in range(0,l):
            #print('processing frame',frame)
            for line in range(0,h):
                for column in range(0,w):
                    # Only dump the last 10 pixels of the first line.
                    if line==0 and w-10<=column<w:
                        p=self.getYUVPixel(frame,line,column, resized=False)
                        print(p, end=';')
            #print('')

    ## decode_binary_string function
    # @param[in] s String of '0'/'1' characters
    # @param[out] r Decoded ASCII string (8 bits per character)
    # Additional function to decode binary strings
    def decode_binary_string(self,s):
        return ''.join(chr(int(s[i*8:i*8+8],2)) for i in range(len(s)//8))

    ## getFrames function
    # @param[out] frames The data structures with all the frames of each component
    # Useful to check data integrity
    def getFrames(self):
        return self.frameY, self.frameU,self.frameV

    ## encodeWithBitStream function
    # @param[in] value Value to be encoded
    # @param[in] bs Bitstream class object
    # @param[in] g Golomb class object
    # @param[in] pixel Current pixel values being encoded, used for lossy coding
    # @param[in] frame Frame where the pixel being encoded is located
    # @param[in] line Line where the pixel being encoded is located
    # @param[in] column Column where the pixel being encoded is located
    # Switches the value to be encoded to positive, writing a 1 or 0 according to the original value
    # If using lossy coding functionality, divides color component by quantization step and updates pixel value
    # Proceeds to write the encoded value by Golomb with the Bitstream
    def encodeWithBitstream(self, value,bs,g, pixel=None, frame=None, line=None, column=None):
        for i in range(0,len(value)):
            # One sign bit per component, then the Golomb code of |value|.
            if value[i]<0:
                n=value[i]*-1
                bs.writebits(1,1)
            else:
                bs.writebits(0,1)
                n=value[i]
            if self.quantizationStep!=None and self.quantizationStep[i]!=0:
                #newValue=pixel[i]+(n)
                n=math.floor(n/self.quantizationStep[i])
                # NOTE(review): the error-propagation update below is disabled.
                #if line!=0 and column!=0:
                    #self.updateYUVPixel(i,frame,line,column,newValue)
            n=g.encode(n)
            bs.writebits(int(n,2),len(n))

    ## decodeWithBitStream function
    # @param[in] len Number of values to read
    # @param[in] bs Bitstream class object
    # @param[in] g Golomb class object
    # @param[in] bitsResto Number of bits of the remainder = log(factor,2)
    # @param[out] pixel Decoded value
    # Starts by reading one bit 0 or 1, determing if number was negative
    # Reads the bits from the Bitstream and decodes them with Golomb
    # Multiplies by quantization step if using lossy coding
    # NOTE(review): the parameter name `len` shadows the builtin — consider
    # renaming it in a future change.
    def decodeWithBitstream(self, len,bs,g,bitsResto):
        pixel=[]
        for i in range(0,len):
            # Sign bit first, then unary quotient terminated by '0', then the
            # fixed-size remainder.
            ay=bs.read_n_bits(1)
            seq=''
            while True:
                r=str(bs.read_n_bits(1))
                seq+=r
                if r=='0':
                    break
            seq+=str(bs.readbits(bitsResto))
            comp=g.decode(seq)
            if ay==1:
                comp=comp*-1
            if self.quantizationStep!=None and self.quantizationStep[i]!=0:
                comp=comp*self.quantizationStep[i]
            pixel.append(comp)
        return pixel

    ## verifyData function
    # @param[in] video Class containing video for comparison
    # @param[in] numberoframes Limits number of frames to check
    # Compares data between two videos
    def verifyData(self,video,numberoframes):
        m1,m2,m3=self.getFrames()
        m4,m5,m6=video.getFrames()
        for i in range(0,numberoframes):
            if (np.array_equal(m1[i],m4[i])):
                print('Y-',i,'correct')
        for i in range(0,numberoframes):
            if (np.array_equal(m2[i],m5[i])):
                print('U-',i,'correct')
        for i in range(0,numberoframes):
            if (np.array_equal(m3[i],m6[i])):
                print('V-',i,'correct')
| 38.242616 | 169 | 0.579964 | 17,802 | 0.982017 | 0 | 0 | 0 | 0 | 0 | 0 | 6,160 | 0.339806 |
70f1cf6cbe1d0d3ea9ded5e933650823891cd848 | 1,015 | py | Python | main.py | kylecorry31/lifx_effects | ea97e3274233fb844c416d4d5a95d06a8a1c7cde | [
"MIT"
] | null | null | null | main.py | kylecorry31/lifx_effects | ea97e3274233fb844c416d4d5a95d06a8a1c7cde | [
"MIT"
] | 1 | 2021-11-13T20:35:21.000Z | 2021-11-13T20:35:21.000Z | main.py | kylecorry31/lifx_effects | ea97e3274233fb844c416d4d5a95d06a8a1c7cde | [
"MIT"
] | null | null | null | from effects.keyboard_effect import KeyboardEffect
from utils.lights import get_lights
from effects.candle_effect import CandleEffect
from effects.phasma_hunt_effect import PhasmaHuntEffect
from effects.audio_spectrum_effect import AudioSpectrumEffect
from effects.audio_amplitude_effect import AudioAmplitudeEffect
from effects.midi_effect import MidiEffect
import time
lights = get_lights(3)
try:
# PhasmaHuntEffect(250).run(lights)
# CandleEffect(250, 45).run(lights)
# KeyboardEffect(['a', 'd'], 200).run(lights)
# MidiEffect('music/2.mid', [0, 0], 2).run(lights)
# MidiEffect('music/6.mid', [4, 9], 1, True).run(lights)
# AudioSpectrumEffect("music/1.wav", 1, bins=[2, 2], num_bins=1024).run(lights)
# AudioSpectrumEffect("music/2.wav", 1, bins=[14, 15, 16], num_bins=1024).run(lights)
AudioAmplitudeEffect("microphone", 3).run(lights)
finally:
time.sleep(0.2)
for light in lights:
light.on(255)
time.sleep(1)
for light in lights:
light.on(255) | 37.592593 | 89 | 0.727094 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 397 | 0.391133 |
70f6a101e87e43f1c6e0155f7b03c05850afb19f | 250 | py | Python | arts_localisation/beam_models/__init__.py | loostrum/arts_localisation | e3f3d1c8c0f4ee66d731634294b7b126ee28be1c | [
"Apache-2.0"
] | 1 | 2020-08-19T11:52:04.000Z | 2020-08-19T11:52:04.000Z | arts_localisation/beam_models/__init__.py | loostrum/arts_localisation | e3f3d1c8c0f4ee66d731634294b7b126ee28be1c | [
"Apache-2.0"
] | 10 | 2021-02-03T08:52:51.000Z | 2022-03-04T10:03:56.000Z | arts_localisation/beam_models/__init__.py | loostrum/arts_localisation | e3f3d1c8c0f4ee66d731634294b7b126ee28be1c | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
from .beamformer import BeamFormer
from .compound_beam import CompoundBeam
from .sb_generator import SBGenerator
from .simulate_sb_pattern import SBPattern
__all__ = ['BeamFormer', 'CompoundBeam', 'SBPattern', 'SBGenerator']
| 25 | 68 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 71 | 0.284 |
70f6b613f46f9572a127a9da271c9e2dc28fc894 | 3,090 | py | Python | gfauto/gfauto/test_util.py | KishkinJ10/graphicsfuzz | 673f9bba2b02608b26edef786095007949c00675 | [
"Apache-2.0"
] | 519 | 2018-09-25T12:55:31.000Z | 2022-03-30T17:26:53.000Z | gfauto/gfauto/test_util.py | KishkinJ10/graphicsfuzz | 673f9bba2b02608b26edef786095007949c00675 | [
"Apache-2.0"
] | 544 | 2018-09-25T08:54:28.000Z | 2022-03-07T16:04:51.000Z | gfauto/gfauto/test_util.py | KishkinJ10/graphicsfuzz | 673f9bba2b02608b26edef786095007949c00675 | [
"Apache-2.0"
] | 139 | 2018-09-26T05:11:20.000Z | 2022-02-15T09:14:27.000Z | # -*- coding: utf-8 -*-
# Copyright 2019 The GraphicsFuzz Project Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test utility module.
A test directory contains a Test proto stored in "source/test.json", the reference and variant shader jobs, and various
other files, including results.
This module is used to read Test proto files and get various paths that exist in test directories.
"""
from pathlib import Path
from gfauto import proto_util, util
from gfauto.test_pb2 import Test
TEST_METADATA = "test.json"
REFERENCE_DIR = "reference"
VARIANT_DIR = "variant"
SHADER_JOB = "shader.json"
SHADER_JOB_RESULT = "shader.info.json"
def get_source_dir(test_dir: Path) -> Path:
return test_dir / "source"
def get_metadata_path(test_dir: Path) -> Path:
return get_metadata_path_from_source_dir(get_source_dir(test_dir))
def get_metadata_path_from_source_dir(source_dir: Path) -> Path:
return source_dir / TEST_METADATA
def metadata_write(metadata: Test, test_dir: Path) -> Path:
metadata_write_to_path(metadata, get_metadata_path(test_dir))
return test_dir
def metadata_read(test_dir: Path) -> Test:
return metadata_read_from_path(get_metadata_path(test_dir))
def metadata_read_from_source_dir(source_dir: Path) -> Test:
return metadata_read_from_path(get_metadata_path_from_source_dir(source_dir))
def metadata_read_from_path(test_metadata_path: Path) -> Test:
text = util.file_read_text(test_metadata_path)
result = Test()
proto_util.json_to_message(text, result)
return result
def metadata_write_to_path(metadata: Test, test_metadata_path: Path) -> Path:
text = proto_util.message_to_json(metadata)
util.file_write_text(test_metadata_path, text)
return test_metadata_path
def get_shader_job_path(test_dir: Path, shader_name: str) -> Path:
return test_dir / "source" / shader_name / SHADER_JOB
def get_device_directory(test_dir: Path, device_name: str) -> Path:
return test_dir / "results" / device_name
def get_results_directory(test_dir: Path, device_name: str) -> Path:
return get_device_directory(test_dir, device_name) / "result"
def get_reductions_dir(test_dir: Path, device_name: str) -> Path:
    """Return the "reductions" directory for *device_name*."""
    device_dir = get_device_directory(test_dir, device_name)
    return device_dir / "reductions"
def get_reduced_test_dir(test_dir: Path, device_name: str, reduction_name: str) -> Path:
    """Return the directory of one named reduction for *device_name*."""
    reductions = get_reductions_dir(test_dir, device_name)
    return reductions / reduction_name
def get_reduction_work_directory(reduced_test_dir: Path, name_of_shader: str) -> Path:
    """Return the scratch directory used while reducing one shader."""
    return reduced_test_dir.joinpath("reduction_work", name_of_shader)
| 32.1875 | 119 | 0.772168 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,009 | 0.326537 |
70f70a663eddf66eb95d76cfae9e8ce1ca181a38 | 2,266 | py | Python | core.py | iBug/OmniAE | dcaa3c671a36aeef1c7ffff79a859daaa6352fa0 | [
"MIT"
] | null | null | null | core.py | iBug/OmniAE | dcaa3c671a36aeef1c7ffff79a859daaa6352fa0 | [
"MIT"
] | 5 | 2019-10-28T21:16:50.000Z | 2020-06-08T21:13:06.000Z | core.py | iBug/OmniAE | dcaa3c671a36aeef1c7ffff79a859daaa6352fa0 | [
"MIT"
] | null | null | null | # LOL. Hope you're not fooled by the name of this file
import sys
import os
from configparser import ConfigParser
# Note: All classes here have N801 (CapWords naming convention) disabled.
# They're intended to be singletons
class Object(object):
    """Attribute/item hybrid namespace backed by a plain dict.

    Unknown attributes raise AttributeError unless a ``_default`` factory
    is set, in which case the factory's result is stored and returned.
    """
    def __init__(self, _default=None, **kwargs):
        # Write through __dict__ directly so the custom __setattr__ below
        # does not route these bookkeeping fields into _data.
        self.__dict__["_data"] = dict(kwargs)
        self.__dict__["_default"] = _default
    def __getattr__(self, attr):
        # Only invoked when normal lookup fails, i.e. for keys kept in _data.
        try:
            return self._data[attr]
        except KeyError:
            if self._default:
                self._data[attr] = self._default()
                return self._data[attr]
            raise AttributeError("Object has no attribute {!r}".format(attr)) from None
    def __setattr__(self, attr, value):
        self._data[attr] = value
        return value
    def __getitem__(self, index):
        return self._data[index]
    def __setitem__(self, index, value):
        self._data[index] = value
        return value
    def set_default(self, default=None):
        """Install *default* as the missing-key factory (True means Object).

        BUG FIX: the previous ``self._default = default`` went through the
        custom __setattr__ and landed in _data, so the real default read by
        __getattr__ (stored in __dict__) was never updated.  Write through
        __dict__ exactly like __init__ does.
        """
        if default is True:
            default = Object
        self.__dict__["_default"] = default
# Global configuration namespace; unknown keys resolve to None
# (the NoneType factory returns None for any missing attribute).
config = Object(_default=type(None))
class obj: # noqa: N801
    """Singleton holder for long-lived runtime objects, filled in elsewhere."""
    site_list = None
    sews = None
    tasker = None
    post_storage = None
    class worker: # noqa: N801
        """Worker-side counterparts of the objects above."""
        sews = None
        scanner = None
        handler = None
    # Feature-check flags; unknown keys come back as None-like defaults.
    check = Object(
        development=None,
    )
# Shared INI parser instance; populated by load() below.
config_parser = ConfigParser()
def load():
    """Read the config file into the module-level `config` namespace.

    Precedence: "config.ci" when running under pytest, then "config" if it
    exists, otherwise the bundled "config.sample".
    """
    # NOTE: `global` is only needed for assignment; kept as-is since this
    # function merely reads config_parser.
    global config_parser
    if "pytest" in sys.modules:
        config_parser.read("config.ci", encoding="utf-8")
    elif os.path.isfile("config"):
        config_parser.read("config", encoding="utf-8")
    else:
        config_parser.read("config.sample", encoding="utf-8")
    conf = config_parser['Config']
    # The read key below is a published, non-secret Stack Exchange API key.
    config.read_key = conf.get('read_key', "IAkbitmze4B8KpacUfLqkw((")
    config.write_key = conf.get('write_key')
    config.write_token = conf.get('write_token')
    config.site = conf.get('site', "android.stackexchange.com")
    config.log_level = int(conf.get('log_level', 1))
    config.file_log_level = int(conf.get('file_log_level', 3))
    config.log_file = conf.get('log_file', "log.txt")
    config.db_file = conf.get('db_file', "androidoverflow.db")
    config.repo_slug = conf.get('repo_slug', "iBug/AndroidOverflow")
70f8bfd61dc80c47f58e8869a7a7e8ed2cf2c065 | 379 | py | Python | tests/test_transforms.py | KIT-MBS/nnicotine | 3681391d05d0a2d92e16431f5bc985cd6ff606fd | [
"MIT"
] | 2 | 2021-03-28T02:02:30.000Z | 2021-09-09T23:27:39.000Z | tests/test_transforms.py | KIT-MBS/nnicotine | 3681391d05d0a2d92e16431f5bc985cd6ff606fd | [
"MIT"
] | null | null | null | tests/test_transforms.py | KIT-MBS/nnicotine | 3681391d05d0a2d92e16431f5bc985cd6ff606fd | [
"MIT"
] | null | null | null | from collections import OrderedDict
from pytest import approx
def test_RandomDrop():
    """Stub for the RandomDrop transform test; fails until implemented."""
    # A bare `raise` with no active exception produced an unrelated
    # RuntimeError; NotImplementedError states the intent directly.
    raise NotImplementedError("TODO: implement test_RandomDrop")
def test_TrimToTarget():
    """Stub for the TrimToTarget transform test; fails until implemented."""
    # Bare `raise` replaced: NotImplementedError marks this as a stub.
    raise NotImplementedError("TODO: implement test_TrimToTarget")
def test_ComputeCouplings():
    """Stub for the ComputeCouplings transform test; fails until implemented."""
    # Bare `raise` replaced: NotImplementedError marks this as a stub.
    raise NotImplementedError("TODO: implement test_ComputeCouplings")
def test_ToCategorical():
    """Stub for the ToCategorical transform test; fails until implemented."""
    # Bare `raise` replaced: NotImplementedError marks this as a stub.
    raise NotImplementedError("TODO: implement test_ToCategorical")
def test_ToTensor_sample():
    """Stub for the ToTensor sample-path test; fails until implemented."""
    # Bare `raise` replaced: NotImplementedError marks this as a stub.
    raise NotImplementedError("TODO: implement test_ToTensor_sample")
def test_ToTensor_label():
    """Stub for the ToTensor label-path test; fails until implemented."""
    # Bare `raise` replaced: NotImplementedError marks this as a stub.
    raise NotImplementedError("TODO: implement test_ToTensor_label")
def test_ToDistance():
    """Stub for the ToDistance transform test; fails until implemented."""
    # Bare `raise` replaced: NotImplementedError marks this as a stub.
    raise NotImplementedError("TODO: implement test_ToDistance")
def test_ToBinnedDistance():
    """Stub for the ToBinnedDistance transform test; fails until implemented."""
    # Bare `raise` replaced: NotImplementedError marks this as a stub.
    raise NotImplementedError("TODO: implement test_ToBinnedDistance")
| 13.068966 | 35 | 0.722955 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
70faefbfea80d8e2843638eff582f5d8fefdfc7c | 1,512 | py | Python | table_creation/queryinfo.py | ashleyeah/spotifyu | d709c6b4833b8a745fbe141ff967f2234cf5e0da | [
"MIT"
] | null | null | null | table_creation/queryinfo.py | ashleyeah/spotifyu | d709c6b4833b8a745fbe141ff967f2234cf5e0da | [
"MIT"
] | null | null | null | table_creation/queryinfo.py | ashleyeah/spotifyu | d709c6b4833b8a745fbe141ff967f2234cf5e0da | [
"MIT"
] | null | null | null | import spotipy
from spotipy.oauth2 import SpotifyClientCredentials
import mysql.connector
auth_manager = SpotifyClientCredentials('f3dc4f3802254be091c8d8576961bc9d', 'b51d135ad7104add8f71933197e9cc14')
sp = spotipy.Spotify(auth_manager=auth_manager)
cnx = mysql.connector.connect(user='root', password='1234',
host='35.222.92.143',
database='main')
cursor = cnx.cursor()
file1 = open('artists.txt', 'r')
Lines = file1.readlines()
for i in range(1, len(Lines)):
artist_id = Lines[i].strip()
urn = 'spotify:artist:{}'.format(artist_id)
artist = sp.artist(urn)
for genre in artist['genres']:
query = ("SELECT genre_id FROM Genres "
"WHERE name = %(genre_name)s; ")
data_genre = {
'genre_name': genre
}
cursor.execute(query, data_genre)
for i in cursor:
add_row = ("INSERT IGNORE INTO ArtistsGenres "
"(artist_id, artist_name, genre_id, genre_name) "
"VALUES (%(artist_id)s, %(artist_name)s, %(genre_id)s, %(genre_name)s) ")
data_row = {
'artist_id': artist_id,
'artist_name': artist['name'],
'genre_id': i[0],
'genre_name': genre
}
# Insert new employee
cursor.execute(add_row, data_row)
# Make sure data is committed to the database
cnx.commit()
cursor.close()
cnx.close() | 31.5 | 111 | 0.576058 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 491 | 0.324735 |
70fdb66b9ded98ccf160a5d4bab704860540ae69 | 1,414 | py | Python | tests/data23/recipe-491264.py | JohannesBuchner/pystrict3 | f442a89ac6a23f4323daed8ef829d8e9e1197f90 | [
"BSD-2-Clause"
] | 1 | 2020-06-05T08:53:26.000Z | 2020-06-05T08:53:26.000Z | tests/data23/recipe-491264.py | JohannesBuchner/pystrict3 | f442a89ac6a23f4323daed8ef829d8e9e1197f90 | [
"BSD-2-Clause"
] | 1 | 2020-06-04T13:47:19.000Z | 2020-06-04T13:47:57.000Z | tests/data23/recipe-491264.py | JohannesBuchner/pystrict3 | f442a89ac6a23f4323daed8ef829d8e9e1197f90 | [
"BSD-2-Clause"
] | 1 | 2020-11-07T17:02:46.000Z | 2020-11-07T17:02:46.000Z | import socket
class DNSQuery:
  """Minimal DNS query parser/responder.

  NOTE: Python 2 code — it treats the raw socket payload as a byte string
  (`ord(data[2])`, string concatenation with '\\x..' escapes); it will not
  run unmodified on Python 3, where indexing bytes yields ints.
  """
  def __init__(self, data):
    # `dominio` (Spanish: domain) accumulates the queried name, dot-separated.
    self.data=data
    self.dominio=''
    tipo = (ord(data[2]) >> 3) & 15 # Opcode bits
    if tipo == 0: # Standard query
      # Walk the length-prefixed labels starting at byte 12 (end of header).
      ini=12
      lon=ord(data[ini])
      while lon != 0:
        self.dominio+=data[ini+1:ini+lon+1]+'.'
        ini+=lon+1
        lon=ord(data[ini])
  def respuesta(self, ip):
    """Build a DNS answer packet resolving the query to *ip* (A record)."""
    packet=''
    if self.dominio:
      packet+=self.data[:2] + "\x81\x80"
      packet+=self.data[4:6] + self.data[4:6] + '\x00\x00\x00\x00' # Questions and Answers Counts
      packet+=self.data[12:] # Original Domain Name Question
      packet+='\xc0\x0c' # Pointer to domain name
      packet+='\x00\x01\x00\x01\x00\x00\x00\x3c\x00\x04' # Response type, ttl and resource data length -> 4 bytes
      packet+=str.join('',[chr(int(x)) for x in ip.split('.')]) # 4bytes of IP
    return packet
if __name__ == '__main__':
  # Fake-DNS server: answers EVERY query with the fixed IP below.
  # Binding UDP port 53 requires root/administrator privileges.
  ip='192.168.1.1'
  print('pyminifakeDNS:: dom.query. 60 IN A %s' % ip)
  udps = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
  udps.bind(('',53))
  try:
    while 1:
      data, addr = udps.recvfrom(1024)
      p=DNSQuery(data)
      udps.sendto(p.respuesta(ip), addr)
      print('Respuesta: %s -> %s' % (p.dominio, ip))
  except KeyboardInterrupt:
    # Ctrl-C is the only shutdown path; close the socket before exiting.
    print('Finalizando')
    udps.close()
| 32.136364 | 125 | 0.548091 | 966 | 0.683168 | 0 | 0 | 0 | 0 | 0 | 0 | 374 | 0.264498 |
70fe2c4951a1b415e80a8ca06456e10fa1d52d75 | 5,007 | py | Python | calculator.py | anon-cand/nexpreval | e033bb9f5f25c74c44d0eb29748bf48193c185ae | [
"MIT"
] | null | null | null | calculator.py | anon-cand/nexpreval | e033bb9f5f25c74c44d0eb29748bf48193c185ae | [
"MIT"
] | null | null | null | calculator.py | anon-cand/nexpreval | e033bb9f5f25c74c44d0eb29748bf48193c185ae | [
"MIT"
] | null | null | null | import os
import logging
from pathlib import Path
from operations import catalogue
from parsers import XMLSpecParser
class ExpressionCalculator:
    """
    Processes all expression files with given extension in source directory
    Assumes that all files with given extension are expression files
    """
    __slots__ = ('source_dir', 'target_dir', 'extension', 'operations', 'spec_parser')
    def __init__(self, source: str, target: str, extension: str):
        """
        :param source: Path to source directory
        :param target: Path to target directory
        :param extension: extension of the files to be processed (with dot)
        :return: None
        """
        self.operations = None
        self.spec_parser = None
        self.source_dir = None
        self.target_dir = None
        self.extension = extension
        self.validate(source, target)  # Validate the inputs
        self.operations = catalogue()  # Build the operations catalogue
        self.spec_parser = XMLSpecParser(self.operations, extension)  # Initialize the parser
    def process(self):
        """
        Coordinate the full run: find specs, evaluate them, persist results.
        """
        logger = logging.getLogger(__name__)
        logger.info('Collecting names of files that will be processed')
        entries = self.entries()
        logger.info('Found %d files to process', len(entries))
        for spec in entries:
            logger.info('Parsing spec: %s', spec)
            operations = self.spec_parser.parse(spec)
            logger.info('Evaluating operations found in the spec')
            results = self.evaluate(operations)
            logger.info('Persisting results for the spec')
            self.persist(spec, results)
    def entries(self) -> list:
        """
        Walk through the file system and find all suitable files
        :return: a list of files that have to be processed
        """
        entries = []
        logger = logging.getLogger(__name__)
        logger.debug('Traversing the source directory')
        for root, _, names in os.walk(self.source_dir):
            for name in names:
                # Only files whose suffix matches the configured extension.
                if Path(name).suffix == self.extension:
                    full_path = os.path.join(root, name)
                    entries.append(full_path)
        return entries
    def evaluate(self, operations: dict) -> dict:
        """
        Execute operations.
        :param operations: a mapping of id and callable operation objects
        :return: a map of id and results
        """
        return {oid: obj() for oid, obj in operations.items()}
    def persist(self, spec: str, results: dict):
        """
        Transform the results using the spec parser and save them to the
        target directory as "<spec-stem>_result<extension>".
        :param spec: name of the spec used to derive the target file name
        :param results: mapping of top-level operation id and their results
        """
        logger = logging.getLogger(__name__)
        logger.debug('Preparing results for persistence')
        serialized_result = self.spec_parser.serialize(results, 'expressions', 'result')
        if serialized_result:
            logger.debug('Determining result file path')
            spec_path = Path(spec)
            # BUG FIX: `spec_path.name[:-len(spec_path.suffix)]` yielded ''
            # for suffix-less files because `name[:-0]` is empty; Path.stem
            # is equivalent for suffixed files and correct otherwise.
            result_file_name = f'{spec_path.stem}_result{spec_path.suffix}'
            result_file_path = self.target_dir.joinpath(result_file_name)
            with open(result_file_path, 'w') as rf:
                rf.write(serialized_result)
            logger.info('Results for spec %s have been saved to: %s', spec_path.name, result_file_path)
        else:
            logger.error("Failed to serialize results")
    def validate(self, source: str, target: str):
        """
        Validates inputs to the application
        :param source: path to source directory
        :param target: path to target directory
        :raises ValueError: when a path is missing, not a directory, or lacks
            the required read/write permissions
        """
        logger = logging.getLogger(__name__)
        source_path = Path(source)  # Path to source directory
        target_path = Path(target)  # Path to destination directory
        logger.debug('Validating paths are valid and are directories')
        if not (source_path.exists() and source_path.is_dir()):
            raise ValueError('Path to source directory is not valid.')
        if not (target_path.exists() and target_path.is_dir()):
            raise ValueError('Path to target directory is not valid.')
        source = source_path.resolve()
        target = target_path.resolve()
        logger.debug('Checking if the directories are accessible to current user')
        if not (os.access(source, os.R_OK)):
            raise ValueError('Read permissions on source directory is missing.')
        if not (os.access(target, os.R_OK | os.W_OK)):
            raise ValueError('Read/write permissions on target directory are missing.')
        self.source_dir = source
        self.target_dir = target
70fe4c8b56bc3406c72affab4486d1a1d17c77b3 | 13,239 | py | Python | tinkup.py | jeromedontdev/tinkup | ebaa0a3289e54130447cd02a70201eae8f01c012 | [
"BSD-3-Clause"
] | 2 | 2022-01-22T07:19:29.000Z | 2022-02-01T05:39:11.000Z | tinkup.py | jeromedontdev/tinkup | ebaa0a3289e54130447cd02a70201eae8f01c012 | [
"BSD-3-Clause"
] | null | null | null | tinkup.py | jeromedontdev/tinkup | ebaa0a3289e54130447cd02a70201eae8f01c012 | [
"BSD-3-Clause"
] | null | null | null | from cgitb import text
import queue
from random import seed
import serial
import serial.tools.list_ports
from signal import signal, SIGINT
import sys
import threading
import time
import tkinter
from tkinter import END, W, PhotoImage, filedialog as fd, scrolledtext as sd
# NOTE: `global` at module scope is a no-op; fw_filename is module-level anyway.
global fw_filename
fw_filename = ""
# Set to a device path (e.g. "COM3") to bypass FTDI auto-detection.
COM_OVERRIDE=None
VERSION='1.0'
# When True, the Tink serial state machines print byte-level traces.
DEBUG=False
# Cleared by on_closing() to stop the RX/timer background threads.
running = True
class PrintLogger():
    """File-like adapter that appends written text to a Tk text widget.

    Installed as sys.stdout so that print() output lands in the GUI log.
    """
    def __init__(self, textbox):
        self.textbox = textbox
    def write(self, text):
        # Append at the end and keep the newest output scrolled into view.
        widget = self.textbox
        widget.insert(tkinter.END, text)
        widget.see(END)
    def flush(self):
        # Required by the file protocol (sys.stdout); nothing is buffered here.
        pass
def on_closing():
    """Signal the background RX/timer threads to stop by clearing `running`."""
    global running
    running = False
def sig_handler(signal_received, frame):
    """SIGINT handler: request an orderly shutdown of background threads."""
    on_closing()
class Tink:
    """Driver for the RetroTINK serial bootloader.

    Validates an Intel-HEX firmware file, opens the (single) FTDI serial
    device, then runs two cooperating state machines: a byte-level RX
    framer (SOH/DLE/EOT with escaping) and a bootloader protocol FSM
    (probe -> erase -> write lines -> jump to application).
    """
    # Bootloader command opcodes (first payload byte of each packet).
    cmd = {
        'CmdGetVer': b'\x01',
        'CmdErase': b'\x02',
        'CmdWrite': b'\x03',
        'JumpApp': b'\x05',
    }
    # Framing control bytes; DLE escapes any of these inside a payload.
    ctrl = {
        'SOH': b'\x01',
        'EOT': b'\x04',
        'DLE': b'\x10',
    }
    # Receive-framer states.
    rxfsm = {
        'RxIdle': 0,
        'RxBuffer': 1,
        'RxEscape': 2,
    }
    # Bootloader protocol states.
    blfsm = {
        'BlIdle': 0,
        'BlVersion': 1,
        'BlErase': 2,
        'BlWrite': 3,
        'BlJump': 4,
    }
    serial = None
    rx_state = rxfsm['RxIdle']
    def timer(self, timestamp):
        """Self-rescheduling tick that stops once `running` goes False."""
        # 100ms interval timer
        if running:
            timestamp += 0.1
            # NOTE(review): Timer(...).start() returns None, so timer_thread
            # is overwritten with None here — the handle cannot be joined.
            self.timer_thread = threading.Timer(timestamp - time.time(), self.timer, args=(timestamp,)).start()
    def calc_crc(self, b):
        """Return the CRC-16 (poly 0x1021) of byte sequence *b*, nibble-wise."""
        # NOTE: This is the CRC lookup table for polynomial 0x1021
        lut = [
            0, 4129, 8258, 12387,\
            16516, 20645, 24774, 28903,\
            33032, 37161, 41290, 45419,\
            49548, 53677, 57806, 61935]
        num1 = 0
        for num2 in b:
            num3 = (num1 >> 12) ^ (num2 >> 4)
            num4 = (lut[num3 & 0x0F] ^ (num1 << 4)) & 0xFFFF
            num5 = (num4 >> 12) ^ num2
            num1 = (lut[num5 & 0x0F] ^ (num4 << 4)) & 0xFFFF
        return num1
    def rx_process(self, packet, debug=DEBUG):
        """Handle one de-framed packet and advance the bootloader FSM.

        Packet layout: [cmd byte][payload...][crc lo][crc hi].
        """
        if debug:
            print('Processing packet: %s' % packet.hex())
        # CRC is transmitted little-endian in the last two bytes.
        crc_rx = (packet[-1] << 8) | packet[-2]
        if self.calc_crc(packet[0:-2]) != crc_rx:
            print('Bad CRC received, resetting state')
            self.bl_state = self.blfsm['BlIdle']
        else:
            cmd = bytes([packet[0]])
            payload = packet[1:-2]
            if self.bl_state == self.blfsm['BlVersion']:
                if cmd == self.cmd['CmdGetVer']:
                    # Device ID string is NUL-padded; keep the prefix only.
                    print('Found device ID: %s' % payload.decode().split('\x00')[0])
                    print('Erasing device... ', end='')
                    self.tx_packet(self.cmd['CmdErase'])
                    self.bl_state = self.blfsm['BlErase']
                else:
                    print('ERROR: Expected response code CmdGetVer, got %s' % packet[0])
            elif self.bl_state == self.blfsm['BlErase']:
                if cmd == self.cmd['CmdErase']:
                    print('OKAY')
                    # Stream the HEX file one line per CmdWrite packet.
                    self.hex_line = 1
                    self.fw_file = open(self.fw_name, 'r')
                    tx = bytearray(self.cmd['CmdWrite'])
                    # Strip the leading ':' of the Intel-HEX record.
                    hex_line = bytes.fromhex(self.fw_file.readline().rstrip()[1:])
                    tx += hex_line
                    print('Writing firmware %d/%d... ' % (self.hex_line, self.hex_nline), end='')
                    self.tx_packet(tx)
                    self.bl_state = self.blfsm['BlWrite']
                else:
                    print('ERROR: Expected response code CmdErase, got %s' % packet[0])
            elif self.bl_state == self.blfsm['BlWrite']:
                if cmd == self.cmd['CmdWrite']:
                    print('OKAY')
                    self.hex_line = self.hex_line + 1
                    # hex_line starts at 1, so we need to send up to and
                    # including hex_nline
                    if self.hex_line > self.hex_nline:
                        print('Update complete, booting firmware')
                        self.bl_state = self.blfsm['BlJump']
                        self.tx_packet(self.cmd['JumpApp'])
                        button_state()
                        return
                        # There doesnt seem to be a response to the JumpApp
                        # command, so at this point we're done.
                        # NOTE(review): unreachable after the return above.
                        self.running = False
                    else:
                        tx = bytearray(self.cmd['CmdWrite'])
                        hex_line = bytes.fromhex(self.fw_file.readline().rstrip()[1:])
                        tx += hex_line
                        print('Writing firmware %d/%d... ' % (self.hex_line, self.hex_nline), end='')
                        self.tx_packet(tx)
                else:
                    print('ERROR: Expected response code CmdWrite, got %s' % packet[0])
    def rx_buffer(self, b, debug=DEBUG):
        """Feed one received byte into the SOH/DLE/EOT de-framing FSM."""
        state_begin = self.rx_state
        if self.rx_state == self.rxfsm['RxIdle']:
            # Ignore bytes until we see SOH
            if b == self.ctrl['SOH']:
                self.rxbuf = bytearray()
                self.rx_state = self.rxfsm['RxBuffer']
        elif self.rx_state == self.rxfsm['RxBuffer']:
            if b == self.ctrl['DLE']:
                # Escape the next control sequence
                self.rx_state = self.rxfsm['RxEscape']
            elif b == self.ctrl['EOT']:
                # End of transmission
                self.rx_state = self.rxfsm['RxIdle']
                self.rx_process(self.rxbuf)
            else:
                # Buffer the byte
                self.rxbuf += b
        elif self.rx_state == self.rxfsm['RxEscape']:
            # Unconditionally buffer any byte following the escape sequence
            self.rxbuf += b
            self.rx_state = self.rxfsm['RxBuffer']
        else:
            # Shouldn't get here
            print('Unknown state')
            self.rx_state = self.rxfsm['RxIdle']
        if debug:
            keys = list(self.rxfsm.keys())
            vals = list(self.rxfsm.values())
            s0 = vals.index(state_begin)
            s1 = vals.index(self.rx_state)
            print('RX: %s, RX FSM state: %s -> %s' % (b.hex(), keys[s0], keys[s1]))
    def rx(self):
        """Background thread body: read the serial port byte-by-byte."""
        while running:
            if self.serial:
                b = self.serial.read(1)
                if b:
                    self.rx_buffer(b)
                else:
                    print('RX timeout?')
            else:
                print('Lost serial port')
                time.sleep(1)
    def tx(self, b, debug=DEBUG):
        """Write raw bytes *b* to the serial port, reporting failures."""
        if debug:
            print('TX: %s' % b.hex())
        if self.serial and self.serial.is_open:
            try:
                self.serial.write(b)
                self.serial.flush()
            except:
                print('TX failure')
                button_state()
                return
        else:
            print('TX failure, serial port not writeable')
            button_state()
            return
    def tx_packet(self, b):
        """Append CRC to payload *b*, frame it (SOH ... EOT), and send it."""
        # b should be a bytearray
        crc = self.calc_crc(b)
        # CRC is appended little-endian (low byte first).
        b += bytes([crc & 0xFF])
        b += bytes([(crc >> 8) & 0xFF])
        b_tx = bytearray(self.ctrl['SOH'])
        for bb in b:
            bb = bytes([bb])
            # Escape any control characters that appear in the TX buffer
            if bb == self.ctrl['SOH'] or bb == self.ctrl['EOT'] or bb == self.ctrl['DLE']:
                b_tx += self.ctrl['DLE']
            b_tx += bb
        b_tx += self.ctrl['EOT']
        self.tx(b_tx)
    def __init__(self, fw_name=None, port=None):
        """Validate *fw_name* (Intel HEX), open the serial device, start probing.

        On any failure this prints a message, re-enables the GUI buttons via
        button_state(), and returns early (leaving self.serial unset).
        """
        self.rx_state = self.rxfsm['RxIdle']
        self.bl_state = self.blfsm['BlIdle']
        self.fw_name = fw_name
        self.hex_nline = 0
        self.hex_line = 0
        # Ensure the file exists, has valid Intel Hex checksums, and count lines
        try:
            with open(self.fw_name) as fw_file:
                for line in fw_file:
                    self.hex_nline = self.hex_nline + 1
                    # Drop the leading ':' of the record before hex-decoding.
                    line = line.rstrip()[1:]
                    try:
                        checksum = bytes.fromhex(line[-2:])
                    except:
                        print('%s is not a valid hex file' % fw_name)
                        button_state()
                        return
                    # It seems to just load hex if it's blank
                    data = bytes.fromhex(line[:-2])
                    # Intel HEX checksum: two's complement of the byte sum.
                    s = bytes([((~(sum(data) & 0xFF) & 0xFF) + 1) & 0xFF])
                    if checksum != s:
                        print('%s is not a valid hex file' % fw_name)
                        button_state()
                        return
        except:
            print('No file selected')
            button_state()
            return
        comports = []
        try:
            if port == None:
                # Auto-detect: accept only FTDI-manufactured adapters.
                comports_all = [comport for comport in serial.tools.list_ports.comports()]
                for com in comports_all:
                    if com.manufacturer == 'FTDI':
                        comports.append(com.device)
            else:
                comports.append(port)
            if comports:
                if len(comports) > 1:
                    print('Several FTDI devices detected - not sure which to target. Aborting.')
                    # TODO: Add interactive device selector?
                    button_state()
                    return
                for com in comports:
                    try:
                        self.serial = serial.Serial(com, baudrate=115200, timeout=None, rtscts=True)
                        print('Opened device at %s' % com)
                    except Exception as ex:
                        print('Could not open device at %s' % com)
                        print('Exception: %s' % ex)
                        button_state()
                        return
            else:
                print('No RetroTINK devices found')
                button_state()
                return
        except:
            print('No communication with device')
            button_state()
            return
        if self.serial:
            # Daemon threads so the GUI can exit without joining them.
            self.rx_process_thread = threading.Thread(target=self.rx, args=())
            self.rx_process_thread.daemon = True
            self.rx_process_thread.start()
            self.timer_thread = threading.Thread(target=self.timer, args=(time.time() + 0.1,))
            self.timer_thread.daemon = True
            self.timer_thread.start()
        else:
            button_state()
            return
        self.running = True
        retries = 1
        self.bl_state = self.blfsm['BlVersion']
        while retries and running:
            retries = retries - 1
            print('Probing device... ', end='')
            self.tx_packet(self.cmd['CmdGetVer'])
            time.sleep(1)
    # Need to add a timeout
def file_select():
    """Open a file picker and show the chosen HEX path in the entry box."""
    filetypes = (
        ('hex files', '*.hex'),
        ('All files', '*.*')
    )
    # NOTE(review): this assigns a LOCAL fw_filename (no `global`); the
    # chosen path is carried via the entry widget, so this is harmless.
    fw_filename = fd.askopenfilename(
        title='Select hex',
        initialdir='/',
        filetypes=filetypes)
    # Temporarily unlock the read-only entry to update its contents.
    browse_box.configure(state="normal")
    browse_box.delete(0, END)
    browse_box.insert(0,fw_filename)
    browse_box.configure(state="readonly")
def tink_flash():
    """Kick off a firmware flash using the path in the entry box."""
    fw_filename = browse_box.get()
    try:
        # Disable the buttons while the flash is in progress.
        button_state()
        # NOTE(review): `tink` is unused; Tink's threads keep running anyway.
        tink = Tink(fw_name=fw_filename, port=COM_OVERRIDE)
    except:
        # Broad except: any constructor failure re-enables the buttons.
        print('Could not execute flash')
        button_state()
        return
def button_state():
    """Toggle both GUI buttons between enabled and disabled in lockstep."""
    if browse_button['state'] == "normal":
        browse_button.configure(state="disabled")
        flash_button.configure(state="disabled")
    else:
        browse_button.configure(state="normal")
        flash_button.configure(state="normal")
if __name__ == '__main__':
    # Build the fixed-size GUI, then redirect stdout into the log widget.
    signal(SIGINT, sig_handler)
    window = tkinter.Tk()
    window.geometry('680x380')
    window.iconbitmap(default='./assets/icon.ico')
    window.title('tinkup-gui')
    window.resizable(False,False)
    window.eval('tk::PlaceWindow . center')
    tink_logo = PhotoImage(file='./assets/RetroTINK-logo.png')
    tink_logo = tink_logo.subsample(4,4)
    tink_label = tkinter.Label(window,image=tink_logo)
    tink_label.place(x=285, y=10)
    fw_label = tkinter.Label(window,text="Hex File:")
    fw_label.place(x=325, y=90)
    browse_box = tkinter.Entry(window,textvariable=fw_filename)
    browse_box.configure(state="readonly")
    browse_box.place(x=10, y=120, width=582)
    browse_button = tkinter.Button(window,text='Load HEX',command=file_select)
    browse_button.place(x=610, y=115)
    flash_button = tkinter.Button(window, text="Flash", command=tink_flash)
    flash_button.place(x=330, y=145)
    print_text = sd.ScrolledText(window, undo=True)
    print_text.place(x=10, y=180, height=180)
    # All print() output from here on lands in the scrolled text widget.
    logger = PrintLogger(print_text)
    sys.stdout = logger
    try:
        # Windows-only DPI awareness; on other platforms the import raises.
        from ctypes import windll
        windll.shcore.SetProcessDpiAwareness(1)
    finally:
        # try/finally (no except): mainloop still runs if ctypes fails, but
        # a non-Windows ImportError will re-raise after the window closes.
        window.mainloop()
on_closing() | 32.688889 | 111 | 0.502304 | 10,602 | 0.800816 | 0 | 0 | 0 | 0 | 0 | 0 | 2,251 | 0.170028 |
70fed1aa735bf095ea4ea44b9f449fbb2dd1b487 | 128 | py | Python | backend/src/analytics/admin.py | codingforentrepreneurs/Geolocator-2 | d4c2410301aa2497816e7ed2ec1890bc9be2e8fd | [
"MIT"
] | 34 | 2017-04-27T20:22:08.000Z | 2021-11-08T13:01:43.000Z | backend/src/analytics/admin.py | hercules261188/Geolocator-2 | d4c2410301aa2497816e7ed2ec1890bc9be2e8fd | [
"MIT"
] | null | null | null | backend/src/analytics/admin.py | hercules261188/Geolocator-2 | d4c2410301aa2497816e7ed2ec1890bc9be2e8fd | [
"MIT"
] | 34 | 2017-05-18T23:23:01.000Z | 2022-03-02T21:08:54.000Z | from django.contrib import admin
# Register your models here.
from .models import UserSession
admin.site.register(UserSession) | 21.333333 | 32 | 0.820313 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 28 | 0.21875 |
cb013ff854838f109743a88faf31b41b4d2a4d6d | 5,588 | py | Python | calculators/loan_calculator.py | wanderindev/financial-calculator-backend | ad7e736c858298c240eb9af52fbadcb02c693968 | [
"MIT"
] | 2 | 2021-01-08T04:26:54.000Z | 2022-02-04T22:22:27.000Z | calculators/loan_calculator.py | wanderindev/financial-calculator-backend | ad7e736c858298c240eb9af52fbadcb02c693968 | [
"MIT"
] | null | null | null | calculators/loan_calculator.py | wanderindev/financial-calculator-backend | ad7e736c858298c240eb9af52fbadcb02c693968 | [
"MIT"
] | 2 | 2019-06-06T19:36:17.000Z | 2020-05-20T12:37:08.000Z | from math import ceil
from numpy_financial import nper, pmt, rate
from typing import List, Tuple
from .calculator import Calculator
# noinspection PyTypeChecker
class LoanCalculator(Calculator):
    """Loan amortization calculator with optional recurring extra payments.

    Builds per-period payment, interest and balance schedules; when a
    payoff occurs early, the schedule is truncated via trunc_periods().
    Several methods read attributes (rate, freq, periods, num_of_years)
    that are presumably set by the Calculator base class or by earlier
    calls — TODO confirm against calculator.py.
    """
    def __init__(self, **kwargs):
        """Parse loan inputs from keyword arguments (all optional, default 0)."""
        super(LoanCalculator, self).__init__(**kwargs)
        self.loan = self.get_float(kwargs.get("loan", 0))
        self.reg_pmt = self.get_float(kwargs.get("reg_pmt", 0))
        self.extra_pmt = self.get_float(kwargs.get("extra_pmt", 0))
        self.extra_pmt_start = self.get_int(kwargs.get("extra_pmt_start", 0))
        self.extra_pmt_f = self.get_int(kwargs.get("extra_pmt_f", 0))
        # pmt_when: 1 = payments at period start (annuity due), 0 = at end.
        self.pmt_when = self.get_int(kwargs.get("pmt_when", 0))
        self.payments = []
        self.payments_e = []
        self.payments_r = []
        self.payments_p = []
    def get_balances_loans(self) -> List[float]:
        """Compute the outstanding balance per period, clamping at payoff.

        Side effects: when the balance would go negative, the final regular
        and/or extra payment is adjusted in-place and the schedule truncated.
        """
        balances = []
        for x in self.periods:
            bal = self.loan - sum(self.payments[:x]) + sum(self.interests[:x])
            if bal < 0:
                if self.reg_pmt + bal >= 0:
                    # Shrink the last regular payment to land exactly on zero.
                    self.payments_r[x - 1] = self.reg_pmt + bal
                    self.payments[x - 1] = (
                        self.payments_r[x - 1] + self.payments_e[x - 1]
                    )
                    balances.append(0)
                    self.trunc_periods(x)
                else:
                    # Even without a regular payment the loan is overpaid;
                    # absorb the overshoot into the extra payment instead.
                    self.payments_r[x - 1] = 0
                    self.payments_e[x - 1] = (
                        bal + self.payments_e[x - 1] + self.reg_pmt
                    )
                    self.payments[x - 1] = self.payments_e[x - 1]
                    balances.append(0)
                    self.trunc_periods(x)
                return balances
            else:
                balances.append(
                    self.loan
                    - sum(self.payments[:x])
                    + sum(self.interests[:x])
                )
        return balances
    def get_interests_loans(self) -> List[float]:
        """Compute the per-period interest, honoring payment timing."""
        # Periodic rate from the annual percentage rate.
        _rate = self.rate / (100 * self.freq)
        interests = [
            round(
                (self.loan - self.payments[0]) * _rate
                if self.pmt_when
                else self.loan * _rate,
                4,
            )
        ]
        for x in self.periods[1:]:
            if self.pmt_when:
                # Annuity due: the period's payment reduces principal first.
                interest = round(
                    (self.loan - sum(self.payments[:x]) + sum(interests[:x]))
                    * _rate,
                    4,
                )
            else:
                interest = round(
                    (
                        self.loan
                        - sum(self.payments[: x - 1])
                        + sum(interests[:x])
                    )
                    * _rate,
                    4,
                )
            if interest < 0:
                interests.append(0)
            else:
                interests.append(interest)
        return interests
    def get_nper_loans(self) -> int:
        """Compute the number of payments; also refresh the period lists."""
        _nper = ceil(
            nper(
                self.rate / (100 * self.freq),
                -self.reg_pmt,
                self.loan,
                when=self.pmt_when,
            )
        )
        self.num_of_years = round(_nper / self.freq, 2)
        self.periods = self.get_periods()
        self.periods_a = self.get_periods_a()
        self.periods_m = self.get_periods_m()
        return _nper
    def get_payments(self) -> Tuple[List[float]]:
        """Build the full schedules; returns (total, extra, regular) payments."""
        self.payments_r = self.get_payments_r()
        self.payments_e = self.get_payments_e()
        self.payments = [
            round(self.payments_r[x - 1] + self.payments_e[x - 1], 4)
            for x in self.periods
        ]
        # Order matters: balances consume interests and may mutate payments.
        self.interests = self.get_interests_loans()
        self.balances = self.get_balances_loans()
        return self.payments, self.payments_e, self.payments_r
    def get_payments_e(self) -> List[float]:
        """Return per-period extra payments (0 where none applies)."""
        extra_pmt_p = []
        if self.extra_pmt:
            extra_pmt_p.append(self.extra_pmt_start)
            if self.extra_pmt_f:
                # extra_pmt_f is presumably payments per year; every
                # 12/extra_pmt_f periods after the start — TODO confirm.
                for x in self.periods[self.extra_pmt_start :]:
                    if not (x - self.extra_pmt_start) % (
                        12 / self.extra_pmt_f
                    ):
                        extra_pmt_p.append(x)
        return [
            self.extra_pmt if x in extra_pmt_p else 0 for x in self.periods
        ]
    def get_payments_r(self) -> List[float]:
        """Return the flat regular-payment schedule."""
        return [self.reg_pmt for _ in self.periods]
    def get_rate_loans(self):
        """Solve for the annual percentage rate implied by the other inputs."""
        return (
            rate(
                self.freq * self.num_of_years,
                -self.reg_pmt,
                self.loan,
                0,
                self.pmt_when,
            )
            * self.freq
            * 100
        )
    def get_reg_pmt(self) -> float:
        """Solve for the regular payment amount and cache it on self."""
        self.reg_pmt = round(
            -pmt(
                self.rate / (100 * self.freq),
                self.freq * self.num_of_years,
                self.loan,
                when=self.pmt_when,
            ),
            4,
        )
        return self.reg_pmt
    def trunc_periods(self, p: int) -> None:
        """Truncate all schedules to *p* periods after an early payoff."""
        self.periods = self.periods[:p]
        self.payments_r = self.payments_r[:p]
        self.payments_e = self.payments_e[:p]
        self.payments = self.payments[:p]
        self.interests = self.interests[:p]
        self.nper_t = p
        self.num_of_years_t = p / self.freq
| 31.75 | 79 | 0.46403 | 5,414 | 0.968862 | 0 | 0 | 0 | 0 | 0 | 0 | 95 | 0.017001 |
cb01a78dea1ba413678bdaa15b5fb376d5cfbcae | 168 | py | Python | pydundas/tests/rest/test_project.py | autonopy/pydundas | 66fa46d593d3a2f9f3da7a0220f8a7dc495e2758 | [
"MIT"
] | 4 | 2019-04-09T05:30:10.000Z | 2022-01-26T14:45:52.000Z | pydundas/tests/rest/test_project.py | autonopy/pydundas | 66fa46d593d3a2f9f3da7a0220f8a7dc495e2758 | [
"MIT"
] | 4 | 2019-04-08T13:00:56.000Z | 2022-01-27T06:46:33.000Z | pydundas/tests/rest/test_project.py | autonopy/pydundas | 66fa46d593d3a2f9f3da7a0220f8a7dc495e2758 | [
"MIT"
] | 3 | 2019-04-08T07:07:12.000Z | 2022-01-26T14:46:00.000Z | import unittest
from pydundas import Api
class TestProject(unittest.TestCase):
    """Smoke tests for the pydundas project API wrapper."""
    def test_no_syntax_error(self):
        # Constructing the project accessor must not fail or return None.
        self.assertIsNotNone(Api(None).project())
| 18.666667 | 49 | 0.755952 | 124 | 0.738095 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
cb0302f31a96c3dde5eb9872c52023093a2d18f1 | 516 | py | Python | odoo/custom/src/private/nxpo_budget_revision_monitoring_project/report/budget_monitor_revision_report.py | Saran440/nxpo | 769bbebc0df2e12acbf5b1e398e34b3405d53c50 | [
"BSL-1.0"
] | null | null | null | odoo/custom/src/private/nxpo_budget_revision_monitoring_project/report/budget_monitor_revision_report.py | Saran440/nxpo | 769bbebc0df2e12acbf5b1e398e34b3405d53c50 | [
"BSL-1.0"
] | null | null | null | odoo/custom/src/private/nxpo_budget_revision_monitoring_project/report/budget_monitor_revision_report.py | Saran440/nxpo | 769bbebc0df2e12acbf5b1e398e34b3405d53c50 | [
"BSL-1.0"
] | null | null | null | # Copyright 2020 Ecosoft Co., Ltd. (http://ecosoft.co.th)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class BudgetMonitorRevisionReport(models.Model):
    """Extend the budget revision monitoring report with a project dimension."""
    _inherit = "budget.monitor.revision.report"
    # Link each report row to its research project.
    project_id = fields.Many2one(comodel_name="res.project")
    def _select_budget(self):
        """Append the project column (aa.project_id) to the base SELECT list."""
        select_budget_query = super()._select_budget()
        select_budget_query = ",".join([select_budget_query, "aa.project_id"])
        return select_budget_query
| 32.25 | 78 | 0.722868 | 358 | 0.693798 | 0 | 0 | 0 | 0 | 0 | 0 | 183 | 0.354651 |
cb0378636f9363b9345438112d17636d553ee87d | 1,975 | py | Python | src/ClusterManager/cluster_manager.py | nautilusshell/QianJiangYuan | 262a88ca559f62d6e8633b1596481515d32f7907 | [
"MIT"
] | null | null | null | src/ClusterManager/cluster_manager.py | nautilusshell/QianJiangYuan | 262a88ca559f62d6e8633b1596481515d32f7907 | [
"MIT"
] | null | null | null | src/ClusterManager/cluster_manager.py | nautilusshell/QianJiangYuan | 262a88ca559f62d6e8633b1596481515d32f7907 | [
"MIT"
] | 1 | 2019-12-27T07:57:48.000Z | 2019-12-27T07:57:48.000Z | #!/usr/bin/python
# -*- coding: UTF-8 -*-
import json
import os
import time
import argparse
import uuid
import subprocess
import sys
import datetime
import yaml
from jinja2 import Environment, FileSystemLoader, Template
import base64
import re
import thread
import threading
import random
import textwrap
import logging
import logging.config
import job_manager
import user_manager
import node_manager
import joblog_manager
import command_manager
from multiprocessing import Process, Manager
def create_log( logdir = '/var/log/dlworkspace' ):
    """Configure process-wide logging from logging.yaml.

    Ensures *logdir* exists, loads the logging config, points the file
    handler at <logdir>/clustermanager.log and installs the config.
    """
    if not os.path.exists( logdir ):
        # os.makedirs replaces the shell escape os.system("mkdir -p ...") —
        # no shell injection risk and a proper error on failure.
        os.makedirs( logdir )
    with open('logging.yaml') as f:
        # safe_load: the config file needs no arbitrary Python objects, and
        # yaml.load without an explicit Loader is deprecated and unsafe.
        logging_config = yaml.safe_load(f)
    logging_config["handlers"]["file"]["filename"] = logdir+"/clustermanager.log"
    logging.config.dictConfig(logging_config)
def Run():
    """Start every manager as a child process and block until all exit."""
    create_log()
    logging.info( "Starting job manager... " )
    proc_job = Process(target=job_manager.Run)
    proc_job.start()
    logging.info( "Starting user manager... " )
    proc_user = Process(target=user_manager.Run)
    proc_user.start()
    logging.info( "Starting node manager... " )
    proc_node = Process(target=node_manager.Run)
    proc_node.start()
    logging.info( "Starting joblogging manager... " )
    proc_joblog = Process(target=joblog_manager.Run)
    proc_joblog.start()
    logging.info( "Starting command manager... " )
    proc_command = Process(target=command_manager.Run)
    proc_command.start()
    # Wait for all children; Run() therefore never returns in normal use.
    proc_job.join()
    proc_user.join()
    proc_node.join()
    proc_joblog.join()
    proc_command.join()
    pass
if __name__ == '__main__':
#parser = argparse.ArgumentParser( prog='cluster_manager.py',
# formatter_class=argparse.RawDescriptionHelpFormatter,
# description=textwrap.dedent('''\
# ''') )
#parser.add_argument("help",
# help = "Show the usage of this program" )
#args = parser.parse_args()
Run() | 21.236559 | 85 | 0.691646 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 554 | 0.280506 |
cb0468927dd370e4c6e76788301b7d52a85a268f | 611 | py | Python | netmiko/ssh_exception.py | dalekirkman1/netmiko | a82ae784f4f1ef75e864ac972c088b8c963f77ff | [
"MIT"
] | 1 | 2020-12-11T00:48:09.000Z | 2020-12-11T00:48:09.000Z | netmiko/ssh_exception.py | rockenwind/netmiko | 24291029d0cdd5af660475ac1093a2dcd1c08af2 | [
"MIT"
] | null | null | null | netmiko/ssh_exception.py | rockenwind/netmiko | 24291029d0cdd5af660475ac1093a2dcd1c08af2 | [
"MIT"
] | null | null | null | from paramiko.ssh_exception import SSHException
from paramiko.ssh_exception import AuthenticationException
class NetmikoTimeoutException(SSHException):
    """Raised when the SSH session times out while connecting to the device."""
class NetmikoAuthenticationException(AuthenticationException):
    """SSH authentication failure, specializing Paramiko's AuthenticationException."""
class ConfigInvalidException(Exception):
"""Exception raised for invalid configuration error."""
pass
NetMikoTimeoutException = NetmikoTimeoutException
NetMikoAuthenticationException = NetmikoAuthenticationException
| 24.44 | 81 | 0.813421 | 379 | 0.620295 | 0 | 0 | 0 | 0 | 0 | 0 | 188 | 0.307692 |
cb04d915b9d325336f938cf593f891e0a7292b7e | 5,793 | py | Python | src/REUTER.py | Qinaty/POA-spiders | 95e1c2cb59877d360806736bc995069885d34591 | [
"MIT"
] | null | null | null | src/REUTER.py | Qinaty/POA-spiders | 95e1c2cb59877d360806736bc995069885d34591 | [
"MIT"
] | null | null | null | src/REUTER.py | Qinaty/POA-spiders | 95e1c2cb59877d360806736bc995069885d34591 | [
"MIT"
] | null | null | null | import time
from bs4 import BeautifulSoup
from base import *
from db_info import *
# 构建映射url->article
# Maps story URL -> partially-filled Article: the URL manager creates stubs
# here and REUTERSpider completes and removes them.
_url2atc = dict()
# Month-abbreviation -> month-number lookup for parsing Reuters listing dates.
month = dict({'Jan':1, 'Feb':2, 'Mar':3, 'Apr':4, 'May':5, 'Jun':6,
              'Jul':7, 'Aug':8, 'Sep':9, 'Oct':10, 'Nov':11, 'Dec':12})
class REUTERURLManager(BaseURLManager):
    """URL manager that scrapes the Reuters "china-news" archive listing.

    Each directory page yields story URLs; a partially-filled Article stub is
    cached in the module-level _url2atc map so REUTERSpider can complete it
    later without re-parsing the listing page.
    """

    def __init__(self, start_page=1, end_page=-1):
        super().__init__(start_page, end_page)

    def parse(self, page_cnt) -> list:
        """Parse listing page *page_cnt* and return the story URLs found.

        Side effect: stores an Article stub for every URL in _url2atc.
        """
        # Directory URL, e.g.
        # https://www.reuters.com/news/archive/china-news?view=page&page=1&pageSize=10
        dir_url = f'https://www.reuters.com/news/archive/china-news?view=page&page={page_cnt}&pageSize=10'
        html = get_html(dir_url)
        soup = BeautifulSoup(html, features="html.parser")
        articles = soup.find(attrs={'class':'news-headline-list'}).find_all(attrs={'class':'story'})
        urls = []
        for i in articles:
            # Every field is optional in the listing markup, so each lookup
            # falls back to None instead of aborting the whole page.  Catch
            # Exception (not bare except) so KeyboardInterrupt / SystemExit
            # still propagate.
            try:
                url = 'https://www.reuters.com' + i.find('div', attrs={'story-content'}).find('a')['href']
            except Exception:
                url = None
            try:
                pic_url = i.find('div', attrs={'story-photo lazy-photo'}).find('img')['org-src']
            except Exception:
                pic_url = None
            try:
                title = i.find('div', attrs={'story-content'}).find('h3').text.strip()
            except Exception:
                title = None
            try:
                date = i.find('div', attrs={'story-content'}).find('time').find('span').text
                if date[0].isdigit():
                    # Today's stories show only a clock time, so use the
                    # current local date.
                    date = time.strftime("%Y-%m-%d", time.localtime())
                else:
                    # "Mon DD YYYY" -> "YYYY-M-DD" via the module-level month map.
                    temp = date.split()
                    date = temp[2] + '-' + str(month[temp[0]]) + '-' + temp[1]
            except Exception:
                date = None
            try:
                abstract = i.find('div', attrs={'story-content'}).find('p').text.strip()
            except Exception:
                abstract = None
            act = Article(
                publisher='REUTER',
                url=url,
                title=title,
                date=date,
                authors=None,   # filled in later by REUTERSpider
                content=None,   # filled in later by REUTERSpider
                abstract=abstract,
                location=None,  # filled in later by REUTERSpider
                section=None,
                category=None,  # filled in later by REUTERSpider
                pic_url=pic_url,
                type='passage'
            )
            urls.append(url)
            print(url)
            _url2atc[url] = act
        return urls
class REUTERSpider(BaseSpider):
    """Spider that downloads each Reuters story page and completes the
    Article stub that REUTERURLManager cached in _url2atc.
    """
    def __init__(self, server: str, database: str, url_manager: BaseURLManager, maximum=-1):
        super().__init__(server, database, url_manager, maximum)
    def parse(self, url) -> Article:
        """Fetch *url*, scrape authors/location/body/category, then return
        the completed Article (removing it from the _url2atc cache).
        """
        html = get_html(url)
        # Build the HTML parser.
        soup = BeautifulSoup(html, features="html.parser")
        # Extract the article content.
        # NOTE(review): several attrs arguments below are *set* literals
        # (comma, not colon) and 'clss' looks like a typo for 'class';
        # BeautifulSoup then matches on attribute names rather than class
        # values — confirm this matching is intended before changing it.
        try:
            authors_text = soup.find('div', attrs={'class', 'TwoColumnsLayout-body-86gsE ArticlePage-body-container-10RhS'}).find('div', attrs={'clss', 'Attribution-attribution-Y5JpY'}).text
            # Byline presumably looks like "By First Last and First Last in
            # City" — drop the leading two words and split into word tokens.
            authors_text = authors_text.split(';')[0].split(' ')[2:]
            authors = []
            location = ""
            name = ""
            for i in authors_text:
                if i == 'and':
                    # Boundary between two author names.
                    authors.append(name.strip())
                    name = ""
                elif i == 'in':
                    # Location marker: stop collecting author names.
                    authors.append(name.strip())
                    break
                # l = authors_text.index(i)
                # location = " ".join(authors_text[l+1:])
                # break
                elif authors_text.index(i) == (len(authors_text) - 1):
                    # Last word: close out the final author name.
                    name = name + i + " "
                    authors.append(name.strip())
                else:
                    name = name + i + " "
            # if location == "":
            #     location = None
        except:
            authors = None
        try:
            raw_text = soup.find('div', attrs={'class', 'TwoColumnsLayout-body-86gsE ArticlePage-body-container-10RhS'}).\
                find('div', attrs={'clss', 'ArticleBodyWrapper'}).\
                find_all('p', attrs={'class', 'Paragraph-paragraph-2Bgue ArticleBody-para-TD_9x'})
            try:
                # First paragraph typically starts "CITY - ..."; everything
                # before the dash (minus its trailing word) is the location.
                location = raw_text[0].text.split(' ')
                loc_index = location.index('-')
                if loc_index == 1:
                    location = None
                else:
                    location = " ".join(location[:(loc_index-1)])
            except:
                location = None
            raw_text = raw_text[1:]  # drop the abstract paragraph
            text = ''
            for i in raw_text:
                text += f'{i.text}\n'
        except:
            text = None
        try:
            category = soup.find('div', attrs={'class', 'TwoColumnsLayout-hero-3H8pu'}).\
                find('div', attrs={'clss', 'ArticleHeader-info-container-3-6YG'}).\
                find('a').text
        except:
            category = None
        # Complete and release the cached Article stub.
        atc = _url2atc[url]
        atc.authors = str(authors)
        atc.location = location
        atc.content = text
        atc.category = category
        del _url2atc[url]
        return atc
if __name__ == '__main__':
    # Crawl the Reuters listing pages and store every parsed article.
    # (Removed dataset-export residue that was fused onto the final line.)
    um = REUTERURLManager()
    spider = REUTERSpider(
        server=SERVER,
        database=DATABASE,
        url_manager=um,
    )
    spider.run()
cb0781103336b3ed4a0d2565011f61861afb533b | 2,770 | py | Python | core/management_utils.py | crydotsnake/djangogirls | 0e764294085d6d7d3c4f61a7fe36f91640abedcd | [
"BSD-3-Clause"
] | 446 | 2015-01-04T20:58:26.000Z | 2022-03-30T23:08:26.000Z | core/management_utils.py | serenasensini/TheRedCode_Docker-per-Django-e-Postgres | 78a2ca1f09ab956a6936d14a5fd99336ff39f472 | [
"BSD-3-Clause"
] | 649 | 2015-01-09T23:42:14.000Z | 2022-03-31T17:27:19.000Z | core/management_utils.py | serenasensini/TheRedCode_Docker-per-Django-e-Postgres | 78a2ca1f09ab956a6936d14a5fd99336ff39f472 | [
"BSD-3-Clause"
] | 319 | 2015-01-06T20:58:42.000Z | 2022-03-30T06:29:04.000Z | import djclick as click
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from .forms import AddOrganizerForm
from .slack_client import slack
# "Get organizers info" functions used in 'new_event' and 'copy_event' management commands.
def get_main_organizer():
    """
    Prompt for the name and e-mail address of the main organizer and
    return them as a one-element list of dicts.
    """
    click.echo(_("Let's talk about the team. First the main organizer:"))
    name = click.prompt(click.style(
        "First and last name", bold=True, fg='yellow'
    ))
    email = click.prompt(click.style(
        "E-mail address", bold=True, fg='yellow'
    ))
    click.echo("All right, the main organizer is {0} ({1})".format(name, email))
    return [{'name': name, 'email': email}]
def get_team(team):
    """
    Repeatedly ask whether more team members should be added; for each one,
    collect a name and e-mail and append them to *team*.  Returns the
    (mutated) list.
    """
    member_no = 1
    # Same confirmation styling used both before the first member and
    # after every subsequent one.
    more = click.style(
        "Do you want to add additional team members?", bold=True, fg='yellow'
    )
    while click.confirm(more, default=False):
        member_no += 1
        name = click.prompt(click.style(
            f"First and last name of #{member_no} member", bold=True, fg='yellow'
        ))
        email = click.prompt(click.style(
            f"E-mail address of #{member_no} member", bold=True, fg='yellow'
        ))
        if len(name) > 0:
            team.append({'name': name, 'email': email})
            click.echo(
                f"All right, the #{member_no} team member of Django Girls is {name} ({email})"
            )
    return team
def create_users(team, event):
    """
    Create or fetch a User for every member dict in *team* via
    AddOrganizerForm, binding each member to *event*.  Returns the users.
    """
    saved_users = []
    for member in team:
        member['event'] = event.pk
        saved_users.append(AddOrganizerForm(member).save())
    return saved_users
def brag_on_slack_bang(city, country, team):
    """
    Post a celebratory message about a newly created Django Girls event to
    the #general Slack channel.  No-op unless notifications are enabled.
    """
    if not settings.ENABLE_SLACK_NOTIFICATIONS:
        return
    organizers = ', '.join(['{} {}'.format(x.first_name, x.last_name) for x in team])
    text = f":django_pony: :zap: Woohoo! :tada: New Django Girls alert! " \
           f"Welcome Django Girls {city}, {country}. " \
           f"Congrats {organizers}!"
    slack.chat.post_message(
        channel='#general',
        text=text,
        username='Django Girls',
        icon_emoji=':django_heart:'
    )
| 31.123596 | 98 | 0.604693 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,188 | 0.428881 |
cb0840d5bc91aaecbc980279a62bae916478d2ee | 58 | py | Python | conference/forms/__init__.py | zevaverbach/epcon | 8352c030ee0d4197f559cdb58a54ee45c7a4471a | [
"BSD-2-Clause"
] | null | null | null | conference/forms/__init__.py | zevaverbach/epcon | 8352c030ee0d4197f559cdb58a54ee45c7a4471a | [
"BSD-2-Clause"
] | null | null | null | conference/forms/__init__.py | zevaverbach/epcon | 8352c030ee0d4197f559cdb58a54ee45c7a4471a | [
"BSD-2-Clause"
] | null | null | null | from .forms import * # noqa
from .talks import * # noqa
| 19.333333 | 28 | 0.655172 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 | 0.206897 |
cb08ea8fba01c9694bba7e015a61763f76ce96c2 | 8,235 | py | Python | siggregator/siggregator.py | packmad/Siggregator | 70ff7432e715b7a25a82098320c351ddb78daa7d | [
"Apache-2.0"
] | 3 | 2021-03-16T15:28:00.000Z | 2021-07-23T09:07:54.000Z | siggregator/siggregator.py | packmad/Siggregator | 70ff7432e715b7a25a82098320c351ddb78daa7d | [
"Apache-2.0"
] | null | null | null | siggregator/siggregator.py | packmad/Siggregator | 70ff7432e715b7a25a82098320c351ddb78daa7d | [
"Apache-2.0"
] | 1 | 2022-03-29T21:57:36.000Z | 2022-03-29T21:57:36.000Z | #!/usr/bin/env python3
import hashlib
import json
import magic
import os
import re
import subprocess
import sys
import yara
import ordlookup
import pefile
import ssdeep
import tlsh
from multiprocessing import Pool
from os.path import isdir, isfile, join, basename, abspath, dirname, realpath
from pathlib import Path
from tqdm import tqdm
from typing import Optional, Dict, List
# Matches a bare hex SHA-256 digest (used to recognize corpus file names).
sha256_regex = re.compile(r'^[a-f0-9]{64}$', re.IGNORECASE)
# Source rules and compiled-rule cache, resolved relative to this file.
# NOTE(review): the nested join() calls each receive a single argument and
# are therefore redundant — one join would suffice.
yara_signatures_dir = join(join(dirname(realpath(__file__)), 'yara_signatures'))
yarac_signatures_dir = join(join(dirname(realpath(__file__)), 'yarac_signatures'))
def compile_signatures() -> int:
    """Compile every YARA ruleset found under yara_signatures_dir.

    Expects a <format>/<arch>/ directory layout (e.g. pe/x86/); each
    directory containing *.yara files is compiled into a single
    <format>_<arch>.yarac file inside yarac_signatures_dir.

    Returns the number of entries now in yarac_signatures_dir, excluding
    the .gitkeep placeholder.
    """
    for root, dirs, files in os.walk(yara_signatures_dir, topdown=False):
        for subdir in dirs:  # renamed from `dir` to avoid shadowing the builtin
            dir_path = join(root, subdir)
            # Map namespace -> rule file; the namespace drops the trailing
            # 6 characters ("s.yara") from the file name.
            namespace_to_signatures = {
                f[:-6]: join(dir_path, f)
                for f in os.listdir(dir_path)
                if f.endswith('.yara')
            }
            if namespace_to_signatures:
                p = Path(dir_path)
                arch = p.name
                fformat = p.parent.name
                dst_file = join(yarac_signatures_dir, f'{fformat}_{arch}.yarac')
                yara.compile(filepaths=namespace_to_signatures).save(dst_file)
    return len(os.listdir(yarac_signatures_dir)) - 1  # 1 is the .gitkeep file
def is_supported_file(file_path: str) -> bool:
    """Return True if the file starts with a PE, ELF, or Mach-O magic number."""
    try:
        with open(file_path, 'rb') as fp:
            head = fp.read(2)
            if head == b'MZ':        # PE ("MZ")
                return True
            if head == b'\x7fE':     # ELF ("\x7fELF")
                return fp.read(2) == b'LF'
            if head == b'\xfe\xed':  # Mach-O ("\xfe\xed\xfa...")
                return fp.read(1) == b'\xfa'
    except Exception:
        # Unreadable or missing file: treated as unsupported.
        pass
    return False
def get_file_sha256sum(file_path: str) -> str:
    """Return the hex SHA-256 digest of *file_path*, hashing in 64 KiB chunks."""
    digest = hashlib.sha256()
    with open(file_path, 'rb', buffering=0) as fh:
        while True:
            chunk = fh.read(65536)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
def get_impfuzzy(pe: pefile.PE) -> str:
    """Return the ssdeep fuzzy hash of the PE's normalized import table
    ("impfuzzy"), or "" if the file has no import directory.
    """
    impstrs = []
    # Library extensions stripped during normalization.
    exts = ["ocx", "sys", "dll"]
    if not hasattr(pe, "DIRECTORY_ENTRY_IMPORT"):
        return ""
    for entry in pe.DIRECTORY_ENTRY_IMPORT:
        # pefile may expose the DLL name as bytes or str depending on version.
        if isinstance(entry.dll, bytes):
            libname = entry.dll.decode().lower()
        else:
            libname = entry.dll.lower()
        # Drop a known library extension so "kernel32.dll" -> "kernel32".
        parts = libname.rsplit(".", 1)
        if len(parts) > 1 and parts[1] in exts:
            libname = parts[0]
        for imp in entry.imports:
            funcname = None
            if not imp.name:
                # Import-by-ordinal: resolve a symbolic name via ordlookup.
                funcname = ordlookup.ordLookup(
                    entry.dll.lower(), imp.ordinal, make_name=True
                )
                if not funcname:
                    raise pefile.PEFormatError(
                        f"Unable to look up ordinal {entry.dll}:{imp.ordinal:04x}"
                    )
            else:
                funcname = imp.name
            if not funcname:
                continue
            if isinstance(funcname, bytes):
                funcname = funcname.decode()
            impstrs.append("%s.%s" % (libname.lower(), funcname.lower()))
    # Fuzzy-hash the comma-joined "lib.func" list.
    return ssdeep.hash(",".join(impstrs).encode())
def diec(file_path: str) -> Optional[Dict]:
    """Run Detect-It-Easy (``diec --json``) on *file_path* and return its output.

    Detections are normalized: the 'string' field is dropped and '-'
    placeholder values become None; an empty detection list becomes None.
    Exits the process if the tool fails or emits unparseable JSON.
    """
    cmd = ['diec', '--json', file_path]
    try:
        raw = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        result = json.loads(raw.strip().decode(errors='ignore'))
        if not result['detects']:
            result['detects'] = None
        else:
            cleaned = []
            for detect in result['detects']:
                del detect['string']
                cleaned.append({k: (None if v == '-' else v)
                                for k, v in detect.items()})
            result['detects'] = cleaned
        return result
    except (subprocess.CalledProcessError, ValueError) as e:
        sys.exit(f"Exception: {e.output.decode(errors='replace') if e.output else e}")
def yarac(file_path: str, fformat: str, arch: str) -> Optional[List[Dict[str, str]]]:
    """Match *file_path* against the compiled <fformat>_<arch>.yarac ruleset.

    Returns one dict per matching rule (namespace as 'type', rule name, plus
    the rule's metadata minus the noisy pattern/tool/source keys), or None
    when nothing matches.
    """
    rules = yara.load(join(yarac_signatures_dir, f'{fformat}_{arch}.yarac'))
    matches = rules.match(file_path)
    if not matches:
        return None
    results = []
    for m in matches:
        m.meta.pop('pattern', None)
        m.meta.pop('tool', None)
        m.meta.pop('source', None)
        entry = {'type': m.namespace, 'rule': m.rule}
        entry.update(m.meta)
        results.append(entry)
    return results
def hashes(file_path: str) -> Optional[Dict]:
    """Compute similarity/fuzzy hashes for *file_path*.

    Returns ssdeep, tlsh and sdhash digests of the raw bytes plus the PE
    import-table hashes (imphash / impfuzzy).  pefile.PE raises if the file
    is not a valid PE.
    """
    with open(file_path, 'rb') as fp:
        data = fp.read()
    # sdhash has no Python binding here, so shell out to the CLI tool.
    sdhash_out = subprocess.check_output(
        ['sdhash', file_path], stderr=subprocess.STDOUT
    ).strip().decode(errors='ignore')
    pe = pefile.PE(file_path)
    return {
        'ssdeep': ssdeep.hash(data),
        'tlsh': tlsh.hash(data),
        'sdhash': sdhash_out,
        'imphash': pe.get_imphash(),
        'impfuzzy': get_impfuzzy(pe),
    }
def _detect_format_arch(magic_sig: str, file_path: str):
    """Return (format, arch) detected from a libmagic signature string.

    Falls back to reading the PE COFF machine field with pefile when the
    magic string is inconclusive.  Either element may be None.
    """
    if magic_sig.startswith('PE32'):
        # libmagic reports 64-bit PE files as "PE32+".
        return 'pe', ('x64' if magic_sig[4] == '+' else 'x86')
    if magic_sig.startswith('ELF'):
        if '64-bit' in magic_sig:
            return 'elf', 'x64'
        if '32-bit' in magic_sig:
            return 'elf', 'x86'
        return 'elf', None
    if magic_sig.startswith('MachO'):
        # NOTE(review): libmagic typically prints "Mach-O" with a hyphen, so
        # this prefix may never match — confirm against the magic build used.
        if '64-bit' in magic_sig:
            return 'macho', 'x64'
        if '32-bit' in magic_sig:
            return 'macho', 'x86'
        return 'macho', None
    # Last resort: inspect the COFF header directly.
    try:
        pe = pefile.PE(file_path, fast_load=True)
        machine = pe.FILE_HEADER.Machine
        if machine == 0x14c:
            return 'pe', 'x86'
        if machine & 0x00ff == 0x64:
            return 'pe', 'x64'
        return 'pe', None
    except Exception:
        return None, None


def aggregator(file_path: str) -> Optional[Dict]:
    """Aggregate all signature/hash information for one executable file.

    Returns None for unsupported or undetectable files; otherwise a dict
    with the libmagic string, detected format/arch, Detect-It-Easy output,
    YARA matches, fuzzy hashes and the file's SHA-256.
    """
    if not is_supported_file(file_path):
        return None
    out = dict()
    magic_sig = magic.from_file(file_path)
    out['magic'] = magic_sig
    fformat, arch = _detect_format_arch(magic_sig, file_path)
    if fformat is None or arch is None:
        return None
    out['format'] = fformat
    out['arch'] = arch
    out['die'] = diec(file_path)
    out['yara'] = yarac(file_path, fformat, arch)
    out['hashes'] = hashes(file_path)
    bname = basename(file_path)
    # Corpus file names are often already the SHA-256; reuse them to avoid
    # re-hashing the file.
    if sha256_regex.match(bname):
        out['sha256'] = bname
    else:
        out['sha256'] = get_file_sha256sum(file_path)
    return out
def listdir_file_abspath(folder: str) -> List:
    """Return absolute paths of the plain files directly inside *folder*
    (subdirectories are excluded)."""
    assert isdir(folder)
    paths = []
    for entry in os.listdir(folder):
        full = abspath(join(folder, entry))
        if not isdir(full):
            paths.append(full)
    return paths
def run_parallel(tgt_folder: str) -> List[Dict]:
    """Run aggregator() over every file in *tgt_folder* using a process pool.

    Shows a tqdm progress bar and drops empty results (unsupported files).
    """
    print('> Scanning input directory...')
    files = listdir_file_abspath(tgt_folder)
    print(f'> Found {len(files)} files. Analysis in progress...')
    with Pool() as pool:
        results = list(tqdm(pool.imap(aggregator, files), total=len(files)))
    print('> Analysis done!')
    return [r for r in results if r]
if __name__ == "__main__":
assert isdir(yara_signatures_dir)
assert isdir(yarac_signatures_dir)
if len(os.listdir(yarac_signatures_dir)) <= 1:
nof_sigs = compile_signatures()
print(f'> {nof_sigs} rules compiled')
if len(sys.argv) != 3:
sys.exit(f'Usage: {basename(__file__)} IN_DIR OUT_FILE')
tgt_dir = sys.argv[1]
assert isdir(tgt_dir)
tgt_file = sys.argv[2]
if isfile(tgt_file):
os.remove(tgt_file)
results = run_parallel(tgt_dir)
print(f'> Found {len(results)} executable files. Writing output file...')
with open(tgt_file, 'w') as fp:
json.dump(results, fp)
print(f'> "{basename(tgt_file)}" written. Bye!')
| 32.678571 | 106 | 0.560291 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,086 | 0.131876 |
cb093c072ef7f389a955ddcff40785b65302fce1 | 272 | py | Python | rakesh_factorial.py | vinaymavi/alivenet-python-training- | f565dc1c19590815dbb17a3dcd012b02149dd804 | [
"MIT"
] | null | null | null | rakesh_factorial.py | vinaymavi/alivenet-python-training- | f565dc1c19590815dbb17a3dcd012b02149dd804 | [
"MIT"
] | 1 | 2018-07-17T17:58:46.000Z | 2018-07-17T17:58:46.000Z | rakesh_factorial.py | vinaymavi/alivenet-python-training- | f565dc1c19590815dbb17a3dcd012b02149dd804 | [
"MIT"
] | 9 | 2018-07-17T13:38:18.000Z | 2018-08-01T10:36:22.000Z | #Write a program which can compute the factorial of a given numbers.
# Compute the factorial of a user-supplied number and print it.
# NOTE(review): the exercise text asked for a comma-separated sequence on a
# single line, but this script prints only the final factorial value.
number = int(input("Please Enter factorial Number: "))
fact = 1
# Multiply down from `number` to 1; range(number, 0, -1) is empty for
# number <= 0, so fact stays 1 (0! == 1 by convention).
for i in range(number, 0, -1):
    fact = fact * i
print(fact)
cb098d10db1cb0c02e29f039ccbde0db55766abe | 5,598 | py | Python | .old/core/conf/_settings.py | Zadigo/Emails | c8577c56749fedc87c72cb19d1a764b2684da94a | [
"MIT"
] | null | null | null | .old/core/conf/_settings.py | Zadigo/Emails | c8577c56749fedc87c72cb19d1a764b2684da94a | [
"MIT"
] | null | null | null | .old/core/conf/_settings.py | Zadigo/Emails | c8577c56749fedc87c72cb19d1a764b2684da94a | [
"MIT"
] | null | null | null | import datetime
import json
import os
import secrets
from importlib import import_module
# Absolute path of the JSON settings file, resolved from the current working
# directory — so the app must be launched from the project root.
PATH = os.path.join(os.getcwd(), 'app', 'core', 'conf', 'settings.json')
def deserialize(func):
    """A decorator that deserializes objects stored
    in the file.

    Wraps a getter so that a "parent__child" name drills one level into the
    parent mapping, and a stored datetime tag is expanded back into a
    datetime.date object under 'access_date_class'.
    """
    def get(self, name):
        # NOTE(review): class_name is only used by the commented-out repr
        # below — currently dead.
        class_name = self.__class__.__name__
        # Check if there is a softlink ("parent__child" style key).
        try:
            parent, child = name.split('__', 1)
        except ValueError:
            # We have a simple string
            searched_item = func(self, name)
        else:
            # BUG: TypeError: string indices must be integers
            # (when the parent value is a string rather than a mapping)
            # We have a parent__child link
            searched_item = func(self, parent)[child]
        # Can only work if dictionnary and
        # that the value is not None
        if isinstance(searched_item, (dict, list)) \
                and searched_item is not None:
            # Converts the timestamp in its
            # original datetime class
            if '__class__' in searched_item:
                tag = searched_item['__class__']
                if tag == 'datetime':
                    datetime_class = datetime.date.fromtimestamp(searched_item['access_date'])
                    searched_item.update({'access_date_class': datetime_class})
        # return "%s([%s])" % (class_name, searched_item)
        return searched_item
    return get
class Settings:
    """Lazily-populated settings object backed by a JSON file.

    The file must contain at least ``{"_id": ..., "settings": {...}}``.
    A falsy ``_id`` marks a fresh file, which is then populated with the
    default values and written back to disk.
    """
    # Placeholder for an email sender class, set elsewhere if needed.
    email_class = None

    def __init__(self, name_or_path=None, **kwargs):
        if not name_or_path:
            # If no custom path has been provided, default to the
            # conventional file name in the current directory.
            name_or_path = 'settings.json'
        self.name_or_path = name_or_path
        settings_file = self.handler()
        settings_dict = json.load(settings_file)
        # An empty document cannot carry the base {_id, settings: {}}
        # structure, so reject it outright.
        if not settings_dict:
            raise ValueError('The file is not valid.')
        # Work on a cached copy so subsequent reads never touch disk.
        self.cache = settings_dict.copy()
        # If the file has no _id yet, populate the settings section with
        # the default values (check_file_id closes the handle either way).
        self.check_file_id(settings_dict['_id'], settings_file)

    def __repr__(self):
        return f'{self.__class__.__name__}([{self.cache}])'

    def handler(self):
        """Open and return the settings file in read/write text mode.

        A missing file raises FileNotFoundError straight from open().
        (The previous ``except FileExistsError: raise`` was a no-op.)
        """
        return open(self.name_or_path, 'r+', encoding='utf-8')

    def check_file_id(self, file_id, handle=None, **kwargs):
        """Populate the file with defaults when it has no _id yet.

        Returns the populated settings dict for a fresh file when a handle
        is available, otherwise None.
        """
        if not file_id:
            # Fresh file: assign a random identity...
            self.cache['_id'] = secrets.token_hex(nbytes=25)
            if handle:
                # ...and write the default settings through the handle.
                return self.populate(handle)
            # Without a handle we can only update the in-memory _id.
            return None
        # Already initialized: nothing to write, just release the handle.
        # (Guarded: previously handle.close() raised AttributeError when
        # this method was called without a handle.)
        if handle:
            handle.close()
        return None

    def populate(self, handle):
        """Write the default settings into the (fresh) file and return them."""
        base_dir = str(os.getcwd())
        base = {
            'base_path': base_dir,
            'data_path': os.path.join(base_dir, 'app\\data'),
            'email_class': 'zemailer.app.core.sender',
            'settings_class': 'zemailer.app.core.settings'
        }
        self.cache['settings'] = base
        self.cache['last_updated'] = self.serialize_date()
        # Rewind and truncate so stale bytes from the previous content do
        # not survive past the new JSON document.  (The previous
        # ``handle.writelines('')`` wrote nothing and did not truncate.)
        handle.seek(0)
        handle.truncate()
        json.dump(self.cache, handle, indent=4)
        handle.close()
        return self.cache

    def serialize_date(self):
        """Return the current date as a JSON-friendly tagged timestamp."""
        return {'__class__': datetime.__name__, 'access_date': self.current_timestamp()}

    @staticmethod
    def current_timestamp():
        """Return the current local time as a POSIX timestamp (float)."""
        return datetime.datetime.now().timestamp()

    @deserialize
    def get(self, name):
        """Return the cached top-level value for *name*, or None if absent."""
        try:
            return self.cache[name]
        except KeyError:
            return None

    @staticmethod
    def load_module(dotted_path=None):
        """Collect the classes defined in the sender module.

        NOTE(review): *dotted_path* is currently ignored — the module path
        is hard-coded.  Returns a list of one-element {name: class} dicts
        (the previous version built the list but never returned it).
        """
        module = import_module('zemailer.app.core.sender')
        senders = []
        for klass, value in module.__dict__.items():
            if isinstance(value, type):
                senders.append({klass: value})
        return senders


# Module-level singleton constructed from the JSON file at PATH, for use
# within the app or outside.  (Previously this assignment sat inside the
# class body, where the name ``Settings`` is not yet bound — a NameError
# at import time.)
initialized_settings = Settings(name_or_path=PATH)
| 33.927273 | 94 | 0.590389 | 3,990 | 0.712755 | 0 | 0 | 718 | 0.12826 | 0 | 0 | 2,220 | 0.39657 |
cb0b42243a5f1619da2be629a939fedeb68fb530 | 3,510 | py | Python | tools/Vitis-AI-Quantizer/vai_q_tensorflow2.x/setup.py | hito0512/Vitis-AI | 996459fb96cb077ed2f7e789d515893b1cccbc95 | [
"Apache-2.0"
] | 848 | 2019-12-03T00:16:17.000Z | 2022-03-31T22:53:17.000Z | tools/Vitis-AI-Quantizer/vai_q_tensorflow2.x/setup.py | wangyifan778/Vitis-AI | f61061eef7550d98bf02a171604c9a9f283a7c47 | [
"Apache-2.0"
] | 656 | 2019-12-03T00:48:46.000Z | 2022-03-31T18:41:54.000Z | tools/Vitis-AI-Quantizer/vai_q_tensorflow2.x/setup.py | wangyifan778/Vitis-AI | f61061eef7550d98bf02a171604c9a9f283a7c47 | [
"Apache-2.0"
] | 506 | 2019-12-03T00:46:26.000Z | 2022-03-30T10:34:56.000Z | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Install tensorflow_model_optimization."""
import datetime
import os
import sys
from setuptools import find_packages
from setuptools import setup
from setuptools.command.install import install as InstallCommandBase
from setuptools.dist import Distribution
# To enable importing version.py directly, we add its path to sys.path.
version_path = os.path.join(
os.path.dirname(__file__), 'tensorflow_model_optimization', 'python/core')
sys.path.append(version_path)
from version import __version__ # pylint: disable=g-import-not-at-top
# TODO(alanchiao): add explicit Tensorflow requirement once Tensorflow
# moves from a tf and tf-gpu packaging approach (where a user installs
# one of the two) to one where a user installs the tf package and then
# also installs the gpu package if they need gpu support. The latter allows
# us (and our dependents) to maintain a single package instead of two.
# Runtime dependencies (PEP 508 specifiers); enum34 is only pulled in on
# Python < 3.4.
REQUIRED_PACKAGES = [
    'numpy~=1.14',
    'six~=1.10',
    'enum34~=1.1;python_version<"3.4"',
    'dm-tree~=0.1.1',
]

# Passing `--release` on the command line selects the stable package name;
# the flag is removed so setuptools does not see an unknown option.
if '--release' in sys.argv:
  release = True
  sys.argv.remove('--release')
else:
  # Build a nightly package by default.
  release = False

if release:
  project_name = 'vai-q-tensorflow2'
else:
  # Nightly releases use date-based versioning of the form
  # '0.0.1.dev20180305'
  project_name = 'vai-q-tensorflow2-nightly'
  datestring = datetime.datetime.now().strftime('%Y%m%d')
  __version__ += datestring
class BinaryDistribution(Distribution):
  """This class is needed in order to create OS specific wheels."""

  def has_ext_modules(self):
    # NOTE(review): returning False makes setuptools treat the distribution
    # as pure Python (a non-platform-specific wheel), which contradicts the
    # docstring above — confirm which behavior is intended.
    return False
# Package metadata and build configuration.
# NOTE(review): the concatenated description fragments lack separating
# spaces between sentences ("...optimization)A suite...") — confirm the
# intended wording.
setup(
    name=project_name,
    version=__version__,
    description='Xilinx Vitis AI Quantizer for Tensorflow 2.x. '
    'This is customized based on tensorflow-model-optimization('
    'https://github.com/tensorflow/model-optimization)'
    'A suite of tools that users, both novice and advanced'
    ' can use to optimize machine learning models for deployment'
    ' and execution.',
    author='Xiao Sheng',
    author_email='kylexiao@xilinx.com',
    license='Apache 2.0',
    packages=find_packages(),
    install_requires=REQUIRED_PACKAGES,
    # Add in any packaged data (prebuilt shared objects and JSON configs).
    include_package_data=True,
    package_data={'': ['*.so', '*.json']},
    exclude_package_data={'': ['BUILD', '*.h', '*.cc']},
    zip_safe=False,
    distclass=BinaryDistribution,
    cmdclass={
        'pip_pkg': InstallCommandBase,
    },
    classifiers=[
        'Intended Audience :: Developers',
        'Intended Audience :: Education',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: Apache Software License',
        'Topic :: Scientific/Engineering',
        'Topic :: Scientific/Engineering :: Artificial Intelligence',
    ],
    keywords='tensorflow model optimization machine learning',
)
| 35.454545 | 80 | 0.703989 | 154 | 0.043875 | 0 | 0 | 0 | 0 | 0 | 0 | 2,261 | 0.64416 |
cb0d83bca9047c82889cf5995c31524e13285dd4 | 39 | py | Python | perses/samplers/__init__.py | schallerdavid/perses | 58bd6e626e027879e136f56e175683893e016f8c | [
"MIT"
] | 99 | 2016-01-19T18:10:37.000Z | 2022-03-26T02:43:08.000Z | perses/samplers/__init__.py | schallerdavid/perses | 58bd6e626e027879e136f56e175683893e016f8c | [
"MIT"
] | 878 | 2015-09-18T19:25:30.000Z | 2022-03-31T02:33:04.000Z | perses/samplers/__init__.py | schallerdavid/perses | 58bd6e626e027879e136f56e175683893e016f8c | [
"MIT"
] | 30 | 2015-09-21T15:26:35.000Z | 2022-01-10T20:07:24.000Z | from perses.samplers.samplers import *
| 19.5 | 38 | 0.820513 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
cb0d9532ba61a5b5829e720211165c49e69be279 | 5,272 | py | Python | main.py | ROHITH-Singh/DataScience-Scrappping | 201b5e62e8d12c71628398d42ac40ef8d8ceb198 | [
"Apache-2.0"
] | null | null | null | main.py | ROHITH-Singh/DataScience-Scrappping | 201b5e62e8d12c71628398d42ac40ef8d8ceb198 | [
"Apache-2.0"
] | null | null | null | main.py | ROHITH-Singh/DataScience-Scrappping | 201b5e62e8d12c71628398d42ac40ef8d8ceb198 | [
"Apache-2.0"
] | null | null | null | import requests
import pymongo
from pymongo import MongoClient
import json
from bs4 import BeautifulSoup
# Scrapes the MangaDex front page for /title/... links, then walks each
# title's chapter listing and stores {id, imgthmp, Title, description,
# chapters} documents in the "mangadex-scrap" MongoDB collection.
#
# SECURITY NOTE(review): the MongoDB connection string below embeds a live
# username/password.  Credentials committed to source control must be
# rotated and moved to an environment variable or a config file.
cluster= MongoClient("mongodb+srv://admin:Rohit91138@cluster0.d4cq9.mongodb.net/Novelore?retryWrites=true&w=majority")
db=cluster["Novelore"]
collection=db["mangadex-scrap"]
url="https://mangadex.org"
# NOTE(review): `data` is never used — the json dump at the bottom is
# commented out.
data=list()
r=requests.get(url)
htmlcontent=r.content
#print(htmlcontent)
soup=BeautifulSoup(htmlcontent,'html.parser')
#print(soup.prettify)
#commonly used objects
#1 tag print(type(soup.title))
#2 navigable string print(type(title.string))
#3 Beautiful soup print(type(soup))
#4 comment
#get all paras from html content instead of p use a for anchor tag
#paras =soup.find_all('p')
#print(paras)
#print(soup.find('p')['class'])
#get all the links on the page
# Collect every distinct /title/... link from the front page.
anchors=soup.find_all('a')
all_links =set()
for link in anchors:
    if( link.get('href').startswith('/title')):
        linkText=("https://mangadex.org"+link.get('href'))
        all_links.add(linkText)
print(all_links)
s=list(all_links)
print(s)
# Visit each title page and assemble its MongoDB document.
for t in s:
    l =requests.get(t)
    htmlcontents=l.content
    soup1=BeautifulSoup(htmlcontents,'html.parser')
    # Fragments stripped from the <title> text / title-id breadcrumb below.
    sub_list=["MangaDex","-","(Title)"," ","<title>","</title>","Title ID:","\n","/"]
    author=list()
    titleid=list()
    img=list()
    for matter1 in soup1.find_all('div',{"class":"col-xl-9 col-lg-8 col-md-7"}):
        titleid=list(matter1.find('div',{"class":"row m-0 py-1 px-0"}).strings)
        author=list(matter1.find('a').strings)
    for matter2 in soup1.find_all('div',{"class":"col-xl-3 col-lg-4 col-md-5"}):
        img=(matter2.find('img',{"class":"rounded"}))
        img=img["src"]
    title_str=str(soup1.title)
    descrption =soup1.find('meta',{"name":"description"})
    descrption=(descrption["content"])
    author= ''.join(map(str,author))
    titleid= ''.join(map(str,titleid))
    for sub in sub_list:
        title_str=title_str.replace(sub,' ')
        titleid=titleid.replace(sub,'')
    titleid =titleid.replace(" ",'')
    n=len(title_str)
    title_name=title_str[1:n]
    # print(titleid)
    # print(title_name)
    # print(author)
    # print(descrption)
    # Slug used in the chapter-listing URLs.
    title_link=title_name.replace(' ','-')
    title_link=title_link.replace('---','')
    # print(title_link)
    page_link_list = (soup1.find_all('a', {"class": "page-link"}))
    page_link=list()
    chapters = list()
    if(page_link_list):
        # Paginated chapter list: derive the page count from the last
        # /chapters/<n>/ pagination link, then fetch every page.
        # print("page available")
        for pagelink_chapter in page_link_list:
            if (str(pagelink_chapter.get('href')).startswith('/title')):
                pagelink = ("https://mangadex.org"+str(pagelink_chapter.get('href')))
                page_link = [pagelink] + page_link
        for x in page_link:
            if page_link.count(x) > 1:
                page_link.remove(x)
        dspilt=''+page_link[0]
        dspilt=dspilt.split('chapters/',1)
        dspilt1=dspilt[1]
        # print(page_link[0])
        for sub in sub_list:
            dspilt1=dspilt1.replace(sub,'')
        numpages=int(dspilt1)
        # print(numpages)
    # for q in page_link:
    #     b=requests.get(q)
    #     htmlpagecontent=b.content
    #     soup3=BeautifulSoup(htmlpagecontent,'html.parser')
    #     page_link_list = (soup3.find_all('a', {"class": "page-link"}))
    #     for pagelink_chapter in page_link_list:
    #         if (str(pagelink_chapter.get('href')).startswith('/title')):
    #             pagelink = ("https://mangadex.org"+str(pagelink_chapter.get('href')))
    #             page_link =page_link +[pagelink]
    #
    #     page_link=set(page_link)
    #     page_link=list(page_link)
    #
    #     print((page_link))
        for x in (range(1,numpages+1)):
            m="https://mangadex.org/title/"+titleid+"/"+title_link+"/chapters/"+str(x)+"/"
            # print(m)
            p=requests.get(m)
            htmlpagecontent=p.content
            soup2=BeautifulSoup(htmlpagecontent,'html.parser')
            anchors_chapter1 = (soup2.find_all('a', {"class": "text-truncate"}))
            allchapter_link1 = list()
            for link_chapter1 in anchors_chapter1:
                if (link_chapter1.get('href').startswith('/chapter')):
                    chapterlink1 = ("https://mangadex.org" + link_chapter1.get('href'))
                    chapters=chapters+[chapterlink1]
            # print(chapters)
    else:
        # Single-page chapter list: scrape it directly from the title page.
        # print("notavailabele")
        anchors_chapter = (soup1.find_all('a',{"class":"text-truncate"}))
        allchapter_link=list()
        for link_chapter in anchors_chapter:
            if(link_chapter.get('href').startswith('/chapter')):
                chapterlink=("https://mangadex.org"+link_chapter.get('href'))
                chapters=chapters+[chapterlink]
        # print(chapters)
    data_set={"id":titleid ,"imgthmp":img,"Title":title_name,"description":descrption,"chapters":chapters}
    collection.insert_one(data_set)
    print(data_set)
result= collection.find({"Title":"solo-leveling"})
# data.append(data_set)
# with open('data.txt', 'a') as outfile:
#     json.dump(data, outfile)
# print(data)
# Connection-string template (credentials belong outside source control):
# mongodb+srv://admin:<password>@cluster0.d4cq9.mongodb.net/<dbname>?retryWrites=true&w=majority
cb0dc5cb2397b2b17050683d98107fc206b4c8db | 476 | py | Python | Leetcode/Stacks,_Queues/1_-_Easy/933._Number_of_Recent_Calls.py | Khalid-Sultan/Algorithms-Prep | 7773c5bc0448d8677bf324bd1d9bdc43b813fcd5 | [
"MIT"
] | 1 | 2020-09-21T10:01:26.000Z | 2020-09-21T10:01:26.000Z | Leetcode/Stacks,_Queues/1_-_Easy/933._Number_of_Recent_Calls.py | Khalid-Sultan/Algorithms-Prep | 7773c5bc0448d8677bf324bd1d9bdc43b813fcd5 | [
"MIT"
] | null | null | null | Leetcode/Stacks,_Queues/1_-_Easy/933._Number_of_Recent_Calls.py | Khalid-Sultan/Algorithms-Prep | 7773c5bc0448d8677bf324bd1d9bdc43b813fcd5 | [
"MIT"
] | 2 | 2020-09-01T12:33:55.000Z | 2020-11-30T13:23:50.000Z | from collections import deque
class RecentCounter:
    """Counts the pings received in the last 3000 ms (LeetCode 933).

    Timestamps arrive in increasing order, so a queue ordered by arrival
    time can discard stale entries from its old end on every call.
    """

    def __init__(self):
        # Oldest timestamp at the left end, newest at the right.
        self.buffer = deque()

    def ping(self, t: int) -> int:
        """Record a ping at time *t* (ms) and return how many pings fall in
        the inclusive window [t - 3000, t]."""
        self.buffer.appendleft(t)
        while self.buffer[-1] < t - 3000:
            self.buffer.pop()
        return len(self.buffer)


# Usage:
# obj = RecentCounter()
# count = obj.ping(t)
cb0f3976b580dfbacb713d7e37ea1985da4c916a | 3,524 | py | Python | ssqueezepy/viz_toolkit.py | hydrogeoscience/ssqueezepy | fa7654dfad58eda86b5866140625d6976733bdfa | [
"MIT"
] | 3 | 2021-01-08T16:44:40.000Z | 2021-01-11T06:59:55.000Z | ssqueezepy/viz_toolkit.py | hydrogeoscience/ssqueezepy | fa7654dfad58eda86b5866140625d6976733bdfa | [
"MIT"
] | null | null | null | ssqueezepy/viz_toolkit.py | hydrogeoscience/ssqueezepy | fa7654dfad58eda86b5866140625d6976733bdfa | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Convenience visual methods"""
import numpy as np
import matplotlib.pyplot as plt
def imshow(data, title=None, show=1, cmap=None, norm=None, complex=None, abs=0,
           w=None, h=None, ridge=0, ticks=1, yticks=None, aspect='auto', **kw):
    """Display `data` as an image via `plt.imshow`.

    norm: (vmin, vmax) tuple; defaults to symmetric about 0, or [0, max] if `abs`.
    complex: None -> auto-detect from the imaginary part; False -> plot real part
        only; otherwise plot real & imaginary parts side by side.
    abs: plot |data| on a sequential ('bone') colormap.
    ridge: overlay the per-column argmax locations of |data| as red dots.
    w, h: figure-size multipliers applied to a 14x8-inch base.
    """
    kw['interpolation'] = kw.get('interpolation', 'none')
    if norm is None:
        mx = np.max(np.abs(data))
        vmin, vmax = ((-mx, mx) if not abs else
                      (0, mx))
    else:
        vmin, vmax = norm
    if cmap is None:
        cmap = 'bone' if abs else 'bwr'
    _kw = dict(vmin=vmin, vmax=vmax, cmap=cmap, aspect=aspect, **kw)

    if abs:
        plt.imshow(np.abs(data), **_kw)
    else:
        if (complex is None and np.sum(np.abs(np.imag(data))) < 1e-8) or (
                complex is False):
            plt.imshow(data.real, **_kw)
        else:
            fig, axes = plt.subplots(1, 2)
            axes[0].imshow(data.real, **_kw)
            # BUG FIX: the imaginary part used to be drawn onto axes[0] as
            # well, overwriting the real part and leaving axes[1] empty.
            axes[1].imshow(data.imag, **_kw)
            plt.subplots_adjust(left=0, right=1, bottom=0, top=1,
                                wspace=0, hspace=0)

    if w or h:
        plt.gcf().set_size_inches(14 * (w or 1), 8 * (h or 1))

    if ridge:
        data_mx = np.where(np.abs(data) == np.abs(data).max(axis=0))
        plt.scatter(data_mx[1], data_mx[0], color='r', s=4)

    if not ticks:
        plt.xticks([])
        plt.yticks([])
    if yticks is not None:
        idxs = np.linspace(0, len(yticks) - 1, 8).astype('int32')
        # loop variable renamed from `h`, which shadowed the figure-height param
        yt = ["%.2f" % v for v in yticks[idxs]]
        plt.yticks(idxs, yt)

    _maybe_title(title)
    if show:
        plt.show()
def plot(x, y=None, title=None, show=0, ax_equal=False, complex=0,
         w=None, h=None, **kw):
    """Line-plot `y` versus `x`; with `y` omitted, plot `x` against its indices."""
    if y is None:
        x, y = np.arange(len(x)), x
    if complex:
        # draw the real and imaginary parts as two separate curves
        plt.plot(x, y.real, **kw)
        plt.plot(x, y.imag, **kw)
    else:
        plt.plot(x, y, **kw)
    _maybe_title(title)
    _scale_plot(plt.gcf(), plt.gca(), show=show, ax_equal=ax_equal, w=w, h=h)
def scat(x, y=None, title=None, show=0, ax_equal=False, s=18, w=None, h=None,
         **kw):
    """Scatter-plot `y` versus `x`; with `y` omitted, plot `x` against its indices."""
    if y is None:
        x, y = np.arange(len(x)), x
    plt.scatter(x, y, s=s, **kw)
    _maybe_title(title)
    _scale_plot(plt.gcf(), plt.gca(), show=show, ax_equal=ax_equal, w=w, h=h)
def hist(x, bins=500, title=None, show=0, stats=0):
    """Histogram of the flattened array; optionally print and return its stats."""
    x = np.asarray(x)
    _ = plt.hist(x.ravel(), bins=bins)
    _maybe_title(title)
    if show:
        plt.show()
    if not stats:
        return None
    mu, std, mn, mx = x.mean(), x.std(), x.min(), x.max()
    print("(mean, std, min, max) = ({}, {}, {}, {})".format(
        *_fmt(mu, std, mn, mx)))
    return mu, std, mn, mx
def _fmt(*nums):
return [(("%.3e" % n) if (abs(n) > 1e3 or abs(n) < 1e-3) else
("%.3f" % n)) for n in nums]
def _maybe_title(title):
    """Set a bold, left-aligned title on the current axes if one was given."""
    if title is None:
        return
    plt.title(str(title), loc='left', weight='bold', fontsize=15)
def _scale_plot(fig, ax, show=False, ax_equal=False, w=None, h=None):
    """Tighten x-limits, resize the figure, and optionally show it.

    w, h are multipliers on a 14x8-inch base (8x8 when `ax_equal`).
    """
    xmin, xmax = ax.get_xlim()
    rng = xmax - xmin
    # shave 1.8% off each side so curves don't touch the axes box
    ax.set_xlim(xmin + .018 * rng, xmax - .018 * rng)
    if w or h:
        fig.set_size_inches(14 * (w or 1), 8 * (h or 1))
    if ax_equal:
        # symmetric square limits large enough to cover both axes' extents
        yabsmax = max(np.abs([*ax.get_ylim()]))
        mx = max(yabsmax, max(np.abs([xmin, xmax])))
        ax.set_xlim(-mx, mx)
        ax.set_ylim(-mx, mx)
        fig.set_size_inches(8*(w or 1), 8*(h or 1))
    if show:
        plt.show()
def plotenergy(x, axis=1, **kw):
    """Plot |x|^2 summed along `axis` (signal energy per slice)."""
    energy = np.sum(np.abs(x) ** 2, axis=axis)
    plot(energy, **kw)
| 30.643478 | 79 | 0.530363 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 190 | 0.053916 |
cb0f58e18b94913a907849c50579e695ec74ed20 | 1,674 | py | Python | tests/tests.py | mghorbani2357/Necrypt | 7191c8b29c94bf0c2550ab28b2226f64553981fe | [
"MIT"
] | 1 | 2021-02-05T20:42:26.000Z | 2021-02-05T20:42:26.000Z | tests/tests.py | mghorbani2357/Necrypt | 7191c8b29c94bf0c2550ab28b2226f64553981fe | [
"MIT"
] | null | null | null | tests/tests.py | mghorbani2357/Necrypt | 7191c8b29c94bf0c2550ab28b2226f64553981fe | [
"MIT"
] | null | null | null | from unittest import TestCase
from necrypt import Necrypt
import os
class TestNecrypt(TestCase):
    """Unit tests for the Necrypt crypto wrapper (encrypt/decrypt/sign/files/keys)."""

    def test_unique_encryption(self):
        # Encrypting the same plaintext twice must yield different ciphertexts
        # (randomized padding), so ciphertexts are not replayable.
        n = Necrypt(1024)
        plain = 'Text'
        self.assertNotEqual(n.encrypt(plain), n.encrypt(plain))

    def test_encrypt_decrypt(self):
        # Round trip: decrypt(encrypt(x)) == x.
        n = Necrypt(1024)
        plain = 'Text'
        self.assertEqual(plain, n.decrypt(n.encrypt(plain)))

    def test_sign_verify(self):
        # Verifying a signature made over different data must raise.
        n = Necrypt(1024)
        plain = 'Text'
        signature = n.sign(plain + 's')
        with self.assertRaises(ValueError) as context:
            n.verify(plain, signature)
        self.assertEqual('Invalid signature', str(context.exception))

    def test_file_encryption_decryption(self):
        # File round trip: encrypt to disk, decrypt back, compare contents.
        plain_file_data = b'plain'
        with open('plain_file', 'wb') as plain_file:
            plain_file.write(plain_file_data)
        n = Necrypt(1024)
        n.encrypt_file('plain_file', 'cipher_file')
        n.decrypt_file('cipher_file', 'decrypted_file')
        with open('decrypted_file') as decrypted_file:
            decrypted_file_data = decrypted_file.read()
        files_to_remove = ['plain_file', 'cipher_file', 'decrypted_file']
        for filename in files_to_remove:
            if os.path.isfile(filename):
                os.remove(filename)
        self.assertEqual(plain_file_data, decrypted_file_data.encode())

    def test_import_export_key(self):
        # Exported keys must be importable and still decrypt earlier ciphertexts.
        n = Necrypt(1024)
        plain = 'plain'
        cipher = n.encrypt(plain)
        n.export_key('key_file')
        n.import_key('key_file')
        # BUG FIX: 'key_file' used to be left behind on disk after the test;
        # clean it up like the other file-based test does.
        if os.path.isfile('key_file'):
            os.remove('key_file')
        decrypted_cipher = n.decrypt(cipher)
        self.assertEqual(plain, decrypted_cipher)
| 26.15625 | 73 | 0.635603 | 1,603 | 0.957587 | 0 | 0 | 0 | 0 | 0 | 0 | 202 | 0.120669 |
cb0f869553f8c04f78839e888e57fd4c53f3e6c9 | 555 | py | Python | days/01-03-datetimes/code/calc_dts.py | greywidget/100daysofcode-with-python-course | 4deeb6c52f9f9ae6c415b9997f817b3565582083 | [
"MIT"
] | null | null | null | days/01-03-datetimes/code/calc_dts.py | greywidget/100daysofcode-with-python-course | 4deeb6c52f9f9ae6c415b9997f817b3565582083 | [
"MIT"
] | null | null | null | days/01-03-datetimes/code/calc_dts.py | greywidget/100daysofcode-with-python-course | 4deeb6c52f9f9ae6c415b9997f817b3565582083 | [
"MIT"
] | null | null | null | from datetime import date, timedelta
start_100days = date(2017, 3, 30)
pybites_founded = date(2016, 12, 19)
pycon_date = date(2018, 5, 8)
def get_hundred_days_end_date():
"""Return a string of yyyy-mm-dd"""
end_date = start_100days + timedelta(days=100)
return str(end_date)
def get_days_between_pb_start_first_joint_pycon():
"""Return the int number of days"""
return (pycon_date - pybites_founded).days
if __name__ == '__main__':
print(get_hundred_days_end_date())
print(get_days_between_pb_start_first_joint_pycon())
| 25.227273 | 56 | 0.735135 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.144144 |
cb1217616e7d5b0f03ecc1384f9bca660edef2b8 | 9,312 | py | Python | openprocurement/audit/monitoring/views/monitoring.py | ProzorroUKR/openprocurement.audit.api | a17836e29bca28d9151c091e1d2e42de9f70b949 | [
"Apache-2.0"
] | 1 | 2018-05-21T08:14:55.000Z | 2018-05-21T08:14:55.000Z | openprocurement/audit/monitoring/views/monitoring.py | ProzorroUKR/openprocurement.audit.api | a17836e29bca28d9151c091e1d2e42de9f70b949 | [
"Apache-2.0"
] | 59 | 2018-05-18T02:09:47.000Z | 2019-05-29T12:10:06.000Z | openprocurement/audit/monitoring/views/monitoring.py | ProzorroUKR/openprocurement.audit.api | a17836e29bca28d9151c091e1d2e42de9f70b949 | [
"Apache-2.0"
] | 1 | 2020-06-15T11:04:25.000Z | 2020-06-15T11:04:25.000Z | from logging import getLogger
from pyramid.security import ACLAllowed
from openprocurement.audit.api.constants import (
MONITORING_TIME,
ELIMINATION_PERIOD_TIME,
ELIMINATION_PERIOD_NO_VIOLATIONS_TIME,
DRAFT_STATUS,
ACTIVE_STATUS,
ADDRESSED_STATUS,
DECLINED_STATUS,
STOPPED_STATUS,
CANCELLED_STATUS,
)
from openprocurement.audit.api.utils import (
context_unpack, APIResource, APIResourceListing, json_view, forbidden,
)
from openprocurement.audit.api.utils import (
generate_id,
)
from openprocurement.audit.monitoring.design import FIELDS
from openprocurement.audit.monitoring.design import (
monitorings_real_by_dateModified_view,
monitorings_test_by_dateModified_view,
monitorings_by_dateModified_view,
monitorings_real_by_local_seq_view,
monitorings_test_by_local_seq_view,
monitorings_by_local_seq_view,
monitorings_real_draft_by_local_seq_view,
monitorings_all_draft_by_local_seq_view,
monitorings_real_draft_by_dateModified_view,
monitorings_all_draft_by_dateModified_view,
monitorings_real_count_view,
monitorings_test_count_view,
)
from openprocurement.audit.monitoring.utils import (
get_now, calculate_normalized_business_date, upload_objects_documents
)
from openprocurement.audit.monitoring.utils import (
save_monitoring,
monitoring_serialize,
apply_patch,
generate_monitoring_id,
generate_period,
set_ownership,
set_author,
get_monitoring_accelerator,
op_resource
)
from openprocurement.audit.monitoring.validation import (
validate_monitoring_data,
validate_patch_monitoring_data,
validate_credentials_generate,
validate_posting_elimination_resolution,
)
LOGGER = getLogger(__name__)
VIEW_MAP = {
u'': monitorings_real_by_dateModified_view,
u'test': monitorings_test_by_dateModified_view,
u'real_draft': monitorings_real_draft_by_dateModified_view,
u'all_draft': monitorings_all_draft_by_dateModified_view,
u'_all_': monitorings_by_dateModified_view,
}
CHANGES_VIEW_MAP = {
u'': monitorings_real_by_local_seq_view,
u'test': monitorings_test_by_local_seq_view,
u'real_draft': monitorings_real_draft_by_local_seq_view,
u'all_draft': monitorings_all_draft_by_local_seq_view,
u'_all_': monitorings_by_local_seq_view,
}
FEED = {
u'dateModified': VIEW_MAP,
u'changes': CHANGES_VIEW_MAP,
}
@op_resource(name='Monitorings', path='/monitorings')
class MonitoringsResource(APIResourceListing):
def __init__(self, request, context):
super(MonitoringsResource, self).__init__(request, context)
self.VIEW_MAP = VIEW_MAP
self.CHANGES_VIEW_MAP = CHANGES_VIEW_MAP
self.FEED = FEED
self.FIELDS = FIELDS
self.serialize_func = monitoring_serialize
self.object_name_for_listing = 'Monitorings'
self.log_message_id = 'monitoring_list_custom'
def get(self):
if self.request.params.get('mode') in ('real_draft', 'all_draft'):
perm = self.request.has_permission('view_draft_monitoring')
if not isinstance(perm, ACLAllowed):
return forbidden(self.request)
return super(MonitoringsResource, self).get()
@json_view(content_type='application/json',
permission='create_monitoring',
validators=(validate_monitoring_data,))
def post(self):
monitoring = self.request.validated['monitoring']
monitoring.id = generate_id()
monitoring.monitoring_id = generate_monitoring_id(get_now(), self.db, self.server_id)
if monitoring.decision:
upload_objects_documents(self.request, monitoring.decision, key="decision")
set_author(monitoring.decision.documents, self.request, 'author')
save_monitoring(self.request, date_modified=monitoring.dateCreated)
LOGGER.info('Created monitoring {}'.format(monitoring.id),
extra=context_unpack(self.request,
{'MESSAGE_ID': 'monitoring_create'},
{'MONITORING_ID': monitoring.id}))
self.request.response.status = 201
self.request.response.headers['Location'] = self.request.route_url('Monitoring', monitoring_id=monitoring.id)
return {'data': monitoring.serialize('view')}
@op_resource(name='Monitoring', path='/monitorings/{monitoring_id}')
class MonitoringResource(APIResource):
@json_view(permission='view_monitoring')
def get(self):
monitoring = self.request.validated['monitoring']
return {'data': monitoring.serialize('view')}
@json_view(content_type='application/json',
validators=(validate_patch_monitoring_data,),
permission='edit_monitoring')
def patch(self):
monitoring = self.request.validated['monitoring']
monitoring_old_status = monitoring.status
elimination_resolution = monitoring.eliminationResolution
apply_patch(self.request, save=False, src=self.request.validated['monitoring_src'])
now = get_now()
if monitoring_old_status == DRAFT_STATUS and monitoring.status == ACTIVE_STATUS:
set_author(monitoring.decision.documents, self.request, 'author')
accelerator = get_monitoring_accelerator(self.context)
monitoring.monitoringPeriod = generate_period(now, MONITORING_TIME, accelerator)
monitoring.decision.datePublished = now
monitoring.endDate = calculate_normalized_business_date(now, MONITORING_TIME, accelerator)
elif monitoring_old_status == ACTIVE_STATUS and monitoring.status == ADDRESSED_STATUS:
set_author(monitoring.conclusion.documents, self.request, 'author')
accelerator = get_monitoring_accelerator(self.context)
monitoring.conclusion.datePublished = now
monitoring.eliminationPeriod = generate_period(now, ELIMINATION_PERIOD_TIME, accelerator)
elif monitoring_old_status == ACTIVE_STATUS and monitoring.status == DECLINED_STATUS:
accelerator = get_monitoring_accelerator(self.context)
monitoring.eliminationPeriod = generate_period(now, ELIMINATION_PERIOD_NO_VIOLATIONS_TIME, accelerator)
monitoring.conclusion.datePublished = now
elif any([
monitoring_old_status == DRAFT_STATUS and monitoring.status == CANCELLED_STATUS,
monitoring_old_status == ACTIVE_STATUS and monitoring.status == STOPPED_STATUS,
monitoring_old_status == DECLINED_STATUS and monitoring.status == STOPPED_STATUS,
monitoring_old_status == ADDRESSED_STATUS and monitoring.status == STOPPED_STATUS
]):
set_author(monitoring.cancellation.documents, self.request, 'author')
monitoring.cancellation.datePublished = now
if not elimination_resolution and monitoring.eliminationResolution:
validate_posting_elimination_resolution(self.request)
monitoring.eliminationResolution.datePublished = monitoring.eliminationResolution.dateCreated
# download (change urls of) documents for Decision, Conclusion, etc.
raw_data = self.request.json.get("data", {})
for key in raw_data.keys():
if hasattr(getattr(monitoring, key, None), "documents") and "documents" in raw_data[key]:
upload_objects_documents(self.request, getattr(monitoring, key), key=key)
save_monitoring(self.request, date_modified=now)
LOGGER.info('Updated monitoring {}'.format(monitoring.id),
extra=context_unpack(self.request, {'MESSAGE_ID': 'monitoring_patch'}))
return {'data': monitoring.serialize('view')}
@op_resource(name='Monitoring credentials',
path='/monitorings/{monitoring_id}/credentials',
description="Monitoring credentials")
class MonitoringCredentialsResource(APIResource):
@json_view(permission='generate_credentials', validators=(validate_credentials_generate,))
def patch(self):
monitoring = self.request.validated['monitoring']
set_ownership(monitoring, self.request, 'tender_owner')
if save_monitoring(self.request):
self.LOGGER.info('Generate Monitoring credentials {}'.format(monitoring.id),
extra=context_unpack(self.request, {'MESSAGE_ID': 'monitoring_generate_credentials'}))
return {
'data': monitoring.serialize('view'),
'access': {
'token': monitoring.tender_owner_token
}
}
@op_resource(name='Monitoring count', path='/monitorings/count')
class MonitoringCountResource(APIResource):
def __init__(self, request, context):
super(MonitoringCountResource, self).__init__(request, context)
self.views = {
"": monitorings_real_count_view,
"test": monitorings_test_count_view,
}
@json_view(permission='view_listing')
def get(self):
mode = self.request.params.get('mode', '')
eval_view = self.views.get(mode, monitorings_real_count_view)
result = list(eval_view(self.db))
data = {
'data': result[0].value if len(result) else 0,
}
return data
| 42.520548 | 117 | 0.708655 | 6,556 | 0.704038 | 0 | 0 | 6,901 | 0.741087 | 0 | 0 | 1,048 | 0.112543 |
cb12a882c75f649d122a95115a98ac8b3e9de7de | 1,224 | py | Python | tests/utilities/test_spectrum_utils.py | jason-neal/companion_simulations | b5773e5539011d492b7128d0dd2778041ce50d52 | [
"MIT"
] | 1 | 2018-09-04T19:06:44.000Z | 2018-09-04T19:06:44.000Z | tests/utilities/test_spectrum_utils.py | jason-neal/companion_simulations | b5773e5539011d492b7128d0dd2778041ce50d52 | [
"MIT"
] | 85 | 2017-03-25T22:37:02.000Z | 2022-03-01T16:49:14.000Z | tests/utilities/test_spectrum_utils.py | jason-neal/companion_simulations | b5773e5539011d492b7128d0dd2778041ce50d52 | [
"MIT"
] | 1 | 2017-08-18T10:56:39.000Z | 2017-08-18T10:56:39.000Z | import os
import numpy as np
import pytest
from spectrum_overload import Spectrum
from mingle.utilities.spectrum_utils import load_spectrum, select_observation
@pytest.mark.parametrize("fname", ["HD30501-1-mixavg-tellcorr_1.fits", "HD30501-1-mixavg-h2otellcorr_1.fits"])
def test_load_spectrum(fname):
fname = os.path.join("tests", "testdata", "handy_spectra", fname)
results = load_spectrum(fname)
assert isinstance(results, Spectrum)
assert results.header["OBJECT"].upper() == "HD30501"
assert np.all(results.xaxis > 2110) # nm
assert np.all(results.xaxis < 2130) # nm
assert np.all(results.flux < 2)
assert np.all(results.flux >= 0)
def test_load_no_filename_fits():
"""Not a valid file."""
with pytest.raises(ValueError):
load_spectrum("")
@pytest.mark.parametrize("chip", [0, None, 5, 42])
def test_select_observation_with_bad_chip(chip):
with pytest.raises(ValueError):
select_observation("HD30501", "1", chip)
@pytest.mark.xfail()
def test_spectrum_plotter(spectra, label=None, show=False):
"""Plot a Spectrum object."""
assert False
@pytest.mark.xfail()
def test_plot_spectra(obs, model):
"""Plot two spectra."""
assert False
| 27.818182 | 110 | 0.710784 | 0 | 0 | 0 | 0 | 924 | 0.754902 | 0 | 0 | 230 | 0.187908 |
cb1307c481c9beb955137f8e7e42252585de4aba | 574 | py | Python | bots/raspador_template/raspador_template_pilot.py | xyla-io/raspador | 4e77234239d44a83faf5c1d3a6d022a9e3861f25 | [
"MIT"
] | null | null | null | bots/raspador_template/raspador_template_pilot.py | xyla-io/raspador | 4e77234239d44a83faf5c1d3a6d022a9e3861f25 | [
"MIT"
] | null | null | null | bots/raspador_template/raspador_template_pilot.py | xyla-io/raspador | 4e77234239d44a83faf5c1d3a6d022a9e3861f25 | [
"MIT"
] | null | null | null | from raspador import Pilot, UserInteractor, BrowserInteractor
from typing import Dict, List
class RaspadorTemplatePilot(Pilot):
    """Pilot whose credentials and target URL come from a configuration dict."""

    config: Dict[str, any]
    sign_in_wait = 3.0

    def __init__(self, config: Dict[str, any], user: UserInteractor, browser: BrowserInteractor):
        self.config = config
        super().__init__(user=user, browser=browser)

    def _config_entry(self, key: str) -> str:
        # Read a required entry from the pilot configuration dict.
        return self.config[key]

    @property
    def email(self) -> str:
        return self._config_entry('email')

    @property
    def password(self) -> str:
        return self._config_entry('password')

    @property
    def base_url(self) -> str:
        return self._config_entry('base_url')
cb16e92c91411943f9ef0b61014572ffa31aed21 | 9,480 | py | Python | umychart_python/umychart_complier_testcase.py | tokenchain/HQChart | 6d29839cfda853a6a0efe7f085af0cd9db8a549f | [
"Apache-2.0"
] | 4 | 2019-09-30T15:37:50.000Z | 2022-03-28T19:44:46.000Z | umychart_python/umychart_complier_testcase.py | tokenchain/HQChart | 6d29839cfda853a6a0efe7f085af0cd9db8a549f | [
"Apache-2.0"
] | null | null | null | umychart_python/umychart_complier_testcase.py | tokenchain/HQChart | 6d29839cfda853a6a0efe7f085af0cd9db8a549f | [
"Apache-2.0"
] | 2 | 2020-09-21T04:55:45.000Z | 2021-01-05T03:01:02.000Z | # 开源项目 https://github.com/jones2000/HQChart
import sys
import codecs
import webbrowser
from umychart_complier_jscomplier import JSComplier, SymbolOption, HQ_DATA_TYPE
from umychart_complier_jscomplier import ScriptIndexConsole, ScriptIndexItem, SymbolOption, RequestOption, HQ_DATA_TYPE, ArgumentItem
from umychart_webtemplate import *
from umychart_complier_pandas_help import JSComplierPandasHelper
class TestCase:
    """Wrap a list of index-script lines so they can be run through JSComplier."""

    def __init__(self, code, option=None):
        # BUG FIX: the default used to be `option=SymbolOption()` — a mutable
        # default evaluated once at class-definition time and shared by every
        # TestCase instance. Build a fresh SymbolOption per instance instead.
        self.Code = code
        self.Option = option if option is not None else SymbolOption()

    def Run(self):
        """Join the script lines (each newline-terminated) and execute them."""
        testCode = ''.join(item + '\n' for item in self.Code)
        result = JSComplier.Execute(testCode, self.Option)
        return True if result else False
def Test_Tokenize():
    """Tokenize two sample index-script statements; True on success."""
    script = ('VARHIGH:=IF(VAR1<=REF(HH,-1),REF(H,BARSLAST(VAR1>=REF(HH,1))),DRAWNULL),COLORYELLOW;'
              'VAR1=((SMA(MAX((CLOSE - LC),0),3,1) / SMA(ABS((CLOSE - LC)),3,1)) * 100);')
    tokens = JSComplier.Tokenize(script)
    return bool(tokens)


def Test_Parse():
    """Parse the same two statements into an AST; True on success."""
    script = ('VARHIGH:=IF(VAR1<=REF(HH,-1),REF(H,BARSLAST(VAR1>=REF(HH,1))),DRAWNULL),COLORYELLOW;'
              'VAR1=((SMA(MAX((CLOSE - LC),0),3,1) / SMA(ABS((CLOSE - LC)),3,1)) * 100);')
    ast = JSComplier.Parse(script)
    return bool(ast)
def Test_REF():
    """Execute a single REF() statement."""
    return bool(JSComplier.Execute('VAR2:C-REF(O,1)'))


def Test_Add():
    """Execute a scalar addition statement."""
    return bool(JSComplier.Execute('VAR2:C+100'))


def Test_Multiply():
    """Execute multiplication statements (series*series and scalar*scalar)."""
    lines = ['VAR2:C*O;', 'VAR3:100*100;']
    return bool(JSComplier.Execute(''.join(lines)))
def Test_MAX_MIN():
    """Execute MAX()/MIN() statements."""
    lines = [
        'VAR2:MAX(C,O);',
        'VAR3:MAX(C,100);',
        'VAR4:MAX(100,C);',
        'VAR5:MIN(C,O);',
        'VAR5:MIN(C,4);',
    ]
    # Preserve the original evaluation order: indices 0, 1, 2, 4, 3.
    script = ''.join(lines[i] for i in (0, 1, 2, 4, 3))
    return bool(JSComplier.Execute(script))
def Test_MA():
    """Execute several MA() statements."""
    lines = [
        'VAR2:MA(C,5);',
        'VAR3:MA(C,10);',
        'VAR4:MA(C,15);',
        'VAR4:MA(C,30);',
        'VAR4:MA(C,33);',
    ]
    # NOTE(review): only the first four lines are executed, exactly as before;
    # 'VAR4:MA(C,33);' is defined but never run — confirm whether intentional.
    return bool(JSComplier.Execute(''.join(lines[:4])))


def Test_EMA():
    """Execute several EMA() statements."""
    lines = [
        'VAR2:EMA(C,5);',
        'VAR3:EMA(C,10);',
        'VAR4:EMA(C,15);',
        'VAR4:EMA(C,30);',
    ]
    return bool(JSComplier.Execute(''.join(lines)))
def Test_SMA():
    """Execute several SMA() statements."""
    lines = [
        'VAR2:SMA(C,5,10);',
        'VAR3:SMA(C,10,10);',
        'VAR4:SMA(C,15,10);',
        'VAR4:SMA(C,30,10);',
    ]
    return bool(JSComplier.Execute(''.join(lines)))


def Test_DMA():
    """Execute a DMA() statement with a dynamic weight."""
    lines = ['VAR3:C;', 'VAR2:DMA(C,O/C);']
    return bool(JSComplier.Execute(''.join(lines)))
def Test_WMA():
    """Execute a WMA() statement."""
    lines = ['VAR3:C;', 'VAR2:WMA(C,20);']
    return bool(JSComplier.Execute(''.join(lines)))


def Test_SUMBARS():
    """Execute SUMBARS() against minute data of symbol 000001.sz."""
    lines = ['VAR3:SUMBARS(C,O)', 'VAR2:C;']
    option = SymbolOption()
    option.Symbol = '000001.sz'
    option.HQDataType = HQ_DATA_TYPE.MINUTE_ID
    return bool(JSComplier.Execute(''.join(lines), option))
def Test_INDEX():
    """Execute index-series variables (INDEXA/INDEXC/INDEXO) with Period = 5."""
    lines = ['VAR3:INDEXA;', 'VAR2:INDEXC;', 'VAR2:INDEXO;']
    option = SymbolOption()
    option.Period = 5
    return bool(JSComplier.Execute(''.join(lines), option))


def Test_COUNT():
    """Execute several COUNT() statements with default options."""
    lines = ['VAR3:COUNT(C,5);', 'VAR2:COUNT(O,10);', 'VAR2:COUNT(H,20);']
    return bool(JSComplier.Execute(''.join(lines), SymbolOption()))
def Test_HHV_HHL():
    """Execute HHV()/LLV() statements via the TestCase helper."""
    return TestCase(code=[
        'VAR3:HHV(C,5);',
        'VAR2:HHV(O,10);',
        'VAR2:HHV(H,20);',
        'VAR3:LLV(H,5);',
        'VAR4:LLV(H,10);',
    ]).Run()


def Test_STD():
    """Execute STD() statements via the TestCase helper."""
    return TestCase(code=[
        'VAR3:STD(C,5);',
        'VAR2:STD(O,10);',
        'VAR2:STD(H,20);',
        'VAR3:STD(H,15);',
        'VAR4:STD(H,0);',
    ]).Run()


def Test_AVEDEV():
    """Execute AVEDEV() statements via the TestCase helper."""
    return TestCase(code=[
        'VAR3:AVEDEV(C,5);',
        'VAR2:AVEDEV(O,10);',
        'VAR2:AVEDEV(H,20);',
        'VAR3:AVEDEV(H,15);',
        'VAR4:AVEDEV(H,0);',
    ]).Run()
def Test_CROSS():
    """Execute CROSS() statements via the TestCase helper."""
    return TestCase(code=[
        'VAR3:CROSS(C,O);',
        'VAR2:CROSS(O,10);',
        'VAR2:CROSS(O,C);',
    ]).Run()


def Test_MULAR():
    """Execute MULAR() statements via the TestCase helper."""
    return TestCase(code=[
        'VAR3:MULAR(C,5);',
        'VAR2:MULAR(O,10);',
        'VAR2:MULAR(O,30);',
    ]).Run()


def Test_SUM():
    """Execute SUM()/BARSCOUNT() statements via the TestCase helper."""
    return TestCase(code=[
        'VAR3:SUM(C,5);',
        'VAR2:SUM(O,0);',
        'VAR2:BARSCOUNT(O);',
    ]).Run()
def Test_DEVSQ():
    """Execute DEVSQ() statements via the TestCase helper."""
    return TestCase(code=[
        'VAR3:DEVSQ(C,5);',
        'VAR2:DEVSQ(O,0);',
        'VAR2:DEVSQ(O,5);',
    ]).Run()
def Test_FINANCE():  # financial-data functions test
    """Execute a broad mix of drawing/financial statements via TestCase."""
    return TestCase(code=[
        # The first two strings were adjacent literals (no comma) in the
        # original list, so they form a single joined script line here too.
        'DRAWLINE(HIGH>=HHV(HIGH,20),HIGH,LOW<=LLV(LOW,20),LOW,1);'
        'VAR4:CAPITAL;',
        'VAR3:FINANCE(32);',
        'VAR2:FINANCE(1);',
        'VAR2:MA(FINANCE(33),5);',
        "DRAWTEXT(C<=O,O,'xxxx');",
        'STICKLINE(CLOSE>OPEN, CLOSE, OPEN, 0.8, 1);',
        'DRAWNUMBER(CLOSE/OPEN>1.0001,LOW,C);',
        'DRAWNUMBER(CLOSE/OPEN>1.0001,LOW,33);',
        'DRAWICON(CLOSE>OPEN,LOW,1);',
        'PLOYLINE(HIGH>=HHV(HIGH,20),HIGH);',
        'CYW: SUM(VAR4,10)/10000, COLORSTICK;',
        "DRAWCHANNEL(C>O,C,O,'rgb(20,20,20)',1,'3,4','rgb(40,40,40)');",
        'SAR(10,2,20);',
        'BACKSET(CLOSE>OPEN,2);',
        'TT:DYNAINFO(13);',
        'T2:MARGIN(1);',
        'T5:MARGIN(6);',
        "上涨家数:UPCOUNT('CNA.CI'),COLORRED;",
        "下跌家数:DOWNCOUNT('CNA.CI'),COLORGREEN;",
        "TTTT:NEWS(2)+NEWS(4);",
        "TTT2:NEWS(1);",
        'TT4:WINNER(CLOSE);',
        'TT5:COST(10);',
    ]).Run()
def Test_ScriptIndexConsole():
    """End-to-end demo: run an MA index script, convert the result to pandas
    structures, render it into an HQChart HTML page and open it in a browser.
    """
    # create the script definition and its arguments
    scpritInfo=ScriptIndexItem(name='我的MA指标', id=888888,
        script='MA1:MA(CLOSE,M1);\n'    # index script source code
        'MA2:MA(CLOSE,M2);\n'
        'MA3:MA(CLOSE,M3);',
        args=[ ArgumentItem(name='M1', value=5), ArgumentItem(name='M2', value=10), ArgumentItem(name='M3', value=20) ]   # script arguments
        )

    indexConsole = ScriptIndexConsole(scpritInfo)

    option = SymbolOption(
        symbol='000001.sz',
        right=1,    # price adjustment: 0 = none, 1 = forward-adjusted, 2 = backward-adjusted
        period=0,   # period: 0=day 1=week 2=month 3=year 4=1min 5=5min 6=15min 7=30min 8=60min
        request=RequestOption(maxDataCount=500,maxMinuteDayCount=3)
    )

    result=indexConsole.ExecuteScript(option)
    if result.Error :
        # execution failed — nothing to render
        return

    print('run successfully.')
    JSComplierPandasHelper.ToDateTimeSeries(result)  # convert the result to pandas Series format
    JSComplierPandasHelper.ToDataFrame(result)       # convert the result to pandas DataFrame format

    jsonData=result.ToJson()
    varName='jsonData'  # name of the JS variable holding the data in the page

    # HQChart page options; JS-side comments inside the template are runtime
    # text and are kept verbatim.
    HQChartOption= """g_KLineOption={
        Symbol:'%(symbol)s', //股票代码
        Right:%(right)d, //复权
        Period:%(period)d, //周期
        Windows: 
        [
            { Modify:false,Change:false,
            Local:
            { 
                Data:%(varName)s, //py执行以后的json数据
                Type:'LocalJsonDataIndex' ,
                Name:'%(name)s', //指标名字
                Args:[ //指标参数
                    { Name: '%(arg1)s', Value: %(argvalue1)d },
                    { Name: '%(arg2)s', Value: %(argvalue2)d },
                    { Name: '%(arg3)s', Value: %(argvalue3)d }]
            }
            },
            //{Index:"VOL", Modify:false,Change:false},
        ]
    }
    """ %{"symbol":option.Symbol,'right':option.Right, 'period':option.Period, 'varName':varName, 'name':scpritInfo.Name,
        'arg1':scpritInfo.Arguments[0].Name, 'argvalue1': scpritInfo.Arguments[0].Value,
        'arg2':scpritInfo.Arguments[1].Name, 'argvalue2': scpritInfo.Arguments[1].Value,
        'arg3':scpritInfo.Arguments[2].Name, 'argvalue3': scpritInfo.Arguments[2].Value }

    localJsonData= varName + '=' + jsonData + '\n'

    filePath='data.html'
    # generate the visual HTML page from the template parts plus the data
    with codecs.open(filePath,'w',"utf-8") as file:
        file.write(HTML_PART1)
        file.write(localJsonData)
        file.write(HQChartOption)
        file.write(HTML_PART_END)
        file.close()

    webbrowser.open(filePath,new = 1)
# Run the interactive demo only when executed as a script, so importing this
# module no longer triggers the browser/HTML side effects on import.
if __name__ == '__main__':
    # Test_Add()
    # Test_Multiply()
    # Test_MAX_MIN()
    # Test_FINANCE()
    Test_ScriptIndexConsole()
cb189bfc70c1ffe56009041687c4ff42f81f4d14 | 2,778 | py | Python | einguteswerkzeug/helpers/__init__.py | s3h10r/einguteswerkzeug | fe22afc3a6a16802d503dc4485c49d5f3eb8a53d | [
"MIT"
] | 6 | 2019-07-27T16:48:11.000Z | 2022-03-12T20:54:57.000Z | einguteswerkzeug/helpers/__init__.py | s3h10r/einguteswerkzeug | fe22afc3a6a16802d503dc4485c49d5f3eb8a53d | [
"MIT"
] | 20 | 2019-07-27T17:08:29.000Z | 2022-01-13T01:27:24.000Z | einguteswerkzeug/helpers/__init__.py | s3h10r/einguteswerkzeug | fe22afc3a6a16802d503dc4485c49d5f3eb8a53d | [
"MIT"
] | 2 | 2021-03-24T12:26:35.000Z | 2021-08-30T17:48:03.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
some helper funcs
"""
import json
import logging
import os
import site
import subprocess
import sys
import tempfile
import exifread
from PIL import Image
PACKAGE_NAME = "einguteswerkzeug"
# --- configure logging
log = logging.getLogger(__name__)  # module-level logger for this helpers module
log.setLevel(logging.INFO)
if log.hasHandlers():
    # BUG FIX: this previously read `log.andlers.clear()`, which raises
    # AttributeError whenever the logger (or an ancestor) already has handlers.
    log.handlers.clear()
handler = logging.StreamHandler()  # console-handler
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
log.propagate = False  # don't double-log through ancestor loggers
# ---
def get_resource_file(basefile, PACKAGE_NAME=PACKAGE_NAME):
    """
    Return the fully qualified path of a resource file.

    Looks next to this module first; if the file is not there (e.g. when the
    package was pip-installed), every site-packages directory is searched.
    If the resource cannot be found anywhere, the last candidate path is
    returned unchanged — callers detect the miss via os.path.isfile.

    :param basefile: base name of the resource file
    :param PACKAGE_NAME: package directory to look inside site-packages
    """
    fqn = os.path.join(os.path.dirname(os.path.realpath(__file__)), basefile)
    if not os.path.isfile(fqn):
        # when installed via pip the package_data (see MANIFEST.in, setup.py)
        # should be located somewhere in the site-packages path of the (virtual-)env
        for dir in site.getsitepackages():
            fqn = dir + "/" + PACKAGE_NAME + "/" + basefile
            if os.path.isfile(fqn):
                return fqn
            # BUG FIX: an unconditional `break` here stopped the search after
            # the first site-packages directory; now all of them are checked.
    return fqn
def show_error(msg):
    """
    Log *msg* as a critical error and terminate the process with exit code 1.
    """
    # logging.getLogger(__name__) returns the very same logger object as the
    # module-level `log` (getLogger memoizes by name).
    logging.getLogger(__name__).critical("Error: %s", msg)
    sys.exit(1)
def editor(text=None, default_editor="vi"):
    """
    Open the user's editor (environment variable EDITOR, falling back to
    *default_editor*) on the given text and return the edited result — the
    same workflow as 'git commit' on the console.

    args:
        text (string): the text to edit
    optional args:
        default_editor (string): editor used when EDITOR is unset
    returns:
        string: the edited text
    """
    fd, tmp_path = tempfile.mkstemp()
    with os.fdopen(fd, 'w') as tmp:
        tmp.write(text)

    editor_cmd = os.environ.get('EDITOR', default_editor)
    subprocess.call(editor_cmd + ' ' + tmp_path, shell=True)

    with open(tmp_path, 'r') as tmp:
        edited = tmp.read()
    os.unlink(tmp_path)
    return edited
def confirm_prompt(question: str, yes_by_default = True) -> bool:
    """
    Show a yes/no question and read the reply from stdin.

    example:
        reply = confirm_prompt("Are you sure?")

    args:
        yes_by_default (bool) : if True just hitting return equals yes.
    returns:
        bool : True if answered with yes, False if answered with no.
    """
    replies_prompt = "y/n"
    replies_values = ["y", "n"]
    # BUG FIX: `yes = ("y")` was a plain string (not a tuple), and the input
    # loop hard-coded ("", "y", "n") as valid replies — so with
    # yes_by_default=False an empty reply slipped through and the substring
    # test `"" in "y"` made it count as *yes*. Valid replies now follow
    # replies_values, and `yes` is a real tuple.
    yes = ("y",)
    if yes_by_default:
        replies_values.append("")
        replies_prompt = "Y/n"
        yes = ("", "y")
    reply = None
    while reply not in replies_values:
        reply = input(f"{question} ({replies_prompt}): ").lower()
    return reply in yes
| 26.457143 | 85 | 0.61915 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,258 | 0.452844 |
cb19b5b9cfd192e1f6514f052406927f390c4881 | 2,357 | py | Python | ParadoxTrading/EngineExt/Futures/InterDayOnlineMarketSupply.py | gsamarakoon/ParadoxTrading | 2c4024e60b14bf630fd141ccd4c77f197b7c901a | [
"MIT"
] | 95 | 2018-01-14T14:35:35.000Z | 2021-03-17T02:10:24.000Z | ParadoxTrading/EngineExt/Futures/InterDayOnlineMarketSupply.py | yutiansut/ParadoxTrading | b915d1491663443bedbb048017abeed3f7dcd4e2 | [
"MIT"
] | 2 | 2018-01-14T14:35:51.000Z | 2018-07-06T02:57:49.000Z | ParadoxTrading/EngineExt/Futures/InterDayOnlineMarketSupply.py | yutiansut/ParadoxTrading | b915d1491663443bedbb048017abeed3f7dcd4e2 | [
"MIT"
] | 25 | 2018-01-14T14:38:08.000Z | 2020-07-15T16:03:04.000Z | import logging
import typing
from datetime import datetime
from ParadoxTrading.Engine import MarketSupplyAbstract, ReturnMarket, ReturnSettlement
from ParadoxTrading.Fetch import FetchAbstract
from ParadoxTrading.Utils import DataStruct
class InterDayOnlineMarketSupply(MarketSupplyAbstract):
    """Market supply that serves one trading day's data from the database:
    emits one market event per registered symbol, then a settlement event.
    """

    def __init__(
            self,
            _fetcher: FetchAbstract,
            _tradingday: str = None
    ):
        super().__init__(_fetcher)

        # default tradingday is today
        self.tradingday = datetime.now().strftime('%Y%m%d')
        if _tradingday is not None:
            self.tradingday = _tradingday

        # map symbol to data
        self.data_dict: typing.Dict[str, DataStruct] = {}

        # flag: True until the one-shot fetch in updateData() has happened
        self.flag = True
        # is_finish: set once all symbols have been consumed (or none existed)
        self.is_finish = False

    def _get_data(self):
        # fetch data from database for every registered market key
        for k, v in self.register_dict.items():
            symbol = self.fetcher.fetchSymbol(
                self.tradingday, **v.toKwargs()
            )
            # whether symbol exists
            if symbol is not None:
                if symbol not in self.data_dict.keys():
                    # fetch data and set index to 0 init
                    self.data_dict[symbol] = self.fetcher.fetchData(
                        self.tradingday, _symbol=symbol
                    )
                # map symbol to market register key (several keys may share
                # one symbol, hence the set)
                try:
                    self.symbol_dict[symbol].add(k)
                except KeyError:
                    self.symbol_dict[symbol] = {k}
        # mark the one-shot fetch as done
        self.flag = False

    def updateData(self) -> typing.Union[
        None, ReturnMarket, ReturnSettlement
    ]:
        """Pop one symbol's data as a market event; after the last one,
        emit the settlement event; return None once fully finished."""
        if self.flag:
            self._get_data()
            self.is_finish = not self.data_dict
            if self.data_dict:
                logging.info('TradingDay: {}, Product: {}'.format(
                    self.tradingday, self.symbol_dict.keys()
                ))
        if self.is_finish:
            return None
        try:
            # EAFP: popitem raises KeyError when the dict is exhausted,
            # which is the signal to switch to the settlement event
            k, v = self.data_dict.popitem()
            return self.addMarketEvent(k, v)
        except KeyError:
            self.is_finish = True
            return self.addSettlementEvent(self.tradingday)

    def getTradingDay(self) -> str:
        return self.tradingday

    def getDatetime(self) -> typing.Union[str, datetime]:
        # NOTE(review): returns the 'YYYYMMDD' string, never a datetime —
        # the Union return type mirrors the abstract interface.
        return self.tradingday
| 31.426667 | 86 | 0.573186 | 2,116 | 0.897751 | 0 | 0 | 0 | 0 | 0 | 0 | 206 | 0.087399 |
cb1e226e79e09f241ae2edae33072edf44edd04c | 2,301 | py | Python | bdd/contact_stepts.py | SvetlanaPopova/python_1 | 5acc26e3d3746d7fcf48603d9ca9064e39c248ca | [
"Apache-2.0"
] | null | null | null | bdd/contact_stepts.py | SvetlanaPopova/python_1 | 5acc26e3d3746d7fcf48603d9ca9064e39c248ca | [
"Apache-2.0"
] | null | null | null | bdd/contact_stepts.py | SvetlanaPopova/python_1 | 5acc26e3d3746d7fcf48603d9ca9064e39c248ca | [
"Apache-2.0"
] | null | null | null | __author__ = 'User'
from pytest_bdd import given, when, then
from model.contact import Contact
import random
@given('a contact list')
def contact_list(db):
    """Fixture: the current contact list read from the database."""
    return db.get_contact_list()
@given('a contact with <firstname>, <lastname>, <address> and <mobilephone>')
def new_contact(firstname, lastname, address, mobilephone):
    """Fixture: a Contact built from the scenario-outline parameters."""
    return Contact(firstname=firstname, lastname=lastname, address=address, mobilephone=mobilephone)
@when('I add the contact to the list')
def add_new_contact(app, new_contact):
    """Step: add the new contact through the application UI helper."""
    app.contact.add_new(new_contact)
@then('the new contact list is equal to the old contact list with the added contact')
def verify_contact_added(db, contact_list, new_contact, app, check_ui):
    """Step: verify the database (and optionally the UI) reflects the addition."""
    app.contact.check_add_new_success(db, new_contact, contact_list, check_ui)
@given('a non-empty contact list')
def non_empty_contact_list(app, db):
    """Fixture: ensure at least one contact exists, then return the list.

    Bug fix: the original guard was ``len(...) < 0``, which can never be
    true (lengths are non-negative), so the seed contact was never created
    when the database was empty.
    """
    if len(db.get_contact_list()) == 0:
        # NOTE(review): this seeds through app.group.create -- presumably it
        # should be the contact helper (e.g. app.contact.add_new); confirm
        # against the application helper API.
        app.group.create(Contact(firstname='some firstname'))
    return db.get_contact_list()
@given('a random contact from the list')
def random_contact(non_empty_contact_list):
    """Fixture: an arbitrary contact picked from the non-empty list."""
    return random.choice(non_empty_contact_list)
@when('I delete the contact from the list')
def delete_contact(app, random_contact):
    """Step: delete the chosen contact by its database id."""
    app.contact.delete_contact_by_id(random_contact.id)
@then('the new contact list is equal to the old contact list without the contact')
def verify_contact_deleted(db, non_empty_contact_list, random_contact, app, check_ui):
    """Step: verify the deletion against the database (and optionally the UI)."""
    app.contact.check_delete_success(db, random_contact, non_empty_contact_list, check_ui)
@when('I modify the contact from the list')
def modify_contact(app, new_contact, random_contact):
    """Step: overwrite the chosen contact's data with the new contact's fields."""
    # Reuse the random contact's id so the edit targets an existing record.
    new_contact.id = random_contact.id
    app.contact.modify_contact_by_id(new_contact)
@then('the new contact list is equal to the old contact list with the modified contact')
def verify_contact_modified(db, non_empty_contact_list, new_contact, random_contact, app, check_ui):
    """Step: rebuild the expected list (the random contact with its fields
    replaced) and verify it matches the database state after modification.

    Renamed from ``verify_contact_deleted``, a copy-paste name that shadowed
    the delete-scenario step above. pytest-bdd resolves steps by the string
    given to the decorator, so the rename does not affect step matching.
    """
    # Replace the fields on the same object, then re-append it, so identity
    # (and id) are preserved in the expected list.
    non_empty_contact_list.remove(random_contact)
    random_contact.firstname = new_contact.firstname
    random_contact.lastname = new_contact.lastname
    random_contact.address = new_contact.address
    random_contact.mobilephone = new_contact.mobilephone
    non_empty_contact_list.append(random_contact)
    app.contact.check_modify_contact_success(db, non_empty_contact_list, check_ui)
| 41.836364 | 100 | 0.788787 | 0 | 0 | 0 | 0 | 2,171 | 0.943503 | 0 | 0 | 502 | 0.218166 |
cb1eb4ad097f558852c6e842a35ce159078ca10c | 1,235 | py | Python | tests/models/input/types/test_file_input.py | TheLabbingProject/django_analyses | 08cac40a32754a265b37524f08ec6160c69ebea8 | [
"Apache-2.0"
] | 1 | 2020-12-30T12:43:34.000Z | 2020-12-30T12:43:34.000Z | tests/models/input/types/test_file_input.py | TheLabbingProject/django_analyses | 08cac40a32754a265b37524f08ec6160c69ebea8 | [
"Apache-2.0"
] | 59 | 2019-12-25T13:14:56.000Z | 2021-07-22T12:24:46.000Z | tests/models/input/types/test_file_input.py | TheLabbingProject/django_analyses | 08cac40a32754a265b37524f08ec6160c69ebea8 | [
"Apache-2.0"
] | 2 | 2020-05-24T06:44:27.000Z | 2020-07-09T15:47:31.000Z | from django.core.exceptions import ValidationError
from django.test import TestCase
from django_analyses.models.input.types.input_types import InputTypes
from tests.factories.input.types.file_input import FileInputFactory
class FileInputTestCase(TestCase):
    """Unit tests for the
    :class:`~django_analyses.models.input.types.file_input.FileInput` model.
    """

    def setUp(self):
        """Create a fresh FileInput instance for every test method."""
        self.file_input = FileInputFactory()

    def test_string(self):
        """__str__ should render as 'key' = value."""
        expected = f"'{self.file_input.key}' = {self.file_input.value}"
        self.assertEqual(str(self.file_input), expected)

    def test_none_value_if_required_raises_validation_error(self):
        """Saving a required input whose value is None must fail validation."""
        definition = self.file_input.definition
        definition.required = True
        definition.save()
        self.file_input.value = None
        with self.assertRaises(ValidationError):
            self.file_input.save()

    def test_get_type(self):
        """get_type() should report the FIL (file) input type."""
        self.assertEqual(self.file_input.get_type(), InputTypes.FIL)
| 30.121951 | 90 | 0.678543 | 1,010 | 0.817814 | 0 | 0 | 0 | 0 | 0 | 0 | 348 | 0.281781 |
cb1fb8da536670946ce9d23e583964d60207cf34 | 8,052 | py | Python | videoarchiver.py | yannisHD/StreamRecorder | 90bccee236730fff6e148d24c26aa3e3104d3ca2 | [
"MIT"
] | null | null | null | videoarchiver.py | yannisHD/StreamRecorder | 90bccee236730fff6e148d24c26aa3e3104d3ca2 | [
"MIT"
] | null | null | null | videoarchiver.py | yannisHD/StreamRecorder | 90bccee236730fff6e148d24c26aa3e3104d3ca2 | [
"MIT"
] | null | null | null | #!/usr/bin/python
"""A concise tool for archiving video as it is recorded.
"""
import os, time, argparse
import subprocess32 as subprocess
from socket import gethostname
import dvrutils
def read_archive_config(fName):
    """Parse the archiver config file into a list of stream records.

    Each non-empty, non-comment line names one stream; a '#' starts a comment
    (full-line or trailing). Returns a list of dicts with keys 'StreamName',
    'VideoPath' and 'ArchivePath' (the two paths are filled in later by
    StreamArchiver.check_directories).
    """
    streams = []
    with open(fName, 'r') as cfg:
        for raw_line in cfg:
            # Drop any trailing comment, then surrounding whitespace.
            stream_name = raw_line.split('#', 1)[0].strip()
            if stream_name:
                streams.append({'StreamName': stream_name, 'VideoPath': '', 'ArchivePath': ''})
    return streams
class StreamArchiver:
    """Mirrors recorded video streams from local storage into an archive
    directory once per day using rsync.

    The stream list is read from a config file in the archive directory; only
    streams that actually exist under storagePath on this host are kept.
    """

    def __init__(self, logger, dvrName, streamListFile='videoarchiver.cfg', storagePath='/mnt/video', archivePath='/mnt/archive'):
        """Load the stream list and validate storage/archive directories.

        :param logger: logging.Logger-style object. NOTE(review): __init__
            assumes it is not None while later methods guard with
            ``if self.logger is not None`` -- the contract should be unified.
        :param dvrName: name of the recording host.
        :param streamListFile: config file name, resolved relative to archivePath.
        :param storagePath: root directory holding the recorded streams.
        :param archivePath: root directory to mirror streams into.
        """
        self.logger = logger
        self.dvrName = dvrName
        self.streamListFile = os.path.join(archivePath, streamListFile)
        self.storagePath = storagePath
        self.archivePath = archivePath
        if os.path.exists(self.streamListFile):
            self.streams = read_archive_config(self.streamListFile)
            self.check_directories()
        else:
            self.logger.error("The specified configuration file {} cannot be found!".format(self.streamListFile))

    def check_directories(self):
        """Keep only streams whose video directory exists locally and make
        sure each one has a usable directory under the archive root."""
        goodStreams = []
        for s in self.streams:
            s['VideoPath'] = os.path.join(self.storagePath, s['StreamName'])
            if os.path.isdir(s['VideoPath']):  # this host records this stream
                s['ArchivePath'] = self.archivePath
                if not os.path.isdir(s['ArchivePath']):
                    # A plain file with the same name would block makedirs, so
                    # pick a unique alternative directory name first.
                    if os.path.exists(s['ArchivePath']):
                        s['ArchivePath'] = dvrutils.get_unique_filename(s['ArchivePath'], nZeros=0)
                    os.makedirs(s['ArchivePath'])
                    if self.logger is not None:
                        self.logger.info('Created directory {} for stream {}!'.format(s['ArchivePath'], s['StreamName']))
                goodStreams.append(s)
            else:
                # Stream is configured but not recorded on this machine.
                if self.logger is not None:
                    self.logger.debug("Ignoring stream {} as it does not exist on this system.".format(s['StreamName']))
        self.streams = goodStreams

    def sync_streams(self):
        """Copy each stream's video directory into the archive with rsync,
        one stream at a time (minimizes fragmentation)."""
        for s in self.streams:
            # r = recurse; l = symlinks as symlinks; preserve: p = permissions,
            # t = modification times, g = group
            syncCmd = ['rsync', '-rlptg', s['VideoPath'], s['ArchivePath']]
            if self.logger is not None:
                self.logger.info("Syncing video for stream {} in {} to archive at {}...".format(s['StreamName'], s['VideoPath'], s['ArchivePath']))
                self.logger.debug("Syncing with the command: {}".format(syncCmd))
            startTime = time.time()
            subprocess.call(syncCmd)
            elapsedTime = time.time() - startTime
            if self.logger is not None:
                self.logger.info("Sync for stream {} took {} seconds.".format(s['StreamName'], elapsedTime))

    def start_sync_daemon(self, timeOfDay='1:00'):
        """Loop forever, syncing all streams once per day at ``timeOfDay``
        (HH:MM, 24-hour). Falls back to 1:00 AM on an invalid time string."""
        try:
            syncHour, syncMin = [int(t) for t in timeOfDay.split(':')]
        except (ValueError, AttributeError):
            # ValueError: non-numeric fields or wrong number of ':' parts;
            # AttributeError: timeOfDay is not a string. This used to be a
            # bare `except:`, which also swallowed KeyboardInterrupt etc.
            syncHour, syncMin = 1, 0
            self.logger.warning("Invalid time string: '{}'! Reverting to default! This is probably not what you wanted!".format(timeOfDay))
        self.logger.info("Going to sync daily at {}:{}".format(syncHour, syncMin))
        # Maps YYYYMMDD (int) -> whether we already synced that day.
        self.syncHistory = {int(time.strftime('%Y%m%d')): False}
        while True:
            dayKey = int(time.strftime('%Y%m%d'))
            if dayKey not in self.syncHistory:
                # New day: record that it has not been synced yet.
                self.syncHistory.update({dayKey: False})
            if not self.syncHistory[dayKey]:
                currTime = time.localtime()
                syncNow = False
                if currTime.tm_hour > syncHour:
                    syncNow = True
                elif currTime.tm_hour == syncHour and currTime.tm_min >= syncMin:
                    syncNow = True
                if syncNow:
                    self.sync_streams()
                    self.syncHistory[dayKey] = True
            time.sleep(5)
if __name__ == "__main__":
    # parse any arguments passed in
    parser = argparse.ArgumentParser(prog='videoarchiver.py', usage='%(prog)s [configFilename]', description='Archives/backs up video from predefined camera streams to a defined location.')
    parser.add_argument('streamListFile', help = '(Optional) Name of the configuration file to defining streams to backup (defaults to archivePath/dvrName).')
    parser.add_argument('-t', '--time-of-day', dest = 'timeOfDay', default = '1:00', help = '(Optional) Time of day to perform the backup (HH:MM, 24-hour format) (default: %(default)s).')
    # BUG FIX: the default used to be 'videoarchiver.cfg', which (a) is the
    # config file's name, not a log file, and (b) meant the "is None" check
    # below could never trigger, so the documented archivePath/<dvrName>.log
    # default was dead code and logs went into a .cfg-named file.
    parser.add_argument('-l', '--log-file', dest = 'logFilename', default = None, help = '(Optional) Name of the file for logging (defaults to archivePath/dvrName.log).')
    parser.add_argument('-v', '--loglevel', dest = 'loglevel', default = 'INFO', help = '(Optional) streamrecorder log level (does not affect FFMPEG log level). Specify numeric values (10, 20, 30, etc.) or strings like DEBUG or WARNING')
    parser.add_argument('-s', '--storage-path', dest = 'storagePath', default = '/mnt/video', help = '(Optional) Location of the archive directory (default: %(default)s).')
    parser.add_argument('-a', '--archive-path', dest = 'archivePath', default = '/mnt/archive', help = '(Optional) Location of the archive directory (default: %(default)s).')
    parser.add_argument('-d', '--dvr-name', dest = 'dvrName', default = gethostname(), help = '(Optional) Name of the computer recording the stream (defaults to hostname: %(default)s).')
    args = parser.parse_args()
    # setup logging; by default, log to file: archivePath/dvrName.log
    logFilePath = os.path.join(args.archivePath, "{}.log".format(args.dvrName)) if args.logFilename is None else args.logFilename
    logger = dvrutils.setup_logging(logFilePath, args.loglevel, args.dvrName, logToFile=True, logToStdout=True) # this function will output the loglevel for verification
    try:
        # create the archiver object, which makes sure things are set up correctly
        streamArchiver = StreamArchiver(logger, args.dvrName, args.streamListFile, args.storagePath, args.archivePath)
        # start the daemon
        streamArchiver.start_sync_daemon(args.timeOfDay)
    except Exception:
        # if there was a crash, log it; narrowed from a bare `except:` so
        # KeyboardInterrupt/SystemExit still propagate normally
        # TODO: send an email alert (once it works)
        logger.error("The program crashed unexpectedly!")
| 62.418605 | 237 | 0.615872 | 4,898 | 0.608296 | 0 | 0 | 0 | 0 | 0 | 0 | 3,512 | 0.436165 |
cb207174c9db151c0797349e4b3c26422a817936 | 5,416 | py | Python | test_nlp_util.py | kenttw/2021-bitbrain-shopee | f63975babc17f03718209432013e325b6eb5e39e | [
"Apache-2.0"
] | null | null | null | test_nlp_util.py | kenttw/2021-bitbrain-shopee | f63975babc17f03718209432013e325b6eb5e39e | [
"Apache-2.0"
] | null | null | null | test_nlp_util.py | kenttw/2021-bitbrain-shopee | f63975babc17f03718209432013e325b6eb5e39e | [
"Apache-2.0"
] | null | null | null | import nlp_util
def testGC():
    """Smoke test: genCC should produce a truthy value for a raw address."""
    sample_address = 'isn s.h. & rekan, somba opu 76'
    cc = nlp_util.genCC(sample_address)
    print(cc)
    assert cc
def test_getPair():
    """get_fuzzy_pairs should align label tokens to raw tokens (string or
    token-list input) and return a [score, pairs] structure."""
    cases = [
        (" ".join(['a', 'b', 'c', 'd']), " ".join(['ba', '-', 'cc']),
         [1, [('ba', 'b'), ('-', None), ('cc', 'c')]]),
        (" ".join(['b', '-', 'c', 'd']), " ".join(['ba', 'cc']),
         [0, [('ba', 'b'), ('cc', 'c')]]),
        (['a', 'b', 'c', 'd'], ['ba', 'cc'],
         [1, [('ba', 'b'), ('cc', 'c')]]),
    ]
    for raw, label, expected in cases:
        assert nlp_util.get_fuzzy_pairs(raw, label) == expected
def test_get_range_kent():
    """get_range_kent should return the character span of the fuzzy match of
    the label inside the raw address."""
    label = "hanief sembilan mtr -h"
    raw = "kuripan hanief semb mtr -h, gajah mada, 58112"
    span = nlp_util.get_range_kent(label, raw)
    print(span)
    assert raw[span[0]:span[1]] == 'hanief semb mtr -h'
def test_get_bio_tagging_range():
    """Exercise get_bio_tagging_range across POI/street combinations.

    Cases 2-5 only print the extracted spans (no assertions); when the POI or
    street argument is empty, the corresponding start index is expected to be
    None (cases 6-8).
    """
    print("")
    # case 1:
    poi,street,raw = "toko bb kids", "raya samb gede", "xxx raya. sa-mb gede, 299 toko bb k&ids yyy",
    p_start,p_end,s_start,s_end = nlp_util.get_bio_tagging_range(raw,street,poi)
    print("case1")
    print("POI==>", raw[p_start:p_end])
    assert raw[p_start:p_end] == 'toko bb k&ids'
    print("Street==>", raw[s_start:s_end])
    assert raw[s_start:s_end] =='raya. sa-mb gede'
    # case 2:
    poi,street,raw = "toko bb kids", "raya samb gede", " toko bb kids, raya samb gede, 299",
    p_start,p_end,s_start,s_end = nlp_util.get_bio_tagging_range(raw,street,poi)
    print("case2")
    print("POI==>", raw[p_start:p_end])
    print("Street==>", raw[s_start:s_end])
    # case 3:
    poi,street,raw = "toko bb kids", "raya samb gede", "aaa toko bb kids, raya samb gede, 299",
    p_start,p_end,s_start,s_end = nlp_util.get_bio_tagging_range(raw,street,poi)
    print("case3")
    print("POI==>", raw[p_start:p_end])
    print("Street==>", raw[s_start:s_end])
    # case 4: POI tokens repeat inside raw
    poi,street,raw = "tahu jontor bung tomo ", "bung tomo", "tahu jon bung tomo bung tomo, sungai keledang samarinda seberang"
    p_start,p_end,s_start,s_end = nlp_util.get_bio_tagging_range(raw,street,poi)
    print("case4")
    print("POI==>", raw[p_start:p_end])
    print("Street==>", raw[s_start:s_end])
    # case 5: empty POI string
    poi,street,raw = '',"citra yuda iv peru depok", "raya. sa-mb gede, 299 toko bb k&ids yyy"
    p_start,p_end,s_start,s_end = nlp_util.get_bio_tagging_range(raw,street,poi)
    print("case5")
    print("POI==>", raw[p_start:p_end])
    print("Street==>", raw[s_start:s_end])
    print("")
    # case 6:
    poi,street,raw = "toko bb kids", "raya samb gede", "xxx raya. sa-mb gede, 299 toko bb k&ids yyy",
    p_start,p_end,s_start,s_end = nlp_util.get_bio_tagging_range(raw,street,'')
    print("case6")
    assert p_start == None
    print("Street==>", raw[s_start:s_end])
    assert raw[s_start:s_end] =='raya. sa-mb gede'
    print("")
    # case 7:
    poi,street,raw = "toko bb kids", "raya samb gede", "xxx raya. sa-mb gede, 299 toko bb k&ids yyy",
    p_start,p_end,s_start,s_end = nlp_util.get_bio_tagging_range(raw,'',poi)
    print("case7")
    print("POI==>", raw[p_start:p_end])
    assert raw[p_start:p_end] == 'toko bb k&ids'
    assert s_start==None
    # case 8: POI only, empty street
    poi, street, raw = 'sd neg 12 anggrek', '', 'sd negeri 12 anggrek'
    p_start,p_end,s_start,s_end = nlp_util.get_bio_tagging_range(raw,street,poi)
    print("case8")
    print("POI==>", raw[p_start:p_end])
    # print("Street==>", raw[s_start:s_end])
    assert s_start == None
def test_get_bio_tagging_string():
    """Print-only exercise of get_bio_tagging_string.

    NOTE(review): there are no assertions here -- the BIO tag sequences are
    only printed for manual inspection; consider pinning expected outputs.
    """
    print("")
    # case 1:
    poi, street, raw = "toko bb kids", "raya samb gede", "xxx raya. sa-mb gede, 299 toko bb k&ids yyy",
    BIO = nlp_util.get_bio_tagging_string(raw, street, poi)
    print(BIO)
    print("")
    # POI given as None
    poi,street,raw = None,"citra yuda iv peru depok", "raya. sa-mb gede, 299 toko bb k&ids yyy"
    BIO = nlp_util.get_bio_tagging_string(raw, street, poi)
    print(BIO)
    # POI and street that do not appear in raw
    poi,street,raw = "lapangan futsal sukaluyu", "tang", "xxx raya. sa-mb gede, 299 toko bb k&ids yyy"
    BIO = nlp_util.get_bio_tagging_string(raw, street, poi)
    print(BIO)
    # street explicitly None
    poi,street,raw = "lapangan futsal sukaluyu", "tang", "xxx raya. sa-mb gede, 299 toko bb k&ids yyy"
    BIO = nlp_util.get_bio_tagging_string(raw, None, poi)
    print(BIO)
    # both street and POI None
    poi,street,raw = "lapangan futsal sukaluyu", "tang", "xxx raya. sa-mb gede, 299 toko bb k&ids yyy"
    BIO = nlp_util.get_bio_tagging_string(raw, None, None)
    print(BIO)
def test_find_sub_list():
    """find_sub_list should locate POI and street token spans that do not
    overlap, even when the street tokens also occur inside the POI span
    (the (start, end) exclusion argument on the second call).
    """
    print("")
    poi,street,raw = "tahu jontor bung tomo ", "bung tomo", "tahu jon bung tomo bung tomo, sungai keledang samarinda seberang"
    p_start, p_end, s_start, s_end = nlp_util.get_bio_tagging_range(raw, street, poi)
    text_splits = nlp_util.prepare_text(raw) #['tahu', 'jon', 'bung', 'tomo', 'bung', 'tomo', ',', 'sungai', 'keledang', 'samarinda', 'seberang']
    p_splits = nlp_util.prepare_text(raw[p_start:p_end])
    start, end = nlp_util.find_sub_list(p_splits, text_splits)
    s_splits = nlp_util.prepare_text(raw[s_start:s_end])
    # Exclude the already-claimed POI token range from the street search.
    start_2, end_2 = nlp_util.find_sub_list(s_splits, text_splits, (start,end))
    set1 = set(range(start, end))
    set2 = set(range(start_2, end_2))
    print(start,end,start_2,end_2)
assert len(set1.intersection(set2))==0 | 37.351724 | 145 | 0.64051 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,862 | 0.343796 |
cb210538262eb886c5121655f0c6de13883af94c | 2,433 | py | Python | mlprimitives/adapters/keras.py | Hector-hedb12/MLPrimitives | 82e4826a83ca27e0c2e764625a0d862fc8b95af2 | [
"MIT"
] | null | null | null | mlprimitives/adapters/keras.py | Hector-hedb12/MLPrimitives | 82e4826a83ca27e0c2e764625a0d862fc8b95af2 | [
"MIT"
] | null | null | null | mlprimitives/adapters/keras.py | Hector-hedb12/MLPrimitives | 82e4826a83ca27e0c2e764625a0d862fc8b95af2 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import logging
import tempfile
import keras
import numpy as np
from mlprimitives.utils import import_object
LOGGER = logging.getLogger(__name__)
class Sequential(object):
    """A Wrapper around Sequential Keras models with a simpler interface.

    Layers are declared as dicts ('class' dotted path + 'parameters'); string
    layer parameters are resolved against the stored hyperparameters at build
    time. Pickling support serializes the Keras model to HDF5 bytes.
    """
    def __getstate__(self):
        # Keras saves the model into the temp file *by name* (through its own
        # handle); our handle's position is still 0, so fd.read() returns the
        # full serialized bytes. NOTE(review): reopening a NamedTemporaryFile
        # by name does not work on Windows -- assumed POSIX here; confirm.
        state = self.__dict__.copy()
        with tempfile.NamedTemporaryFile(suffix='.hdf5', delete=True) as fd:
            keras.models.save_model(state.pop('model'), fd.name, overwrite=True)
            state['model_str'] = fd.read()
        return state
    def __setstate__(self, state):
        # Inverse of __getstate__: write the HDF5 bytes out, flush so the
        # loader sees them on disk, then rebuild the model object.
        with tempfile.NamedTemporaryFile(suffix='.hdf5', delete=True) as fd:
            fd.write(state.pop('model_str'))
            fd.flush()
            state['model'] = keras.models.load_model(fd.name)
        self.__dict__ = state
    def _build_model(self, **kwargs):
        """Instantiate and compile the Keras model from the layer specs.

        ``kwargs`` override the stored hyperparameters for this build only.
        A string-valued layer parameter is treated as a hyperparameter
        reference and replaced by its value when present (kept as-is otherwise).
        """
        model = keras.models.Sequential()
        hyperparameters = self.hyperparameters.copy()
        hyperparameters.update(kwargs)
        for layer in self.layers:
            layer_kwargs = layer['parameters'].copy()
            for key, value in layer_kwargs.items():
                if isinstance(value, str):
                    layer_kwargs[key] = hyperparameters.get(value, value)
            model.add(layer['class'](**layer_kwargs))
        model.compile(loss=self.loss, optimizer=self.optimizer(), metrics=self.metrics)
        return model
    def __init__(self, layers, loss, optimizer, classification,
                 metrics=None, epochs=10, verbose=False, **hyperparameters):
        """
        :param layers: list of dicts with 'class' (dotted path) and 'parameters'.
        :param loss: dotted path of the loss function to import.
        :param optimizer: dotted path of the optimizer class to import.
        :param classification: if True, targets are one-hot encoded in fit()
            and predictions are argmax-decoded in predict().
        :param metrics: metrics passed through to ``model.compile``.
        :param epochs: number of training epochs.
        :param verbose: Keras fit verbosity flag.
        :param hyperparameters: values referenced by string layer parameters.
        """
        self.layers = list()
        for layer in layers:
            # Copy before mutating so the caller's spec dicts stay untouched.
            layer = layer.copy()
            layer['class'] = import_object(layer['class'])
            self.layers.append(layer)
        self.optimizer = import_object(optimizer)
        self.loss = import_object(loss)
        self.metrics = metrics
        self.epochs = epochs
        self.verbose = verbose
        self.classification = classification
        self.hyperparameters = hyperparameters
    def fit(self, X, y, **kwargs):
        """Build the model (kwargs override hyperparameters) and train it."""
        self.model = self._build_model(**kwargs)
        if self.classification:
            y = keras.utils.to_categorical(y)
        self.model.fit(X, y, epochs=self.epochs, verbose=self.verbose)
    def predict(self, X):
        """Predict; for classification, return argmax class indices."""
        y = self.model.predict(X)
        if self.classification:
            y = np.argmax(y, axis=1)
        return y
| 28.290698 | 87 | 0.618167 | 2,257 | 0.927661 | 0 | 0 | 0 | 0 | 0 | 0 | 178 | 0.073161 |
cb214db88b78d4f30ed19cb317af575bf87c3f7d | 686 | py | Python | src/django_perf_rec/settings.py | adamchainz/django-perf-rec | f543053d9de5bc7f52f5761fc914d342c78e37a1 | [
"MIT"
] | 147 | 2018-08-21T14:18:27.000Z | 2022-03-31T23:16:58.000Z | src/django_perf_rec/settings.py | adamchainz/django-perf-rec | f543053d9de5bc7f52f5761fc914d342c78e37a1 | [
"MIT"
] | 48 | 2018-07-15T11:07:08.000Z | 2022-03-26T16:00:22.000Z | src/django_perf_rec/settings.py | adamchainz/django-perf-rec | f543053d9de5bc7f52f5761fc914d342c78e37a1 | [
"MIT"
] | 11 | 2018-07-13T10:09:44.000Z | 2021-02-13T18:15:12.000Z | import sys
from typing import Any
from django.conf import settings
# typing.Literal was added in Python 3.8; on older interpreters fall back to
# a plain ``str`` alias so the MODE annotation still resolves.
if sys.version_info >= (3, 8):
    from typing import Literal
    ModeType = Literal["once", "none", "all"]
else:
    ModeType = str
class Settings:
    """Read-only accessor for the ``PERF_REC`` Django setting, with library
    defaults applied when the setting (or a key in it) is absent."""

    # Fallback values used when PERF_REC is missing or lacks the key.
    defaults = {"HIDE_COLUMNS": True, "MODE": "once"}

    def get_setting(self, key: str) -> Any:
        """Return ``settings.PERF_REC[key]``, or the library default."""
        try:
            # AttributeError: PERF_REC not defined; KeyError: key not set.
            return settings.PERF_REC[key]
        except (AttributeError, KeyError):
            pass
        return self.defaults.get(key, None)

    @property
    def HIDE_COLUMNS(self) -> bool:
        """Whether SQL column lists should be collapsed in recordings."""
        return self.get_setting("HIDE_COLUMNS")

    @property
    def MODE(self) -> ModeType:
        """Recording mode ('once', 'none' or 'all')."""
        return self.get_setting("MODE")


perf_rec_settings = Settings()
| 20.176471 | 53 | 0.635569 | 447 | 0.651603 | 0 | 0 | 174 | 0.253644 | 0 | 0 | 63 | 0.091837 |
cb21a8094269bad0f82713f212cba8533460c4c0 | 3,026 | py | Python | server_parse/server_parse.py | MrFlynn/mc-playerstat-webhook | ffacae4eeeb5baafc21d2bf9076c1472fd3fb06b | [
"MIT"
] | null | null | null | server_parse/server_parse.py | MrFlynn/mc-playerstat-webhook | ffacae4eeeb5baafc21d2bf9076c1472fd3fb06b | [
"MIT"
] | null | null | null | server_parse/server_parse.py | MrFlynn/mc-playerstat-webhook | ffacae4eeeb5baafc21d2bf9076c1472fd3fb06b | [
"MIT"
] | null | null | null | import os
import json
class ServerParse:
    """Parses a Minecraft server directory: the server properties, the
    whitelist, and per-player playerdata modification times."""
    def __init__(self, directory: str) -> None:
        """Store the server root directory and load world name + whitelist.

        :param directory: directory containing all server files (configurations,
            saves, etc.).
        :return: None
        """
        self.directory = directory
        # Strip trailing forward slash so the path joins below stay clean
        # (endswith also tolerates an empty string, unlike indexing [-1]).
        if self.directory.endswith('/'):
            self.directory = self.directory[:-1]
        self.reload()
    def _get_world_name(self, filename: str = 'server.properties') -> str:
        """Read the level-name property from the server properties file.

        :param filename: filename of the server properties file. Defaults to
            server.properties.
        :return: level name of server.
        """
        # BUG FIX: the path used a literal '(unknown)' instead of the
        # filename parameter, so the properties file could never be opened.
        with open(f'{self.directory}/{filename}', 'r') as f:
            lines = f.readlines()
        # Get the line with the level name and extract the value after '='.
        level_name_line = [line for line in lines if 'level-name' in line]
        return level_name_line[0].split('=')[1].strip()
    def _read_whitelist(self, filename: str = 'whitelist.json') -> object:
        """Read the whitelist JSON file (player UUIDs and usernames).

        :param filename: name of the file containing whitelist information.
            Defaults to whitelist.json.
        :return: parsed contents of the whitelist file.
        """
        # BUG FIXES: use the filename parameter (was a literal '(unknown)')
        # and close the handle via a context manager (it was leaked before).
        with open(f'{self.directory}/{filename}', 'r') as json_file:
            return json.load(json_file)
    def reload(self) -> None:
        """Reload the world name and the whitelist file.

        :return: None
        """
        self.world_name = self._get_world_name()
        self.whitelist = self._read_whitelist()
    def get_uuids_and_mtime(self) -> list:
        """Return (uuid, mtime) tuples for every playerdata file.

        The content-modification time approximates the player's last login.

        :return: list of tuples.
        """
        playerdata_dir = f'{self.directory}/{self.world_name}/playerdata/'
        uuids_and_mtimes = []
        for file in os.listdir(playerdata_dir):
            # Get the file content modification time (mtime).
            mtime = os.path.getmtime(playerdata_dir + file)
            # File names are '<uuid>.dat'; keep only the uuid part.
            uuids_and_mtimes.append((file.split('.')[0], mtime))
        return uuids_and_mtimes
    def whitelist_lookup_by_uuid(self, uuid: str) -> str:
        """Look up a username in the whitelist by the supplied UUID.

        :param uuid: UUID of the user to look up.
        :return: username corresponding to `uuid`, or '' when not found.
        """
        try:
            matches = [entry for entry in self.whitelist if entry['uuid'] == uuid]
            # BUG FIX: return the name (as documented and annotated) rather
            # than the whole whitelist entry dict.
            return matches[0]['name']
        except (IndexError, KeyError, TypeError):
            # No match, malformed entry, or non-list whitelist; the original
            # used a bare `except:` here.
            return ""
| 34.781609 | 79 | 0.609385 | 3,001 | 0.991738 | 0 | 0 | 0 | 0 | 0 | 0 | 1,681 | 0.555519 |
cb224d882dc0ed19d5bdb03ccaf29d2bdc6ffef3 | 2,402 | py | Python | mpst_ts/codegen/generator/node/node_v2/node_strategy.py | stscript-cgo/STScript | d2ab2a05b997e9487fd3057a38dcec67feb20e53 | [
"Apache-2.0"
] | null | null | null | mpst_ts/codegen/generator/node/node_v2/node_strategy.py | stscript-cgo/STScript | d2ab2a05b997e9487fd3057a38dcec67feb20e53 | [
"Apache-2.0"
] | null | null | null | mpst_ts/codegen/generator/node/node_v2/node_strategy.py | stscript-cgo/STScript | d2ab2a05b997e9487fd3057a38dcec67feb20e53 | [
"Apache-2.0"
] | null | null | null | import os
from ...utils import CodeGenerationStrategy
from ....endpoint import Endpoint
from .....utils import TemplateGenerator
class NodeStrategy(CodeGenerationStrategy,
                   target='node'):
    # Registered (via the `target` class keyword) as the code-generation
    # strategy for the 'node' target.
    def __init__(self):
        super().__init__()
        self.output_dir = 'sandbox/node'
        # Jinja templates live in a 'templates' directory next to this module.
        dirname = os.path.join(os.path.dirname(__file__), 'templates')
        self.template_generator = TemplateGenerator(dirname=dirname)
    def generate(self, endpoint: Endpoint):
        """
        Render all TypeScript sources for the given endpoint.
        Files to generate:
            {protocol}/EFSM.ts
            {protocol}/S{state}.ts (one per send/receive EFSM state)
            {protocol}/{role}.ts
            {protocol}/Session.ts
        Returns:
            A list of (filepath, content_to_write) pairs.
        """
        files = []
        protocol = endpoint.protocol
        role = endpoint.role
        # Generate EFSM
        files.append((os.path.join(self.output_dir, protocol, 'EFSM.ts'),
                      self.template_generator.render(path='efsm.ts.j2',
                                                     payload={'endpoint': endpoint})))
        # Generate modules for send states
        for state in endpoint.efsm.send_states:
            files.append((os.path.join(self.output_dir, protocol, f'S{state}.ts'),
                          self.template_generator.render(path='send_module.ts.j2',
                                                         payload={'endpoint': endpoint,
                                                                  'state': state})))
        # Generate modules for receive states
        for state in endpoint.efsm.receive_states:
            files.append((os.path.join(self.output_dir, protocol, f'S{state}.ts'),
                          self.template_generator.render(path='receive_module.ts.j2',
                                                         payload={'endpoint': endpoint,
                                                                  'state': state})))
        # Generate runtime
        files.append((os.path.join(self.output_dir, protocol, f'{role}.ts'),
                      self.template_generator.render(path='runtime.ts.j2',
                                                     payload={'endpoint': endpoint})))
        # Generate session (static template: no payload needed)
        files.append((os.path.join(self.output_dir, protocol, 'Session.ts'),
                      self.template_generator.render(path='Session.ts',
                                                     payload={})))
return files | 39.377049 | 86 | 0.517485 | 2,271 | 0.945462 | 0 | 0 | 0 | 0 | 0 | 0 | 530 | 0.220649 |
cb229d6175600c19fd97b95e24ba4adaaf6eb1d6 | 2,208 | py | Python | dump-pkg/src/dumpshmamp/collectors/docker.py | sha1n/macos-devenv-dump-poc | be439ad4a0c0ac265fe62d44bded73eab1a0c31d | [
"MIT"
] | null | null | null | dump-pkg/src/dumpshmamp/collectors/docker.py | sha1n/macos-devenv-dump-poc | be439ad4a0c0ac265fe62d44bded73eab1a0c31d | [
"MIT"
] | null | null | null | dump-pkg/src/dumpshmamp/collectors/docker.py | sha1n/macos-devenv-dump-poc | be439ad4a0c0ac265fe62d44bded73eab1a0c31d | [
"MIT"
] | null | null | null | from dumpshmamp.collectors.files import try_copyfile, file_path, mkdir
from shminspector.util.cmd import try_capture_output, is_command
def collect_docker_files(user_home_dir_path, target_dir_path, ctx):
    """Collect Docker version/info output and, when Docker is configured,
    the user's Docker configuration files, into target_dir_path."""
    if not is_command("docker"):
        ctx.logger.warn("'docker' not installed")
        return
    ctx.logger.info("Collecting Docker information...")
    mkdir(target_dir_path)
    _collect_version(target_dir_path, ctx)
    _collect_info(target_dir_path, ctx)
    if ctx.snapshot.docker_configured():
        ctx.logger.info("Collecting Docker files...")
        _collect_config_files(target_dir_path, user_home_dir_path, ctx)
def _collect_config_files(target_dir_path, user_home_dir_path, ctx):
    """Copy the Docker Desktop/CLI configuration files into the dump directory
    (missing files are tolerated by try_copyfile)."""
    ctx.logger.progress("Collecting configuration files...")
    # (source path relative to the user's home, file name inside the dump)
    config_files = (
        ("Library/Group Containers/group.com.docker/settings.json", "settings.json"),
        (".docker/config.json", "config.json"),
        (".docker/daemon.json", "daemon.json"),
    )
    for relative_source, dump_name in config_files:
        try_copyfile(file_path(user_home_dir_path, relative_source),
                     file_path(target_dir_path, dump_name))
def _collect_version(target_dir, ctx):
    """Capture `docker version` output into docker_version.txt in target_dir."""
    ctx.logger.progress("Collecting version information...")
    return try_capture_output(cmd=["docker", "version"],
                              target_dir_path=target_dir,
                              file_name="docker_version.txt",
                              logger=ctx.logger)
def _collect_info(target_dir, ctx):
    """Capture `docker info` output into docker_info.txt in target_dir."""
    ctx.logger.progress("Collecting docker information...")
    return try_capture_output(cmd=["docker", "info"],
                              target_dir_path=target_dir,
                              file_name="docker_info.txt",
                              logger=ctx.logger)
| 43.294118 | 120 | 0.714674 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 406 | 0.183877 |
cb239cecbad0a6d1b7d8d21689a46f10325162cb | 1,185 | py | Python | itamar/find_box.py | ijda3/TrabalhoVisaoEquipe2 | 242209688badafc5ddbbdccaf4f18d38d51ec6f9 | [
"MIT"
] | null | null | null | itamar/find_box.py | ijda3/TrabalhoVisaoEquipe2 | 242209688badafc5ddbbdccaf4f18d38d51ec6f9 | [
"MIT"
] | null | null | null | itamar/find_box.py | ijda3/TrabalhoVisaoEquipe2 | 242209688badafc5ddbbdccaf4f18d38d51ec6f9 | [
"MIT"
] | 1 | 2019-09-17T10:03:18.000Z | 2019-09-17T10:03:18.000Z | import cv2
import numpy as np
# Normal routines
# Load the input image; cv2.imread returns None (not an error) for a missing
# file -- the .shape access below would then raise AttributeError.
img = cv2.imread('image3.png')
scale_percent = 30 # percent of original size
width = int(img.shape[1] * scale_percent / 100)
height = int(img.shape[0] * scale_percent / 100)
dim = (width, height)
# resize image
img = cv2.resize(img, dim, interpolation = cv2.INTER_AREA)
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# Inverse binary threshold at 50 (type flag 1 == cv2.THRESH_BINARY_INV).
ret,thresh = cv2.threshold(gray,50,255,1)
# Remove some small noise if any.
dilate = cv2.dilate(thresh,None)
erode = cv2.erode(dilate,None)
# Find contours with cv2.RETR_CCOMP
# NOTE(review): 2-value unpacking matches OpenCV 2.x/4.x; OpenCV 3.x returns
# (image, contours, hierarchy) -- confirm the installed cv2 version.
contours,hierarchy = cv2.findContours(erode,cv2.RETR_CCOMP,cv2.CHAIN_APPROX_SIMPLE)
for i,cnt in enumerate(contours):
    # Check if it is an external contour (no parent) and its area is more than 100
    if hierarchy[0,i,3] == -1 and cv2.contourArea(cnt)>100:
        x,y,w,h = cv2.boundingRect(cnt)
        # Keep only roughly box-sized contours (65-150 px per side).
        if w > 65 and w < 150 and h > 65 and h < 150:
            cv2.rectangle(img,(x,y),(x+w,y+h),(0,255,0),2)
            # m = cv2.moments(cnt)
            # cx,cy = m['m10']/m['m00'],m['m01']/m['m00']
            # cv2.circle(img,(int(cx),int(cy)),3,255,-1)
cv2.imshow('img',img)
# cv2.imwrite('sofsqure.png',img)
cv2.waitKey(0)
cv2.destroyAllWindows() | 28.214286 | 83 | 0.667511 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 352 | 0.297046 |
cb24335794dfb74e005ddf509d5b53a427be39f3 | 749 | py | Python | P0023.py | sebastianaldi17/ProjectEuler | 19562fba3456ec904bcc264fb786a92610e42622 | [
"MIT"
] | null | null | null | P0023.py | sebastianaldi17/ProjectEuler | 19562fba3456ec904bcc264fb786a92610e42622 | [
"MIT"
] | null | null | null | P0023.py | sebastianaldi17/ProjectEuler | 19562fba3456ec904bcc264fb786a92610e42622 | [
"MIT"
] | null | null | null | # Non-abundant sums
# https://projecteuler.net/problem=23
# This actually took 5 seconds to process.
# Maybe a faster solution is present?
from math import sqrt
from collections import defaultdict
def divisors(n):
    """Return True if n is abundant (its proper divisors sum to more than n).

    The name is kept for compatibility with the calling code below, but this
    is an abundance *test*, not a divisor listing. The original ended with a
    redundant ``if div > n: return True`` followed by ``return div > n``;
    the second return could only ever yield False.
    """
    total = 0
    for i in range(1, int(sqrt(n)) + 1):
        if n % i == 0:
            total += i  # i <= sqrt(n), so i is a proper divisor (n > 1)
            partner = n // i
            # Add the paired divisor unless it duplicates i (perfect square)
            # or equals n itself (n is not a *proper* divisor of n).
            if partner != i and partner != n:
                total += partner
    return total > n
ab = []  # all abundant numbers below the bound
ans = 0
d = defaultdict(int)  # marks integers expressible as abundant + abundant
# 28123 is the problem-statement bound: every larger integer is known to be
# a sum of two abundant numbers (https://projecteuler.net/problem=23).
for i in range(1, 28124):
    if divisors(i): ab.append(i)
print("Finished abundant number calculation")
# Mark every sum of two (not necessarily distinct) abundant numbers.
for i in range(len(ab)):
    for j in range(i, len(ab)):
        d[ab[i] + ab[j]] = 1
print("Finished mapping numbers")
# Sum the integers that were never marked as such a sum.
for i in range(1, 28124):
    if d[i] == 0: ans += i
print(ans) | 24.16129 | 46 | 0.595461 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 199 | 0.265688 |
cb24beb92beb674d28a67267a6db323918c01739 | 1,665 | py | Python | api/scpca_portal/config/production.py | AlexsLemonade/scpca-portal | d60d6db5abe892ed58764128269df936778c6fd7 | [
"BSD-3-Clause"
] | null | null | null | api/scpca_portal/config/production.py | AlexsLemonade/scpca-portal | d60d6db5abe892ed58764128269df936778c6fd7 | [
"BSD-3-Clause"
] | 85 | 2021-07-27T14:33:55.000Z | 2022-03-28T20:18:41.000Z | api/scpca_portal/config/production.py | AlexsLemonade/scpca-portal | d60d6db5abe892ed58764128269df936778c6fd7 | [
"BSD-3-Clause"
] | null | null | null | import os
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
from scpca_portal.config.common import Common
class Production(Common):
INSTALLED_APPS = Common.INSTALLED_APPS
SECRET_KEY = os.getenv("DJANGO_SECRET_KEY")
# Site
# https://docs.djangoproject.com/en/2.0/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ["*"]
INSTALLED_APPS += ("gunicorn",)
UPDATE_IMPORTED_DATA = True
# AWS
AWS_REGION = os.getenv("AWS_REGION")
# AWS S3
AWS_S3_BUCKET_NAME = os.getenv("AWS_S3_BUCKET_NAME")
# https://developers.google.com/web/fundamentals/performance/optimizing-content-efficiency/http-caching#cache-control
# Response can be cached by browser and any intermediary caches (i.e. it is "public") for up to 1 day
# 86400 = (60 seconds x 60 minutes x 24 hours)
AWS_HEADERS = {
"Cache-Control": "max-age=86400, s-maxage=86400, must-revalidate",
}
DATABASES = {
"default": {
"ENGINE": "django.db.backends.postgresql_psycopg2",
"NAME": os.getenv("DATABASE_NAME"),
"USER": os.getenv("DATABASE_USER"),
"PASSWORD": os.getenv("DATABASE_PASSWORD"),
"HOST": os.getenv("DATABASE_HOST"),
"PORT": os.getenv("DATABASE_PORT"),
}
}
sentry_sdk.init(
dsn=os.getenv("SENTRY_IO_URL"),
integrations=[DjangoIntegration()],
traces_sample_rate=1.0,
environment=os.getenv("SENTRY_ENV"),
# If you wish to associate users to errors (assuming you are using
# django.contrib.auth) you may enable sending PII data.
send_default_pii=True,
)
| 32.019231 | 121 | 0.655856 | 1,525 | 0.915916 | 0 | 0 | 0 | 0 | 0 | 0 | 795 | 0.477477 |
cb25b7a3a87d4948f644a33014449a6de3b01732 | 1,057 | py | Python | CPP-PyTorch-Ext/test_validation.py | AmirOfir/GCK3x3ConvLayer | adf6ddefb01888fb247edaa0d87e2da546781584 | [
"BSD-3-Clause"
] | null | null | null | CPP-PyTorch-Ext/test_validation.py | AmirOfir/GCK3x3ConvLayer | adf6ddefb01888fb247edaa0d87e2da546781584 | [
"BSD-3-Clause"
] | null | null | null | CPP-PyTorch-Ext/test_validation.py | AmirOfir/GCK3x3ConvLayer | adf6ddefb01888fb247edaa0d87e2da546781584 | [
"BSD-3-Clause"
] | null | null | null | import gc
import math
import numpy as np
import torch
import torch.nn.functional as F
import timeit
import time
from gck_layer import GCK3x3Layer
kernel_dim = 3
def tensors_equal(a,b):
b = torch.allclose(a, b, atol=0.01)
if (b):
print('same: True')
else:
print('Same: False (diff:', ((a-b).max()), ')')
def compareResults(batch_size: int, in_channels: int, out_channels: int, input_dim: int):
input = torch.randint(0,100, (batch_size, in_channels, input_dim, input_dim), dtype=torch.float32)
kernels = torch.ones(out_channels, in_channels, kernel_dim, kernel_dim, dtype=torch.float32)
expected = F.conv2d(input, kernels)
layer = GCK3x3Layer(in_channels, out_channels, 3, False, input_dim - 2, kernels)
result = layer.forward(input)
tensors_equal(expected, result)
# Validation configurations: (batch_size, in_channels, out_channels, input_dim).
lst = [
    (1, 1, 8, 1024),
    (1, 1, 16, 256),
    (1, 1, 64, 512),
    (1, 1, 128, 512),
    (1, 8, 16, 128),
]
for case in lst:
    compareResults(*case)
cb277323c3dc4f94fa2e361b8e6832bfbfd523a1 | 1,009 | py | Python | now_playing_graph/stats.py | macbre/now-playing-graph | 2c211ef5544a6892e8e5fc2556ffcc6b7ca7ab84 | [
"MIT"
] | null | null | null | now_playing_graph/stats.py | macbre/now-playing-graph | 2c211ef5544a6892e8e5fc2556ffcc6b7ca7ab84 | [
"MIT"
] | 3 | 2019-02-07T17:46:04.000Z | 2019-02-15T14:27:09.000Z | now_playing_graph/stats.py | macbre/now-playing-graph | 2c211ef5544a6892e8e5fc2556ffcc6b7ca7ab84 | [
"MIT"
] | null | null | null | """
Prepare some stats from timelines
"""
# https://docs.python.org/3.7/library/collections.html#collections.Counter
from collections import Counter
def get_timeline_stats(timeline):
"""
:type timeline list[now_playing_graph.timeline.TimelineEntry]
:rtype: dict
"""
top_artists = Counter()
top_songs = Counter()
longest_songs = dict()
longest_artists = Counter() # artist whose songs are played for the longest time
for entry in timeline:
top_artists.update((entry.artist_name,))
top_songs.update((entry.song_title,))
if entry.song_title not in longest_songs:
longest_songs[entry.song_title] = entry.duration
longest_artists += Counter({entry.artist_name: entry.duration})
return dict(
top_artists=top_artists.most_common(10),
top_songs=top_songs.most_common(10),
longest_songs=Counter(longest_songs).most_common(10),
longest_artists=Counter(longest_artists).most_common(10),
)
| 30.575758 | 85 | 0.697721 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 261 | 0.258672 |
cb279012d1ad198dad56cddf7894586685f1a73f | 10,197 | py | Python | limnoria-plugins/SupportNotifications/plugin.py | chevah/ircbot-plugins | b3d7d274600c85ab856880b4ec693ee913c25c8a | [
"MIT"
] | null | null | null | limnoria-plugins/SupportNotifications/plugin.py | chevah/ircbot-plugins | b3d7d274600c85ab856880b4ec693ee913c25c8a | [
"MIT"
] | 2 | 2017-05-24T10:39:28.000Z | 2019-11-21T12:45:20.000Z | limnoria-plugins/SupportNotifications/plugin.py | chevah/ircbot-plugins | b3d7d274600c85ab856880b4ec693ee913c25c8a | [
"MIT"
] | null | null | null | """
Use GMail API to check an Inbox for new emails.
To generate the initial credentials you will need to execute this module from
python with first argument to a patch containing your API client details
and the second argument to the file where to store the credentials.
It uses the `historyID` configuration option to optimize the search.
It will start with searching all messages from Inbox and then will keep the
record of the history id.
So future runs and restarts should resume only from last history_id.
As long as there is no message in In
"""
from __future__ import print_function
from threading import Event, Thread
from supybot import (
ircmsgs,
)
from supybot.callbacks import ArgumentError, Plugin
import httplib2
from apiclient import discovery, errors
from oauth2client.file import Storage
class CancellableTimer(Thread):
    """
    A threaded timer which can be closed before the scheduled execution.

    Sleeps for ``delay`` seconds in small increments so that ``cancel()``
    (or the IRC connection becoming a zombie) is noticed quickly, then
    invokes the callback -- unless the timer was explicitly cancelled.
    """
    def __init__(self, delay, callback, irc, *args, **kwargs):
        Thread.__init__(self)
        self._event = Event()  # set() means "cancelled"
        self._step = 0.25      # granularity of the interruptible sleep
        # Bug fix: this used to be ``delay / self._step`` while the loop
        # decremented by ``self._step``, which made the timer wait 4x the
        # requested delay. Track seconds remaining directly instead.
        self._remaining = delay
        self._irc = irc
        self._callback = callback
        self._args = args
        self._kwargs = kwargs

    def run(self):
        while (
            self._remaining > 0 and
            not self._event.is_set()
            and not self._irc.zombie
        ):
            self._remaining -= self._step
            self._event.wait(self._step)
        # Bug fix: previously the callback fired even after cancel(); a
        # cancelled timer must not run its callback.
        if not self._event.is_set():
            self._callback(*self._args, **self._kwargs)

    def cancel(self):
        """Stop the timer; the callback will not be invoked."""
        self._event.set()
def get_credentials(credentials_path):
    """
    Gets valid user credentials from storage.

    If nothing has been stored, or if the stored credentials are invalid,
    returns None.

    Returns:
        Credentials, the obtained credential or None
    """
    stored = Storage(credentials_path).get()
    if stored and not stored.invalid:
        return stored
    return None
class SupportNotifications(Plugin):
    """
    Send to the configured channel info about latest support activity.

    Polls a GMail inbox via the GMail API on a timer and announces, on the
    IRC channel the bot has joined, replies sent by team members to the
    configured support address.
    """
    threaded = True

    def __init__(self, irc):
        super(SupportNotifications, self).__init__(irc)
        self._irc = irc
        self._timer = None
        self._channel_name = None

        # Resume point for the GMail history API; falsy means "full scan".
        self._history_id = self.registryValue('historyID')
        self._interval = self.registryValue('pollInterval')
        self._target_address = self.registryValue('targetAddress').strip()
        self._team_domain = self.registryValue('teamDomain').strip()

        if not self._target_address:
            raise ArgumentError('targetAddress not configured.')
        if not self._team_domain:
            raise ArgumentError('teamDomain not configured.')

        credentials_path = self.registryValue('credentialsPath')
        credentials = get_credentials(credentials_path)
        if not credentials:
            raise ArgumentError('Failed to load API credentials.')

        http = credentials.authorize(httplib2.Http())
        self._google = discovery.build('gmail', 'v1', http=http, cache=False)

    def die(self):
        """
        Called when the plugin should end.
        """
        if self._timer is None:
            return
        self.log.info("Closing plugin. Cancelling the timer")
        self._timer.cancel()

    def do353(self, irc, msg):
        """
        Called when we have joined the channel.

        RPL_NAMREPLY - called when we have joined the channel and have
        received the list of members.
        """
        self._channel_name = msg.args[2]
        self.log.info(
            'Start polling for %s changes at %d seconds and sending to %s.' % (
                self._target_address, self._interval, self._channel_name))
        self._scheduleNextCheck()

    def _scheduleNextCheck(self):
        """
        Called to schedule the next API check.
        """
        if self._irc.zombie:
            # We are not connected.
            return
        if self._timer:
            # A check is already pending.
            return
        self._timer = CancellableTimer(self._interval, self._tick, self._irc)
        self._timer.start()
        self.log.debug('Scheduling a new check.')

    def _tick(self):
        """
        Called when we should check for new emails.
        """
        if self._irc.zombie:
            # We are not connected.
            return
        self._checkInbox()
        # Schedule the next tick.
        self._timer = None
        self._scheduleNextCheck()

    def _checkInbox(self):
        """
        Emit changes for the new emails from inbox.
        """
        inbox_labels = [u'INBOX', u'UNREAD']
        details = None
        messages = self._getNewMessages()
        for message in messages:
            details = self._getMessageDetails(msg_id=message['id'])
            if set(inbox_labels) - set(details['labelIds']):
                # Not a unread message which is in Inbox.
                # Just ignore it for now.
                continue
            # Header names are case-insensitive; normalize for lookups.
            headers = {}
            for raw_header in details['payload']['headers']:
                headers[raw_header['name'].lower()] = raw_header['value']
            self._checkMessage(headers)
            # Any message is removed so that we will not process it later.
            self._trash(msg_id=message['id'])
        if details:
            # Remember where we got to so the next poll resumes from here.
            self._history_id = int(details['historyId'])
            self.setRegistryValue('historyID', value=self._history_id)

    def _getNewMessages(self):
        """
        Return the new messages since the last run.
        """
        if self._history_id:
            return self._getMessagesFromHistory()
        else:
            return self._getMessagesFromInbox()

    def _getMessagesFromHistory(self):
        """
        Return the new messages based on account history.
        """
        start_history_id = self._history_id
        messages = []
        try:
            history = self._google.users().history().list(
                userId='me',
                startHistoryId=start_history_id,
                historyTypes='messageAdded',
                ).execute()
            changes = history['history'] if 'history' in history else []
            for change in changes:
                messages.extend(change.get('messages', []))
            while 'nextPageToken' in history:
                page_token = history['nextPageToken']
                history = self._google.users().history().list(
                    userId='me',
                    startHistoryId=start_history_id,
                    historyTypes='messageAdded',
                    pageToken=page_token,
                    ).execute()
                for change in history['history']:
                    messages.extend(change.get('messages', []))
            return messages
        # Fixed the Python-2-only ``except X, e`` syntax (SyntaxError on
        # Python 3); ``as`` is valid on Python 2.6+ as well.
        except errors.HttpError as error:
            self.log.error(
                '_getMessagesFromHistory: An error occurred: %s' % error)
            return []

    def _getMessagesFromInbox(self):
        """
        Return all the messages from Inbox.
        """
        label_ids = ['INBOX']
        messages = []
        try:
            response = self._google.users().messages().list(
                userId='me',
                labelIds=label_ids,
                ).execute()
            if 'messages' in response:
                messages.extend(response['messages'])
            while 'nextPageToken' in response:
                page_token = response['nextPageToken']
                response = self._google.users().messages().list(
                    userId='me',
                    labelIds=label_ids,
                    pageToken=page_token,
                    ).execute()
                messages.extend(response['messages'])
            return messages
        except errors.HttpError as error:
            self.log.error(
                '_getMessagesFromInbox: An error occurred: %s' % error)
            return []

    def _trash(self, msg_id):
        """
        Delete a message by sending it to trash.
        """
        try:
            self._google.users().messages().trash(
                userId='me', id=msg_id).execute()
        except errors.HttpError as error:
            self.log.error('_trash: An error occurred: %s' % error)

    def _getMessageDetails(self, msg_id):
        """
        Get a Message with given ID; returns None on API error.
        """
        try:
            message = self._google.users().messages().get(
                userId='me', id=msg_id).execute()
            return message
        except errors.HttpError as error:
            self.log.error('_getMessageDetails: An error occurred: %s' % error)

    def _checkMessage(self, headers):
        """
        Check message to decide if we should send a notification.

        We notify only for messages that (1) involve the support address,
        (2) are sent by a team member, and (3) also have at least one
        recipient outside the team.
        """
        to = headers.get('to', '')
        cc = headers.get('cc', '')
        if not (self._target_address in to or self._target_address in cc):
            # Not a message for our support email.
            return
        sender = headers.get('from', '')
        if self._team_domain not in sender:
            # Not a message from our team.
            return
        # We will ignore if the email is only to the internal team, that is
        # it does not have any outside TO or CC.
        has_outside = False
        for email in to.split('>, ') + cc.split('>, '):
            email = email.strip()
            if not email:
                continue
            if self._team_domain not in email:
                has_outside = True
                break
        if not has_outside:
            # Not a message outside the team.
            return
        subject = headers.get('subject', 'NO SUBJECT')
        self._sendStatus(sender, subject)

    def _sendStatus(self, sender, subject):
        """
        Send an IRC notification.
        """
        # Extract the local part of an address formatted as "Name <user@host>".
        author = sender.split('@', 1)[0].split('<', 1)[1]
        self._irc.queueMsg(ircmsgs.notice(
            recipient=self._channel_name,
            s='[support-inbox][%s] Replied to: %s' % (author, subject),
            prefix='<support-notifications>',
            ))
Class = SupportNotifications  # entry-point name required by Supybot's plugin loader
| 30.9 | 79 | 0.577229 | 8,919 | 0.874669 | 0 | 0 | 0 | 0 | 0 | 0 | 3,085 | 0.30254 |
cb27ec0e5739de15db5126410f025e93999f0c1d | 435 | py | Python | qusetta/_version.py | qcware/qusetta | cf4c1e47d819679a8b4d683326bd6d005686a285 | [
"MIT"
] | 3 | 2020-07-15T15:59:41.000Z | 2022-02-05T15:06:16.000Z | qusetta/_version.py | qcware/qusetta | cf4c1e47d819679a8b4d683326bd6d005686a285 | [
"MIT"
] | 2 | 2021-01-21T12:03:24.000Z | 2021-01-26T18:07:27.000Z | qusetta/_version.py | qcware/qusetta | cf4c1e47d819679a8b4d683326bd6d005686a285 | [
"MIT"
] | 3 | 2020-07-15T15:59:47.000Z | 2022-02-07T15:09:51.000Z | """Defines the version number and details of ``qusetta``."""
# Names re-exported by ``from qusetta._version import *``.
__all__ = (
    '__version__',
    '__author__',
    '__authoremail__',
    '__license__',
    '__sourceurl__',
    '__description__',
)

# Package metadata constants consumed by setup tooling and docs.
__version__ = "0.0.0"
__author__ = "Joseph T. Iosue"
__authoremail__ = "joe.iosue@qcware.com"
__license__ = "MIT License"
__sourceurl__ = "https://github.com/qcware/qusetta"
__description__ = "Translating quantum circuits to and from representations"
| 29 | 76 | 0.731034 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 299 | 0.687356 |
cb27ec17609d44f92920d957068c2736f843142c | 777 | py | Python | LobbyService/test/test_get_lobbies.py | Devin0xFFFFFF/singed-feathers | 747eef56fb3ba2a73cb7ba851462894efeddd00b | [
"BSD-3-Clause"
] | 1 | 2017-06-20T15:19:40.000Z | 2017-06-20T15:19:40.000Z | LobbyService/test/test_get_lobbies.py | Devin0xFFFFFF/singed-feathers | 747eef56fb3ba2a73cb7ba851462894efeddd00b | [
"BSD-3-Clause"
] | 3 | 2019-02-13T16:05:25.000Z | 2019-02-15T03:31:06.000Z | LobbyService/test/test_get_lobbies.py | Devin0xFFFFFF/singed-feathers | 747eef56fb3ba2a73cb7ba851462894efeddd00b | [
"BSD-3-Clause"
] | null | null | null | import pytest
from mock import Mock, patch
from service import get_lobbies
@patch('service.lobby_service_common.get_public_lobbies')
def test_get_lobbies(get_public_lobbies_mock):
    """Handler wraps the (empty) lobby listing in a CORS-enabled 200 response."""
    get_public_lobbies_mock.return_value = {}

    response = get_lobbies.lambda_handler({}, None)

    get_public_lobbies_mock.assert_called()
    expected = {
        'statusCode': 200,
        'body': '{"Lobbies": {}}',
        'headers': {'Access-Control-Allow-Origin': '*'},
    }
    assert response == expected
@patch('service.lobby_service_common.get_public_lobbies')
def test_get_lobbies_dynamo_failure(get_public_lobbies_mock):
    """Handler propagates an IOError raised by the lobby lookup."""
    # Assign the exception directly as side_effect: the original wrapped it
    # in a nested Mock(side_effect=...), which raises the same error but
    # obscures intent, and performed this setup inside the raises block.
    get_public_lobbies_mock.side_effect = IOError('Dynamo Exception')
    with pytest.raises(IOError):
        get_lobbies.lambda_handler({}, None)
| 29.884615 | 116 | 0.759331 | 0 | 0 | 0 | 0 | 695 | 0.894466 | 0 | 0 | 192 | 0.247104 |
cb283b303e7b53a68315f74dab085b92d21f9186 | 1,031 | py | Python | truncande/cli.py | Ricyteach/truncande | b7c529cb141b6c534833b4f9536ea6910412d637 | [
"MIT"
] | null | null | null | truncande/cli.py | Ricyteach/truncande | b7c529cb141b6c534833b4f9536ea6910412d637 | [
"MIT"
] | null | null | null | truncande/cli.py | Ricyteach/truncande | b7c529cb141b6c534833b4f9536ea6910412d637 | [
"MIT"
] | null | null | null | import pathlib
import click
from . import candeout
@click.group()
@click.argument("ifile", type=click.Path(exists=True, dir_okay=False), required=True)
@click.argument(
"ofile",
type=click.Path(exists=False, dir_okay=False, writable=True),
required=False,
)
@click.pass_context
def main(ctx, ifile, ofile=""):
ifile = pathlib.Path(ifile)
ofile = (
pathlib.Path(ofile) if ofile else ifile.parent / (ifile.stem + " truncated.txt")
)
ctx.ensure_object(dict)
ctx.obj["ifile"] = ifile
ctx.obj["ofile"] = ofile
ctx.obj["candeout"] = candeout.CandeOut(ifile.read_text().split("\n"))
@main.command()
@click.argument("steps", nargs=-1, type=int)
@click.pass_context
def steps(ctx, steps=(-1,)):
    """Remove the given analysis steps from the loaded CandeOut data."""
    loaded: candeout.CandeOut = ctx.obj["candeout"]
    ctx.obj["out"] = candeout.remove_steps(loaded, steps)
@main.resultcallback()
@click.pass_context
def write_file(ctx, *args, **kwargs):
    """After every command, write the current CandeOut lines to the output path."""
    # NOTE(review): this writes ``ctx.obj["candeout"].lines`` rather than the
    # ``ctx.obj["out"]`` produced by the ``steps`` command -- presumably
    # ``remove_steps`` mutates the CandeOut in place; confirm, otherwise the
    # truncation result is silently discarded.
    ofile: pathlib.Path = ctx.obj["ofile"]
    ofile.write_text("\n".join(ctx.obj["candeout"].lines))
| 25.775 | 88 | 0.676043 | 0 | 0 | 0 | 0 | 969 | 0.939864 | 0 | 0 | 103 | 0.099903 |
cb2a679f6bf1c9fc3be6603b4c524bc925b318db | 756 | py | Python | conference/decorators.py | ethancarlsson/epcon | 10ae259ad75271651506d44cc5e71cf089349ea3 | [
"BSD-2-Clause"
] | 40 | 2015-03-03T22:14:58.000Z | 2022-02-15T22:27:48.000Z | conference/decorators.py | ethancarlsson/epcon | 10ae259ad75271651506d44cc5e71cf089349ea3 | [
"BSD-2-Clause"
] | 699 | 2015-01-21T10:13:29.000Z | 2022-02-08T09:26:36.000Z | conference/decorators.py | ethancarlsson/epcon | 10ae259ad75271651506d44cc5e71cf089349ea3 | [
"BSD-2-Clause"
] | 96 | 2015-01-22T11:03:13.000Z | 2022-01-31T05:35:34.000Z | import functools
from django.contrib import messages
from django.urls import reverse
from django.shortcuts import redirect
def full_profile_required(func):
    """Redirect authenticated users with an incomplete profile to settings."""
    @functools.wraps(func)
    def wrapper(request, *args, **kwargs):
        user = request.user
        # FIXME test mocks mess with the user object so we have to check the id
        authenticated = bool(user and user.id)
        # Only inspect the profile for authenticated users (preserves the
        # original short-circuit behaviour).
        incomplete = authenticated and (
            not user.attendeeprofile or not user.attendeeprofile.gender
        )
        if incomplete:
            messages.warning(
                request,
                "Please update your profile to continue using the EuroPython website."
            )
            return redirect(reverse('user_panel:profile_settings'))
        return func(request, *args, **kwargs)
    return wrapper
| 31.5 | 109 | 0.661376 | 0 | 0 | 0 | 0 | 573 | 0.757937 | 0 | 0 | 171 | 0.22619 |
cb2b37553000857b2471180fda8b3f952f6a125d | 3,642 | py | Python | flaskdynamo_v1_completeWithCardsTwoPage_API_Version_json_req_type (copy)/2paginator.py | A9K5/Python_Flask | 6e63c80fff1073e4b2a7dbb4e75a15363a9fe432 | [
"MIT"
] | null | null | null | flaskdynamo_v1_completeWithCardsTwoPage_API_Version_json_req_type (copy)/2paginator.py | A9K5/Python_Flask | 6e63c80fff1073e4b2a7dbb4e75a15363a9fe432 | [
"MIT"
] | null | null | null | flaskdynamo_v1_completeWithCardsTwoPage_API_Version_json_req_type (copy)/2paginator.py | A9K5/Python_Flask | 6e63c80fff1073e4b2a7dbb4e75a15363a9fe432 | [
"MIT"
] | null | null | null | from flask import Flask, render_template, request, redirect, jsonify
from flask_cors import CORS, cross_origin
from datetime import datetime
from flask import Blueprint
from flask_paginate import Pagination, get_page_parameter
import botocore
import boto3
import decimal
import logging
import time
import argparse
import json
import random
import string
from boto3.dynamodb.conditions import Key, Attr
# dynamodb = boto3.resource('dynamodb')#, region_name='us-west-2', endpoint_url="http://localhost:8000")
# table = dynamodb.Table('IOT4')
# AllowedActions = ['both', 'publish', 'subscribe']
app = Flask(__name__)
cors = CORS(app)  # allow cross-origin requests from the frontend
app.config['CORS_HEADERS'] = 'Content-Type'
@app.route('/temp2')
def temp2():
    """Render the first 5-item page of the IOT4 table scan.

    NOTE(review): the ``return`` sits inside the page loop, so only the
    first page is ever rendered -- with MaxItems == PageSize == 5 there is
    only one page anyway.
    """
    # startToken = request.values.get("startToken")
    # print(startToken)
    client = boto3.client('dynamodb')
    paginator = client.get_paginator('scan')
    tasks = 1
    startToken = 1
    response_iterator = paginator.paginate(
        TableName="IOT4",
        # Limit=3
        PaginationConfig={
            'MaxItems': 5,
            'PageSize': 5,
            # 'StartingToken':startToken #page['LastEvaluatedKey']
        }
    )
    for page in response_iterator:
        # print(page['Items'])
        # print(page)
        tasks = page['Items']
        # return (jsonify(page['Items']))
        # LastEvaluatedKey is present only when more pages remain; it is the
        # token to resume the scan from.
        for key in page:
            if key == "LastEvaluatedKey":
                print(page['LastEvaluatedKey']['_id']['S'])
                startToken = page['LastEvaluatedKey']['_id']['S']
        return render_template('2paginator.html',startToken = startToken, tasks= tasks)
    # if page['LastEvaluatedKey'] != Null:
    #     startToken = page['LastEvaluatedKey']['_id']['S']
    #     print (startToken)
    # else:
    #     startToken = 0
    #     print(startToken)
    # print(response_iterator)
    # return response_iterator
@app.route('/temp3',methods=['POST'])
def temp3():
    """Experimental: resume a paginated scan from a posted start token.

    NOTE(review): this endpoint looks unfinished -- ``for kay,val in
    response_iterator`` tries to unpack each page (a dict) into two names,
    and the function returns None, which Flask rejects. The hard-coded
    'StartingToken' dict also ignores the posted ``startToken``.
    """
    startToken = request.values.get("startToken")
    print(startToken)
    client = boto3.client('dynamodb')
    paginator = client.get_paginator('scan')
    tasks = 1
    # startToken = 1
    response_iterator = paginator.paginate(
        TableName="IOT4",
        # Limit=3
        PaginationConfig={
            'MaxItems': 5,
            'PageSize': 5,
            'StartingToken': { '_id':'2018-06-26 15:09:51.103' } #page['LastEvaluatedKey']
        }
    )
    print(response_iterator)
    for kay,val in response_iterator:
        print(kay)
    # for page in response_iterator:
    #     # print(page['Items'])
    #     # print(page)
    #     tasks = page['Items']
    # return (jsonify(page['Items']))
    # for key in page:
    #     if key == "LastEvaluatedKey":
    #         print(page['LastEvaluatedKey']['_id']['S'])
    #         startToken = page['LastEvaluatedKey']['_id']['S']
    # return (jsonify(startToken))
    # return render_template('2paginator.html',startToken = startToken, tasks= tasks)
@app.route('/temp4')
def temp4():
    """Scan the IOT4 table one 5-item page at a time and dump each page."""
    dynamo = boto3.client('dynamodb')
    scan_pages = dynamo.get_paginator('scan').paginate(
        TableName="IOT4",
        PaginationConfig={'MaxItems': 5, 'PageSize': 5},
    )
    tasks = 1  # kept from the original; never read
    for page in scan_pages:
        print(page)
    return("qwe")
if __name__ == '__main__':
    # Bind on the LAN-facing address so other devices can reach the dev server.
    app.run(debug = True, host="192.168.0.117") # alt address: 192.168.43.140
cb2b648795be23b41ff6ba13bc051d25b71eee73 | 1,279 | py | Python | snap_scripts/old_scripts/tem_iem_older_scripts_april2018/tem_inputs_iem/min_max_deltas_tem_iem.py | ua-snap/downscale | 3fe8ea1774cf82149d19561ce5f19b25e6cba6fb | [
"MIT"
] | 5 | 2020-06-24T21:55:12.000Z | 2022-03-23T16:32:54.000Z | snap_scripts/old_scripts/tem_iem_older_scripts_april2018/tem_inputs_iem/min_max_deltas_tem_iem.py | ua-snap/downscale | 3fe8ea1774cf82149d19561ce5f19b25e6cba6fb | [
"MIT"
] | 17 | 2016-01-04T23:37:47.000Z | 2017-04-17T20:57:02.000Z | snap_scripts/old_scripts/tem_iem_older_scripts_april2018/tem_inputs_iem/min_max_deltas_tem_iem.py | ua-snap/downscale | 3fe8ea1774cf82149d19561ce5f19b25e6cba6fb | [
"MIT"
] | 3 | 2020-09-16T04:48:57.000Z | 2021-05-25T03:46:00.000Z | from downscale import DeltaDownscale
class DeltaDownscaleMM( DeltaDownscale ):
    """DeltaDownscale variant for min/max temperature runs.

    Both overrides are stubs that only log: the deltas are expected to be
    precomputed on disk via ``delta_mm`` below instead of being derived here.
    """
    def _calc_anomalies( self ):
        # Stub: anomalies (extreme - mean deltas) are precomputed externally.
        print('calculating anomalies')
    def downscale( self, *args, **kwargs ):
        # Stub: placeholder for the min/max downscaling step.
        print( 'downscaling...' )
# FOR RUN OF THE MIN / MAX TAS DATA:
# 1. COMPUTE DELTAS FIRST ANND WRITE TO NETCDF
# 2. USE `DeltaDownscaleMinMax` for the downscaling
def delta_mm( fn, mean_fn, variable, mean_variable, output_filename ):
    '''
    Compute extreme-minus-mean deltas at native model resolution and
    write them to NetCDF4 on disk; returns the output filename.
    '''
    import xarray as xr
    extreme = xr.open_dataset( fn )[ variable ]
    mean = xr.open_dataset( mean_fn )[ mean_variable ]
    # The difference carries the extreme variable's name in the output file.
    (extreme - mean).to_dataset( name=variable ).to_netcdf( output_filename )
    return output_filename
# Inputs: CRU TS3.23 monthly maximum temperature (tmx) and mean temperature
# (tmp) grids; output is the tmx-minus-tmp delta file used downstream.
fn = '/Data/Base_Data/Climate/World/CRU_grids/CRU_TS323/cru_ts3.23.1901.2014.tmx.dat.nc'
mean_fn = '/Data/Base_Data/Climate/World/CRU_grids/CRU_TS323/cru_ts3.23.1901.2014.tmp.dat.nc'
variable = 'tmx'
mean_variable = 'tmp'
output_filename = '/workspace/Shared/Tech_Projects/ESGF_Data_Access/project_data/tem_data_sep2016/test/cru_ts3.23.1901.2014.tmx_delta_tmp.dat.nc'
delta_mm( fn, mean_fn, variable, mean_variable, output_filename )
# now use the new DeltaDownscaleMM class to do the work.
| 33.657895 | 145 | 0.762314 | 174 | 0.136044 | 0 | 0 | 0 | 0 | 0 | 0 | 642 | 0.501955 |
cb2cbf8ad2d571d45d6c4cf03720870a2ec15b6b | 13,552 | py | Python | mldftdat/dft/utils.py | mir-group/CiderPress | bf2b3536e6bd7432645c18dce5a745d63bc9df59 | [
"MIT"
] | 10 | 2021-09-09T06:51:57.000Z | 2021-12-17T09:48:41.000Z | mldftdat/dft/utils.py | mir-group/CiderPress | bf2b3536e6bd7432645c18dce5a745d63bc9df59 | [
"MIT"
] | null | null | null | mldftdat/dft/utils.py | mir-group/CiderPress | bf2b3536e6bd7432645c18dce5a745d63bc9df59 | [
"MIT"
] | null | null | null | import numpy as np
from mldftdat.pyscf_utils import *
from mldftdat.workflow_utils import safe_mem_cap_mb
from pyscf.dft.numint import eval_ao, make_mask
from mldftdat.density import LDA_FACTOR,\
contract21_deriv, contract21, GG_AMIN
def dtauw(rho_data):
    """Derivatives of the Weizsacker KE density wrt density and |grad n|."""
    n = rho_data[0, :]
    grad = get_gradient_magnitude(rho_data)
    d_dn = -grad**2 / (8 * n**2 + 1e-16)
    d_dgrad = 1 / (8 * n + 1e-16)
    return d_dn, d_dgrad
def dsdp(s):
    """Derivative of s with respect to p = s^2, i.e. 1 / (2 s)."""
    return 0.5 / s
def dasinhsdp(s):
    """Chain rule: d asinh(s)/dp with p = s^2 (denominator regularized at 0)."""
    denom = 2 * s + 1e-10
    return arcsinh_deriv(s) / denom
def ds2(rho_data):
    """Derivatives of p = s^2 (squared reduced gradient) wrt n and sigma."""
    rho = rho_data[0, :]
    sprefac = 2 * (3 * np.pi * np.pi)**(1.0/3)
    denom = sprefac * rho**(4.0/3) + 1e-16
    # s = |nabla n| / (sprefac * n^(4/3))
    s2 = (get_gradient_magnitude(rho_data) / denom)**2
    dpdn = -8.0 * s2 / (3 * rho + 1e-16)
    dpdsigma = 1 / denom**2
    return dpdn, dpdsigma
def dalpha(rho_data):
    """Derivatives of the iso-orbital indicator alpha wrt n, sigma and tau."""
    rho = rho_data[0, :]
    tau = rho_data[5, :]
    tau_unif = get_uniform_tau(rho) + 1e-16
    tau_w = get_single_orbital_tau(rho, get_gradient_magnitude(rho_data))
    dwdn, dwds = dtauw(rho_data)
    dadn = 5.0 * (tau_w - tau) / (3 * tau_unif * rho + 1e-16) - dwdn / tau_unif
    dadsigma = -dwds / tau_unif
    dadtau = 1 / tau_unif
    return dadn, dadsigma, dadtau
LDA_FACTOR = - 3.0 / 4.0 * (3.0 / np.pi)**(1.0/3)  # Slater exchange prefactor; NOTE(review): re-defines the name imported from mldftdat.density above -- presumably the same value, confirm
def v_semilocal(rho_data, F, dfdp, dfdalpha):
    """Semilocal contribution to the exchange potential.

    Rows of the returned (4, N) array: 0 - n, 1 - p, 2 - nabla^2 (unused),
    3 - alpha.
    """
    npts = rho_data.shape[1]
    v = np.zeros((4, npts))
    rho = rho_data[0, :]
    e_lda = LDA_FACTOR * rho**(4.0/3)
    # dE/dn (line 1)
    v[0] = 4.0 / 3 * LDA_FACTOR * rho**(1.0/3) * F
    # dE/dp (line 1)
    v[1] = e_lda * dfdp
    # dE/dalpha (line 1)
    v[3] = e_lda * dfdalpha
    return v
def v_basis_transform(rho_data, v_npalpha):
    """
    Transform the exchange potential basis from (density, reduced gradient
    p, alpha) to (density, contracted gradient sigma, kinetic energy tau).

    v_npalpha is a 4xN array:
        0 - Explicit functional derivative wrt density (not accounting for
            derivatives of the XEF features wrt density)
        1 - Functional derivative wrt p (squared reduced gradient)
        2 - ZERO (wrt normalized laplacian)
        3 - Functional derivative wrt the isoorbital indicator alpha

    Returns a 4xN array:
        0 - Full derivative wrt density, including dp/dn and dalpha/dn
        1 - Derivative wrt sigma = |nabla n|^2
        2 - ZERO (wrt the laplacian of the density)
        3 - Derivative wrt tau, the kinetic energy density
    """
    dpdn, dpdsigma = ds2(rho_data)
    dadn, dadsigma, dadtau = dalpha(rho_data)
    v_nst = np.zeros(v_npalpha.shape)
    # Full dE/dn: explicit term plus chain-rule terms through p and alpha.
    v_nst[0] = v_npalpha[0] + v_npalpha[1] * dpdn + v_npalpha[3] * dadn
    # dE/dsigma: chain rule through p and alpha.
    v_nst[1] = v_npalpha[1] * dpdsigma + v_npalpha[3] * dadsigma
    # dE/dtau: alpha is the only feature that depends on tau.
    v_nst[3] = v_npalpha[3] * dadtau
    return v_nst
def v_nonlocal_general(rho_data, grid, dedg, density, auxmol,
                       g, gr2, ovlp, l = 0, mul = 1.0):
    """Nonlocal contribution to the potential for a CIDER feature of angular
    momentum ``l``; returns (v_npa, dedaux).

    g should have shape (2l+1, N). Negative l codes (-1, -2) mean the caller
    has already folded the angular prefactor into ``dedg``.

    NOTE(review): GG_SMUL and GG_AMUL are not defined in this module;
    presumably they come from the wildcard import of mldftdat.pyscf_utils --
    confirm.
    """
    # g should have shape (2l+1, N)
    N = grid.weights.shape[0]
    lc = get_dft_input2(rho_data)[:3]
    if l == 0:
        dedb = dedg.reshape(1, -1)
    elif l == 1:
        #dedb = 2 * elda * g * dfdg
        dedb = 2 * dedg * g #/ (np.linalg.norm(g, axis=0) + 1e-10)
    elif l == 2:
        dedb = 2 * dedg * g / np.sqrt(5)
    elif l == -2:
        dedb = dedg
        l = 2
    elif l == -1:
        dedb = dedg
        l = 1
    else:
        raise ValueError('angular momentum code l=%d unknown' % l)
    rho, s, alpha = lc
    # Gaussian exponent a scaled by the gradient/alpha-dependent factor.
    a = np.pi * (mul * rho / 2 + 1e-16)**(2.0 / 3)
    scale = 1
    fac = (6 * np.pi**2)**(2.0/3) / (16 * np.pi)
    scale += GG_SMUL * fac * s**2
    scale += GG_AMUL * 0.6 * fac * (alpha - 1)
    a = a * scale
    # Smoothly clamp small exponents to avoid divergence below GG_AMIN.
    cond = a < GG_AMIN
    da = np.exp(a[cond] / GG_AMIN - 1)
    a[cond] = GG_AMIN * np.exp(a[cond] / GG_AMIN - 1)
    # (ngrid * (2l+1), naux)
    dedb[:,rho<1e-8] = 0
    dedaux = np.dot((dedb * grid.weights).T.flatten(), ovlp)
    dgda = l / (2 * a) * g - gr2
    #print(dgda.shape, gr2.shape)
    dgda[:,rho<1e-8] = 0
    # Chain-rule factors of the exponent wrt n, p and alpha (with the
    # clamping correction applied where the exponent was rescaled).
    dadn = mul * a / (3 * (mul * rho / 2 + 1e-16))
    dadp = GG_SMUL * np.pi * fac * (mul * rho / 2 + 1e-16)**(2.0/3)
    dadalpha = GG_AMUL * 0.6 * np.pi * fac * (mul * rho / 2 + 1e-16)**(2.0/3)
    dadn[cond] *= da
    dadp[cond] *= da
    dadalpha[cond] *= da
    # add in line 3 of dE/dn, line 2 of dE/dp and dE/dalpha
    v_npa = np.zeros((4, N))
    deda = np.einsum('mi,mi->i', dedb, dgda)
    v_npa[0] = deda * dadn
    v_npa[1] = deda * dadp
    v_npa[3] = deda * dadalpha
    return v_npa, dedaux
def v_nonlocal(rho_data, grid, dedg, density, auxmol,
               g, gr2, ovlp, l=0, a0=8.0, fac_mul=0.25,
               amin=GG_AMIN, l_add=0, **kwargs):
    """Nonlocal contribution to the potential for a CIDER feature with a
    tunable exponent model (a0, fac_mul, amin); returns (v_npa, dedaux).

    g should have shape (2l+1, N). Negative l codes (-1, -2) mean the caller
    has already folded the angular prefactor into ``dedg``.
    """
    #print(l, l_add, a0, fac_mul, amin)
    # g should have shape (2l+1, N)
    N = grid.weights.shape[0]
    lc = get_dft_input2(rho_data)[:3]
    if l == 0:
        dedb = dedg.reshape(1, -1)
    elif l == 1:
        dedb = 2 * dedg * g
    elif l == 2:
        dedb = 2 * dedg * g / np.sqrt(5)
    elif l == -2:
        dedb = dedg
        l = 2
    elif l == -1:
        dedb = dedg
        l = 1
    else:
        raise ValueError('angular momentum code l=%d unknown' % l)
    rho, s, alpha = lc
    # Exponent model: a ~ rho^(2/3) * (a0 + (alpha + 5/3 s^2 - 1) * fac).
    ratio = alpha + 5./3 * s**2
    fac = fac_mul * 1.2 * (6 * np.pi**2)**(2.0/3) / np.pi
    a = np.pi * (rho / 2 + 1e-16)**(2.0 / 3)
    scale = a0 + (ratio-1) * fac
    a = a * scale
    # Smoothly clamp small exponents to avoid divergence below amin.
    cond = a < amin
    da = np.exp(a[cond] / amin - 1)
    a[cond] = amin * np.exp(a[cond] / amin - 1)
    dedb[:,rho<1e-8] = 0
    dedaux = np.dot((dedb * grid.weights).T.flatten(), ovlp)
    dgda = (l + l_add) / (2 * a) * g - gr2
    dgda[:,rho<1e-8] = 0
    # Chain-rule factors of the exponent wrt n, p and alpha (with the
    # clamping correction applied where the exponent was rescaled).
    dadn = 2 * a / (3 * rho + 1e-16)
    dadalpha = np.pi * fac * (rho / 2 + 1e-16)**(2.0/3)
    dadp = 5./3 * dadalpha
    dadn[cond] *= da
    dadp[cond] *= da
    dadalpha[cond] *= da
    # add in line 3 of dE/dn, line 2 of dE/dp and dE/dalpha
    v_npa = np.zeros((4, N))
    deda = np.einsum('mi,mi->i', dedb, dgda)
    v_npa[0] = deda * dadn
    v_npa[1] = deda * dadp
    v_npa[3] = deda * dadalpha
    return v_npa, dedaux
def functional_derivative_loop(mol, mlfunc, dEddesc,
                               raw_desc, raw_desc_r2,
                               rho_data, density, ovlps, grid):
    """
    Core functional derivative loop for the CIDER features,
    called by NLNumInt.

    Args:
        mol (pyscf.gto.Mole): molecule object
        mlfunc (MLFunctional): Exchange functional
        dEddesc (np.ndarray): ngrid x ndesc array of energy derivatives
            with respect to the descriptors.
        raw_desc (np.ndarray): raw CIDER descriptor vectors
        raw_desc_r2 (np.ndarray): raw CIDER descriptor vectors <r^2>
            for use in functional derivative with respect to the Gaussian
            exponents
        rho_data (np.ndarray): 6 x ngrid
        density (np.ndarray): density in DF basis space
        ovlps (np.ndarray): Overlaps of the CIDER descriptor functions with
            the DF basis
        grid: contains coords and weights of the real-space grid

    Returns:
        (v_nst, v_grad, vmol) -- potential in the (n, sigma, ., tau) basis,
        the anisotropic gradient contribution, and the DF-basis potential
        matrix.
    """
    gg_dict = {
        'a0': mlfunc.a0,
        'amin': mlfunc.amin,
        'fac_mul': mlfunc.fac_mul
    }
    N = grid.weights.shape[0]
    naux = mol.auxmol.nao_nr()
    sprefac = 2 * (3 * np.pi * np.pi)**(1.0/3)
    n43 = rho_data[0]**(4.0/3)
    # Normalized density-gradient vector used by the anisotropic features.
    svec = rho_data[1:4] / (sprefac * n43 + 1e-20)
    v_npa = np.zeros((4, N))
    v_aniso = np.zeros((3, N))
    v_aux = np.zeros(naux)
    # Descriptor codes 0-2 are the semilocal inputs (n, p, alpha); all other
    # codes select a nonlocal Gaussian-convolution feature of some angular
    # momentum l, dispatched below.
    for i, d in enumerate(mlfunc.desc_order):
        if d == 0:
            v_npa[0] += dEddesc[:,i]
        elif d == 1:
            v_npa[1] += dEddesc[:,i]
        elif d == 2:
            v_npa[3] += dEddesc[:,i]
        else:
            gg_kwargs = gg_dict
            l_add = 0
            if d in [3, 10, 11]:
                # l = 0 features at three different exponent scales.
                if d == 3:
                    g = raw_desc[6]
                    ovlp = ovlps[0]
                    gr2 = raw_desc_r2[6:7]
                elif d == 10:
                    g = raw_desc[15]
                    ovlp = ovlps[3]
                    gr2 = raw_desc_r2[15:16]
                    if mlfunc.desc_version == 'c':
                        l_add = 2
                        mul = 1.0
                    else:
                        mul = 0.25**(2./3)
                    gg_kwargs = {
                        'a0': mlfunc.a0 * mul,
                        'fac_mul': mlfunc.fac_mul * mul,
                        'amin': mlfunc.amin * mul
                    }
                else:
                    g = raw_desc[16]
                    ovlp = ovlps[4]
                    gr2 = raw_desc_r2[16:17]
                    if mlfunc.desc_version == 'c':
                        mul = 2.0
                    else:
                        mul = 4**(2./3)
                    gg_kwargs = {
                        'a0': mlfunc.a0 * mul,
                        'fac_mul': mlfunc.fac_mul * mul,
                        'amin': mlfunc.amin * mul
                    }
                l = 0
            elif d == 4:
                g = raw_desc[7:10]
                gr2 = raw_desc_r2[7:10]
                ovlp = ovlps[1]
                l = 1
            elif d == 6:
                g = raw_desc[10:15]
                gr2 = raw_desc_r2[10:15]
                ovlp = ovlps[2]
                l = 2
            elif d == 5:
                # l = 1 feature dotted with the normalized gradient.
                g = raw_desc[7:10]
                gr2 = raw_desc_r2[7:10]
                ovlp = ovlps[1]
                dfmul = svec
                v_aniso += dEddesc[:,i] * g
                l = -1
            elif d == 7:
                # l = 2 feature contracted twice with the gradient.
                l = -2
                g = raw_desc[10:15]
                gr2 = raw_desc_r2[10:15]
                ovlp = ovlps[2]
                dfmul = contract21_deriv(svec)
                ddesc_dsvec = contract21(g, svec)
                v_aniso += dEddesc[:,i] * 2 * ddesc_dsvec
            elif d == 8:
                # Mixed l=2/l=1 feature; both Gaussian channels contribute.
                g2 = raw_desc[10:15]
                g2r2 = raw_desc_r2[10:15]
                ovlp2 = ovlps[2]
                g1 = raw_desc[7:10]
                g1r2 = raw_desc_r2[7:10]
                ovlp1 = ovlps[1]
                dfmul = contract21_deriv(svec, g1)
                ddesc_dsvec = contract21(g2, g1)
                ddesc_dg1 = contract21(g2, svec)
                v_aniso += dEddesc[:,i] * ddesc_dsvec
                vtmp1, dedaux1 = v_nonlocal(rho_data, grid,
                                            dEddesc[:,i] * ddesc_dg1,
                                            density, mol.auxmol, g1,
                                            g1r2, ovlp1, l=-1, **gg_kwargs)
                vtmp2, dedaux2 = v_nonlocal(rho_data, grid,
                                            dEddesc[:,i] * dfmul,
                                            density, mol.auxmol, g2,
                                            g2r2, ovlp2, l=-2, **gg_kwargs)
                vtmp = vtmp1 + vtmp2
                dedaux = dedaux1 + dedaux2
            elif d == 9:
                # l=2 feature contracted twice with the l=1 feature.
                g2 = raw_desc[10:15]
                g2r2 = raw_desc_r2[10:15]
                ovlp2 = ovlps[2]
                g1 = raw_desc[7:10]
                g1r2 = raw_desc_r2[7:10]
                ovlp1 = ovlps[1]
                dfmul = contract21_deriv(g1)
                ddesc_dg1 = 2 * contract21(g2, g1)
                vtmp1, dedaux1 = v_nonlocal(rho_data, grid,
                                            dEddesc[:,i] * ddesc_dg1,
                                            density, mol.auxmol, g1,
                                            g1r2, ovlp1, l=-1, **gg_kwargs)
                vtmp2, dedaux2 = v_nonlocal(rho_data, grid,
                                            dEddesc[:,i] * dfmul,
                                            density, mol.auxmol, g2,
                                            g2r2, ovlp2, l=-2, **gg_kwargs)
                vtmp = vtmp1 + vtmp2
                dedaux = dedaux1 + dedaux2
            else:
                raise NotImplementedError('Cannot take derivative for code %d' % d)
            # Codes 8 and 9 already computed vtmp/dedaux above.
            if d in [5, 7]:
                vtmp, dedaux = v_nonlocal(rho_data, grid,
                                          dEddesc[:,i] * dfmul,
                                          density, mol.auxmol, g,
                                          gr2, ovlp, l=l, **gg_kwargs)
            elif d in [8, 9]:
                pass
            else:
                vtmp, dedaux = v_nonlocal(rho_data, grid,
                                          dEddesc[:,i],
                                          density, mol.auxmol, g,
                                          gr2, ovlp, l=l, l_add=l_add,
                                          **gg_kwargs)
            v_npa += vtmp
            v_aux += dedaux
            # Drop references so large temporaries can be reclaimed.
            vtmp = None
            dedaux = None
    vmol = np.einsum('a,aij->ij', v_aux, mol.ao_to_aux)
    v_nst = v_basis_transform(rho_data, v_npa)
    # Fold the gradient-direction (anisotropic) terms into dE/dn and dE/dgrad.
    v_nst[0] += np.einsum('ap,ap->p', -4.0 * svec / (3 * rho_data[0] + 1e-20), v_aniso)
    v_grad = v_aniso / (sprefac * n43 + 1e-20)
    return v_nst, v_grad, vmol
def get_density_in_basis(ao_to_aux, rdm1):
    """Project the one-particle density matrix onto the auxiliary basis.

    Contracts the (naux, nao, nao) ao-to-aux transformation tensor with
    the (nao, nao) density matrix over both AO indices, returning the
    length-naux vector of auxiliary-basis density coefficients.
    """
    return np.tensordot(ao_to_aux, rdm1, axes=([1, 2], [0, 1]))
def arcsinh_deriv(x):
    """Derivative of arcsinh: d/dx asinh(x) = 1 / sqrt(x^2 + 1).

    Works elementwise on scalars or numpy arrays.
    """
    return np.reciprocal(np.sqrt(x * x + 1))
def get_chi(alpha):
    """Map the indicator alpha onto chi = 1 / (1 + alpha^2), in (0, 1]."""
    denom = 1 + alpha * alpha
    return 1 / denom
def chi_deriv(alpha):
    """Derivative of get_chi: d chi / d alpha = -2 alpha / (1 + alpha^2)^2."""
    denom = 1 + alpha * alpha
    return -2 * alpha / (denom * denom)
| 35.018088 | 87 | 0.472845 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,565 | 0.189271 |
cb2cc7212e69c1cafb5036e456b7f734ca0ecc36 | 146 | py | Python | api/apps/boxes/apps.py | polart/vagrant-registry | 47fa53a93d506f2501f333a256ccf36e49970789 | [
"MIT"
] | 8 | 2020-03-16T21:41:08.000Z | 2021-12-16T05:44:04.000Z | api/apps/boxes/apps.py | polart/vagrant-registry | 47fa53a93d506f2501f333a256ccf36e49970789 | [
"MIT"
] | 6 | 2020-03-21T11:23:18.000Z | 2022-02-27T01:16:18.000Z | api/apps/boxes/apps.py | polart/vagrant-registry | 47fa53a93d506f2501f333a256ccf36e49970789 | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class BoxesConfig(AppConfig):
    """Django application configuration for the ``apps.boxes`` app."""

    # Full dotted path of the application package.
    name = 'apps.boxes'

    def ready(self):
        # Imported purely for its side effect: registering the app's
        # signal handlers once the app registry is fully loaded.
        import apps.boxes.signals
| 16.222222 | 33 | 0.691781 | 109 | 0.746575 | 0 | 0 | 0 | 0 | 0 | 0 | 12 | 0.082192 |
cb2db5b3ea07e66e233815cd31fd91213e31f66f | 728 | py | Python | generalexercise/05.py | haxuyennt38/python-learning | c5fcfc0abfd4aebf4ce58e2f23d378703bd48430 | [
"MIT"
] | null | null | null | generalexercise/05.py | haxuyennt38/python-learning | c5fcfc0abfd4aebf4ce58e2f23d378703bd48430 | [
"MIT"
] | null | null | null | generalexercise/05.py | haxuyennt38/python-learning | c5fcfc0abfd4aebf4ce58e2f23d378703bd48430 | [
"MIT"
] | null | null | null | ##Write a program to input from the input file a familiar greeting of any length, each word on a line. Output the greeting file you just received on a single line, the words separated by a space
#Mo file voi mode='r' de doc file
with open('05_ip.txt', 'r') as fileInp:
#Dung ham read() doc toan bo du lieu tu file
Filecomplete = fileInp.read()
#Dung ham splitlines() cat du lieu theo tung dong va luu thanh danh sach
listOfligne = Filecomplete.splitlines()
#Dung ham join() noi cac dong du lieu lai cach nhau 1 khoang trang
phrasecomplete = ' '.join(listOfligne)
print(phrasecomplete)
#Mo file voi mode='w' de ghi file
with open('05_out.txt', 'w') as fileOut:
#Ghi noi dung vao file
fileOut.write(phrasecomplete)
| 36.4 | 194 | 0.737637 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 496 | 0.681319 |
cb2f7039474202ce1d528910f710e53fb8291923 | 15,642 | py | Python | pyzem/dvid/dvidio.py | janelia-flyem/pyzem | c8e82e2fea880ef7f597ff5e4c685668ed0a4caa | [
"BSD-3-Clause"
] | 2 | 2018-11-02T07:38:46.000Z | 2019-11-09T02:26:23.000Z | pyzem/dvid/dvidio.py | janelia-flyem/pyzem | c8e82e2fea880ef7f597ff5e4c685668ed0a4caa | [
"BSD-3-Clause"
] | null | null | null | pyzem/dvid/dvidio.py | janelia-flyem/pyzem | c8e82e2fea880ef7f597ff5e4c685668ed0a4caa | [
"BSD-3-Clause"
] | 2 | 2017-12-26T16:15:34.000Z | 2020-04-15T12:18:38.000Z | from __future__ import print_function
from __future__ import absolute_import
import os
import sys
from optparse import OptionParser
import json
import requests
from pyzem.dvid import dvidenv
import datetime
def compute_age(d):
    """Return the age in minutes of a record's ``'timestamp'`` field.

    The timestamp is expected in ``'%Y-%m-%d %H:%M:%S.%f'`` format (the
    format written by the ``mark_split_*`` helpers below).  Returns -1
    when the dict has no ``'timestamp'`` key.  Raises ValueError if the
    timestamp string does not match the expected format.
    """
    age = -1
    if 'timestamp' in d:
        t = datetime.datetime.strptime(d['timestamp'], '%Y-%m-%d %H:%M:%S.%f')
        dt = datetime.datetime.now() - t
        # total_seconds() includes the day component; the previous
        # ``dt.seconds`` wrapped around every 24 hours, so records older
        # than a day reported a bogus small age.
        age = dt.total_seconds() / 60
    return age
class DvidClient:
    """Thin HTTP client for a DVID node, focused on body-split bookkeeping.

    Wraps a :class:`dvidenv.DvidUrl` and issues REST calls against
    key/value data instances: split tasks/results and their property
    records, skeletons, thumbnails, body annotations and bookmarks.
    Most methods are best-effort: network/JSON failures are swallowed
    and reported via ``print`` rather than raised.
    """
    def __init__(self, host = None, port = None, uuid = None, env = None):
        # Either wrap an existing DvidEnv, or build one from host/port/uuid.
        if env:
            self._url = dvidenv.DvidUrl(env)
        else:
            self._url = dvidenv.DvidUrl(dvidenv.DvidEnv(host = host, port = port, uuid = uuid))

    def set_dvidenv(self, env):
        """Re-point this client at a different DVID environment."""
        self._url = dvidenv.DvidUrl(env)

    def has_skeleton(self, id):
        """True if the node serves a skeleton for body *id* (HTTP 200)."""
        # NOTE(review): 'get_skeleon_url' spelling comes from the DvidUrl API.
        r = requests.get(self._url.get_skeleon_url(id))
        return r.status_code == 200

    def print_split_result(self):
        """Dump every split result (and the node data it references) to stdout."""
        url = self._url.join(self._url.get_split_result_url(), 'keyrange', 'task__0/task__z')
        print(url)
        r = requests.get(self._url.join(self._url.get_split_result_url(), 'keyrange', 'task__0/task__z'))
        print(r.text)
        resultList = json.loads(r.text)
        for result in resultList:
            r = requests.get(self._url.join(self._url.get_split_result_url(), 'key', result))
            resultJson = json.loads(r.text)
            print(resultJson[dvidenv.REF_KEY])
            # Follow the '->' reference and print the referenced payload too.
            r = requests.get(self._url.join(self._url.get_node_url(), resultJson[dvidenv.REF_KEY]))
            print(r.text)

    def clear_split_task(self):
        """Delete every key in the split-task keyvalue instance."""
        keys = self.read_keys(path = self._url.get_split_task_path())
        for key in keys:
            url = self._url.get_url(self._url.get_split_task_path(), 'key', key)
            try:
                r = requests.delete(url)
            except Exception as e:
                print("request failed")
                print(url)

    def clear_split_result(self):
        """Delete every key in the split-result keyvalue instance."""
        keys = self.read_keys(path = self._url.get_split_result_path())
        for key in keys:
            url = self._url.get_url(self._url.get_split_result_path(), 'key', key)
            try:
                r = requests.delete(url)
            except Exception as e:
                print("request failed")
                print(url)

    def clear_empty_split_result(self):
        """Delete split results that carry ``committed: false``."""
        keys = self.read_split_result_keys()
        print(keys)
        for key in keys:
            url = self._url.get_url(self._url.get_split_result_path(), 'key', key)
            try:
                result = self.read_split_result(key)
                if 'committed' in result:
                    if not result['committed']:
                        print(key, result)
                        r = requests.delete(url)
            except Exception as e:
                print(e)
                print("request failed")
                print(url)

    def read_split_task_keys(self):
        """All split-task keys in the 'task__0'..'task__z' key range."""
        return self.read_keys(path = self._url.get_split_task_path(), range = ['task__0', 'task__z'])

    def read_split_result_keys(self):
        """All split-result keys in the 'task__0'..'task__z' key range."""
        return self.read_keys(path = self._url.get_split_result_path(), range = ['task__0', 'task__z'])

    def read_split_task(self, key):
        """Fetch and decode one split task; None on any failure."""
        try:
            r = requests.get(self._url.get_url(self._url.get_split_task_path(), 'key', key))
            taskJson = json.loads(r.text)
            return taskJson
        except Exception:
            return None

    def read_split_result(self, key):
        """Fetch and decode one split result; None on any failure."""
        try:
            r = requests.get(self._url.get_url(self._url.get_split_result_path(), 'key', key))
            resultJson = json.loads(r.text)
            return resultJson
        except Exception:
            return None

    def update_ref_set(self, refSet, key, source):
        """Recursively collect into *refSet* every key reachable from *key*
        through the REF_KEY ('->') chain.  *source* selects the instance:
        'task' reads split tasks, anything else reads split results.
        """
        taskJson = {}
        if source == 'task':
            taskJson = self.read_split_task(key)
        else:
            taskJson = self.read_split_result(key)
        if dvidenv.REF_KEY in taskJson:
            refTask = taskJson[dvidenv.REF_KEY]
            # The reference is a URL/path; the key is its last component.
            refTaskKey = refTask.split('/')[-1]
            refSet.add(refTaskKey)
            self.update_ref_set(refSet, refTaskKey, source)

    def clear_task_garbage(self):
        """Garbage-collect split-task entries not referenced by any task:
        freshly-seen garbage is timestamped, old garbage (>100 min) deleted.
        """
        referredSet = set()
        taskList = self.read_split_task_keys()
        for task in taskList:
            self.update_ref_set(refSet = referredSet, key = task, source = "task")
        splitKeyList = self.read_keys(keyvalue = dvidenv.get_split_task())
        for key in splitKeyList:
            if key not in referredSet:
                # NOTE(review): 'task' here is the leftover loop variable from
                # the loop above; this probably should be 'key' — confirm.
                age = self.read_split_task_age(task)
                if age < 1:
                    self.mark_split_task(key)
                elif age > 100:
                    print("removing", key)
                    self.delete_split_task(key)

    def clear_result_garbage(self):
        """Garbage-collect unreferenced split-result entries (see
        clear_task_garbage for the mark/delete policy).
        """
        referredSet = set()
        taskList = self.read_split_task_keys()
        for task in taskList:
            # NOTE(review): these keyword arguments (data=, result=) do not
            # match update_ref_set(refSet, key, source) and would raise
            # TypeError if this method ran — looks like dead/broken code.
            self.update_ref_set(data = dvidenv.get_split_task(), key = task, result = referredSet, source = "result")
        splitKeyList = self.read_keys(keyvalue = dvidenv.get_split_task())
        for key in splitKeyList:
            if key not in referredSet:
                # NOTE(review): same stale-'task' issue as clear_task_garbage.
                age = self.read_split_result_age(task)
                if age < 1:
                    self.mark_split_result(key)
                elif age > 100:
                    print("removing", key)
                    self.delete_split_result(key)

    def print_split_task(self):
        """Print a human-readable summary (signal, #seeds, age) per task."""
        r = requests.get(self._url.get_url(self._url.get_split_task_path(), 'keyrange/task__0/task__z'))
        taskList = json.loads(r.text)
        for task in taskList:
            r = requests.get(self._url.get_url(self._url.get_split_task_path(), 'key', task))
            taskJson = json.loads(r.text)
            if '->' in taskJson:
                # The task is a reference; follow it to the actual payload.
                # print taskJson['->']
                data = self.read_path(taskJson['->'])
                # print data
                taskJson = json.loads(data)
            print(task)
            print(' signal:', taskJson.get('signal'))
            print(' #seeds:', len(taskJson.get('seeds')))
            print(' Age:', self.read_split_task_age(task))

    def decode_response(self, r):
        """Decode a requests response body as JSON."""
        return json.loads(r.text)

    def read_path(self, path):
        """GET *path* relative to the node URL and return the raw body."""
        r = requests.get(self._url.join(self._url.get_node_url(), path))
        return r.text

    def read_keys(self, path = None, keyvalue = None, range = None):
        """List keys of a keyvalue instance, optionally restricted to a
        [start, end] key range.  *keyvalue* (a data name) takes precedence
        over an explicit *path*.

        NOTE(review): the parameter 'range' shadows the builtin.
        """
        print(keyvalue)
        if keyvalue:
            path = self._url.get_data_path(keyvalue)
        print(path)
        if path:
            if not range:
                url = self._url.get_url(path, 'keys')
                print(url)
                return self.decode_response(requests.get(url))
            else:
                return self.decode_response(requests.get(self._url.join(self._url.get_url(path, 'keyrange', range[0], range[1]))))

    def has_key(self, path = None, key = None, keyvalue = None):
        """True if *key* exists (i.e. read_key returns a truthy body)."""
        return True if self.read_key(path, key, keyvalue) else False

    def read_key(self, path = None, key = None, keyvalue = None):
        """Return the raw value stored under *key*, or None on any failure.

        *keyvalue* (a data name) takes precedence over an explicit *path*.
        """
        if keyvalue:
            path = self._url.get_data_path(keyvalue)
        try:
            r = requests.get(self._url.get_url(path, 'key', key))
            if r.status_code == 200:
                return r.text
        # NOTE(review): bare except also swallows KeyboardInterrupt.
        except:
            print('No', key)
            pass
        return None

    def get_env(self):
        """The underlying DvidEnv, or None if the client is unconfigured."""
        if self._url:
            return self._url.get_env()
        return None

    def read_json_from_path(self, path):
        """GET *path* and decode as JSON; prints and returns None on failure."""
        try:
            r = requests.get(self._url.get_url(path))
            result = self.decode_response(r)
            return result
        except:
            print("Failed to read", self._url.get_url(path))

    def delete_skeleton(self, bodyId):
        """Delete the '<bodyId>_swc' skeleton key."""
        self.delete_key(self.get_env().get_skeleton_name(), str(bodyId) + "_swc")

    def delete_thumbnail(self, bodyId):
        """Delete the '<bodyId>_mraw' thumbnail key."""
        self.delete_key(self.get_env().get_thumbnail_name(), str(bodyId) + "_mraw")

    def delete_body_annotation(self, bodyId):
        """Delete the annotation record for *bodyId*."""
        self.delete_key(self.get_env().get_body_annotation_name(), bodyId)

    def delete_split_task(self, key):
        """Delete one split-task entry."""
        self.delete_key(dvidenv.get_split_task(), key)

    def delete_split_result(self, key):
        """Delete one split-result entry."""
        self.delete_key(dvidenv.get_split_result(), key)

    def delete_key(self, dataname, key):
        """Best-effort DELETE of *key* in the *dataname* keyvalue instance."""
        print('Deleting', dataname, key)
        try:
            requests.delete(self._url.get_keyvalue_url(dataname, key))
        except Exception:
            pass

    def read_split_task_property(self, key):
        """Return the property record of a split task, with computed 'age'."""
        p = json.loads(self.read_key(self._url.get_split_task_property_path(key)))
        p['age'] = compute_age(p)
        return p

    def write_key(self, name, key, data):
        """POST *data* as the value of *key* in keyvalue instance *name*."""
        requests.post(self._url.get_keyvalue_url(name, key), data)

    def is_split_task_processed(self, key):
        """True if the task's property record carries ``processed: true``."""
        v = self.read_key(keyvalue = dvidenv.get_split_task_property(), key = key)
        if v:
            data = json.loads(v)
            return data.get('processed', False)
        return False

    def is_split_result_processed(self, key):
        """True if the result's property record carries ``processed: true``."""
        v = self.read_key(keyvalue = dvidenv.get_split_result_property(), key = key)
        data = json.loads(v) if v else None
        processed = data.get('processed', False) if data else False
        # if not processed:
        #     v = self.read_key(keyvalue = dvidenv.get_split_result(), key = key)
        #     data = json.loads(v) if v else None
        #     if data:
        #         processed = "committed" in data
        return processed

    def has_split_result(self, key):
        """True if a split result exists under *key*."""
        return self.has_key(keyvalue = dvidenv.get_split_result(), key = key)

    def mark_split_task(self, key):
        """Stamp the task's property record with the current wall-clock time."""
        v = self.read_key(keyvalue = dvidenv.get_split_task_property(), key = key)
        data = {}
        if v:
            data = json.loads(v)
        # str(datetime.now()) yields the '%Y-%m-%d %H:%M:%S.%f' format
        # that compute_age / read_*_time_stamp parse back.
        data['timestamp'] = str(datetime.datetime.now())
        self.write_key(dvidenv.get_split_task_property(), key, json.dumps(data))

    def set_split_task_processed(self, key):
        """Set ``processed: true`` in the task's property record."""
        v = self.read_key(keyvalue = dvidenv.get_split_task_property(), key = key)
        data = {}
        if v:
            data = json.loads(v)
        data['processed'] = True
        self.write_key(dvidenv.get_split_task_property(), key, json.dumps(data))

    def mark_split_result(self, key):
        """Stamp the result's property record with the current time."""
        v = self.read_key(keyvalue = dvidenv.get_split_result_property(), key = key)
        data = {}
        if v:
            data = json.loads(v)
        data['timestamp'] = str(datetime.datetime.now())
        self.write_key(dvidenv.get_split_result_property(), key, json.dumps(data))

    def set_split_result_processed(self, key):
        """Set ``processed: true`` in the result's property record."""
        v = self.read_key(keyvalue = dvidenv.get_split_result_property(), key = key)
        data = {}
        if v:
            data = json.loads(v)
        data['processed'] = True
        self.write_key(dvidenv.get_split_result_property(), key, json.dumps(data))

    def read_split_task_time_stamp(self, key):
        """Parsed 'timestamp' of the task's property record, or None."""
        text = self.read_key(keyvalue = dvidenv.get_split_task_property(), key = key)
        try:
            d = json.loads(text)
            if 'timestamp' in d:
                return datetime.datetime.strptime(d['timestamp'], '%Y-%m-%d %H:%M:%S.%f')
        except Exception as e:
            pass
        return None

    def read_split_result_time_stamp(self, key):
        """Parsed 'timestamp' of the result's property record, or None."""
        v = self.read_key(keyvalue = dvidenv.get_split_result_property(), key = key)
        if v:
            d = json.loads(v)
            if 'timestamp' in d:
                return datetime.datetime.strptime(d['timestamp'], '%Y-%m-%d %H:%M:%S.%f')
        return None

    def read_split_task_age(self, key):
        """Age of the task's timestamp in minutes; -1 when unknown.

        NOTE(review): ``dt.seconds`` ignores the day component, so the
        reported age wraps around every 24 hours (same issue fixed in
        the module-level compute_age).
        """
        age = -1
        t = self.read_split_task_time_stamp(key)
        if t:
            dt = datetime.datetime.now() - t
            age = dt.seconds / 60
        return age

    def read_split_result_age(self, key):
        """Age of the result's timestamp in SECONDS (not minutes); 0 when
        unknown.  Same day-wrapping caveat as read_split_task_age.
        """
        t = self.read_split_result_time_stamp(key)
        age = 0
        if t:
            dt = datetime.datetime.now() - t
            age = dt.seconds
        return age

    def create_data(self, data_type, name, versioned = True):
        """Create a new data instance of *data_type* named *name* in the repo."""
        print("creating data: ", data_type, name)
        if data_type and name:
            payload = {"typename": data_type, "dataname": name, "version": "1" if versioned else "0"}
            print(payload)
            r = requests.post(self._url.get_url(self._url.get_repo_path(), "instance"), json=payload)
            print(r.status_code, r.text)

    def read_bookmark(self, user):
        """Return the decoded bookmark annotations of *user* (or None)."""
        path = self._url.get_bookmark_path(user = user)
        result = self.read_json_from_path(path)
        return result

    def delete_bookmark(self, pos):
        """Best-effort delete of the bookmark element at (x, y, z) *pos*."""
        if len(pos) == 3:
            try:
                requests.delete(self._url.get_bookmark_element_url(pos=pos))
            except Exception:
                pass
# class DvidService:
# def __init__(self, host = None, port = None, uuid = None):
# self._reader = DvidReader(host, port, uuid)
#def get_split_task_list(self):
class Librarian:
    """Placeholder for a librarian service endpoint (host/port holder).

    No connection is opened here; the endpoint is merely recorded.
    """

    def __init__(self, host = None, port = None):
        # Stash the endpoint for later use.
        self._host, self._port = host, port
if __name__ == '__main__':
    # Ad-hoc manual smoke test: connect to a local DVID node and fetch one
    # user's bookmarks.  (A large scratchpad of commented-out experiments —
    # alternate hosts, split-task cleanup calls, key listings — previously
    # lived here and was removed as dead code.)
    dc = DvidClient("127.0.0.1", 1600, "c315")
    dc.get_env().set_segmentation("segmentation")
    bookmarks = dc.read_bookmark(user="zhaot")
| 37.510791 | 196 | 0.609385 | 12,571 | 0.80367 | 0 | 0 | 0 | 0 | 0 | 0 | 3,282 | 0.20982 |
cb2f8c67b0e2ca2a922fcafb3a06a1e2018da4f3 | 3,970 | py | Python | tf src/app_tf.py | aj-naik/Emotion-Recognistion | 65068c357b8594ef486da2056e91c9cc83a1c1fe | [
"MIT"
] | null | null | null | tf src/app_tf.py | aj-naik/Emotion-Recognistion | 65068c357b8594ef486da2056e91c9cc83a1c1fe | [
"MIT"
] | null | null | null | tf src/app_tf.py | aj-naik/Emotion-Recognistion | 65068c357b8594ef486da2056e91c9cc83a1c1fe | [
"MIT"
] | 1 | 2021-06-23T20:07:47.000Z | 2021-06-23T20:07:47.000Z | import tkinter as tk
from tkinter import *
import cv2
from PIL import Image, ImageTk
import numpy as np
import tensorflow as tf
from tensorflow.keras import layers
from tensorflow import keras
emotion_model = keras.Sequential(
[
layers.Conv2D(32, kernel_size=(3,3), activation='relu', input_shape = (48,48,1)),
layers.Conv2D(64, kernel_size=(3,3), activation='relu'),
layers.MaxPooling2D(pool_size=(2,2)),
layers.Dropout(0.25),
layers.Conv2D(128, kernel_size=(3,3), activation='relu'),
layers.MaxPooling2D(pool_size=(2,2)),
layers.Conv2D(128, kernel_size=(3,3), activation='relu'),
layers.MaxPooling2D(pool_size=(2,2)),
layers.Dropout(0.25),
layers.Flatten(),
layers.Dense(1024, activation='relu'),
layers.Dropout(0.5),
layers.Dense(7, activation='softmax')
]
)
emotion_model.load_weights('model_tf.h5')
cv2.ocl.setUseOpenCL(False)
emotion_dict = {0:"Angry",1:"Disgusted",2:"Fearful",3:"Happy",4:"Neutral",5:"Sad",6:"Surprised"}
emoji_dist={0:"./emojis/1.jpg",1:"./emojis/2.jpg",2:"./emojis/3.jpg",3:"./emojis/4.jpg",4:"./emojis/5.png",5:"./emojis/6.jpg",6:"./emojis/7.jpg"}
global last_frame1
last_frame1 = np.zeros((480, 640, 3), dtype=np.uint8)
global cap1
show_text=[0]
def web_cam():
    """Grab one webcam frame, run face detection + emotion classification,
    draw the result into the ``lmain`` Tk label, and re-schedule itself
    via ``lmain.after``.  Updates the shared ``show_text`` cell with the
    last predicted class index (consumed by ``emoji``).
    """
    global cap1
    # NOTE(review): the capture device is re-opened on every scheduled
    # call (every ~10 ms); opening it once outside the loop would be
    # cheaper — confirm before changing.
    cap1 = cv2.VideoCapture(0)
    if not cap1.isOpened():
        print("Cant open the camera")
    flag1, frame1 = cap1.read()
    frame1 = cv2.resize(frame1,(600,500))
    # Haar cascade file is expected in the working directory.
    bounding_box = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
    gray_frame = cv2.cvtColor(frame1, cv2.COLOR_BGR2GRAY)
    num_faces = bounding_box.detectMultiScale(gray_frame,scaleFactor=1.3, minNeighbors=5)
    for (x, y, w, h) in num_faces:
        cv2.rectangle(frame1, (x, y-50), (x+w, y+h+10), (255, 0, 0), 2)
        roi_gray_frame = gray_frame[y:y + h, x:x + w]
        # Resize crop to 48x48 and add channel + batch axes -> (1,48,48,1).
        cropped_img = np.expand_dims(np.expand_dims(cv2.resize(roi_gray_frame, (48, 48)), -1), 0)
        prediction = emotion_model.predict(cropped_img)
        maxindex = int(np.argmax(prediction))
        cv2.putText(frame1, emotion_dict[maxindex], (x+20, y-60), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2, cv2.LINE_AA)
        # Publish the prediction for the emoji panel.
        show_text[0]=maxindex
    if flag1 is None:
        print ("Major error!")
    elif flag1:
        global last_frame1
        last_frame1 = frame1.copy()
        # OpenCV frames are BGR; PIL expects RGB.
        pic = cv2.cvtColor(last_frame1, cv2.COLOR_BGR2RGB)
        img = Image.fromarray(pic)
        imgtk = ImageTk.PhotoImage(image=img)
        # Keep a reference on the label so Tk does not GC the image.
        lmain.imgtk = imgtk
        lmain.configure(image=imgtk)
        lmain.after(10, web_cam)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        exit()
def emoji():
    """Refresh the emoji panel to match the last detected emotion.

    Reads the current class index from the shared ``show_text`` cell,
    loads the matching emoji image, updates the ``lmain2``/``lmain3``
    Tk labels, and re-schedules itself via ``after``.
    """
    frame2 = cv2.imread(emoji_dist[show_text[0]])
    # OpenCV loads images as BGR; convert to RGB before handing to PIL.
    # (Bug fix: the RGB conversion was computed but the unconverted BGR
    # frame was displayed, swapping the red/blue channels.)
    pic2 = cv2.cvtColor(frame2, cv2.COLOR_BGR2RGB)
    img2 = Image.fromarray(pic2)
    imgtk2 = ImageTk.PhotoImage(image=img2)
    # Keep a reference on the label so Tk does not GC the image.
    lmain2.imgtk2 = imgtk2
    lmain3.configure(text=emotion_dict[show_text[0]], font=('arial', 45, 'bold'))
    lmain2.configure(image=imgtk2)
    lmain2.after(10, emoji)
if __name__ == '__main__':
    # Build the Tk UI: heading, webcam panel (lmain), emoji panel (lmain2),
    # emotion caption (lmain3) and a quit button; then start the two
    # self-rescheduling update loops and enter the Tk main loop.
    root=tk.Tk()
    # NOTE(review): "Recognistion" typo is in the displayed strings;
    # left unchanged since it is user-visible runtime text.
    heading2=Label(root,text="Emotion Recognistion",pady=20, font=('arial',45,'bold'),bg='white',fg='#CDCDCD')
    heading2.pack()
    lmain = tk.Label(master=root,padx=50,bd=10)
    lmain2 = tk.Label(master=root,bd=10)
    lmain3=tk.Label(master=root,bd=10,fg="#CDCDCD",bg='black')
    lmain.pack(side=LEFT)
    lmain.place(x=50,y=250)
    lmain3.pack()
    lmain3.place(x=960,y=250)
    lmain2.pack(side=RIGHT)
    lmain2.place(x=900,y=350)
    root.title("Emotion Recognistion")
    root.geometry("1400x900+100+10")
    root['bg']='black'
    exitbutton = Button(root, text='Quit',fg="red",command=root.destroy,font=('arial',25,'bold')).pack(side = BOTTOM)
    # Kick off the webcam and emoji refresh loops (they re-arm via after()).
    web_cam()
    emoji()
    root.mainloop()
| 35.446429 | 145 | 0.623174 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 463 | 0.116625 |
cb31d3dbb2f45257182e33ad028503ef96676702 | 14,032 | py | Python | ostap/logger/mute.py | TatianaOvsiannikova/ostap | a005a78b4e2860ac8f4b618e94b4b563b2eddcf1 | [
"BSD-3-Clause"
] | 14 | 2017-03-24T12:38:08.000Z | 2022-02-21T05:00:57.000Z | ostap/logger/mute.py | TatianaOvsiannikova/ostap | a005a78b4e2860ac8f4b618e94b4b563b2eddcf1 | [
"BSD-3-Clause"
] | 10 | 2019-03-08T18:48:42.000Z | 2022-03-22T11:59:48.000Z | ostap/logger/mute.py | TatianaOvsiannikova/ostap | a005a78b4e2860ac8f4b618e94b4b563b2eddcf1 | [
"BSD-3-Clause"
] | 11 | 2017-03-23T15:29:58.000Z | 2022-02-21T05:03:57.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# =============================================================================
## @file
# Module with some simple but useful utilities
# - suppression of stdout/stderr
# @author Vanya BELYAEV Ivan.Belyaev@itep.ru
# @date 2013-02-10
#
# =============================================================================
"""Module with some simple but useful utilities"""
# =============================================================================
__version__ = "$Revision$"
__author__ = "Vanya BELYAEV Ivan.Belyaev@itep.ru"
__date__ = "2013-02-10"
# =============================================================================
__all__ = (
#
'tee_py' , ## tee for Python's printouts
'tee_cpp' , ## tee for C++'s printouts
'output' , ## redirect stdout/stderr into the file
'mute_py' , ## suppress stdout/strerr Python printout
'silence_py' , ## ditto
'mute' , ## context manager to suppress stdout/strerr printout
'silence' , ## ditto
##
'TeeCpp' , ## context manager (t ee for C/C++ code)
'TeePy' , ## context manager (tee for python code)
'MuteC' , ## context manager (mute for C/C++ code)
'MutePy' , ## context manager (mute for python code)
'OutputC' , ## context manager (output for C/C++ code)
)
# =============================================================================
import sys, os ## attention here!!
# =============================================================================
## @class MutePy
# Very simple context manager to suppress python printout
class MutePy(object):
    """Context manager that silences Python-level stdout and/or stderr.

    While active, the selected stream(s) are replaced with a do-nothing
    writer, so ``print`` output disappears.  Output produced by C/C++
    code is not affected (see MuteC for that), and raised exceptions
    still propagate normally.
    """

    class _DevNull(object):
        # Minimal file-like sink: accept and discard every write.
        def write(self, *args, **kwargs):
            pass

    def __init__(self, out=True, err=False):
        self._out = out
        self._err = err

    def __enter__(self):
        # Remember the live streams so __exit__ can restore them.
        self.stdout = sys.stdout
        self.stderr = sys.stderr
        if self._out:
            sys.stdout = MutePy._DevNull()
        if self._err:
            sys.stderr = MutePy._DevNull()
        return self

    def __exit__(self, *_):
        sys.stdout = self.stdout
        sys.stderr = self.stderr
# ============================================================================
## @class MuteC
# context manager to suppress pythion prinout
# the actual code is stallen from
# http://stackoverflow.com/questions/11130156/suppress-stdout-stderr-print-from-python-functions
# A fix is added for "IOError: [Errno 24] Too many open files" :
# original code leaks the file descriptors
class MuteC(object):
    """A context manager for doing a ``deep suppression'' of stdout and stderr in
    Python, i.e. will suppress all print, even if the print originates in a
    compiled C/Fortran sub-function.
    This will not suppress raised exceptions, since exceptions are printed
    to stderr just before a script exits, and after the context manager has
    exited (at least, I think that is why it lets exceptions through).
    stallen from
    http://stackoverflow.com/questions/11130156/suppress-stdout-stderr-print-from-python-functions
    """
    #
    ## class variables: dev-null device & instance counter
    # The dev-null fd is shared by all instances and reference-counted
    # via _cnt so it is opened once and closed when the last instance dies.
    _devnull = 0
    _cnt = 0

    def __init__( self , out = True , err = False ):
        self._out = out
        self._err = err
        # increment instance counter
        self.__class__._cnt += 1
        # create dev-null if not done yet
        if not self.__class__._devnull :
            self.__class__._devnull = os.open ( os.devnull , os.O_WRONLY )

    def __del__ ( self ) :
        # decrement instance counter
        self.__class__._cnt -= 1
        # close dev-null if not done yet
        if self.__class__._cnt <= 0 and self.__class__._devnull :
            os.close ( self.__class__._devnull )
            self.__class__._devnull = 0

    ## context-manager
    def __enter__(self):
        ## Save the actual stdout (1) and stderr (2) file descriptors.
        self.save_fds = os.dup(1), os.dup(2)  # leak was here !!!
        ## mute it!  Redirecting fds 1/2 silences C/C++ writes as well.
        if self._out : os.dup2 ( self.__class__._devnull , 1 )  ## C/C++
        if self._err : os.dup2 ( self.__class__._devnull , 2 )  ## C/C++
        return self

    ## context-manager
    def __exit__(self, *_):
        # Re-assign the real stdout/stderr back to (1) and (2) (C/C++)
        if self._err : os.dup2 ( self.save_fds[1] , 2 )
        if self._out : os.dup2 ( self.save_fds[0] , 1 )
        # fix the file descriptor leak
        # (there were no such line in example, and it causes
        # the sad: "IOError: [Errno 24] Too many open files"
        os.close ( self.save_fds[1] )
        os.close ( self.save_fds[0] )
# =============================================================================
## dump all stdout/stderr information (including C/C++) into separate file
# @code
# with output ('output.txt') :
# print 'ququ!'
# @endcode
# @see MuteC
class OutputC(object) :
    """Dump all stdout/stderr information into separate file:
    >>> with output ('output.txt') :
    ...     print 'ququ!'

    Works at the file-descriptor level (fds 1/2), so C/C++ output is
    captured as well as Python's.
    """
    ## constructor: file name
    def __init__ ( self , filename , out = True , err = False ) :
        """Constructor
        """
        self._out  = out
        self._err  = err
        self._file = open ( filename , 'w' )

    ## context-manager
    def __enter__(self):
        # Flush both layers first so buffered output lands on the
        # original streams, not in the capture file.
        if self._out : sys.stdout.flush()
        if self._err : sys.stderr.flush()
        self._file.flush()
        self._file.__enter__ ()
        ## Save the actual stdout (1) and stderr (2) file descriptors.
        self.save_fds = os.dup(1), os.dup(2)  # leak was here !!!
        ## mute it!
        if self._out : os.dup2 ( self._file.fileno() , 1 )  ## C/C++
        if self._err : os.dup2 ( self._file.fileno() , 2 )  ## C/C++
        return self

    ## context-manager
    def __exit__( self , *_ ):
        if self._out : sys.stdout.flush()
        if self._err : sys.stderr.flush()
        self._file.flush()
        # Re-assign the real stdout/stderr back to (1) and (2) (C/C++)
        if self._err : os.dup2 ( self.save_fds[1] , 2 )
        if self._out : os.dup2 ( self.save_fds[0] , 1 )
        # fix the file descriptor leak
        # (there were no such line in example, and it causes
        # the sad: "IOError: [Errno 24] Too many open files"
        os.close ( self.save_fds[1] )
        os.close ( self.save_fds[0] )
        self._file.__exit__ ( *_ )
        sys.stdout.flush()
        sys.stderr.flush()
# =============================================================================
## very simple context manager to duplicate Python-printout into file ("tee")
# into separate file
# @code
# with tee('tee.txt') :
# print 'ququ!'
# @endcode
# @attention: only Python printouts are grabbed
# @author Vanya BELYAEV Ivan.Belyaev@itep.ru
# date 2012-07-06
class TeePy(object) :
    """Very simple context manager to duplicate Python-printout into file (``tee'')
    into separate file
    >>> with tee('tee.txt') :
    ...     print 'ququ!'
    Unfortunately only Python printouts are grabbed
    (sys.stdout is swapped; fd-level C/C++ output is untouched).
    """
    ## constructor
    def __init__( self , filename ):
        self._file = open ( filename , 'w' )

    ## context manager
    def __enter__(self):
        self._file . __enter__ ()
        ## helper class forwarding each write to both the original
        ## stream and the log file
        class _Tee(object):
            def __init__ ( self , the_file , the_stream ) :
                self._stream = the_stream
                self._log    = the_file
            def write(self,*args) :
                self._stream .write ( *args )
                self._log    .write ( *args )
        # Save the real stdout and install the forwarding proxy.
        self.stdout = sys.stdout
        sys.stdout  = _Tee ( self._file , self.stdout )
        return self

    ## context manager
    def __exit__(self, *_):
        self._file.flush ()
        self.stdout.flush ()
        # Restore the genuine stdout before closing the log file.
        sys.stdout = self.stdout
        self._file.__exit__ ( *_ )
# =============================================================================
## very simple context manager to duplicate C++-printout into file ("tee")
# into separate file
# @code
# >>> with tee_cpp('tee.txt') :
# ... some_cpp_function()
# @endcode
# @see Ostap::Utils::Tee
# @attention: Python&C-printouts probably are not affected
# @author Vanya BELYAEV Ivan.Belyaev@itep.ru
# date 2012-07-07
class TeeCpp(object) :
    """Very simple context manager to duplicate C++-printout into file
    into separate file
    >>> with tee_cpp('tee.txt') :
    ...     some_cpp_function()

    Delegates to Ostap::Utils::Tee on the C++ side; Python-level
    printouts are probably not captured.
    """
    def __init__ ( self , fname ) :
        # Flush Python buffers before the C++ tee takes over the streams.
        sys.stdout.flush ()
        sys.stderr.flush ()
        from ostap.core.core import cpp
        self.__tee = cpp.Ostap.Utils.Tee ( fname )

    ## context manager
    def __enter__ ( self ) :
        sys.stdout.flush ()
        sys.stderr.flush ()
        self.__tee.enter ()
        return self

    ## context manager
    def __exit__  ( self , *_ ) :
        self.__tee.exit ()
        del self.__tee
        sys.stdout.flush ()
        sys.stderr.flush ()
# =============================================================================
## very simple context manager to duplicate Python-printout into file ("tee")
# into separate file
# @code
# >>> with tee_py ('tee.txt') :
# ... print 'ququ!'
# @endcode
# @attention: only Python prinouts are grabbed
# @author Vanya BELYAEV Ivan.Belyaev@itep.ru
# date 2012-07-06
def tee_py(filename):
    """Duplicate Python-level stdout into *filename* (``tee``).

    >>> with tee_py('tee.txt'):
    ...     print('ququ!')

    Only Python printouts are captured; C/C++ output is not
    (use tee_cpp for that).
    """
    return TeePy(filename)
# =============================================================================
## very simple context manager to duplicate C++-printout into file ('tee')
# into separate file
# @code
# >>> with tee_cpp ('tee.txt') : some_cpp_code()
# @endcode
# @attention: only C/C++ printouts are grabbed
# @author Vanya BELYAEV Ivan.Belyaev@itep.ru
# date 2012-07-06
def tee_cpp(filename):
    """Duplicate C/C++-level printout into *filename* (``tee``).

    >>> with tee_cpp('tee.txt'):
    ...     some_cpp_code()

    Only C/C++ printouts are captured; use tee_py for Python output.
    """
    return TeeCpp(filename)
# =============================================================================
## simple context manager to redirect all (C/C++/Python) printout
# into separate file
# @code
# >>> with output ('output.txt') :
# ... print 'ququ!'
# @endcode
# @author Vanya BELYAEV Ivan.Belyaev@itep.ru
# date 2012-07-06
def output(fname, cout=True, cerr=False):
    """Redirect all (C/C++/Python) printout into *fname*.

    >>> with output('output.txt'):
    ...     print('ququ!')

    *cout*/*cerr* select which of stdout/stderr are redirected.
    """
    return OutputC(fname, out=cout, err=cerr)
# =============================================================================
## simple context manager to suppress C/C++-printout
#
# @code
# >>> with mute () :
# ... <some code here>
# @endcode
def mute(cout=True, cerr=False):
    """Suppress C/C++ (and Python, via fd redirection) printout.

    >>> with mute():
    ...     pass  # noisy code here
    """
    return MuteC(out=cout, err=cerr)
# =============================================================================
## simple context manager to suppress Python-printout
#
# @code
# >>> with mute_py () :
# ... <some code here>
# @endcode
def mute_py(cout=True, cerr=False):
    """Suppress Python-level printout (sys.stdout/sys.stderr only).

    >>> with mute_py():
    ...     pass  # noisy code here
    """
    return MutePy(out=cout, err=cerr)
# ==============================================================================
## ditto
## Convenience aliases: the ``silence*`` names behave exactly like ``mute*``.
silence_py = mute_py # ditto
silence    = mute    # ditto
# =============================================================================
if '__main__' == __name__ :
    # When run as a script, print the standard ostap module banner and
    # the module's metadata/exported symbols.
    from ostap.logger.logger import getLogger
    logger = getLogger ('ostap.logger.mute')
    from ostap import banner
    logger.info ( __file__  + '\n' + banner )
    logger.info ( 80*'*'   )
    logger.info ( __doc__  )
    logger.info ( 80*'*' )
    logger.info ( ' Author  : %s' %         __author__    )
    logger.info ( ' Version : %s' %         __version__   )
    logger.info ( ' Date    : %s' %         __date__      )
    logger.info ( ' Symbols : %s' %  list ( __all__     ) )
    logger.info ( 80*'*' )
## The END
# =============================================================================
| 34.141119 | 98 | 0.503278 | 7,062 | 0.503278 | 0 | 0 | 0 | 0 | 0 | 0 | 8,365 | 0.596137 |
cb321935a3fe0e9c8f187816524cd352184e7a6b | 629 | py | Python | day_1/day1.py | mickeelm/aoc2019 | 7fd532d2237e1cf0686c9b331a2b97515ee94c03 | [
"Unlicense"
] | 1 | 2021-02-02T08:32:36.000Z | 2021-02-02T08:32:36.000Z | day_1/day1.py | mickeelm/aoc2019 | 7fd532d2237e1cf0686c9b331a2b97515ee94c03 | [
"Unlicense"
] | null | null | null | day_1/day1.py | mickeelm/aoc2019 | 7fd532d2237e1cf0686c9b331a2b97515ee94c03 | [
"Unlicense"
] | null | null | null | def fuel_required_single_module(mass):
fuel = int(mass / 3) - 2
return fuel if fuel > 0 else 0
def fuel_required_multiple_modules(masses):
total_fuel = 0
for mass in masses:
total_fuel += fuel_required_single_module(mass)
return total_fuel
def recursive_fuel_required_single_module(mass):
total_fuel = 0
while mass := fuel_required_single_module(mass):
total_fuel += mass
return total_fuel
def recursive_fuel_required_multiple_modules(masses):
total_fuel = 0
for mass in masses:
total_fuel += recursive_fuel_required_single_module(mass)
return total_fuel
| 25.16 | 65 | 0.72814 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
cb36446e17dc2a78bb119a8682ef3d02f6e62079 | 22,403 | py | Python | saascs_sschoreo/feature_choreo/task_helper.py | muraligo/featuretimeline | 24c4f619079380dbe43cb8efae9455ae80de3ae0 | [
"MIT"
] | null | null | null | saascs_sschoreo/feature_choreo/task_helper.py | muraligo/featuretimeline | 24c4f619079380dbe43cb8efae9455ae80de3ae0 | [
"MIT"
] | null | null | null | saascs_sschoreo/feature_choreo/task_helper.py | muraligo/featuretimeline | 24c4f619079380dbe43cb8efae9455ae80de3ae0 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 09 15:44:40 2020
@author: mugopala
Helper functions for tasks
"""
import os
import stat
import json
import threading
import time
import queue as stdq
import csv
from io import StringIO
from .helpers import file_reader
from . import task_consts, common_entities
from fabric2.tasks import task
from fabric2 import Connection
global _unqid
_unqid = int(1)
global _FILEPERMS
_FILEPERMS = int(0)
_FILEPERMS = stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH | stat.S_IXGRP | stat.S_IXOTH
@task
def perform_jira(conn, lggr, tskspec):
lggr.debug(" %s" % tskspec)
@task
def perform_shell(conn, lggr, tskspec):
global _unqid
global _FILEPERMS
result = None
lggr.debug(" %s" % tskspec)
if tskspec.locatespec['type'] == 'NONE':
pass
else:
tskspec.location.read_content()
lggr.debug(" %s" % tskspec.location.textdata)
scrptspath = "/Users/mugopala/tmp/script%d.sh" % _unqid
_unqid = _unqid + 1
with open(scrptspath, 'w') as scrptf:
scrptf.write(tskspec.location.textdata)
scrptf.close()
# make this file executable
os.chmod(scrptspath, _FILEPERMS)
result = conn.run(scrptspath)
if result.failed:
raise common_entities.M3GeneralChoreographyException('Execution of task {} failed with {}'.format(tskspec.location, result.stderr))
else:
return result.stdout
@task
def perform_chef(conn, lggr, tskspec):
lggr.debug(" %s" % tskspec)
@task
def perform_terraform(conn, lggr, tskspec):
lggr.debug(" %s" % tskspec)
class M3TaskPerformer(threading.Thread):
def __init__(self, task_queue, result_queue, lggr, appconfig):
super(M3TaskPerformer, self).__init__()
if appconfig['ENVNAME'] == 'local':
self.host = 'mugopala@localhost'
else:
self.host = None
self.logger = lggr
self.conn = Connection(self.host)
# TODO implement any other environment initialization
self.task_queue = task_queue
self.result_queue = result_queue
def run(self):
proc_name = self.name
while True:
taskval = self.task_queue.get()
if taskval is None:
# Poison pill means shutdown
self.logger.debug('{}: Exiting'.format(proc_name))
# ensure producer also ends
self.result_queue.put(None)
self.task_queue.task_done()
break
self.logger.debug('{}: Performing {}'.format(proc_name, taskval))
# TODO make task something that is executable by including a __call__ method in there
# answer = next_task()
# for now execute in traditional way
if taskval.executor == task_consts.M3TaskExecutor.MANUAL:
perform_jira(self.conn, self.logger, taskval.specification)
if taskval.executor == task_consts.M3TaskExecutor.SHELL:
perform_shell(self.conn, self.logger, taskval.specification)
if taskval.executor == task_consts.M3TaskExecutor.CHEF:
perform_chef(self.conn, self.logger, taskval.specification)
if taskval.executor == task_consts.M3TaskExecutor.TERRAFORM:
perform_terraform(self.conn, self.logger, taskval.specification)
self.task_queue.task_done()
# if it gets here without an exception that means it is successful
self.result_queue.put(taskval)
class M3TaskSetStateManager(threading.Thread):
def __init__(self, task_queue, result_queue, lggr, tskstage, firsttaskset, appconfig):
super(M3TaskSetStateManager, self).__init__()
# TODO implement any environment initialization
self.task_queue = task_queue
self.result_queue = result_queue
self.stage = tskstage
self.tasksetsinstage = {}
self.taskorderinsets = {}
self.tasksinsets = {}
self.logger = lggr
q = stdq.SimpleQueue()
q.put(firsttaskset)
while (not q.empty()):
tasksetval = q.get()
tasksetval.state = task_consts.M3TaskState.READY
self.tasksetsinstage[tasksetval.name] = tasksetval
_tsksinset = {}
for _ix in range(len(tasksetval.tasks)):
_tsksinset[tasksetval.tasks[_ix].name] = _ix
self.tasksinsets[tasksetval.tasks[_ix].name] = tasksetval
self.taskorderinsets[tasksetval.name] = _tsksinset
for tsksetinst in tasksetval.successors:
q.put(tsksetinst)
print("%s initialized" % self.getName())
def run(self):
proc_name = self.name
while True:
taskval = self.result_queue.get()
if taskval is None:
# Poison pill means shutdown
self.logger.debug('{}: Exiting'.format(proc_name))
break
self.logger.debug('{}: Completing {}'.format(proc_name, taskval))
taskval.state = task_consts.M3TaskState.DONE
_tskset = self.tasksinsets[taskval.name]
if not _tskset.alldone():
# while tasks in current set are NOT DONE, keep rolling through them
_tsksinset = self.taskorderinsets[_tskset.name]
_ix1tsk = _tsksinset[taskval.name]
_ix1tsk += 1
if _ix1tsk < len(_tskset.tasks):
_tskset.tasks[_ix1tsk].state = task_consts.M3TaskState.RUNNING
self.task_queue.put(_tskset.tasks[_ix1tsk])
continue
_alldone = True
# task set done, look at successors
_tskset.state = task_consts.M3TaskState.DONE
if len(_tskset.successors) <= 0:
# if we reach a tail task set, check if all task sets are DONE
# if so, inject a poison pill
for tsksetnm in self.tasksetsinstage:
if self.tasksetsinstage[tsksetnm].state == task_consts.M3TaskState.DONE:
continue
else:
_alldone = False
break
if _alldone == True:
# inject poison pill
self.task_queue.put(None)
continue
# loop thru successors adding any whose predecessors are all done
for tsksetinst in _tskset.successors:
_tskready = True
if len(tsksetinst.predecessors) > 0:
for tskpred in tsksetinst.predecessors:
if tskpred.state == task_consts.M3TaskState.DONE:
continue
_tskready = False
break
if _tskready == True:
tsksetinst.state = task_consts.M3TaskState.RUNNING
tskinst = tsksetinst.tasks[0]
tskinst.state = task_consts.M3TaskState.RUNNING
self.task_queue.put(tskinst)
def cs_parse_json_task(lggr, myapihandler, config, tskdict):
tsknote = tskdict['note'] if 'note' in tskdict else None
tskname = None
tsktext = []
tskexec = None
tsktype = None
tskexekey = None
tskteam = None
tskfailure = None
if 'name' not in tskdict:
raise common_entities.M3GeneralChoreographyException('Missing vital property in task specification')
else:
tskname = tskdict['name']
if 'type' not in tskdict:
raise common_entities.M3GeneralChoreographyException('Missing vital property in task specification')
else:
tsktype = task_consts.M3TaskType.from_name(tskdict['type'])
if 'team' not in tskdict:
raise common_entities.M3GeneralChoreographyException('Missing vital property in task specification')
else:
tskteam = tskdict['team']
if 'textlines' not in tskdict:
raise common_entities.M3GeneralChoreographyException('Missing vital property in task specification')
else:
for _tsktxit in tskdict['textlines']:
tsktext.append(_tsktxit)
if 'failure' in tskdict:
tskfailure = task_consts.M3TaskExecutor.from_name(tskdict['failure'])
else:
tskfailure = task_consts.M3TaskExecutor.MANUAL
if tsktype == task_consts.M3TaskType.CHECK:
tskexec = task_consts.M3TaskExecutor.CHECK
else:
if 'mode' not in tskdict:
raise common_entities.M3GeneralChoreographyException('Missing vital property in task specification')
else:
tskexec = task_consts.M3TaskExecutor.from_name(tskdict['mode'])
tskexekey = tskexec.section_name
taskval = None
# TODO Constructor for M3Task should take an array for tsktext
if tsknote is None:
taskval = task_consts.M3Task(tskname, tsktype, tskteam, tskexec, tskexekey,
tsktext, tskfailure, tskdict[tskexekey])
else:
taskval = task_consts.M3Task(tskname, tsktype, tskteam, tskexec, tskexekey,
tsktext, tskfailure, tskdict[tskexekey], tsknote)
if tskexekey == 'scriptspec':
taskval.specification.resolve_location(myapihandler, config)
return taskval
def load_tasks_fromjson(lggr, myapihandler, config, filename='tasks3.json', basepath=None):
_fullpath = './input/{}'.format(filename) if basepath is None else '{}/input/{}'.format(basepath, filename)
lggr.debug(_fullpath)
jshash = json.loads(file_reader(_fullpath))
if jshash and 'tasksets' in jshash and len(jshash['tasksets']) > 0:
stages = { task_consts.M3TaskStage.FOUNDATION:None,
task_consts.M3TaskStage.PRIMORDIAL:None,
task_consts.M3TaskStage.CORE:None,
task_consts.M3TaskStage.HIGHER:None
}
_allsets = {}
for tsksetdict in jshash['tasksets']:
tsksetname = None
tsksetarea = None
tsksetstage = None
if 'name' not in tsksetdict:
raise common_entities.M3GeneralChoreographyException('Missing vital property in task specification')
else:
tsksetname = tsksetdict['name']
if 'stage' not in tsksetdict:
raise common_entities.M3GeneralChoreographyException('Missing vital property in task specification')
else:
tsksetstage = task_consts.M3TaskStage.from_name(tsksetdict['stage'])
if 'area' not in tsksetdict:
raise common_entities.M3GeneralChoreographyException('Missing vital property in task specification')
else:
tsksetarea = task_consts.M3TaskArea.from_name(tsksetdict['area'])
tsksetval = task_consts.M3TaskSet(tsksetname, tsksetarea, tsksetstage)
for tskdict in tsksetdict['tasks']:
taskval = cs_parse_json_task(lggr, myapihandler, config, tskdict)
tsksetval.tasks.append(taskval)
# link predecessors
_prival = 0
if 'predecessors' in tsksetdict and len(tsksetdict['predecessors']) > 0:
for tskpred in tsksetdict['predecessors']:
tsksetval.predecessors.append(_allsets[tskpred])
_allsets[tskpred].successors.append(tsksetval)
if _allsets[tskpred].priority > _prival:
_prival = _allsets[tskpred].priority
_prival = _prival + 1
tsksetval.priority = _prival
for taskval in tsksetval.tasks:
taskval.priority = _prival
if stages[tsksetstage] is None:
stages[tsksetstage] = tsksetval
_allsets[tsksetname] = tsksetval
return stages
else:
raise common_entities.M3ReferenceDataException('Tasks', 'Empty')
def write_close_task(ischecktask, tasksjson, predecessors):
if ischecktask:
# close out CHECK task
tasksjson.write('] }}')
ischecktask = False
# Close out current task
# TODO shift predecessors from task to task set
tasksjson.write(', "failure": "manual", "predecessors": [')
_outstr = ','.join(predecessors)
tasksjson.write(_outstr)
tasksjson.write('] }}')
return ischecktask
"""
Fields in export of MPP into Excel
- ID
- Project (Env - Region)
- Task Name (Task outline Level is 3, outline level of 4 are steps within a Check Spec,
outline level 2 is a TaskSet, outline level 1 is a stage if followed by other outlines,
otherwise generate a name from ID and use the name as the text)
- Duration (For timelines; 0 duration tasks are markers NEED TO IMPLEMENT CONCEPT)
- Predecessors (outside group or phase should be changed to the whole group, inside could be to omitted as it is in sequence)
- Outline Level
Add
- type
- mode (any level 3 task with level 4 is of mode CHECK and so can be left blank)
- team (only for outline level 2)
- failure
- area (only for outline level 2)
"""
def load_convert_tasks_fromcsv(lggr, input='tasks2.csv', tmppath='/Users/mugopala/tmp', basepath=None):
_fullpath = './input/{}'.format(input) if basepath is None else '{}/input/{}'.format(basepath, input)
lggr.debug(_fullpath)
_outpath = '{}_tmptasks.json'.format(tmppath) if tmppath[-1] == '/' else '{}/_tmptasks.json'.format(tmppath)
_tsksstr = None
with open(_fullpath, newline='') as csvfile:
_csvrdr = csv.DictReader(csvfile)
_tasksjson = StringIO()
_stagerevidmap = {}
_taskidmap = []
_tsksetidmap = {}
_tsksetrevidmap = {}
_checkidmap = []
_taskparentidmap = {}
_currstgname = None
_currsetname = None
_currtskname = None
_predecessors = []
_isnewstage = False
_isnewtskset = False
_ischecktask = False
_isnewchecktask = False
for _tskdict in _csvrdr:
if 'Task Name' not in _tskdict:
raise common_entities.M3GeneralChoreographyException('Missing vital property in task specification')
if 'Outline Level' not in _tskdict:
raise common_entities.M3GeneralChoreographyException('Missing vital property in task specification')
# ASSUME: ID, Duration, and Predecessor exist as it is a standard MPP field
# each output is _tasksjson.write(thestring)
_outlinelevel = int(_tskdict['Outline Level'])
_tskid = int(_tskdict['ID'])
_nametext = _tskdict['Task Name'].strip()
_outstr = None
if _outlinelevel == 4:
# TODO generalize type; if type exists, create a condition spec to automatically execute check
_outstr = '{{"name": "{}", "type": "manual"}}'.format(_nametext) if _isnewchecktask else ', {{"name": "{}", "type": "manual"}}'.format(_nametext)
_tasksjson.write(_outstr)
_outstr = None
if _isnewchecktask:
_isnewchecktask = False
elif _outlinelevel == 1:
if _nametext == _tskdict['Project'].strip():
continue
elif 'Foundation' in _nametext:
_stagerevidmap[task_consts.M3TaskStage.FOUNDATION] = _tskid
_isnewstage = True
_currstgname = task_consts.M3TaskStage.FOUNDATION.name
elif 'Primordial' in _nametext:
_stagerevidmap[task_consts.M3TaskStage.PRIMORDIAL] = _tskid
_isnewstage = True
_currstgname = task_consts.M3TaskStage.PRIMORDIAL.name
elif 'Agent Based' in _nametext:
_stagerevidmap[task_consts.M3TaskStage.CORE] = _tskid
_isnewstage = True
_currstgname = task_consts.M3TaskStage.CORE.name
elif 'All Remaining' in _nametext:
_stagerevidmap[task_consts.M3TaskStage.HIGHER] = _tskid
_isnewstage = True
_currstgname = task_consts.M3TaskStage.HIGHER.name
else:
# TODO if 0 duration task it is a check task for all
# TODO: else it is a link task for all
pass
elif _outlinelevel == 2:
_ischecktask = write_close_task(_ischecktask, _tasksjson, _predecessors)
_currtskname = None
_predecessors.clear()
# Close out tasks in set, task set itself, then start new task set
# TODO shift predecessors from task to task set
_tasksjson.write('] }}, {{ "name": "{}", "stage": "{}", '.format(_nametext, _currstgname))
_currsetname = _nametext
_tsksetidmap[_tskid] = _currsetname
_tsksetrevidmap[_currsetname] = _tskid
_outstr = 'BUILD' if 'Area' not in _tskdict or _tskdict['Area'].isspace() else _tskdict['Area']
_tasksjson.write('"area": "{}", '.format(_outstr))
_outstr = None
_tasksjson.write('"tasks": [')
_isnewtskset = True
_taskparentidmap[_tskid] = _tskid # put self also in there
elif _outlinelevel == 3:
_ischecktask = write_close_task(_ischecktask, _tasksjson, _predecessors)
_predecessors.clear()
if _isnewtskset:
_isnewtskset = False
_currtskname = 'Task{}'.format(_tskid)
if 'Type' not in _tskdict or _tskdict['Type'].isspace():
raise common_entities.M3GeneralChoreographyException('Missing vital property in task specification')
if 'Team' not in _tskdict or _tskdict['Team'].isspace():
raise common_entities.M3GeneralChoreographyException('Missing vital property in task specification')
_tasksjson.write('{{ "name": "{}", "type": "{}", "team": "{}", "text": "{}", '.format(_currtskname, _tskdict['Type'].strip().lower(), _tskdict['Team'].strip().upper(), _nametext))
_tsktype = task_consts.M3TaskType.from_name(_tskdict['Type'].strip().lower())
_tsktarget = None
if _tsktype == task_consts.M3TaskType.CHECK:
_outstr = '"mode": "check", '
_mode = 'check'
pass
else:
if 'Mode' not in _tskdict or _tskdict['Mode'].isspace():
# TODO c. if does not exist and type not check, set mode as 'manual' and define a standard JIRA spec for the team
_outstr = ''
raise common_entities.M3GeneralChoreographyException('Missing vital property in task specification')
else:
if 'Target' not in _tskdict or _tskdict['Target'].isspace():
raise common_entities.M3GeneralChoreographyException('Missing vital property in task specification')
else:
_tsktarget = _tskdict['Target'].strip()
_mode = _tskdict['Mode'].strip().lower()
_outstr = '"mode": "{}", '.format(_mode)
_tasksjson.write(_outstr)
_outstr = None
_taskparentidmap[_tskid] = _tsksetrevidmap[_currsetname]
if _mode == 'check':
_outstr = '"checkspec": {{ "action": "ALL TRUE", "conditions": [ '
elif _mode == 'terraform':
_outstr = '"terraformspec": {{ "location": {{ "type": "object", "target": "{}"'.format(_tsktarget)
elif _mode == 'chef':
_outstr = '"chefspec": {{ "location": {{ "type": "object", "target": "{}"'.format(_tsktarget)
elif _mode == 'shell':
_outstr = '"scriptspec": {{ "location": {{ "type": "object", "target": "{}"'.format(_tsktarget)
elif _mode == 'manual':
_tgtparts = _tsktarget.split('.')
_outstr = '"jiraspec": {{ "project": "{}", "issuetype": "STORY", "components": ["{}"], "labels": ["{}"], "parent": {{ "issuetype": "EPIC", "issueid": "{}" }} }}'.format(_tgtparts[0], _tgtparts[2], _tgtparts[1], _tgtparts[3])
_tasksjson.write(_outstr)
_tgtparts = None
# if predecessors exist, look up the id and find the nearest task set and use its name instead
if 'Predecessors' in _tskdict and not _tskdict['Predecessors'].isspace():
_tgtparts = _tskdict['Predecessors'].split(',')
_predecessors = [_tsksetidmap[_taskparentidmap[_tgtprt]] for _tgtprt in _tgtparts]
else:
pass # invalid outline level
_tsksstr = _tasksjson.getvalue()
# TODO output _tsksstr to _outpath
return _outpath
def choreograph_tasksets(lggr, tasksbystage, appcfg):
# Establish communication queues
exeQ = stdq.Queue()
resultQ = stdq.Queue()
# Start consumers
# num_consumers = multiprocessing.cpu_count() * 2
# print('Creating {} consumers'.format(num_consumers))
# consumers = [
# SaasCs2TaskPerformer(exeQ, resultQ, appcfg)
# for i in range(num_consumers)
# ]
# for w in consumers:
# w.start()
consumer = M3TaskPerformer(exeQ, resultQ, lggr, appcfg)
consumer.start()
currstg = task_consts.M3TaskStage.PRIMORDIAL
stgstr = "%s" % currstg
print(stgstr)
producer = M3TaskSetStateManager(exeQ, resultQ, lggr, currstg, tasksbystage[currstg], appcfg)
producer.start()
exeQ.put(tasksbystage[currstg].tasks[0])
global tasksinstage
tasksinstage = []
def print_task_set(tsksetval):
global tasksinstage
if tsksetval is None:
print(' No task passed in')
else:
if len(tasksinstage) <= 0 or tsksetval.name not in tasksinstage:
print(" %s" % tsksetval)
tasksinstage.append(tsksetval)
if len(tsksetval.successors) <= 0:
print(' End of line')
else:
for tskinst in tsksetval.successors:
print_task_set(tskinst)
| 44.187377 | 244 | 0.599339 | 5,484 | 0.244789 | 0 | 0 | 1,095 | 0.048877 | 0 | 0 | 5,098 | 0.227559 |
cb3862c145315bab55381d425ba7e7e14a856cfa | 819 | py | Python | media.py | DojoZheng/Udacity-Movies-Website | e59d294b555e1abb9db7ecb46a51c9a72b9b8dcf | [
"MIT"
] | null | null | null | media.py | DojoZheng/Udacity-Movies-Website | e59d294b555e1abb9db7ecb46a51c9a72b9b8dcf | [
"MIT"
] | null | null | null | media.py | DojoZheng/Udacity-Movies-Website | e59d294b555e1abb9db7ecb46a51c9a72b9b8dcf | [
"MIT"
] | null | null | null | import webbrowser
class Movie():
""" This class provides a way to store movie related information """
# Class Variable: These are the Movies Ratings
# G: General Audiences. All ages admitted.
# PG: Parental Guidance Suggested. Some material may not be suitable for children.
# PG-13: Parents Strongly Cautioned. Some material may be inappropriate for children under 13.
# R: Restricted. Under 17 requires accompanying parent or adult guardian.
# NC-17: No Children. No one 17 and under admitted.
VALID_RATINGS = ["G", "PG","PG-13","R","NC-17"]
def __init__(self, movie_title, poster_image, trailer_youku, rating):
self.title = movie_title
self.poster_image_url = poster_image
self.trailer_url = trailer_youku
self.valid_rating = rating
def show_trailer(self):
webbrowser.open(self.trailer_url) | 37.227273 | 95 | 0.752137 | 800 | 0.976801 | 0 | 0 | 0 | 0 | 0 | 0 | 481 | 0.587302 |
cb397006da0e1fab13bd9fe5696085b9a41dee97 | 1,365 | py | Python | converter.py | Supercip971/convertisseur-python | 5e8e6c150fcc3c79e4902971ffd6227b32253450 | [
"MIT"
] | null | null | null | converter.py | Supercip971/convertisseur-python | 5e8e6c150fcc3c79e4902971ffd6227b32253450 | [
"MIT"
] | null | null | null | converter.py | Supercip971/convertisseur-python | 5e8e6c150fcc3c79e4902971ffd6227b32253450 | [
"MIT"
] | null | null | null | str_xdigits = [
"0",
"1",
"2",
"3",
"4",
"5",
"6",
"7",
"8",
"9",
"a",
"b",
"c",
"d",
"e",
"f",
]
def convert_digit(value: int, base: int) -> str:
return str_xdigits[value % base]
def convert_to_val(value: int, base: int) -> str:
if value == None:
return "Error"
current = int(value)
result = ""
while current != 0:
result = result + convert_digit(current, base)
current = current // base
if len(result) == 0:
return "0"
return result[::-1] # reverse string
def val_to_hex(value: int) -> str:
return "0x" + convert_to_val(value, 16)
def val_to_bin(value: int) -> str:
return "0b" + convert_to_val(value, 2)
def val_to_dec(value: int) -> str:
return convert_to_val(value, 10)
def val_from_str(value: str, base: int) -> int:
value = value.lower()
result = 0
for c in value:
if c not in str_xdigits or int(str_xdigits.index(c)) >= base:
return None
result = result * base + str_xdigits.index(c)
return result
def val_from_hex(value: str) -> int:
return val_from_str(value.removeprefix("0x"), 16)
def val_from_bin(value: str) -> int:
return val_from_str(value.removeprefix("0b"), 2)
def val_from_dec(value: str) -> int:
return val_from_str(value, 10)
| 17.727273 | 69 | 0.570696 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 92 | 0.067399 |
cb3c3f1580ef22e37e0c06b778c777c3a92768d2 | 13,914 | py | Python | backend/test/test_api.py | solevis/pixyship2 | 15a592a1961e4286a344a0d1664cfb491439bf09 | [
"MIT"
] | 8 | 2021-04-04T17:10:35.000Z | 2021-12-04T06:56:56.000Z | backend/test/test_api.py | solevis/pixyship2 | 15a592a1961e4286a344a0d1664cfb491439bf09 | [
"MIT"
] | 56 | 2021-02-18T14:50:32.000Z | 2022-03-28T14:03:30.000Z | backend/test/test_api.py | solevis/pixyship2 | 15a592a1961e4286a344a0d1664cfb491439bf09 | [
"MIT"
] | 2 | 2021-09-28T00:57:00.000Z | 2022-01-21T07:47:15.000Z | from pixelstarshipsapi import PixelStarshipsApi
from run import push_context
def test_login():
    """A freshly generated device can obtain a UUID-style (36-char) API token."""
    api = PixelStarshipsApi()
    key, checksum = api.generate_device()
    token = api.get_device_token(key, checksum)

    assert isinstance(token, str)
    assert len(token) == 36
def test_settings():
pixel_starships_api = PixelStarshipsApi()
settings = pixel_starships_api.get_api_settings()
assert 'ProductionServer' in settings
assert 'MaintenanceMessage' in settings
def test_inspect_ship():
# avoid Flask RuntimeError: No application found
push_context()
pixel_starships_api = PixelStarshipsApi()
user_id = 6635604 # Solevis
inspect_ship = pixel_starships_api.inspect_ship(user_id)
# Player
user = inspect_ship['User']
assert 'Id' in user
assert 'Name' in user
assert 'IconSpriteId' in user
assert 'AllianceName' in user
assert 'AllianceSpriteId' in user
assert 'Trophy' in user
assert 'LastAlertDate' in user
# Ship
ship = inspect_ship['Ship']
assert 'ShipDesignId' in ship
assert 'ImmunityDate' in ship
assert 'ShipStatus' in ship
assert 'OriginalRaceId' in ship
# Room
room = inspect_ship['Ship']['Rooms'][0]
assert 'RoomId' in room
assert 'Row' in room
assert 'Column' in room
assert 'ConstructionStartDate' in room
def test_dailies():
pixel_starships_api = PixelStarshipsApi()
dailies = pixel_starships_api.get_dailies()
assert len(dailies) > 0
# Shop
assert 'LimitedCatalogCurrencyAmount' in dailies
assert 'LimitedCatalogType' in dailies
assert 'LimitedCatalogArgument' in dailies
assert 'LimitedCatalogCurrencyType' in dailies
assert 'LimitedCatalogQuantity' in dailies
assert 'LimitedCatalogMaxTotal' in dailies
assert 'LimitedCatalogExpiryDate' in dailies
# Blue cargo
assert 'CommonCrewId' in dailies
assert 'HeroCrewId' in dailies
# Green cargo
assert 'CargoItems' in dailies
assert 'CargoPrices' in dailies
# Daily reward
assert 'DailyRewardArgument' in dailies
assert 'DailyRewardType' in dailies
assert 'DailyItemRewards' in dailies
# Sale
assert 'SaleType' in dailies
assert 'SaleArgument' in dailies
assert 'SaleItemMask' in dailies
# News messages
assert 'News' in dailies
assert 'NewsUpdateDate' in dailies
assert 'TournamentNews' in dailies
assert 'NewsSpriteId' in dailies
def test_sprites():
pixel_starships_api = PixelStarshipsApi()
sprites = pixel_starships_api.get_sprites()
assert len(sprites) > 0
sprite = sprites[0]
assert 'SpriteId' in sprite
assert 'ImageFileId' in sprite
assert 'X' in sprite
assert 'Y' in sprite
assert 'Width' in sprite
assert 'Height' in sprite
assert 'SpriteKey' in sprite
def test_ships():
pixel_starships_api = PixelStarshipsApi()
ships = pixel_starships_api.get_ships()
assert len(ships) > 0
ship = ships[0]
assert 'ShipDesignName' in ship
assert 'ShipDescription' in ship
assert 'ShipLevel' in ship
assert 'Hp' in ship
assert 'RepairTime' in ship
assert 'InteriorSpriteId' in ship
assert 'ExteriorSpriteId' in ship
assert 'LogoSpriteId' in ship
assert 'MiniShipSpriteId' in ship
assert 'RoomFrameSpriteId' in ship
assert 'DoorFrameLeftSpriteId' in ship
assert 'DoorFrameRightSpriteId' in ship
assert 'Rows' in ship
assert 'Columns' in ship
assert 'RaceId' in ship
assert 'Mask' in ship
assert 'MineralCost' in ship
assert 'StarbuxCost' in ship
assert 'MineralCapacity' in ship
assert 'GasCapacity' in ship
assert 'EquipmentCapacity' in ship
assert 'ShipType' in ship
def test_researches():
pixel_starships_api = PixelStarshipsApi()
researches = pixel_starships_api.get_researches()
assert len(researches) > 0
research = researches[0]
assert 'ResearchName' in research
assert 'ResearchDescription' in research
assert 'GasCost' in research
assert 'StarbuxCost' in research
assert 'RequiredLabLevel' in research
assert 'ResearchTime' in research
assert 'LogoSpriteId' in research
assert 'ImageSpriteId' in research
assert 'RequiredResearchDesignId' in research
assert 'ResearchDesignType' in research
def test_rooms():
pixel_starships_api = PixelStarshipsApi()
rooms = pixel_starships_api.get_rooms()
assert len(rooms) > 0
room = rooms[0]
assert 'RoomName' in room
assert 'RoomShortName' in room
assert 'RoomType' in room
assert 'Level' in room
assert 'Capacity' in room
assert 'Rows' in room
assert 'Columns' in room
assert 'ImageSpriteId' in room
assert 'ConstructionSpriteId' in room
assert 'MaxSystemPower' in room
assert 'MaxPowerGenerated' in room
assert 'MinShipLevel' in room
assert 'UpgradeFromRoomDesignId' in room
assert 'DefaultDefenceBonus' in room
assert 'ReloadTime' in room
assert 'RefillUnitCost' in room
assert 'RoomType' in room
assert 'PriceString' in room
assert 'PriceString' in room
assert 'ConstructionTime' in room
assert 'RoomDescription' in room
assert 'ManufactureType' in room
room_with_missile_design = None
for room in rooms:
if room['MissileDesign']:
room_with_missile_design = room
break
assert room_with_missile_design
assert 'SystemDamage' in room_with_missile_design['MissileDesign']
assert 'HullDamage' in room_with_missile_design['MissileDesign']
assert 'CharacterDamage' in room_with_missile_design['MissileDesign']
room_with_purchase = None
for room in rooms:
if room['AvailabilityMask']:
room_with_purchase = room
break
assert room_with_purchase
assert 'AvailabilityMask' in room_with_purchase
def test_characters():
pixel_starships_api = PixelStarshipsApi()
characters = pixel_starships_api.get_characters()
assert len(characters) > 0
character = characters[0]
assert 'CharacterDesignName' in character
assert 'ProfileSpriteId' in character
assert 'Rarity' in character
assert 'Hp' in character
assert 'FinalHp' in character
assert 'Pilot' in character
assert 'FinalPilot' in character
assert 'Attack' in character
assert 'FinalAttack' in character
assert 'Repair' in character
assert 'FinalRepair' in character
assert 'Weapon' in character
assert 'FinalWeapon' in character
assert 'Engine' in character
assert 'FinalEngine' in character
assert 'Research' in character
assert 'FinalResearch' in character
assert 'Science' in character
assert 'FinalScience' in character
assert 'SpecialAbilityArgument' in character
assert 'SpecialAbilityFinalArgument' in character
assert 'SpecialAbilityType' in character
assert 'FireResistance' in character
assert 'WalkingSpeed' in character
assert 'RunSpeed' in character
assert 'TrainingCapacity' in character
assert 'ProgressionType' in character
assert 'CollectionDesignId' in character
assert 'EquipmentMask' in character
parts = character['CharacterParts']
assert 'StandardSpriteId' in parts['Head']
assert 'StandardSpriteId' in parts['Body']
assert 'StandardSpriteId' in parts['Leg']
def test_collections():
pixel_starships_api = PixelStarshipsApi()
collections = pixel_starships_api.get_collections()
assert len(collections) > 0
collection = collections[0]
assert 'CollectionName' in collection
assert 'MinCombo' in collection
assert 'MaxCombo' in collection
assert 'BaseEnhancementValue' in collection
assert 'SpriteId' in collection
assert 'StepEnhancementValue' in collection
assert 'IconSpriteId' in collection
def test_items():
pixel_starships_api = PixelStarshipsApi()
items = pixel_starships_api.get_items()
assert len(items) > 0
item = items[0]
assert 'ItemDesignName' in item
assert 'ItemDesignDescription' in item
assert 'ImageSpriteId' in item
assert 'ItemSubType' in item
assert 'EnhancementType' in item
assert 'Ingredients' in item
assert 'Content' in item
assert 'MarketPrice' in item
assert 'FairPrice' in item
assert 'ItemDesignId' in item
assert 'ItemType' in item
assert 'Rarity' in item
assert 'EnhancementValue' in item
def test_alliances():
pixel_starships_api = PixelStarshipsApi()
alliances = pixel_starships_api.get_alliances(42)
assert len(alliances) == 42
alliance = alliances[0]
assert 'AllianceId' in alliance
assert 'AllianceName' in alliance
def test_sales():
pixel_starships_api = PixelStarshipsApi()
sales = pixel_starships_api.get_sales(131, 0, 1) # Scratchy
assert len(sales) == 1
sale = sales[0]
assert 'SaleId' in sale
assert 'StatusDate' in sale
assert 'Quantity' in sale
assert 'CurrencyType' in sale
assert 'CurrencyValue' in sale
assert 'BuyerShipId' in sale
assert 'BuyerShipName' in sale
assert 'BuyerShipName' in sale
assert 'SellerShipId' in sale
assert 'SellerShipName' in sale
assert 'ItemId' in sale
def test_users():
# avoid Flask RuntimeError: No application found
push_context()
pixel_starships_api = PixelStarshipsApi()
users = pixel_starships_api.get_users() # top 10
assert len(users) == 100
user = users[0]
assert 'Id' in user
assert 'Name' in user
assert 'Trophy' in user
assert 'AllianceId' in user
assert 'LastLoginDate' in user
assert 'AllianceName' in user
assert 'AllianceSpriteId' in user
def test_alliance_users():
# avoid Flask RuntimeError: No application found
push_context()
pixel_starships_api = PixelStarshipsApi()
alliance_id = 9343 # Trek Federation
users = pixel_starships_api.get_alliance_users(alliance_id)
assert len(users) > 0
user = users[0]
assert 'Id' in user
assert 'Name' in user
assert 'Trophy' in user
assert 'AllianceId' in user
assert 'LastLoginDate' in user
assert 'AllianceName' in user
assert 'AllianceSpriteId' in user
def test_prestiges_character_to():
pixel_starships_api = PixelStarshipsApi()
character_id = 196 # PinkZilla
prestiges = pixel_starships_api.get_prestiges_character_to(character_id)
assert len(prestiges) > 0
prestige = prestiges[0]
assert 'CharacterDesignId1' in prestige
assert 'CharacterDesignId2' in prestige
def test_prestiges_character_from():
pixel_starships_api = PixelStarshipsApi()
character_id = 338 # Zongzi-Man
prestiges = pixel_starships_api.get_prestiges_character_from(character_id)
assert len(prestiges) > 0
prestige = prestiges[0]
assert 'CharacterDesignId1' in prestige
assert 'CharacterDesignId2' in prestige
def test_rooms_purchase():
pixel_starships_api = PixelStarshipsApi()
rooms_purchase = pixel_starships_api.get_rooms_purchase()
assert len(rooms_purchase) > 0
room_purchase = rooms_purchase[0]
assert 'RoomDesignId' in room_purchase
assert 'AvailabilityMask' in room_purchase
def test_search_users():
    """An exact-match user search returns one fully populated record."""
    # Push an app context to avoid Flask 'RuntimeError: No application found'.
    push_context()
    api = PixelStarshipsApi()
    name = 'Solevis'
    matches = api.search_users(name, True)
    assert len(matches) == 1
    found = matches[0]
    assert 'Name' in found
    assert found['Name'] == name
    for field in ('PVPAttackWins', 'PVPAttackLosses', 'PVPAttackDraws',
                  'PVPDefenceDraws', 'PVPDefenceWins', 'PVPDefenceLosses',
                  'HighestTrophy', 'CrewDonated', 'CrewReceived',
                  'AllianceJoinDate', 'CreationDate'):
        assert field in found
def test_trainings():
    """Training designs expose every stat-chance field."""
    # Push an app context to avoid Flask 'RuntimeError: No application found'.
    push_context()
    api = PixelStarshipsApi()
    trainings = api.get_trainings()
    assert len(trainings) > 0
    sample = trainings[0]
    for field in ('TrainingDesignId', 'TrainingSpriteId', 'HpChance',
                  'AttackChance', 'PilotChance', 'RepairChance',
                  'WeaponChance', 'ScienceChance', 'EngineChance',
                  'StaminaChance', 'AbilityChance', 'XpChance',
                  'Fatigue', 'MinimumGuarantee'):
        assert field in sample
def test_achievements():
    """Achievement designs expose the expected descriptive fields."""
    # Push an app context to avoid Flask 'RuntimeError: No application found'.
    push_context()
    api = PixelStarshipsApi()
    achievements = api.get_achievements()
    assert len(achievements) > 0
    sample = achievements[0]
    for field in ('AchievementDesignId', 'AchievementTitle',
                  'AchievementDescription', 'SpriteId', 'RewardString',
                  'ParentAchievementDesignId'):
        assert field in sample
def test_situations():
    """Situation (event) designs expose name, dates and sprite fields."""
    # Push an app context to avoid Flask 'RuntimeError: No application found'.
    push_context()
    api = PixelStarshipsApi()
    situations = api.get_situations()
    assert len(situations) > 0
    sample = situations[0]
    for field in ('SituationDesignId', 'SituationName',
                  'SituationDescription', 'FromDate', 'EndDate',
                  'IconSpriteId'):
        assert field in sample
| 28.280488 | 78 | 0.720425 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,875 | 0.278496 |
cb3c41b02dde05dedca749f977bedf28e7616d9a | 2,776 | py | Python | tests/test_io.py | crindt/geofeather | 5cf339de6a6db894493c2ed0515a8947620276c7 | [
"MIT"
] | 60 | 2019-10-04T14:06:14.000Z | 2021-06-29T16:15:49.000Z | tests/test_io.py | crindt/geofeather | 5cf339de6a6db894493c2ed0515a8947620276c7 | [
"MIT"
] | 5 | 2019-10-07T01:48:15.000Z | 2020-06-25T02:48:11.000Z | tests/test_io.py | brendan-ward/geofeather | 412db7e410e44de14d9c408d91ea116f699ffe01 | [
"MIT"
] | null | null | null | import os
from geofeather import to_geofeather, from_geofeather
from pandas.testing import assert_frame_equal
import pytest
def test_points_geofeather(tmpdir, points_wgs84):
    """Confirm that we can round-trip points to / from feather file"""
    out_path = tmpdir / "points_wgs84.feather"
    to_geofeather(points_wgs84, out_path)
    assert os.path.exists(out_path)
    roundtripped = from_geofeather(out_path)
    assert_frame_equal(roundtripped, points_wgs84)
    assert roundtripped.crs == points_wgs84.crs
def test_points_geofeather_proj4(tmpdir, points_albers_conus_proj4):
    """Confirm round-trip of points through feather with a proj4-defined CRS"""
    source = points_albers_conus_proj4
    out_path = tmpdir / "points_albers_conus.feather"
    to_geofeather(source, out_path)
    roundtripped = from_geofeather(out_path)
    assert_frame_equal(roundtripped, source)
    # Equality comparison fails for CRS objects constructed from proj4 even
    # though they are the same; compare the proj4 strings when available.
    if hasattr(roundtripped.crs, "to_proj4"):
        assert roundtripped.crs.to_proj4() == source.crs.to_proj4()
    else:
        assert roundtripped.crs == source.crs
def test_points_geofeather_wkt(tmpdir, points_albers_conus_wkt):
    """Confirm round-trip of points through feather with a wkt-defined CRS"""
    source = points_albers_conus_wkt
    out_path = tmpdir / "points_albers_conus.feather"
    to_geofeather(source, out_path)
    roundtripped = from_geofeather(out_path)
    assert_frame_equal(roundtripped, source)
    assert roundtripped.crs == source.crs
def test_missing_crs_warning(tmpdir, points_wgs84):
    """Confirm that a warning is raised if the crs file is missing"""
    out_path = tmpdir / "points_wgs84.feather"
    to_geofeather(points_wgs84, out_path)
    # Drop the sidecar CRS file to trigger the warning on read.
    os.remove("{}.crs".format(out_path))
    with pytest.warns(UserWarning) as warning:
        roundtripped = from_geofeather(out_path)
    assert (
        "coordinate reference system file is missing" in warning[0].message.args[0]
    )
    assert roundtripped.crs is None
def test_lines_geofeather(tmpdir, lines_wgs84):
    """Confirm that we can round-trip lines to / from feather file"""
    out_path = tmpdir / "lines_wgs84.feather"
    to_geofeather(lines_wgs84, out_path)
    assert os.path.exists(out_path)
    roundtripped = from_geofeather(out_path)
    assert_frame_equal(roundtripped, lines_wgs84)
    assert roundtripped.crs == lines_wgs84.crs
def test_polygons_geofeather(tmpdir, polygons_wgs84):
    """Confirm that we can round-trip polygons to / from feather file"""
    out_path = tmpdir / "polygons_wgs84.feather"
    to_geofeather(polygons_wgs84, out_path)
    assert os.path.exists(out_path)
    roundtripped = from_geofeather(out_path)
    assert_frame_equal(roundtripped, polygons_wgs84)
    assert roundtripped.crs == polygons_wgs84.crs
| 31.191011 | 107 | 0.736671 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 757 | 0.272695 |
cb3cff9fc2f6aa6821bff72a0e29830eff5b0e64 | 4,133 | py | Python | pilmoji/helpers.py | solfisher/miq-fedi | 2eb7137410ce099ba995b96623191f8b391e446b | [
"MIT"
] | 8 | 2022-01-18T11:45:36.000Z | 2022-02-23T12:54:50.000Z | pilmoji/helpers.py | solfisher/miq-fedi | 2eb7137410ce099ba995b96623191f8b391e446b | [
"MIT"
] | 2 | 2022-01-21T12:34:50.000Z | 2022-01-23T03:18:34.000Z | pilmoji/helpers.py | solfisher/miq-fedi | 2eb7137410ce099ba995b96623191f8b391e446b | [
"MIT"
] | 1 | 2022-01-22T19:53:08.000Z | 2022-01-22T19:53:08.000Z | from __future__ import annotations
import re
from enum import Enum
from emoji import EMOJI_UNICODE
from PIL import ImageFont
from typing import Final, List, NamedTuple, TYPE_CHECKING
if TYPE_CHECKING:
from .core import FontT
# This is actually way faster than it seems
# (the alternation is built once at import time and compiled just below).
_UNICODE_EMOJI_REGEX = '|'.join(map(re.escape, sorted(EMOJI_UNICODE['en'].values(), key=len, reverse=True)))
# Discord custom emoji, e.g. <a:name:123456789012345678> ('a' = animated).
_DISCORD_EMOJI_REGEX = '<a?:[a-zA-Z0-9_]{2,32}:[0-9]{17,22}>'
# Fediverse custom emoji shortcode, e.g. :blobcat:.
_FEDI_EMOJI_REGEX = ':[a-zA-Z0-9_]+:'

# Single capturing alternation; the longest unicode emoji sequences are tried
# first so multi-codepoint emoji are not split into their components.
EMOJI_REGEX: Final[re.Pattern[str]] = re.compile(f'({_UNICODE_EMOJI_REGEX}|{_DISCORD_EMOJI_REGEX}|{_FEDI_EMOJI_REGEX})')

__all__ = (
    'EMOJI_REGEX',
    'Node',
    'NodeType',
    'to_nodes',
    'getsize'
)
class NodeType(Enum):
    """|enum|

    Represents the type of a :class:`~.Node`.

    Attributes
    ----------
    text
        This node is a raw text node.
    emoji
        This node is a unicode emoji.
    discord_emoji
        This node is a Discord emoji.
    fedi_emoji
        This node is a Fediverse custom emoji.
    """

    # Values are stable integer identifiers for each node kind.
    text = 0
    emoji = 1
    discord_emoji = 2
    fedi_emoji = 3
class Node(NamedTuple):
    """Represents a parsed node inside of a string.

    Attributes
    ----------
    type: :class:`~.NodeType`
        The type of this node.
    content: str
        The contents of this node. For ``text`` and unicode ``emoji`` nodes
        this is the raw text; for ``discord_emoji`` it is the numeric emoji
        id; for ``fedi_emoji`` it is the emoji image URL (see the parser).
    """

    type: NodeType
    content: str

    def __repr__(self) -> str:
        return f'<Node type={self.type.name!r} content={self.content!r}>'
def _parse_line(line: str, /, emojis: list | None = None) -> List[Node]:
    """Split a single line of text into text/emoji :class:`~.Node`s.

    Parameters
    ----------
    line: str
        The line to parse (must not contain newlines).
    emojis: Optional[list]
        Known Fediverse custom emojis; each entry is a mapping with at
        least ``name`` and ``url`` keys. Defaults to no custom emojis.

    Returns
    -------
    List[:class:`~.Node`]
    """
    # The default used to be a shared mutable ``[]``; use None as a sentinel
    # instead to avoid the mutable-default-argument pitfall.
    if emojis is None:
        emojis = []
    nodes = []
    for i, chunk in enumerate(EMOJI_REGEX.split(line)):
        if not chunk:
            continue
        # re.split with a capturing group alternates non-matches (even
        # indices) and matches (odd indices).
        if not i % 2:
            nodes.append(Node(NodeType.text, chunk))
            continue
        # Check whether this is a Fediverse custom emoji shortcode (":name:").
        if chunk.startswith(':') and chunk.endswith(':'):
            emoji_name = chunk.replace(':', '')
            for e in emojis:
                if e['name'] == emoji_name:
                    # Known custom emoji: the node carries its image URL.
                    node = Node(NodeType.fedi_emoji, e['url'])
                    break
            else:
                # Unknown shortcode: treat it as plain text.
                node = Node(NodeType.text, chunk)
        elif len(chunk) > 18:  # This is guaranteed to be a Discord emoji
            # Extract the numeric emoji id from "<a:name:id>".
            node = Node(NodeType.discord_emoji, chunk.split(':')[-1][:-1])
        else:
            node = Node(NodeType.emoji, chunk)
        nodes.append(node)
    return nodes
def to_nodes(text: str, /, emojis: list | None = None) -> List[List[Node]]:
    """Parses a string of text into :class:`~.Node`s.

    This method will return a nested list, each element of the list
    being a list of :class:`~.Node`s and representing a line in the string.

    The string ``'Hello\nworld'`` would return something similar to
    ``[[Node('Hello')], [Node('world')]]``.

    Parameters
    ----------
    text: str
        The text to parse into nodes.
    emojis: Optional[list]
        Known Fediverse custom emojis, forwarded to the line parser.
        Defaults to no custom emojis.

    Returns
    -------
    List[List[:class:`~.Node`]]
    """
    # None (not a shared mutable []) is the default; normalize once so the
    # same list object is reused for every line of this call.
    emojis = [] if emojis is None else emojis
    return [_parse_line(line, emojis=emojis) for line in text.splitlines()]
def getsize(
    text: str,
    font: FontT | None = None,
    *,
    spacing: int = 4,
    emoji_scale_factor: float = 1
) -> tuple[int, int]:
    """Return the width and height of the text when rendered.

    This method supports multiline text.

    Parameters
    ----------
    text: str
        The text to use.
    font
        The font of the text. Defaults to PIL's built-in default font.
    spacing: int
        The spacing between lines, in pixels.
        Defaults to `4`.
    emoji_scale_factor: float
        The rescaling factor for emojis.
        Defaults to `1`.
    """
    if font is None:
        font = ImageFont.load_default()
    x, y = 0, 0
    nodes = to_nodes(text)
    for line in nodes:
        this_x = 0
        for node in line:
            content = node.content
            if node.type is not NodeType.text:
                # Emoji are rendered as square images sized from the font.
                width = int(emoji_scale_factor * font.size)
            else:
                width, _ = font.getsize(content)
            this_x += width
        y += spacing + font.size
        # Track the widest line seen so far.
        if this_x > x:
            x = this_x
    # No spacing after the last line.
    return x, y - spacing
| 24.169591 | 120 | 0.571256 | 786 | 0.186831 | 0 | 0 | 0 | 0 | 0 | 0 | 1,829 | 0.434752 |
cb3d02f01e83a74d7111454d080e99724759677b | 53,276 | py | Python | networking_arista/ml2/mechanism_arista.py | sapcc/networking-arista | c274dac96e88c764decf2821c90c6866b868e1d6 | [
"Apache-2.0"
] | null | null | null | networking_arista/ml2/mechanism_arista.py | sapcc/networking-arista | c274dac96e88c764decf2821c90c6866b868e1d6 | [
"Apache-2.0"
] | 8 | 2018-03-28T07:55:54.000Z | 2020-05-29T05:36:38.000Z | networking_arista/ml2/mechanism_arista.py | sapcc/networking-arista | c274dac96e88c764decf2821c90c6866b868e1d6 | [
"Apache-2.0"
] | 1 | 2017-01-18T14:59:48.000Z | 2017-01-18T14:59:48.000Z | # Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
if not os.environ.get('DISABLE_EVENTLET_PATCHING'):
import eventlet
eventlet.monkey_patch()
from neutron.common import config as common_config
from neutron.db import models_v2
from neutron.db import securitygroups_db as sg_db
from neutron.plugins.ml2.common import exceptions as ml2_exc
from neutron_lib.api.definitions import portbindings
from neutron_lib import constants as n_const
from neutron_lib import constants as p_const
from neutron_lib.context import get_admin_context
from neutron_lib.db import api as db_api
from neutron_lib.plugins.ml2 import api
from oslo_config import cfg
from oslo_log import log as logging
from oslo_service import loopingcall
from networking_arista._i18n import _, _LI, _LE
from networking_arista.common import constants
from networking_arista.common import db
from networking_arista.common import db_lib
from networking_arista.common import exceptions as arista_exc
from networking_arista.common import util
from networking_arista.ml2 import arista_sync
from networking_arista.ml2.rpc.arista_eapi import AristaRPCWrapperEapi
from networking_arista.ml2.rpc import get_rpc_wrapper
from networking_arista.ml2 import sec_group_callback
LOG = logging.getLogger(__name__)
cfg.CONF.import_group('ml2_arista', 'networking_arista.common.config')
def pretty_log(tag, obj):
    """Intentionally disabled debug helper.

    The original implementation dumped ``obj`` as indented JSON at DEBUG
    level; it is stubbed out here (presumably to avoid the cost of
    serializing large port/network dicts on every ML2 callback — confirm
    before re-enabling).
    """
    pass
class AristaDriver(api.MechanismDriver):
"""Ml2 Mechanism driver for Arista networking hardware.
Remembers all networks and VMs that are provisioned on Arista Hardware.
Does not send network provisioning request if the network has already been
provisioned before for the given port.
"""
    def __init__(self, rpc=None):
        """Set up DB helpers, RPC wrappers and the periodic loops.

        :param rpc: optional pre-built RPC wrapper (used by tests); when
            given it is used both as the CVX RPC client and as the eAPI
            client.
        """
        self.ndb = db_lib.NeutronNets()
        confg = cfg.CONF.ml2_arista
        self.segmentation_type = db_lib.VLAN_SEGMENTATION
        # Periodic loop driving _synchronization_thread (started in
        # initialize()).
        self.timer = loopingcall.FixedIntervalLoopingCall(
            self._synchronization_thread)
        self.sync_timeout = confg['sync_interval']
        # NOTE(review): _config_save_loop / save_config_interval only exist
        # when save_config_interval > 0; callers must guard for that.
        if confg.save_config_interval > 0:
            self._config_save_loop = loopingcall.FixedIntervalLoopingCall(
                self._save_switch_configs_thread)
            self.save_config_interval = confg.save_config_interval
        self.managed_physnets = confg['managed_physnets']
        self.eapi = None
        if rpc:
            LOG.info("Using passed in parameter for RPC")
            self.rpc = rpc
            self.eapi = rpc
        else:
            http_session = util.make_http_session()
            api_type = confg['api_type'].upper()
            self.rpc = get_rpc_wrapper(confg)(self.ndb,
                                              http_session=http_session)
            # In 'NOCVX' mode the same wrapper doubles as the eAPI client;
            # otherwise a dedicated eAPI wrapper is used.
            if api_type == 'NOCVX':
                self.eapi = self.rpc
            else:
                self.eapi = AristaRPCWrapperEapi(self.ndb)
        self.sync_service = arista_sync.SyncService(self.rpc, self.ndb)
        self.rpc.sync_service = self.sync_service
        # Security-group handler is created lazily in initialize().
        self.sg_handler = None
def initialize(self):
if self.rpc.check_cvx_availability():
self.rpc.register_with_eos()
self.rpc.check_supported_features()
context = get_admin_context()
self._cleanup_db(context)
# Registering with EOS updates self.rpc.region_updated_time. Clear it
# to force an initial sync
self.rpc.clear_region_updated_time()
self.sg_handler = sec_group_callback.AristaSecurityGroupHandler(self)
self.timer.start(self.sync_timeout, stop_on_exception=False)
self._config_save_loop.start(self.save_config_interval,
stop_on_exception=False)
    def create_network_precommit(self, context):
        """Remember the tenant and the network's segments in the local DB.

        Runs inside the ML2 DB transaction; no RPC calls are made here.
        """
        network = context.current
        segments = context.network_segments
        if not self.rpc.hpb_supported():
            # Hierarchical port binding is not supported by CVX, only
            # allow VLAN network type.
            if segments[0][api.NETWORK_TYPE] != p_const.TYPE_VLAN:
                return
        network_id = network['id']
        # Networks without a tenant (e.g. created by admin) are tracked
        # under a driver-internal tenant id.
        tenant_id = network['tenant_id'] or constants.INTERNAL_TENANT_ID
        plugin_context = context._plugin_context
        db_lib.remember_tenant(plugin_context, tenant_id)
        for segment in segments:
            db_lib.remember_network_segment(plugin_context,
                                            tenant_id,
                                            network_id,
                                            segment.get('segmentation_id'),
                                            segment.get('id'))
    def create_network_postcommit(self, context):
        """Provision the network on the Arista Hardware.

        Only networks that were recorded by create_network_precommit are
        pushed; RPC failures are logged and left for the sync thread.
        """
        network = context.current
        network_id = network['id']
        network_name = network['name']
        tenant_id = network['tenant_id'] or constants.INTERNAL_TENANT_ID
        segments = context.network_segments
        shared_net = network['shared']
        plugin_context = context._plugin_context
        if db_lib.is_network_provisioned(plugin_context, tenant_id,
                                         network_id):
            try:
                network_dict = {
                    'network_id': network_id,
                    'segments': segments,
                    'network_name': network_name,
                    'shared': shared_net}
                self.rpc.create_network(tenant_id, network_dict)
            except arista_exc.AristaRpcError as err:
                # Best effort: the periodic sync will reconcile EOS later.
                LOG.error(_LE("create_network_postcommit: Did not create "
                              "network %(name)s. Reason: %(err)s"),
                          {'name': network_name, 'err': err})
        else:
            LOG.info(_LI('Network %s is not created as it is not found in '
                         'Arista DB'), network_id)
def update_network_precommit(self, context):
"""At the moment we only support network name change
Any other change in network is not supported at this time.
We do not store the network names, therefore, no DB store
action is performed here.
"""
new_network = context.current
orig_network = context.original
if new_network['name'] != orig_network['name']:
LOG.info(_LI('Network name changed to %s'), new_network['name'])
    def update_network_postcommit(self, context):
        """Push a network update (name or shared flag change) to EOS.

        A change is sent as a create_network request, which EOS treats as
        an upsert; any other network change is ignored.
        """
        new_network = context.current
        orig_network = context.original
        plugin_context = context._plugin_context
        if (new_network['name'] != orig_network['name'] or
                new_network['shared'] != orig_network['shared']):
            network_id = new_network['id']
            network_name = new_network['name']
            tenant_id = (new_network['tenant_id'] or
                         constants.INTERNAL_TENANT_ID)
            shared_net = new_network['shared']
            if db_lib.is_network_provisioned(plugin_context,
                                             tenant_id, network_id):
                try:
                    network_dict = {
                        'network_id': network_id,
                        'segments': context.network_segments,
                        'network_name': network_name,
                        'shared': shared_net}
                    self.rpc.create_network(tenant_id, network_dict)
                except arista_exc.AristaRpcError as err:
                    # Best effort: the periodic sync will reconcile later.
                    LOG.error(_LE('update_network_postcommit: Did not '
                                  'update network %(name)s. '
                                  'Reason: %(err)s'),
                              {'name': network_name, 'err': err})
            else:
                LOG.info(_LI('Network %s is not updated as it is not found'
                             ' in Arista DB'), network_id)
    def delete_network_precommit(self, context):
        """Delete the network information from the DB.

        Raises MechanismDriverError (vetoing the delete) when ports are
        still attached to the network.
        """
        network = context.current
        network_id = network['id']
        tenant_id = network['tenant_id'] or constants.INTERNAL_TENANT_ID
        plugin_context = context._plugin_context
        if db_lib.is_network_provisioned(plugin_context, tenant_id,
                                         network_id):
            if db_lib.are_ports_attached_to_network(plugin_context,
                                                    network_id):
                LOG.info(_LI('Network %s can not be deleted as it '
                             'has ports attached to it'), network_id)
                raise ml2_exc.MechanismDriverError(
                    method='delete_network_precommit')
            else:
                db_lib.forget_network_segment(plugin_context,
                                              tenant_id, network_id)
    def delete_network_postcommit(self, context):
        """Send network delete request to Arista HW."""
        network = context.current
        segments = context.network_segments
        if not self.rpc.hpb_supported():
            # Hierarchical port binding is not supported by CVX, only
            # send the request if network type is VLAN.
            if segments[0][api.NETWORK_TYPE] != p_const.TYPE_VLAN:
                # If network type is not VLAN, do nothing
                return
        network_id = network['id']
        tenant_id = network['tenant_id'] or constants.INTERNAL_TENANT_ID
        # Succeed deleting network in case EOS is not accessible.
        # EOS state will be updated by sync thread once EOS gets
        # alive.
        try:
            self.rpc.delete_network(tenant_id, network_id, segments)
            # if necessary, delete tenant as well.
            self.delete_tenant(context, tenant_id)
        except arista_exc.AristaRpcError as err:
            LOG.error(_LE('delete_network_postcommit: Did not delete '
                          'network %(network_id)s. Reason: %(err)s'),
                      {'network_id': network_id, 'err': err})
def create_port_precommit(self, context):
"""Remember the information about a VM and its ports
A VM information, along with the physical host information
is saved.
"""
# Returning from here, since the update_port_precommit is performing
# same operation, and also need of port binding information to decide
# whether to react to a port create event which is not available when
# this method is called.
return
def _get_physnet_from_link_info(self, port, physnet_info):
binding_profile = port.get(portbindings.PROFILE)
if not binding_profile:
return
link_info = binding_profile.get('local_link_information')
if not link_info:
return
mac_to_hostname = physnet_info.get('mac_to_hostname', {})
for link in link_info:
if link.get('switch_id') in mac_to_hostname:
physnet = mac_to_hostname.get(link.get('switch_id'))
return self.rpc.mlag_pairs.get(physnet, physnet)
    def _bind_port_to_baremetal(self, context, segment):
        """Complete the binding of a baremetal port on the given segment.

        Only acts on ports with vnic_type 'baremetal' whose binding
        profile carries local_link_information; sets the binding with the
        segment's VLAN id in the VIF details.
        """
        port = context.current
        vnic_type = port.get('binding:vnic_type')
        if vnic_type != portbindings.VNIC_BAREMETAL:
            # We are only interested in binding baremetal ports.
            return
        binding_profile = port.get(portbindings.PROFILE)
        if not binding_profile:
            return
        link_info = binding_profile.get('local_link_information')
        if not link_info:
            return
        # Require at least one switch id in the link info before binding.
        switch_list = []
        for link in link_info:
            switch_list.append(link.get('switch_id'))
        if not switch_list:
            return
        vif_details = {
            portbindings.VIF_DETAILS_VLAN: str(
                segment[api.SEGMENTATION_ID])
        }
        context.set_binding(segment[api.ID],
                            portbindings.VIF_TYPE_OTHER,
                            vif_details,
                            p_const.ACTIVE)
        LOG.debug("AristaDriver: bound port info- port ID %(id)s "
                  "on network %(network)s",
                  {'id': port['id'],
                   'network': context.network.current['id']})
    def bind_port(self, context):
        """Bind port to a network segment.

        Provisioning request to Arista Hardware to plug a host
        into appropriate network is done when the port is created
        this simply tells the ML2 Plugin that we are binding the port.

        For VXLAN segments a dynamic VLAN segment is allocated and binding
        continues down a level (hierarchical port binding); for VLAN
        segments of baremetal ports the binding is completed directly.
        """
        host_id = context.host
        port = context.current
        physnet_info = {}
        for segment in context.segments_to_bind:
            physnet = segment.get(api.PHYSICAL_NETWORK)
            if not self._is_in_managed_physnets(physnet):
                LOG.debug("bind_port for port %(port)s: physical_network "
                          "%(physnet)s is not managed by Arista "
                          "mechanism driver", {'port': port.get('id'),
                                               'physnet': physnet})
                continue
            # If physnet is not set, we need to look it up using hostname
            # and topology info
            if not physnet:
                if not physnet_info:
                    # We only need to get physnet_info once
                    physnet_info = self.eapi.get_physical_network(host_id)
                if (port.get('binding:vnic_type') ==
                        portbindings.VNIC_BAREMETAL):
                    # Find physnet using link_information in baremetal case
                    physnet = self._get_physnet_from_link_info(port,
                                                               physnet_info)
                else:
                    physnet = physnet_info.get('physnet')
            # If physnet was not found, we cannot bind this port
            if not physnet:
                LOG.debug("bind_port for port %(port)s: no physical_network "
                          "found", {'port': port.get('id')})
                continue
            if segment[api.NETWORK_TYPE] == p_const.TYPE_VXLAN:
                # Check if CVX supports HPB
                if not self.rpc.hpb_supported():
                    LOG.debug("bind_port: HPB is not supported")
                    return
                # The physical network is connected to arista switches,
                # allocate dynamic segmentation id to bind the port to
                # the network that the port belongs to.
                try:
                    next_segment = context.allocate_dynamic_segment(
                        {'id': context.network.current['id'],
                         'network_type': p_const.TYPE_VLAN,
                         'physical_network': physnet})
                except Exception as exc:
                    LOG.error(_LE("bind_port for port %(port)s: Failed to "
                                  "allocate dynamic segment for physnet "
                                  "%(physnet)s. %(exc)s"),
                              {'port': port.get('id'), 'physnet': physnet,
                               'exc': exc})
                    return
                LOG.debug("bind_port for port %(port)s: "
                          "current_segment=%(current_seg)s, "
                          "next_segment=%(next_seg)s",
                          {'port': port.get('id'), 'current_seg': segment,
                           'next_seg': next_segment})
                context.continue_binding(segment['id'], [next_segment])
            elif port.get('binding:vnic_type') == portbindings.VNIC_BAREMETAL:
                # The network_type is vlan, try binding process for baremetal.
                self._bind_port_to_baremetal(context, segment)
            else:
                continue
def create_port_postcommit(self, context):
"""Plug a physical host into a network.
Send provisioning request to Arista Hardware to plug a host
into appropriate network.
"""
# Returning from here, since the update_port_postcommit is performing
# same operation, and also need of port binding information to decide
# whether to react to a port create event which is not available when
# this method is called.
return
def _network_owner_tenant(self, context, network_id, tenant_id):
tid = tenant_id
if network_id and tenant_id:
plugin_context = context._plugin_context
network_owner = self.ndb.get_network_from_net_id(
plugin_context,
network_id
)
if network_owner and network_owner[0]['tenant_id'] != tenant_id:
tid = network_owner[0]['tenant_id'] or tenant_id
return tid
def _is_in_managed_physnets(self, physnet):
if not self.managed_physnets:
# If managed physnet is empty, accept all.
return True
# managed physnet is not empty, find for matching physnet
return any(pn == physnet for pn in self.managed_physnets)
def _bound_segments(self, context):
"""Check if a given port is managed by the mechanism driver.
It returns bound segment dictionary, if physical network in the bound
segment is included in the managed physical network list.
"""
if not self.managed_physnets:
return [binding_level.get(api.BOUND_SEGMENT) for
binding_level in context.binding_levels or []]
bound_segments = []
for binding_level in (context.binding_levels or []):
bound_segment = binding_level.get(api.BOUND_SEGMENT)
if (bound_segment and
self._is_in_managed_physnets(
bound_segment.get(api.PHYSICAL_NETWORK))):
bound_segments.append(bound_segment)
return bound_segments
    def _handle_port_migration_precommit(self, context):
        """Handles port migration in precommit

        It updates the port's new host in the DB.

        Returns True when the update was a migration (new host, port
        transitioning ACTIVE -> DOWN), otherwise None.
        """
        orig_port = context.original
        orig_host = context.original_host
        orig_status = context.original_status
        new_status = context.status
        new_host = context.host
        port_id = orig_port['id']
        # A migration shows up as a host change with the old binding going
        # from ACTIVE to DOWN.
        if (new_host != orig_host and
                orig_status == n_const.PORT_STATUS_ACTIVE and
                new_status == n_const.PORT_STATUS_DOWN):
            LOG.debug("Handling port migration for: %s " % orig_port)
            network_id = orig_port['network_id']
            tenant_id = orig_port['tenant_id'] or constants.INTERNAL_TENANT_ID
            # Ensure that we use tenant Id for the network owner
            tenant_id = self._network_owner_tenant(context, network_id,
                                                   tenant_id)
            device_id = orig_port['device_id']
            plugin_context = context._plugin_context
            port_provisioned = db_lib.is_port_provisioned(plugin_context,
                                                          port_id,
                                                          orig_host)
            if port_provisioned:
                db_lib.update_port(plugin_context,
                                   device_id, new_host, port_id,
                                   network_id, tenant_id)
            return True
    def _handle_port_migration_postcommit(self, context):
        """Handles port migration in postcommit

        In case of port migration, it removes the port from the original host
        and also it release the segment id if no port is attached to the same
        segment id that the port is attached to.

        Returns True when the update was a migration, otherwise None.
        """
        orig_port = context.original
        orig_host = context.original_host
        orig_status = context.original_status
        new_status = context.status
        new_host = context.host
        # Same migration detection as the precommit handler.
        if (new_host != orig_host and
                orig_status == n_const.PORT_STATUS_ACTIVE and
                new_status == n_const.PORT_STATUS_DOWN):
            self._try_to_release_dynamic_segment(context, migration=True)
            # Handling migration case.
            # 1. The port should be unplugged from network
            # 2. If segment_id is provisioned and it not bound to any port it
            #    should be removed from EOS.
            network_id = orig_port['network_id']
            tenant_id = orig_port['tenant_id'] or constants.INTERNAL_TENANT_ID
            # Ensure that we use tenant Id for the network owner
            tenant_id = self._network_owner_tenant(context, network_id,
                                                   tenant_id)
            for binding_level in context._original_binding_levels:
                if self._network_provisioned(
                        context, tenant_id, network_id,
                        segment_id=binding_level.segment_id):
                    # Removing the port form original host
                    self._delete_port(context, orig_port, orig_host, tenant_id,
                                      segments=[binding_level])
                    # If segment id is not bound to any port, then
                    # remove it from EOS
                    plugin_context = context._plugin_context
                    segment = self.ndb.get_segment_by_id(
                        plugin_context,
                        binding_level.segment_id)
                    if not segment:
                        try:
                            segment_info = [{
                                'id': binding_level.segment_id,
                                'network_id': network_id,
                            }]
                            LOG.debug("migration_postcommit:"
                                      "deleting segment %s", segment_info)
                            self.rpc.delete_network_segments(tenant_id,
                                                             segment_info)
                            # Remove the segment from the provisioned
                            # network DB.
                            db_lib.forget_network_segment(
                                plugin_context, tenant_id, network_id,
                                binding_level.segment_id)
                        except arista_exc.AristaRpcError:
                            # Best effort; sync will reconcile EOS later.
                            LOG.info(constants.EOS_UNREACHABLE_MSG)
            return True
    def update_port_precommit(self, context):
        """Record port updates in the driver DB before commit.

        Tracks the <port, host> association for bound ports on managed
        physnets, remembers any segments not yet provisioned, and hands
        migrations off to _handle_port_migration_precommit. Port names are
        not stored, so a pure name change only logs.
        """
        new_port = context.current
        orig_port = context.original
        if new_port['name'] != orig_port['name']:
            LOG.info(_LI('Port name changed to %s'), new_port['name'])
        device_id = new_port['device_id']
        host = context.host

        pretty_log("update_port_precommit: new", new_port)
        pretty_log("update_port_precommit: orig", orig_port)

        # Probe ports are transient; ignore them.
        if new_port['device_owner'] == 'compute:probe':
            return

        # Check if the port is part of managed physical network
        seg_info = self._bound_segments(context)
        if not seg_info:
            # Ignoring the update as the port is not managed by
            # arista mechanism driver.
            return

        # Check if it is port migration case
        if self._handle_port_migration_precommit(context):
            return

        # device_id and device_owner are set on VM boot
        port_id = new_port['id']
        network_id = new_port['network_id']
        tenant_id = new_port['tenant_id'] or constants.INTERNAL_TENANT_ID
        # Ensure that we use tenant Id for the network owner
        tenant_id = self._network_owner_tenant(context, network_id, tenant_id)

        plugin_context = context._plugin_context
        for seg in seg_info:
            if not self._network_provisioned(context, tenant_id, network_id,
                                             seg[api.SEGMENTATION_ID],
                                             seg[api.ID]):
                LOG.info(
                    _LI("Adding %s to provisioned network database"), seg)
                db_lib.remember_tenant(plugin_context, tenant_id)
                db_lib.remember_network_segment(
                    plugin_context, tenant_id, network_id,
                    seg[api.SEGMENTATION_ID], seg[api.ID])

        port_down = False
        if (new_port['device_owner'] ==
                n_const.DEVICE_OWNER_DVR_INTERFACE):
            # We care about port status only for DVR ports because
            # for DVR, a single port exists on multiple hosts. If a port
            # is no longer needed on a host then the driver gets a
            # port_update notification for that <port, host> with the
            # port status as PORT_STATUS_DOWN.
            port_down = context.status == n_const.PORT_STATUS_DOWN

        if host and not port_down:
            port_host_filter = None
            if (new_port['device_owner'] ==
                    n_const.DEVICE_OWNER_DVR_INTERFACE):
                # <port, host> uniquely identifies a DVR port. Other
                # ports are identified by just the port id
                port_host_filter = host

            port_provisioned = db_lib.is_port_provisioned(
                plugin_context,
                port_id, port_host_filter)

            if not port_provisioned:
                LOG.info("Remembering the port")
                # Create a new port in the DB
                db_lib.remember_tenant(plugin_context, tenant_id)
                db_lib.remember_vm(plugin_context,
                                   device_id, host, port_id,
                                   network_id, tenant_id)
            else:
                if (new_port['device_id'] != orig_port['device_id'] or
                        context.host != context.original_host or
                        new_port['network_id'] != orig_port['network_id'] or
                        new_port['tenant_id'] != orig_port['tenant_id']):
                    LOG.info("Updating the port")
                    # Port exists in the DB. Update it
                    db_lib.update_port(plugin_context, device_id, host,
                                       port_id,
                                       network_id, tenant_id)
        else:  # Unbound or down port does not concern us
            orig_host = context.original_host
            LOG.info("Forgetting the port on %s" % str(orig_host))
            db_lib.forget_port(plugin_context, port_id, orig_host)
def _port_updated(self, context):
"""Returns true if any port parameters have changed."""
new_port = context.current
orig_port = context.original
return (new_port['device_id'] != orig_port['device_id'] or
context.host != context.original_host or
new_port['network_id'] != orig_port['network_id'] or
new_port['tenant_id'] != orig_port['tenant_id'])
    def update_port_postcommit(self, context):
        """Update the name of a given port in EOS.

        At the moment we only support port name change.
        Any other change to port is not supported at this time.

        Also deletes the port from its old host on migration / DVR
        status change, and (re)plugs the port into the network on EOS
        when it is provisioned, bound and up.

        :param context: ML2 PortContext carrying current/original ports.
        """
        port = context.current
        orig_port = context.original
        device_id = port['device_id']
        device_owner = port['device_owner']
        host = context.host
        # Truthy only when the port is attached to a device.
        is_vm_boot = device_id and device_owner
        vnic_type = port['binding:vnic_type']
        binding_profile = port['binding:profile']
        bindings = []
        # Baremetal ports default to the 'native' VLAN type; all others
        # to 'allowed'. The binding profile may override either value.
        vlan_type = 'native' if vnic_type == 'baremetal' else 'allowed'
        if binding_profile:
            bindings = binding_profile.get('local_link_information', bindings)
            vlan_type = binding_profile.get('vlan_type', vlan_type)
        port_id = port['id']
        port_name = port['name']
        network_id = port['network_id']
        tenant_id = port['tenant_id'] or constants.INTERNAL_TENANT_ID
        # Ensure that we use tenant Id for the network owner
        tenant_id = self._network_owner_tenant(context, network_id, tenant_id)
        sg = port['security_groups']
        orig_sg = orig_port['security_groups']
        pretty_log("update_port_postcommit: new", port)
        pretty_log("update_port_postcommit: orig", orig_port)
        seg_info = self._bound_segments(context)
        if not seg_info:
            LOG.debug("Ignoring the update as the port %s is not managed by "
                      "Arista switches.", port_id)
            return
        # Check if it is port migration case
        if self._handle_port_migration_postcommit(context):
            # Return from here as port migration is already handled.
            return
        hostname = self._host_name(host)
        port_host_filter = None
        if port['device_owner'] == n_const.DEVICE_OWNER_DVR_INTERFACE:
            # <port, host> uniquely identifies a DVR port. Other
            # ports are identified by just the port id
            port_host_filter = host
        plugin_context = context._plugin_context
        port_provisioned = db_lib.is_port_provisioned(plugin_context, port_id,
                                                      port_host_filter)
        # If network does not exist under this tenant,
        # it may be a shared network. Get shared network owner Id
        net_provisioned = self._network_provisioned(context,
                                                    tenant_id, network_id)
        # Every bound segment must be provisioned for the network to
        # count as provisioned.
        for seg in seg_info:
            if not self._network_provisioned(context, tenant_id, network_id,
                                             segmentation_id=seg[
                                                 api.SEGMENTATION_ID]):
                net_provisioned = False
                break
        segments = []
        if net_provisioned:
            if self.rpc.hpb_supported():
                # Hierarchical port binding: push all of the network's
                # segments to CVX before plugging the port.
                segments = seg_info
                all_segments = self.ndb.get_all_network_segments(
                    plugin_context, network_id)
                try:
                    self.rpc.create_network_segments(
                        tenant_id, network_id,
                        context.network.current['name'], all_segments)
                except arista_exc.AristaRpcError:
                    LOG.error(_LE("Failed to create network segments"))
                    raise ml2_exc.MechanismDriverError()
            else:
                # For non HPB cases, the port is bound to the static
                # segment
                segments = self.ndb.get_network_segments(plugin_context,
                                                         network_id)
        try:
            orig_host = context.original_host
            port_down = False
            if port['device_owner'] == n_const.DEVICE_OWNER_DVR_INTERFACE:
                # We care about port status only for DVR ports
                port_down = context.status == n_const.PORT_STATUS_DOWN
            if orig_host and (port_down or host != orig_host):
                try:
                    LOG.info("Deleting the port %s" % str(orig_port))
                    # The port moved to a different host or the VM
                    # connected to the port was deleted or its in DOWN
                    # state. So delete the old port on the old host.
                    self._delete_port(context, orig_port, orig_host, tenant_id,
                                      segments=segments)
                except ml2_exc.MechanismDriverError:
                    # If deleting a port fails, then not much can be done
                    # about it. Log a warning and move on.
                    LOG.warning(constants.UNABLE_TO_DELETE_PORT_MSG)
            if port_provisioned and net_provisioned and hostname and \
                    is_vm_boot and not port_down:
                LOG.info(_LI("Port plugged into network"))
                # Plug port into the network only if it exists in the db
                # and is bound to a host and the port is up.
                self.rpc.plug_port_into_network(device_id,
                                                hostname,
                                                port_id,
                                                network_id,
                                                tenant_id,
                                                port_name,
                                                device_owner,
                                                sg, orig_sg,
                                                vnic_type,
                                                segments=segments,
                                                switch_bindings=bindings,
                                                vlan_type=vlan_type)
            else:
                LOG.info(_LI("Port not plugged into network"))
        except arista_exc.AristaRpcError as err:
            LOG.error(_LE('update_port_postcommit: Did not update '
                          'port %(port_id)s. Reason: %(err)s'),
                      {'port_id': port_id, 'err': err})
def delete_port_precommit(self, context):
"""Delete information about a VM and host from the DB."""
# Check if the port is part of managed physical network
seg_info = self._bound_segments(context)
if not seg_info:
# Ignoring the update as the port is not managed by
# arista mechanism driver.
return
port = context.current
pretty_log("delete_port_precommit:", port)
port_id = port['id']
host_id = context.host
if host_id:
plugin_context = context._plugin_context
db_lib.forget_port(plugin_context, port_id, host_id)
def delete_port_postcommit(self, context):
"""Unplug a physical host from a network.
Send provisioning request to Arista Hardware to unplug a host
from appropriate network.
"""
# Check if the port is part of managed physical network
seg_info = self._bound_segments(context)
if not seg_info:
# Ignoring the update as the port is not managed by
# arista mechanism driver.
return
port = context.current
host = context.host
network_id = port['network_id']
tenant_id = port['tenant_id'] or constants.INTERNAL_TENANT_ID
# Ensure that we use tenant Id for the network owner
tenant_id = self._network_owner_tenant(context, network_id, tenant_id)
pretty_log("delete_port_postcommit:", port)
# If this port is the last one using dynamic segmentation id,
# and the segmentation id was allocated by this driver, it needs
# to be released.
self._try_to_release_dynamic_segment(context)
try:
self._delete_port(context, port, host, tenant_id,
segments=seg_info)
self._delete_segment(context, tenant_id)
except ml2_exc.MechanismDriverError:
# Can't do much if deleting a port failed.
# Log a warning and continue.
LOG.warning(constants.UNABLE_TO_DELETE_PORT_MSG)
    def _delete_port(self, context, port, host, tenant_id, segments=None):
        """Deletes the port from EOS.

        param port: Port which is to be deleted
        param host: The host on which the port existed
        param tenant_id: The tenant to which the port belongs to. Some times
                         the tenant id in the port dict is not present (as in
                         the case of HA router).
        param segments: bound segments for the port; entries may be dicts
                        or snapshot objects (resolved below).
        """
        device_id = port['device_id']
        port_id = port['id']
        network_id = port['network_id']
        device_owner = port['device_owner']
        vnic_type = port['binding:vnic_type']
        binding_profile = port['binding:profile']
        switch_bindings = []
        if binding_profile:
            switch_bindings = binding_profile.get('local_link_information', [])
        sg = port['security_groups']
        if not device_id or not host:
            # Unbound or device-less port: nothing to unplug on EOS.
            LOG.warning(constants.UNABLE_TO_DELETE_DEVICE_MSG)
            return
        # sometimes segments are snapshot objects, let's resolve that here
        if segments:
            plugin_context = context._plugin_context
            for n, segment in enumerate(segments):
                # A snapshot exposes 'segment_id' but neither dict access
                # nor a 'segmentation_id' attribute; look it up in the DB.
                if not isinstance(segment, dict) and \
                        not hasattr(segment, 'segmentation_id') and \
                        hasattr(segment, 'segment_id'):
                    segments[n] = db_lib.get_segmentation_id_by_segment_id(
                        plugin_context,
                        segment.segment_id
                    )
        try:
            # Do not deprovision the network while another port of the
            # same baremetal device is still attached to it.
            device_ports = db_lib.get_bm_ports_for_device(
                context._plugin_context, device_id)
            port_net_in_use = False
            for device_port in device_ports:
                if device_port.id != port_id and \
                        device_port.network_id == network_id:
                    LOG.warning("Will not deprovision network %s on port %s "
                                "as port %s is still on this network",
                                network_id, port_id, device_port.id)
                    port_net_in_use = True
            if not cfg.CONF.ml2_arista.skip_unplug and not port_net_in_use:
                hostname = self._host_name(host)
                self.rpc.unplug_port_from_network(
                    device_id, device_owner, hostname, port_id, network_id,
                    tenant_id, sg, vnic_type, switch_bindings=switch_bindings,
                    segments=segments)
                if not cfg.CONF.ml2_arista.sec_group_background_only:
                    self.rpc.remove_security_group(sg, switch_bindings)
                # if necessary, delete tenant as well.
                self.delete_tenant(context, tenant_id)
        except arista_exc.AristaRpcError:
            LOG.info(constants.EOS_UNREACHABLE_MSG)
    def _delete_segment(self, context, tenant_id):
        """Deletes a dynamic network segment from EOS.

        Walks the port's binding levels and, for each segment that this
        driver provisioned and that Neutron has already released, tells
        CVX to delete it and forgets it in the local DB.

        param context: The port context
        param tenant_id: The tenant which the port belongs to
        """
        if not self.rpc.hpb_supported():
            # Returning as HPB not supported by CVX
            return
        port = context.current
        network_id = port.get('network_id')
        if not context._binding_levels:
            return
        plugin_context = context._plugin_context
        for binding_level in context._binding_levels:
            LOG.debug("deleting segment %s", binding_level.segment_id)
            if self._network_provisioned(context, tenant_id, network_id,
                                         segment_id=binding_level.segment_id):
                segment = self.ndb.get_segment_by_id(
                    plugin_context, binding_level.segment_id)
                if not segment:
                    # The segment is already released. Delete it from EOS
                    LOG.debug("Deleting segment %s", binding_level.segment_id)
                    try:
                        segment_info = {
                            'id': binding_level.segment_id,
                            'network_id': network_id,
                        }
                        self.rpc.delete_network_segments(tenant_id,
                                                         [segment_info])
                        # Remove the segment from the provisioned network DB.
                        db_lib.forget_network_segment(plugin_context,
                                                      tenant_id, network_id,
                                                      binding_level.segment_id)
                    except arista_exc.AristaRpcError:
                        LOG.info(constants.EOS_UNREACHABLE_MSG)
                else:
                    # Segment still exists in Neutron; keep it on EOS.
                    LOG.debug("Cannot delete segment_id %(segid)s "
                              "segment is %(seg)s",
                              {'segid': binding_level.segment_id,
                               'seg': segment})
    def _try_to_release_dynamic_segment(self, context, migration=False):
        """Release dynamic segment allocated by the driver

        If this port is the last port using the segmentation id allocated
        by the driver, it should be released

        :param migration: when True, look at the port's original host
            (the host it is migrating away from) instead of the current.
        """
        if migration:
            host = context.original_host
        else:
            host = context.host
        physnet_info = self.eapi.get_physical_network(host, context=context)
        physnet = physnet_info.get('physnet')
        if not physnet:
            return
        binding_levels = context.binding_levels
        LOG.debug("_try_release_dynamic_segment: "
                  "binding_levels=%(bl)s", {'bl': binding_levels})
        if not binding_levels:
            return
        # Find the VLAN segment on this physnet among the binding levels.
        segment_id = None
        bound_drivers = []
        for binding_level in binding_levels:
            bound_segment = binding_level.get(api.BOUND_SEGMENT)
            driver = binding_level.get(api.BOUND_DRIVER)
            bound_drivers.append(driver)
            if (bound_segment and
                    bound_segment.get('physical_network') == physnet and
                    bound_segment.get('network_type') == p_const.TYPE_VLAN):
                segment_id = bound_segment.get('id')
                break
        plugin_context = context._plugin_context
        # If the segment id is found and it is bound by this driver, and also
        # the segment id is not bound to any other port, release the segment.
        # When the Arista driver participates in port binding by allocating a
        # dynamic segment and then calling continue_binding, it should be the
        # second last driver in the bound drivers list.
        if (segment_id and bound_drivers[-2:-1] ==
                [constants.MECHANISM_DRV_NAME]):
            filters = {'segment_id': segment_id}
            result = db_lib.get_port_binding_level(plugin_context, filters)
            LOG.debug("Looking for entry with filters=%(filters)s "
                      "result=%(result)s ", {'filters': filters,
                                             'result': result})
            if not result:
                # The requested segment_id does not exist in the port binding
                # database. Release the dynamic segment.
                context.release_dynamic_segment(segment_id)
                LOG.debug("Released dynamic segment %(seg)s allocated "
                          "by %(drv)s", {'seg': segment_id,
                                         'drv': bound_drivers[-2]})
def delete_tenant(self, context, tenant_id):
"""delete a tenant from DB.
A tenant is deleted only if there is no network or VM configured
configured for this tenant.
"""
plugin_context = context._plugin_context
objects_for_tenant = (
db_lib.num_nets_provisioned(plugin_context, tenant_id) +
db_lib.num_vms_provisioned(plugin_context, tenant_id)
)
if not objects_for_tenant:
db_lib.forget_tenant(plugin_context, tenant_id)
try:
self.rpc.delete_tenant(tenant_id)
except arista_exc.AristaRpcError:
LOG.info(constants.EOS_UNREACHABLE_MSG)
raise ml2_exc.MechanismDriverError(method='delete_tenant')
def _host_name(self, hostname):
fqdns_used = cfg.CONF.ml2_arista['use_fqdn']
return hostname if fqdns_used else hostname.split('.')[0]
def _save_switch_configs_thread(self):
if not self.sync_service.is_member_id_valid():
LOG.info("Switch config save thread was started unnecessarily "
"in this process, stopping it")
self._config_save_loop.stop()
return
self.sync_service.save_switch_configs()
def _synchronization_thread(self):
if not self.sync_service.is_member_id_valid():
LOG.info("Synchronization thread was started unnecessarily "
"in this process, stopping it")
self.timer.stop()
return
self.sync_service.do_synchronize()
def stop_synchronization_thread(self):
if self.timer:
self.timer.stop()
self.timer = None
    @db_api.retry_db_errors
    def _cleanup_db(self, context):
        """Clean up any unnecessary entries in our DB.

        Bulk-deletes rows in the Arista provisioning tables that refer
        to ports or networks that no longer exist in Neutron.
        """
        session = context.session
        with session.begin(subtransactions=True):
            arista_vms = db.AristaProvisionedVms
            arista_nets = db.AristaProvisionedNets
            # DELETE FROM arista_provisioned_vms
            # WHERE arista_provisioned_vms.port_id NOT IN (
            #     SELECT ports.id FROM ports)
            all_ports = session.query(models_v2.Port.id)
            session.query(arista_vms). \
                filter(arista_vms.port_id.notin_(all_ports.subquery())). \
                delete(synchronize_session=False)
            # DELETE FROM arista_provisioned_nets
            # WHERE arista_provisioned_nets.network_id NOT IN (
            #     SELECT networks.id FROM networks)
            all_networks = session.query(models_v2.Network.id)
            session.query(arista_nets). \
                filter(arista_nets.network_id.notin_(all_networks.subquery())). \
                delete(synchronize_session=False)
def _network_provisioned(self, context, tenant_id, network_id,
segmentation_id=None, segment_id=None):
# If network does not exist under this tenant,
# it may be a shared network.
plugin_context = context._plugin_context
return db_lib.is_network_provisioned(
plugin_context, tenant_id, network_id, segmentation_id,
segment_id) or \
self.ndb.get_shared_network_owner_id(plugin_context, network_id)
    def create_security_group(self, context, sg):
        # Intentionally a no-op: ACLs are only pushed to EOS once the
        # group is actually in use (see update_security_group and
        # create_security_group_rule).
        pass
    def delete_security_group(self, context, sg):
        # Intentionally a no-op: nothing is removed from EOS here.
        pass
def update_security_group(self, context, sg):
if (cfg.CONF.ml2_arista.sec_group_background_only or
not self._is_security_group_used(context, sg['id'])):
return
try:
self.rpc.create_acl(context, sg)
except Exception:
msg = (_('Failed to create ACL on EOS %s') % sg)
LOG.exception(msg)
raise arista_exc.AristaSecurityGroupError(msg=msg)
def create_security_group_rule(self, context, sgr):
if (cfg.CONF.ml2_arista.sec_group_background_only or
not self._is_security_group_used(context,
sgr['security_group_id'])):
return
try:
self.rpc.create_acl_rule(context, sgr)
except Exception:
msg = (_('Failed to create ACL rule on EOS %s') % sgr)
LOG.exception(msg)
raise arista_exc.AristaSecurityGroupError(msg=msg)
def delete_security_group_rule(self, context, sgr_id):
if cfg.CONF.ml2_arista.sec_group_background_only:
return
if not sgr_id:
return
sgr = self.ndb.get_security_group_rule(context, sgr_id)
if not sgr:
return
if not self._is_security_group_used(context, sgr['security_group_id']):
return
try:
self.rpc.delete_acl_rule(sgr)
except Exception:
msg = (_('Failed to delete ACL rule on EOS %s') % sgr)
LOG.exception(msg)
raise arista_exc.AristaSecurityGroupError(msg=msg)
    @staticmethod
    def _is_security_group_used(context, security_group_id):
        """Return True if the group is bound to a provisioned port.

        Joins Neutron's SecurityGroupPortBinding table against this
        driver's AristaProvisionedVms table, so only ports managed by
        this driver count as "used".
        """
        sg_id = sg_db.SecurityGroupPortBinding.security_group_id
        port_id = sg_db.SecurityGroupPortBinding.port_id
        result = context.session.query(sg_id).filter(
            sg_id == security_group_id).join(
            db.AristaProvisionedVms, db.AristaProvisionedVms.port_id == port_id
        ).first()
        return result is not None
def cli():
    """Command-line utility: replay plug requests for bound ports.

    Reads ports bound by the Arista mechanism driver straight from the
    Neutron DB and re-sends ``plug_port_into_network`` for each one,
    either for the ports named via ``--port_id`` or for every bound
    port with ``--all_ports``. Work is fanned out per device over an
    eventlet GreenPool; native-VLAN ports are plugged before
    allowed-VLAN ones.
    """
    import json
    import six
    import sys
    from collections import defaultdict
    from neutron.db.models_v2 import Port
    from neutron.plugins.ml2.models import NetworkSegment
    from neutron.plugins.ml2.models import PortBindingLevel
    from oslo_config import cfg
    from sqlalchemy.orm import contains_eager, joinedload, relationship
    cfg.CONF.register_cli_opts([
        cfg.MultiStrOpt('port_id',
                        short='p',
                        default=[],
                        help=''),
        cfg.BoolOpt('all_ports',
                    default=False,
                    help='Should we sync all ports'),
    ])
    common_config.init(sys.argv[1:])
    if not cfg.CONF.all_ports and not cfg.CONF.port_id:
        LOG.error("Nothing to do, specify either port_id or all_ports")
        return
    context = get_admin_context()
    ndb = db_lib.NeutronNets()
    confg = cfg.CONF.ml2_arista
    confg.http_pool_block = True
    rpc = get_rpc_wrapper(confg)(ndb)
    # Wire up ad-hoc ORM relationships so the eager loading below works.
    Port.port_binding_levels = relationship(PortBindingLevel)
    PortBindingLevel.segment = relationship(NetworkSegment,
                                            lazy='subquery')
    # items maps device_id -> list of per-port argument tuples.
    items = defaultdict(list)
    with context.session.begin():
        session = context.session
        ports = session.query(Port). \
            join(Port.port_binding). \
            join(Port.port_binding_levels). \
            options(joinedload(Port.security_groups)). \
            filter(PortBindingLevel.driver == constants.MECHANISM_DRV_NAME). \
            options(contains_eager(Port.port_binding_levels))
        if cfg.CONF.port_id:
            ports = ports.filter(Port.id.in_(cfg.CONF.port_id))
        for port in ports:
            port_id = port.id
            device_id = port.device_id
            network_id = port.network_id
            port_name = port.name
            device_owner = port.device_owner
            binding = port.port_binding
            hostname = binding.host
            vnic_type = binding.vnic_type
            orig_sg = None
            tenant_id = port.tenant_id
            sg = [sg.security_group_id for sg in port.security_groups]
            binding_profile = json.loads(binding.profile)
            bindings = binding_profile.get('local_link_information', [])
            vlan_type = binding_profile.get('vlan_type', 'native')
            # Only keep binding levels bound by the arista driver.
            segments = [{'id': level.segment_id, 'level': level.level,
                         'physical_network': level.segment.physical_network,
                         'segmentation_id': level.segment.segmentation_id,
                         'network_type': level.segment.network_type,
                         'is_dynamic': level.segment.is_dynamic,
                         }
                        for level in port.port_binding_levels
                        if level.driver == 'arista'
                        ]
            items[device_id].append((hostname, port_id, network_id, tenant_id,
                                    port_name, device_owner, sg, orig_sg, vnic_type,
                                    segments, bindings, vlan_type))
    from eventlet.greenpool import GreenPool as Pool
    def plug(device_ports):
        # Replay plug requests for all ports of one device.
        device_id, ports = device_ports
        # Plug the ports, first the native, then allowed
        for hostname, port_id, network_id, tenant_id, \
                port_name, device_owner, sg, orig_sg, vnic_type, \
                segments, bindings, vlan_type in \
                sorted(ports, key=lambda x: x[-1] == 'allowed'):
            print('Node: {}: Port {} {}'
                  .format(device_id, port_id, vlan_type))
            rpc.plug_port_into_network(
                device_id, hostname, port_id, network_id, tenant_id, port_name,
                device_owner, sg, orig_sg, vnic_type,
                segments=segments,
                switch_bindings=bindings,
                vlan_type=vlan_type)
    p = Pool(8)
    # imap drives the pool; results are discarded.
    for item in p.imap(plug, six.iteritems(items)):
        pass
| 43.138462 | 81 | 0.578028 | 47,209 | 0.886121 | 0 | 0 | 1,513 | 0.028399 | 0 | 0 | 13,534 | 0.254036 |
cb3d6a46bfe63becf40c12dca772072e068d92b3 | 4,524 | py | Python | yamlfred/alfred_object.py | uchida/yamlfred | bbd3ff1c875aef095556aaa6b5838e1f2a3ec01e | [
"CC0-1.0"
] | null | null | null | yamlfred/alfred_object.py | uchida/yamlfred | bbd3ff1c875aef095556aaa6b5838e1f2a3ec01e | [
"CC0-1.0"
] | null | null | null | yamlfred/alfred_object.py | uchida/yamlfred | bbd3ff1c875aef095556aaa6b5838e1f2a3ec01e | [
"CC0-1.0"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
import os.path
import uuid
from yamlfred.utils import remove_default, merge_dicts
from yamlfred.utils import Include
defaults = {
'alfred.workflow.output.notification': {
'config': {'removeextension': False, 'output': 0, 'lastpathcomponent': False, 'onlyshowifquerypopulated': False, 'sticky': False},
'version': 0,
},
'alfred.workflow.trigger.hotkey': {
'config': {'leftcursor': False, 'argument': 0, 'relatedAppsMode': 0, 'action': 0, 'hotkey': 0, 'hotstring': '', 'hotmod': 0, 'modsmode': 0},
'version': 1, },
'alfred.workflow.action.openfile': {
'config': {},
'version': 1,
},
'alfred.workflow.input.keyword': {
'config': {'argumenttype': 0, 'withspace': True},
'version': 0,
},
'alfred.workflow.trigger.external': {
'config': {},
'version': 0,
},
'alfred.workflow.output.largetype': {
'version': 0,
},
'alfred.workflow.action.revealfile': {
'version': 0,
},
'alfred.workflow.input.filefilter': {
'config': {'scopes': [], 'includesystem': False, 'withspace': True, 'anchorfields': True, 'daterange': 0, 'types': []},
'version': 0,
},
'alfred.workflow.input.scriptfilter': {
'config': {'withspace': True, 'escaping': 102, 'script': '', 'argumenttype': 0, 'type': 0,
'queuedelaycustom': 3, 'queuedelayimmediatelyinitially': True, 'queuedelaymode': 0, 'queuemode': 1},
'version': 0,
},
'alfred.workflow.action.browseinalfred': {
'config': {},
'version': 0,
},
'alfred.workflow.trigger.action': {
'config': {'filetypes': [], 'acceptsmulti': False},
'version': 0,
},
'alfred.workflow.output.clipboard': {
'config': {'clipboardtext': '', 'autopaste': False},
'version': 0, },
'alfred.workflow.output.script': {
'config': {'escaping': 102, 'type': 0, 'script': '', 'concurrently': False},
'version': 0, },
'alfred.workflow.action.launchfiles': {
'config': {'paths': [], 'toggle': False},
'version': 0,
},
'alfred.workflow.trigger.contact': {
'config': {},
'version': 0,
},
'alfred.workflow.action.systemwebsearch': {
'config': {},
'version': 0,
},
'alfred.workflow.trigger.fallback': {
'config': {},
'version': 0,
},
'alfred.workflow.action.openurl': {
'config': {'utf8': True, 'plusspaces': False},
'version': 0,
},
'alfred.workflow.action.systemcommand': {
'config': {'command': 0, 'confirm': False},
'version': 1,
},
'alfred.workflow.action.itunescommand': {
'config': {'command': 0},
'version': 0,
},
'alfred.workflow.action.script': {
'config': {'escaping': 102, 'type': 0, 'script': '', 'concurrently': False},
'version': 0,
},
'alfred.workflow.action.applescript': {
'config': {'cachescript': False, 'applescript': ''},
'version': 0,
},
'alfred.workflow.action.terminalcommand': {
'config': {'escaping': 0},
'version': 0,
},
'alfred.workflow.trigger.remote': {
'config': {'argumenttype': 0, 'workflowonly': False},
'version': 0,
},
}
class AlfredObject(object):
def __init__(self, dic):
self.type = dic['type']
default = defaults[self.type] if self.type in defaults else {}
self.prop = merge_dicts(default, dic)
if 'uid' not in self.prop:
self.prop['uid'] = uuid.uuid4()
self.script_type = None
if self.type == 'alfred.workflow.action.applescript':
self.script_type = 'applescript'
elif self.type in ['alfred.workflow.input.scriptfilter',
'alfred.workflow.output.script',
'alfred.workflow.action.script']:
self.script_type = 'script'
return
def dump(self, script_dir='.'):
default = defaults[self.type] if self.type in defaults else {}
prop = remove_default(self.prop, default)
if self.script_type:
path = os.path.join(script_dir, self.prop['uid'])
with open(path, 'w') as f:
script = self.prop['config'].get(self.script_type)
f.write(script)
prop['config'][self.script_type] = Include(path)
return prop
| 34.015038 | 148 | 0.552387 | 1,133 | 0.250442 | 0 | 0 | 0 | 0 | 0 | 0 | 2,087 | 0.461317 |
cb3d6e4db58133f9a78f2d8575a0e3cd280dbefc | 8,231 | py | Python | project2/main.py | DroogieDroog/python | 98ab243638b10270bbce5c38c60abea8823fa9ab | [
"MIT"
] | null | null | null | project2/main.py | DroogieDroog/python | 98ab243638b10270bbce5c38c60abea8823fa9ab | [
"MIT"
] | null | null | null | project2/main.py | DroogieDroog/python | 98ab243638b10270bbce5c38c60abea8823fa9ab | [
"MIT"
] | null | null | null | """
pirple/python/project2/main.py
Project #2
Create a hangman game
"""
from os import system, name
from time import sleep
from random import randint
import string
def clear_screen():
# for windows
if name == 'nt':
_ = system('cls')
# for mac and linux(here, os.name is 'posix')
else:
_ = system('clear')
def reset_game():
return False, 0, 'C'
def exit_program():
print('Goodbye!')
return True
def play_game(num_players):
clear_screen()
done, guesses_used, status = reset_game()
while not done:
secret_word, known_letters, guess_letters = set_secret_word(num_players)
draw_board(guesses_used, known_letters, guess_letters)
while(True):
guesses_used, guess_letters, known_letters, status = take_a_guess(guesses_used, guess_letters, known_letters, secret_word)
if status == 'C':
draw_board(guesses_used, known_letters, guess_letters)
else:
break
yn = input('Would you like to play again (y/n)? ')
if yn.upper() == 'Y':
clear_screen()
done, guesses_used, status = reset_game()
else:
done = True
def set_secret_word(num_players):
# The computer selects a word in the 1-player game
if num_players == 1:
secret_word = auto_select_secret_word()
# Otherwise one of the players chooses a word
else:
secret_word = input_secret_word()
blank_list = []
for i in range(len(secret_word)):
blank_list.append('_ ')
return secret_word, blank_list, []
def auto_select_secret_word():
r = randint(0, len(auto_word_list))
auto_word = auto_word_list[r]
print('OK, the computer has picked a word. Let\'s play!')
sleep(1)
return auto_word
def input_secret_word():
print('Player 1 will choose a word for player 2 to guess.')
print('When you have one chosen, ask player 2 to look away, and stay that way until you give the OK.')
print('Ready?\n')
while(True):
input_word = input('Player 1, enter your word:\n').upper()
yn = input('You have chosen {}. Is that correct (y/n)? '.format(input_word))
if yn.upper() != 'Y':
print('Oops! All right, try again.\n')
continue
else:
print('OK, let\'s play!!')
break
return input_word
def draw_board(guesses_used, known_letters, guess_letters):
gallows = [[' |````', ' |````|'],
[' |', ' | |'], #1 guess
[' |', ' | O'], #2
[' |', ' | /', ' | /|', ' | /|\\'], #3, 4, 5
[' |', ' | |'], #6
[' |', ' | /', ' | / \\'], # 7, 8
[' |'],
['`````']
]
guess_codes = [[0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 0, 0, 0, 0],
[1, 1, 1, 2, 0, 0, 0, 0],
[1, 1, 1, 3, 0, 0, 0, 0],
[1, 1, 1, 3, 1, 0, 0, 0],
[1, 1, 1, 3, 1, 1, 0, 0],
[1, 1, 1, 3, 1, 2, 0, 0]
]
clear_screen()
print()
for i in range(8):
print(gallows[i][guess_codes[guesses_used][i]])
for k in known_letters:
print(k, end=' ')
print('\n')
if len(guess_letters) > 0:
print('Letters already guessed:')
for g in guess_letters:
print(g, end=' ')
print('\n')
def take_a_guess(guesses_used, guess_letters, known_letters, secret_word):
solved = False
while(True):
letter = input('Player 2, guess a letter or enter a ? to guess the word: ')
if letter not in string.ascii_letters and letter !='?':
print('You must enter a ? or a letter. Try again.')
else:
if letter == '?':
solved, guesses_used = guess_the_word(secret_word, guesses_used, known_letters, guess_letters)
if solved:
break
else:
repeat = False
for k in known_letters:
if (letter.upper() in k):
repeat = True
if repeat:
print('You already guessed {}! Guess again.'.format(letter.upper()))
else:
break
if not solved:
found = False
for i in range(len(secret_word)):
if letter.upper() == secret_word[i]:
blank_to_correct = known_letters[i].maketrans('_', letter.upper())
known_letters[i] = known_letters[i].translate(blank_to_correct)
found = True
if not found:
guess_letters, guesses_used, status = wrong_guess(letter.upper(), guess_letters, guesses_used, known_letters, secret_word)
else:
status = correct_guess(letter.upper(), known_letters, guesses_used, guess_letters)
else:
status = 'S'
return guesses_used, guess_letters, known_letters, status
def guess_the_word(secret_word, guesses_used, known_letters, guess_letters):
valid_guess = False
while not valid_guess:
guess_word = input('You think you know it, eh? Alrighty then, what\'s the secret word? ')
valid_guess = True
for letter in guess_word:
if letter not in string.ascii_letters:
valid_guess = False
if not valid_guess:
print('You must enter a word comprised only of letters. Try again.')
if guess_word.upper() == secret_word:
known_letters = []
for letter in secret_word:
known_letters.append('{} '.format(letter))
draw_board(guesses_used, known_letters, guess_letters)
print('You guessed it! Nice job.')
sleep(1)
solved = True
else:
print('Sorry, {} is not the secret word. Keep trying'.format(guess_word.upper()))
sleep(1)
guesses_used += 1
draw_board(guesses_used, known_letters, guess_letters)
solved = False
return solved, guesses_used
def wrong_guess(letter, guess_letters, guesses_used, known_letters, secret_word):
if letter in guess_letters:
print('You already guessed {}! Guess again.'.format(letter))
else:
print('Sorry! There\'s no {} in the word.'.format(letter))
guess_letters.append(letter)
guess_letters = list(set(guess_letters))
guess_letters.sort()
guesses_used += 1
sleep(1)
if guesses_used == 8:
draw_board(guesses_used, known_letters, guess_letters)
print('Oh, no! You\'re hanged! The secret word was:')
print(secret_word)
status = 'L'
else:
status = 'C'
return guess_letters, guesses_used, status
def correct_guess(letter, known_letters, guesses_used, guess_letters):
status = 'W'
for known in known_letters:
if '_' in known:
status = 'C'
if status == 'W':
draw_board(guesses_used, known_letters, guess_letters)
print('You got it! Well done!')
sleep(1)
else:
print('Good guess!'.format(letter))
sleep(1)
return status
def create_auto_word_list():
auto_word_list = []
word_file = open('/usr/share/dict/words', 'r')
words = word_file.read()
word_list = words.splitlines()
for word in word_list:
if (word[0] not in string.ascii_uppercase) and (len(word) >= 6):
auto_word_list.append(word.upper())
return auto_word_list
def main():
global auto_word_list
print('Hey, there! Wanna play some Hangman?')
done = False
while not done:
choice = input('Tell me the number of players (1 or 2), or enter an X to exit the game: ')
if choice.upper() == 'X':
done = exit_program()
elif choice in ('1', '2'):
auto_word_list = create_auto_word_list()
play_game(int(choice))
done = exit_program()
else:
print('Enter a 1, 2 or X only. Try again')
done = False
if __name__ == '__main__':
main()
| 29.187943 | 134 | 0.556312 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,570 | 0.190742 |
cb3f41020f501bc2a6867a921cdc8450a62111fc | 5,533 | py | Python | deepnlpf/core/plugin_manager.py | deepnlpf/deepnlpf | 6508ab1e8fd395575d606ee20223f25591541e25 | [
"Apache-2.0"
] | 3 | 2020-04-11T14:12:45.000Z | 2020-05-30T16:31:06.000Z | deepnlpf/core/plugin_manager.py | deepnlpf/deepnlpf | 6508ab1e8fd395575d606ee20223f25591541e25 | [
"Apache-2.0"
] | 34 | 2020-03-20T19:36:40.000Z | 2022-03-20T13:00:32.000Z | deepnlpf/core/plugin_manager.py | deepnlpf/deepnlpf | 6508ab1e8fd395575d606ee20223f25591541e25 | [
"Apache-2.0"
] | 1 | 2020-09-05T06:44:15.000Z | 2020-09-05T06:44:15.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import deepnlpf.log as log
from deepnlpf.core.util import Util
class PluginManager:
def __init__(self):
self.HOME = os.environ["HOME"]
self.PLUGIN_SERVER = "https://github.com/deepnlpf/"
self.PLUGIN_PATH = self.HOME + "/deepnlpf_data/plugins/"
self.EXTENSION = ".zip"
def load_plugin(self, plugin_name):
directory, module_name = os.path.split(plugin_name)
module_name = os.path.splitext(module_name)[0]
path = list(sys.path)
sys.path.insert(0, self.PLUGIN_PATH + plugin_name)
try:
module = __import__("plugin_%s" % module_name)
finally:
sys.path[:] = path # restore.
return module
def load_manifest(self):
file_manifest = "manifest"
plugins = []
for plugin in os.listdir(self.PLUGIN_PATH):
location = os.path.join(self.PLUGIN_PATH, plugin)
if not os.path.isdir(location) or not file_manifest + ".json" in os.listdir(
location
):
continue
path = self.PLUGIN_PATH + "/" + plugin + "/" + file_manifest + ".json"
plugins.append(Util().openfile_json(path))
return plugins
def call_plugin_nlp(self, plugin_name, document, pipeline):
plugin = self.load_plugin(plugin_name)
return plugin.Plugin(document, pipeline).run()
def call_plugin_db(
self, plugin_name, operation, collection, document=None, key=None
):
plugin = self.load_plugin(plugin_name)
log.logger.info("Plugin call: {}".format(plugin_name))
if operation == "insert":
result = plugin.Plugin().insert(collection, document)
elif operation == "select_one":
result = plugin.Plugin().select_one(collection, key)
elif operation == "select_all":
result = plugin.Plugin().select_all(collection)
elif operation == "select_all_key":
result = plugin.Plugin().select_all_key(collection, key)
elif operation == "update":
result = plugin.Plugin().update(collection, key, document)
elif operation == "delete":
result = plugin.Plugin().delete(collection, key)
return result
def install(self, plugin_name):
import zipfile
from homura import download # gestor fast download file.
# URL for download of plugin.
# https://github.com/deepnlpf/plugin_stanza/archive/master.zip
URL = (
self.PLUGIN_SERVER
+ "plugin_"
+ plugin_name
+ "/archive/master"
+ self.EXTENSION
)
# Path for save plugin.
PATH_DOWNLOAD_PLUGIN = (
self.PLUGIN_PATH + "plugin_" + plugin_name + "-master" + self.EXTENSION
)
# check folder plugin exist.
if not os.path.exists(self.PLUGIN_PATH):
os.makedirs(self.PLUGIN_PATH)
# Download plugin.
try:
print("Downloading plugin", plugin_name, "..")
# check url exists.
download(url=URL, path=PATH_DOWNLOAD_PLUGIN)
except Exception as err:
print("❗️Plugin no found!")
log.logger.error(err)
sys.exit(0)
# Extracting files plugin.
try:
fantasy_zip = zipfile.ZipFile(PATH_DOWNLOAD_PLUGIN)
fantasy_zip.extractall(self.PLUGIN_PATH)
fantasy_zip.close()
except Exception as err:
print("❗️Error extracting files!")
log.logger.error(err)
sys.exit(0)
# Config dir name plugin.
try:
os.rename(
self.PLUGIN_PATH + "plugin_" + plugin_name + "-master",
self.PLUGIN_PATH + plugin_name,
)
except Exception as err:
print("❗️Error config directory plugin!")
log.logger.error(err)
sys.exit(0)
# Install requirements.
try:
# Check in plugin file requirements.sh exist.
if os.path.isfile(self.PLUGIN_PATH + plugin_name + "/requeriments.sh"):
print("Install requirements..")
os.system(
"cd "
+ str(
self.PLUGIN_PATH
+ plugin_name
+ " && chmod 777 requeriments.sh && ./requeriments.sh"
)
)
except Exception as err:
print("❗Error when executing the requeriments.sh plugin file!")
log.logger.error(err)
sys.exit(0)
os.remove(PATH_DOWNLOAD_PLUGIN) # clear file zip.
print("🎉 Plugin", plugin_name, "installed!")
log.logger.info("Plugin installed: {}".format(plugin_name))
print("Path of installed plugins:", self.PLUGIN_PATH)
sys.exit(0)
def uninstall(self, plugin_name):
# Path for save plugin.
PATH_DOWNLOAD_PLUGIN = self.HOME + self.PLUGIN_PATH + plugin_name
try:
print("Uninstall plugin", plugin_name, "..")
os.remove(PATH_DOWNLOAD_PLUGIN)
print("Plugin", plugin_name, "unistalled!")
log.logger.info("Plugin unistalled: {}".format(plugin_name))
except Exception as err:
log.logger.error(err)
print("Plugin not found!")
def listplugins(self):
# Path for save plugin.
pass
| 33.533333 | 88 | 0.566962 | 5,415 | 0.975676 | 0 | 0 | 0 | 0 | 0 | 0 | 1,128 | 0.203243 |
cb419d792f99a890b09abc5b89a8052227391ccc | 2,628 | py | Python | features/steps/config.py | wooga/karajan | b0952f156d69206fdcb1d71bd42c227077da6fd2 | [
"MIT"
] | null | null | null | features/steps/config.py | wooga/karajan | b0952f156d69206fdcb1d71bd42c227077da6fd2 | [
"MIT"
] | null | null | null | features/steps/config.py | wooga/karajan | b0952f156d69206fdcb1d71bd42c227077da6fd2 | [
"MIT"
] | 2 | 2018-02-01T14:00:07.000Z | 2022-03-26T18:09:14.000Z | #
# Copyright 2017 Wooga GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from datetime import datetime
def get_conf(context):
    """Return the scenario-wide Karajan configuration, lazily initialising an
    empty skeleton (targets / aggregations / context) on first access."""
    if 'conf' not in context:
        context.conf = {section: {} for section in ('targets', 'aggregations', 'context')}
    return context.conf
def get_model_conf(context, model):
    """Look up the config section for a model kind ('target' or 'aggregation')
    by pluralising the kind name into its section key."""
    return get_conf(context).get('{}s'.format(model))
def get_aggregation_conf(context):
    """Shortcut for the 'aggregations' section of the scenario config."""
    return get_model_conf(context, 'aggregation')
def get_target_conf(context):
    """Shortcut for the 'targets' section of the scenario config."""
    return get_model_conf(context, 'target')
def get_context_conf(context):
    """Return the free-form 'context' section of the scenario config."""
    return get_conf(context).get('context')
def min_config():
    """Build the smallest valid Karajan configuration: a single target fed by
    a single aggregation, with an empty context section."""
    target = {
        'start_date': datetime.now(),
        'schema': 'test',
        'key_columns': [
            'key_column',
        ],
        'aggregated_columns': {
            'test_agg': {
                'test_val': None,
            },
        }
    }
    aggregation = {
        'query': "SELECT 'key' AS key_column, 'test_val' AS test_val FROM DUAL",
        'time_key': 'time_key'
    }
    return {
        'targets': {"test": target},
        'aggregations': {'test_agg': aggregation},
        'context': {},
    }
def min_dependency_config(dep_type):
    """Return the minimal dependency config for the given dependency type.

    dep_type is one of 'tracking', 'delta', 'task' or 'target'; anything else
    raises KeyError.
    """
    minimal_options = {
        'tracking': {'schema': 'test', 'table': 'test'},
        'delta': {'delta': 0},
        'task': {'dag_id': 'test', 'task_id': 'test'},
        'target': {'target': 'test'},
    }
    return dict(minimal_options[dep_type], type=dep_type)
| 29.863636 | 88 | 0.603881 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,482 | 0.563927 |
cb448d0916ea853ce729b12d6f5c70de1ea2acee | 2,786 | py | Python | hexa/plugins/connector_postgresql/tests/test_models.py | qgerome/openhexa-app | 8c9377b2ad972121d8e9575f5d52420212b52ed4 | [
"MIT"
] | 4 | 2021-07-19T12:53:21.000Z | 2022-01-26T17:45:02.000Z | hexa/plugins/connector_postgresql/tests/test_models.py | qgerome/openhexa-app | 8c9377b2ad972121d8e9575f5d52420212b52ed4 | [
"MIT"
] | 20 | 2021-05-17T12:27:06.000Z | 2022-03-30T11:35:26.000Z | hexa/plugins/connector_postgresql/tests/test_models.py | qgerome/openhexa-app | 8c9377b2ad972121d8e9575f5d52420212b52ed4 | [
"MIT"
] | 2 | 2021-09-07T04:19:59.000Z | 2022-02-08T15:33:29.000Z | from django import test
from hexa.user_management.models import Membership, Team, User
from ..models import Database, DatabasePermission, Table
class PermissionTest(test.TestCase):
    """Checks that `filter_for_user` querysets on Database/Table are scoped by
    team permissions and de-duplicated.

    Fixture layout: DB1 is granted to both TEAM1 and TEAM2 (and the regular
    user belongs to both teams, so a naive join would return DB1 twice);
    DB2 has no permissions at all; each database gets two tables.
    """

    @classmethod
    def setUpTestData(cls):
        # Two databases: only DB1 receives permissions below.
        cls.DB1 = Database.objects.create(
            hostname="host", username="user", password="pwd", database="db1"
        )
        cls.DB2 = Database.objects.create(
            hostname="host", username="user", password="pwd", database="db2"
        )
        cls.TEAM1 = Team.objects.create(name="Test Team1")
        cls.TEAM2 = Team.objects.create(name="Test Team2")
        # DB1 is shared with both teams — the source of potential duplicates.
        DatabasePermission.objects.create(database=cls.DB1, team=cls.TEAM1)
        DatabasePermission.objects.create(database=cls.DB1, team=cls.TEAM2)
        cls.USER_REGULAR = User.objects.create_user(
            "jim@bluesquarehub.com",
            "regular",
        )
        # The regular user is a member of both teams.
        Membership.objects.create(team=cls.TEAM1, user=cls.USER_REGULAR)
        Membership.objects.create(team=cls.TEAM2, user=cls.USER_REGULAR)
        cls.USER_SUPER = User.objects.create_user(
            "mary@bluesquarehub.com",
            "super",
            is_superuser=True,
        )
        # Two tables per database.
        for db in [cls.DB1, cls.DB2]:
            for i in range(2):
                Table.objects.create(name=f"table-{db.database}-{i}", database=db)

    def test_instance_dedup(self):
        """
        - user super see 2 db (all of them)
        - user regular see only test db 1, one time
        """
        self.assertEqual(
            list(
                Database.objects.filter_for_user(self.USER_REGULAR)
                .order_by("database")
                .values("database")
            ),
            [{"database": "db1"}],
        )
        self.assertEqual(
            list(
                Database.objects.filter_for_user(self.USER_SUPER)
                .order_by("database")
                .values("database")
            ),
            [{"database": "db1"}, {"database": "db2"}],
        )

    def test_table_dedup(self):
        """
        regular user can see 2 tables
        super user can see 4 tables
        """
        self.assertEqual(
            list(
                Table.objects.filter_for_user(self.USER_REGULAR)
                .order_by("name")
                .values("name")
            ),
            [{"name": "table-db1-0"}, {"name": "table-db1-1"}],
        )
        self.assertEqual(
            list(
                Table.objects.filter_for_user(self.USER_SUPER)
                .order_by("name")
                .values("name")
            ),
            [
                {"name": "table-db1-0"},
                {"name": "table-db1-1"},
                {"name": "table-db2-0"},
                {"name": "table-db2-1"},
            ],
        )
| 32.776471 | 82 | 0.524049 | 2,637 | 0.946518 | 0 | 0 | 1,143 | 0.410266 | 0 | 0 | 580 | 0.208184 |
cb4660aba1977432913e68e5b117b91b0389ccfc | 1,932 | py | Python | bcs-ui/backend/tests/components/test_bcs_api.py | kayinli/bk-bcs | 93a0856175f7b066ef835921572c1cac590dbd8e | [
"Apache-2.0"
] | 1 | 2021-11-16T08:15:13.000Z | 2021-11-16T08:15:13.000Z | bcs-ui/backend/tests/components/test_bcs_api.py | kayinli/bk-bcs | 93a0856175f7b066ef835921572c1cac590dbd8e | [
"Apache-2.0"
] | null | null | null | bcs-ui/backend/tests/components/test_bcs_api.py | kayinli/bk-bcs | 93a0856175f7b066ef835921572c1cac590dbd8e | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import pytest
from requests_mock import ANY
from backend.components.base import ComponentAuth
from backend.components.bcs_api import BcsApiClient
BCS_APIGW_TOKEN = 'example-auth-token'  # fake gateway token used by all tests below


# Auto-applied to every test in this module: inject the fake token into settings.
@pytest.fixture(autouse=True)
def setup_token(settings):
    # `settings` is the settings-override fixture (presumably pytest-django),
    # so the change is scoped to each test — TODO confirm.
    settings.BCS_APIGW_TOKEN = BCS_APIGW_TOKEN
class TestBcsApiClient:
    """Tests for BcsApiClient against a mocked HTTP backend (requests_mock).

    NOTE(review): `project_id` and `cluster_id` are fixtures defined elsewhere
    (likely a shared conftest) — not visible in this file.
    """

    def test_get_cluster_simple(self, project_id, cluster_id, requests_mock):
        # Any URL returns a fixed cluster payload.
        requests_mock.get(ANY, json={'id': 'foo-id'})
        client = BcsApiClient(ComponentAuth('fake_token'))
        result = client.query_cluster_id('stag', project_id, cluster_id)
        # The client should surface the 'id' field of the mocked response.
        assert result == 'foo-id'
        req_history = requests_mock.request_history[0]
        # Assert token was in request headers and access_token was in query string
        assert req_history.headers.get('Authorization') == f"Bearer {BCS_APIGW_TOKEN}"
        assert 'access_token=fake_token' in req_history.url

    def test_get_cluster_credentials(self, requests_mock):
        requests_mock.get(ANY, json={'name': 'foo'})
        client = BcsApiClient(ComponentAuth('fake_token'))
        resp = client.get_cluster_credentials('stag', 'fake-bcs-cluster-foo')
        # The mocked JSON body is returned as-is.
        assert resp == {'name': 'foo'}
| 40.25 | 115 | 0.745859 | 898 | 0.460513 | 0 | 0 | 103 | 0.052821 | 0 | 0 | 1,015 | 0.520513 |
cb469fd0baa70b22ad941da8c2a72d0f0b52a829 | 236 | py | Python | myproject/myproject/apps/sample/urls.py | gabfl/sample-django | e71e37d58f24c86001d4c11b8e0f9a9202313811 | [
"MIT"
] | 1 | 2018-03-27T07:09:50.000Z | 2018-03-27T07:09:50.000Z | myproject/myproject/apps/sample/urls.py | gabfl/sample-django | e71e37d58f24c86001d4c11b8e0f9a9202313811 | [
"MIT"
] | null | null | null | myproject/myproject/apps/sample/urls.py | gabfl/sample-django | e71e37d58f24c86001d4c11b8e0f9a9202313811 | [
"MIT"
] | 1 | 2018-03-27T07:09:51.000Z | 2018-03-27T07:09:51.000Z | from django.urls import path
from . import views
# Route table for the sample app: each entry maps a URL path to a view callable.
urlpatterns = [
    path('', views.index),  # app root
    path('hello', views.hello),
    path('world', views.world),
    path('users', views.users),  # user listing
    path('user/<int:user_id>', views.user),  # detail view; user_id coerced to int
]
| 19.666667 | 43 | 0.627119 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 43 | 0.182203 |
cb49e1326cd56c6448e56f7f68971d7cbde7addb | 760 | py | Python | CPGAN/options.py | xiangsheng1325/CPGAN | f05d7183a28601d5af849229766ddaf1e4c5cba8 | [
"MIT"
] | null | null | null | CPGAN/options.py | xiangsheng1325/CPGAN | f05d7183a28601d5af849229766ddaf1e4c5cba8 | [
"MIT"
] | null | null | null | CPGAN/options.py | xiangsheng1325/CPGAN | f05d7183a28601d5af849229766ddaf1e4c5cba8 | [
"MIT"
class SimpleOpt():
    """Plain container of the default CPGAN training settings."""

    def __init__(self):
        # Default hyper-parameters / run settings, applied as attributes.
        defaults = {
            'method': 'cpgan',
            'max_epochs': 100,
            'graph_type': 'ENZYMES',
            'data_dir': './data/facebook.graphs',
            'gpu': '2',
            'lr': 0.003,
            'encode_size': 16,
            'decode_size': 16,
            'pool_size': 10,
            'epochs_log': 1,
            'batch_size': 8,
            'random_seed': 123,
            'gen_times': 10,
            'gen_gamma': 10,
            'milestones': [4000, 8000, 12000],
        }
        for name, value in defaults.items():
            setattr(self, name, value)
class Options():
    """Entry point for option objects; only the 'simple' flavour exists today."""

    def __init__(self):
        # Alternative flavour would be 'argparser' (not implemented).
        self.opt_type = 'simple'

    @staticmethod
    def initialize(epoch_num=180):
        """Build a SimpleOpt whose max_epochs is overridden to epoch_num."""
        options = SimpleOpt()
        options.max_epochs = epoch_num
        return options
| 25.333333 | 48 | 0.565789 | 756 | 0.994737 | 0 | 0 | 128 | 0.168421 | 0 | 0 | 80 | 0.105263 |
cb4b06a0daa733982a8585a802efc451954cfd95 | 1,764 | py | Python | ops/data_utils.py | lunasara/learning3d | 6ddf529a1a69b780519688dd458aa4c75472ed92 | [
"MIT"
] | null | null | null | ops/data_utils.py | lunasara/learning3d | 6ddf529a1a69b780519688dd458aa4c75472ed92 | [
"MIT"
] | null | null | null | ops/data_utils.py | lunasara/learning3d | 6ddf529a1a69b780519688dd458aa4c75472ed92 | [
"MIT"
] | null | null | null | import torch
def mean_shift(template, source, p0_zero_mean, p1_zero_mean):
    """Optionally recenter two point clouds and record the undo transforms.

    template, source: [B, N, 3] point clouds. When the corresponding flag is
    set, the cloud is shifted to zero mean and its returned mean tensor is a
    [B, 4, 4] homogeneous transform (template gets +centroid, source gets
    -centroid); when the flag is off, the mean tensor stays a [B, 3, 3]
    identity and the cloud is returned unchanged.
    """
    identity = torch.eye(3).view(1, 3, 3)
    template_mean = identity.expand(template.size(0), 3, 3).to(template)  # [B, 3, 3]
    source_mean = identity.expand(source.size(0), 3, 3).to(source)  # [B, 3, 3]
    bottom_row = [[[0.0, 0.0, 0.0, 1.0]]]  # homogeneous padding row

    if p0_zero_mean:
        centroid0 = template.mean(dim=1)  # [B, N, 3] -> [B, 3]
        template_mean = torch.cat([template_mean, centroid0.unsqueeze(-1)], dim=2)
        pad = torch.tensor(bottom_row).repeat(template_mean.shape[0], 1, 1).to(template_mean)  # (Bx1x4)
        template_mean = torch.cat([template_mean, pad], dim=1)
        template = template - centroid0.unsqueeze(1)

    if p1_zero_mean:
        centroid1 = source.mean(dim=1)  # [B, N, 3] -> [B, 3]
        # Negative translation: undoing the source shift moves the other way.
        source_mean = torch.cat([source_mean, -centroid1.unsqueeze(-1)], dim=2)
        pad = torch.tensor(bottom_row).repeat(source_mean.shape[0], 1, 1).to(source_mean)  # (Bx1x4)
        source_mean = torch.cat([source_mean, pad], dim=1)
        source = source - centroid1.unsqueeze(1)

    return template, source, template_mean, source_mean
def postprocess_data(result, p0, p1, a0, a1, p0_zero_mean, p1_zero_mean):
    """Fold the mean-shift transforms back into the estimated poses.

    output' = trans(p0_m) * output * trans(-p1_m)
            = [I, p0_m;] * [R, t;] * [I, -p1_m;]
              [0, 1    ]   [0, 1 ]   [0, 1     ]

    result: dict with 'est_T' [B, 4, 4] and 'est_T_series' [M, B, 4, 4];
    a0/a1 are the [B, 4, 4] mean transforms from mean_shift. The dict is
    updated in place and also returned. p0/p1 are accepted for interface
    compatibility but unused.
    """
    final_T = result['est_T']
    if p0_zero_mean:
        final_T = a0.to(final_T).bmm(final_T)
    if p1_zero_mean:
        final_T = final_T.bmm(a1.to(final_T))
    result['est_T'] = final_T

    series_T = result['est_T_series']  # [M, B, 4, 4]
    if p0_zero_mean:
        series_T = a0.unsqueeze(0).contiguous().to(series_T).matmul(series_T)
    if p1_zero_mean:
        series_T = series_T.matmul(a1.unsqueeze(0).contiguous().to(series_T))
    result['est_T_series'] = series_T

    return result
| 38.347826 | 115 | 0.638889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 358 | 0.202948 |
cb4b79bd2a64b669c02e75a4cc2d59b49df0e3ce | 12,449 | py | Python | test/test_igp_shortcuts.py | tim-fiola/network_traffic_modeler_py3 | d2bd7287ea86fb22ec41c2522164d5761fc225dd | [
"Apache-2.0"
] | 102 | 2018-09-16T00:14:05.000Z | 2022-03-24T09:07:37.000Z | test/test_igp_shortcuts.py | tim-fiola/network_traffic_modeler_py3 | d2bd7287ea86fb22ec41c2522164d5761fc225dd | [
"Apache-2.0"
] | 17 | 2019-01-17T14:00:50.000Z | 2021-05-05T14:59:36.000Z | test/test_igp_shortcuts.py | tim-fiola/network_traffic_modeler_py3 | d2bd7287ea86fb22ec41c2522164d5761fc225dd | [
"Apache-2.0"
] | 10 | 2019-08-14T20:57:34.000Z | 2022-03-24T09:07:42.000Z | import unittest
from pyNTM import FlexModel
from pyNTM import ModelException
from pyNTM import PerformanceModel
class TestIGPShortcuts(unittest.TestCase):
    """Exercises IGP-shortcut routing in FlexModel: LSP/demand/interface
    traffic with shortcuts enabled, disabled per node, with manual LSP
    metrics, and with a direct end-to-end LSP. All scenarios load
    test/igp_shortcuts_model_mult_lsps_in_path.csv unless noted."""

    def test_traffic_on_shortcut_lsps(self):
        """
        Verify Interface and LSP traffic when IGP shortcuts enabled
        in baseline model.
        """
        # The demands should take LSPs starting on the first
        # node that has shortcuts and should take the LSP that
        # leads it closest to the demand destination
        model = FlexModel.load_model_file(
            "test/igp_shortcuts_model_mult_lsps_in_path.csv"
        )
        model.update_simulation()
        # Get all the interface objects
        int_a_b = model.get_interface_object("A-B", "A")
        int_b_c = model.get_interface_object("B-C", "B")
        int_c_d = model.get_interface_object("C-D", "C")
        int_d_e = model.get_interface_object("D-E", "D")
        int_e_f = model.get_interface_object("E-F", "E")
        int_a_g = model.get_interface_object("A-G", "A")
        int_g_f = model.get_interface_object("G-F", "G")
        # Get all LSP objects
        lsp_b_d_1 = model.get_rsvp_lsp("B", "D", "lsp_b_d_1")
        lsp_b_d_2 = model.get_rsvp_lsp("B", "D", "lsp_b_d_2")
        lsp_c_e_1 = model.get_rsvp_lsp("C", "E", "lsp_c_e_1")
        lsp_d_f_1 = model.get_rsvp_lsp("D", "F", "lsp_d_f_1")
        # Get demand objects
        dmd_a_f_1 = model.get_demand_object("A", "F", "dmd_a_f_1")
        dmd_d_f_1 = model.get_demand_object("D", "F", "dmd_d_f_1")
        # Verify traffic on LSPs
        self.assertEqual(lsp_b_d_1.traffic_on_lsp(model), 2.5)
        self.assertEqual(lsp_b_d_2.traffic_on_lsp(model), 2.5)
        self.assertEqual(lsp_c_e_1.traffic_on_lsp(model), 0)
        self.assertEqual(lsp_d_f_1.traffic_on_lsp(model), 13.0)
        # Verify demand paths
        self.assertIn([int_a_g, int_g_f], dmd_a_f_1.path)
        self.assertIn([int_a_b, lsp_b_d_1, lsp_d_f_1], dmd_a_f_1.path)
        self.assertIn([int_a_b, lsp_b_d_2, lsp_d_f_1], dmd_a_f_1.path)
        self.assertEqual(dmd_d_f_1.path, [[lsp_d_f_1]])
        # Verify interface traffic
        self.assertEqual(int_a_b.traffic, 5.0)
        self.assertEqual(int_b_c.traffic, 5.0)
        self.assertEqual(int_c_d.traffic, 5.0)
        self.assertEqual(int_d_e.traffic, 13.0)
        self.assertEqual(int_e_f.traffic, 13.0)
        self.assertEqual(int_a_g.traffic, 5.0)
        self.assertEqual(int_g_f.traffic, 5.0)
        # Verify LSPs on interfaces
        self.assertIn(lsp_b_d_1, int_b_c.lsps(model))
        self.assertIn(lsp_b_d_2, int_b_c.lsps(model))
        self.assertIn(lsp_b_d_1, int_c_d.lsps(model))
        self.assertIn(lsp_b_d_2, int_c_d.lsps(model))
        self.assertIn(lsp_b_d_2, int_c_d.lsps(model))
        self.assertIn(lsp_c_e_1, int_c_d.lsps(model))

    def test_igp_shortcut_node_attributes(self):
        """Node B should have igp_shortcuts_enabled True in the baseline model."""
        # The IGP shortcut attribute should be True
        model = FlexModel.load_model_file(
            "test/igp_shortcuts_model_mult_lsps_in_path.csv"
        )
        node_b = model.get_node_object("B")
        self.assertTrue(node_b.igp_shortcuts_enabled)

    # Remove igp_shortcuts_enabled on node B, traffic should appear on lsp_c_e_1
    # and disappear from lsp_b_d_1/2 and lsp_d_f_1
    def test_remove_shortcuts_node_b(self):
        """Disabling shortcuts on B moves dmd_a_f_1 onto lsp_c_e_1."""
        model = FlexModel.load_model_file(
            "test/igp_shortcuts_model_mult_lsps_in_path.csv"
        )
        node_b = model.get_node_object("B")
        node_b.igp_shortcuts_enabled = False
        model.update_simulation()
        # Get LSP objects
        lsp_b_d_1 = model.get_rsvp_lsp("B", "D", "lsp_b_d_1")
        lsp_b_d_2 = model.get_rsvp_lsp("B", "D", "lsp_b_d_2")
        lsp_c_e_1 = model.get_rsvp_lsp("C", "E", "lsp_c_e_1")
        lsp_d_f_1 = model.get_rsvp_lsp("D", "F", "lsp_d_f_1")
        dmd_a_f_1 = model.get_demand_object("A", "F", "dmd_a_f_1")
        dmd_d_f_1 = model.get_demand_object("D", "F", "dmd_d_f_1")
        # Half the traffic from dmd_a_f_1 should be on lsp_c_e_1
        self.assertEqual(lsp_c_e_1.traffic_on_lsp(model), 5.0)
        # dmd_a_f_1 should be the only demand on lsp_c_e_1
        self.assertEqual(lsp_c_e_1.demands_on_lsp(model), [dmd_a_f_1])
        # dmd_d_f_1 should be the only demand on lsp_d_f_1
        self.assertEqual(lsp_d_f_1.demands_on_lsp(model), [dmd_d_f_1])
        # LSPs from B to D should have no demands and no traffic
        self.assertEqual(lsp_b_d_1.demands_on_lsp(model), [])
        self.assertEqual(lsp_b_d_2.demands_on_lsp(model), [])
        self.assertEqual(lsp_b_d_1.traffic_on_lsp(model), 0)
        self.assertEqual(lsp_b_d_2.traffic_on_lsp(model), 0)

    def test_demands_no_shortcuts(self):
        """
        The demand should take the LSP if the IGP shortcut attribute is True on node B.
        When the IGP shortcut attribute is turned to False, the demand should
        only IGP route. Change all igp_shortcuts_enabled flags to False.
        Test LSP and Interface traffic.
        """
        model = FlexModel.load_model_file(
            "test/igp_shortcuts_model_mult_lsps_in_path.csv"
        )
        model.update_simulation()
        # Get all LSP objects
        lsp_b_d_1 = model.get_rsvp_lsp("B", "D", "lsp_b_d_1")
        lsp_b_d_2 = model.get_rsvp_lsp("B", "D", "lsp_b_d_2")
        lsp_c_e_1 = model.get_rsvp_lsp("C", "E", "lsp_c_e_1")
        lsp_d_f_1 = model.get_rsvp_lsp("D", "F", "lsp_d_f_1")
        # Get some node objects
        node_b = model.get_node_object("B")
        node_c = model.get_node_object("C")
        node_d = model.get_node_object("D")
        node_e = model.get_node_object("E")
        # Get LSP object
        dmd_d_f_1 = model.get_demand_object("D", "F", "dmd_d_f_1")
        # Set the node igp_shortcuts_enabled attribute to False
        node_b.igp_shortcuts_enabled = False
        node_c.igp_shortcuts_enabled = False
        node_d.igp_shortcuts_enabled = False
        node_e.igp_shortcuts_enabled = False
        model.update_simulation()
        # Only lsp_d_f_1 should have traffic/demands
        self.assertEqual(lsp_b_d_1.demands_on_lsp(model), [])
        self.assertEqual(lsp_b_d_2.demands_on_lsp(model), [])
        self.assertEqual(lsp_c_e_1.demands_on_lsp(model), [])
        self.assertEqual(lsp_b_d_1.traffic_on_lsp(model), 0)
        self.assertEqual(lsp_b_d_2.traffic_on_lsp(model), 0)
        self.assertEqual(lsp_c_e_1.traffic_on_lsp(model), 0)
        self.assertEqual(lsp_d_f_1.demands_on_lsp(model), [dmd_d_f_1])
        self.assertEqual(lsp_d_f_1.traffic_on_lsp(model), 8.0)

    def test_igp_shortcut_perf_model(self):
        """igp_shortcuts_enabled must be rejected by PerformanceModel."""
        model = PerformanceModel.load_model_file("test/igp_routing_topology.csv")
        node_a = model.get_node_object("A")
        node_a.igp_shortcuts_enabled = True
        err_msg = "igp_shortcuts_enabled not allowed in PerformanceModel, but present on these Nodes"
        with self.assertRaises(ModelException) as context:
            model.update_simulation()
        self.assertIn(err_msg, context.exception.args[0][1][0].keys())

    # If one LSP from B to D is assigned a lower metric, traffic should
    # not split at A
    def test_changed_metric(self):
        """manual_metric on lsp_b_d_1 steers traffic toward or away from it."""
        model = FlexModel.load_model_file(
            "test/igp_shortcuts_model_mult_lsps_in_path.csv"
        )
        # Get all the interface objects
        int_a_b = model.get_interface_object("A-B", "A")
        int_b_c = model.get_interface_object("B-C", "B")
        int_c_d = model.get_interface_object("C-D", "C")
        int_d_e = model.get_interface_object("D-E", "D")
        int_e_f = model.get_interface_object("E-F", "E")
        int_a_g = model.get_interface_object("A-G", "A")
        int_g_f = model.get_interface_object("G-F", "G")
        # Get all LSP objects
        lsp_b_d_1 = model.get_rsvp_lsp("B", "D", "lsp_b_d_1")
        lsp_b_d_2 = model.get_rsvp_lsp("B", "D", "lsp_b_d_2")
        lsp_c_e_1 = model.get_rsvp_lsp("C", "E", "lsp_c_e_1")
        lsp_d_f_1 = model.get_rsvp_lsp("D", "F", "lsp_d_f_1")
        # Get demand objects
        dmd_a_f_1 = model.get_demand_object("A", "F", "dmd_a_f_1")
        dmd_d_f_1 = model.get_demand_object("D", "F", "dmd_d_f_1")
        # Give lsp a lower than default metric
        lsp_b_d_1.manual_metric = 15
        model.update_simulation()
        dmd_path_1 = [int_a_b, lsp_b_d_1, lsp_d_f_1]
        # Confirm demand path
        self.assertIn(dmd_path_1, dmd_a_f_1.path)
        # Verify traffic on LSPs
        self.assertEqual(lsp_b_d_1.traffic_on_lsp(model), 10)
        self.assertEqual(lsp_b_d_2.traffic_on_lsp(model), 0)
        self.assertEqual(lsp_c_e_1.traffic_on_lsp(model), 0)
        self.assertEqual(lsp_d_f_1.traffic_on_lsp(model), 18.0)
        # Verify demand paths
        self.assertNotIn([int_a_g, int_g_f], dmd_a_f_1.path)
        self.assertIn([int_a_b, lsp_b_d_1, lsp_d_f_1], dmd_a_f_1.path)
        self.assertNotIn(lsp_b_d_2, dmd_a_f_1.path)
        self.assertEqual(dmd_d_f_1.path, [[lsp_d_f_1]])
        # Verify interface traffic
        self.assertEqual(int_a_b.traffic, 10.0)
        self.assertEqual(int_b_c.traffic, 10.0)
        self.assertEqual(int_c_d.traffic, 10.0)
        self.assertEqual(int_d_e.traffic, 18.0)
        self.assertEqual(int_e_f.traffic, 18.0)
        self.assertEqual(int_a_g.traffic, 0.0)
        self.assertEqual(int_g_f.traffic, 0.0)
        # Verify LSPs on interfaces
        self.assertIn(lsp_b_d_1, int_b_c.lsps(model))
        self.assertIn(lsp_b_d_2, int_b_c.lsps(model))
        self.assertIn(lsp_b_d_1, int_c_d.lsps(model))
        self.assertIn(lsp_b_d_2, int_c_d.lsps(model))
        self.assertIn(lsp_b_d_2, int_c_d.lsps(model))
        self.assertIn(lsp_c_e_1, int_c_d.lsps(model))
        # Give lsp_b_d_1 a higher than default metric
        lsp_b_d_1.manual_metric = 25
        model.update_simulation()
        dmd_path_2_1 = [int_a_g, int_g_f]
        dmd_path_2_2 = [int_a_b, lsp_b_d_2, lsp_d_f_1]
        # Confirm demand path
        self.assertIn(dmd_path_2_1, dmd_a_f_1.path)
        self.assertIn(dmd_path_2_2, dmd_a_f_1.path)
        # Verify traffic on LSPs
        self.assertEqual(lsp_b_d_1.traffic_on_lsp(model), 0)
        self.assertEqual(lsp_b_d_2.traffic_on_lsp(model), 5)
        self.assertEqual(lsp_c_e_1.traffic_on_lsp(model), 0)
        self.assertEqual(lsp_d_f_1.traffic_on_lsp(model), 13.0)
        # Verify demand paths
        self.assertIn([int_a_g, int_g_f], dmd_a_f_1.path)
        self.assertNotIn(lsp_b_d_1, dmd_a_f_1.path)
        self.assertIn([int_a_b, lsp_b_d_2, lsp_d_f_1], dmd_a_f_1.path)
        self.assertEqual(dmd_d_f_1.path, [[lsp_d_f_1]])
        # Verify interface traffic
        self.assertEqual(int_a_b.traffic, 5.0)
        self.assertEqual(int_b_c.traffic, 5.0)
        self.assertEqual(int_c_d.traffic, 5.0)
        self.assertEqual(int_d_e.traffic, 13.0)
        self.assertEqual(int_e_f.traffic, 13.0)
        self.assertEqual(int_a_g.traffic, 5.0)
        self.assertEqual(int_g_f.traffic, 5.0)
        # Verify LSPs on interfaces
        self.assertIn(lsp_b_d_1, int_b_c.lsps(model))
        self.assertIn(lsp_b_d_2, int_b_c.lsps(model))
        self.assertIn(lsp_b_d_1, int_c_d.lsps(model))
        self.assertIn(lsp_b_d_2, int_c_d.lsps(model))
        self.assertIn(lsp_b_d_2, int_c_d.lsps(model))
        self.assertIn(lsp_c_e_1, int_c_d.lsps(model))

    # If an LSP from A to F is added, all traffic should take that LSP
    def test_direct_lsp_preemption(self):
        """A direct A->F LSP should absorb all of dmd_a_f_1's traffic."""
        model = FlexModel.load_model_file(
            "test/igp_shortcuts_model_mult_lsps_in_path.csv"
        )
        dmd_a_f_1 = model.get_demand_object("A", "F", "dmd_a_f_1")
        model.add_rsvp_lsp("A", "F", "lsp_a_f_1")
        lsp_a_f_1 = model.get_rsvp_lsp("A", "F", "lsp_a_f_1")
        lsp_b_d_1 = model.get_rsvp_lsp("B", "D", "lsp_b_d_1")
        int_a_g = model.get_interface_object("A-G", "A")
        int_a_b = model.get_interface_object("A-B", "A")
        model.update_simulation()
        # Make sure dmd_a_f_1 takes lsp_a_f_1
        self.assertEqual(lsp_a_f_1.demands_on_lsp(model), [dmd_a_f_1])
        self.assertEqual(lsp_a_f_1.traffic_on_lsp(model), 10)
        self.assertEqual(lsp_b_d_1.traffic_on_lsp(model), 0)
        self.assertEqual(lsp_b_d_1.demands_on_lsp(model), [])
        # lsp_a_f_1 will take path with fewest hops
        self.assertEqual(int_a_g.traffic, 10)
        self.assertEqual(int_a_b.traffic, 0)
| 40.288026 | 101 | 0.662383 | 12,334 | 0.990762 | 0 | 0 | 0 | 0 | 0 | 0 | 2,910 | 0.233754 |
cb4d4cf11ac360c8c881c5f2263fff2c5582e3f9 | 299 | py | Python | Cracking the Coding Interview/ctci-solutions-master/ch-06-math-and-logic-puzzles/07-the-apocalypse.py | nikku1234/Code-Practise | 94eb6680ea36efd10856c377000219285f77e5a4 | [
"Apache-2.0"
] | 9 | 2020-07-02T06:06:17.000Z | 2022-02-26T11:08:09.000Z | Cracking the Coding Interview/ctci-solutions-master/ch-06-math-and-logic-puzzles/07-the-apocalypse.py | nikku1234/Code-Practise | 94eb6680ea36efd10856c377000219285f77e5a4 | [
"Apache-2.0"
] | 1 | 2021-11-04T17:26:36.000Z | 2021-11-04T17:26:36.000Z | Cracking the Coding Interview/ctci-solutions-master/ch-06-math-and-logic-puzzles/07-the-apocalypse.py | nikku1234/Code-Practise | 94eb6680ea36efd10856c377000219285f77e5a4 | [
"Apache-2.0"
] | 8 | 2021-01-31T10:31:12.000Z | 2022-03-13T09:15:55.000Z | # What will the gender ratio be after every family stops having children after
# after they have a girl and not until then.
def birth_ratio():
    """Expected boy:girl ratio when every family has children until their
    first girl, then stops.

    Each birth is an independent 50/50 event regardless of the stopping
    rule, so boys and girls are born in equal numbers — the ratio is 1:1.
    """
    return 1
| 33.222222 | 78 | 0.725753 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 259 | 0.866221 |
cb4daf0e2bd9267bcf8579574005098133b4355c | 376 | py | Python | tests/test_transform_identity.py | dlshriver/Queryable | 0975f2687213958121ea4ac7efbaee9030193917 | [
"MIT"
] | 5 | 2016-04-04T16:49:11.000Z | 2020-10-12T13:00:06.000Z | tests/test_transform_identity.py | dlshriver/Queryable | 0975f2687213958121ea4ac7efbaee9030193917 | [
"MIT"
] | 3 | 2016-04-08T16:13:54.000Z | 2022-02-03T15:23:48.000Z | tests/test_transform_identity.py | dlshriver/pinq | 0975f2687213958121ea4ac7efbaee9030193917 | [
"MIT"
] | null | null | null | import unittest
from pinq.transforms import identity
class predicate_true_tests(unittest.TestCase):
    """Checks that pinq's identity transform returns its argument unchanged."""

    def test_identity_int(self):
        number = 123
        self.assertEqual(identity(number), number)

    def test_identity_str(self):
        word = "apple"
        self.assertEqual(identity(word), word)

    def test_identity_list(self):
        items = ["great", 100029]
        self.assertEqual(identity(items), items)
| 25.066667 | 72 | 0.710106 | 320 | 0.851064 | 0 | 0 | 0 | 0 | 0 | 0 | 28 | 0.074468 |