from django.urls import path
from . import views
urlpatterns = [
    # path for signing up a new user
    # full path is auth/signup/, but the auth prefix is already in the root urls (yatube)
    path('signup/', views.SignUp.as_view(), name='signup'),
]
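
# The route above references a class-based SignUp view. A minimal sketch of what such a
# view could look like (hypothetical -- the project's actual views.py is not shown here;
# it would normally live in the app's views.py, not in this urls.py):
#
#     from django.contrib.auth.forms import UserCreationForm
#     from django.urls import reverse_lazy
#     from django.views.generic import CreateView
#
#     class SignUp(CreateView):
#         form_class = UserCreationForm
#         success_url = reverse_lazy('login')
#         template_name = 'signup.html'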
|
# Code from Chapter 10 of Machine Learning: An Algorithmic Perspective (2nd Edition)
# by Stephen Marsland (http://stephenmonika.net)
# You are free to use, change, or redistribute the code in any way you wish for
# non-commercial purposes, but please maintain the name of the original author.
# This code comes with no warranty of any kind.
# Stephen Marsland, 2008, 2014
# The four peaks fitness function
import numpy as np
def fourpeaks(population):
T = 15
start = np.zeros((np.shape(population)[0],1))
finish = np.zeros((np.shape(population)[0],1))
fitness = np.zeros((np.shape(population)[0],1))
for i in range(np.shape(population)[0]):
s = np.where(population[i,:]==1)
f = np.where(population[i,:]==0)
if np.size(s)>0:
start = s[0][0]
else:
start = 0
if np.size(f)>0:
finish = np.shape(population)[1] - f[-1][-1] -1
else:
finish = 0
if start>T and finish>T:
fitness[i] = np.maximum(start,finish)+100
else:
fitness[i] = np.maximum(start,finish)
fitness = np.squeeze(fitness)
return fitness
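
# Example usage (not part of the original file): evaluate the four-peaks fitness of a
# small random binary population; rows are individuals, columns are bits.
if __name__ == '__main__':
    population = (np.random.rand(5, 30) < 0.5).astype(int)
    print(fourpeaks(population))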
|
# ----------------------------------------
# Normalized Diversification
# NDiv loss implemented in Pytorch
# ----------------------------------------
import numpy as np
import torch
import torch.nn.functional as F
def compute_pairwise_distance(x):
''' computation of pairwise distance matrix
---- Input
- x: input tensor torch.Tensor [(bs), sample_num, dim_x]
---- Return
- matrix: output matrix torch.Tensor [(bs), sample_num, sample_num]
'''
if len(x.shape) == 2:
matrix = torch.norm(x[:,None,:] - x[None,:,:], p = 2, dim = 2)
elif len(x.shape) == 3:
matrix = torch.norm(x[:,:,None,:] - x[:,None,:,:], p = 2, dim = 3)
else:
raise NotImplementedError
return matrix
def compute_norm_pairwise_distance(x):
''' computation of normalized pairwise distance matrix
---- Input
- x: input tensor torch.Tensor [(bs), sample_num, dim_x]
---- Return
- matrix: output matrix torch.Tensor [(bs), sample_num, sample_num]
'''
x_pair_dist = compute_pairwise_distance(x)
normalizer = torch.sum(x_pair_dist, dim = -1)
x_norm_pair_dist = x_pair_dist / (normalizer[...,None] + 1e-12).detach()
#print('x_pair',x_pair_dist.shape,'normalizer',normalizer.shape)
return x_norm_pair_dist
def NDiv_loss(z, y, alpha=0.8):
''' NDiv loss function.
---- Input
- z: latent samples after embedding h_Z: torch.Tensor [(bs), sample_num, dim_z].
- y: corresponding outputs after embedding h_Y: torch.Tensor [(bs), sample_num, dim_y].
- alpha: hyperparameter alpha in NDiv loss.
---- Return
- loss: normalized diversity loss. torch.Tensor [(bs)]
'''
z=z.view(z.shape[0],-1)
y=y.view(y.shape[0],-1)
#print(z.shape,y.shape)
S = z.shape[-2] # sample number
y_norm_pair_dist = compute_norm_pairwise_distance(y)
z_norm_pair_dist = compute_norm_pairwise_distance(z)
#print('y_norm',y_norm_pair_dist.shape,'z_norm',z_norm_pair_dist.shape)
ndiv_loss_matrix = F.relu(z_norm_pair_dist * alpha - y_norm_pair_dist)
ndiv_loss = ndiv_loss_matrix.sum(-1).sum(-1) / (S * (S - 1))
#exit()
return ndiv_loss
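
# Example usage (illustrative, not from the original file): compute the NDiv loss for a
# batch-free case with 8 latent samples and their 8 corresponding outputs.
if __name__ == '__main__':
    torch.manual_seed(0)
    z = torch.randn(8, 16)    # latent samples after embedding h_Z
    y = torch.randn(8, 128)   # corresponding outputs after embedding h_Y
    print(NDiv_loss(z, y, alpha=0.8).item())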
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Assistant.nationality'
db.add_column('assistant', 'nationality',
self.gf('django.db.models.fields.CharField')(default='', max_length=16),
keep_default=False)
# Adding field 'Assistant.birthplace'
db.add_column('assistant', 'birthplace',
self.gf('django.db.models.fields.CharField')(default='', max_length=24),
keep_default=False)
# Adding field 'Assistant.constellation'
db.add_column('assistant', 'constellation',
self.gf('django.db.models.fields.CharField')(default='', max_length=8),
keep_default=False)
# Adding field 'Assistant.measurements'
db.add_column('assistant', 'measurements',
self.gf('django.db.models.fields.CharField')(default='', max_length=24),
keep_default=False)
# Adding field 'Assistant.experience'
db.add_column('assistant', 'experience',
self.gf('django.db.models.fields.IntegerField')(default=1),
keep_default=False)
# Adding field 'Assistant.favoriteplayers'
db.add_column('assistant', 'favoriteplayers',
self.gf('django.db.models.fields.CharField')(default='', max_length=64),
keep_default=False)
# Adding field 'Assistant.selfintroduce'
db.add_column('assistant', 'selfintroduce',
self.gf('django.db.models.fields.CharField')(default='', max_length=1024),
keep_default=False)
# Adding field 'Assistant.bestperformance'
db.add_column('assistant', 'bestperformance',
self.gf('django.db.models.fields.CharField')(default='', max_length=128),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Assistant.nationality'
db.delete_column('assistant', 'nationality')
# Deleting field 'Assistant.birthplace'
db.delete_column('assistant', 'birthplace')
# Deleting field 'Assistant.constellation'
db.delete_column('assistant', 'constellation')
# Deleting field 'Assistant.measurements'
db.delete_column('assistant', 'measurements')
# Deleting field 'Assistant.experience'
db.delete_column('assistant', 'experience')
# Deleting field 'Assistant.favoriteplayers'
db.delete_column('assistant', 'favoriteplayers')
# Deleting field 'Assistant.selfintroduce'
db.delete_column('assistant', 'selfintroduce')
# Deleting field 'Assistant.bestperformance'
db.delete_column('assistant', 'bestperformance')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'access_token': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'null': 'True'}),
'avatar': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'null': 'True'}),
'cellphone': ('django.db.models.fields.CharField', [], {'max_length': '11', 'null': 'True', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'expire_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'default': "'m'", 'max_length': '1', 'null': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'nickname': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'null': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'refresh_token': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'null': 'True'}),
'site_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64', 'null': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'billiards.assistant': {
'Meta': {'object_name': 'Assistant', 'db_table': "'assistant'"},
'bestperformance': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'birthday': ('django.db.models.fields.DateField', [], {}),
'birthplace': ('django.db.models.fields.CharField', [], {'max_length': '24'}),
'constellation': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
'dress': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'drinks': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'experience': ('django.db.models.fields.IntegerField', [], {}),
'favoriteplayers': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'figure': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'food': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'gender': ('billiards.models.IntegerChoiceTypeField', [], {'default': '1'}),
'haircolor': ('billiards.models.ChoiceTypeField', [], {'max_length': '16'}),
'height': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interest': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'language': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'measurements': ('django.db.models.fields.CharField', [], {'max_length': '24'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '24'}),
'nationality': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'nickname': ('django.db.models.fields.CharField', [], {'max_length': '24'}),
'occupation': ('django.db.models.fields.CharField', [], {'max_length': '24'}),
'pubichair': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'scent': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'selfintroduce': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'state': ('billiards.models.IntegerChoiceTypeField', [], {'default': '1'}),
'uuid': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'})
},
u'billiards.assistantappointment': {
'Meta': {'object_name': 'AssistantAppointment', 'db_table': "'assistant_appoinment'"},
'assitant': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.Assistant']"}),
'createdDate': ('django.db.models.fields.DateTimeField', [], {}),
'duration': ('django.db.models.fields.IntegerField', [], {}),
'endtime': ('django.db.models.fields.DateTimeField', [], {}),
'goods': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.Goods']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'poolroom': ('django.db.models.fields.IntegerField', [], {'blank': 'True'}),
'price': ('django.db.models.fields.IntegerField', [], {}),
'starttime': ('django.db.models.fields.DateTimeField', [], {}),
'state': ('billiards.models.IntegerChoiceTypeField', [], {'default': '1'}),
'transaction': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.Transaction']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'billiards.assistantimage': {
'Meta': {'object_name': 'AssistantImage', 'db_table': "'assistant_images'"},
'assistant': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.Assistant']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'imagepath': ('django.db.models.fields.files.ImageField', [], {'max_length': '250'}),
'iscover': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'})
},
u'billiards.assistantoffer': {
'Meta': {'object_name': 'AssistantOffer', 'db_table': "'assistant_offer'"},
'assistant': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.Assistant']"}),
'day': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'endtime': ('django.db.models.fields.TimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'poolroom': ('django.db.models.fields.IntegerField', [], {'blank': 'True'}),
'price': ('django.db.models.fields.IntegerField', [], {}),
'starttime': ('django.db.models.fields.TimeField', [], {}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'})
},
u'billiards.challenge': {
'Meta': {'object_name': 'Challenge', 'db_table': "'challenge'"},
'expiretime': ('django.db.models.fields.DateTimeField', [], {}),
'geolocation': ('geosimple.fields.GeohashField', [], {'db_index': 'True', 'max_length': '12', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': u"orm['billiards.Group']", 'db_column': "'group'"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issuer': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'issuer_contact': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'issuer_nickname': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'lat': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '11', 'decimal_places': '7'}),
'lat_baidu': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '11', 'decimal_places': '7'}),
'level': ('billiards.models.ChoiceTypeField', [], {'max_length': '12'}),
'lng': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '11', 'decimal_places': '7'}),
'lng_baidu': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '11', 'decimal_places': '7'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'participant_count': ('billiards.models.IntegerChoiceTypeField', [], {'default': '1'}),
'poolroom': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.Poolroom']", 'db_column': "'poolroom'"}),
'rule': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'source': ('billiards.models.IntegerChoiceTypeField', [], {'default': '1'}),
'starttime': ('django.db.models.fields.DateTimeField', [], {}),
'status': ('billiards.models.ChoiceTypeField', [], {'default': "'waiting'", 'max_length': '7'}),
'tabletype': ('billiards.models.ChoiceTypeField', [], {'max_length': '10'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'uuid': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'})
},
u'billiards.challengeapply': {
'Meta': {'unique_together': "(('challenge', 'user'),)", 'object_name': 'ChallengeApply', 'db_table': "'challenge_apply'"},
'applytime': ('django.db.models.fields.DateTimeField', [], {}),
'challenge': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.Challenge']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('billiards.models.ChoiceTypeField', [], {'default': "'submitted'", 'max_length': '10'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'billiards.coupon': {
'Meta': {'object_name': 'Coupon', 'db_table': "'coupon'"},
'description': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'discount': ('django.db.models.fields.IntegerField', [], {'max_length': '3'}),
'enddate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'poolroom': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.Poolroom']"}),
'startdate': ('django.db.models.fields.DateField', [], {}),
'status': ('billiards.models.IntegerChoiceTypeField', [], {'default': '1'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'type': ('billiards.models.IntegerChoiceTypeField', [], {'default': '1'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'billiards.event': {
'Meta': {'object_name': 'Event', 'db_table': "'event'"},
'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'enddate': ('django.db.models.fields.DateField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'month': ('django.db.models.fields.IntegerField', [], {}),
'pagename': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'picAD': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'startdate': ('django.db.models.fields.DateField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'titleabbrev': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'year': ('django.db.models.fields.IntegerField', [], {})
},
u'billiards.eventcode': {
'Meta': {'object_name': 'EventCode', 'db_table': "'eventcode'"},
'chargecode': ('django.db.models.fields.CharField', [], {'default': "'QP0UTOE'", 'max_length': '10'}),
'createdtime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 10, 12, 0, 0)'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.Event']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'poolroom': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.Poolroom']"}),
'used': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'usedtime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'userid': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
u'billiards.goods': {
'Meta': {'object_name': 'Goods', 'db_table': "'goods'"},
'description': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'price': ('billiards.models.CurrencyField', [], {'max_digits': '10', 'decimal_places': '2'}),
'sku': ('django.db.models.fields.CharField', [], {'default': "'BCOJO58V1W4QDPCYZWX8GVGZNJC8RTLL'", 'max_length': '32'}),
'state': ('billiards.models.IntegerChoiceTypeField', [], {'default': '1'}),
'type': ('billiards.models.IntegerChoiceTypeField', [], {'default': '1'})
},
u'billiards.group': {
'Meta': {'object_name': 'Group', 'db_table': "'fans_group'"},
'cardimg': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'})
},
u'billiards.match': {
'Meta': {'object_name': 'Match', 'db_table': "'match'"},
'bonus': ('django.db.models.fields.FloatField', [], {}),
'bonusdetail': ('django.db.models.fields.TextField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'enrollfee': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'enrollfocal': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'organizer': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': u"orm['billiards.Group']", 'db_column': "'organizer'"}),
'otherprize': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'poolroom': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.Poolroom']"}),
'rechargeablecard': ('django.db.models.fields.FloatField', [], {}),
'rule': ('django.db.models.fields.TextField', [], {}),
'starttime': ('django.db.models.fields.DateTimeField', [], {}),
'status': ('billiards.models.ChoiceTypeField', [], {'default': "'approved'", 'max_length': '10'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'type': ('billiards.models.IntegerChoiceTypeField', [], {'default': '1'})
},
u'billiards.matchenroll': {
'Meta': {'object_name': 'MatchEnroll', 'db_table': "'match_enroll'"},
'enrolltime': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'match': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.Match']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'billiards.membership': {
'Meta': {'object_name': 'Membership', 'db_table': "'membership'"},
'cellphone': ('django.db.models.fields.CharField', [], {'max_length': '11'}),
'gender': ('billiards.models.IntegerChoiceTypeField', [], {'default': '1'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'joindate': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 10, 12, 0, 0)'}),
'memberid': ('django.db.models.fields.CharField', [], {'default': "'31476480290'", 'max_length': '20'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'targetid': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': u"orm['billiards.Group']", 'db_column': "'target_group'"}),
'userid': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wechatid': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
u'billiards.payaccount': {
'Meta': {'object_name': 'PayAccount', 'db_table': "'payaccount'"},
'email': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'pid': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'type': ('billiards.models.IntegerChoiceTypeField', [], {'default': '1'})
},
u'billiards.poolroom': {
'Meta': {'object_name': 'Poolroom', 'db_table': "'poolroom'"},
'address': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'businesshours': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True'}),
'city': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'district': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'exist': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lat': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '11', 'decimal_places': '7'}),
'lat_baidu': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '11', 'decimal_places': '7'}),
'lng': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '11', 'decimal_places': '7'}),
'lng_baidu': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '11', 'decimal_places': '7'}),
'location': ('geosimple.fields.GeohashField', [], {'max_length': '12', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'rating': ('django.db.models.fields.IntegerField', [], {'max_length': '2', 'null': 'True'}),
'review': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'size': ('django.db.models.fields.IntegerField', [], {'max_length': '8', 'null': 'True'}),
'tel': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'uuid': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'})
},
u'billiards.poolroomequipment': {
'Meta': {'object_name': 'PoolroomEquipment', 'db_table': "'poolroomequipment'"},
'cue': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'poolroom': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.Poolroom']"}),
'price': ('django.db.models.fields.IntegerField', [], {'max_length': '8', 'null': 'True'}),
'producer': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'quantity': ('django.db.models.fields.IntegerField', [], {'max_length': '8', 'null': 'True'}),
'tabletype': ('billiards.models.ChoiceTypeField', [], {'max_length': '10'})
},
u'billiards.poolroomimage': {
'Meta': {'object_name': 'PoolroomImage', 'db_table': "'poolroom_images'"},
'description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'imagepath': ('django.db.models.fields.files.ImageField', [], {'max_length': '250'}),
'iscover': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'poolroom': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.Poolroom']"}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'})
},
u'billiards.poolroomuser': {
'Meta': {'object_name': 'PoolroomUser', 'db_table': "'poolroom_user'"},
'group': ('django.db.models.fields.related.ForeignKey', [], {'default': '0', 'to': u"orm['billiards.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'poolroom': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.Poolroom']"}),
'type': ('billiards.models.IntegerChoiceTypeField', [], {'default': '1'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'billiards.poolroomuserapply': {
'Meta': {'object_name': 'PoolroomUserApply', 'db_table': "'poolroom_user_application'"},
'applytime': ('django.db.models.fields.DateTimeField', [], {}),
'cellphone': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'justification': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'poolroom': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.Poolroom']"}),
'poolroomname_userinput': ('django.db.models.fields.CharField', [], {'default': 'True', 'max_length': '50', 'null': 'True'}),
'realname': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'status': ('billiards.models.ChoiceTypeField', [], {'default': "'submitted'", 'max_length': '10'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'billiards.transaction': {
'Meta': {'object_name': 'Transaction', 'db_table': "'transaction'"},
'buyeid': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'buyerEmail': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'closedDate': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'createdDate': ('django.db.models.fields.DateTimeField', [], {}),
'fee': ('billiards.models.CurrencyField', [], {'max_digits': '10', 'decimal_places': '2'}),
'goods': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.Goods']", 'db_column': "'goods'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notifyid': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'paidDate': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'payaccount': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billiards.PayAccount']", 'db_column': "'payaccount'"}),
'paytradeNum': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'state': ('billiards.models.IntegerChoiceTypeField', [], {'default': '1'}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'tradeStatus': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'tradenum': ('django.db.models.fields.CharField', [], {'default': "'OH60PI9OV7DGM5LU1VUYWZSY2UHIH4JKP34W41M2L9OB5BHQXWQYAG92CR93WJ88'", 'max_length': '64'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'db_column': "'uid'"})
},
u'billiards.wechatactivity': {
'Meta': {'object_name': 'WechatActivity', 'db_table': "'wechat_activity'"},
'eventtype': ('billiards.models.ChoiceTypeField', [], {'max_length': '10'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keyword': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'receivedtime': ('django.db.models.fields.DateTimeField', [], {}),
'reply': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'target': ('billiards.models.IntegerChoiceTypeField', [], {'default': '1'}),
'userid': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
u'billiards.wechatcredential': {
'Meta': {'object_name': 'WechatCredential', 'db_table': "'wechat_credential'"},
'appid': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['billiards']
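
# For reference, the model-side counterpart of this migration (a hypothetical sketch --
# the project's actual models.py is not shown) would add fields such as:
#
#     class Assistant(models.Model):
#         ...
#         nationality = models.CharField(max_length=16, default='')
#         birthplace = models.CharField(max_length=24, default='')
#         constellation = models.CharField(max_length=8, default='')
#         measurements = models.CharField(max_length=24, default='')
#         experience = models.IntegerField(default=1)
#         favoriteplayers = models.CharField(max_length=64, default='')
#         selfintroduce = models.CharField(max_length=1024, default='')
#         bestperformance = models.CharField(max_length=128, default='')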
|
#!/usr/bin/env python3
"""
Functions for converting numeric strings, or an iterable of such strings, to floating
point or integer numbers (returning a container of the same type when given an iterable).
Programmed in Python 3.5.2-final.
"""
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
__author__ = 'Pouria Hadjibagheri'
__copyright__ = 'Copyright 2016'
__credits__ = ['Pouria Hadjibagheri']
__license__ = 'MIT'
__maintainer__ = 'Pouria Hadjibagheri'
__email__ = 'p.bagheri@ucl.ac.uk'
__date__ = '12/10/2016, 19:22'
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def str2float(value):
"""
Converts numeric values from `str` or an array thereof to `float`.
    If the numeric value is otherwise well formed but contains invalid
    characters entered by mistake, those characters are automatically discarded.
    _Note_: An `E` or `e` in the number is treated as an exponent, not
    as a mistake.
:param value: `str` or an array thereof containing numeric values.
:type value: str, list, tuple, set
:return: `float`, or an array of `float` numbers in the same type as the one given.
:rtype: float, list, tuple, set
>>> value = '3.4'
>>> str2float(value)
3.4
>>> value = '3.4cvb'
>>> str2float(value)
3.4
>>> value_list = {'3.4', '5.6'}
>>> str2float(value_list)
{3.4, 5.6}
Timing for this example: 10000 loops, best of 3: 25.6 µs per loop
>>> value_list = ['5.3fs', '-5e4', '6.8', '3e16', '19.1rgf', '16.g5', '-4e3', '-6.3e-4']
>>> str2float(value_list)
[5.3, -50000.0, 6.8, 3e+16, 19.1, 16.5, -4000.0, -0.00063]
"""
    if isinstance(value, float):
        # Already a float; nothing to convert.
        return value
    elif isinstance(value, (list, tuple, set)):
        # Original container type, preserved in the return value.
        obj_type = type(value)
        return obj_type(map(str2float, value))
elif not isinstance(value, str):
raise TypeError(
'Invalid inputs. Expected an argument of type str, '
'tuple, list or set; got "%s" instead.' %
value.__class__.__name__
)
try:
return float(value)
except ValueError:
# Handled outside the block.
pass
accepted_chars = '.-e'
lower_val = value.lower()
# Filters out numeric values and those defined in `accepted_chars`.
filtered_val = filter(
lambda val: str.isnumeric(val) or val in accepted_chars and
lower_val.count('-') <= 1 and
not (lower_val.find('e') < lower_val.find('-') if 'e' in lower_val else False),
lower_val
)
try:
return float(str.join('', filtered_val))
except ValueError as err:
raise ValueError('%sValue does not match a numeric pattern supported in Python.' % err)
def str2int(value):
"""
Converts numeric values from `str` or an array thereof to `int`.
    If the numeric value is otherwise well formed but contains invalid
    characters entered by mistake, those characters are automatically discarded.
    _Note_: An `E` or `e` in the number is treated as an exponent, not
    as a mistake.
:param value: `str` or an array thereof containing numeric values.
:type value: str, list, tuple, set
:return: `int`, or an array of `int` numbers in the same type as the one given.
:rtype: int, list, tuple, set
>>> value = '3.4'
>>> str2int(value)
3
>>> value = '3.4cvb'
>>> str2int(value)
3
>>> value_list = {'3.4', '5.6'}
>>> str2int(value_list)
{3, 5}
Timing for this example: 10000 loops, best of 3: 25.6 µs per loop
>>> value_list = ['5.3fs', '-5e4', '6.8', '3e16', '19.1rgf', '16.g5', '-4e3', '-6.3e-4']
>>> str2int(value_list)
[5, -50000, 6, 30000000000000000, 19, 16, -4000, 0]
"""
floated = str2float(value)
# Original type.
obj_type = type(value)
if not isinstance(floated, float):
# If not a single number:
converted = map(int, floated)
else:
return int(floated)
return obj_type(converted)
if __name__ == '__main__':
from doctest import testmod
    # If run directly, run the doctests.
testmod()
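
# Additional usage note (not part of the original module): the container type is
# preserved, so a tuple of slightly mistyped numeric strings converts to a tuple:
#     str2float(('7.9kg', '1e3'))  ->  (7.9, 1000.0)
#     str2int(('7.9kg', '1e3'))    ->  (7, 1000)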
|
##########################################################################################################################
# Import
from queue import Queue
# Modules
from .promise import Promise
from .cycle import Cycle
##########################################################################################################################
# SERIAL #
##########################################################################################################################
# Instant Scheduler Class
class Serial(Cycle):
# Init Now
def __init__(self, start=True):
# Check Parameters
        if not isinstance(start, bool):
            # Rebinding `self` would have no effect; fail loudly instead.
            raise TypeError('start must be a bool')
# Define Execution Series
self.__queue__ = Queue()
# Set Thread Object
super().__init__(self.__pending__, start)
# Run Pending
def __pending__(self):
if not self.__queue__.empty():
promise = self.__queue__.get()
promise.start()
promise.wait()
# Add Functions
def add(self, function):
# Check for Callable
if not callable(function): return False
function = Promise(function, False)
self.__queue__.put(function)
# Return Promise
return function
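
# Example usage (illustrative only; assumes Cycle runs __pending__ repeatedly in a
# background thread and Promise wraps a callable in a startable/waitable object, as
# the imports above suggest):
#
#     serial = Serial()                            # starts processing immediately
#     promise = serial.add(lambda: print('ran'))   # queued and executed in order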
##########################################################################################################################
|
import os
from PyQt5.QtWidgets import QWidget, QGridLayout, QLabel, QLineEdit, QPushButton, QHBoxLayout, QGroupBox, QSizePolicy, \
QDoubleSpinBox, QCheckBox
from PyQt5.QtWidgets import QFileDialog, QSpinBox, QVBoxLayout
from .model import Model, Variable
from .utils import connect_input_and_button
class ConfigurationWidget(QWidget):
def __init__(self, model:Model):
QWidget.__init__(self)
self.model = model
top_layout = QVBoxLayout()
self.setLayout(top_layout)
layout = QGridLayout()
top_layout.addLayout(layout)
layout.setColumnStretch(1, 1)
paths = (
("Fixed stack path", model.fixed_stack_path),
("Moving stack path", model.moving_stack_path),
("Output path", model.output_path)
)
for row_idx, (name, variable) in enumerate(paths):
label = QLabel(name)
layout.addWidget(label, row_idx, 0)
input = QLineEdit()
layout.addWidget(input, row_idx, 1)
button = QPushButton("...")
layout.addWidget(button, row_idx, 2)
self.connect_input_and_button(name, input, button, variable)
row_idx = len(paths)
#
# Voxel size
#
hlayout = QHBoxLayout()
top_layout.addLayout(hlayout)
hlayout.addWidget(QLabel("voxel size (μm):"))
for label, variable in (
("X", self.model.x_voxel_size),
("Y", self.model.y_voxel_size),
("Z", self.model.z_voxel_size)):
voxel_size_widget = QDoubleSpinBox()
voxel_size_widget.setMinimum(0.01)
voxel_size_widget.setMaximum(10.0)
voxel_size_widget.setDecimals(4)
hlayout.addWidget(QLabel(label))
hlayout.addWidget(voxel_size_widget)
variable.bind_double_spin_box(voxel_size_widget)
hlayout.addStretch(1)
hlayout = QHBoxLayout()
top_layout.addLayout(hlayout)
label = QLabel("# of workers")
hlayout.addWidget(label)
n_workers_widget = QSpinBox()
n_workers_widget.setMinimum(1)
n_workers_widget.setMaximum(os.cpu_count())
n_workers_widget.setValue(model.n_workers.get())
hlayout.addWidget(n_workers_widget)
self.model.n_workers.bind_spin_box(n_workers_widget)
hlayout.addStretch(1)
hlayout = QHBoxLayout()
top_layout.addLayout(hlayout)
label = QLabel("# of workers for I/O")
hlayout.addWidget(label)
n_io_workers_widget = QSpinBox()
n_io_workers_widget.setMinimum(1)
n_io_workers_widget.setMaximum(os.cpu_count())
n_io_workers_widget.setValue(model.n_io_workers.get())
hlayout.addWidget(n_io_workers_widget)
self.model.n_io_workers.bind_spin_box(n_io_workers_widget)
hlayout.addStretch(1)
hlayout = QHBoxLayout()
top_layout.addLayout(hlayout)
self.use_gpu_widget = QCheckBox("Use GPU")
hlayout.addWidget(self.use_gpu_widget)
self.model.use_gpu.bind_checkbox(self.use_gpu_widget)
hlayout.addStretch(1)
group_box = QGroupBox("Neuroglancer parameters")
group_box.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Minimum)
top_layout.addWidget(group_box)
layout = QVBoxLayout()
group_box.setLayout(layout)
hlayout = QHBoxLayout()
layout.addLayout(hlayout)
hlayout = QHBoxLayout()
layout.addLayout(hlayout)
hlayout.addWidget(QLabel("Bind address"))
bind_address_widget = QLineEdit()
bind_address_widget.setText(model.bind_address.get())
self.model.bind_address.bind_line_edit(bind_address_widget)
hlayout.addWidget(bind_address_widget)
hlayout = QHBoxLayout()
layout.addLayout(hlayout)
hlayout.addWidget(QLabel("Port number"))
port_number_widget = QSpinBox()
        # Set the range before the value so the stored port is not clamped to the
        # QSpinBox default maximum of 99.
        port_number_widget.setMinimum(0)
        port_number_widget.setMaximum(65535)
        port_number_widget.setValue(model.port_number.get())
self.model.port_number.bind_spin_box(port_number_widget)
hlayout.addWidget(port_number_widget)
hlayout.addStretch(1)
top_layout.addStretch(1)
def connect_input_and_button(self,
name:str,
input:QLineEdit,
button:QPushButton,
variable:Variable):
connect_input_and_button(self, name, input, button, variable)
|
import logging
logging.basicConfig(level=logging.DEBUG)
import time, tempfile, os, socket
from unittest import skip
import zmq
from work_managers.zeromq.core import ZMQCore
# Amount of time to wait after executing setUp() to allow sockets to settle
SETUP_WAIT = 0.010
# Amount of time to wait prior to executing tearDown(), to ensure that shutdown
# messages don't get lost.
# The original value here (0.010 s = 10 ms) is probably quite generous.
TEARDOWN_WAIT = 0.010
# How long to wait to let shutdown signals sort themselves out
SHUTDOWN_WAIT = 1
BEACON_PERIOD = 0.2
BEACON_WAIT = BEACON_PERIOD * 5
def sockdelay():
'''Delay for slightly longer than the default auto-reconnect time for ZeroMQ (100 ms)'''
time.sleep(0.2)
def randport():
s = socket.socket()
s.bind(('127.0.0.1',0))
port = s.getsockname()[1]
s.close()
return port
def randipc():
(fd, socket_path) = tempfile.mkstemp()
os.close(fd)
endpoint = 'ipc://{}'.format(socket_path)
return endpoint
class ZMQTestBase(object):
'''Support routines'''
# default endpoint type for tests whose transport is not otherwise specified
endpoint_type = 'ipc'
def make_ipc_endpoint(self):
endpoint = randipc()
try:
self._endpoints.append(endpoint)
except AttributeError:
self._endpoints = [endpoint]
return endpoint
def make_tcp_endpoint(self):
return 'tcp://127.0.0.1:{}'.format(randport())
def make_endpoint(self):
try:
endpoint_type = self.endpoint_type
except AttributeError:
endpoint_type = 'ipc'
if endpoint_type == 'ipc':
return self.make_ipc_endpoint()
elif endpoint_type == 'tcp':
return self.make_tcp_endpoint()
else:
raise ValueError('invalid endpoint type set')
def cleanup_endpoints(self):
for endpoint in self._endpoints:
try:
os.unlink(endpoint[6:])
except OSError:
pass
del self._endpoints
def setUp(self):
self._endpoints = []
self.test_core = ZMQCore()
self.test_core.context = self.test_context = zmq.Context()
self.test_core.validation_fail_action = 'raise'
def tearDown(self):
self.cleanup_endpoints()
self.test_context.destroy(linger=1)
del self.test_context
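
# Example of a concrete test case reusing these support routines (illustrative;
# not part of the original module):
import unittest

class ZMQEndpointSmokeTest(ZMQTestBase, unittest.TestCase):
    '''Sanity check for the endpoint helpers defined above.'''
    endpoint_type = 'tcp'

    def test_make_endpoint(self):
        endpoint = self.make_endpoint()
        self.assertTrue(endpoint.startswith('tcp://127.0.0.1:'))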
|
# coding=utf-8
#import cPickle #python2
import pickle
import os
import cv2
from imutils import contours  # pip install imutils
from e import ContourCountError, ContourPerimeterSizeError, PolyNodeCountError
import numpy as np
from PIL import Image, ImageStat  # needed by brightness() below
import settings
#from settings import ANS_IMG_THRESHOLD, CNT_PERIMETER_THRESHOLD, CHOICE_IMG_THRESHOLD, ANS_IMG_DILATE_ITERATIONS, \
# ANS_IMG_ERODE_ITERATIONS, CHOICE_IMG_DILATE_ITERATIONS, CHOICE_IMG_ERODE_ITERATIONS, CHOICE_MAX_AREA, \
#CHOICE_CNT_COUNT, ANS_IMG_KERNEL, CHOICE_IMG_KERNEL, CHOICE_MIN_AREA
from utils import detect_cnt_again, get_init_process_img, get_bright_process_img, get_max_area_cnt, get_ans,sort_by_row_hs,sort_by_row_hs2
'''
def get_choice_area(areas):
areas = sorted(areas)
segments = []
segment_areas = [areas[0]]
for i, c in enumerate(areas[1:]):
if abs(c-areas[i]) < 300:
segment_areas.append(areas[i])
else:
segments.append(segment_areas)
segment_areas = [areas[i]]
temp = segments[0]
for array in segments:
        if len(array) > len(temp) or len(temp) > 250 and len(array) < 250:  # fewer than 250
temp = array
return temp[0]-10,temp[-1]+10
'''
def get_choice_area(areas):
areas = sorted(areas)
segments = []
segment_areas = []
for i, c in enumerate(areas[1:]):
#print c
if abs(c-areas[i]) < 200:
segment_areas.append(areas[i])
else:
segments.append(segment_areas)
segment_areas = []
segments.append(segment_areas)
temp = segments[0]
for array in segments:
        if len(array) > len(temp) or len(temp) > 250:  # fewer than 250
temp = array
return temp[0]-60,temp[-1]+60
def brightness(im_file):
im = Image.open(im_file).convert('L')
stat = ImageStat.Stat(im)
return stat.mean[0]
def get_answer_from_sheet(base_img):
    filepath, tempfilename = os.path.split(base_img)
    file_name, _ = os.path.splitext(tempfilename)
obj_dir = os.path.curdir + "/img/new/"+ file_name
if not os.path.exists(obj_dir):
os.makedirs(obj_dir)
    base_img = cv2.imread(base_img)
    # save the original, untouched image
    cv2.imwrite(obj_dir + "/" + 'origin.png', base_img)
    # convert to grayscale, then run edge detection, binarization and related preprocessing
img = cv2.cvtColor(base_img, cv2.COLOR_BGR2GRAY)
img = get_init_process_img(img)
    # write out the processed image
    cv2.imwrite(obj_dir + "/" + 'process-0.png', img)
    # get the largest-area contour and compare its perimeter with the image size
    # to decide whether it is the answer-sheet outline
cnt = get_max_area_cnt(img)
cnt_perimeter = cv2.arcLength(cnt, True)
cv2.drawContours(base_img, [cnt], 0, (0, 255, 0), 1)
    # draw the detected border (in green)
cv2.imwrite(obj_dir+"/"+'green_border.png', base_img)
base_img_perimeter = (base_img.shape[0] + base_img.shape[1]) * 2
if not cnt_perimeter > settings.CNT_PERIMETER_THRESHOLD * base_img_perimeter:
        print("answer sheet edge not found")
exit()
    # compute the polygon vertices and check that there are exactly four
poly_node_list = cv2.approxPolyDP(cnt, cv2.arcLength(cnt, True) * 0.1, True)
if not poly_node_list.shape[0] == 4:
raise PolyNodeCountError
    # keep processing the image using the computed polygon vertices, mainly to correct skew
    processed_img = detect_cnt_again(poly_node_list, base_img)
    # save the deskewed image
#processed_img = cv2.dilate(processed_img, kernel, iterations=1)
wait_draw = processed_img.copy()
cv2.imwrite(obj_dir+"/"+'correct-position.png', processed_img)
    # adjust the image brightness
processed_img = get_bright_process_img(processed_img)
cv2.imwrite(obj_dir+"/"+'brighten.png', processed_img)
#processed_img = processed_img[processed_img[1]+20:(processed_img[1] + processed_img[3]-20), processed_img[0]+20:(processed_img[0] + processed_img[2]-20)]
    # obtain the filled-in answer regions via thresholding and dilation/erosion
#ans_img = cv2.adaptiveThreshold(processed_img,255,cv2.ADAPTIVE_THRESH_MEAN_C,cv2.THRESH_BINARY,35,2)
#ret, ans_img = cv2.threshold(processed_img,0,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU) #新的方法
#ans_img = cv2.dilate(processed_img, settings.ANS_IMG_KERNEL, iterations=settings.ANS_IMG_DILATE_ITERATIONS)
#ans_img = cv2.erode(ans_img, settings.ANS_IMG_KERNEL, iterations=settings.ANS_IMG_ERODE_ITERATIONS)
ans_img = cv2.morphologyEx(processed_img, cv2.MORPH_CLOSE, settings.ANS_IMG_KERNEL)
ans_img = cv2.adaptiveThreshold(ans_img,255,cv2.ADAPTIVE_THRESH_MEAN_C,cv2.THRESH_BINARY,45,1)
cv2.imwrite(obj_dir+"/"+'answer_area.png', ans_img)
    # obtain the choice-box regions via thresholding and dilation/erosion
#choice_img = cv2.adaptiveThreshold(processed_img,255,cv2.ADAPTIVE_THRESH_MEAN_C,cv2.THRESH_BINARY_INV,35,2)
#ret, choice_img = cv2.threshold(processed_img,0,255,cv2.THRESH_BINARY_INV+cv2.THRESH_OTSU)#新方法
#choice_img = cv2.dilate(processed_img, settings.CHOICE_IMG_KERNEL, iterations=settings.CHOICE_IMG_DILATE_ITERATIONS)
#choice_img = cv2.erode(processed_img, settings.CHOICE_IMG_KERNEL, iterations=settings.CHOICE_IMG_ERODE_ITERATIONS)
choice_img = cv2.adaptiveThreshold(processed_img,255,cv2.ADAPTIVE_THRESH_MEAN_C,cv2.THRESH_BINARY_INV,11,2)
#choice_img = cv2.morphologyEx(choice_img,cv2.MORPH_GRADIENT,settings.ANS_IMG_KERNEL)
cv2.imwrite(obj_dir+"/"+'choice_area.png', choice_img)
#cv2.waitKey(0)
    # find the contours of the choice boxes and of the question numbers in front of them
cnts, h = cv2.findContours(choice_img, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
#cnts = []
#for c in cnt1s:
# cnts.append(cv2.approxPolyDP(cnt, cv2.arcLength(cnt, True) * 0.1, True))
question_cnts = []
cnts_areas = []
for i,c in enumerate(cnts):
cnts_areas.append(cv2.contourArea(c))
CHOICE_MIN_AREA,CHOICE_MAX_AREA = get_choice_area(cnts_areas)
#print "%d %d" %(CHOICE_MIN_AREA,CHOICE_MAX_AREA)
for i,c in enumerate(cnts):
w = cv2.boundingRect(c)[2]
h = cv2.boundingRect(c)[3]
if CHOICE_MIN_AREA < cnts_areas[i]< CHOICE_MAX_AREA \
and ((w/h<=1 and h/w <2) or (w/h>1 and w/h <2)):
question_cnts.append(c)
cv2.drawContours(wait_draw, question_cnts, -1, (0, 0, 255), 1)
cv2.imshow("img", wait_draw)
cv2.waitKey(0)
cv2.imwrite(obj_dir+"/"+'wait_draw5.png', wait_draw)
cv2.waitKey(0)
if len(question_cnts) < settings.CHOICE_CNT_COUNT/2:
        print("wrong number of contours %d %d" % (len(question_cnts), settings.CHOICE_CNT_COUNT))
exit()
    # sort the contours (left-to-right, then top-to-bottom)
question_cnts, cnts_pos = contours.sort_contours(question_cnts, method="left-to-right")
question_cnts, cnts_pos = contours.sort_contours(question_cnts, method="top-to-bottom")
rows = sort_by_row_hs2(list(cnts_pos))
get_ans(ans_img, rows)
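
# Example invocation (illustrative; the image path below is hypothetical):
if __name__ == '__main__':
    get_answer_from_sheet('./img/sheet-001.jpg')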
|
import threading
import time
class WorkerThread(threading.Thread):
def __init__(self, name, callback):
super(WorkerThread, self).__init__(name=name)
self.callback = callback
self.stop_event = threading.Event()
def run(self):
while not self.stop_event.is_set():
self.callback(self.name)
time.sleep(1)
def stop(self):
self.stop_event.set()
class ThreadManager(object):
def __init__(self, callback1, callback2):
self.worker1 = WorkerThread("worker1", callback1)
self.worker2 = WorkerThread("worker2", callback2)
def start_thread(self):
self.worker1.start()
self.worker2.start()
def stop_thread(self):
self.worker1.stop()
self.worker2.stop()
self.worker1.join()
self.worker2.join()
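
# Example usage (not part of the original module): run two named workers for a few
# seconds, then stop them cleanly.
if __name__ == '__main__':
    def report(name):
        print('%s is alive' % name)
    manager = ThreadManager(report, report)
    manager.start_thread()
    time.sleep(3)
    manager.stop_thread()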
|
from operator import attrgetter
from unittest.mock import patch
from django.conf import settings
from django.contrib.auth.models import Group
from django.http.response import HttpResponseForbidden, HttpResponseBadRequest, HttpResponseBase, \
HttpResponseNotAllowed
from django.test import TestCase
from django.urls import reverse
from changesets.views_uploadedcitizens import UploadedCitizenBrowse
from civil_registry.tests.factories import CitizenFactory
from libya_elections.tests.utils import assert_in_messages
from libya_site.tests.factories import UserFactory
from ..models import Changeset, CHANGE_CHANGESETS_GROUP, \
APPROVE_CHANGESETS_GROUP, QUEUE_CHANGESETS_GROUP
from .factories import ChangesetFactory, ChangeRecordFactory
from register.tests.factories import RegistrationCenterFactory
OK = HttpResponseBase.status_code
BAD_REQUEST = HttpResponseBadRequest.status_code
FORBIDDEN = HttpResponseForbidden.status_code
NOT_ALLOWED = HttpResponseNotAllowed.status_code
class GroupsMixin(object):
@classmethod
def setUpTestData(cls):
cls.change_group = Group.objects.get(name=CHANGE_CHANGESETS_GROUP)
cls.approve_group = Group.objects.get(name=APPROVE_CHANGESETS_GROUP)
cls.queue_group = Group.objects.get(name=QUEUE_CHANGESETS_GROUP)
class ViewPermissionTests(GroupsMixin, TestCase):
def setUp(self):
self.password = "PASSWORD"
self.peon = UserFactory(password=self.password)
self.changer = UserFactory(password=self.password)
self.changer.groups.add(self.change_group)
self.approver = UserFactory(password=self.password)
self.approver.groups.add(self.approve_group)
self.queuer = UserFactory(password=self.password)
self.queuer.groups.add(self.queue_group)
self.deleter = UserFactory(password=self.password)
self.deleter.groups.add(self.change_group)
def permitted(self, user):
"""Return True if user can visit self.url and not get a permission denied"""
assert self.client.login(username=user.username, password=self.password)
rsp = self.client.get(self.url)
self.assertIn(rsp.status_code, (OK, FORBIDDEN))
return rsp.status_code != FORBIDDEN
def test_redirect_to_login(self):
# When not logged in, users get redirected to login.
url = reverse('browse_changesets')
self.assertRedirects(self.client.get(url), reverse(settings.LOGIN_URL) + "?next=" + url)
changeset = ChangesetFactory(status=Changeset.STATUS_NEW)
for url in ('edit_changeset', 'read_changeset', 'delete_changeset'):
url = reverse(url, kwargs={'pk': changeset.pk})
self.assertRedirects(
self.client.get(url), reverse(settings.LOGIN_URL) + "?next=" + url,
msg_prefix="%s get did not redirect to login when not logged in" % url)
for url in ('browse_changerecords', 'browse_uploadedcitizens', ):
url = reverse(url)
self.assertRedirects(
self.client.get(url), reverse(settings.LOGIN_URL) + "?next=" + url,
msg_prefix="%s get did not redirect to login when not logged in" % url)
for url in ('approve_changeset', ):
url = reverse(url, kwargs={'pk': changeset.pk})
self.assertRedirects(
self.client.post(url), reverse(settings.LOGIN_URL) + "?next=" + url,
msg_prefix="%s post did not redirect to login when not logged in" % url)
def test_get_list_view(self):
# Need browse permission
self.url = reverse('browse_changesets')
self.assertFalse(self.permitted(self.peon))
self.assertTrue(self.permitted(self.changer))
self.assertTrue(self.permitted(self.approver))
self.assertTrue(self.permitted(self.queuer))
self.assertTrue(self.permitted(self.deleter))
def test_get_edit_view(self):
# Need edit permission
changeset = ChangesetFactory(status=Changeset.STATUS_NEW)
self.url = reverse('edit_changeset', kwargs={'pk': changeset.pk})
self.assertFalse(self.permitted(self.peon))
self.assertTrue(self.permitted(self.changer))
self.assertTrue(self.permitted(self.approver))
self.assertTrue(self.permitted(self.queuer))
self.assertTrue(self.permitted(self.deleter))
def test_get_view_view(self):
# Need read permission
changeset = ChangesetFactory(status=Changeset.STATUS_NEW)
self.url = reverse('read_changeset', kwargs={'pk': changeset.pk})
self.assertFalse(self.permitted(self.peon))
self.assertTrue(self.permitted(self.changer))
self.assertTrue(self.permitted(self.approver))
self.assertTrue(self.permitted(self.queuer))
self.assertTrue(self.permitted(self.deleter))
def test_get_create_view(self):
# Need add permission
self.url = reverse('add_changeset')
self.assertFalse(self.permitted(self.peon))
self.assertTrue(self.permitted(self.changer))
self.assertTrue(self.permitted(self.approver))
self.assertTrue(self.permitted(self.queuer))
self.assertTrue(self.permitted(self.deleter))
def test_get_delete_view(self):
changeset = ChangesetFactory(status=Changeset.STATUS_NEW)
self.url = reverse('delete_changeset', kwargs={'pk': changeset.pk})
self.assertFalse(self.permitted(self.peon))
self.assertTrue(self.permitted(self.changer))
self.assertTrue(self.permitted(self.approver))
self.assertTrue(self.permitted(self.queuer))
self.assertTrue(self.permitted(self.deleter))
class ListViewTest(GroupsMixin, TestCase):
def setUp(self):
self.password = "PASSWORD"
user = UserFactory(password=self.password)
user.groups.add(self.change_group)
assert self.client.login(username=user.username, password=self.password)
def test_list_view(self):
changeset1 = ChangesetFactory(name="Edward Einsenstein")
changeset2 = ChangesetFactory(name="Fritz Kumquat")
url = reverse('browse_changesets')
rsp = self.client.get(url)
self.assertContains(rsp, changeset1.name)
self.assertContains(rsp, changeset2.name)
class CreateViewTest(GroupsMixin, TestCase):
def setUp(self):
self.password = "PASSWORD"
user = UserFactory(password=self.password)
user.groups.add(self.change_group)
assert self.client.login(username=user.username, password=self.password)
def test_create_view(self):
url = reverse('add_changeset')
rsp = self.client.get(url)
self.assertEqual(OK, rsp.status_code)
self.center1 = RegistrationCenterFactory()
self.center2 = RegistrationCenterFactory()
data = {
'name': 'My Changeset',
'change': str(Changeset.CHANGE_CENTER),
'how_to_select': str(Changeset.SELECT_CENTERS),
'selected_centers_1': [str(self.center1.pk)],
'target_center_1': str(self.center2.pk),
'justification': 'Just because',
}
rsp = self.client.post(url, data=data, follow=True)
self.assertRedirects(rsp, reverse('browse_changesets'))
class EditViewTest(GroupsMixin, TestCase):
def setUp(self):
self.password = "PASSWORD"
user = UserFactory(password=self.password)
user.groups.add(self.change_group)
assert self.client.login(username=user.username, password=self.password)
def test_edit_view(self):
self.center1 = RegistrationCenterFactory()
self.center2 = RegistrationCenterFactory()
data = {
'name': 'My Changeset',
'change': str(Changeset.CHANGE_CENTER),
'how_to_select': str(Changeset.SELECT_CENTERS),
'selected_centers_1': [str(self.center1.pk)],
'target_center_1': str(self.center2.pk),
'justification': 'Just because',
}
changeset = ChangesetFactory(
name=data['name'],
change=Changeset.CHANGE_CENTER,
how_to_select=Changeset.SELECT_CENTERS,
target_center=self.center2,
justification='Just because',
)
changeset.selected_centers.add(self.center1)
url = reverse('edit_changeset', kwargs={'pk': changeset.pk})
rsp = self.client.get(url)
self.assertEqual(OK, rsp.status_code)
data['name'] = 'My Edited Changeset'
        data['justification'] = 'My edited justification'
rsp = self.client.post(url, data=data, follow=False)
self.assertEqual(302, rsp.status_code) # , msg=rsp.content.decode('utf-8'))
self.assertRedirects(rsp, reverse('browse_changesets'))
changeset = Changeset.objects.get()
self.assertEqual(data['name'], changeset.name)
self.assertEqual(data['justification'], changeset.justification)
def test_edit_view_not_in_editable_state(self):
changeset = ChangesetFactory(status=Changeset.STATUS_FAILED)
url = reverse('edit_changeset', kwargs={'pk': changeset.pk})
rsp = self.client.get(url)
self.assertRedirects(rsp, reverse('read_changeset', kwargs=dict(pk=changeset.pk)))
class ViewViewTest(GroupsMixin, TestCase):
def setUp(self):
self.password = "PASSWORD"
user = UserFactory(password=self.password)
user.groups.add(self.change_group)
assert self.client.login(username=user.username, password=self.password)
def test_view_view(self):
self.center1 = RegistrationCenterFactory(name='Centero Uno')
# Try a non-ASCII center to exercise the view. See issue 1966:
# https://github.com/hnec-vr/libya-elections/issues/1966
self.center2 = RegistrationCenterFactory(name='Centre Tv\xe5')
changeset = ChangesetFactory(
name='My Changeset',
change=Changeset.CHANGE_CENTER,
how_to_select=Changeset.SELECT_CENTERS,
target_center=self.center2,
justification='Just because',
)
changeset.selected_centers.add(self.center1)
changeset.selected_centers.add(self.center2)
url = reverse('read_changeset', kwargs={'pk': changeset.pk})
rsp = self.client.get(url)
self.assertEqual(OK, rsp.status_code)
self.assertContains(rsp, self.center1.center_id)
self.assertContains(rsp, self.center2.center_id)
self.assertContains(rsp, 'Just because')
self.assertContains(rsp, 'My Changeset')
class ApproveViewTest(GroupsMixin, TestCase):
def setUp(self):
self.password = "PASSWORD"
self.peon = UserFactory(password=self.password)
self.changer = UserFactory(password=self.password)
self.changer.groups.add(self.change_group)
self.approver = UserFactory(password=self.password)
self.approver.groups.add(self.approve_group)
self.queuer = UserFactory(password=self.password)
self.queuer.groups.add(self.queue_group)
self.changeset = ChangesetFactory()
self.url = reverse('approve_changeset', kwargs={'pk': self.changeset.pk})
self.read_url = reverse('read_changeset', args=[self.changeset.pk])
def login(self, user):
assert self.client.login(username=user.username, password=self.password)
def test_theres_no_get_view(self):
self.login(self.approver)
rsp = self.client.get(self.url)
self.assertEqual(NOT_ALLOWED, rsp.status_code)
def test_peon_cannot_approve(self):
self.login(self.peon)
rsp = self.client.post(self.url, data={'approve': True})
self.assertEqual(FORBIDDEN, rsp.status_code)
def test_changer_cannot_approve(self):
self.login(self.changer)
rsp = self.client.post(self.url, data={'approve': True})
self.assertEqual(FORBIDDEN, rsp.status_code)
def test_approver_can_approve(self):
self.login(self.approver)
rsp = self.client.post(self.url, data={'approve': True})
self.assertRedirects(rsp, self.read_url)
def test_queuer_cannot_approve(self):
# Start privilege is not the same as approve privilege, necessarily
self.login(self.queuer)
rsp = self.client.post(self.url, data={'approve': True})
self.assertEqual(FORBIDDEN, rsp.status_code)
def test_cannot_approve_after_queueing(self):
superuser = UserFactory(is_superuser=True, password=self.password)
self.changeset.approve(self.approver)
self.changeset.approve(superuser)
self.changeset.status = Changeset.STATUS_QUEUED
self.changeset.save()
self.login(superuser)
rsp = self.client.post(self.url, data={'approve': True})
self.assertContains(rsp, "after the changeset has been started.", status_code=BAD_REQUEST)
def test_cannot_revoke_approval_after_queueing(self):
superuser = UserFactory(is_superuser=True, password=self.password)
self.changeset.approve(self.approver)
self.changeset.approve(superuser)
self.changeset.status = Changeset.STATUS_QUEUED
self.changeset.save()
self.login(superuser)
rsp = self.client.post(self.url, data={'revoke': True})
self.assertContains(rsp, "after the changeset has been started.", status_code=BAD_REQUEST)
def test_user_already_approved(self):
self.login(self.approver)
rsp = self.client.post(self.url, data={'approve': True})
self.assertRedirects(rsp, self.read_url)
self.login(self.approver)
rsp = self.client.post(self.url, data={'approve': True}, follow=True)
self.assertRedirects(rsp, self.read_url)
assert_in_messages(rsp, "already approved")
def test_revoke(self):
self.changeset.approve(self.approver)
self.login(self.approver)
rsp = self.client.post(self.url, data={'revoke': True})
self.assertEqual(0, self.changeset.number_of_approvals)
self.assertRedirects(rsp, self.read_url)
def test_revoke_by_non_approver(self):
# you can't revoke if you didn't approve
self.changeset.approvers.add(self.peon, self.queuer)
self.login(self.approver)
rsp = self.client.post(self.url, data={'revoke': True})
self.assertEqual(2, self.changeset.number_of_approvals)
assert_in_messages(rsp, "You did not approve")
self.assertEqual(BAD_REQUEST, rsp.status_code)
def test_queue(self):
# any old approvals
self.changeset.approvers.add(self.peon, self.approver)
self.changeset.status = Changeset.STATUS_APPROVED
self.changeset.save()
self.login(self.queuer)
with patch.object(Changeset, 'queue') as mock_queue:
rsp = self.client.post(self.url, data={'queue': True})
assert mock_queue.called
# queue redirects to the view page
self.assertRedirects(rsp, self.read_url)
def test_queue_without_permission(self):
# must have queue permission
# any old approvals
self.changeset.approvers.add(self.peon, self.approver)
# A user with perms to visit the approval page but not to queue
self.login(self.approver)
with patch.object(Changeset, 'queue') as mock_queue:
rsp = self.client.post(self.url, data={'queue': True})
mock_queue.assert_not_called()
self.assertEqual(FORBIDDEN, rsp.status_code)
def test_queue_not_approved(self):
# can't queue if not approved
# only one approval
self.changeset.approve(self.approver)
self.login(self.queuer)
with patch.object(Changeset, 'queue') as mock_queue:
rsp = self.client.post(self.url, data={'queue': True})
mock_queue.assert_not_called()
self.assertEqual(BAD_REQUEST, rsp.status_code)
def test_approve_view_without_expected_args(self):
assert not self.changeset.has_been_queued()
self.login(self.approver)
rsp = self.client.post(self.url, data={'nonsense': True})
self.assertEqual(BAD_REQUEST, rsp.status_code)
class DeleteViewTest(GroupsMixin, TestCase):
def setUp(self):
self.password = "PASSWORD"
self.peon = UserFactory(password=self.password)
self.deleter = UserFactory(password=self.password)
self.deleter.groups.add(self.change_group)
self.changeset = ChangesetFactory()
self.url = reverse('delete_changeset', kwargs={'pk': self.changeset.pk})
def login(self, user):
assert self.client.login(username=user.username, password=self.password)
def test_can_delete_with_permission(self):
changeset_pk = self.changeset.pk
self.login(self.deleter)
rsp = self.client.post(self.url)
self.assertRedirects(rsp, reverse('browse_changesets'))
self.assertFalse(Changeset.objects.filter(pk=changeset_pk).exists())
def test_cannot_delete_without_permission(self):
self.login(self.peon)
rsp = self.client.post(self.url)
self.assertEqual(FORBIDDEN, rsp.status_code)
    def test_cannot_delete_queued_changeset(self):
self.changeset.status = Changeset.STATUS_EXECUTING
self.changeset.save()
redirect_url = reverse('read_changeset', kwargs=dict(pk=self.changeset.pk))
self.login(self.deleter)
rsp = self.client.get(self.url)
self.assertRedirects(rsp, redirect_url)
self.assertTrue(Changeset.objects.filter(pk=self.changeset.pk).exists())
rsp = self.client.post(self.url)
self.assertRedirects(rsp, redirect_url)
self.assertTrue(Changeset.objects.filter(pk=self.changeset.pk).exists())
class CitizenListViewTest(GroupsMixin, TestCase):
def setUp(self):
self.password = "PASSWORD"
user = UserFactory(password=self.password)
user.groups.add(self.change_group)
assert self.client.login(username=user.username, password=self.password)
def test_view_view(self):
self.center2 = RegistrationCenterFactory(name='Centra Dua')
changeset = ChangesetFactory(
name='My Changeset',
change=Changeset.CHANGE_CENTER,
how_to_select=Changeset.SELECT_UPLOADED_NIDS,
target_center=self.center2,
justification='Just because',
)
per_page = UploadedCitizenBrowse.paginate_by
self.assertIsNotNone(per_page)
citizens = [CitizenFactory() for i in range(per_page + 2)]
changeset.selected_citizens.add(*citizens)
citizens = sorted(citizens, key=attrgetter('national_id'))
# Get citizens affected by this changeset, sorted ascending by national ID
url = reverse('browse_uploadedcitizens') + "?changeset=%s&o=0" % changeset.pk
rsp = self.client.get(url)
self.assertEqual(OK, rsp.status_code)
context = rsp.context
object_list = context['object_list']
self.assertLessEqual(len(object_list), per_page)
# Should be on first page
self.assertContains(rsp, citizens[0].national_id)
self.assertContains(rsp, str(citizens[0]))
# Last citizen ought to be on the next page
self.assertNotContains(rsp, citizens[-1].national_id)
self.assertNotContains(rsp, str(citizens[-1]))
class ChangesViewTest(GroupsMixin, TestCase):
def setUp(self):
self.password = "PASSWORD"
user = UserFactory(password=self.password)
user.groups.add(self.change_group)
assert self.client.login(username=user.username, password=self.password)
def test_changed_view_for_changeset(self):
# include ?changeset=NNN and it limits to that changeset
changeset = ChangesetFactory()
change1 = ChangeRecordFactory(changeset=changeset, changed=True)
change2 = ChangeRecordFactory(changeset=changeset, changed=False)
change3 = ChangeRecordFactory(changed=True)
rsp = self.client.get(reverse('browse_changerecords') + "?changeset=%s" % changeset.pk)
self.assertEqual(OK, rsp.status_code)
context = rsp.context
object_list = context['object_list']
self.assertIn(change1, object_list)
self.assertIn(change2, object_list)
self.assertNotIn(change3, object_list)
def test_changed_view_for_all(self):
# Leave out ?changeset= and it shows all changes
changeset = ChangesetFactory()
change1 = ChangeRecordFactory(changeset=changeset, changed=True)
change2 = ChangeRecordFactory(changeset=changeset, changed=False)
change3 = ChangeRecordFactory(changed=True)
rsp = self.client.get(reverse('browse_changerecords'))
self.assertEqual(OK, rsp.status_code)
context = rsp.context
object_list = context['object_list']
self.assertIn(change1, object_list)
self.assertIn(change2, object_list)
self.assertIn(change3, object_list)
|
#!/bin/python3
import requests
import pkg_resources
import imageio
from PIL import Image, ImageDraw, ImageFont
from io import BytesIO
import numpy as np
import os
from PIL.PngImagePlugin import PngImageFile, PngInfo
def flowkey_dl(url):
#url=os.path.dirname(url)+'/{}.png'
hashstring=strip_url(url)
try:
filename=pkg_resources.resource_filename(__name__, f'raw/{hashstring}.png')
img=PngImageFile(filename)
except FileNotFoundError:
pass
else:
print(f'found local file {filename}')
return np.array(img),img.info.get('Title'),img.info.get('Author')
url=make_url(hashstring)
#load with
imgs=list()
i=0
while True:
#im = imageio.imread(url.format(i))
r = requests.get(url.format(i))
if r.content[-6:-1]==b'Error':
break
patch=next(iter(imageio.get_reader(r.content, '.png')))
print(f'loaded patch {i} with shape {patch.shape}')
if len(patch.shape)==3:
imgs.append(patch)
else:
print(f'patch {i} looks strange, ignoring: {patch} \nshape: {patch.shape}')
i+=1
    print(f'downloaded {len(imgs)} patches from {url}')
#print([i.shape for i in imgs])
imgs_comb = np.hstack( imgs )
r,g,b,a=np.rollaxis(imgs_comb, axis=-1)
return 255-a, None, None
def find_measure(image, min_sz=100):
#image is a numpy array
    #there are about 20 pixels above and 15 below the lines.
# at most 5 pixels can be brighter than 100
lines=np.where((image>100)[:,50:-50].sum(1)<image.shape[1]/2)[0]
positions=np.where((image>100)[lines[0]:lines[-1],:].sum(0)<10)[0]
measures=[positions[0]]
for i in positions:
if i> measures[-1]+min_sz:
measures.append(i)
print(f'found {len(measures)-1} measures')
return measures
def parse_nums(val=None):
#parse strings like '1,3,6-10,15'
nums=[]
try:
if val is not None and val:
for v in val.split(','):
if '-' in v:
_from, to= v.split('-')
nums.extend(list(range(int(_from), int(to)+1)))
else:
nums.append(int(v))
except ValueError:
return []
return nums
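# Illustrative examples of parse_nums (not from the original source):
#   parse_nums('1,3,6-8')  -> [1, 3, 6, 7, 8]
#   parse_nums('2,oops')   -> []   (any malformed part discards the whole selection)
#   parse_nums(None)       -> []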
def arange_image(image=None, title='', author='',width=2480, height=3508, scale=1,space=50, sel_measures=None,break_measures=None, nobreak_measures=None,font_size=(40,20) , mar=50):
sel_measures,break_measures, nobreak_measures=[parse_nums(val) for val in (sel_measures,break_measures, nobreak_measures)]
out=[Image.fromarray(255*np.ones((int(height),int(width))))]
try:
fnt = [ImageFont.truetype('FreeMono.ttf', sz) for sz in font_size]
except OSError:
try:
fnt = [ImageFont.truetype('arial.ttf', sz) for sz in font_size]
except OSError:
            print('Cannot load Arial or FreeMono TTF fonts; using the default font and font size. To fix this, install the FreeMono or Arial TTF fonts.')
fnt = [ImageFont.load_default() for sz in font_size]
d = ImageDraw.Draw(out[-1])
w, h = d.textsize(title, font=fnt[0])
d.text(((width-w)/2,mar),title, font=fnt[0], fill=0)
w2, h2= d.textsize(author, font=fnt[1])
d.text((width-mar-w2,mar+h), author, font=fnt[1], fill=0)
    print(f'arrange images of size {width}x{height}')
if image is None:
return out
measures=find_measure(image)
if sel_measures:
print(f'selecting measures {sel_measures}')
image=np.hstack([image[:,measures[m-1]:measures[m]] for m in sel_measures if m<len(measures)])
#offset=0
rm=[0]
#new_measures=list()
for i,m in enumerate(measures[1:]):
rm.append(rm[i])
if i+1 not in sel_measures:
#offset+=measures[i]-m
rm[i+1]+=1
nobreak_measures=[v-rm[v] for v in nobreak_measures if v in sel_measures]
break_measures=[v-rm[v] for v in break_measures if v in sel_measures]
measures=find_measure(image)
offset=measures[0]
breaks=list()
for i,ix in enumerate(measures):
if i not in nobreak_measures and (ix-offset>(width-2*mar)/scale or i in break_measures):
if measures[i-1]>offset:
breaks.append(measures[i-1])
else:
breaks.append(measures[i])
offset=breaks[-1]
breaks.append(image.shape[1])
offset=max(0,measures[0]-1)
y=int(mar+h+h2+space/2)
for i,ix in enumerate(breaks):
print(f'{offset}, {ix}')
if y+image.shape[0]+mar>height:
out.append(Image.fromarray(255*np.ones((int(height),int(width)))))
y=mar
patch=image[:,offset:ix+1]
dim=patch.shape
patch=Image.fromarray(patch)
        # PIL's resize expects a concrete (width, height) tuple, not a generator
        patch=patch.resize(tuple(int(x*scale) for x in reversed(dim)))
out[-1].paste(patch, (mar,y))
y+=patch.height+space
offset=ix-1
return out
def load_image(filename):
img=PngImageFile(filename)
return(img)
def strip_url(url):
if url.startswith('https://flowkeycdn.com/sheets/'):
url=url[30:]
if '/' in url:
url=url[:url.find('/')]
return url
def make_url(hashstring, dpi=300):
if dpi!=300:
dpi=150
return f'https://flowkeycdn.com/sheets/{hashstring}/{dpi}/'+'{}.png'
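# Illustrative round trip (the sheet hash 'abc123' is a placeholder, not a real sheet):
#   strip_url('https://flowkeycdn.com/sheets/abc123/150/0.png') -> 'abc123'
#   make_url('abc123') -> 'https://flowkeycdn.com/sheets/abc123/300/{}.png'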
def save_png(image, url, author, title):
metadata = PngInfo()
metadata.add_text("Title", title)
metadata.add_text("Author", author)
filename=pkg_resources.resource_filename(__name__, f'raw/{strip_url(url)}.png')
print(f'saving raw image of sheet {author} - {title} to {filename}')
Image.fromarray(image).save(filename, pnginfo=metadata)
#load with PngImageFile(filename)
def save_pdf(images, filename):
images=[i.convert('RGB') for i in images]
print(f'saving {len(images)} pages to {filename}')
if len(images)==1:
images[0].save(filename)
else:
images[0].save(filename,save_all=True, append_images=images[1:])
def main():
url = 'https://flowkeycdn.com/sheets/XXXXX/150/0.png'
    image, _title, _author = flowkey_dl(url)  # flowkey_dl returns (array, title, author)
measure=find_measure(image)
r,g,b=[image.copy() for _ in range(3)]
r[:,measure]=255
Image.fromarray( np.dstack([r,g,b])).show()
if __name__ == "__main__":
main()
|
# It's not possible to delete a global var at runtime in strict mode.
gvar = 1
del gvar
gvar = 2
def __main__():
print("in __main__")
global gvar
# In the current implementation, TypeError is thrown. This is considered
# an implementation detail and may change later to e.g. RuntimeError.
try:
del globals()["gvar"]
except:
print("Exception1")
print(gvar)
|
class Solution:
def findAnagrams(self, s, p):
"""
:type s: str
:type p: str
:rtype: List[int]
"""
ht = dict() # Hash table
result = list()
for char in p:
if ht.get(char):
ht[char] += 1
else:
ht[char] = 1
w_start, w_end, diff = -1, -1, len(p)
while w_end < len(s)-1:
# print(w_start, w_end, diff)
# print(ht)
if ht.get(s[w_end+1]) is not None:
if ht[s[w_end+1]] > 0:
# extend window
w_end += 1
diff -= 1
ht[s[w_end]] -= 1
if w_end - w_start == len(p) and diff == 0:
result.append(w_start+1)
# print(result)
else:
# narrow window
if ht.get(s[w_start+1]) is not None:
ht[s[w_start+1]] += 1
diff += 1
w_start += 1
else:
if w_start == w_end:
w_end += 1
# narrow window
if ht.get(s[w_start+1]) is not None:
ht[s[w_start+1]] += 1
diff += 1
w_start += 1
return result
# solute = Solution()
# print(solute.findAnagrams("cbaebabacd", "abc"))
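# Expected result of the call above (illustrative): [0, 6]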
|
import argparse
import os
import re
class ShowUsageException(Exception):
pass
def dir_path(s):
if os.path.isdir(s):
return s
else:
raise ShowUsageException(f'"{s}" is not a directory')
def origin_directory_pair(s):
try:
origin, path = s.split(':')
except ValueError:
raise argparse.ArgumentTypeError(
f'Expected colon-delimited pair, not "{s}"')
expected_format = r'[0-9a-f-]{36}'
if not re.match(expected_format, origin):
raise argparse.ArgumentTypeError(
f'Origin format wrong; expected {expected_format}')
return {
'origin': origin,
'path': path
}
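# Illustrative usage as an argparse type (assumed wiring; the actual parser setup lives elsewhere):
#   parser = argparse.ArgumentParser()
#   parser.add_argument('--origin', type=origin_directory_pair)
#   origin_directory_pair('00000000-0000-0000-0000-000000000000:/tmp/out')
#     -> {'origin': '00000000-0000-0000-0000-000000000000', 'path': '/tmp/out'}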
|
# Copyright (c) 2012 Santosh Philip
# =======================================================================
# Distributed under the MIT License.
# (See accompanying file LICENSE or copy at
# http://opensource.org/licenses/MIT)
# =======================================================================
"""use epbunch"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from itertools import chain
from eppy.EPlusInterfaceFunctions import readidf
import eppy.bunchhelpers as bunchhelpers
from eppy.EPlusInterfaceFunctions.structures import CaseInsensitiveDict
from eppy.bunch_subclass import EpBunch
# from eppy.bunch_subclass import fieldnames, fieldvalues
import eppy.iddgaps as iddgaps
import eppy.function_helpers as fh
from eppy.idf_msequence import Idf_MSequence
class NoIDDFieldsError(Exception):
pass
def iddversiontuple(afile):
"""given the idd file or filehandle, return the version handle"""
def versiontuple(vers):
"""version tuple"""
return tuple([int(num) for num in vers.split(".")])
try:
fhandle = open(afile, "rb")
except TypeError:
fhandle = afile
line1 = fhandle.readline()
try:
line1 = line1.decode("ISO-8859-2")
except AttributeError:
pass
line = line1.strip()
if line1 == "":
return (0,)
vers = line.split()[-1]
return versiontuple(vers)
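# Illustrative: an IDD whose first line ends in "8.9.0" yields (8, 9, 0);
# an empty file yields (0,).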
def makeabunch(commdct, obj, obj_i, debugidd=True, block=None):
"""make a bunch from the object"""
objidd = commdct[obj_i]
objfields = [comm.get("field") for comm in commdct[obj_i]]
objfields[0] = ["key"]
objfields = [field[0] for field in objfields]
obj_fields = [bunchhelpers.makefieldname(field) for field in objfields]
bobj = EpBunch(obj, obj_fields, objidd)
# TODO : test for len(obj) > len(obj_fields)
# that will be missing fields in idd file
# do we throw an exception here ????? YES !!!!!
if debugidd:
if len(obj) > len(obj_fields):
n = len(obj) - len(obj_fields)
extlst = extension_of_extensible(commdct[obj_i], block[obj_i], n)
errortext = "idfobject with key '{}' & first field '{}' has {} fields while the idd for '{}' has only {} fields. Add the following fields to the object '{}' in file Energy+.idd '{};'".format(
obj[0].upper(),
obj[1],
len(obj) - 1,
obj[0].upper(),
len(obj_fields) - 1,
obj[0].upper(),
", ".join(extlst),
)
# "idfobject with key 'TIMESTEP' & first field '44' has 2 fields while the idd for 'TIMESTEP' has only 1 fields. Add the following fields to object 'TIMESTEP' in file Energy+.idd A5, A6;'"
# print(block[obj_i])
# print(errortext)
print(extlst)
raise NoIDDFieldsError(errortext)
return bobj
def makebunches(data, commdct):
"""make bunches with data"""
bunchdt = CaseInsensitiveDict()
ddtt, dtls = data.dt, data.dtls
for obj_i, key in enumerate(dtls):
key = key.upper()
bunchdt[key] = []
objs = ddtt[key]
for obj in objs:
bobj = makeabunch(commdct, obj, obj_i)
bunchdt[key].append(bobj)
return bunchdt
def makebunches_alter(data, commdct, theidf, block=None):
"""make bunches with data"""
bunchdt = CaseInsensitiveDict()
dt, dtls = data.dt, data.dtls
for obj_i, key in enumerate(dtls):
key = key.upper()
objs = dt[key]
list1 = []
for obj in objs:
bobj = makeabunch(commdct, obj, obj_i, block=block)
list1.append(bobj)
bunchdt[key] = Idf_MSequence(list1, objs, theidf)
return bunchdt
class ConvInIDD(object):
"""hold the conversion function to integer, real and no_type"""
def no_type(self, x, avar):
if avar.startswith("N"): # is a number if it starts with N
try:
return float(x) # in case x=autosize
except ValueError as e:
return x
else:
return x # starts with A, is not a number
def integer(self, x, y):
try:
return int(x)
except ValueError as e:
return x
def real(self, x, y):
try:
return float(x)
except ValueError as e:
return x
def conv_dict(self):
"""dictionary of conversion"""
return dict(integer=self.integer, real=self.real, no_type=self.no_type)
# remove this one
def convertfields_old(key_comm, obj, inblock=None):
"""convert the float and interger fields"""
convinidd = ConvInIDD()
typefunc = dict(integer=convinidd.integer, real=convinidd.real)
types = []
for comm in key_comm:
types.append(comm.get("type", [None])[0])
convs = [typefunc.get(typ, convinidd.no_type) for typ in types]
try:
inblock = list(inblock)
except TypeError as e:
inblock = ["does not start with N"] * len(obj)
for i, (val, conv, avar) in enumerate(zip(obj, convs, inblock)):
if i == 0:
# inblock[0] is the key
pass
else:
val = conv(val, inblock[i])
obj[i] = val
return obj
def convertafield(field_comm, field_val, field_iddname):
"""convert field based on field info in IDD"""
convinidd = ConvInIDD()
field_typ = field_comm.get("type", [None])[0]
conv = convinidd.conv_dict().get(field_typ, convinidd.no_type)
return conv(field_val, field_iddname)
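# Illustrative conversions (inputs are made up):
#   convertafield({'type': ['integer']}, '3', 'N1')  -> 3
#   convertafield({}, 'autosize', 'N1')              -> 'autosize'  (falls back to no_type)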
def convertfields(key_comm, obj, inblock=None):
"""convert based on float, integer, and A1, N1"""
# f_ stands for field_
convinidd = ConvInIDD()
if not inblock:
inblock = ["does not start with N"] * len(obj)
for i, (f_comm, f_val, f_iddname) in enumerate(zip(key_comm, obj, inblock)):
if i == 0:
# inblock[0] is the iddobject key. No conversion here
pass
else:
obj[i] = convertafield(f_comm, f_val, f_iddname)
return obj
def convertallfields(data, commdct, block=None):
"""docstring for convertallfields"""
# import pdbdb; pdb.set_trace()
for key in list(data.dt.keys()):
objs = data.dt[key]
for i, obj in enumerate(objs):
key_i = data.dtls.index(key)
key_comm = commdct[key_i]
try:
inblock = block[key_i]
except TypeError as e:
inblock = None
obj = convertfields(key_comm, obj, inblock)
objs[i] = obj
def addfunctions(dtls, bunchdt):
"""add functions to the objects"""
snames = [
"BuildingSurface:Detailed",
"Wall:Detailed",
"RoofCeiling:Detailed",
"Floor:Detailed",
"FenestrationSurface:Detailed",
"Shading:Site:Detailed",
"Shading:Building:Detailed",
"Shading:Zone:Detailed",
]
for sname in snames:
if sname.upper() in bunchdt:
surfaces = bunchdt[sname.upper()]
for surface in surfaces:
func_dict = {
"area": fh.area,
"height": fh.height, # not working correctly
"width": fh.width, # not working correctly
"azimuth": fh.azimuth,
"tilt": fh.tilt,
"coords": fh.getcoords, # needed for debugging
}
try:
surface.__functions.update(func_dict)
except KeyError as e:
surface.__functions = func_dict
# add common functions
# for name in dtls:
# for idfobject in bunchdt[name]:
# idfobject.__functions
# idfobject['__functions']['fieldnames'] = fieldnames
# idfobject['__functions']['fieldvalues'] = fieldvalues
# idfobject['__functions']['getrange'] = GetRange(idfobject)
# idfobject['__functions']['checkrange'] = CheckRange(idfobject)
def addfunctions2new(abunch, key):
"""add functions to a new bunch/munch object"""
snames = [
"BuildingSurface:Detailed",
"Wall:Detailed",
"RoofCeiling:Detailed",
"Floor:Detailed",
"FenestrationSurface:Detailed",
"Shading:Site:Detailed",
"Shading:Building:Detailed",
"Shading:Zone:Detailed",
]
snames = [sname.upper() for sname in snames]
if key in snames:
func_dict = {
"area": fh.area,
"height": fh.height, # not working correctly
"width": fh.width, # not working correctly
"azimuth": fh.azimuth,
"tilt": fh.tilt,
"coords": fh.getcoords, # needed for debugging
}
try:
abunch.__functions.update(func_dict)
except KeyError as e:
abunch.__functions = func_dict
return abunch
def idfreader(fname, iddfile, conv=True):
"""read idf file and return bunches"""
data, commdct, idd_index = readidf.readdatacommdct(fname, iddfile=iddfile)
if conv:
convertallfields(data, commdct)
# fill gaps in idd
ddtt, dtls = data.dt, data.dtls
# skiplist = ["TABLE:MULTIVARIABLELOOKUP"]
nofirstfields = iddgaps.missingkeys_standard(
commdct, dtls, skiplist=["TABLE:MULTIVARIABLELOOKUP"]
)
iddgaps.missingkeys_nonstandard(None, commdct, dtls, nofirstfields)
bunchdt = makebunches(data, commdct)
return bunchdt, data, commdct, idd_index
def idfreader1(fname, iddfile, theidf, conv=True, commdct=None, block=None):
"""read idf file and return bunches"""
versiontuple = iddversiontuple(iddfile)
# import pdb; pdb.set_trace()
block, data, commdct, idd_index = readidf.readdatacommdct1(
fname, iddfile=iddfile, commdct=commdct, block=block
)
if conv:
convertallfields(data, commdct, block)
# fill gaps in idd
ddtt, dtls = data.dt, data.dtls
if versiontuple < (8,):
skiplist = ["TABLE:MULTIVARIABLELOOKUP"]
else:
skiplist = None
nofirstfields = iddgaps.missingkeys_standard(commdct, dtls, skiplist=skiplist)
iddgaps.missingkeys_nonstandard(block, commdct, dtls, nofirstfields)
# bunchdt = makebunches(data, commdct)
bunchdt = makebunches_alter(data, commdct, theidf, block)
return bunchdt, block, data, commdct, idd_index, versiontuple
def getextensible(objidd):
"""return the extensible from the idd"""
keys = objidd[0].keys()
extkey = [key for key in keys if key.startswith("extensible")]
if extkey:
extens = extkey[0].split(":")[-1]
return int(extens)
else:
return None
def endof_extensible(extensible, thisblock):
"""get the vars from where extension happens"""
return thisblock[-extensible:]
def extension_of_extensible(objidd, objblock, n):
"""generate the list of new vars needed to extend by n"""
ext = getextensible(objidd)
print(n, type(n), ext, type(ext))
# n = int(n / ext)
n = n // ext
print(n, type(n))
lastvars = endof_extensible(ext, objblock)
print("ext, lastvars", ext, lastvars)
alpha_lastvars = [i[0] for i in lastvars]
int_lastvars = [int(i[1:]) for i in lastvars]
lst = []
for alpha, start in zip(alpha_lastvars, int_lastvars):
step = alpha_lastvars.count(alpha)
rng = range(start + step, start + 1 + n * step, step)
lst.append(["{}{}".format(alpha, item) for item in rng])
from itertools import chain
return list(chain(*zip(*lst)))
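# Illustrative run (assumed inputs): for an 'extensible:3' object whose block ends in
# ['N3', 'A4', 'A5'] and n=6 extra raw fields, n becomes 2 extension groups and the
# new field names are generated group by group: ['N4', 'A6', 'A7', 'N5', 'A8', 'A9']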
|
from browser import document
def hash(name):
total = 0
for char in name:
total *= 10
total += ord(char)
return 10 - (total % 11)
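# Illustrative: hash('bob') -> 2  (total 11008, 11008 % 11 == 8, 10 - 8 == 2)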
def update(event):
    result = hash(document['name'].value)
    document['result'].value = 'You are a %s/10' % result
# Brython passes the DOM event to bound handlers; 'keyup' fires after the input value updates.
document['name'].bind('keyup', update)
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""Unit tests for sorting.py functions."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import unittest
# For convenient debugging
import logging
import mox
from framework import sorting
from framework import framework_views
from proto import tracker_pb2
from testing import fake
from testing import testing_helpers
from tracker import tracker_bizobj
def MakeDescending(accessor):
return sorting._MaybeMakeDescending(accessor, True)
class SortingTest(unittest.TestCase):
def setUp(self):
self.mox = mox.Mox()
self.default_cols = 'a b c'
self.builtin_cols = 'a b x y z'
self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
self.config.component_defs.append(tracker_bizobj.MakeComponentDef(
11, 789, 'Database', 'doc', False, [], [], 0, 0))
self.config.component_defs.append(tracker_bizobj.MakeComponentDef(
22, 789, 'User Interface', 'doc', True, [], [], 0, 0))
self.config.component_defs.append(tracker_bizobj.MakeComponentDef(
33, 789, 'Installer', 'doc', False, [], [], 0, 0))
def tearDown(self):
self.mox.UnsetStubs()
self.mox.ResetAll()
def testMakeSingleSortKeyAccessor_Status(self):
"""Sorting by status should create an accessor for that column."""
self.mox.StubOutWithMock(sorting, '_IndexOrLexical')
status_names = [wks.status for wks in self.config.well_known_statuses]
sorting._IndexOrLexical(status_names, 'status accessor')
self.mox.ReplayAll()
sorting._MakeSingleSortKeyAccessor(
'status', self.config, {'status': 'status accessor'}, [], {}, [])
self.mox.VerifyAll()
def testMakeSingleSortKeyAccessor_Component(self):
"""Sorting by component should create an accessor for that column."""
self.mox.StubOutWithMock(sorting, '_IndexListAccessor')
component_ids = [11, 33, 22]
sorting._IndexListAccessor(component_ids, 'component accessor')
self.mox.ReplayAll()
sorting._MakeSingleSortKeyAccessor(
'component', self.config, {'component': 'component accessor'}, [], {}, [])
self.mox.VerifyAll()
  def testMakeSingleSortKeyAccessor_OtherBuiltInColumns(self):
"""Sorting a built-in column should create an accessor for that column."""
accessor = sorting._MakeSingleSortKeyAccessor(
'buildincol', self.config, {'buildincol': 'accessor'}, [], {}, [])
self.assertEqual('accessor', accessor)
def testMakeSingleSortKeyAccessor_WithPostProcessor(self):
"""Sorting a built-in user column should create a user accessor."""
self.mox.StubOutWithMock(sorting, '_MakeAccessorWithPostProcessor')
users_by_id = {111: 'fake user'}
sorting._MakeAccessorWithPostProcessor(
users_by_id, 'mock owner accessor', 'mock postprocessor')
self.mox.ReplayAll()
sorting._MakeSingleSortKeyAccessor(
'owner', self.config, {'owner': 'mock owner accessor'},
{'owner': 'mock postprocessor'}, users_by_id, [])
self.mox.VerifyAll()
def testIndexOrLexical(self):
well_known_values = ['x-a', 'x-b', 'x-c', 'x-d']
art = 'this is a fake artifact'
# Case 1: accessor generates no values.
base_accessor = lambda art: None
accessor = sorting._IndexOrLexical(well_known_values, base_accessor)
self.assertEqual(sorting.MAX_STRING, accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual(sorting.DescendingValue(sorting.MAX_STRING),
neg_accessor(art))
# Case 2: accessor generates a value, but it is an empty value.
base_accessor = lambda art: ''
accessor = sorting._IndexOrLexical(well_known_values, base_accessor)
self.assertEqual(sorting.MAX_STRING, accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual(sorting.DescendingValue(sorting.MAX_STRING),
neg_accessor(art))
# Case 3: A single well-known value
base_accessor = lambda art: 'x-c'
accessor = sorting._IndexOrLexical(well_known_values, base_accessor)
self.assertEqual(2, accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual(-2, neg_accessor(art))
# Case 4: A single odd-ball value
base_accessor = lambda art: 'x-zzz'
accessor = sorting._IndexOrLexical(well_known_values, base_accessor)
self.assertEqual('x-zzz', accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual(
sorting.DescendingValue('x-zzz'), neg_accessor(art))
def testIndexListAccessor_SomeWellKnownValues(self):
"""Values sort according to their position in the well-known list."""
well_known_values = [11, 33, 22] # These represent component IDs.
art = fake.MakeTestIssue(789, 1, 'sum 1', 'New', 111)
base_accessor = lambda issue: issue.component_ids
accessor = sorting._IndexListAccessor(well_known_values, base_accessor)
# Case 1: accessor generates no values.
self.assertEqual(sorting.MAX_STRING, accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual(sorting.MAX_STRING, neg_accessor(art))
# Case 2: A single well-known value
art.component_ids = [33]
self.assertEqual([1], accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual([-1], neg_accessor(art))
# Case 3: Multiple well-known and odd-ball values
art.component_ids = [33, 11, 99]
self.assertEqual([0, 1, sorting.MAX_STRING], accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual([sorting.MAX_STRING, -1, 0],
neg_accessor(art))
def testIndexListAccessor_NoWellKnownValues(self):
"""When there are no well-known values, all values sort last."""
well_known_values = [] # Nothing pre-defined, so everything is oddball
art = fake.MakeTestIssue(789, 1, 'sum 1', 'New', 111)
base_accessor = lambda issue: issue.component_ids
accessor = sorting._IndexListAccessor(well_known_values, base_accessor)
# Case 1: accessor generates no values.
self.assertEqual(sorting.MAX_STRING, accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual(sorting.MAX_STRING, neg_accessor(art))
# Case 2: A single oddball value
art.component_ids = [33]
self.assertEqual([sorting.MAX_STRING], accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual([sorting.MAX_STRING], neg_accessor(art))
# Case 3: Multiple odd-ball values
art.component_ids = [33, 11, 99]
self.assertEqual(
[sorting.MAX_STRING, sorting.MAX_STRING, sorting.MAX_STRING],
accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual(
[sorting.MAX_STRING, sorting.MAX_STRING, sorting.MAX_STRING],
neg_accessor(art))
def testIndexOrLexicalList(self):
well_known_values = ['Pri-High', 'Pri-Med', 'Pri-Low']
art = fake.MakeTestIssue(789, 1, 'sum 1', 'New', 111, merged_into=200001)
# Case 1: accessor generates no values.
accessor = sorting._IndexOrLexicalList(well_known_values, [], 'pri', {})
self.assertEqual(sorting.MAX_STRING, accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual(sorting.MAX_STRING, neg_accessor(art))
# Case 2: A single well-known value
art.labels = ['Pri-Med']
accessor = sorting._IndexOrLexicalList(well_known_values, [], 'pri', {})
self.assertEqual([1], accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual([-1], neg_accessor(art))
# Case 3: Multiple well-known and odd-ball values
art.labels = ['Pri-zzz', 'Pri-Med', 'yyy', 'Pri-High']
accessor = sorting._IndexOrLexicalList(well_known_values, [], 'pri', {})
self.assertEqual([0, 1, 'zzz'], accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual([sorting.DescendingValue('zzz'), -1, 0],
neg_accessor(art))
# Case 4: Multi-part prefix.
well_known_values.extend(['X-Y-Header', 'X-Y-Footer'])
art.labels = ['X-Y-Footer', 'X-Y-Zone', 'X-Y-Header', 'X-Y-Area']
accessor = sorting._IndexOrLexicalList(well_known_values, [], 'x-y', {})
self.assertEqual([3, 4, 'area', 'zone'], accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual([sorting.DescendingValue('zone'),
sorting.DescendingValue('area'), -4, -3],
neg_accessor(art))
def testIndexOrLexicalList_CustomFields(self):
art = fake.MakeTestIssue(789, 1, 'sum 2', 'New', 111)
art.labels = ['samename-value1']
art.field_values = [tracker_bizobj.MakeFieldValue(
3, 6078, None, None, None, None, False)]
all_field_defs = [
tracker_bizobj.MakeFieldDef(
3, 789, 'samename', tracker_pb2.FieldTypes.INT_TYPE,
None, None, False, False, False, None, None, None, False, None,
None, None, None, 'cow spots', False),
tracker_bizobj.MakeFieldDef(
4, 788, 'samename', tracker_pb2.FieldTypes.APPROVAL_TYPE,
None, None, False, False, False, None, None, None, False, None,
None, None, None, 'cow spots', False),
tracker_bizobj.MakeFieldDef(
4, 788, 'notsamename', tracker_pb2.FieldTypes.APPROVAL_TYPE,
None, None, False, False, False, None, None, None, False, None,
None, None, None, 'should get filtered out', False)
]
accessor = sorting._IndexOrLexicalList([], all_field_defs, 'samename', {})
self.assertEqual([6078, 'value1'], accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual(
[sorting.DescendingValue('value1'), -6078], neg_accessor(art))
def testIndexOrLexicalList_PhaseCustomFields(self):
art = fake.MakeTestIssue(789, 1, 'sum 2', 'New', 111)
art.labels = ['summer.goats-value1']
art.field_values = [
tracker_bizobj.MakeFieldValue(
3, 33, None, None, None, None, False, phase_id=77),
tracker_bizobj.MakeFieldValue(
3, 34, None, None, None, None, False, phase_id=77),
tracker_bizobj.MakeFieldValue(
3, 1000, None, None, None, None, False, phase_id=78)]
art.phases = [tracker_pb2.Phase(phase_id=77, name='summer'),
tracker_pb2.Phase(phase_id=78, name='winter')]
all_field_defs = [
tracker_bizobj.MakeFieldDef(
3, 789, 'goats', tracker_pb2.FieldTypes.INT_TYPE,
None, None, False, False, True, None, None, None, False, None,
None, None, None, 'goats love mineral', False, is_phase_field=True),
tracker_bizobj.MakeFieldDef(
4, 788, 'boo', tracker_pb2.FieldTypes.APPROVAL_TYPE,
None, None, False, False, False, None, None, None, False, None,
None, None, None, 'ahh', False),
]
accessor = sorting._IndexOrLexicalList(
[], all_field_defs, 'summer.goats', {})
self.assertEqual([33, 34, 'value1'], accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual(
[sorting.DescendingValue('value1'), -34, -33], neg_accessor(art))
def testIndexOrLexicalList_ApprovalStatus(self):
art = fake.MakeTestIssue(789, 1, 'sum 2', 'New', 111)
art.labels = ['samename-value1']
art.approval_values = [tracker_pb2.ApprovalValue(approval_id=4)]
all_field_defs = [
tracker_bizobj.MakeFieldDef(
3, 789, 'samename', tracker_pb2.FieldTypes.INT_TYPE,
None, None, False, False, False, None, None, None, False, None,
None, None, None, 'cow spots', False),
tracker_bizobj.MakeFieldDef(
4, 788, 'samename', tracker_pb2.FieldTypes.APPROVAL_TYPE,
None, None, False, False, False, None, None, None, False, None,
None, None, None, 'cow spots', False)
]
accessor = sorting._IndexOrLexicalList([], all_field_defs, 'samename', {})
self.assertEqual([0, 'value1'], accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual([sorting.DescendingValue('value1'),
sorting.DescendingValue(0)],
neg_accessor(art))
def testIndexOrLexicalList_ApprovalApprover(self):
    art = fake.MakeTestIssue(789, 1, 'sum 2', 'New', 111)
art.labels = ['samename-approver-value1']
art.approval_values = [
tracker_pb2.ApprovalValue(approval_id=4, approver_ids=[333])]
all_field_defs = [
tracker_bizobj.MakeFieldDef(
4, 788, 'samename', tracker_pb2.FieldTypes.APPROVAL_TYPE,
None, None, False, False, False, None, None, None, False, None,
None, None, None, 'cow spots', False)
]
users_by_id = {333: framework_views.StuffUserView(333, 'a@test.com', True)}
accessor = sorting._IndexOrLexicalList(
[], all_field_defs, 'samename-approver', users_by_id)
self.assertEqual(['a@test.com', 'value1'], accessor(art))
neg_accessor = MakeDescending(accessor)
self.assertEqual([sorting.DescendingValue('value1'),
sorting.DescendingValue('a@test.com')],
neg_accessor(art))
def testComputeSortDirectives(self):
config = tracker_pb2.ProjectIssueConfig()
self.assertEquals(['project', 'id'],
sorting.ComputeSortDirectives(config, '', ''))
self.assertEquals(['a', 'b', 'c', 'project', 'id'],
sorting.ComputeSortDirectives(config, '', 'a b C'))
config.default_sort_spec = 'id -reporter Owner'
self.assertEquals(['id', '-reporter', 'owner', 'project'],
sorting.ComputeSortDirectives(config, '', ''))
self.assertEquals(
['x', '-b', 'a', 'c', '-owner', 'id', '-reporter', 'project'],
sorting.ComputeSortDirectives(config, 'x -b', 'A -b c -owner'))
|
# ================================================================================== #
# __init__.py - This file is part of the yfrake package. #
# ================================================================================== #
# #
# MIT License #
# #
# Copyright (c) 2022 Mattias Aabmets #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy #
# of this software and associated documentation files (the "Software"), to deal #
# in the Software without restriction, including without limitation the rights #
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #
# copies of the Software, and to permit persons to whom the Software is #
# furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in all #
# copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #
# SOFTWARE. #
# #
# ================================================================================== #
config_file_name = 'yfrake_settings.ini'
valid_config = {
'client': {
'limit': int(),
'timeout': int()
},
'server': {
'host': str(),
'port': int(),
'backlog': int()
},
'cache_size': {
'max_entries': int(),
'max_entry_size': int(),
'max_memory': int()
},
'cache_ttl_groups': {
'override': bool(),
'short_ttl': int(),
'long_ttl': int()
},
'cache_ttl_short': {
'historical_prices': int(),
'detailed_summary': int(),
'financials': int(),
'insights': int(),
'key_statistics': int(),
'market_summary': int(),
'news': int(),
'options': int(),
'price_overview': int(),
'quotes_overview': int(),
'trending_symbols': int()
},
'cache_ttl_long': {
'balance_statements': int(),
'calendar_events': int(),
'cashflow_statements': int(),
'company_overview': int(),
'currencies': int(),
'earnings': int(),
'earnings_history': int(),
'earnings_trend': int(),
'esg_chart': int(),
'esg_scores': int(),
'fund_ownership': int(),
'income_statements': int(),
'insider_holders': int(),
'insider_transactions': int(),
'institution_ownership': int(),
'major_holders': int(),
'purchase_activity': int(),
'quote_type': int(),
'ratings_history': int(),
'recommendation_trend': int(),
'recommendations': int(),
'sec_filings': int(),
'shares_outstanding': int(),
'validate_symbols': int()
}
}
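# Illustrative excerpt of a matching yfrake_settings.ini (section and key names mirror
# valid_config above; the values shown are placeholders, not recommended defaults):
#
#   [client]
#   limit = 64
#   timeout = 2
#
#   [server]
#   host = localhost
#   port = 8888
#   backlog = 200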
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller, NoPolling
from msrestazure.polling.arm_polling import ARMPolling
from .. import models
class VirtualMachinesOperations(object):
"""VirtualMachinesOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: Client Api Version. Constant value: "2016-04-30-preview".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2016-04-30-preview"
self.config = config
def _capture_initial(
self, resource_group_name, vm_name, parameters, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.capture.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'VirtualMachineCaptureParameters')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualMachineCaptureResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def capture(
self, resource_group_name, vm_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
"""Captures the VM by copying virtual hard disks of the VM and outputs a
template that can be used to create similar VMs.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param parameters: Parameters supplied to the Capture Virtual Machine
operation.
:type parameters:
~azure.mgmt.compute.v2016_04_30_preview.models.VirtualMachineCaptureParameters
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns
VirtualMachineCaptureResult or
ClientRawResponse<VirtualMachineCaptureResult> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.compute.v2016_04_30_preview.models.VirtualMachineCaptureResult]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.compute.v2016_04_30_preview.models.VirtualMachineCaptureResult]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._capture_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('VirtualMachineCaptureResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
capture.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/capture'}
def _create_or_update_initial(
self, resource_group_name, vm_name, parameters, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.create_or_update.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'VirtualMachine')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200, 201]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualMachine', response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualMachine', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create_or_update(
self, resource_group_name, vm_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
"""The operation to create or update a virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param parameters: Parameters supplied to the Create Virtual Machine
operation.
:type parameters:
~azure.mgmt.compute.v2016_04_30_preview.models.VirtualMachine
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns VirtualMachine or
ClientRawResponse<VirtualMachine> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.compute.v2016_04_30_preview.models.VirtualMachine]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.compute.v2016_04_30_preview.models.VirtualMachine]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('VirtualMachine', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}'}
def _delete_initial(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.delete.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def delete(
self, resource_group_name, vm_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""The operation to delete a virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns OperationStatusResponse
or ClientRawResponse<OperationStatusResponse> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.compute.v2016_04_30_preview.models.OperationStatusResponse]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.compute.v2016_04_30_preview.models.OperationStatusResponse]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('OperationStatusResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}'}
def get(
self, resource_group_name, vm_name, expand=None, custom_headers=None, raw=False, **operation_config):
"""Retrieves information about the model view or the instance view of a
virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param expand: The expand expression to apply on the operation.
Possible values include: 'instanceView'
:type expand: str or
~azure.mgmt.compute.v2016_04_30_preview.models.InstanceViewTypes
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: VirtualMachine or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.compute.v2016_04_30_preview.models.VirtualMachine
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'InstanceViewTypes')
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualMachine', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}'}
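# Illustrative usage (an assumption): ``get`` is synchronous; pass expand='instanceView'
# to also retrieve the runtime instance view, e.g.
#     vm = compute_client.virtual_machines.get(resource_group_name, vm_name,
#                                              expand='instanceView')
#     print(vm.name, vm.provisioning_state)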
def _convert_to_managed_disks_initial(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.convert_to_managed_disks.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def convert_to_managed_disks(
self, resource_group_name, vm_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""Converts virtual machine disks from blob-based to managed disks.
Virtual machine must be stop-deallocated before invoking this
operation.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns OperationStatusResponse
or ClientRawResponse<OperationStatusResponse> if raw==True
:rtype:
~msrest.polling.LROPoller[~azure.mgmt.compute.v2016_04_30_preview.models.OperationStatusResponse]
or
~msrest.polling.LROPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.compute.v2016_04_30_preview.models.OperationStatusResponse]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._convert_to_managed_disks_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('OperationStatusResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
convert_to_managed_disks.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/convertToManagedDisks'}
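# Illustrative usage (an assumption): per the docstring the VM must be stop-deallocated
# first, so callers typically chain the two pollers, e.g.
#     compute_client.virtual_machines.deallocate(resource_group_name, vm_name).result()
#     compute_client.virtual_machines.convert_to_managed_disks(resource_group_name, vm_name).result()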
def _deallocate_initial(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.deallocate.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def deallocate(
self, resource_group_name, vm_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""Shuts down the virtual machine and releases the compute resources. You
are not billed for the compute resources that this virtual machine
uses.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns OperationStatusResponse
or ClientRawResponse<OperationStatusResponse> if raw==True
:rtype:
~msrest.polling.LROPoller[~azure.mgmt.compute.v2016_04_30_preview.models.OperationStatusResponse]
or
~msrest.polling.LROPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.compute.v2016_04_30_preview.models.OperationStatusResponse]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._deallocate_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('OperationStatusResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
deallocate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/deallocate'}
def generalize(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
"""Sets the state of the virtual machine to generalized.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: OperationStatusResponse or ClientRawResponse if raw=true
:rtype:
~azure.mgmt.compute.v2016_04_30_preview.models.OperationStatusResponse
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.generalize.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
generalize.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/generalize'}
def list(
self, resource_group_name, custom_headers=None, raw=False, **operation_config):
"""Lists all of the virtual machines in the specified resource group. Use
the nextLink property in the response to get the next page of virtual
machines.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of VirtualMachine
:rtype:
~azure.mgmt.compute.v2016_04_30_preview.models.VirtualMachinePaged[~azure.mgmt.compute.v2016_04_30_preview.models.VirtualMachine]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.VirtualMachinePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.VirtualMachinePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines'}
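# Illustrative usage (an assumption): the returned VirtualMachinePaged object is a lazy
# iterator, so callers simply loop over it and the nextLink pages are fetched on demand, e.g.
#     for vm in compute_client.virtual_machines.list(resource_group_name):
#         print(vm.name)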
def list_all(
self, custom_headers=None, raw=False, **operation_config):
"""Lists all of the virtual machines in the specified subscription. Use
the nextLink property in the response to get the next page of virtual
machines.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of VirtualMachine
:rtype:
~azure.mgmt.compute.v2016_04_30_preview.models.VirtualMachinePaged[~azure.mgmt.compute.v2016_04_30_preview.models.VirtualMachine]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_all.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.VirtualMachinePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.VirtualMachinePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/virtualMachines'}
def list_available_sizes(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
"""Lists all available virtual machine sizes to which the specified
virtual machine can be resized.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of VirtualMachineSize
:rtype:
~azure.mgmt.compute.v2016_04_30_preview.models.VirtualMachineSizePaged[~azure.mgmt.compute.v2016_04_30_preview.models.VirtualMachineSize]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_available_sizes.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.VirtualMachineSizePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.VirtualMachineSizePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_available_sizes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/vmSizes'}
def _power_off_initial(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.power_off.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def power_off(
self, resource_group_name, vm_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""The operation to power off (stop) a virtual machine. The virtual
machine can be restarted with the same provisioned resources. You are
still charged for this virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns OperationStatusResponse
or ClientRawResponse<OperationStatusResponse> if raw==True
:rtype:
~msrest.polling.LROPoller[~azure.mgmt.compute.v2016_04_30_preview.models.OperationStatusResponse]
or
~msrest.polling.LROPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.compute.v2016_04_30_preview.models.OperationStatusResponse]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._power_off_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('OperationStatusResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
power_off.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/powerOff'}
def _restart_initial(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.restart.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def restart(
self, resource_group_name, vm_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""The operation to restart a virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns OperationStatusResponse
or ClientRawResponse<OperationStatusResponse> if raw==True
:rtype:
~msrest.polling.LROPoller[~azure.mgmt.compute.v2016_04_30_preview.models.OperationStatusResponse]
or
~msrest.polling.LROPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.compute.v2016_04_30_preview.models.OperationStatusResponse]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._restart_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('OperationStatusResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/restart'}
def _start_initial(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.start.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def start(
self, resource_group_name, vm_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""The operation to start a virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns OperationStatusResponse
or ClientRawResponse<OperationStatusResponse> if raw==True
:rtype:
~msrest.polling.LROPoller[~azure.mgmt.compute.v2016_04_30_preview.models.OperationStatusResponse]
or
~msrest.polling.LROPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.compute.v2016_04_30_preview.models.OperationStatusResponse]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._start_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('OperationStatusResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/start'}
def _redeploy_initial(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.redeploy.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def redeploy(
self, resource_group_name, vm_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""The operation to redeploy a virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns OperationStatusResponse
or ClientRawResponse<OperationStatusResponse> if raw==True
:rtype:
~msrest.polling.LROPoller[~azure.mgmt.compute.v2016_04_30_preview.models.OperationStatusResponse]
or
~msrest.polling.LROPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.compute.v2016_04_30_preview.models.OperationStatusResponse]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._redeploy_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('OperationStatusResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
redeploy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/redeploy'}
|
#!/usr/bin/env python
# Copyright (C) 2012-2014 Bastian Kleineidam
"""
Script to get a list of gocomics and save the info in a JSON file for further processing.
"""
from __future__ import print_function
import codecs
import re
import sys
import os
import requests
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
from dosagelib.util import tagre, getPageContent, asciify, unescape
from scriptutil import contains_case_insensitive, capfirst, save_result, load_result, truncate_name
json_file = __file__.replace(".py", ".json")
#<a href="/shortname" class="alpha_list updated">name</a>
url_matcher = re.compile(tagre("a", "href", r'(/[^"]+)', after="alpha_list") + r"([^<]+)</a>")
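# Illustrative assumption of what the matcher captures: for a search-result line such as
#   <a href="/calvinandhobbes" class="alpha_list updated">Calvin and Hobbes</a>
# group 1 would be "/calvinandhobbes" (the short name) and group 2 "Calvin and Hobbes"
# (the display name); tagre() builds the tag-matching part of the expression.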
# names of comics to exclude
exclude_comics = [
"Angryprogrammer", # unavailable
"Complex", # "coming soon"
"Guinness", # "coming soon"
"Jabberwoncky", # "coming soon"
"KickyBrand", # unavailable
"Penmanship", # unavailable
"RandysRationale", # "coming soon"
"SaturdayMorningBreakfastCereal", # duplicate
"SignsOfOurTimes", # "coming soon"
"TheGagwriter", # "coming soon"
"Yaoyao", # "coming soon"
]
def handle_url(url, session, res):
"""Parse one search result page."""
print("Parsing", url, file=sys.stderr)
try:
data = getPageContent(url, session)
except IOError as msg:
print("ERROR:", msg, file=sys.stderr)
return
for match in url_matcher.finditer(data):
shortname = match.group(1)
name = unescape(match.group(2))
name = asciify(name.replace('&', 'And').replace('@', 'At'))
name = capfirst(name)
if name in exclude_comics:
continue
if contains_case_insensitive(res, name):
# we cannot handle two comics that only differ in case
print("INFO: skipping possible duplicate", repr(name), file=sys.stderr)
continue
res[name] = shortname
def get_results():
"""Parse all search result pages."""
# store info in a dictionary {name -> shortname}
res = {}
session = requests.Session()
handle_url('http://www.gocomics.com/features', session, res)
handle_url('http://www.gocomics.com/explore/espanol', session, res)
handle_url('http://www.gocomics.com/explore/editorial_list', session, res)
handle_url('http://www.gocomics.com/explore/sherpa_list', session, res)
save_result(res, json_file)
def print_results(args):
"""Print all comics that have at least the given number of minimum comic strips."""
min_comics, filename = args
with codecs.open(filename, 'a', 'utf-8') as fp:
for name, shortname in sorted(load_result(json_file).items()):
if name in exclude_comics:
print("Excluded " + name)
continue
fp.write(u"add(%r, %r)\n" % (
str(truncate_name(name)), str(shortname))
)
if __name__ == '__main__':
if len(sys.argv) > 1:
print_results(sys.argv[1:])
else:
get_results()
|
# -*- coding: utf-8 -*-
r"""
Grid View Adapter for partitions
**Grid View partition operations:**
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~PartitionGridViewAdapter.cell_to_display` | Static method for typecasting cell content to widget display value
:meth:`~PartitionGridViewAdapter.display_to_cell` | Instance method for typecasting widget display value to cell content
:meth:`~PartitionGridViewAdapter.compute_cells` | Compute partition cells as a dictionary { coordinate pair : boolean }
:meth:`~PartitionGridViewAdapter.from_cells` | Create a new partition from a cells dictionary
:meth:`~PartitionGridViewAdapter.get_cell` | Get the partition cell content
:meth:`~PartitionGridViewAdapter.addable_cells` | List addable cells
:meth:`~PartitionGridViewAdapter.removable_cells` | List removable cells
:meth:`~PartitionGridViewAdapter.add_cell` | Add a cell
:meth:`~PartitionGridViewAdapter.remove_cell` | Remove a cell
AUTHORS ::
Odile Bénassy, Nicolas Thiéry
"""
from sage.combinat.partition import Partition
from sage_widget_adapters.generic_grid_view_adapter import GridViewAdapter
from six import text_type
class PartitionGridViewAdapter(GridViewAdapter):
r"""
Grid view adapter for partitions.
ATTRIBUTES::
* ``objclass`` -- Partition
* ``celltype`` -- bool
* ``cellzero`` -- False
"""
objclass = Partition
constructorname = 'Partition'
celltype = bool
cellzero = False
@staticmethod
def cell_to_display(cell_content, display_type=bool):
r"""
From object cell content
to widget display value.
TESTS ::
sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
sage: PartitionGridViewAdapter.cell_to_display(True)
True
sage: from six import text_type
sage: PartitionGridViewAdapter.cell_to_display("my string", text_type)
''
"""
if display_type == text_type:
return ''
return cell_content
def display_to_cell(self, display_value, display_type=bool):
r"""
From widget display value
to object cell content
TESTS ::
sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
sage: pa = PartitionGridViewAdapter()
sage: pa.display_to_cell(True)
True
sage: pa.display_to_cell('')
False
"""
if not display_value or display_type == text_type:
return self.cellzero
return display_value
@staticmethod
def compute_cells(obj):
r"""
From a partition,
return a dictionary { coordinates pair : boolean }
TESTS ::
sage: from sage.combinat.partition import Partition
sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
sage: p = Partition([3, 2, 1, 1])
sage: PartitionGridViewAdapter.compute_cells(p)
{(0, 0): False,
(0, 1): False,
(0, 2): False,
(1, 0): False,
(1, 1): False,
(2, 0): False,
(3, 0): False}
"""
return {(i,j):False for (i,j) in obj.cells()}
@classmethod
def from_cells(cls, cells={}):
r"""
From a dictionary { coordinates pair : boolean },
return the corresponding partition
TESTS ::
sage: from sage.combinat.partition import Partition
sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
sage: PartitionGridViewAdapter.from_cells({(0, 0): False, (0, 1): False, (0, 2): True, (0, 3): False, (1, 0): False, (2, 0): True})
[4, 1, 1]
"""
partition_elements = [
len([(i, pos[1]) for pos in cells if pos[0] == i]) for i in range(max(pos[0] for pos in cells) + 1)]
try:
return cls.objclass(partition_elements)
except:
raise TypeError(
"This object is not compatible with this adapter (%s, for %s objects)" % (cls, cls.objclass))
@staticmethod
def get_cell(obj, pos):
r"""
Get cell value
TESTS ::
sage: from sage.combinat.partition import Partition
sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
sage: p = Partition([6, 5, 2, 1])
sage: PartitionGridViewAdapter.get_cell(p, (1, 1))
False
sage: PartitionGridViewAdapter.get_cell(p, (1, 6))
Traceback (most recent call last):
...
ValueError: Cell '(1, 6)' not in partition.
"""
try:
assert pos[0] < len(obj) and pos[1] < obj[pos[0]]
except:
raise ValueError("Cell '%s' not in partition." % str(pos))
return False
def set_cell(self, obj, pos, val, dirty={}, constructorname=''):
r"""
From a partition `obj`, a position (pair of coordinates) `pos` and a value `val`,
return a new partition with a modified cell at position `pos`.
Actually remove the cell if it's removable, otherwise return the same partition.
TESTS ::
sage: from sage.combinat.partition import Partition
sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
sage: p = Partition([6, 5, 2, 1])
sage: pa = PartitionGridViewAdapter()
sage: pa.set_cell(p, (1,2), True)
[6, 5, 2, 1]
sage: pa.set_cell(p, (1,4), True)
[6, 4, 2, 1]
"""
if pos in self.removable_cells(obj):
return self.remove_cell(obj, pos, dirty)
return obj
@staticmethod
def addable_cells(obj):
r"""
List object addable cells
TESTS ::
sage: from sage.combinat.partition import Partition
sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
sage: p = Partition([6, 5, 2, 1])
sage: PartitionGridViewAdapter.addable_cells(p)
[(0, 6), (1, 5), (2, 2), (3, 1), (4, 0)]
"""
return obj.outside_corners()
@staticmethod
def removable_cells(obj):
r"""
List object removable cells
TESTS ::
sage: from sage.combinat.partition import Partition
sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
sage: p = Partition([6, 5, 2, 1])
sage: PartitionGridViewAdapter.removable_cells(p)
[(0, 5), (1, 4), (2, 1), (3, 0)]
"""
return obj.corners()
def add_cell(self, obj, pos, val=None, dirty={}):
r"""
Add cell
TESTS ::
sage: from sage.combinat.partition import Partition
sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
sage: p = Partition([6, 5, 2, 1])
sage: pa = PartitionGridViewAdapter()
sage: pa.add_cell(p, (2, 2))
[6, 5, 3, 1]
sage: pa.add_cell(p, (4, 0), 42)
[6, 5, 2, 1, 1]
sage: pa.add_cell(p, (2, 0))
Traceback (most recent call last):
...
ValueError: Cell position '(2, 0)' is not addable.
"""
if not pos in self.addable_cells(obj):
raise ValueError("Cell position '%s' is not addable." % str(pos))
try:
return obj.add_cell(pos[0])
except Exception as e:
return e
def remove_cell(self, obj, pos, dirty={}):
r"""
Remove cell
TESTS ::
sage: from sage.combinat.partition import Partition
sage: from sage_widget_adapters.combinat.partition_grid_view_adapter import PartitionGridViewAdapter
sage: p = Partition([6, 5, 2, 1])
sage: pa = PartitionGridViewAdapter()
sage: pa.remove_cell(p, (2, 1))
[6, 5, 1, 1]
sage: pa.remove_cell(p, (1, 1))
Traceback (most recent call last):
...
ValueError: Cell position '(1, 1)' is not removable.
"""
if not pos in self.removable_cells(obj):
raise ValueError("Cell position '%s' is not removable." % str(pos))
try:
return obj.remove_cell(pos[0])
except Exception as e:
return e
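# Illustrative round trip using only this adapter's own API (hedged sketch):
#     p = Partition([3, 2])
#     cells = PartitionGridViewAdapter.compute_cells(p)   # {(0, 0): False, ..., (1, 1): False}
#     PartitionGridViewAdapter.from_cells(cells)           # recovers [3, 2]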
|
# Imports from 3rd party libraries
import dash
import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
import plotly.express as px
# Imports from this application
from app import app
from .predictions import inputs, row1, row2, row3, row4
# Single-column page layout: the prediction inputs, a divider and a Calculate button
# https://dash-bootstrap-components.opensource.faculty.ai/l/components/layout
# dcc.Markdown reference
# https://dash.plotly.com/dash-core-components/markdown
layout = html.Div(
[
inputs,
html.Hr(),
dbc.Row(
[
dbc.Col(
dcc.Link(dbc.Button('Calculate', color='primary'), href='/predictions')
)
],
)
],
style={
# 'background-image':'url("/assets/background.jpg")'
'text-align': 'center'
}
)
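# Hedged sketch (assumption, not part of this module): in the usual multi-page Dash
# pattern this layout is selected by a routing callback defined in the app's index/run
# module, roughly along these lines:
#     app.layout = html.Div([dcc.Location(id='url', refresh=False),
#                            html.Div(id='page-content')])
#
#     @app.callback(Output('page-content', 'children'), [Input('url', 'pathname')])
#     def display_page(pathname):
#         if pathname == '/predictions':
#             return predictions.layout
#         return index.layout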
|
from abc import ABCMeta, abstractmethod
import numpy as np
from sklearn.mixture import GaussianMixture
class Clustering(metaclass=ABCMeta):
def __init__(self, DS, levels=1, random_state=None):
self.DS = DS
self.name = DS.name
self.columns = DS.D.columns
self.X = self.DS.D.values  # DataFrame.as_matrix() was removed in pandas 1.0; .values is the portable equivalent
self.levels = levels
self.random_state = random_state
self.clusters = []
@abstractmethod
def cluster(self):
pass
class HGMMClustering(Clustering):
def __init__(self, DS, levels=1, random_state=None):
"""
Parameters
----------
DS :obj:`Dataset`
level : int
Number of levels to cluster
random_state : int (optional)
Initialize Gaussian Mixture Model with specified random state
"""
Clustering.__init__(self, DS, levels, random_state)
self.clustname = 'HGMM'
self.shortclustname = 'hgmm'
def cluster(self):
clusters = []
n = self.X.shape[0]
l0 = self.hgmml0(self.X, self.random_state)
clusters.append(l0)
li = self.gmmBranch(l0[0], self.random_state)
clusters.append(li)
while (len(li) < n) and (len(clusters) - 1 < self.levels):
lip = []
for c in li:
q = self.gmmBranch(c, self.random_state)
if q is not None:
lip.extend(q)
clusters.append(lip)
li = lip
self.clusters = [list(map(lambda x: x[0], c)) for c in clusters]
self.hierarch = clusters
def gmmBranch(self, level, random_state):
X, p, mu = level
# Compare the BIC of 1- and 2-component fits to decide whether to split this node
gmm_1 = GaussianMixture(n_components=1, random_state=random_state)
gmm_1.fit(X)
bic_1 = gmm_1.bic(X)
if len(X) > 1:  # a 2-component mixture needs at least two samples; otherwise keep the node whole
gmm_2 = GaussianMixture(n_components=2, random_state=random_state)
gmm_2.fit(X)
bic_2 = gmm_2.bic(X)
else:
bic_2 = bic_1
if bic_2 < bic_1:
X0 = X[gmm_2.predict(X) == 0, :]
X1 = X[gmm_2.predict(X) == 1, :]
mypro = np.rint(gmm_2.weights_ * p)
return [(
X0,
int(mypro[0]),
gmm_2.means_[0, :],
), (
X1,
int(mypro[1]),
gmm_2.means_[1, :],
)]
else:
return [(
X,
int(np.rint(p * gmm_1.weights_[0])),
gmm_1.means_[0, :],
)]
def hgmml0(self, X, random_state):
gmm = GaussianMixture(n_components=1, random_state=random_state)
gmm.fit(X)
return [(
X,
int(np.rint(X.shape[0] * gmm.weights_[0])),
gmm.means_[0, :],
)]
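# Hedged usage sketch. Assumptions: ``DS`` only needs a ``name`` attribute and a pandas
# DataFrame ``D``; the project's real Dataset class may differ.
if __name__ == '__main__':
    import pandas as pd

    class _ToyDataset(object):
        def __init__(self, name, frame):
            self.name = name
            self.D = frame

    rng = np.random.RandomState(0)
    # two well-separated Gaussian blobs, 50 points each
    frame = pd.DataFrame(
        np.vstack([rng.normal(0, 1, (50, 2)), rng.normal(8, 1, (50, 2))]),
        columns=['x', 'y'])
    hgmm = HGMMClustering(_ToyDataset('toy', frame), levels=2, random_state=0)
    hgmm.cluster()
    # hgmm.clusters[i] holds the sub-sample arrays found at level i of the hierarchy
    print([len(level) for level in hgmm.clusters])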
|
from numpy.testing import assert_raises
from configurator.dialogs import DialogBuilder, Dialog
class TestDialogBuilder(object):
def test_rules_constraints_one_required(self, email_client):
assert_raises(ValueError, DialogBuilder,
email_client.var_domains, email_client.sample)
def test_rules_constraints_mutually_exclusive(self, email_client):
assert_raises(ValueError, DialogBuilder,
email_client.var_domains, email_client.sample,
email_client.rules, email_client.constraints)
class TestDialog(object):
def test_reset(self, email_client):
dialog = Dialog(email_client.var_domains, rules=email_client.rules,
validate=True)
dialog.reset()
assert dialog.config == {}
dialog.set_answer(0, "yes")
assert dialog.config != {}
dialog.reset()
assert dialog.config == {}
def test_set_answer_rules(self, email_client):
dialog = Dialog(email_client.var_domains, rules=email_client.rules,
validate=True)
dialog.reset()
dialog.set_answer(1, "lgi")
assert dialog.is_complete()
def test_set_answer_constraints(self, email_client):
dialog = Dialog(email_client.var_domains,
constraints=email_client.constraints,
validate=True)
dialog.reset()
dialog.set_answer(1, "lgi")
assert dialog.is_complete()
|
# from database import db
#from geoalchemy2 import Geometry
#from sqlalchemy import Column
from sqlalchemy.dialects.postgresql.json import JSONB
from app import db
class AWSInstance(db.Model):
__tablename__ = 'aws_instances'
instance_id = db.Column(db.String(30), primary_key=True)
instance_model = db.Column(db.String(30), index=True, nullable=False)
aws_region = db.Column(db.String(30), primary_key=True, nullable=False)
aws_region_name = db.Column(db.String(60), nullable=False)
aws_region_coordinate_lat = db.Column(db.String(255), nullable=True)
aws_region_coordinate_log = db.Column(db.String(255), nullable=True)
aws_region_coordinate_geojson = db.Column(db.JSON, nullable=True)
instance_low_utilization_suspected = db.Column(db.Boolean,
index=True,
nullable=False)
instance_ami_id = db.Column(db.String(30))
instance_running_price = db.Column(db.String(30))
instance_state = db.Column(db.String(19),
nullable=False)
instance_state_date = db.Column(db.DateTime,
nullable=False)
instance_previous_state = db.Column(db.String(19),
nullable=True)
instance_linux_kernel_version = db.Column(db.String(30))
instance_linux_distribution = db.Column(db.String(30))
instance_ssh_pem_key = db.Column(db.String(255))
instance_auto_scaling_group = db.Column(db.Boolean)
instance_auto_scaling_group_name = db.Column(db.String(255))
instance_type = db.Column(db.String(255),
nullable=False)
instance_ebs_optimized = db.Column(db.Boolean)
instance_vpc_id = db.Column(db.String(30))
instance_vpc_subnet_id = db.Column(db.String(30))
instance_vpc_ip = db.Column(db.String(30))
instance_vpc_availability_zone = db.Column(db.String(30))
instance_aws_account_id = db.Column(db.String(30))
instance_reservation_id = db.Column(db.String(50))
instance_spot_id = db.Column(db.String(30))
instance_launch_time = db.Column(db.DateTime)
instance_tags = db.Column(db.String(9999))
instance_tags_json = db.Column(JSONB, index=True)
workload_tag = db.Column(db.String(255), index=True, nullable=False) # Profile would be: test, qa, prod.
def __init__(self, instance_id, instance_model, aws_region, aws_region_name,
instance_low_utilization_suspected, instance_ami_id, instance_state,
instance_linux_kernel_version, instance_linux_distribution, instance_ssh_pem_key
, instance_auto_scaling_group, instance_auto_scaling_group_name, instance_type, instance_ebs_optimized
, instance_vpc_id, instance_vpc_subnet_id, instance_vpc_ip, instance_vpc_availability_zone
, instance_aws_account_id, instance_reservation_id, instance_spot_id, instance_launch_time,
instance_tags, instance_tags_json, instance_state_date, workload_tag,
aws_region_coordinate_lat=None, aws_region_coordinate_log=None, aws_region_coordinate_geojson=None,
instance_previous_state=None, instance_running_price=None):
self.instance_id = instance_id
self.instance_model = instance_model
self.aws_region = aws_region
self.aws_region_name = aws_region_name
self.aws_region_coordinate_lat = aws_region_coordinate_lat
self.aws_region_coordinate_log = aws_region_coordinate_log
self.aws_region_coordinate_geojson = aws_region_coordinate_geojson
self.instance_low_utilization_suspected = instance_low_utilization_suspected
self.instance_ami_id = instance_ami_id
self.instance_state = instance_state
self.instance_linux_kernel_version = instance_linux_kernel_version
self.instance_linux_distribution = instance_linux_distribution
self.instance_ssh_pem_key = instance_ssh_pem_key
self.instance_auto_scaling_group = instance_auto_scaling_group
self.instance_auto_scaling_group_name = instance_auto_scaling_group_name
self.instance_type = instance_type
self.instance_ebs_optimized = instance_ebs_optimized
self.instance_vpc_id = instance_vpc_id
self.instance_vpc_subnet_id = instance_vpc_subnet_id
self.instance_vpc_ip = instance_vpc_ip
self.instance_vpc_availability_zone = instance_vpc_availability_zone
self.instance_aws_account_id = instance_aws_account_id
self.instance_reservation_id = instance_reservation_id
self.instance_spot_id = instance_spot_id
self.instance_launch_time = instance_launch_time
self.instance_tags = instance_tags
self.instance_tags_json = instance_tags_json
self.instance_state_date = instance_state_date
self.instance_previous_state = instance_previous_state
self.instance_running_price = instance_running_price
self.workload_tag = workload_tag.upper()
def __repr__(self):
return "<AWSInstance '{}'>".format(self.instance_id)
class AWSEBS(db.Model):
__tablename__ = 'aws_ebs'
ebs_volume_id = db.Column(db.String(30), primary_key=True)
ebs_aws_az = db.Column(db.String(30), primary_key=True)
instance_id = db.Column(db.String(30), index=True)
snapshot_id = db.Column(db.String(30), index=True)
ebs_size = db.Column(db.String(14), nullable=False)
ebs_volume_type = db.Column(db.String(14), nullable=False)
ebs_iops = db.Column(db.String(14), nullable=False)
ebs_alarm_status = db.Column(db.String(30))
ebs_status = db.Column(db.String(30))
ebs_volume_status_ok = db.Column(db.Boolean)
ebs_volume_encryption = db.Column(db.Boolean)
ebs_creation_data = db.Column(db.DateTime)
ebs_dettached_data = db.Column(db.DateTime)
ebs_tag = db.Column(JSONB, index=True)
def __init__(self, ebs_volume_id, ebs_aws_az, instance_id, snapshot_id,
ebs_size, ebs_volume_type, ebs_iops, ebs_alarm_status, ebs_status,
ebs_volume_status_ok, ebs_volume_encryption,
ebs_creation_data, ebs_dettached_data, ebs_tag):
self.ebs_volume_id = ebs_volume_id
self.ebs_aws_az = ebs_aws_az
self.instance_id = instance_id
self.snapshot_id = snapshot_id
self.ebs_size = ebs_size
self.ebs_volume_type = ebs_volume_type
self.ebs_iops = ebs_iops
self.ebs_alarm_status = ebs_alarm_status
self.ebs_status = ebs_status
self.ebs_volume_status_ok = ebs_volume_status_ok
self.ebs_volume_encryption = ebs_volume_encryption
self.ebs_creation_data = ebs_creation_data
self.ebs_dettached_data = ebs_dettached_data
self.ebs_tag = ebs_tag
def __repr__(self):
return '<AWSEBS {} {}>'.format(self.ebs_volume_id, self.ebs_aws_az)
class AWSnapshot(db.Model):
__tablename__ = 'aws_snap'
snapshot_id = db.Column(db.String(30), primary_key=True)
ebs_volume_id = db.Column(db.String(30), index=True)
snap_size = db.Column(db.String(14), nullable=False)
snap_status = db.Column(db.String(30))
snap_progress = db.Column(db.String(30))
snap_encryption = db.Column(db.Boolean)
snap_description = db.Column(db.String(255))
snap_started_data = db.Column(db.DateTime)
snap_tag = db.Column(JSONB, index=True)
def __init__(self, snapshot_id, ebs_volume_id, snap_size, snap_status,
snap_progress, snap_encryption, snap_description, snap_started_data, snap_tag):
self.snapshot_id = snapshot_id
self.ebs_volume_id = ebs_volume_id
self.snap_size = snap_size
self.snap_status = snap_status
self.snap_progress = snap_progress
self.snap_encryption = snap_encryption
self.snap_description = snap_description
self.snap_started_data = snap_started_data
self.snap_tag = snap_tag
def __repr__(self):
return '<AWSnapshot {} {}>'.format(self.snapshot_id, self.snap_size)
class AWSPrices(db.Model):
__tablename__ = 'aws_prices'
instance_type = db.Column(db.String(30), primary_key=True)
aws_region = db.Column(db.String(30), primary_key=True)
price_date = db.Column(db.Date, index=True)
price_ondemand_price_hrs_usd = db.Column(db.Float)
price_ondemand_price_mth_usd = db.Column(db.Float)
price_reserved_price_hsr_usd = db.Column(db.Float)
price_reserved_price_mth_usd = db.Column(db.Float)
price_reserved_price_offer_code = db.Column(db.String(255))
price_spot_price_hsr_usd = db.Column(db.Float)
price_spot_price_mth_usd = db.Column(db.Float)
price_spot_offer_date = db.Column(db.DateTime)
price_ondemand_reserved_saving = db.Column(db.Float)
price_ondemand_reserved_saving_pct = db.Column(db.Float)
price_ondemand_spot_saving = db.Column(db.Float)
price_ondemand_spot_saving_pct = db.Column(db.Float)
def __init__(self, instance_type, aws_region, price_date, price_ondemand_price_hrs_usd,
price_ondemand_price_mth_usd, price_reserved_price_hsr_usd,
price_reserved_price_mth_usd, price_reserved_price_offer_code, price_spot_price_hsr_usd,
price_spot_price_mth_usd, price_spot_offer_date,
price_ondemand_reserved_saving, price_ondemand_reserved_saving_pct,
price_ondemand_spot_saving, price_ondemand_spot_saving_pct):
self.instance_type = instance_type
self.aws_region = aws_region
self.price_date = price_date
self.price_ondemand_price_hrs_usd = price_ondemand_price_hrs_usd
self.price_ondemand_price_mth_usd = price_ondemand_price_mth_usd
self.price_reserved_price_hsr_usd = price_reserved_price_hsr_usd
self.price_reserved_price_mth_usd = price_reserved_price_mth_usd
self.price_reserved_price_offer_code = price_reserved_price_offer_code
self.price_spot_price_hsr_usd = price_spot_price_hsr_usd
self.price_spot_price_mth_usd = price_spot_price_mth_usd
self.price_spot_offer_date = price_spot_offer_date
self.price_ondemand_reserved_saving = price_ondemand_reserved_saving
self.price_ondemand_reserved_saving_pct = price_ondemand_reserved_saving_pct
self.price_ondemand_spot_saving = price_ondemand_spot_saving
self.price_ondemand_spot_saving_pct = price_ondemand_spot_saving_pct
def __repr__(self):
return '<AWSPrices {} {} {}>'.format(self.instance_type, self.aws_region, self.price_date)
class AWSInstancePrice(db.Model):
# __tablename__ = 'aws_instances_price'
instance_price_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
instance_id = db.Column(db.String(30), nullable=False, index=True)
aws_region = db.Column(db.String(30), nullable=False, index=True)
instance_type = db.Column(db.String(30), nullable=False, index=True)
instance_price_date = db.Column(db.Date, nullable=False)
instance_running_price = db.Column(db.String(30), nullable=False, index=True)
instance_hrs_price_usd = db.Column(db.Float)
instance_mth_price_usd = db.Column(db.Float)
instance_last_state = db.Column(db.String(30))
workload_tag = db.Column(db.String(255), index=True, nullable=False) # Profile would be: test, qa, prod.
def __init__(self, instance_id, aws_region, instance_type, instance_price_date, instance_running_price, instance_hrs_price_usd, instance_mth_price_usd, instance_last_state, workload_tag):
self.instance_id = instance_id
self.aws_region = aws_region
self.instance_type = instance_type
self.instance_price_date = instance_price_date
self.instance_running_price = instance_running_price
self.instance_hrs_price_usd = instance_hrs_price_usd
self.instance_mth_price_usd = instance_mth_price_usd
self.instance_last_state = instance_last_state
self.workload_tag = workload_tag.upper()
def __repr__(self):
return '<AWSInstancePrice {} {} {}>'.format(self.instance_id, self.instance_type, self.aws_region)
class AWSInstanceWorkLoad(db.Model):
__tablename__ = 'aws_instances_workload'
workload_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
workload_date = db.Column(db.Date, nullable=False, index=True)
instance_id = db.Column(db.String(19), nullable=False, index=True)
instance_type = db.Column(db.String(255), nullable=False, index=True)
aws_region = db.Column(db.String(19), nullable=False, index=True)
cpu_percentage = db.Column(db.Float)
available_memory_percentage = db.Column(db.Float)
network_in_bytes_aggr = db.Column(db.Float)
network_ou_bytes_aggr = db.Column(db.Float)
cloudwatch_aggregation_type = db.Column(db.String(19))
cloudwatch_aggregation_days = db.Column(db.Integer)
cloudwatch_aggregation_period_from = db.Column(db.DateTime)
cloudwatch_aggregation_period_to = db.Column(db.DateTime)
cloudwatch_period_seconds = db.Column(db.Integer)
#workload_criteria_cpu = db.Column(db.Float)
#workload_criteria_memory = db.Column(db.Float)
#workload_criteria_network = db.Column(db.Float)
workload_criteria_check = db.Column(db.String(255))
workload_low_utilization = db.Column(db.Boolean, index=True)
workload_java_vm_tuning_suspect = db.Column(db.Boolean, index=True)
workload_java_vm_criteria_check = db.Column(db.String(255))
def __init__(self, workload_date,instance_id, aws_region, instance_type, cpu_percentage, available_memory_percentage, network_in_bytes_aggr,
network_ou_bytes_aggr, cloudwatch_aggregation_type, cloudwatch_aggregation_days, cloudwatch_aggregation_period_from,
cloudwatch_aggregation_period_to, cloudwatch_period_seconds, workload_criteria_check, workload_low_utilization,
workload_java_vm_tuning_suspect=False, workload_java_vm_criteria_check=None):
self.workload_date = workload_date
self.instance_id = instance_id
self.aws_region = aws_region
self.instance_type = instance_type
self.cpu_percentage = cpu_percentage
self.available_memory_percentage = available_memory_percentage
self.network_in_bytes_aggr = network_in_bytes_aggr
self.network_ou_bytes_aggr = network_ou_bytes_aggr
self.cloudwatch_aggregation_type = cloudwatch_aggregation_type
self.cloudwatch_aggregation_days = cloudwatch_aggregation_days
self.cloudwatch_aggregation_period_from = cloudwatch_aggregation_period_from
self.cloudwatch_aggregation_period_to = cloudwatch_aggregation_period_to
self.cloudwatch_period_seconds = cloudwatch_period_seconds
self.workload_criteria_check = workload_criteria_check
self.workload_low_utilization = workload_low_utilization
self.workload_java_vm_tuning_suspect = workload_java_vm_tuning_suspect
self.workload_java_vm_criteria_check = workload_java_vm_criteria_check
def __repr__(self):
return '<AWSInstanceWorkLoad {} {}>'.format(self.instance_id, self.aws_region)
class AWSSummary(db.Model):
__tablename__ = 'aws_summary_workload'
summary_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
summary_date = db.Column(db.Date, nullable=False, index=True)
total_ec2_ondemand = db.Column(db.Integer)
total_ec2_reserved = db.Column(db.Integer)
percentage_rsv_x_ond = db.Column(db.Integer)
total_ec2_spot = db.Column(db.Integer)
total_instances_flagged_lowuse = db.Column(db.Integer)
total_cost_ec2_ondemand_month = db.Column(db.Float)
total_cost_ec2_reserved_month = db.Column(db.Float)
total_cost_ec2_spot_month = db.Column(db.Float)
    opportunity_save_money_reservation = db.Column(db.Float)
opportunity_save_money_spot_asg = db.Column(db.Float)
    summary_tag = db.Column(db.String(255))  # Profile would be: test, qa, prod, qadev, or any other useful tag
    def __init__(self, summary_date,
                 total_ec2_ondemand,
                 total_ec2_reserved, percentage_rsv_x_ond, total_ec2_spot, total_instances_flagged_lowuse,
                 total_cost_ec2_ondemand_month, total_cost_ec2_reserved_month, total_cost_ec2_spot_month,
                 opportunity_save_money_reservation, opportunity_save_money_spot_asg, summary_tag):
self.summary_date = summary_date
self.total_ec2_ondemand = total_ec2_ondemand
self.total_ec2_reserved = total_ec2_reserved
self.percentage_rsv_x_ond = percentage_rsv_x_ond
self.total_ec2_spot = total_ec2_spot
self.total_instances_flagged_lowuse = total_instances_flagged_lowuse
self.total_cost_ec2_ondemand_month = total_cost_ec2_ondemand_month
self.total_cost_ec2_reserved_month = total_cost_ec2_reserved_month
self.total_cost_ec2_spot_month = total_cost_ec2_spot_month
        self.opportunity_save_money_reservation = opportunity_save_money_reservation
        self.opportunity_save_money_spot_asg = opportunity_save_money_spot_asg
self.summary_tag = summary_tag.upper()
def __repr__(self):
return '<AWSSummary {} {}>'.format(self.summary_id, self.summary_date)
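# Hedged usage sketch (not part of the original models): how a collector script
# might record one day's price snapshot for an instance, assuming an active Flask
# application context and that `db` is the Flask-SQLAlchemy handle used above.
# The instance id, region, and prices below are made-up illustrations.
def _example_record_instance_price():
    import datetime
    price_row = AWSInstancePrice(
        instance_id='i-0123456789abcdef0',   # hypothetical instance id
        aws_region='us-east-1',
        instance_type='t3.medium',
        instance_price_date=datetime.date.today(),
        instance_running_price='ondemand',
        instance_hrs_price_usd=0.0416,
        instance_mth_price_usd=0.0416 * 730,
        instance_last_state='running',
        workload_tag='prod',                 # stored upper-cased by __init__
    )
    db.session.add(price_row)
    db.session.commit()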
|
#!/usr/bin/env python3
# https://codeforces.com/problemset/problem/669/A
n = int(input())
print((n//3)*2 if n%3==0 else (n//3)*2+1)
|
HASH_TAGS = "#joke #programming #programmingjoke"
|
import sys
sys.path.append('../main')
import keras
import numpy as np
from util94 import concat,flatten,n_hot_decoder,load_input2idx, load_label2idx,get_input_set,get_label_set
from util94 import load_text,load_text_hierarchical,get_stopwords,remove_stopwords
stopwords_path = '../train_data/law_stopwords.txt'
path_30w = '../train_data2/economic_legal'
stopwords = get_stopwords(stopwords_path)
input_set_30w, input2idx_30w = get_input_set(path_30w,3,stopwords)
label_set_30w, label2idx_30w = get_label_set(path_30w)
class base(object):
def __init__(self):
self.choice = 'char'
self.input_set_size, self.label_set_size = len(self.input_set), [len(i) for i in self.label_set]
def decode(self, pred_res, with_prob):
if type(pred_res) is list:
ret = [flatten([n_hot_decoder(j, self.label_set[idx], self.top_nb[idx],
self.threshold[idx], with_prob) for idx,j in enumerate(i)]) for i in pred_res]
else:
ret = [n_hot_decoder(i, self.label_set, self.top_nb, self.threshold,
with_prob) for i in pred_res]
return ret
def match(self, x):
if self.x_type == 'hierarchical':
self.input_text = [load_text_hierarchical(i, self.max_sentence_nb,self.maxlen, self.input2idx, self.choice) for i in x ]
else :
self.input_text = [load_text(i, self.max_sentence_len, self.input2idx, self.choice) for i in x ]
input_text = np.asarray(self.input_text)
res = self.model.predict(input_text)
res = concat(res)
res = self.decode(res, True)
return res
class hierarchical_30w(base):
def __init__(self):
self.vector_size = 128
self.max_sentence_nb = 10
self.maxlen = 128
self.x_type = 'hierarchical'
self.input_set, self.input2idx = input_set_30w, input2idx_30w
self.label_set, self.label2idx = label_set_30w, label2idx_30w
self.weights_file_path = '../model_6w/hi_2016-12-27/weights.46-0.172.h5'
self.top_nb, self.threshold = [2, 10, 10, 6], [0.4, 0.4, 0.4, 0.4]
base.__init__(self)
self.model = keras.models.load_model(self.weights_file_path)
# 0.862
class gru_9w(base):
def __init__(self):
self.vector_size = 128
self.max_sentence_len = 512
self.x_type = 'no'
self.input_set, self.input2idx = input_set_30w, input2idx_30w
self.label_set, self.label2idx = label_set_30w, label2idx_30w
self.weights_file_path = '../model_9w/gru_2017-01-11/weights.10-0.067.h5'
self.top_nb, self.threshold = [2, 10, 10, 6], [0.4, 0.4, 0.4, 0.4]
base.__init__(self)
self.model = keras.models.load_model(self.weights_file_path)
if __name__ == '__main__':
input_text2 = ['本院认为,被告人张某某违反国家烟草专卖管理法律法规,未经烟草专卖行政主管部门许可,非法经营烟草专卖品,扰乱市场秩序,情节严重,其行为已构成规定的非法经营罪,西安市未央区人民检察院指控被告人犯非法经营罪的事实成立,依法应予惩处。在庭审中,被告人张某某如实供述自己的犯罪事实,并表示自愿认罪,可酌情从轻处罚。同时,经社区矫正机构调查评估,张某某符合社区矫正条件。依照之规定,']
m2 = gru_9w()
p2 = m2.match(input_text2)
#print("p2:",p2)
|
"""Module to facilitate a generic interface to running long tasks in separate
threads.
"""
# Copyright 2018 Matthew A. Clapp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import threading
import wx
import wx.lib.newevent
import common
# logging stuff
# not necessary to make a handler since we will be child logger of marcam
# we use NullHandler so if no config at top level we won't default to printing
# to stderr
LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(logging.NullHandler())
# create debug function using this file's logger
debug_fxn = common.debug_fxn_factory(LOGGER.info)
debug_fxn_debug = common.debug_fxn_factory(LOGGER.debug)
class Threaded:
"""Class supporting long tasks that need to be in separate thread.
Handles running the thread part of the task in a separate thread,
and wx Events needed to invoke post-thread actions.
"""
@debug_fxn
def __init__(self, thread_fxn, thread_fxn_args, post_thread_fxn, parent):
"""Initialize a Long Task needing thread execution and wx support.
Args:
thread_fxn (function handle): long-running function to be run in
separate thread. Return values from this function will
be passed as positional arguments to post_thread_fxn.
thread_fxn_args (tuple): arguments for thread_fxn
post_thread_fxn (function handle): function that runs after
thread_fxn has finished
parent (wx.Window): Window that handles events and is parent
of ProgressDialog
"""
self.task_thread = None
# abort_event may not really need to be an Event since we never wait
# for it and setting a regular variable to a bool should be atomic.
# But it's safer to just make it an Event.
self.abort_event = threading.Event()
self.thread_fxn = thread_fxn
self.thread_fxn_args = thread_fxn_args
self.post_thread_fxn = post_thread_fxn
self.win_parent = parent
self.thread_fxn_returnvals = None
        # We could normally omit events altogether if post_thread_fxn is None,
        # but we'll keep these in, in case a derived class needs the machinery
# NOTE: IT MIGHT BE that binding an event to self.long_task_postthread
# prevents this class instance from being deleted if the calling code
# goes out of scope. (??)
# get new Event and EventBinder for this instance only
(self.myLongTaskDoneEvent, evt_long_task_done) = wx.lib.newevent.NewEvent()
# bind postthread function to "done" event
self.win_parent.Bind(evt_long_task_done, self.long_task_postthread)
# build thread
self.task_thread = threading.Thread(
target=self.long_task_thread,
)
# Start task thread computing.
# Do this last, so that if it ends super fast we are not trying to
# still do things with self.progress_dialog after long_task_postthread
# Destroys the dialog.
self.task_thread.start()
@debug_fxn
def long_task_thread(self):
"""Function that is run in separate thread
If thread_fxn returns any values, they will be passed as positional
arguments to post_thread_fxn.
"""
thread_fxn_returnvals = self.thread_fxn(*self.thread_fxn_args)
if thread_fxn_returnvals is None:
# if returnvals = None, make empty tuple
self.thread_fxn_returnvals = ()
else:
try:
# if returnvals are iterable, convert to tuple
self.thread_fxn_returnvals = tuple(thread_fxn_returnvals)
except TypeError:
# if returnvals are single value, wrap in tuple
self.thread_fxn_returnvals = (thread_fxn_returnvals,)
if not self.abort_event.is_set():
wx.PostEvent(self.win_parent, self.myLongTaskDoneEvent())
@debug_fxn
def long_task_postthread(self, _evt):
"""Function triggered when event signifies that thread fxn is done.
Args:
evt (self.myLongTaskDoneEvent): obj returned from event when long task
thread is finished
"""
# if it exists, execute post thread function with return value(s)
# from thread_fxn
if self.post_thread_fxn is not None:
self.post_thread_fxn(*self.thread_fxn_returnvals)
class ThreadedProgressPulse(Threaded):
"""Class supporting long tasks that need to be in separate thread.
Handles running the thread part of the task in a separate thread,
wx Events needed to invoke post-thread actions, and wx ProgressDialog.
    Sets ProgressDialog to "Pulse" mode, which shows indeterminate progress
(just activity).
"""
@debug_fxn
def __init__(self, thread_fxn, thread_fxn_args, post_thread_fxn,
progress_title, progress_msg, parent):
self.win_parent = parent
self.thread_fxn_returnvals = None
self.progress_dialog = wx.ProgressDialog(
progress_title,
progress_msg,
parent=self.win_parent
)
        # Pulse seems to need to be called only once, not multiple times
        # as the docs imply.
self.progress_dialog.Pulse()
# invoke thread stuff after setting up progress_dialog, so thread
# ending and post-thread destroying progress_dialog is impossible
# to come first
super().__init__(thread_fxn, thread_fxn_args, post_thread_fxn, parent)
@debug_fxn
def long_task_postthread(self, _evt):
"""Function triggered when event signifies that thread fxn is done.
Args:
evt (self.myLongTaskDoneEvent): obj returned from event when long task
thread is finished
"""
# On Windows especially, must Destroy progress dialog for application
# to continue
self.progress_dialog.Destroy()
# execute post thread function with return value(s) from thread_fxn
super().long_task_postthread(_evt)
class ThreadedProgressPulseDelay(Threaded):
"""Class supporting long tasks that need to be in separate thread.
EXPERIMENTAL. Possible race conditions.
Handles running the thread part of the task in a separate thread,
wx Events needed to invoke post-thread actions, and wx ProgressDialog.
ProgressDialog has a delay to start, so that if thread finishes before
a certain time limit, no ProgressDialog will start at all.
    Sets ProgressDialog to "Pulse" mode, which shows indeterminate progress
(just activity).
"""
@debug_fxn
def __init__(self, thread_fxn, thread_fxn_args, post_thread_fxn,
progress_title, progress_msg, parent, progress_delay_ms=100):
self.win_parent = parent
self.thread_fxn_returnvals = None
self.thread_done = False
self.progress_dialog = None
self.thread_lock = threading.Lock()
# Disable access to parent window
self.win_parent.Enable(False)
# invoke thread stuff after setting up progress_dialog, so thread
# ending and post-thread destroying progress_dialog is impossible
# to come first
super().__init__(thread_fxn, thread_fxn_args, post_thread_fxn, parent)
wx.CallLater(progress_delay_ms, self.delay_start, progress_title, progress_msg)
@debug_fxn
def delay_start(self, progress_title, progress_msg):
"""Function that starts after initial delay, checks if postthread has
disabled it, and if not actually starts the Progress Dialog
Args:
progress_title (str): title of Progress Dialog
progress_msg (str): message inside of Progress Dialog
"""
with self.thread_lock:
if not self.thread_done:
                # Re-enable access to parent window before showing the progress dialog
self.win_parent.Enable(True)
self.progress_dialog = wx.ProgressDialog(
progress_title,
progress_msg,
parent=self.win_parent
)
                # Pulse seems to need to be called only once, not multiple times
                # as the docs imply.
self.progress_dialog.Pulse()
@debug_fxn
def long_task_postthread(self, _evt):
"""Function triggered when event signifies that thread fxn is done.
Args:
evt (self.myLongTaskDoneEvent): obj returned from event when long task
thread is finished
"""
with self.thread_lock:
self.thread_done = True
if self.progress_dialog is None:
                # Re-enable access to parent window
self.win_parent.Enable(True)
else:
# On Windows especially, must Destroy progress dialog for application
# to continue
self.progress_dialog.Destroy()
# execute post thread function with return value(s) from thread_fxn
super().long_task_postthread(_evt)
class ThreadedDisableEnable(Threaded):
"""Class supporting long tasks that need to be in separate thread.
Handles running the thread part of the task in a separate thread,
wx Events needed to invoke post-thread actions, and disabling parent
window until thread is finished.
"""
@debug_fxn
def __init__(self, thread_fxn, thread_fxn_args, post_thread_fxn, parent):
self.win_parent = parent
self.thread_fxn_returnvals = None
# Disable access to parent window
self.win_parent.Enable(False)
# invoke thread stuff after Disabling parent window, so there's no
# chance it can happen after post-thread stuff
super().__init__(thread_fxn, thread_fxn_args, post_thread_fxn, parent)
@debug_fxn
def long_task_postthread(self, _evt):
"""Function triggered when event signifies that thread fxn is done.
Args:
evt (self.myLongTaskDoneEvent): obj returned from event when long task
thread is finished
"""
# Re-enable access to parent window
self.win_parent.Enable(True)
# execute post thread function with return value(s) from thread_fxn
super().long_task_postthread(_evt)
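# Hedged usage sketch (not part of the original module): how a wx.Frame method might
# start a long-running file read behind a pulsing progress dialog. The names `frame`,
# `path`, `count_bytes`, and `show_result` are made up for illustration. Note that the
# worker's return value is splatted into the post-thread function, so the worker here
# returns a single non-iterable value.
def _example_start_long_task(frame, path):
    def count_bytes(file_path):
        # long-running work, executed in the worker thread
        with open(file_path, 'rb') as fh:
            return len(fh.read())

    def show_result(byte_count):
        # runs back on the wx main thread after the worker finishes
        wx.MessageBox("Read %d bytes" % byte_count, parent=frame)

    ThreadedProgressPulse(
        thread_fxn=count_bytes,
        thread_fxn_args=(path,),
        post_thread_fxn=show_result,
        progress_title="Reading",
        progress_msg="Reading file...",
        parent=frame,
    )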
|
from typing import Dict, List
from data_reader.binary_input import Instance
class learner(object):
"""Base class for initial learning methods.
Defines the bare-minimum functionality for initial learning
strategies. Specified learning algorithms can create wrappers
around the underlying methods.
"""
positive_classification = 1
negative_classification = -1
def __init__(self):
"""New generic initial learner with no specified learning model.
"""
self.num_features = 0
self.training_instances = None
def set_training_instances(self, training_data):
"""
        :param training_data: a dataset object which, when calling numpy(), will return
            X: feature matrix, shape (num_instances, num_features_per_instance)
            y: label array, shape (num_instances, )
"""
if isinstance(training_data, List):
self.training_instances = training_data # type: List[Instance]
self.num_features = self.training_instances[0].get_feature_vector().get_feature_count()
else:
self.training_instances = training_data
self.num_features = training_data.features.shape[1]
def train(self):
"""Train on the set of training instances.
"""
raise NotImplementedError
def predict(self, instances):
"""Predict classification labels for the set of instances.
Args:
            :param instances: matrix of instances shape (num_instances, num_features_per_instance)
Returns:
label classifications (List(int))
"""
raise NotImplementedError
def set_params(self, params: Dict):
"""Set params for the initial learner.
Defines default behavior, setting only BaseModel params
Args:
params (Dict): set of available params with updated values.
"""
raise NotImplementedError
def predict_proba(self, X):
"""
outputs a list of log probability of prediction
        :param X: matrix of instances shape (num_instances, num_features_per_instance)
:return: list of log probability
"""
raise NotImplementedError
def decision_function(self, X):
raise NotImplementedError
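# Hedged sketch (not part of the original module): a concrete learner wrapping
# scikit-learn's LogisticRegression, to illustrate how the abstract methods above
# are typically filled in. It assumes the dataset object handed to
# set_training_instances exposes .features and .labels arrays; adjust to the
# actual data_reader interface as needed.
class sklearn_logistic_learner(learner):
    def __init__(self):
        learner.__init__(self)
        # local import so scikit-learn stays an optional dependency
        from sklearn.linear_model import LogisticRegression
        self.model = LogisticRegression()

    def train(self):
        self.model.fit(self.training_instances.features,
                       self.training_instances.labels)

    def predict(self, instances):
        return list(self.model.predict(instances))

    def predict_proba(self, X):
        # the base class documents this as returning log probabilities
        return list(self.model.predict_log_proba(X))

    def decision_function(self, X):
        return self.model.decision_function(X)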
|
'''Module implements methods to embed contour chain codes as text'''
def chaincodes_to_documents(chaincodes):
'''Converts chain codes to documents
Args:
chaincodes (list of list of int)
>>> chaincodes_to_documents([[1,2,2,3], [1,3,4]])
['12_1 23_2 31_1', '13_1 34_1 41_1']
'''
documents = []
for chaincode in chaincodes:
documents.append(_chaincode_to_document(chaincode))
return documents
def query_chaincode(chaincode):
    '''The query chain code should be in counterclockwise order. This can be achieved by reducing each codeword by 4 modulo 8.'''
q_code = []
for x in chaincode:
if x == 4:
q_code.append(1)
else:
q_code.append((x-4)%8)
return q_code
def _chaincode_to_document(chaincode):
'''Takes a chain code of an image and converts to a document
Args:
chaincode (list of int): Freeman chaincode
Returns:
        document (str): each word in the document is separated by "_"; the left-hand side of "_" is the transition in the chain code, and the right-hand side is the number of occurrences of the pre-transition code.
        E.g. 27_4 represents 22227 in the chain code
>>> _chaincode_to_document([1,2,2,3])
'12_1 23_2 31_1'
'''
    counts = _chaincode_transition_counts(chaincode)
    return ' '.join(f'{transition}_{count}' for transition, count in counts)
def _chaincode_transition_counts(chaincode):
    '''
    Counts number of pretransition codes in the chain code
    >>> _chaincode_transition_counts([1,2,2,3])
    [('12', '1'), ('23', '2'), ('31', '1')]
    Returns:
        counts (list of tuple): first entry is a transition and second is the number of occurrences of the pretransition code
    '''
    counts = []
    cnt = 0
    prev = None
    for i, current in enumerate(chaincode):
        if i == 0:
            prev = current
            cnt += 1
            continue
        if current == prev:
            cnt += 1
        else:
            # transition from prev to current: emit the run length of prev
            code = f'{prev}{current}'
            counts.append((code, str(cnt)))
            prev = current
            cnt = 1
        # consider the end and beginning of the object to account for its closedness
        if i == len(chaincode) - 1:
            code = f'{current}{chaincode[0]}'
            counts.append((code, str(cnt)))
    return counts
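# Hedged usage example (not part of the original module): converting one contour
# chain code into a document and deriving the counterclockwise query code. The
# chain code below is made up for illustration.
if __name__ == '__main__':
    sample = [1, 2, 2, 3]
    print(chaincodes_to_documents([sample]))  # ['12_1 23_2 31_1']
    print(query_chaincode(sample))            # [5, 6, 6, 7]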
|
from django import forms
from .models import *
class TeilnahmeEintragenFormular(forms.ModelForm):
class Meta:
model = Zeile
fields = ['autor_name', 'text']
|
# -*- coding: utf-8 -*-
from copy import deepcopy
from loop_index.loop_index import LoopIndex
class DecisionMatrix:
"""
This class is a matrix containing actions at given coordinates. Its axes
are tuplists of conditions (callable objects returning boolean values). A
coordinate is true if the condition to which it corresponds is true.
When an instance is run, the actions whose coordinates are true are invoked.
"""
def __init__(self, *condition_axes):
"""
The DecisionMatrix constructor.
Args:
*condition_axes: tuplists containing conditions. DecisionMatrix
needs at least one axis. Each axis must contain at least one
condition.
"""
self._build_axes(condition_axes)
self._check_presence_of_axes()
self._matrix = dict()
self.set_default_action(None)
def _build_axes(self, condition_axes):
axis_list = list()
for ca in condition_axes:
ca_tuple = tuple(ca)
axis_list.append(ca_tuple)
self._axes = tuple(axis_list)
self._axis_count = len(self._axes)
def _check_presence_of_axes(self):
if self._axis_count < 1:
raise ValueError("DecisionMatrix needs at least one axis."
+ " It was not given any.")
def _check_coordinates(self, coordinates):
if not self.has_coordinates(coordinates):
self._raise_coord_value_error(coordinates)
def clear_actions(self):
"""
Deletes all actions stored in this matrix except the default action.
In order to delete it, call set_default_action(None).
"""
self._matrix.clear()
def _coordinate_is_true(self, axis, coordinate):
return self._axes[axis][coordinate]()
def _coordinates_are_true(self, coordinates):
index = LoopIndex(self._axis_count)
while index.iterate():
i = index.get_value()
coordinate = coordinates[i]
if not self._coordinate_is_true(i, coordinate):
return False
return True
def _get_all_axis_lengths(self):
lengths = list()
axis_index = LoopIndex(self._axis_count)
while axis_index.iterate():
i = axis_index.get_value()
lengths.append(self._get_axis_length(i))
return tuple(lengths)
def get_axis_count(self):
"""
Accessor of the number of axes.
Returns:
int: the number of axes of this DecisionMatrix.
"""
return self._axis_count
def _get_axis_length(self, axis):
return len(self._axes[axis])
def has_coordinates(self, coordinates):
"""
Determines whether the given coordinates exist in this matrix.
Args:
coordinates (tuplist): must be integral numbers.
Returns:
bool: True if the coordinates exist in this matrix, False otherwise.
"""
coord_length = len(coordinates)
if coord_length != self._axis_count:
return False
coord_index = LoopIndex(coord_length)
while coord_index.iterate():
i = coord_index.get_value()
coordinate = coordinates[i]
if coordinate < 0 or coordinate >= self._get_axis_length(i):
return False
return True
def print_axis_values(self):
"""
This method is a tool for debugging. It displays the boolean value of
all conditions of all axes in the console. False and True are represented
with 0 and 1 respectively.
"""
axis_index = LoopIndex(self._axis_count)
while axis_index.iterate():
i = axis_index.get_value()
current_axis = self._axes[i]
axis_values = list()
condition_index = LoopIndex(len(current_axis))
while condition_index.iterate():
j = condition_index.get_value()
if current_axis[j]():
axis_values.append(1)
else:
axis_values.append(0)
print("Axis " + str(i) + ": " + str(axis_values))
def _raise_coord_value_error(self, coordinates):
raise ValueError("Coordinates " + str(coordinates)
+ " are not valid. This matrix has "
+ str(self._axis_count) + " axes whose lengths are "
+ str(self._get_all_axis_lengths()) + ".")
def run(self):
"""
Browses the matrix checking the truth value of every coordinate set. If
the coordinates are true, the associated action is invoked. If no action
is invoked after the browsing and a default action has been specified,
that action is performed.
"""
action_performed = False
for coordinates, action in self._matrix.items():
if self._coordinates_are_true(coordinates):
action()
action_performed = True
if not action_performed and self._default_action is not None:
self._default_action()
def set_action(self, coordinates, action):
"""
Stores an action in the matrix at the specified coordinates.
Args:
coordinates (tuplist): integral values indicating where the action
will be stored in the matrix.
action: a callable object. Its return value will not be recorded
or used.
Raises:
ValueError: if action is not callable or the coordinates are invalid,
i.e. has_coordinates returns False.
"""
if type(coordinates) is not tuple:
coordinates = tuple(coordinates)
self._check_coordinates(coordinates)
if not callable(action):
raise ValueError("Argument action must be a callable object.")
self._matrix[coordinates] = action
def set_all_actions(self, coord_action_dict, overwrite=True):
"""
Stores the actions from dictionary coord_action_dict in the matrix at
the coordinates with which they are paired. The actions currently stored
in this instance will remain if they are not overwritten with a new
action. To delete all the actions stored in this instance, use method
clear_actions.
Args:
coord_action_dict (dictionary): contains actions (values) paired
with their coordinates (keys) where they must be stored.
Coordinates must be represented by tuples.
overwrite (bool, optional): if True, the actions in coord_action_dict
will overwrite the actions currently stored at their coordinates.
Defaults to True.
Raises:
ValueError: if a key in coord_action_dict is invalid, i.e.
has_coordinates returns False or if a value in
coord_action_dict is not a callable object.
"""
for coordinates, action in coord_action_dict.items():
if overwrite or self._matrix.get(coordinates) is None:
self.set_action(coordinates, action)
def set_default_action(self, action):
"""
The specified action will be performed when this instance is run if all
conditions are false. The default action can be set to None if it is not
wanted. On instantiation, DecisionMatrix does not have a default action.
The action will only be set as default if it is None or callable.
Args:
action: a callable object. Its return value will not be recorded
or used.
Raises:
ValueError: if action is not None and is not a callable object.
"""
if action is None or callable(action):
self._default_action = action
else:
raise ValueError(
"The default action must be None or a callable object.")
|
import numpy as np
import os.path
import transforms3d.quaternions as txq
import argparse
def params():
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--dataroot', type=str, default='../datasets/7Scenes', help='dataset root')
return parser.parse_args()
def is_pose_file(filename):
return filename.endswith(EXTENSIONS) and filename.startswith(PREFIX)
def get_pose_filenames(dataset_dir, seq_dir):
pose_filenames = []
dir = os.path.join(dataset_dir, seq_dir)
assert os.path.isdir(dir), '%s is not a valid directory' % dir
fnames = sorted(os.listdir(dir))
for fname in fnames:
if is_pose_file(fname):
pose_filenames.append(os.path.join(dir, fname))
return pose_filenames
def convert_xyzquat(dataset_dir, split_file, write_file):
with open(split_file, 'r') as f:
seqs = [int(l.split('sequence')[-1]) for l in f if not l.startswith('#')]
with open(write_file, 'w') as f:
f.write('7 Scenes Datasets (convert rotation matrix to translation + quaternion)\n')
f.write('Image File, Camera Position [X Y Z W P Q R]\n')
f.write('\n')
for seq in seqs:
seq_dir = 'seq-{:02d}'.format(seq)
p_filenames = get_pose_filenames(dataset_dir, seq_dir)
assert p_filenames, 'no poses in directory {}'.format(seq_dir)
ss = p_filenames[0].find(seq_dir)
se = p_filenames[0].find('.pose')
pose_out = np.zeros(7)
for i in range(len(p_filenames)):
pose_in = np.loadtxt(p_filenames[i])
pose_in = np.asarray(pose_in)
pose_out[3: ] = txq.mat2quat(pose_in[:3, :3])
pose_out[0:3] = pose_in[:, 3].flatten()[:3]
pose_str = p_filenames[i][ss:se] + '.color.png'
                for j in range(7):
                    pose_str += ' {:0.8f}'.format(pose_out[j])
f.write(pose_str + '\n')
def split_train_test(dataset_dir, splitfns_in, splitfns_out):
for i in range(len(splitfns_in)):
split_in = splitfns_in[i]
split_out = splitfns_out[i]
split_file = os.path.join(dataset_dir, split_in)
write_file = os.path.join(dataset_dir, split_out)
if (not os.path.exists(split_file)):
print('{} does not exist'.format(split_file))
continue
if (os.path.exists(write_file)):
print('{} has existed'.format(write_file))
continue
print('start converting', split_file)
convert_xyzquat(dataset_dir, split_file, write_file)
print('finish converting', write_file)
EXTENSIONS = ('.txt',)
PREFIX = ('frame',)
args = params()
dataroot = args.dataroot
dataset_names = ['chess', 'fire', 'heads', 'office', 'pumpkin', 'redkitchen', 'stairs']
# dataset_names = ['chess']
splitfns_in = ['TrainSplit.txt', 'TestSplit.txt']
splitfns_out = ['dataset_train.txt', 'dataset_test.txt']
for name in dataset_names:
dir = os.path.join(dataroot, name)
print('processing', dir)
split_train_test(dir, splitfns_in, splitfns_out)
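# Hedged illustration (comments only, not executed): what convert_xyzquat does for a
# single 4x4 pose matrix -- the 3x3 rotation block becomes a quaternion (w, x, y, z
# order from transforms3d) and the last column supplies the translation.
#   pose_in = np.eye(4)
#   q = txq.mat2quat(pose_in[:3, :3])   # -> array([1., 0., 0., 0.])
#   t = pose_in[:, 3].flatten()[:3]     # -> array([0., 0., 0.])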
|
import os
import cv2
import argparse
import torch as t
from utils import detect_frame, load_mask_classifier
from skvideo.io import FFmpegWriter, vreader, ffprobe
from FaceDetector import FaceDetector
from torchvision.transforms import *
from pathlib import Path
from torch.nn import *
from tqdm import tqdm
arg = argparse.ArgumentParser()
arg.add_argument("vid_path", type=str,
help="The path to the image we want to classify")
arg.add_argument("--checkpoint", type=str, default="./checkpoints/face_mask.ckpt",
help="The path to the model checkpoint you want to classify with")
arg.add_argument("--show-result", type=bool, default=True,
help="Whether to show the classified image or not. Best used with --output-path")
arg.add_argument("--output-dir", type=str,
help="Where to output the classified image. Will not output if a directory is not given")
arg.add_argument("--bitrate", type=int, default=3000000,
help="The bitrate of the output video.")
# Seems to not work?
# arg.add_argument("--num-threads", type=int, default=4,
# help="The number of threads to use when writing the output video")
arg = arg.parse_args()
@t.no_grad()
def classify_video() -> None:
vid_path = Path(arg.vid_path)
if arg.output_dir is not None:
out_path = Path(arg.output_dir)
if not vid_path.exists():
print("The given video path is not valid, exiting...")
elif arg.output_dir is not None and (not out_path.exists() or not out_path.is_dir()):
print("The image output path is not valid, exiting...")
else:
device = None
if t.cuda.is_available():
print("Using GPU")
device = t.device("cuda:0")
else:
print("Using CPU")
            device = t.device("cpu")
print("Attempting to load mask classifier checkpoint")
model, val_trns = load_mask_classifier(arg.checkpoint, device)
print("Mask classifier checkpoint successfully loaded")
print("Attempting to load face detector checkpoint")
face_detector = FaceDetector(prototype='./checkpoints/deploy.prototxt.txt',
model='./checkpoints/res10_300x300_ssd_iter_140000.caffemodel')
print("Face detector checkpoint successfully loaded")
if arg.output_dir is not None:
split = list(os.path.split(arg.output_dir))
split[-1] = "result_" + vid_path.name
op = os.path.sep.join(split)
            print(f"Will write result at {op}")
print("Loading and classifying video frames")
cv2.namedWindow("Results", cv2.WINDOW_NORMAL)
classified_frames = []
for frame in vreader(arg.vid_path):
frame = detect_frame(frame, face_detector, model,
device, val_trns, opencv_frame=False)
if arg.output_dir:
classified_frames.append(frame)
if arg.show_result:
# Since openCV wants a BGR image
frame = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)
cv2.imshow("Results", frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
cv2.destroyAllWindows()
if arg.output_dir:
print("Saving classified video...")
metadata = ffprobe(arg.vid_path)
writer = FFmpegWriter(
op,
inputdict={'-r': str(metadata['video']['@avg_frame_rate'])},
# outputdict={'-pix_fmt': 'yuv444p', '-b': str(arg.bitrate), '-threads': str(arg.num_threads)})
outputdict={'-pix_fmt': 'yuv444p', '-b': str(arg.bitrate)})
for frame in tqdm(classified_frames):
writer.writeFrame(frame)
            writer.close()
if __name__ == "__main__":
classify_video()
|
import json
import logging; log = logging.getLogger()
class Emotes(object):
def __init__(self, bot):
self.bot = bot
with open(f"data/emotes.json", "r", encoding="utf8", errors="ignore") as f:
self.emotes = json.load(f)
if bot.config.dev:
self.emotes.update({
"YES": "<:yes:880522968969658448>", #👌
"NO": "<:no:880522968952872990>" #❌
})
def get(self, key):
try:
return self.emotes[key]
except KeyError:
            log.warning("Failed to obtain an emoji with key {}".format(key))
def reload(self):
self.__init__(self.bot)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2018-03-04 21:37
from django.db import migrations
def forwards(apps, schema_editor):
from manabi.apps.twitter_usages.harvest import tweet_is_spammy
ExpressionTweet = apps.get_model('twitter_usages', 'ExpressionTweet')
for expression_tweet in ExpressionTweet.objects.all().iterator():
if tweet_is_spammy(expression_tweet.tweet):
print('Deleting spammy tweet', expression_tweet.id)
expression_tweet.delete()
class Migration(migrations.Migration):
dependencies = [
('twitter_usages', '0001_initial'),
]
operations = [
migrations.RunPython(forwards),
]
|
from . import AstChecker
class CheckOsSystemCalls(AstChecker):
    """Checks for any calls to os.system and suggests using subprocess.check_output instead."""
def _warn_about_os_system(self, node):
self.warn("Consider replacing os.system with subprocess.check_output,"
" or use sublime's Default.exec.ExecCommand. "
"Also make sure you thought about the platform key in your pull request.")
def visit_Call(self, node):
try:
attr = node.func.attr
id = node.func.value.id
except Exception:
return
if id == "os" and attr == "system":
self._warn_about_os_system(node)
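# Hedged illustration (comments only): the node shape visit_Call matches -- a Call
# whose func is an Attribute on the bare name `os` with attr `system`. For example:
#   import ast
#   call = ast.parse("import os\nos.system('ls')").body[1].value
#   assert call.func.value.id == "os" and call.func.attr == "system"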
|
# See what happens when we make __get__ and __set__ things other than functions...
# TODO add some with __del__
import sys
import traceback
class CallableGet(object):
def __call__(*args):
print "callable get", map(type, args)
class CallableSet(object):
def __get__(*args):
print "__get__", map(type, args)
return args[0]
def __call__(*args):
print "callable set", map(type, args)
class InstanceMethodMaker(object):
def getBoundInstanceMethod(*args):
print '__get__ bound', map(type, args)
def setBoundInstanceMethod(*args):
print '__set__ bound', map(type, args)
def getUnboundInstanceMethod(*args):
print '__get__ unbound', map(type, args)
def setUnboundInstanceMethod(*args):
print '__set__ unbound', map(type, args)
imm = InstanceMethodMaker()
def closureGet():
a = 5
def f(*args):
print 'closure __get__', map(type, args), a
return f
def closureSet():
a = 5
def f(*args):
print 'closure __set__', map(type, args), a
return f
class A(object):
# If __get__ or __set__ is an int
class DescGetInt(object):
__get__ = 1
descGetInt = DescGetInt()
class DescSetInt(object):
__set__ = 1
descSetInt = DescSetInt()
class DescGetSetInt(object):
def __get__(*args):
print 'DescGetSetInt __get__ called', map(type, args)
__set__ = 1
descGetSetInt = DescGetSetInt()
class DescGetCall(object):
__get__ = CallableGet()
descGetCall = DescGetCall()
class DescSetCall(object):
__set__ = CallableSet()
descSetCall = DescSetCall()
class DescGetSetCall(object):
def __get__(*args):
print 'DescGetSetCall __get__ called', map(type, args)
__set__ = CallableSet()
descGetSetCall = DescGetSetCall()
class DescGetBoundInstanceMethod(object):
__get__ = imm.getBoundInstanceMethod
descGetBoundInstanceMethod = DescGetBoundInstanceMethod()
class DescSetBoundInstanceMethod(object):
__set__ = imm.setBoundInstanceMethod
descSetBoundInstanceMethod = DescSetBoundInstanceMethod()
class DescGetSetBoundInstanceMethod(object):
def __get__(*args):
print 'DescGetSetBoundInstanceMethod __get__ called', map(type, args)
__set__ = imm.setBoundInstanceMethod
descGetSetBoundInstanceMethod = DescGetSetBoundInstanceMethod()
class DescGetUnboundInstanceMethod(object):
__get__ = InstanceMethodMaker.getUnboundInstanceMethod
descGetUnboundInstanceMethod = DescGetUnboundInstanceMethod()
class DescSetUnboundInstanceMethod(object):
__set__ = InstanceMethodMaker.setUnboundInstanceMethod
descSetUnboundInstanceMethod = DescSetUnboundInstanceMethod()
class DescGetSetUnboundInstanceMethod(object):
def __get__(*args):
print 'DescGetSetUnboundInstanceMethod __get__ called', map(type, args)
__set__ = imm.setUnboundInstanceMethod
descGetSetUnboundInstanceMethod = DescGetSetUnboundInstanceMethod()
class DescGetClosure(object):
__get__ = closureGet()
descGetClosure = DescGetClosure()
class DescSetClosure(object):
__set__ = closureSet()
descSetClosure = DescSetClosure()
class DescGetSetClosure(object):
def __get__(*args):
print 'DescGetSetClosure __get__ called', map(type, args)
__set__ = closureSet()
descGetSetClosure = DescGetSetClosure()
class DescGetGenerator(object):
def __get__(*args):
print 'DescGetGenerator __get__ called', map(type, args)
yield 15
print '__get__ post yield'
descGetGenerator = DescGetGenerator()
class DescSetGenerator(object):
def __set__(*args):
print 'DescSetGenerator __set__ called', map(type, args)
yield 15
print '__set__ post yield'
descSetGenerator = DescSetGenerator()
class DescGetSetGenerator(object):
def __get__(a, b, c):
print 'DescGetSetGenerator __get__ called'
print a
print b
print c
def __set__(self, obj, value):
print 'DescGetSetGenerator __set__ called'
print self
print obj
print value
yield 15
print 'DescGetSetGenerator __set__ post yield'
descGetSetGenerator = DescGetSetGenerator()
a = A()
print 'int'
try:
print a.descGetInt
except:
traceback.print_exc(file=sys.stdout)
try:
a.descSetInt = 5
except:
traceback.print_exc(file=sys.stdout)
a.__dict__['descGetSetInt'] = 3
print a.descGetSetInt
print 'object with __call__'
print a.descGetCall
a.descSetCall = 5
a.__dict__['descGetSetCall'] = 3
print a.descGetSetCall
print 'bound instance method'
print a.descGetBoundInstanceMethod
a.descSetBoundInstanceMethod = 5
a.__dict__['descGetSetBoundInstanceMethod'] = 3
print a.descGetSetBoundInstanceMethod
# TODO: uncomment this after instancemethod_checking is working
'''
print 'unbound instance method'
try:
print a.descGetUnboundInstanceMethod
except:
traceback.print_exc(file=sys.stdout)
try:
a.descSetUnboundInstanceMethod = 5
except:
traceback.print_exc(file=sys.stdout)
a.__dict__['descGetSetUnboundInstanceMethod'] = 3
print a.descGetSetUnboundInstanceMethod
'''
print 'closure'
print a.descGetClosure
a.descSetClosure = 5
a.__dict__['descGetSetClosure'] = 3
print a.descGetClosure
print 'generator'
print type(a.descGetGenerator)
a.descSetGenerator = 5
a.__dict__['descGetSetGenerator'] = 3
print type(a.descGetGenerator)
|
import argparse
import json
from pathlib import Path, PurePosixPath
import jinja2
import yaml
def snake_to_pascal_case(input_string: str):
split_string = input_string.lower().split('_')
return ''.join([i.title() for i in split_string])
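# For example (illustrative values): snake_to_pascal_case('rtc_gamma') -> 'RtcGamma',
# snake_to_pascal_case('WATER_MAP') -> 'WaterMap'.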
def render_template(template_file, job_types, env):
output_file = template_file.with_suffix('')
template = env.get_template(str(template_file))
with open(output_file, 'w') as f:
f.write(template.render(job_types=job_types, json=json, snake_to_pascal_case=snake_to_pascal_case))
def render_templates(job_types):
env = get_env()
for template_file in Path('.').glob('**/*.j2'):
render_template(PurePosixPath(template_file), job_types, env)
def get_env():
env = jinja2.Environment(
loader=jinja2.FileSystemLoader('./'),
autoescape=jinja2.select_autoescape(default=True, disabled_extensions=('j2',)),
undefined=jinja2.StrictUndefined,
trim_blocks=True,
lstrip_blocks=True,
keep_trailing_newline=True,
)
return env
def main():
parser = argparse.ArgumentParser()
parser.add_argument('paths', nargs='+', type=Path)
args = parser.parse_args()
job_types = {}
for file in args.paths:
with open(file.absolute()) as f:
job_types.update(yaml.safe_load(f))
render_templates(job_types)
if __name__ == '__main__':
main()
|
# Copyright 2014
# The Cloudscaling Group, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import mock
from neutronclient.common import exceptions as neutron_exception
from ec2api.api import common
from ec2api.api import vpn_connection as vpn_connection_api
from ec2api.tests.unit import base
from ec2api.tests.unit import fakes
from ec2api.tests.unit import matchers
from ec2api.tests.unit import tools
class VpnConnectionTestCase(base.ApiTestCase):
@mock.patch('ec2api.api.vpn_connection.describe_vpn_connections')
@mock.patch('ec2api.api.vpn_connection._reset_vpn_connections',
wraps=vpn_connection_api._reset_vpn_connections)
@mock.patch('random.choice')
def test_create_vpn_connection(self, random_choice, reset_vpn_connections,
describe_vpn_connections):
self.set_mock_db_items(
fakes.DB_VPN_GATEWAY_1, fakes.DB_VPN_GATEWAY_2,
fakes.DB_CUSTOMER_GATEWAY_1, fakes.DB_CUSTOMER_GATEWAY_2,
fakes.DB_VPC_1)
self.neutron.create_ikepolicy.side_effect = (
tools.get_neutron_create('ikepolicy', fakes.ID_OS_IKEPOLICY_1))
self.neutron.create_ipsecpolicy.side_effect = (
tools.get_neutron_create('ipsecpolicy', fakes.ID_OS_IPSECPOLICY_1))
self.db_api.add_item.side_effect = (
tools.get_db_api_add_item(fakes.ID_EC2_VPN_CONNECTION_1))
random_choice.side_effect = iter(fakes.PRE_SHARED_KEY_1)
describe_vpn_connections.return_value = {
'vpnConnectionSet': [fakes.EC2_VPN_CONNECTION_1]}
resp = self.execute(
'CreateVpnConnection',
{'VpnGatewayId': fakes.ID_EC2_VPN_GATEWAY_1,
'CustomerGatewayId': fakes.ID_EC2_CUSTOMER_GATEWAY_1,
'Type': 'ipsec.1',
'Options.StaticRoutesOnly': 'True'})
self.assertThat(
resp,
matchers.DictMatches(
{'vpnConnection': fakes.EC2_VPN_CONNECTION_1}))
self.neutron.create_ikepolicy.assert_called_once_with(
{'ikepolicy': tools.purge_dict(fakes.OS_IKEPOLICY_1, ('id',))})
self.neutron.create_ipsecpolicy.assert_called_once_with(
{'ipsecpolicy': tools.purge_dict(fakes.OS_IPSECPOLICY_1, ('id',))})
random_choice.assert_called_with(vpn_connection_api.SHARED_KEY_CHARS)
new_vpn_connection_1 = tools.update_dict(
fakes.DB_VPN_CONNECTION_1, {'cidrs': [],
'os_ipsec_site_connections': {}})
self.db_api.add_item.assert_called_once_with(
mock.ANY, 'vpn',
tools.purge_dict(new_vpn_connection_1, ('id', 'vpc_id', 'os_id')))
self.neutron.update_ikepolicy.assert_called_once_with(
fakes.ID_OS_IKEPOLICY_1,
{'ikepolicy': {'name': fakes.ID_EC2_VPN_CONNECTION_1}})
self.neutron.update_ipsecpolicy.assert_called_once_with(
fakes.ID_OS_IPSECPOLICY_1,
{'ipsecpolicy': {'name': fakes.ID_EC2_VPN_CONNECTION_1}})
reset_vpn_connections.assert_called_once_with(
mock.ANY, self.neutron, mock.ANY, fakes.DB_VPN_GATEWAY_1,
vpn_connections=[new_vpn_connection_1])
self.assertIsInstance(reset_vpn_connections.call_args[0][2],
common.OnCrashCleaner)
describe_vpn_connections.assert_called_once_with(
mock.ANY, vpn_connection_id=[fakes.ID_EC2_VPN_CONNECTION_1])
@mock.patch('ec2api.api.vpn_connection.describe_vpn_connections')
def test_create_vpn_connection_idempotent(self, describe_vpn_connections):
self.set_mock_db_items(
fakes.DB_VPN_GATEWAY_1, fakes.DB_CUSTOMER_GATEWAY_1,
fakes.DB_VPN_CONNECTION_1)
describe_vpn_connections.return_value = {
'vpnConnectionSet': [fakes.EC2_VPN_CONNECTION_1]}
resp = self.execute(
'CreateVpnConnection',
{'VpnGatewayId': fakes.ID_EC2_VPN_GATEWAY_1,
'CustomerGatewayId': fakes.ID_EC2_CUSTOMER_GATEWAY_1,
'Type': 'ipsec.1',
'Options.StaticRoutesOnly': 'True'})
self.assertThat({'vpnConnection': fakes.EC2_VPN_CONNECTION_1},
matchers.DictMatches(resp))
self.assertFalse(self.neutron.create_ikepolicy.called)
self.assertFalse(self.neutron.create_ipsecpolicy.called)
self.assertFalse(self.db_api.add_item.called)
describe_vpn_connections.assert_called_once_with(
mock.ANY, vpn_connection_id=[fakes.ID_EC2_VPN_CONNECTION_1])
def test_create_vpn_connection_invalid_parameters(self):
self.assert_execution_error(
'Unsupported', 'CreateVpnConnection',
{'VpnGatewayId': fakes.ID_EC2_VPN_GATEWAY_1,
'CustomerGatewayId': fakes.ID_EC2_CUSTOMER_GATEWAY_1,
'Type': 'ipsec.1',
'Options.StaticRoutesOnly': 'False'})
self.assert_execution_error(
'Unsupported', 'CreateVpnConnection',
{'VpnGatewayId': fakes.ID_EC2_VPN_GATEWAY_1,
'CustomerGatewayId': fakes.ID_EC2_CUSTOMER_GATEWAY_1,
'Type': 'ipsec.1'})
self.set_mock_db_items(fakes.DB_CUSTOMER_GATEWAY_1)
self.assert_execution_error(
'InvalidVpnGatewayID.NotFound', 'CreateVpnConnection',
{'VpnGatewayId': fakes.ID_EC2_VPN_GATEWAY_2,
'CustomerGatewayId': fakes.ID_EC2_CUSTOMER_GATEWAY_1,
'Type': 'ipsec.1',
'Options.StaticRoutesOnly': 'True'})
self.set_mock_db_items(fakes.DB_VPN_GATEWAY_1)
self.assert_execution_error(
'InvalidCustomerGatewayID.NotFound', 'CreateVpnConnection',
{'VpnGatewayId': fakes.ID_EC2_VPN_GATEWAY_2,
'CustomerGatewayId': fakes.ID_EC2_CUSTOMER_GATEWAY_1,
'Type': 'ipsec.1',
'Options.StaticRoutesOnly': 'True'})
self.set_mock_db_items(
fakes.DB_VPN_GATEWAY_2, fakes.DB_CUSTOMER_GATEWAY_1,
fakes.DB_VPN_CONNECTION_1)
self.assert_execution_error(
'InvalidCustomerGateway.DuplicateIpAddress', 'CreateVpnConnection',
{'VpnGatewayId': fakes.ID_EC2_VPN_GATEWAY_2,
'CustomerGatewayId': fakes.ID_EC2_CUSTOMER_GATEWAY_1,
'Type': 'ipsec.1',
'Options.StaticRoutesOnly': 'True'})
@tools.screen_unexpected_exception_logs
def test_create_vpn_connection_rollback(self):
self.set_mock_db_items(fakes.DB_VPN_GATEWAY_1,
fakes.DB_CUSTOMER_GATEWAY_1)
self.neutron.create_ikepolicy.side_effect = (
tools.get_neutron_create('ikepolicy', fakes.ID_OS_IKEPOLICY_1))
self.neutron.create_ipsecpolicy.side_effect = (
tools.get_neutron_create('ipsecpolicy', fakes.ID_OS_IPSECPOLICY_1))
self.db_api.add_item.side_effect = (
tools.get_db_api_add_item(fakes.ID_EC2_VPN_CONNECTION_1))
self.neutron.update_ikepolicy.side_effect = Exception()
self.assert_execution_error(
self.ANY_EXECUTE_ERROR, 'CreateVpnConnection',
{'VpnGatewayId': fakes.ID_EC2_VPN_GATEWAY_1,
'CustomerGatewayId': fakes.ID_EC2_CUSTOMER_GATEWAY_1,
'Type': 'ipsec.1',
'Options.StaticRoutesOnly': 'True'})
self.db_api.delete_item.assert_called_once_with(
mock.ANY, fakes.ID_EC2_VPN_CONNECTION_1)
self.neutron.delete_ipsecpolicy.assert_called_once_with(
fakes.ID_OS_IPSECPOLICY_1)
self.neutron.delete_ikepolicy.assert_called_once_with(
fakes.ID_OS_IKEPOLICY_1)
@mock.patch('ec2api.api.vpn_connection._reset_vpn_connections',
wraps=vpn_connection_api._reset_vpn_connections)
def test_create_vpn_connection_route(self, reset_vpn_connections):
self.set_mock_db_items(fakes.DB_VPN_CONNECTION_2,
fakes.DB_VPN_GATEWAY_2)
resp = self.execute(
'CreateVpnConnectionRoute',
{'VpnConnectionId': fakes.ID_EC2_VPN_CONNECTION_2,
'DestinationCidrBlock': '192.168.123.0/24'})
self.assertEqual({'return': True}, resp)
vpn = copy.deepcopy(fakes.DB_VPN_CONNECTION_2)
vpn['cidrs'].append('192.168.123.0/24')
self.db_api.update_item.assert_called_once_with(mock.ANY, vpn)
reset_vpn_connections.assert_called_once_with(
mock.ANY, self.neutron, mock.ANY, fakes.DB_VPN_GATEWAY_2,
vpn_connections=[vpn])
def test_create_vpn_connection_route_idempotent(self):
self.set_mock_db_items(fakes.DB_VPN_CONNECTION_2)
resp = self.execute(
'CreateVpnConnectionRoute',
{'VpnConnectionId': fakes.ID_EC2_VPN_CONNECTION_2,
'DestinationCidrBlock': fakes.CIDR_VPN_2_PROPAGATED_1})
self.assertEqual({'return': True}, resp)
self.assertFalse(self.db_api.update_item.called)
def test_create_vpn_connection_route_invalid_parameters(self):
self.set_mock_db_items()
self.assert_execution_error(
'InvalidVpnConnectionID.NotFound', 'CreateVpnConnectionRoute',
{'VpnConnectionId': fakes.ID_EC2_VPN_CONNECTION_2,
'DestinationCidrBlock': fakes.CIDR_VPN_2_PROPAGATED_1})
@tools.screen_unexpected_exception_logs
@mock.patch('ec2api.api.vpn_connection._reset_vpn_connections')
def test_create_vpn_connection_route_rollback(self, reset_vpn_connections):
self.set_mock_db_items(fakes.DB_VPN_CONNECTION_2,
fakes.DB_VPN_GATEWAY_2)
reset_vpn_connections.side_effect = Exception()
self.assert_execution_error(
self.ANY_EXECUTE_ERROR, 'CreateVpnConnectionRoute',
{'VpnConnectionId': fakes.ID_EC2_VPN_CONNECTION_2,
'DestinationCidrBlock': '192.168.123.0/24'})
self.db_api.update_item.assert_called_with(
mock.ANY, fakes.DB_VPN_CONNECTION_2)
@mock.patch('ec2api.api.vpn_connection._reset_vpn_connections',
wraps=vpn_connection_api._reset_vpn_connections)
def test_delete_vpn_connection_route(self, reset_vpn_connections):
self.set_mock_db_items(fakes.DB_VPN_CONNECTION_2,
fakes.DB_VPN_GATEWAY_2)
resp = self.execute(
'DeleteVpnConnectionRoute',
{'VpnConnectionId': fakes.ID_EC2_VPN_CONNECTION_2,
'DestinationCidrBlock': fakes.CIDR_VPN_2_PROPAGATED_1})
self.assertEqual({'return': True}, resp)
vpn = tools.update_dict(fakes.DB_VPN_CONNECTION_2,
{'cidrs': [fakes.CIDR_VPN_2_PROPAGATED_2]})
self.db_api.update_item.assert_called_once_with(mock.ANY, vpn)
reset_vpn_connections.assert_called_once_with(
mock.ANY, self.neutron, mock.ANY, fakes.DB_VPN_GATEWAY_2,
vpn_connections=[vpn])
def test_delete_vpn_connection_route_invalid_parameters(self):
self.set_mock_db_items()
self.assert_execution_error(
'InvalidVpnConnectionID.NotFound', 'DeleteVpnConnectionRoute',
{'VpnConnectionId': fakes.ID_EC2_VPN_CONNECTION_2,
'DestinationCidrBlock': fakes.CIDR_VPN_2_PROPAGATED_1})
self.set_mock_db_items(fakes.DB_VPN_CONNECTION_2)
self.assert_execution_error(
'InvalidRoute.NotFound', 'DeleteVpnConnectionRoute',
{'VpnConnectionId': fakes.ID_EC2_VPN_CONNECTION_2,
'DestinationCidrBlock': '192.168.123.0/24'})
@tools.screen_unexpected_exception_logs
@mock.patch('ec2api.api.vpn_connection._reset_vpn_connections')
def test_delete_vpn_connection_route_rollback(self, reset_vpn_connections):
self.set_mock_db_items(fakes.DB_VPN_CONNECTION_2,
fakes.DB_VPN_GATEWAY_2)
reset_vpn_connections.side_effect = Exception()
self.assert_execution_error(
self.ANY_EXECUTE_ERROR, 'DeleteVpnConnectionRoute',
{'VpnConnectionId': fakes.ID_EC2_VPN_CONNECTION_2,
'DestinationCidrBlock': fakes.CIDR_VPN_2_PROPAGATED_1})
self.assert_any_call(self.db_api.update_item,
mock.ANY, fakes.DB_VPN_CONNECTION_2)
def test_delete_vpn_connection(self):
self.set_mock_db_items(fakes.DB_VPN_CONNECTION_1)
resp = self.execute('DeleteVpnConnection',
{'VpnConnectionId': fakes.ID_EC2_VPN_CONNECTION_1})
self.assertEqual({'return': True}, resp)
self.db_api.delete_item.assert_called_once_with(
mock.ANY, fakes.ID_EC2_VPN_CONNECTION_1)
self.neutron.delete_ipsec_site_connection.assert_called_once_with(
fakes.ID_OS_IPSEC_SITE_CONNECTION_2)
self.neutron.delete_ipsecpolicy.assert_called_once_with(
fakes.ID_OS_IPSECPOLICY_1)
self.neutron.delete_ikepolicy.assert_called_once_with(
fakes.ID_OS_IKEPOLICY_1)
def test_delete_vpn_connection_invalid_parameters(self):
self.set_mock_db_items()
self.assert_execution_error(
'InvalidVpnConnectionID.NotFound', 'DeleteVpnConnection',
{'VpnConnectionId': fakes.ID_EC2_VPN_CONNECTION_1})
@tools.screen_unexpected_exception_logs
def test_delete_vpn_connection_rollback(self):
self.set_mock_db_items(fakes.DB_VPN_CONNECTION_1)
self.neutron.delete_ikepolicy.side_effect = Exception()
self.assert_execution_error(
self.ANY_EXECUTE_ERROR, 'DeleteVpnConnection',
{'VpnConnectionId': fakes.ID_EC2_VPN_CONNECTION_1})
self.db_api.restore_item.assert_called_once_with(
mock.ANY, 'vpn', fakes.DB_VPN_CONNECTION_1)
self.assertFalse(self.neutron.create_ipsec_site_connection.called)
self.assertFalse(self.neutron.create_ipsecpolicy.called)
self.assertFalse(self.neutron.create_ikepolicy.called)
def test_describe_vpn_connections(self):
self.set_mock_db_items(
fakes.DB_VPN_CONNECTION_1, fakes.DB_VPN_CONNECTION_2,
fakes.DB_CUSTOMER_GATEWAY_1, fakes.DB_CUSTOMER_GATEWAY_2,
fakes.DB_VPN_GATEWAY_1, fakes.DB_VPN_GATEWAY_2,
fakes.DB_VPC_1, fakes.DB_VPC_2)
self.neutron.list_ikepolicies.return_value = {
'ikepolicies': [fakes.OS_IKEPOLICY_1, fakes.OS_IKEPOLICY_2]}
self.neutron.list_ipsecpolicies.return_value = {
'ipsecpolicies': [fakes.OS_IPSECPOLICY_1, fakes.OS_IPSECPOLICY_2]}
self.neutron.list_ipsec_site_connections.return_value = {
'ipsec_site_connections': []}
self.neutron.list_routers.return_value = {
'routers': [fakes.OS_ROUTER_1, fakes.OS_ROUTER_2]}
resp = self.execute('DescribeVpnConnections', {})
vpns = [tools.update_dict(
vpn, {'customerGatewayConfiguration': 'DONTCARE'})
for vpn in (fakes.EC2_VPN_CONNECTION_1,
fakes.EC2_VPN_CONNECTION_2)]
self.assertThat(
resp,
matchers.DictMatches(
{'vpnConnectionSet': vpns},
orderless_lists=True))
for vpn in (fakes.EC2_VPN_CONNECTION_1, fakes.EC2_VPN_CONNECTION_2):
config = next(v['customerGatewayConfiguration']
for v in resp['vpnConnectionSet']
if v['vpnConnectionId'] == vpn['vpnConnectionId'])
self.assertThat(
config.encode(),
matchers.XMLMatches(
vpn['customerGatewayConfiguration'].encode(),
orderless_sequence=True))
self.assertTrue(config.startswith(
'<?xml version=\'1.0\' encoding=\'UTF-8\'?>'))
self.neutron.list_ikepolicies.assert_called_once_with(
tenant_id=fakes.ID_OS_PROJECT)
self.neutron.list_ipsecpolicies.assert_called_once_with(
tenant_id=fakes.ID_OS_PROJECT)
self.neutron.list_ipsec_site_connections.assert_called_once_with(
tenant_id=fakes.ID_OS_PROJECT)
self.neutron.list_routers.assert_called_once_with(
tenant_id=fakes.ID_OS_PROJECT)
resp = self.execute(
'DescribeVpnConnections',
{'VpnConnectionId.1': fakes.ID_EC2_VPN_CONNECTION_1})
self.assertThat(
resp,
matchers.DictMatches(
{'vpnConnectionSet': [vpns[0]]},
orderless_lists=True))
self.check_filtering(
'DescribeVpnConnections', 'vpnConnectionSet',
[('customer-gateway-configuration',
'*' + fakes.PRE_SHARED_KEY_1 + '*'),
('customer-gateway-id', fakes.ID_EC2_CUSTOMER_GATEWAY_1),
('state', 'available'),
('option.static-routes-only', True),
('route.destination-cidr-block', fakes.CIDR_VPN_2_PROPAGATED_1),
('type', 'ipsec.1'),
('vpn-connection-id', fakes.ID_EC2_VPN_CONNECTION_1),
('vpn-gateway-id', fakes.ID_EC2_VPN_GATEWAY_1)])
self.check_tag_support(
'DescribeVpnConnections', 'vpnConnectionSet',
fakes.ID_EC2_VPN_CONNECTION_1, 'vpnConnectionId')
def test_format_vpn_connection(self):
db_vpn_connection_1 = tools.update_dict(fakes.DB_VPN_CONNECTION_1,
{'cidrs': []})
ec2_vpn_connection_1 = tools.patch_dict(
fakes.EC2_VPN_CONNECTION_1,
{'routes': [], 'vgwTelemetry': []},
('customerGatewayConfiguration',))
formatted = vpn_connection_api._format_vpn_connection(
db_vpn_connection_1,
{fakes.ID_EC2_CUSTOMER_GATEWAY_1: fakes.DB_CUSTOMER_GATEWAY_1},
{}, {}, {}, {})
formatted.pop('customerGatewayConfiguration')
self.assertThat(ec2_vpn_connection_1, matchers.DictMatches(formatted))
def test_format_customer_config(self):
ikepolicy = {
'auth_algorithm': 'sha1-fake',
'encryption_algorithm': '3des',
'lifetime': {'value': 1111},
'pfs': 'group5',
'phase1_negotiation_mode': 'main-fake',
}
ipsecpolicy = {
'transform_protocol': 'ah-esp',
'auth_algorithm': 'sha1-fake',
'encryption_algorithm': 'aes-256',
'lifetime': {'value': 2222},
'pfs': 'group14',
'encapsulation_mode': 'transport',
}
ipsec_site_connection = {
'peer_address': '1.2.3.4',
'psk': 'password',
'mtu': 1400,
}
conf = vpn_connection_api._format_customer_config(
fakes.DB_VPN_CONNECTION_1,
{fakes.ID_EC2_CUSTOMER_GATEWAY_1: fakes.DB_CUSTOMER_GATEWAY_1},
{fakes.ID_OS_IKEPOLICY_1: ikepolicy},
{fakes.ID_OS_IPSECPOLICY_1: ipsecpolicy},
{fakes.ID_OS_IPSEC_SITE_CONNECTION_2: ipsec_site_connection},
{fakes.ID_EC2_VPN_GATEWAY_1: '5.6.7.8'})
self.assertThat(
{'ipsec_tunnel': {
'customer_gateway': {
'tunnel_outside_address': {'ip_address': '1.2.3.4'}},
'vpn_gateway': {
'tunnel_outside_address': {'ip_address': '5.6.7.8'}},
'ike': {'authentication_protocol': 'sha1-fake',
'encryption_protocol': '3des',
'lifetime': 1111,
'perfect_forward_secrecy': 'group5',
'mode': 'main-fake',
'pre_shared_key': 'password'},
'ipsec': {'protocol': 'ah-esp',
'authentication_protocol': 'sha1-fake',
'encryption_protocol': 'aes-256',
'lifetime': 2222,
'perfect_forward_secrecy': 'group14',
'mode': 'transport',
'tcp_mss_adjustment': 1400 - 40}}},
matchers.IsSubDictOf(conf))
def test_stop_vpn_connection(self):
# delete several connections
os_conn_ids = [fakes.random_os_id() for _x in range(3)]
fake_conn = {
'os_ipsec_site_connections': {
fakes.random_ec2_id('subnet'): conn_id
for conn_id in os_conn_ids}}
vpn_connection_api._stop_vpn_connection(self.neutron, fake_conn)
self.assertEqual(
3, self.neutron.delete_ipsec_site_connection.call_count)
for conn_id in os_conn_ids:
self.neutron.delete_ipsec_site_connection.assert_any_call(conn_id)
# delete several connections with exception suppressing
self.neutron.reset_mock()
self.neutron.delete_ipsec_site_connection.side_effect = [
None, neutron_exception.NotFound(), None]
vpn_connection_api._stop_vpn_connection(self.neutron, fake_conn)
self.assertEqual(
3, self.neutron.delete_ipsec_site_connection.call_count)
@mock.patch('ec2api.api.vpn_connection._stop_vpn_connection',
new_callable=tools.CopyingMock)
def test_stop_gateway_vpn_connections(self, stop_vpn_connection):
context = base.create_context()
cleaner = common.OnCrashCleaner()
vpn_connection_3 = tools.update_dict(
fakes.DB_VPN_CONNECTION_1,
{'id': fakes.random_ec2_id('vpn'),
'os_ipsec_site_connections': {}})
self.set_mock_db_items(fakes.DB_VPN_CONNECTION_1, vpn_connection_3,
fakes.DB_VPN_CONNECTION_2)
vpn_connection_api._stop_gateway_vpn_connections(
context, self.neutron, cleaner, fakes.DB_VPN_GATEWAY_1)
self.assertEqual(2, stop_vpn_connection.call_count)
stop_vpn_connection.assert_any_call(
self.neutron, fakes.DB_VPN_CONNECTION_1)
stop_vpn_connection.assert_any_call(
self.neutron, vpn_connection_3)
self.assertEqual(2, self.db_api.update_item.call_count)
self.db_api.update_item.assert_any_call(
mock.ANY, tools.update_dict(fakes.DB_VPN_CONNECTION_1,
{'os_ipsec_site_connections': {}}))
self.db_api.update_item.assert_any_call(
mock.ANY, vpn_connection_3)
self.db_api.reset_mock()
self.neutron.reset_mock()
stop_vpn_connection.reset_mock()
self.set_mock_db_items(fakes.DB_VPN_CONNECTION_1)
try:
with common.OnCrashCleaner() as cleaner:
vpn_connection_api._stop_gateway_vpn_connections(
context, self.neutron, cleaner, fakes.DB_VPN_GATEWAY_1)
raise Exception('fake-exception')
except Exception as ex:
if str(ex) != 'fake-exception':
raise
self.db_api.update_item.assert_called_with(
mock.ANY, fakes.DB_VPN_CONNECTION_1)
@mock.patch('ec2api.api.vpn_connection._reset_vpn_connections')
def test_update_vpn_routes(self, reset_vpn_connections):
context = base.create_context()
cleaner = common.OnCrashCleaner()
self.set_mock_db_items()
vpn_connection_api._update_vpn_routes(
context, self.neutron, cleaner,
fakes.DB_ROUTE_TABLE_1, [fakes.DB_SUBNET_1])
self.assertFalse(reset_vpn_connections.called)
self.set_mock_db_items(fakes.DB_VPN_GATEWAY_1)
vpn_connection_api._update_vpn_routes(
context, self.neutron, cleaner,
fakes.DB_ROUTE_TABLE_1, [fakes.DB_SUBNET_1])
reset_vpn_connections.assert_called_once_with(
context, self.neutron, cleaner, fakes.DB_VPN_GATEWAY_1,
route_tables=[fakes.DB_ROUTE_TABLE_1], subnets=[fakes.DB_SUBNET_1])
@mock.patch('ec2api.api.vpn_connection._delete_subnet_vpn')
@mock.patch('ec2api.api.vpn_connection._set_subnet_vpn')
@mock.patch('ec2api.api.vpn_connection._get_route_table_vpn_cidrs',
wraps=vpn_connection_api._get_route_table_vpn_cidrs)
def test_reset_vpn_connections(self, get_route_table_vpn_cidrs,
set_subnet_vpn, delete_subnet_vpn):
context = base.create_context()
cleaner = common.OnCrashCleaner()
vpn_gateway_3 = {'id': fakes.random_ec2_id('vpn'),
'os_id': None,
'vpc_id': None}
vpn_connection_api._reset_vpn_connections(
context, self.neutron, cleaner, vpn_gateway_3)
self.assertEqual(0, len(self.db_api.mock_calls))
self.assertFalse(get_route_table_vpn_cidrs.called)
self.assertFalse(set_subnet_vpn.called)
self.assertFalse(delete_subnet_vpn.called)
customer_gateway_3 = {'id': fakes.random_ec2_id('cgw')}
subnet_3 = {'id': fakes.random_ec2_id('subnet'),
'vpc_id': fakes.ID_EC2_VPC_2}
vpn_connection_3 = {'id': fakes.random_ec2_id('vpn'),
'vpn_gateway_id': fakes.ID_EC2_VPN_GATEWAY_1,
'customer_gateway_id': customer_gateway_3['id'],
'cidrs': []}
self.set_mock_db_items(
fakes.DB_VPC_1, fakes.DB_VPC_2,
fakes.DB_CUSTOMER_GATEWAY_1, fakes.DB_CUSTOMER_GATEWAY_2,
customer_gateway_3,
fakes.DB_SUBNET_1, fakes.DB_SUBNET_2, subnet_3,
fakes.DB_ROUTE_TABLE_1, fakes.DB_ROUTE_TABLE_2,
fakes.DB_ROUTE_TABLE_3,
fakes.DB_VPN_CONNECTION_1, fakes.DB_VPN_CONNECTION_2,
vpn_connection_3)
# common case
vpn_connection_api._reset_vpn_connections(
context, self.neutron, cleaner, fakes.DB_VPN_GATEWAY_1)
self.assertEqual(2, set_subnet_vpn.call_count)
set_subnet_vpn.assert_any_call(
context, self.neutron, cleaner, fakes.DB_SUBNET_2,
fakes.DB_VPN_CONNECTION_1, fakes.DB_CUSTOMER_GATEWAY_1,
[fakes.CIDR_VPN_1_STATIC])
set_subnet_vpn.assert_any_call(
context, self.neutron, cleaner, fakes.DB_SUBNET_2,
vpn_connection_3, customer_gateway_3,
[fakes.CIDR_VPN_1_STATIC])
self.assertEqual(2, delete_subnet_vpn.call_count)
delete_subnet_vpn.assert_any_call(
context, self.neutron, cleaner, fakes.DB_SUBNET_1,
fakes.DB_VPN_CONNECTION_1)
delete_subnet_vpn.assert_any_call(
context, self.neutron, cleaner, fakes.DB_SUBNET_1,
vpn_connection_3)
self.assertEqual(2, get_route_table_vpn_cidrs.call_count)
get_route_table_vpn_cidrs.assert_any_call(
fakes.DB_ROUTE_TABLE_1, fakes.DB_VPN_GATEWAY_1,
[fakes.DB_VPN_CONNECTION_1, vpn_connection_3])
get_route_table_vpn_cidrs.assert_any_call(
fakes.DB_ROUTE_TABLE_3, fakes.DB_VPN_GATEWAY_1,
[fakes.DB_VPN_CONNECTION_1, vpn_connection_3])
# reset for the vpn connection
set_subnet_vpn.reset_mock()
delete_subnet_vpn.reset_mock()
self.db_api.reset_mock()
get_route_table_vpn_cidrs.reset_mock()
vpn_connection_api._reset_vpn_connections(
context, self.neutron, cleaner, fakes.DB_VPN_GATEWAY_1,
vpn_connections=[fakes.DB_VPN_CONNECTION_1])
self.assertEqual(1, set_subnet_vpn.call_count)
self.assertEqual(1, delete_subnet_vpn.call_count)
self.assertNotIn(mock.call(mock.ANY, 'vpn'),
self.db_api.get_items.mock_calls)
# reset for the subnet list
set_subnet_vpn.reset_mock()
delete_subnet_vpn.reset_mock()
self.db_api.reset_mock()
get_route_table_vpn_cidrs.reset_mock()
vpn_connection_api._reset_vpn_connections(
context, self.neutron, cleaner, fakes.DB_VPN_GATEWAY_1,
subnets=[fakes.DB_SUBNET_1])
self.assertFalse(set_subnet_vpn.called)
self.assertEqual(2, delete_subnet_vpn.call_count)
self.assertNotIn(mock.call(mock.ANY, 'subnets'),
self.db_api.get_items.mock_calls)
# reset for the subnet list and the route table
set_subnet_vpn.reset_mock()
delete_subnet_vpn.reset_mock()
self.db_api.reset_mock()
get_route_table_vpn_cidrs.reset_mock()
vpn_connection_api._reset_vpn_connections(
context, self.neutron, cleaner, fakes.DB_VPN_GATEWAY_1,
subnets=[fakes.DB_SUBNET_2], route_tables=[fakes.DB_ROUTE_TABLE_3])
self.assertEqual(2, set_subnet_vpn.call_count)
self.assertFalse(delete_subnet_vpn.called)
self.assertNotIn(mock.call(mock.ANY, 'subnets'),
self.db_api.get_items.mock_calls)
self.assertNotIn(mock.call(mock.ANY, 'rtb'),
self.db_api.get_items.mock_calls)
def test_set_subnet_vpn(self):
context = base.create_context()
cleaner = common.OnCrashCleaner()
cidrs = [fakes.CIDR_VPN_1_STATIC, fakes.CIDR_VPN_1_PROPAGATED_1]
# create ipsec site connection case
id_os_connection = fakes.random_os_id()
os_connection = {
'vpnservice_id': fakes.ID_OS_VPNSERVICE_1,
'ikepolicy_id': fakes.ID_OS_IKEPOLICY_1,
'ipsecpolicy_id': fakes.ID_OS_IPSECPOLICY_1,
'peer_address': fakes.IP_CUSTOMER_GATEWAY_ADDRESS_1,
'peer_cidrs': cidrs,
'psk': fakes.PRE_SHARED_KEY_1,
'name': (fakes.ID_EC2_VPN_CONNECTION_1 + '/' +
fakes.ID_EC2_SUBNET_1),
'peer_id': fakes.IP_CUSTOMER_GATEWAY_ADDRESS_1,
'mtu': 1427,
'initiator': 'response-only',
}
self.neutron.create_ipsec_site_connection.side_effect = (
tools.get_neutron_create('ipsec_site_connection',
id_os_connection))
vpn_connection_api._set_subnet_vpn(
context, self.neutron, cleaner, fakes.DB_SUBNET_1,
copy.deepcopy(fakes.DB_VPN_CONNECTION_1),
fakes.DB_CUSTOMER_GATEWAY_1, cidrs)
self.neutron.create_ipsec_site_connection.assert_called_once_with(
{'ipsec_site_connection': os_connection})
vpn_connection_1 = copy.deepcopy(fakes.DB_VPN_CONNECTION_1)
(vpn_connection_1['os_ipsec_site_connections']
[fakes.ID_EC2_SUBNET_1]) = id_os_connection
self.db_api.update_item.assert_called_once_with(
context, vpn_connection_1)
# update ipsec site connection case
self.db_api.reset_mock()
self.neutron.reset_mock()
vpn_connection_api._set_subnet_vpn(
context, self.neutron, cleaner, fakes.DB_SUBNET_2,
fakes.DB_VPN_CONNECTION_1, fakes.DB_CUSTOMER_GATEWAY_1, cidrs)
self.neutron.update_ipsec_site_connection.assert_called_once_with(
fakes.ID_OS_IPSEC_SITE_CONNECTION_2,
{'ipsec_site_connection': {'peer_cidrs': cidrs}})
self.assertFalse(self.neutron.create_ipsec_site_connection.called)
self.assertFalse(self.db_api.update_item.called)
# rollback creating of ipsec site connection case
self.db_api.reset_mock()
self.neutron.reset_mock()
try:
with common.OnCrashCleaner() as cleaner:
vpn_connection_api._set_subnet_vpn(
context, self.neutron, cleaner, fakes.DB_SUBNET_1,
copy.deepcopy(fakes.DB_VPN_CONNECTION_1),
fakes.DB_CUSTOMER_GATEWAY_1, cidrs)
raise Exception('fake-exception')
except Exception as ex:
if str(ex) != 'fake-exception':
raise
self.neutron.delete_ipsec_site_connection.assert_called_once_with(
id_os_connection)
self.db_api.update_item.assert_called_with(
mock.ANY, fakes.DB_VPN_CONNECTION_1)
# rollback updating of ipsec site connection case
self.db_api.reset_mock()
self.neutron.reset_mock()
try:
with common.OnCrashCleaner() as cleaner:
vpn_connection_api._set_subnet_vpn(
context, self.neutron, cleaner, fakes.DB_SUBNET_2,
fakes.DB_VPN_CONNECTION_1, fakes.DB_CUSTOMER_GATEWAY_1,
cidrs)
raise Exception('fake-exception')
except Exception as ex:
if str(ex) != 'fake-exception':
raise
self.assertFalse(self.neutron.delete_ipsec_site_connection.called)
self.assertFalse(self.db_api.update_item.called)
def test_delete_subnet_vpn(self):
context = base.create_context()
cleaner = common.OnCrashCleaner()
# subnet is not connected to the vpn
vpn_connection_api._delete_subnet_vpn(
context, self.neutron, cleaner, fakes.DB_SUBNET_1,
fakes.DB_VPN_CONNECTION_1)
self.assertFalse(self.db_api.update_item.called)
self.assertFalse(self.neutron.delete_ipsec_site_connection.called)
# delete subnet vpn connection
vpn_connection_api._delete_subnet_vpn(
context, self.neutron, cleaner, fakes.DB_SUBNET_2,
copy.deepcopy(fakes.DB_VPN_CONNECTION_1))
self.db_api.update_item.assert_called_once_with(
mock.ANY, tools.update_dict(fakes.DB_VPN_CONNECTION_1,
{'os_ipsec_site_connections': {}}))
self.neutron.delete_ipsec_site_connection.assert_called_once_with(
fakes.ID_OS_IPSEC_SITE_CONNECTION_2)
# delete subnet vpn connection, leave connections of other subnets
self.db_api.reset_mock()
self.neutron.reset_mock()
id_os_connection = fakes.random_os_id()
vpn_connection_1 = copy.deepcopy(fakes.DB_VPN_CONNECTION_1)
(vpn_connection_1['os_ipsec_site_connections']
[fakes.ID_EC2_SUBNET_1]) = id_os_connection
vpn_connection_api._delete_subnet_vpn(
context, self.neutron, cleaner, fakes.DB_SUBNET_1,
vpn_connection_1)
self.db_api.update_item.assert_called_once_with(
mock.ANY, fakes.DB_VPN_CONNECTION_1)
self.neutron.delete_ipsec_site_connection.assert_called_once_with(
id_os_connection)
# rollback of deleting subnet vpn connection
self.db_api.reset_mock()
self.neutron.reset_mock()
try:
with common.OnCrashCleaner() as cleaner:
vpn_connection_api._delete_subnet_vpn(
context, self.neutron, cleaner, fakes.DB_SUBNET_2,
copy.deepcopy(fakes.DB_VPN_CONNECTION_1))
raise Exception('fake-exception')
except Exception as ex:
if str(ex) != 'fake-exception':
raise
self.db_api.update_item.assert_called_with(
mock.ANY, fakes.DB_VPN_CONNECTION_1)
self.assertFalse(self.neutron.create_ipsec_site_connection.called)
def test_get_route_table_vpn_cidrs(self):
route_table_1 = copy.deepcopy(fakes.DB_ROUTE_TABLE_1)
vpn_connection_1 = tools.update_dict(
fakes.DB_VPN_CONNECTION_1, {'cidrs': []})
vpn_connection_2 = tools.update_dict(
vpn_connection_1, {'id': fakes.ID_EC2_VPN_CONNECTION_2})
self.assertThat(
vpn_connection_api._get_route_table_vpn_cidrs(
route_table_1, fakes.DB_VPN_GATEWAY_1, []),
matchers.DictMatches({}))
self.assertThat(
vpn_connection_api._get_route_table_vpn_cidrs(
route_table_1, fakes.DB_VPN_GATEWAY_1,
[vpn_connection_1, vpn_connection_2]),
matchers.DictMatches({}))
route_table_1['propagating_gateways'] = [fakes.ID_EC2_VPN_GATEWAY_1,
fakes.ID_EC2_VPN_GATEWAY_2]
self.assertThat(
vpn_connection_api._get_route_table_vpn_cidrs(
route_table_1, fakes.DB_VPN_GATEWAY_1,
[vpn_connection_1, vpn_connection_2]),
matchers.DictMatches({}))
vpn_connection_1['cidrs'] = ['cidr_1']
self.assertThat(
vpn_connection_api._get_route_table_vpn_cidrs(
route_table_1, fakes.DB_VPN_GATEWAY_1,
[vpn_connection_1, vpn_connection_2]),
matchers.DictMatches({fakes.ID_EC2_VPN_CONNECTION_1: ['cidr_1']}))
vpn_connection_2['cidrs'] = ['cidr_1', 'cidr_2']
self.assertThat(
vpn_connection_api._get_route_table_vpn_cidrs(
route_table_1, fakes.DB_VPN_GATEWAY_1,
[vpn_connection_1, vpn_connection_2]),
matchers.DictMatches(
{fakes.ID_EC2_VPN_CONNECTION_1: ['cidr_1'],
fakes.ID_EC2_VPN_CONNECTION_2: ['cidr_1', 'cidr_2']},
orderless_lists=True))
route_table_1['routes'] = [
{'destination_cidr_block': 'fake_1',
'network_interface_id': fakes.ID_EC2_NETWORK_INTERFACE_1},
{'destination_cidr_block': 'fake_2',
'gateway_id': None},
{'destination_cidr_block': 'fake_3',
'gateway_id': fakes.ID_EC2_IGW_1},
{'destination_cidr_block': 'cidr_3',
'gateway_id': fakes.ID_EC2_VPN_GATEWAY_1},
{'destination_cidr_block': 'cidr_4',
'gateway_id': fakes.ID_EC2_VPN_GATEWAY_1},
{'destination_cidr_block': 'fake_4',
'gateway_id': fakes.ID_EC2_VPN_GATEWAY_2}]
self.assertThat(
vpn_connection_api._get_route_table_vpn_cidrs(
route_table_1, fakes.DB_VPN_GATEWAY_1,
[vpn_connection_1, vpn_connection_2]),
matchers.DictMatches(
{fakes.ID_EC2_VPN_CONNECTION_1: ['cidr_1', 'cidr_3', 'cidr_4'],
fakes.ID_EC2_VPN_CONNECTION_2: ['cidr_1', 'cidr_2',
'cidr_3', 'cidr_4']},
orderless_lists=True))
route_table_1['propagating_gateways'] = [fakes.ID_EC2_VPN_GATEWAY_2]
self.assertThat(
vpn_connection_api._get_route_table_vpn_cidrs(
route_table_1, fakes.DB_VPN_GATEWAY_1,
[vpn_connection_1, vpn_connection_2]),
matchers.DictMatches(
{fakes.ID_EC2_VPN_CONNECTION_1: ['cidr_3', 'cidr_4'],
fakes.ID_EC2_VPN_CONNECTION_2: ['cidr_3', 'cidr_4']},
orderless_lists=True))
|
"""`TypedDict`"""
from typing import TypedDict
Movie = TypedDict("Movie", {"name": str, "year": int})
movie: Movie = {"name": "Blade Runner", "year": 1982}
movie_bad: Movie = {"name": "Blade Runner", "year": 1982, "director": "Scott"}  # type checker error: extra key "director"
director = movie_bad["director"]  # type checker error: "director" is not a key of Movie
toy_story = Movie(name="Toy Story", year=1995)
toy_story_2: Movie = {"name": "Toy Story 2"}  # type checker error: missing key "year"
GuiOptions = TypedDict("GuiOptions", {"language": str, "color": str}, total=False)
options: GuiOptions = {}
options["language"] = "en"
print(options["color"]) # KeyError
print(options.get("color")) # None
reveal_type(options)  # recognized by the type checker only; NameError at runtime unless imported (typing, Python 3.11+)
class MovieClassBased(TypedDict):
name: str
year: int
class BookBasedMovie(MovieClassBased):
based_on: str
book_based_movie = BookBasedMovie(
name="The Social Network", year=2010, based_on="The Accidental Billionaires"
)
print(book_based_movie["name"])
print(book_based_movie.based_on)  # Error: a TypedDict is a plain dict at runtime, so attribute access fails
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Author: Christof Schöch, 2016-2021.
"""
This is the main script that controls the coleto text collation pipeline.
For more information, please see the README.md and HOWTO.md files.
An API reference can be found in the docs folder.
"""
# === Imports ===
import meta_parameters
import text_preprocess
import text_wdiff
import text_analyze
import stats_progression
import stats_distribution
import viz_progression
import viz_distribution
import meta_documentation
# === Functions ===
def main():
"""Defines the coleto pipeline."""
params = meta_parameters.main()
text_preprocess.main(params)
text_wdiff.main(params)
analysissummary = text_analyze.main(params)
stats_progression.main(params)
stats_distribution.main(params)
viz_progression.main(params)
viz_distribution.main(params)
meta_documentation.main(params, analysissummary)
print("\n== coleto: All done. ==\n\n")
if __name__ == "__main__":
    main()
|
from __future__ import unicode_literals
from frappe import _
def get_dashboard_data(data):
return {
'fieldname': 'company'
}
|
# -*- coding: utf-8 -*-
"""
Code for selecting top N models and build stacker on them.
Competition: HomeDepot Search Relevance
Author: Kostia Omelianchuk
Team: Turing test
"""
from config_IgorKostia import *
import os
import pandas as pd
import xgboost as xgb
import csv
import random
import numpy as np
import scipy as sp
import numpy.random as npr
import matplotlib.pyplot as plt
from sklearn.cross_validation import StratifiedKFold
from sklearn.linear_model import LogisticRegression
from sklearn.linear_model import LinearRegression, Ridge
from sklearn.svm import SVR,LinearSVC
from sklearn import neighbors
from sklearn import linear_model
from time import time
from sklearn.ensemble import AdaBoostRegressor, BaggingRegressor, RandomTreesEmbedding
from sklearn.tree import DecisionTreeRegressor
from sklearn import metrics
from math import sqrt
from sklearn.ensemble import RandomForestRegressor, ExtraTreesRegressor, GradientBoostingRegressor
from sklearn.decomposition import TruncatedSVD
from sklearn.random_projection import sparse_random_matrix
from sklearn import preprocessing
drop_list= []
#loading models
#9 model
train_f_1000 = pd.read_csv(MODELS_DIR+'/train_first_1000.csv', encoding="utf-8")
train_s_1000 = pd.read_csv(MODELS_DIR+'/train_second_1000.csv', encoding="utf-8")
train_f_1001 = pd.read_csv(MODELS_DIR+'/train_first_1001.csv', encoding="utf-8")
train_f_2000 = pd.read_csv(MODELS_DIR+'/train_first_2000.csv', encoding="utf-8")
train_s_2000 = pd.read_csv(MODELS_DIR+'/train_second_2000.csv', encoding="utf-8")
test_f_1000 = pd.read_csv(MODELS_DIR+'/test_first_1000.csv', encoding="utf-8")
test_s_1000 = pd.read_csv(MODELS_DIR+'/test_second_1000.csv', encoding="utf-8")
test_f_1001 = pd.read_csv(MODELS_DIR+'/test_first_1001.csv', encoding="utf-8")
test_f_2000 = pd.read_csv(MODELS_DIR+'/test_first_2000.csv', encoding="utf-8")
test_s_2000 = pd.read_csv(MODELS_DIR+'/test_second_2000.csv', encoding="utf-8")
#6 model
train_f_3000 = pd.read_csv(MODELS_DIR+'/train_first_3000.csv', encoding="utf-8")
train_s_3000 = pd.read_csv(MODELS_DIR+'/train_second_3000.csv', encoding="utf-8")
test_f_3000 = pd.read_csv(MODELS_DIR+'/test_first_3000.csv', encoding="utf-8")
test_s_3000 = pd.read_csv(MODELS_DIR+'/test_second_3000.csv', encoding="utf-8")
#6 model only kostia features
train_f_3010 = pd.read_csv(MODELS_DIR+'/train_first_3010.csv', encoding="utf-8")
test_f_3010 = pd.read_csv(MODELS_DIR+'/test_first_3010.csv', encoding="utf-8")
#6 model (4SVR + 2xgb) on corelated fetures
train_f_3020 = pd.read_csv(MODELS_DIR+'/train_first_3020.csv', encoding="utf-8")
test_f_3020 = pd.read_csv(MODELS_DIR+'/test_first_3020.csv', encoding="utf-8")
train=pd.DataFrame()
test=pd.DataFrame()
train = pd.concat([train_f_1000, train_s_1000, train_f_1001, train_f_2000, train_s_2000, train_f_3000, train_s_3000, train_f_3010,train_f_3020], axis=1)
test = pd.concat([test_f_1000, test_s_1000, test_f_1001, test_f_2000, test_s_2000, test_f_3000, test_s_3000 , test_f_3010, test_f_3020], axis=1)
#adding_some_metafeatures
df_all = pd.read_csv(FEATURES_DIR+'/df_basic_features.csv', encoding="utf-8")
t1=df_all['id'].map(lambda x: int(x<163800))
t2=df_all['id'].map(lambda x: int(x>206650))
t3=df_all['id'].map(lambda x: int(x<163800) or int(x>221473))
df_train = pd.read_csv(DATA_DIR+'/train.csv', encoding="ISO-8859-1")
df_test = pd.read_csv(DATA_DIR+'/test.csv', encoding="ISO-8859-1")
num_train = df_train.shape[0]
y = df_all["relevance"][:num_train]
id_test=df_all["id"][num_train:]
t1_tr=t1.iloc[:num_train]
t2_tr=t2.iloc[:num_train]
t3_tr=t3.iloc[:num_train]
t1_tt=pd.DataFrame(t1.iloc[num_train:])
t2_tt=pd.DataFrame(t2.iloc[num_train:])
t3_tt=pd.DataFrame(t3.iloc[num_train:])
t1_tt.index=range(len(t1_tt))
t2_tt.index=range(len(t2_tt))
t3_tt.index=range(len(t3_tt))
train=pd.concat([train, t3_tr ], axis=1)
test=pd.concat([test, t3_tt ], axis=1)
#rename columns
train.columns=range(len(train.keys()))
test.columns=range(len(test.keys()))
#train["relevance"]=y["relevance"]
train["relevance"]=y
trainX=train
y_tr = trainX['relevance'].values
X_tr = trainX.drop(['relevance'],axis=1).values
from sklearn.linear_model import LinearRegression, Ridge
from sklearn import metrics
from scipy.optimize import nnls
class MLR(object):
def __init__(self):
self.coef_ = 0
def fit(self, X, y):
self.coef_ = sp.optimize.nnls(X, y)[0]
self.coef_ = np.array(map(lambda x: x/sum(self.coef_), self.coef_))
def predict(self, X):
predictions = np.array(map(sum, self.coef_ * X))
return predictions
#selecting stacker model
n_folds=5
skf = list(StratifiedKFold(y_tr, n_folds, shuffle=True))
blend_train = np.zeros((X_tr.shape[0]))
#clf=MLR()
clf = LinearRegression()
#clf = neighbors.KNeighborsRegressor(128, weights="uniform", leaf_size=5)
#select first model
mn_rmse=1
model_n=0
for i in range(0,len(train.keys())-1):
for j, (train_index, cv_index) in enumerate(skf):
#print 'Fold [%s]' % (j)
# This is the training and validation set
X_train = X_tr[:,i][train_index]
Y_train = y_tr[train_index]
X_cv = X_tr[:,i][cv_index]
Y_cv = y_tr[cv_index]
X_train=X_train.reshape((len(X_train),1))
Y_train=Y_train.reshape((len(Y_train),1))
X_cv=X_cv.reshape((len(X_cv),1))
Y_cv=Y_cv.reshape((len(Y_cv),1))
clf.fit(X_train,Y_train)
blend_train[cv_index] = clf.predict(X_cv)
if sqrt(metrics.mean_squared_error(y_tr, blend_train))<mn_rmse:
mn_rmse=sqrt(metrics.mean_squared_error(y_tr, blend_train))
print i, mn_rmse
model_n=i
#print i, sqrt(metrics.mean_squared_error(y_tr, blend_train))
model_list=list()
model_list.append(model_n)
model_collection=X_tr[:,model_n]
model_collection=np.vstack((model_collection)).T
cur_mn=mn_rmse
#select other models
for j in range(len(train.keys())-1):
pred_mn_rmse=cur_mn
for i in range(len(train.keys())-1):
if (i in model_list):
OK="OK"
else:
for k, (train_index, cv_index) in enumerate(skf):
# This is the training and validation set
X_train = X_tr[:,i][train_index]
Y_train = y_tr[train_index]
X_cv = X_tr[:,i][cv_index]
Y_cv = y_tr[cv_index]
CV_m=model_collection[0][train_index]
for it in range(1,len(model_collection)):
tmp=model_collection[it][train_index]
CV_m=np.vstack((CV_m,tmp))
clf.fit(np.vstack((CV_m,X_train)).T, Y_train)
#clf.fit(X_train,Y_train)
CV_n=model_collection[0][cv_index]
for it in range(1,len(model_collection)):
tmp=model_collection[it][cv_index]
CV_n=np.vstack((CV_n,tmp))
blend_train[cv_index] = clf.predict(np.vstack((CV_n,X_cv)).T)
if sqrt(metrics.mean_squared_error(y_tr, blend_train))<cur_mn:
cur_mn = sqrt(metrics.mean_squared_error(y_tr, blend_train))
model_n=i
if (model_list[len(model_list)-1]==model_n) or abs(cur_mn-pred_mn_rmse)<0.00001:
break
model_list.append(model_n)
model_collection=np.vstack((model_collection,X_tr[:,model_n]))
print model_list
print cur_mn
print len(model_list)
#choose top12 models
model_list2=model_list[0:12]
test_fin=test[model_list2]
train_fin=train[model_list2]
#select model for stacking
clf = Ridge(alpha=3.0)
clf.fit(train_fin, y)
pred1 = clf.predict(test_fin)
pred1[pred1<1.]=1.
pred1[pred1>3.]=3.
#saved_results
pd.DataFrame({"id": id_test, "relevance": pred1}).to_csv(MODELS_DIR+"/submissions_ensemble_n_models_from_m_11_04_2016.csv",index=False)
#X_new=train_fin
#import statsmodels.api as sm
#X_new = sm.add_constant( X_new )
#results = sm.OLS(y, X_new).fit()
#print results.summary()
|
import heapq
import inspect
import sys
"""
Data structures useful for implementing Best-First Search
"""
class FrontierPriorityQueueWithFunction(object):
'''
    FrontierPriorityQueueWithFunction implements a search frontier using a
    PriorityQueueWithFunction for ordering the nodes and a set for
    constant-time checks of whether a state is in the frontier.
OBSERVATION: it receives as input a function `f` that
itself receives a node and returns the priority for
the given node. Check util.PriorityQueueWithFunction for
more details.
'''
def __init__(self, f):
self._queue = PriorityQueueWithFunction(f)
self._set = set()
def __contains__(self, node):
''' Return true if `node.state` is in the frontier. '''
return node.state in self._set
def __len__(self):
''' Return the number of nodes in frontier. '''
assert(len(self._queue) == len(self._set))
return len(self._queue)
def is_empty(self):
''' Return true if frontier is empty. '''
return self._queue.isEmpty()
def push(self, node):
''' Push `node` to frontier. '''
self._queue.push(node)
self._set.add(node.state)
def pop(self):
''' Pop `node` from frontier. '''
node = self._queue.pop()
        self._set.discard(node.state)  # previously used remove; discard avoids KeyError
return node
def __str__(self):
''' Return string representation of frontier. '''
return str(self._queue)
class PriorityQueue:
"""
Implements a priority queue data structure. Each inserted item
has a priority associated with it and the client is usually interested
in quick retrieval of the lowest-priority item in the queue. This
data structure allows O(1) access to the lowest-priority item.
"""
def __init__(self):
self.heap = []
self.count = 0
def __contains__(self, item):
for (_, _, i) in self.heap:
if i == item:
return True
return False
def __len__(self):
return len(self.heap)
def push(self, item, priority):
entry = (priority, self.count, item)
heapq.heappush(self.heap, entry)
self.count += 1
def pop(self):
(_, _, item) = heapq.heappop(self.heap)
return item
def isEmpty(self):
return len(self.heap) == 0
def update(self, item, priority):
# If item already in priority queue with higher priority, update its priority and rebuild the heap.
# If item already in priority queue with equal or lower priority, do nothing.
# If item not in priority queue, do the same thing as self.push.
for index, (p, c, i) in enumerate(self.heap):
if i == item:
#print("priority in heap: {}, requested priority: {}".format(p, priority))
if p <= priority:
break
del self.heap[index]
self.heap.append((priority, c, item))
heapq.heapify(self.heap)
break
else:
self.push(item, priority)
def __str__(self):
return str([(p, str(item)) for (p, _, item) in self.heap])
class PriorityQueueWithFunction(PriorityQueue):
"""
Implements a priority queue with the same push/pop signature of the
Queue and the Stack classes. This is designed for drop-in replacement for
those two classes. The caller has to provide a priority function, which
extracts each item's priority.
"""
def __init__(self, priorityFunction):
"priorityFunction (item) -> priority"
self.priorityFunction = priorityFunction # store the priority function
PriorityQueue.__init__(self) # super-class initializer
def push(self, item):
"Adds an item to the queue with priority from the priority function"
PriorityQueue.push(self, item, self.priorityFunction(item))
class Queue:
# ref: https://www.pythoncentral.io/use-queue-beginners-guide/
def __init__(self):
self.queue = list()
def push(self,data):
#Checking to avoid duplicate entry (not mandatory)
if data not in self.queue:
self.queue.insert(0,data)
return True
return False
def pop(self):
if len(self.queue)>0:
return self.queue.pop()
return ("Queue Empty!")
def isEmpty(self):
return len(self.queue) == 0
def size(self):
return len(self.queue)
def raiseNotDefined():
fileName = inspect.stack()[1][1]
line = inspect.stack()[1][2]
method = inspect.stack()[1][3]
print("*** Method not implemented: `%s` at line %s of %s" % (method, line, fileName))
sys.exit(1)
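# Minimal usage sketch (not part of the original module): a hypothetical node
# type with a `state` attribute and a numeric cost used as the priority.
if __name__ == '__main__':
    class _Node(object):
        def __init__(self, state, cost):
            self.state = state
            self.cost = cost
    frontier = FrontierPriorityQueueWithFunction(lambda node: node.cost)
    frontier.push(_Node('A', 3))
    frontier.push(_Node('B', 1))
    print(len(frontier))               # 2
    print(frontier.pop().state)        # 'B' -- the lowest-cost node comes out first
    print(_Node('A', 0) in frontier)   # True -- membership is checked by state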
|
import atexit
def my_cleanup(name):
print('my_cleanup(%s)' % name)
atexit.register(my_cleanup, 'first')
atexit.register(my_cleanup, 'second')
atexit.register(my_cleanup, 'third')
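# Note: atexit runs registered callbacks in last-in, first-out order, so at
# interpreter exit this prints my_cleanup(third), then (second), then (first).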
|
"""
Functional tests for ``flocker.common.script``.
"""
from __future__ import print_function
import os
import sys
from json import loads
from signal import SIGINT
from zope.interface import implementer
from eliot import Logger, Message
from eliot.testing import assertContainsFields
from twisted.trial.unittest import TestCase
from twisted.internet.utils import getProcessOutput
from twisted.internet.defer import succeed, Deferred
from twisted.python.log import msg, err
from ..script import ICommandLineScript
@implementer(ICommandLineScript)
class EliotScript(object):
def main(self, reactor, options):
logger = Logger()
Message.new(key=123).write(logger)
return succeed(None)
@implementer(ICommandLineScript)
class TwistedScript(object):
def main(self, reactor, options):
msg(b"hello")
return succeed(None)
@implementer(ICommandLineScript)
class TwistedErrorScript(object):
def main(self, reactor, options):
err(ZeroDivisionError("onoes"), b"A zero division ono")
return succeed(None)
@implementer(ICommandLineScript)
class StdoutStderrScript(object):
def main(self, reactor, options):
sys.stdout.write(b"stdout!\n")
sys.stderr.write(b"stderr!\n")
return succeed(None)
@implementer(ICommandLineScript)
class FailScript(object):
def main(self, reactor, options):
raise ZeroDivisionError("ono")
@implementer(ICommandLineScript)
class SigintScript(object):
def main(self, reactor, options):
reactor.callLater(0.05, os.kill, os.getpid(), SIGINT)
return Deferred()
class FlockerScriptRunnerTests(TestCase):
"""
Functional tests for ``FlockerScriptRunner``.
"""
def run_script(self, script):
"""
Run a script that logs messages and uses ``FlockerScriptRunner``.
:param ICommandLineScript: Script to run. Must be class in this module.
:return: ``Deferred`` that fires with list of decoded JSON messages.
"""
code = b'''\
from twisted.python.usage import Options
from flocker.common.script import FlockerScriptRunner
from flocker.common.functional.test_script import {}
FlockerScriptRunner({}(), Options()).main()
'''.format(script.__name__, script.__name__)
d = getProcessOutput(sys.executable, [b"-c", code], env=os.environ,
errortoo=True)
d.addCallback(lambda data: map(loads, data.splitlines()))
return d
def test_eliot_messages(self):
"""
Logged ``eliot`` messages get written to standard out.
"""
d = self.run_script(EliotScript)
d.addCallback(lambda messages: assertContainsFields(self, messages[1],
{u"key": 123}))
return d
def test_twisted_messages(self):
"""
Logged Twisted messages get written to standard out as ``eliot``
messages.
"""
d = self.run_script(TwistedScript)
d.addCallback(lambda messages: assertContainsFields(
self, messages[1], {u"message_type": u"twisted:log",
u"message": u"hello",
u"error": False}))
return d
def test_twisted_errors(self):
"""
Logged Twisted errors get written to standard out as ``eliot``
messages.
"""
message = (u'A zero division ono\nTraceback (most recent call '
u'last):\nFailure: exceptions.ZeroDivisionError: onoes\n')
d = self.run_script(TwistedErrorScript)
d.addCallback(lambda messages: assertContainsFields(
self, messages[1], {u"message_type": u"twisted:log",
u"message": message,
u"error": True}))
return d
def test_stdout_stderr(self):
"""
Output from Python code writing to ``sys.stdout`` and ``sys.stderr``
is captured and turned into Eliot log messages.
"""
d = self.run_script(StdoutStderrScript)
def got_messages(messages):
assertContainsFields(self, messages[1],
{u"message_type": u"twisted:log",
u"message": u"stdout!",
u"error": False})
assertContainsFields(self, messages[2],
{u"message_type": u"twisted:log",
u"message": u"stderr!",
u"error": True})
d.addCallback(got_messages)
return d
def test_error(self):
"""
A script that raises an exception exits, logging the error as an
``eliot` message.
"""
d = self.run_script(FailScript)
def got_messages(messages):
assertContainsFields(self, messages[1],
{u"message_type": u"twisted:log",
u"error": True})
self.assertTrue(messages[1][u"message"].startswith(
u"Unhandled Error\nTraceback (most recent call last):\n"))
self.assertTrue(messages[1][u"message"].endswith(
u"ZeroDivisionError: ono\n"))
d.addCallback(got_messages)
return d
def test_sigint(self):
"""
A script that is killed by signal exits, logging the signal.
"""
d = self.run_script(SigintScript)
d.addCallback(lambda messages: assertContainsFields(
self, messages[1], {u"message_type": u"twisted:log",
u"message": u"Received SIGINT, shutting down.",
u"error": False}))
return d
|
import torch.nn as nn
import math
from torch.hub import load_state_dict_from_url
__all__ = ['MobileNetV3']
model_url = ''
class MobileNetV3(nn.Module):
def __init__(self, num_classes=1000, init_weight=True, pretrain=True):
super(MobileNetV3, self).__init__()
# setting of inverted residual blocks
self._build(num_classes)
# automatically abandon init_weight if pretrain is True
if pretrain:
            assert model_url != '', f'Pretrained model for {self.__class__.__name__} not prepared yet.'
state_dict = load_state_dict_from_url(model_url,
progress=True)
self.load_state_dict(state_dict)
elif init_weight:
            self._initialize_weights()
def _build(self, num_classes):
l = list()
l.append(conv_3x3_bn(3, 16, 2))
l.append(InvertedResidual(16, 3, 16, 16, 0, 0, 1))
l.append(InvertedResidual(16, 3, 64, 24, 0, 0, 2))
l.append(InvertedResidual(24, 3, 72, 24, 0, 0, 1))
l.append(InvertedResidual(24, 5, 72, 40, 1, 0, 2))
l.append(InvertedResidual(40, 5, 120, 40, 1, 0, 1))
l.append(InvertedResidual(40, 5, 120, 40, 1, 0, 1))
l.append(InvertedResidual(40, 3, 240, 80, 0, 1, 2))
l.append(InvertedResidual(80, 3, 200, 80, 0, 1, 1))
l.append(InvertedResidual(80, 3, 184, 80, 0, 1, 1))
l.append(InvertedResidual(80, 3, 184, 80, 0, 1, 1))
l.append(InvertedResidual(80, 3, 480, 112, 1, 1, 1))
l.append(InvertedResidual(112, 3, 672, 112, 1, 1, 1))
l.append(InvertedResidual(112, 5, 672, 160, 1, 1, 1))
l.append(InvertedResidual(160, 5, 672, 160, 1, 1, 2))
l.append(InvertedResidual(160, 5, 960, 160, 1, 1, 1))
self.features = nn.Sequential(*l)
self.conv = conv_1x1_bn(160, 960)
self.avgpool = nn.Sequential(
nn.AdaptiveAvgPool2d((1, 1)),
h_swish()
)
self.classifier = nn.Sequential(
nn.Linear(960, 1280),
h_swish(),
nn.Linear(1280, num_classes)
)
def forward(self, x):
x = self.features(x)
x = self.conv(x)
x = self.avgpool(x)
x = x.view(x.size(0), -1)
x = self.classifier(x)
return x
def _initialize_weights(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
if m.bias is not None:
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.Linear):
m.weight.data.normal_(0, 0.01)
m.bias.data.zero_()
class h_sigmoid(nn.Module):
def __init__(self, inplace=True):
super(h_sigmoid, self).__init__()
self.relu = nn.ReLU6(inplace=inplace)
def forward(self, x):
return self.relu(x + 3) / 6
class h_swish(nn.Module):
def __init__(self, inplace=True):
super(h_swish, self).__init__()
self.sigmoid = h_sigmoid(inplace=inplace)
def forward(self, x):
return x * self.sigmoid(x)
class SELayer(nn.Module):
def __init__(self, channel, reduction=4):
super(SELayer, self).__init__()
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc = nn.Sequential(
nn.Linear(channel, channel // reduction),
nn.ReLU(inplace=True),
nn.Linear(channel // reduction, channel),
h_sigmoid()
)
def forward(self, x):
b, c, _, _ = x.size()
y = self.avg_pool(x).view(b, c)
y = self.fc(y).view(b, c, 1, 1)
return x * y
def conv_3x3_bn(inp, oup, stride):
return nn.Sequential(
nn.Conv2d(inp, oup, 3, stride, 1, bias=False),
nn.BatchNorm2d(oup),
h_swish()
)
def conv_1x1_bn(inp, oup):
return nn.Sequential(
nn.Conv2d(inp, oup, 1, 1, 0, bias=False),
nn.BatchNorm2d(oup),
h_swish()
)
class InvertedResidual(nn.Module):
def __init__(self, inp, kernel_size, hidden_dim, oup, use_se, use_hs, stride):
super(InvertedResidual, self).__init__()
assert stride in [1, 2]
self.identity = stride == 1 and inp == oup
if inp == hidden_dim:
self.conv = nn.Sequential(
# dw
nn.Conv2d(hidden_dim, hidden_dim, kernel_size, stride, (kernel_size - 1) // 2, groups=hidden_dim, bias=False),
nn.BatchNorm2d(hidden_dim),
h_swish() if use_hs else nn.ReLU(inplace=True),
# Squeeze-and-Excite
SELayer(hidden_dim) if use_se else nn.Sequential(),
# pw-linear
nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
nn.BatchNorm2d(oup),
)
else:
self.conv = nn.Sequential(
# pw
nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
nn.BatchNorm2d(hidden_dim),
h_swish() if use_hs else nn.ReLU(inplace=True),
# dw
nn.Conv2d(hidden_dim, hidden_dim, kernel_size, stride, (kernel_size - 1) // 2, groups=hidden_dim, bias=False),
nn.BatchNorm2d(hidden_dim),
# Squeeze-and-Excite
SELayer(hidden_dim) if use_se else nn.Sequential(),
h_swish() if use_hs else nn.ReLU(inplace=True),
# pw-linear
nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
nn.BatchNorm2d(oup),
)
def forward(self, x):
if self.identity:
return x + self.conv(x)
else:
return self.conv(x)
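# A minimal shape check (a sketch, not part of the original module): build the
# network without pretrained weights and push a dummy batch through it.
if __name__ == '__main__':
    import torch
    model = MobileNetV3(num_classes=10, pretrain=False)
    out = model(torch.randn(1, 3, 224, 224))
    print(out.shape)  # expected: torch.Size([1, 10])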
|
"""
Hosting Jupyter Notebooks on GitHub Pages
Author: Anshul Kharbanda
Created: 10 - 12 - 2020
"""
import os
import jinja2
import logging
from .config import Configurable, load_config_file
from . import builders
from . import loaders
# Default config file name
config_file = './config.py'
class Site(Configurable):
"""
Site object, handles all of the building
"""
# Default configuration for site
_config = {
'base_url': '',
'templates_dir': 'templates',
'static_dir': 'static',
'notebook_dir': 'notebook',
'output_dir': 'dist'
}
# Internal loaders map
_loaders = {
'notebooks': loaders.NotebookLoader(),
'statics': loaders.StaticLoader(),
'readme': loaders.MarkdownLoader(file='README.md'),
'pages': loaders.MarkdownLoader(directory='pages')
}
# Internal builders array
_builders = [
builders.NotebookBuilder(),
builders.IndexBuilder(),
builders.StaticBuilder(),
builders.PageBuilder()
]
@property
def jinja_loader(self):
"""
Return jinja2 filesystem loader for this config
"""
return jinja2.FileSystemLoader(self.templates_dir)
@property
def jinja_env(self):
"""
Return jinja2 environment for this config
"""
return jinja2.Environment(
loader=self.jinja_loader,
autoescape=jinja2.select_autoescape(['html']))
def build(self):
"""
Build site
"""
log = logging.getLogger('Site:build')
log.info('Building site.')
self._make_directory()
self._run_loaders()
self._run_builders()
def _make_directory(self):
"""
Ensure that output directory exists
"""
log = logging.getLogger('Site:_make_directory')
log.debug(f'Output Directory: {self.output_dir}')
if os.path.exists(f'./{self.output_dir}'):
log.info(f"'{self.output_dir}' directory exists!")
else:
log.info(f"Creating '{self.output_dir}' directory")
os.mkdir(f"{self.output_dir}")
def _run_loaders(self):
"""
Run loaders step
"""
log = logging.getLogger('Site:_run_loaders')
log.debug(f'Loaders: {self._loaders}')
for name, loader in self._loaders.items():
log.info(f'Running {loader}')
result = loader.load(self)
setattr(self, name, result)
def _run_builders(self):
"""
Run builders step
"""
log = logging.getLogger('Site:_run_builders')
log.debug(f'Builders: {self._builders}')
for builder in self._builders:
log.info(f'Running {builder}')
builder.build(self)
def load_site():
"""
Load site from config file
"""
# Get logger
log = logging.getLogger('load_site')
# Read config python file
log.debug(f'Config file: {config_file}')
if os.path.exists(config_file):
# Read config file
log.debug('Config file found!')
config = load_config_file(config_file)
log.debug(f'Config data: {config}')
return Site(**config)
else:
# Default config file
log.debug('No config file found')
log.debug('Default config')
return Site()
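# Typical entry point (a sketch; assumes the caller configures logging and runs
# from the project root where config.py may live):
#
#     logging.basicConfig(level=logging.INFO)
#     site = load_site()
#     site.build()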
|
#!/bin/python3
"""
By listing the first six prime numbers: 2, 3, 5, 7, 11, and 13, we can see that the 6th prime is 13.
What is the 10 001st prime number?
"""
# Check if number is prime or not
def prime_check(number):
if number == 2:
return True
for pointer in range(2, int(number ** 0.5) + 1):
if number % pointer == 0:
return False
return True
def solLogic(upper_limit):
    # Count is set to 1 because of 2
    count = 1
    # Candidate number to test, starting from 3 (2 is already counted)
    candidate = 3
    # Until we find our prime number, loop runs
    while count < upper_limit:
        # If number is prime, count is increased
        if prime_check(candidate):
            count += 1
        # When we find our prime number, we break off loop
        if count == upper_limit:
            break
        # Skips multiples of 2
        candidate += 2
    return candidate
if __name__ == '__main__':
print(solLogic(10001))
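    # Sanity check against the example in the docstring above: the 6th prime is 13.
    assert solLogic(6) == 13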
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from pkg_resources import parse_version
from cartoview.version import get_current_version
import requests
from geonode.urls import api
import json
class CartoviewHomeViewTest(TestCase):
def test_view_url_exists_at_desired_location(self):
resp = self.client.get('/')
self.assertEqual(resp.status_code, 200)
def test_view_url_accessible_by_name(self):
resp = self.client.get(reverse('home'))
self.assertEqual(resp.status_code, 200)
def test_view_uses_correct_template(self):
resp = self.client.get(reverse('home'))
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp, 'index.html')
class CartoviewCheckVersionViewTest(TestCase):
def test_view_url_exists_at_desired_location(self):
resp = self.client.get('/check-version/')
self.assertEqual(resp.status_code, 200)
def test_view_url_accessible_by_name(self):
resp = self.client.get(reverse('check_version'))
self.assertEqual(resp.status_code, 200)
def test_view_uses_correct_template(self):
resp = self.client.get(reverse('check_version'))
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp, 'cartoview/check_version.js')
self.assertEqual("current_version" in resp.context, True)
self.assertEqual("latest_version" in resp.context, True)
_version = parse_version(get_current_version())._version
release = _version.release
version = [str(x) for x in release]
current_version = ".".join(version)
req = requests.get("https://pypi.org/pypi/cartoview/json")
latest_version = str(req.json()["info"]["version"])
self.assertEqual(resp.context["current_version"], current_version)
self.assertEqual(resp.context["latest_version"], latest_version)
class CartoviewResourcesRegisteredTest(TestCase):
def test_api_url_exists_at_desired_location(self):
resp = self.client.get('/api/')
self.assertEqual(resp.status_code, 200)
def test_api_url_accessible_by_name(self):
resp = self.client.get(
reverse("api_%s_top_level" % api.api_name,
kwargs={"api_name": api.api_name}))
self.assertEqual(resp.status_code, 200)
def test_cartoview_resources_exists(self):
resp = self.client.get(
reverse("api_%s_top_level" % api.api_name,
kwargs={"api_name": api.api_name}))
self.assertEqual(resp.status_code, 200)
resources_dict = json.loads(resp.content)
self.assertTrue("app" in resources_dict.keys())
self.assertTrue("appinstances" in resources_dict.keys())
self.assertTrue("all_resources" in resources_dict.keys())
for endpoint in resources_dict.values():
resp = self.client.get(endpoint['list_endpoint'])
self.assertEqual(resp.status_code, 200)
|
from __future__ import absolute_import
from udsoncan import DataIdentifier, Routine, Units
from test.UdsTest import UdsTest
class TestDefinitions(UdsTest):
def test_data_identifier_name_from_id(self):
for i in xrange(0x10000):
name = DataIdentifier.name_from_id(i)
self.assertTrue(isinstance(name, unicode))
def test_routine_name_from_id(self):
for i in xrange(0x10000):
name = Routine.name_from_id(i)
self.assertTrue(isinstance(name, unicode))
|
def variance(data):
    """Return the population variance of a sequence of numbers."""
    n = len(data)
    mean = sum(data) / n
    deviations = [(x - mean) ** 2 for x in data]
    var = sum(deviations) / n
    return var
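# Example: variance([2, 4, 4, 4, 5, 5, 7, 9]) == 4.0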
|
#!/usr/bin/env python
import threading
import signal
import sys
import time
import subprocess
import os
import os.path
import argparse
def signal_handler(signal, frame):
    print 'You pressed CTRL+C, data is flushed into database/file...'
switchThread.running = False
switchThread.join()
formatString = "{0: <18} {1: <20} {2: <18}"
print formatString.format("mac", "ssid", "last seen")
for key, value in entries.iteritems():
print formatString.format(value.mac, value.ssid, time.strftime("%Y%m%d-%H:%M:%S", value.timeLastSeen))
sys.exit(0)
class switchChannelThread (threading.Thread):
def __init__(self, threadID, name, delayInSeconds):
threading.Thread.__init__(self)
self.threadID = threadID
self.name = name
if osname != "Darwin":
self.maxChannel = 12
else:
self.maxChannel = 14
self.delayInSeconds = delayInSeconds
self.running = True
def run(self):
        print 'Starting switch channel thread using a delay of %d seconds' % self.delayInSeconds
while self.running:
for channel in range (1, self.maxChannel + 1):
if verbose:
print 'Switching to channel %d' % (channel)
if osname != "Darwin":
if subprocess.call([iwconfigPath, interface, "channel", str(channel)]) != 0:
self.running = False
sys.exit(4)
else:
if subprocess.call([airportPath, interface, "-c%d" % channel]) != 0:
self.running = False
sys.exit(4)
time.sleep(self.delayInSeconds)
if not self.running:
return
class Entry (object):
def __init__(self, mac, ssid, time):
self.mac = mac
self.ssid = ssid
self.timeLastSeen = time
osname = os.uname()[0]
if osname != "Darwin":
defaultInterface = "wlan0"
else:
defaultInterface = "en1"
# command line parsing:
parser = argparse.ArgumentParser(description='Show and collect wlan request probes')
parser.add_argument('--interface', default=defaultInterface,
help='the interface used for monitoring')
parser.add_argument('--tsharkPath', default='/usr/local/bin/tshark',
help='path to tshark binary')
parser.add_argument('--ifconfigPath', default='/sbin/ifconfig',
help='path to ifconfig')
parser.add_argument('--iwconfigPath', default='/sbin/iwconfig',
help='path to iwconfig')
parser.add_argument('--verbose', action='store_true', help='verbose information')
args = parser.parse_args()
tsharkPath = args.tsharkPath
ifconfigPath = args.ifconfigPath
iwconfigPath = args.iwconfigPath
interface = args.interface
verbose = args.verbose
# only on osx:
airportPath = "/System/Library/PrivateFrameworks/Apple80211.framework/Versions/Current/Resources/airport";
# check all params
if not os.path.isfile(tsharkPath):
print "tshark not found at path {0}".format(tsharkPath)
sys.exit(1)
if not os.path.isfile(ifconfigPath):
print "ifconfig not found at path {0}".format(ifconfigPath)
sys.exit(1)
if osname != "Darwin":
if not os.path.isfile(iwconfigPath):
print "iwconfig not found at path {0}".format(iwconfigPath)
sys.exit(1)
# start interface
if subprocess.call([ifconfigPath, interface, 'up']) != 0:
print "cannot start interface: {0}".format(interface)
sys.exit(2)
# Set interface in monitor mode
retVal = 0
if osname != 'Darwin':
retVal = subprocess.call([iwconfigPath, interface, "mode", "monitor"])
else:
retVal = subprocess.call([airportPath, interface, "-z"])
if retVal != 0:
print "cannot set interface to monitor mode: {0}".format(interface)
sys.exit(3)
# start thread that switches channels
switchThread = switchChannelThread(1, 'SwitchChannel', 5)
switchThread.start()
signal.signal(signal.SIGINT, signal_handler)
print 'press CTRL+C to exit'
# signal.pause()
# start tshark and read the results
displayFilter = "wlan.fcs_good==1 and not wlan_mgt.ssid==\\\"\\\"";
fieldParams = "-T fields -e wlan.sa -e wlan_mgt.ssid -Eseparator=,";
tsharkCommandLine = "{0} -i {1} -n -l {2}"
if (osname != 'Darwin'):
tsharkCommandLine += " subtype probereq -2 -R \"{3}\""
else:
tsharkCommandLine += " -y PPI -2 -R \"wlan.fc.type_subtype==4 and {3}\""
tsharkCommandLine = tsharkCommandLine.format(tsharkPath, interface, fieldParams, displayFilter)
if verbose:
print 'tshark command: %s\n' % tsharkCommandLine,
DEVNULL = open(os.devnull, 'w')
popen = subprocess.Popen(tsharkCommandLine, shell=True, stdout=subprocess.PIPE, stderr=DEVNULL)
# collect all Entry objects in entries
entries = {}
for line in iter(popen.stdout.readline, ''):
line = line.rstrip()
# if verbose:
# print 'line: "%s"' % (line,)
if line.find(',') > 0:
mac, ssid = line.split(',', 1)
if line in entries:
if verbose:
print "entry found (seen before): mac: '{0}', ssid: '{1}'".format(mac,ssid)
entry = entries[line]
entry.timeLastSeen = time.localtime()
else:
print "new entry found: mac: '{0}', ssid: '{1}'".format(mac,ssid)
entries[line] = Entry(mac, ssid, time.localtime())
|
from setuptools import setup, find_packages
NAME = 'rboost'
VERSION = '0.2.0'
AUTHOR = 'SimoneGasperini'
AUTHOR_EMAIL = 'simone.gasperini2@studio.unibo.it'
REPO_URL = 'https://github.com/SimoneGasperini/rboost.git'
PYTHON_VERSION = '>=3.8'
def get_requirements():
with open('./requirements.txt', 'r') as f:
reqs = f.read().splitlines()
return reqs
setup(
name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=REPO_URL,
python_requires=PYTHON_VERSION,
packages=find_packages(),
install_requires=get_requirements(),
entry_points={'console_scripts': ['rboost=rboost.gui.__main:rboost']},
include_package_data=True
)
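# With this file in the project root, the package (and its `rboost` console
# script) can be installed with `pip install .`, or `pip install -e .` for development.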
|
from pytorch_lightning import LightningDataModule
from torch.utils.data import Dataset, DataLoader
from torch.utils.data.dataset import random_split
from torchvision.datasets import ImageFolder
import os
import sys
sys.path.append('.')
from utils.ImageFolderSplit import ImageFolderSplitter, DatasetFromFilename
class TinyImagenetDataModule(LightningDataModule):
def __init__(self, data_dir, batch_size, train_transforms, val_transforms, test_transforms, train_size=0.9, num_workers=16):
super().__init__()
self.data_dir=data_dir
self.train_transforms = train_transforms
self.val_transforms = val_transforms
self.test_transforms = test_transforms
self.batch_size = batch_size
self.train_size = train_size
self.num_workers = num_workers
def setup(self, stage):
if stage in (None, 'fit'):
# splitter = ImageFolderSplitter(path=os.path.join(self.data_dir,'train'), train_size=self.train_size)
# X_train, y_train = splitter.getTrainingDataset()
# self.training_dataset = DatasetFromFilename(X_train, y_train, transforms=self.train_transforms)
# X_valid, y_valid = splitter.getValidationDataset()
# self.validation_dataset = DatasetFromFilename(X_valid, y_valid, transforms=self.val_transforms)
train_dataset = ImageFolder(root=os.path.join(self.data_dir, 'train'), transform=self.train_transforms)
train_size = int(self.train_size * len(train_dataset))
val_size = len(train_dataset) - train_size
self.training_dataset, self.validation_dataset = random_split(train_dataset, [train_size, val_size])
if stage in (None, 'test'):
self.test_dataset = ImageFolder(root=os.path.join(self.data_dir, 'val'), transform=self.test_transforms)
def train_dataloader(self):
return DataLoader(self.training_dataset, batch_size=self.batch_size, shuffle=True, num_workers=self.num_workers)
def val_dataloader(self):
return DataLoader(self.validation_dataset, batch_size=self.batch_size, shuffle=False, num_workers=self.num_workers)
def test_dataloader(self):
return DataLoader(self.test_dataset, batch_size=5, shuffle=False, num_workers=self.num_workers)
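# Illustrative usage (hypothetical paths and transforms; assumes the standard
# layout with 'train' and 'val' subfolders under data_dir):
#
#     from torchvision import transforms
#     tfm = transforms.Compose([transforms.ToTensor()])
#     dm = TinyImagenetDataModule('data/tiny-imagenet-200', batch_size=64,
#                                 train_transforms=tfm, val_transforms=tfm,
#                                 test_transforms=tfm)
#     dm.setup('fit')
#     loader = dm.train_dataloader()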
|
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import tempfile
from conary import callbacks
from conary import conaryclient
from conary import versions
from conary.cmds import updatecmd
from conary.lib import log
from conary.local import database
from conary.repository import changeset
from conary.repository import errors
from conary.repository import filecontainer
class CheckinCallback(updatecmd.UpdateCallback, callbacks.ChangesetCallback):
def __init__(self, cfg=None):
updatecmd.UpdateCallback.__init__(self, cfg)
callbacks.ChangesetCallback.__init__(self)
def missingFiles(self, missingFiles):
print "Warning: The following files are missing:"
for mp in missingFiles:
print mp[4]
return True
def doCommit(cfg, changeSetFile, targetLabel):
client = conaryclient.ConaryClient(cfg)
repos = client.getRepos()
callback = CheckinCallback()
try:
cs = changeset.ChangeSetFromFile(changeSetFile)
except filecontainer.BadContainer:
log.error("invalid changeset %s", changeSetFile)
return 1
if cs.isLocal():
        if not targetLabel:
            log.error("committing local changesets requires a targetLabel")
            return 1
        label = versions.Label(targetLabel)
cs.setTargetShadow(repos, label)
commitCs = cs.makeAbsolute(repos)
(fd, changeSetFile) = tempfile.mkstemp()
os.close(fd)
commitCs.writeToFile(changeSetFile)
try:
# hopefully the file hasn't changed underneath us since we
# did the check at the top of doCommit(). We should probably
# add commitChangeSet method that takes a fd.
try:
repos.commitChangeSetFile(changeSetFile, callback=callback)
except errors.CommitError, e:
print e
finally:
if targetLabel:
os.unlink(changeSetFile)
def doLocalCommit(db, changeSetFile):
cs = changeset.ChangeSetFromFile(changeSetFile)
if not cs.isLocal():
log.error("repository changesets must be applied with update instead")
else:
db.commitChangeSet(cs, database.UpdateJob(db),
rollbackPhase = db.ROLLBACK_PHASE_LOCAL,
updateDatabase = False)
|
#!/usr/bin/env python
# encoding: utf-8
#
# The MIT License (MIT)
#
# Copyright (c) 2015 CNRS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# AUTHORS
# Hervé BREDIN -- http://herve.niderb.fr/
"""
MediaEval submission robot
Usage:
robot_submission [options]
Options:
-h --help Show this screen.
--version Show version.
--debug Show debug information.
--url=URL Submission server URL
[default: http://api.mediaeval.niderb.fr]
--password=P45sw0Rd Password.
--period=N Query queue every N seconds [default: 600].
--log=DIR Path to log directory.
"""
from common import RobotCamomile, create_logger
from docopt import docopt
arguments = docopt(__doc__, version='0.1')
url = arguments['--url']
password = arguments['--password']
period = int(arguments['--period'])
debug = arguments['--debug']
log = arguments['--log']
logger = create_logger('robot_submission', path=log, debug=debug)
robot = RobotCamomile(
url, 'robot_submission', password=password,
period=period, logger=logger)
submissionQueue = robot.getQueueByName('mediaeval.submission.in')
testCorpus = robot.getCorpusByName('mediaeval.test')
robot_evidence = robot.getUserByName('robot_evidence')
robot_label = robot.getUserByName('robot_label')
# forever loop on submission queue
for item in robot.dequeue_loop(submissionQueue):
id_evidence = item.id_evidence
id_label = item.id_label
# withdrawn submission
if hasattr(item, 'deletedBy'):
logger.info(
"del - {team:s}.{user:s} - {evidence:s}.{label:s}".format(
team=item.team, user=item.user,
evidence=id_evidence, label=id_label))
# look for the (label|evidence) copy of this submission
# and mark them as deleted
for labelLayer in robot.getLayers(
testCorpus, data_type="mediaeval.persondiscovery.label"):
# if this is not a copy or not the copy we are looking for, skip
description = labelLayer.description
if description.get('copy', None) != id_label:
continue
# if we reached this point, it means that we found the copy
# mark label layer copy as deleted
description['deleted'] = item
del description['copy']
robot.updateLayer(labelLayer._id, description=description)
# mark evidence layer copy as deleted
evidenceLayer = robot.getLayer(description.id_evidence)
description = evidenceLayer.description
description['deleted'] = item
del description['copy']
robot.updateLayer(evidenceLayer._id, description=description)
# no need to keep looking for the copy of the submission
break
# back to the top of the loop
continue
logger.info(
"new - {team:s}.{user:s} - {name:s} - {evidence:s}.{label:s}".format(
team=item.team, user=item.user, name=item.name,
evidence=id_evidence, label=id_label))
# duplicate evidence layer
try:
# in a try/except scope because it might have been deleted by now
evidence = robot.duplicate_layer(id_evidence, returns_id=False)
except Exception:
logger.error(
"error when create the copy of {evidence:s} ".format(
evidence=id_evidence))
continue
# duplicate label layer
try:
# in a try/except scope because it might have been deleted by now
label = robot.duplicate_layer(id_label, returns_id=False)
except Exception:
robot.deleteLayer(evidence._id)
logger.error(
"""error when create the copy of {label:s},
remove the copy of {evidence:s}""".format(
label=id_label, evidence=id_evidence))
continue
# update evidence --> label cross-reference
evidence.description.id_label = label._id
robot.updateLayer(evidence._id, description=evidence.description)
# update label --> evidence cross-reference
label.description.id_evidence = evidence._id
robot.updateLayer(label._id, description=label.description)
# give ADMIN permission to robot_evidence
robot.setLayerPermissions(evidence._id, robot.ADMIN, user=robot_evidence)
# give READ permission to robot_label
robot.setLayerPermissions(label._id, robot.READ, user=robot_label)
# give ADMIN permission to robot_evidence
# (allowing to later update the mapping)
robot.setLayerPermissions(label._id, robot.ADMIN, user=robot_evidence)
|
def factorial(num):
    if num == 1:
        return 1
    elif num == 0:
        # 0! is defined as 1
        return 1
    else:
        return num * factorial(num - 1)
def fibonacci(num):
if num < 1:
        print('Invalid input')
return -1
elif num == 1 or num == 2:
return 1
else:
return fibonacci(num - 1) + fibonacci(num - 2)
count = 0
def hanoi(n, x, y, z):
    global count
    count += 1
    print('count ==', count)
    if n == 1:
        print(x, '-->', z, count)
    else:
        hanoi(n-1, x, z, y)
        print(x, '-->', z, count)
        hanoi(n-1, y, x, z)
hanoi(18, 'x', 'y', 'z')
|
"""A module to store some results that are parsed from .txt files."""
import os
from configparser import ConfigParser
from types import SimpleNamespace
import pandas as pd
import numpy as np
from skm_pyutils.py_table import list_to_df
from dictances.bhattacharyya import bhattacharyya
from .main import main as ctrl_main
here = os.path.dirname(os.path.abspath(__file__))
def parse_cfg(name):
"""Parse the configs at configs/name."""
cfg_path = os.path.join(here, "..", "configs", name)
cfg = ConfigParser()
cfg.read(cfg_path)
return cfg
def df_from_dict(data, cols):
    """Form a dataframe from a dictionary; each key-value pair becomes a row with columns cols."""
    vals = []
    for k, v in data.items():
vals.append([k, v])
df = pd.DataFrame(vals, columns=cols)
return df
def store_region_results():
np.random.seed(42)
vals = []
names = [
"Tetrode CA3 CA1",
"MOp to SSp-ll",
"Figure 1 E",
"Max distance 3",
"Figure 1 A",
]
mean_vals = [
0.4248 / 5.0,
6.7371 / 79.0,
8.512 / 20.0,
8.86 / 25.0,
0.7340 / 3.0,
]
stats_vals = [
0.4117 / 5.0,
6.20478 / 79.0,
8.511 / 20.0,
9.27 / 25.0,
0.7346 / 3.0,
]
for i in range(len(names)):
vals.append([names[i], mean_vals[i], "Monte Carlo simulation"])
vals.append([names[i], stats_vals[i], "Statistical estimation"])
cols = ["Connectivity", "Expected proportion connected", "Calculation"]
df = pd.DataFrame(vals, columns=cols)
df.to_csv(
os.path.join(here, "..", "results", "exp_man.csv"),
index=False,
)
def store_tetrode_results():
np.random.seed(42)
args = SimpleNamespace(
max_depth=1,
num_cpus=1,
cfg="tetrode_ca3_ca1",
clt_start=30,
subsample_rate=0,
approx_hypergeo=False,
)
result = ctrl_main(parse_cfg("tetrode_ca3_ca1.cfg"), args)
df = df_from_dict(
result["mpf"]["total"],
cols=["Number of sampled connected neurons", "Probability"],
)
df.to_csv(
os.path.join(here, "..", "results", "tetrode_man.csv"),
index=False,
)
def store_tetrode_results_full():
np.random.seed(42)
args = SimpleNamespace(
max_depth=1,
num_cpus=1,
cfg="tetrode_ca3_ca1_full",
clt_start=30,
subsample_rate=0,
approx_hypergeo=False,
)
result = ctrl_main(parse_cfg("tetrode_ca3_ca1_full.cfg"), args)
df = df_from_dict(
result["mpf"]["total"],
cols=["Number of sampled connected neurons", "Probability"],
)
df.to_csv(
os.path.join(here, "..", "results", "tetrode_full.csv"),
index=False,
)
def store_tetrode_results_depth():
np.random.seed(42)
num_samples_range = np.arange(11)
res_list = []
headers = ["Number of samples", "Proportion of connections", "Max distance"]
for depth in (1, 2, 3):
for s in num_samples_range:
args = SimpleNamespace(
max_depth=depth,
num_cpus=1,
cfg="tetrode_ca3_ca1_full_stats",
clt_start=30,
subsample_rate=0,
approx_hypergeo=False,
)
cfg = parse_cfg("tetrode_ca3_ca1_full.cfg")
cfg["default"]["num_samples"] = f"[{s}, {s}]"
result = ctrl_main(cfg, args)
if s == 0:
exp = 0
else:
exp = result["mpf"]["expected"] / s
res_list.append([s, exp, depth])
df = list_to_df(res_list, headers=headers)
df.to_csv(
os.path.join(here, "..", "results", "samples_depth_ca3_ca1.csv"),
index=False,
)
def store_npix_results():
np.random.seed(42)
args = SimpleNamespace(
max_depth=1,
num_cpus=1,
cfg="ca3_ca1_probe_full",
clt_start=10,
subsample_rate=0.01,
approx_hypergeo=False,
)
result = ctrl_main(parse_cfg("probe_ca3_ca1_full.cfg"), args)
df = df_from_dict(
result["mpf"]["total"],
cols=["Number of sampled connected neurons", "Probability"],
)
df.to_csv(
os.path.join(here, "..", "results", "npix_probe_ca3_ca1.csv"),
index=False,
)
def store_sub_results():
np.random.seed(42)
configs = [
"probe_ca1_sub_full_high.cfg",
"probe_ca1_sub_full_med.cfg",
"probe_ca1_sub_full_low.cfg",
]
out_names = [
"20_sub_high.csv",
"20_sub_out.csv",
"20_sub_low.csv",
]
for cfg_name, name in zip(configs, out_names):
args = SimpleNamespace(
max_depth=1,
num_cpus=1,
cfg=cfg_name,
clt_start=30,
subsample_rate=0.01,
approx_hypergeo=False,
)
cfg = parse_cfg(cfg_name)
cfg["default"]["num_samples"] = "[79, 79]"
result = ctrl_main(cfg, args)
df = df_from_dict(
result["mpf"]["total"],
cols=["Number of sampled connected neurons", "Probability"],
)
df.to_csv(
os.path.join(here, "..", "results", name),
index=False,
)
def store_mouse_result():
dict_a = {
0: 0.097727929,
1: 0.073771117,
2: 0.09002461,
3: 0.098312369,
4: 0.097549236,
5: 0.090365064,
6: 0.079501495,
7: 0.067243357,
8: 0.055259551,
9: 0.044579481,
10: 0.035678545,
11: 0.02861985,
12: 0.02320665,
13: 0.019120951,
14: 0.016025042,
15: 0.013623944,
16: 0.01169026,
17: 0.010066428,
18: 0.00865235,
19: 0.007391084,
20: 0.006253293,
21: 0.005227931,
22: 0.004312818,
23: 0.003508591,
24: 0.002815313,
25: 0.002230014,
26: 0.001745977,
27: 0.001353389,
28: 0.001040298,
29: 0.000794062,
30: 0.000602467,
31: 0.000454566,
32: 0.000341045,
33: 0.0002543,
34: 0.000188282,
35: 0.000138257,
36: 0.000100549,
37: 7.23e-05,
38: 5.13e-05,
39: 3.59e-05,
40: 2.47e-05,
41: 1.66e-05,
42: 1.10e-05,
43: 7.07e-06,
44: 4.43e-06,
45: 2.69e-06,
}
dict_b = {
0: 0.0755,
1: 0.07796,
2: 0.09164,
3: 0.09948,
4: 0.09346,
5: 0.08536,
6: 0.07598,
7: 0.06552,
8: 0.05336,
9: 0.04534,
10: 0.03604,
11: 0.0309,
12: 0.02306,
13: 0.02202,
14: 0.0174,
15: 0.01518,
16: 0.0133,
17: 0.01316,
18: 0.01104,
19: 0.0089,
20: 0.00808,
21: 0.00752,
22: 0.00576,
23: 0.00448,
24: 0.00386,
25: 0.00368,
26: 0.00252,
27: 0.0019,
28: 0.00158,
29: 0.00144,
30: 0.0009,
31: 0.001,
32: 0.00066,
33: 0.00048,
34: 0.00042,
35: 0.00016,
36: 0.00022,
37: 0.00022,
38: 0.00018,
39: 0.00012,
40: 6.00e-05,
41: 4.00e-05,
42: 4.00e-05,
43: 6.00e-05,
45: 2.00e-05,
}
dist = bhattacharyya(dict_a, dict_b)
return dist
def main():
store_region_results()
store_tetrode_results()
store_npix_results()
store_sub_results()
store_mouse_result()
|
#!/usr/bin/env python3
from codekit import eups
def test_git_tag2eups_tag_prefix():
"""Numeric tags should be prefixed with 'v'"""
eups_tag = eups.git_tag2eups_tag('15')
assert eups_tag == 'v15'
# one `v` is enough
eups_tag = eups.git_tag2eups_tag('v15')
assert eups_tag == 'v15'
def test_git_tag2eups_tag_dots():
"""Dots `.` should be converted to underscores `_`"""
eups_tag = eups.git_tag2eups_tag('a.b')
assert eups_tag == 'a_b'
def test_git_tag2eups_tag_dashes():
"""Dashes `-` should be converted to underscores `_`"""
eups_tag = eups.git_tag2eups_tag('a-b')
assert eups_tag == 'a_b'
def test_git_tag2eups_tag_official():
"""Check mangling of official format git tags"""
eups_tag = eups.git_tag2eups_tag('16.0.0')
assert eups_tag == 'v16_0_0'
def test_git_tag2eups_tag_weekly():
"""Check mangling of weekly format git tags"""
eups_tag = eups.git_tag2eups_tag('w.2018.19')
assert eups_tag == 'w_2018_19'
def test_git_tag2eups_tag_daily():
"""Check mangling of daily format git tags"""
eups_tag = eups.git_tag2eups_tag('d.2018.05.30')
assert eups_tag == 'd_2018_05_30'
|
from azure.mgmt.dns import DnsManagementClient
from azure.identity import ClientSecretCredential, DefaultAzureCredential
from datetime import datetime
import argparse
import json
import os
parser = argparse.ArgumentParser(
description="Update Azure DNS record based on current public IP"
)
parser.add_argument("--config", help="Path to configuration file")
parser.add_argument("--subscription-id", help="Azure subscription ID")
parser.add_argument("--resource-group", help="Azure resource group name")
parser.add_argument("--zone", help="Azure DNS zone name")
parser.add_argument("--record", help="DNS record name to create/update")
parser.add_argument("--tenant-id", help="Azure tenant ID (or set AZURE_TENANT_ID)")
parser.add_argument(
"--client-id", help="Azure service principal client id (or set AZURE_CLIENT_ID)"
)
parser.add_argument(
"--client-secret",
help="Service principal client secret (or set AZURE_CLIENT_SECRET)",
)
args = parser.parse_args()
if args.config:
with open(args.config, "r") as config_file:
config = json.load(config_file)
else:
config = {
"subscriptionId": args.subscription_id,
"tenantId": args.tenant_id,
"clientId": args.client_id,
"clientSecret": args.client_secret,
"resourceGroup": args.resource_group,
"zoneName": args.zone,
"recordName": args.record,
}
if (
os.getenv("AZURE_TENANT_ID")
and os.getenv("AZURE_CLIENT_ID")
and os.getenv("AZURE_CLIENT_SECRET")
):
credentials = DefaultAzureCredential()
else:
credentials = ClientSecretCredential(
config["tenantId"], config["clientId"], config["clientSecret"]
)
def update_dns(ip: str):
dns_client = DnsManagementClient(
credentials, subscription_id=config["subscriptionId"]
)
record_set = dns_client.record_sets.create_or_update(
config["resourceGroup"],
config["zoneName"],
config["recordName"],
"A",
{
"ttl": 60,
"arecords": [{"ipv4_address": ip}],
"metadata": {
"createdBy": "azure-dyndns (python)",
"updated": datetime.now().strftime("%m/%d/%Y, %H:%M:%S"),
},
},
)
print(f"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}: {record_set.fqdn} - {ip} - {record_set.provisioning_state}")
def update_dnsv6(ipv6: str):
dns_client = DnsManagementClient(
credentials, subscription_id=config["subscriptionId"]
)
record_set = dns_client.record_sets.create_or_update(
config["resourceGroup"],
config["zoneName"],
config["recordName"],
"AAAA",
{
"ttl": 60,
"aaaarecords": [{"ipv6_address": ipv6}],
"metadata": {
"createdBy": "azure-dyndns (python)",
"updated": datetime.now().strftime("%m/%d/%Y, %H:%M:%S"),
},
},
)
print(f"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}: {record_set.fqdn} - {ipv6} - {record_set.provisioning_state}")
def get_external_ip():
import urllib3
client = urllib3.connection_from_url("https://ifconfig.me")
response = client.request("get", "/")
return response.data.decode("utf-8")
def get_external_ipv6():
from netifaces import interfaces, ifaddresses, AF_INET, AF_INET6
return ifaddresses('eth0').setdefault(AF_INET6, [{'addr':'No IP addr'}] )[1]['addr']
if __name__ == "__main__":
ip = get_external_ip()
update_dns(ip)
ipv6 = get_external_ipv6()
update_dnsv6(ipv6)
|
# Copyright (c) 2014-2021, Manfred Moitzi
# License: MIT License
from typing import BinaryIO, cast, TextIO, List, Optional
import zipfile
from contextlib import contextmanager
from ezdxf.lldxf.validator import is_dxf_stream, dxf_info
CRLF = b"\r\n"
LF = b"\n"
class ZipReader:
def __init__(self, zip_archive_name: str, errors="surrogateescape"):
if not zipfile.is_zipfile(zip_archive_name):
raise IOError(f"'{zip_archive_name}' is not a zip archive.")
self.zip_archive_name = zip_archive_name
self.zip_archive: Optional[zipfile.ZipFile] = None
self.dxf_file_name: Optional[str] = None
self.dxf_file: Optional[BinaryIO] = None
self.encoding = "cp1252"
self.errors = errors
self.dxfversion = "AC1009"
def open(self, dxf_file_name: str = None) -> None:
def open_dxf_file() -> BinaryIO:
# Open always in binary mode:
return cast(BinaryIO, self.zip_archive.open(self.dxf_file_name)) # type: ignore
self.zip_archive = zipfile.ZipFile(self.zip_archive_name)
self.dxf_file_name = (
dxf_file_name
if dxf_file_name is not None
else self.get_first_dxf_file_name()
)
self.dxf_file = open_dxf_file()
# Reading with standard encoding 'cp1252' - readline() fails if leading
        # comments contain non-ASCII characters.
if not is_dxf_stream(cast(TextIO, self)):
raise IOError(f"'{self.dxf_file_name}' is not a DXF file.")
self.dxf_file = open_dxf_file() # restart
self.get_dxf_info()
self.dxf_file = open_dxf_file() # restart
def get_first_dxf_file_name(self) -> str:
dxf_file_names = self.get_dxf_file_names()
if len(dxf_file_names) > 0:
return dxf_file_names[0]
else:
raise IOError("No DXF files found.")
def get_dxf_file_names(self) -> List[str]:
return [
name
for name in self.zip_archive.namelist() # type: ignore
if name.lower().endswith(".dxf")
]
def get_dxf_info(self) -> None:
info = dxf_info(cast(TextIO, self))
# Since DXF R2007 (AC1021) file encoding is always 'utf-8'
self.encoding = info.encoding if info.version < "AC1021" else "utf-8"
self.dxfversion = info.version
# Required TextIO interface
def readline(self) -> str:
next_line = self.dxf_file.readline().replace(CRLF, LF) # type: ignore
return str(next_line, self.encoding, self.errors)
def close(self) -> None:
self.zip_archive.close() # type: ignore
@contextmanager # type: ignore
def ctxZipReader( # type: ignore
zipfilename: str, filename: str = None, errors: str = "surrogateescape"
) -> ZipReader:
zip_reader = ZipReader(zipfilename, errors=errors)
zip_reader.open(filename)
yield zip_reader
zip_reader.close()
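# Hedged usage sketch (added for illustration, not part of the original module):
# drives ZipReader through the ctxZipReader context manager. "drawing.zip" is a
# hypothetical archive assumed to contain at least one .dxf file.
if __name__ == "__main__":
    with ctxZipReader("drawing.zip") as reader:
        print(f"DXF version: {reader.dxfversion}, encoding: {reader.encoding}")
        # read a few raw tag lines from the start of the stream
        for _ in range(4):
            print(reader.readline().rstrip())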
|
import get_covid19_data
import discord
from discord.ext import commands
import json, hcskr
import datetime
from variable import *
from embed.help_embed import *
import get_covid19_data
import hcskr
result = ''
class selfcheck(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command()
async def 자가진단(self,ctx):
await ctx.send(f"수동자가진단은 `{PREFIX}진단참여`을 이용해주시기 바랍니다.")
@commands.command()
async def 진단참여(self,ctx):
global result
user_id = str(ctx.author.id)
with open(JSON_FILE_NAME, "r",encoding="utf-8-sig") as json_file:
user_data=json.load(json_file)
if user_id in user_data.keys():
name = user_data[user_id]['name']
birth = user_data[user_id]["birth"]
area = user_data[user_id]["area"]
school_name = user_data[user_id]["school_name"]
school_type = user_data[user_id]["school_type"]
passward = user_data[user_id]["passward"]
print(f"수동진단참여 :[{name}]님의 자가진단 준비중")
try:
result = await hcskr.asyncSelfCheck(name,birth,area,school_name,school_type,passward)
err = False
except:
err = True
print(f"result : {result}, type : {type(result)}")
if "Cannot connect to host hcs.eduro.go.kr:443 ssl:True" in result['message'] or err == True:
print("err")
i=0
while True:
user = await self.bot.fetch_user(int(ADMIN_ID))
await user.send(f"무지성 트라이 {i+1}트 : {user_data[user_id]['name']}")
i+=1
try:
result = await hcskr.asyncSelfCheck(name,birth,area,school_name,school_type,passward)
except:
pass
if "Cannot connect to host hcs.eduro.go.kr:443 ssl:True" not in data['message'] or i>5:
print(result)
break
if result["code"]=="SUCCESS":
print("수동 자가진단 성공")
await ctx.send(f"[{result['regtime']}] 자가진단 완료!\n{result['message']}")
else:
print(f"수동 자가진단 실패 : {result}")
await ctx.send(f"[{datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}] 자가진단 실패!\n{result['message']}")
await ctx.send(f"입력된 정보가 정상인데도 자가진단 실패가 뜬다면 재시도 하시거나 관리자(white201#0201)에게 문의 부탁드립니다.")
else:
await ctx.send(f"유저 데이터에 등록된 정보가 없습니다. `{PREFIX}정보등록`으로 등록해주십시오.")
@commands.command()
async def 자가진단실시(self,ctx):
user = str(ctx.author.id)
with open(JSON_FILE_NAME, "r",encoding="utf-8-sig") as json_file:
user_data=json.load(json_file)
if user in user_data.keys():
user_data[user]["possible"] = True
with open(JSON_FILE_NAME, "w",encoding='UTF-8') as json_file:
json.dump(user_data,json_file,ensure_ascii = False, indent=4)
await ctx.send("자가진단이 내일부터 실시될 예정입니다.")
else:
await ctx.send("유저데이터에 해당 유저가 없습니다.")
@commands.command()
async def 자가진단중지(self,ctx):
user = str(ctx.author.id)
with open(JSON_FILE_NAME, "r",encoding="utf-8-sig") as json_file:
user_data=json.load(json_file)
if user in user_data.keys():
user_data[user]["possible"] = False
with open(JSON_FILE_NAME, "w",encoding='UTF-8') as json_file:
json.dump(user_data,json_file,ensure_ascii = False, indent=4)
await ctx.send("자가진단이 내일부터 실시되지 않을 예정입니다.")
else:
await ctx.send("유저데이터에 해당 유저가 없습니다.")
def setup(bot):
bot.add_cog(selfcheck(bot))
|
import json
from datetime import date
def calculate_age(born):
today = date.today()
return today.year - born.year - (
(today.month, today.day) < (born.month, born.day)
)
def filters_user(params):
kwargs = {}
if type(params) is str:
params = json.loads(params)
if 'same_sex' in params.keys():
kwargs['genre__name'] = params['same_sex']
if 'age' in params.keys():
kwargs['age'] = params['age']
if 'range_age' in params.keys():
age = int(params['range_age'])
kwargs['age__gte'] = age - 3
kwargs['age__lte'] = age + 3
if 'hobbies' in params.keys():
hobbies = params.getlist('hobbies')
hobbies = hobbies[0].split(',')
print(hobbies)
kwargs['hobbies__name__in'] = hobbies
return kwargs
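# Hedged usage sketch (illustration only): turning request parameters into ORM
# filter kwargs. Real requests pass a QueryDict (needed for getlist('hobbies'));
# a plain dict is enough for the simple filters. "Profile" is a hypothetical model.
if __name__ == "__main__":
    kwargs = filters_user({"same_sex": "female", "range_age": "30"})
    print(kwargs)  # {'genre__name': 'female', 'age__gte': 27, 'age__lte': 33}
    # in a view: matches = Profile.objects.filter(**kwargs)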
|
# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from awscli.customizations.cloudformation.package import PackageCommand
from awscli.customizations.cloudformation.deploy import DeployCommand
def initialize(cli):
"""
The entry point for CloudFormation high level commands.
"""
cli.register('building-command-table.cloudformation', inject_commands)
def inject_commands(command_table, session, **kwargs):
"""
Called when the CloudFormation command table is being built. Used to
inject new high level commands into the command list. These high level
commands must not collide with existing low-level API call names.
"""
command_table['package'] = PackageCommand(session)
command_table['deploy'] = DeployCommand(session)
|
# © 2021 Nokia
#
# Licensed under the Apache license, version 2.0
# SPDX-License-Identifier: Apache-2.0
"""Datacollector API resources."""
import json
from datetime import datetime
from flask import Flask, Response, jsonify, request
from datacollector.api.api_collector_handler import CollectorHandler
app = Flask(__name__)
collector_handler = CollectorHandler()
class InvalidParameterException(Exception):
"""Raised in case of invalid request body parameters."""
@app.route('/api/start', methods=['POST'])
def start_collector():
"""Endpoint for starting the collector. Route command to CollectorHandler object."""
run_id = None
try:
data = request.get_json()
start_delta = data['start']
stop_delta = data['stop']
interval = data['interval']
if ("id" in data) is True:
run_id = data['id']
if start_delta < 0 or stop_delta <= 0 or interval <= 0:
raise InvalidParameterException
except Exception as e:
return Response(json.dumps({"ret": "fail",
"message": 'Invalid request body. Example of a request: {"start": 60, '
'"stop": 120, "interval": 60}.'}), status=400)
if run_id is None:
run_id = collector_handler.create_run_id()
else:
date = datetime.utcnow()
run_id = run_id + "_" + datetime.strftime(date, "%Y-%m-%dT%H-%M-%S")
collector_handler.start_agent(start_delta, stop_delta, interval, run_id)
return Response(json.dumps({"ret": "ok", "message": "Parameters for starting the collector received.", "id": run_id}))
@app.route('/api/stop', methods=['POST'])
def stop_collector():
"""Endpoint for stopping the collector. Route command to CollectorHandler-object."""
try:
data = request.get_json()
run_id = data['id']
message = collector_handler.stop_agent(run_id)
return jsonify(message)
except Exception as e:
return jsonify(str(e))
@app.route('/api/results/collections', methods=['GET'])
def retrieve_collections():
"""Return names of existing collection runs."""
data = collector_handler.get_collection_names()
return Response(json.dumps({"ret": "ok", "message": "Names of collections retrieved successfully.", "data": data}))
@app.route('/api/results/collections/<run_id>/<host>', methods=['GET'])
def retrieve_collection_results(run_id, host):
"""Return results of a collection run using collection name and filename."""
data = collector_handler.get_collection_results(run_id, host)
return Response(json.dumps({"ret": "ok", "message": "Results for collection retrieved successfully.", "data": data}))
@app.route('/', methods=['GET'])
def default_get():
"""Entry point for the API."""
return Response('Datacollector API', 200)
def run():
"""Deploy the API."""
app.run(host='127.0.0.1', port=5000)
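# Hedged usage sketch (illustration only, not part of the original API): a tiny
# client for the endpoints above, assuming the service is already running on
# 127.0.0.1:5000 (for example via run() in another process).
def _example_client():
    import requests
    base = "http://127.0.0.1:5000"
    started = requests.post(f"{base}/api/start",
                            json={"start": 60, "stop": 120, "interval": 60}).json()
    print("started collection:", started.get("id"))
    print(requests.get(f"{base}/api/results/collections").json())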
|
import unittest
import parameterized
import numbers
import openproblems
from openproblems.test import utils
utils.ignore_numba_warnings()
@parameterized.parameterized.expand(
[
(task, dataset, method)
for task in openproblems.TASKS
for dataset in task.DATASETS
for method in task.METHODS
],
name_func=utils.name_test,
)
def test_method(task, dataset, method):
adata = dataset(test=True)
output = method(adata)
assert output is None
assert task.checks.check_method(adata)
for metric in task.METRICS:
m = metric(adata)
assert isinstance(m, numbers.Number)
@parameterized.parameterized.expand(
[(method,) for task in openproblems.TASKS for method in task.METHODS],
name_func=utils.name_test,
)
def test_method_metadata(method):
assert hasattr(method, "metadata")
for attr in [
"method_name",
"paper_name",
"paper_url",
"paper_year",
"code_url",
"code_version",
]:
assert attr in method.metadata
|
"""Shipping JPEG 2000 files.
These include:
nemo.jp2: converted from the original JPEG photo of the aftermath of NEMO,
the nor'easter that shut down Boston in February of 2013.
goodstuff.j2k: my favorite beverage.
"""
import pkg_resources
def nemo():
"""Shortcut for specifying path to nemo.jp2.
Returns
-------
file : str
Platform-independent path to nemo.jp2.
"""
filename = pkg_resources.resource_filename(__name__, "nemo.jp2")
return filename
def goodstuff():
"""Shortcut for specifying path to goodstuff.j2k.
Returns
-------
file : str
Platform-independent path to goodstuff.j2k.
"""
filename = pkg_resources.resource_filename(__name__, "goodstuff.j2k")
return filename
def jpxfile():
"""Shortcut for specifying path to heliov.jpx.
Returns
-------
file : str
        Platform-independent path to heliov.jpx.
"""
filename = pkg_resources.resource_filename(__name__, "heliov.jpx")
return filename
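# Hedged usage sketch (illustration only): how these shortcuts are typically
# consumed downstream, assuming this module ships inside the glymur package
# alongside the bundled files.
def _example_usage():
    import glymur
    jp2 = glymur.Jp2k(nemo())  # open the bundled nemo.jp2
    print(jp2.shape)           # image dimensions of the decoded JP2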
|
"""ResNets, implemented in PyTorch."""
# TODO: add Squeeze and Excitation module
from __future__ import division
__all__ = ['get_resnet', 'ResNetV1', 'ResNetV2',
'BasicBlockV1', 'BasicBlockV2',
'BottleneckV1', 'BottleneckV2',
'resnet18_v1', 'resnet34_v1', 'resnet50_v1', 'resnet101_v1', 'resnet152_v1',
'resnet18_v2', 'resnet34_v2', 'resnet50_v2', 'resnet101_v2', 'resnet152_v2',
]
from torch import nn
import torch.nn.functional as F
from model.module.basic import _conv3x3, _bn_no_affine
# -----------------------------------------------------------------------------
# BLOCKS & BOTTLENECK
# -----------------------------------------------------------------------------
class BasicBlockV1(nn.Module):
r"""BasicBlock V1 from `"Deep Residual Learning for Image Recognition"
<http://arxiv.org/abs/1512.03385>`_ paper.
This is used for ResNet V1 for 18, 34 layers.
Parameters
----------
in_channels : int
Number of input channels.
channels : int
Number of output channels.
stride : int
Stride size.
downsample : bool, default False
Whether to downsample the input.
last_gamma : bool, default False
Whether to initialize the gamma of the last BatchNorm layer in each bottleneck to zero.
"""
def __init__(self, in_channels, channels, stride, downsample=False,
last_gamma=False, **kwargs):
super(BasicBlockV1, self).__init__(**kwargs)
self.body = list()
self.body.append(_conv3x3(in_channels, channels, stride))
self.body.append(nn.BatchNorm2d(channels))
self.body.append(nn.ReLU(inplace=True))
self.body.append(_conv3x3(channels, channels, 1))
tmp_layer = nn.BatchNorm2d(channels)
if last_gamma:
nn.init.zeros_(tmp_layer.weight)
self.body.append(tmp_layer)
self.body = nn.Sequential(*self.body)
if downsample:
self.downsample = nn.Sequential(
nn.Conv2d(in_channels, channels, kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(channels)
)
else:
self.downsample = None
def forward(self, x):
residual = x
x = self.body(x)
if self.downsample:
residual = self.downsample(residual)
x = F.relu_(residual + x)
return x
class BottleneckV1(nn.Module):
r"""Bottleneck V1 from `"Deep Residual Learning for Image Recognition"
<http://arxiv.org/abs/1512.03385>`_ paper.
This is used for ResNet V1 for 50, 101, 152 layers.
Parameters
----------
in_channels : int
Number of input channels.
channels : int
Number of output channels.
stride : int
Stride size.
downsample : bool, default False
Whether to downsample the input.
last_gamma : bool, default False
Whether to initialize the gamma of the last BatchNorm layer in each bottleneck to zero.
"""
def __init__(self, in_channels, channels, stride, downsample=False,
last_gamma=False, **kwargs):
super(BottleneckV1, self).__init__(**kwargs)
self.body = list()
self.body.append(nn.Conv2d(in_channels, channels // 4, kernel_size=1, stride=stride))
self.body.append(nn.BatchNorm2d(channels // 4))
self.body.append(nn.ReLU(inplace=True))
self.body.append(_conv3x3(channels // 4, channels // 4, 1))
self.body.append(nn.BatchNorm2d(channels // 4))
self.body.append(nn.ReLU(inplace=True))
self.body.append(nn.Conv2d(channels // 4, channels, kernel_size=1, stride=1))
tmp_layer = nn.BatchNorm2d(channels)
if last_gamma:
nn.init.zeros_(tmp_layer.weight)
self.body.append(tmp_layer)
self.body = nn.Sequential(*self.body)
if downsample:
self.downsample = nn.Sequential(
nn.Conv2d(in_channels, channels, kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(channels)
)
else:
self.downsample = None
def forward(self, x):
residual = x
x = self.body(x)
if self.downsample:
residual = self.downsample(residual)
x = F.relu_(x + residual)
return x
class BasicBlockV2(nn.Module):
r"""BasicBlock V2 from
`"Identity Mappings in Deep Residual Networks"
<https://arxiv.org/abs/1603.05027>`_ paper.
This is used for ResNet V2 for 18, 34 layers.
Parameters
----------
in_channels : int
Number of input channels.
channels : int
Number of output channels.
stride : int
Stride size.
downsample : bool, default False
Whether to downsample the input.
last_gamma : bool, default False
Whether to initialize the gamma of the last BatchNorm layer in each bottleneck to zero.
"""
def __init__(self, in_channels, channels, stride, downsample=False,
last_gamma=False, **kwargs):
super(BasicBlockV2, self).__init__(**kwargs)
self.bn1 = nn.BatchNorm2d(in_channels)
self.conv1 = _conv3x3(in_channels, channels, stride)
self.bn2 = nn.BatchNorm2d(channels)
if last_gamma:
nn.init.zeros_(self.bn2.weight)
self.conv2 = _conv3x3(channels, channels, 1)
if downsample:
self.downsample = nn.Conv2d(in_channels, channels, 1, stride, bias=False)
else:
self.downsample = None
def forward(self, x):
residual = x
x = self.bn1(x)
x = F.relu_(x)
if self.downsample:
residual = self.downsample(x)
x = self.conv1(x)
x = self.bn2(x)
x = F.relu_(x)
x = self.conv2(x)
return x + residual
class BottleneckV2(nn.Module):
r"""Bottleneck V2 from
`"Identity Mappings in Deep Residual Networks"
<https://arxiv.org/abs/1603.05027>`_ paper.
This is used for ResNet V2 for 50, 101, 152 layers.
Parameters
----------
in_channels : int
Number of input channels.
channels : int
Number of output channels.
stride : int
Stride size.
downsample : bool, default False
Whether to downsample the input.
last_gamma : bool, default False
Whether to initialize the gamma of the last BatchNorm layer in each bottleneck to zero.
"""
def __init__(self, in_channels, channels, stride, downsample=False,
last_gamma=False, **kwargs):
super(BottleneckV2, self).__init__(**kwargs)
self.bn1 = nn.BatchNorm2d(in_channels)
self.conv1 = nn.Conv2d(in_channels, channels // 4, kernel_size=1, stride=1, bias=False)
self.bn2 = nn.BatchNorm2d(channels // 4)
self.conv2 = _conv3x3(channels // 4, channels // 4, stride)
self.bn3 = nn.BatchNorm2d(channels // 4)
if last_gamma:
nn.init.zeros_(self.bn3.weight)
self.conv3 = nn.Conv2d(channels // 4, channels, kernel_size=1, stride=1, bias=False)
if downsample:
self.downsample = nn.Conv2d(in_channels, channels, 1, stride, bias=False)
else:
self.downsample = None
def forward(self, x):
residual = x
x = self.bn1(x)
x = F.relu_(x)
if self.downsample:
residual = self.downsample(x)
x = self.conv1(x)
x = self.bn2(x)
x = F.relu_(x)
x = self.conv2(x)
x = self.bn3(x)
x = F.relu_(x)
x = self.conv3(x)
return x + residual
# -----------------------------------------------------------------------------
# NETS
# -----------------------------------------------------------------------------
class ResNetV1(nn.Module):
r"""ResNet V1 model from
`"Deep Residual Learning for Image Recognition"
<http://arxiv.org/abs/1512.03385>`_ paper.
Parameters
----------
block : nn.Module
Class for the residual block. Options are BasicBlockV1, BottleneckV1.
layers : list of int
Numbers of layers in each block
channels : list of int
Numbers of channels in each block. Length should be two larger than layers list.
classes : int, default 1000
Number of classification classes.
thumbnail : bool, default False
Enable thumbnail.
last_gamma : bool, default False
Whether to initialize the gamma of the last BatchNorm layer in each bottleneck to zero.
"""
def __init__(self, block, layers, channels, classes=1000, thumbnail=False,
last_gamma=False, **kwargs):
super(ResNetV1, self).__init__(**kwargs)
assert len(layers) == len(channels) - 2
self.features = list()
if thumbnail:
self.features.append(_conv3x3(channels[0], channels[1], 1))
else:
self.features.append(nn.Conv2d(channels[0], channels[1], 7, 2, 3, bias=False))
self.features.append(nn.BatchNorm2d(channels[1]))
self.features.append(nn.ReLU(inplace=True))
self.features.append(nn.MaxPool2d(3, 2, 1))
for i, num_layer in enumerate(layers):
stride = 1 if i == 0 else 2
self.features.append(self._make_layer(block, num_layer, channels[i + 1], channels[i + 2],
stride, last_gamma=last_gamma))
self.features = nn.Sequential(*self.features)
self.output = nn.Linear(channels[-1], classes)
def _make_layer(self, block, layers, in_channels, channels, stride, last_gamma=False):
layer = list()
layer.append(block(in_channels, channels, stride, channels != in_channels,
last_gamma=last_gamma))
for _ in range(layers - 1):
layer.append(block(channels, channels, 1, False, last_gamma=last_gamma))
return nn.Sequential(*layer)
def forward(self, x):
x = self.features(x)
x = F.adaptive_avg_pool2d(x, 1).squeeze_(3).squeeze_(2)
x = self.output(x)
return x
class ResNetV2(nn.Module):
r"""ResNet V2 model from
`"Identity Mappings in Deep Residual Networks"
<https://arxiv.org/abs/1603.05027>`_ paper.
Parameters
----------
block : nn.Module
        Class for the residual block. Options are BasicBlockV2, BottleneckV2.
layers : list of int
Numbers of layers in each block
channels : list of int
Numbers of channels in each block. Length should be two larger than layers list.
classes : int, default 1000
Number of classification classes.
thumbnail : bool, default False
Enable thumbnail.
last_gamma : bool, default False
Whether to initialize the gamma of the last BatchNorm layer in each bottleneck to zero.
"""
def __init__(self, block, layers, channels, classes=1000, thumbnail=False,
last_gamma=False, **kwargs):
super(ResNetV2, self).__init__(**kwargs)
assert len(layers) == len(channels) - 2
self.features = list()
self.features.append(_bn_no_affine(channels[0]))
if thumbnail:
self.features.append(_conv3x3(channels[0], channels[1], 1))
else:
self.features.append(nn.Conv2d(channels[0], channels[1], 7, 2, 3, bias=False))
self.features.append(nn.BatchNorm2d(channels[1]))
self.features.append(nn.ReLU(inplace=True))
self.features.append(nn.MaxPool2d(3, 2, 1))
for i, num_layer in enumerate(layers):
stride = 1 if i == 0 else 2
self.features.append(self._make_layer(block, num_layer, channels[i + 1], channels[i + 2],
stride, last_gamma=last_gamma))
self.features.append(nn.BatchNorm2d(channels[-1]))
self.features.append(nn.ReLU(inplace=True))
self.features = nn.Sequential(*self.features)
self.output = nn.Linear(channels[-1], classes)
def _make_layer(self, block, layers, in_channels, channels, stride, last_gamma=False):
layer = list()
layer.append(block(in_channels, channels, stride, channels != in_channels,
last_gamma=last_gamma))
for _ in range(layers - 1):
layer.append(block(channels, channels, 1, False, last_gamma=last_gamma))
return nn.Sequential(*layer)
def forward(self, x):
x = self.features(x)
x = F.adaptive_avg_pool2d(x, 1).squeeze_(3).squeeze_(2)
x = self.output(x)
return x
# -----------------------------------------------------------------------------
# Specification
# -----------------------------------------------------------------------------
resnet_spec = {18: ('basic_block', [2, 2, 2, 2], [3, 64, 64, 128, 256, 512]),
34: ('basic_block', [3, 4, 6, 3], [3, 64, 64, 128, 256, 512]),
50: ('bottle_neck', [3, 4, 6, 3], [3, 64, 256, 512, 1024, 2048]),
101: ('bottle_neck', [3, 4, 23, 3], [3, 64, 256, 512, 1024, 2048]),
152: ('bottle_neck', [3, 8, 36, 3], [3, 64, 256, 512, 1024, 2048])}
resnet_net_versions = [ResNetV1, ResNetV2]
resnet_block_versions = [{'basic_block': BasicBlockV1, 'bottle_neck': BottleneckV1},
{'basic_block': BasicBlockV2, 'bottle_neck': BottleneckV2}]
# -----------------------------------------------------------------------------
# Constructor
# -----------------------------------------------------------------------------
def get_resnet(version, num_layers, pretrained=None, **kwargs):
r"""ResNet V1 model from `"Deep Residual Learning for Image Recognition"
<http://arxiv.org/abs/1512.03385>`_ paper.
ResNet V2 model from `"Identity Mappings in Deep Residual Networks"
<https://arxiv.org/abs/1603.05027>`_ paper.
Parameters
----------
version : int
Version of ResNet. Options are 1, 2.
num_layers : int
Numbers of layers. Options are 18, 34, 50, 101, 152.
pretrained : str
default pretrained weights for model.
"""
assert num_layers in resnet_spec, \
"Invalid number of layers: %d. Options are %s" % (
num_layers, str(resnet_spec.keys()))
block_type, layers, channels = resnet_spec[num_layers]
assert 1 <= version <= 2, \
"Invalid resnet version: %d. Options are 1 and 2." % version
resnet_class = resnet_net_versions[version - 1]
block_class = resnet_block_versions[version - 1][block_type]
net = resnet_class(block_class, layers, channels, **kwargs)
if pretrained:
import torch
net.load_state_dict(torch.load(pretrained))
from data.imagenet import ImageNetAttr
attrib = ImageNetAttr()
net.synset = attrib.synset
net.classes = attrib.classes
net.classes_long = attrib.classes_long
return net
def resnet18_v1(**kwargs):
r"""ResNet-18 V1 model from `"Deep Residual Learning for Image Recognition"
<http://arxiv.org/abs/1512.03385>`_ paper.
Parameters
----------
pretrained : str
default pretrained weights for model.
"""
return get_resnet(1, 18, **kwargs)
def resnet34_v1(**kwargs):
r"""ResNet-34 V1 model from `"Deep Residual Learning for Image Recognition"
<http://arxiv.org/abs/1512.03385>`_ paper.
Parameters
----------
pretrained : str
default pretrained weights for model.
"""
return get_resnet(1, 34, **kwargs)
def resnet50_v1(**kwargs):
r"""ResNet-50 V1 model from `"Deep Residual Learning for Image Recognition"
<http://arxiv.org/abs/1512.03385>`_ paper.
Parameters
----------
pretrained : str
default pretrained weights for model.
"""
return get_resnet(1, 50, **kwargs)
def resnet101_v1(**kwargs):
r"""ResNet-101 V1 model from `"Deep Residual Learning for Image Recognition"
<http://arxiv.org/abs/1512.03385>`_ paper.
Parameters
----------
pretrained : str
default pretrained weights for model.
"""
return get_resnet(1, 101, **kwargs)
def resnet152_v1(**kwargs):
r"""ResNet-152 V1 model from `"Deep Residual Learning for Image Recognition"
<http://arxiv.org/abs/1512.03385>`_ paper.
Parameters
----------
pretrained : str
default pretrained weights for model.
"""
return get_resnet(1, 152, **kwargs)
def resnet18_v2(**kwargs):
r"""ResNet-18 V2 model from `"Identity Mappings in Deep Residual Networks"
<https://arxiv.org/abs/1603.05027>`_ paper.
Parameters
----------
pretrained : str
default pretrained weights for model.
"""
return get_resnet(2, 18, **kwargs)
def resnet34_v2(**kwargs):
r"""ResNet-34 V2 model from `"Identity Mappings in Deep Residual Networks"
<https://arxiv.org/abs/1603.05027>`_ paper.
Parameters
----------
pretrained : str
default pretrained weights for model.
"""
return get_resnet(2, 34, **kwargs)
def resnet50_v2(**kwargs):
r"""ResNet-50 V2 model from `"Identity Mappings in Deep Residual Networks"
<https://arxiv.org/abs/1603.05027>`_ paper.
Parameters
----------
pretrained : str
default pretrained weights for model.
"""
return get_resnet(2, 50, **kwargs)
def resnet101_v2(**kwargs):
r"""ResNet-101 V2 model from `"Identity Mappings in Deep Residual Networks"
<https://arxiv.org/abs/1603.05027>`_ paper.
Parameters
----------
pretrained : str
default pretrained weights for model.
"""
return get_resnet(2, 101, **kwargs)
def resnet152_v2(**kwargs):
r"""ResNet-152 V2 model from `"Identity Mappings in Deep Residual Networks"
<https://arxiv.org/abs/1603.05027>`_ paper.
Parameters
----------
pretrained : str
default pretrained weights for model.
"""
return get_resnet(2, 152, **kwargs)
if __name__ == '__main__':
import torch
a = torch.randn(2, 3, 224, 224)
net1 = resnet18_v1()
net2 = resnet18_v2()
net3 = resnet34_v1()
net4 = resnet34_v2()
net5 = resnet50_v1()
net6 = resnet50_v2()
net7 = resnet101_v1()
net8 = resnet101_v2()
net9 = resnet152_v1()
net10 = resnet152_v2()
with torch.no_grad():
net1(a)
net2(a)
net3(a)
net4(a)
net5(a)
net6(a)
net7(a)
net8(a)
net9(a)
net10(a)
|
from sqlalchemy.orm import Session
import random
class RoutedSession(Session):
def get_bind(self, mapper=None, clause=None):
if self._flushing:
engine = api().get_engine(True)
return engine
else:
engine = api().get_engine()
return engine
class RoutedSessionMaker(object):
Mode_RoundRobin = 1 << 0
Mode_Random = 1 << 1
    def __init__(self, balancing_mode=Mode_RoundRobin, engines=None):
        self._mode = balancing_mode
        # avoid the shared mutable default argument by copying into a fresh list
        self._engines = list(engines) if engines else []
self.last_master = 0
self.last_slave = 0
def add_engine(self, engine):
self._engines.append(engine)
def get_engine(self, flushing=False):
if flushing:
if self._mode == self.Mode_RoundRobin:
masters = [engine._engine for engine in self._engines if engine._type == RoutedEngine.Engine_Master]
if self.last_master + 1 >= len(masters):
self.last_master = 0
return masters[self.last_master]
else:
self.last_master += 1
return masters[self.last_master]
elif self._mode == self.Mode_Random:
return random.choice([engine._engine for engine in self._engines if engine._type == RoutedEngine.Engine_Master])
else:
if self._mode == self.Mode_RoundRobin:
slaves = [engine._engine for engine in self._engines if engine._type == RoutedEngine.Engine_Slave]
if self.last_slave + 1 >= len(slaves):
self.last_slave = 0
return slaves[self.last_slave]
else:
self.last_slave += 1
return slaves[self.last_slave]
elif self._mode == self.Mode_Random:
return random.choice([engine._engine for engine in self._engines if engine._type == RoutedEngine.Engine_Slave])
class RoutedEngine(object):
Engine_Slave = 1 << 0
Engine_Master = 1 << 1
def __init__(self, type=Engine_Slave, engine=None):
self._type = type
self._engine = engine
    def __repr__(self):
        prefix = 'Master: ' if self._type == RoutedEngine.Engine_Master else 'Slave: '
        return prefix + repr(self._engine)
__sessionmaker = RoutedSessionMaker()
def Configure(mode=RoutedSessionMaker.Mode_RoundRobin, engines=[]):
global __sessionmaker
__sessionmaker = RoutedSessionMaker(balancing_mode=mode, engines=engines)
def api():
global __sessionmaker
return __sessionmaker
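# Hedged usage sketch (illustration only): wiring two engines into the router and
# binding RoutedSession through SQLAlchemy's sessionmaker. The connection URLs
# are placeholders.
if __name__ == "__main__":
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker
    master = RoutedEngine(RoutedEngine.Engine_Master, create_engine("sqlite:///master.db"))
    slave = RoutedEngine(RoutedEngine.Engine_Slave, create_engine("sqlite:///slave.db"))
    Configure(mode=RoutedSessionMaker.Mode_RoundRobin, engines=[master, slave])
    Session = sessionmaker(class_=RoutedSession)
    session = Session()  # get_bind() sends flushes to masters and reads to slaves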
|
# ---------------------- Packages
from sqlalchemy import create_engine
from sqlalchemy_utils import database_exists, create_database
import pandas as pd
import psycopg2
# --------------------- Function
def eBayscrapper_remove_duplicates(phone_model):
"""Function to drop duplicate and null rows."""
try:
# PostgreSQL
# Connect to our database
DATABASE_URL = "postgres://isczffxjpjzpxr:41e6aaa55dd93e8ae680b5d6ab8eef4febc02f2a94b7c266dffce8ccea74c286@ec2-50-19-26-235.compute-1.amazonaws.com:5432/d64tko6dss9lgk"
engine = create_engine(DATABASE_URL)
conn = psycopg2.connect(DATABASE_URL, sslmode='require')
cursor = conn.cursor()
# Perform string manipulations to phone model name
s = phone_model
s = s.lower()
s = s.replace(' ', '_')
# Delete columns with null price, timestamp, user, and id values
cursor.execute('DELETE FROM ' + str(s) +
' WHERE (user IS NULL) OR (price IS NULL) OR (user_feedback IS NULL) OR (user_feedback_positive IS NULL) OR (id is NULL) OR (timestamp is NULL);')
# Commit changes
conn.commit()
# Delete duplicate values
cursor.execute('DELETE FROM ' + str(s) +
' x USING ' + str(s) + ' y WHERE x.index > y.index AND x.id = y.id;')
# Commit changes
conn.commit()
# Close cursor
cursor.close()
# Close connection
conn.close()
except:
# Close Cursor
cursor.close()
# Close Connection
conn.close()
print('Error in previous query')
return "The tables have been cleared"
# ---------------------- Instantiate Function
eBayscrapper_remove_duplicates(phone_model='iPhone 11')
eBayscrapper_remove_duplicates(phone_model='Apple iPhone SE')
eBayscrapper_remove_duplicates(phone_model='Samsung Galaxy Note 10')
eBayscrapper_remove_duplicates(phone_model='Samsung Galaxy S20 Ultra')
|
"""Implementation of the function command."""
from pathlib import Path
from typing import Union
from mcipc.rcon.client import Client
__all__ = ['function']
def function(self: Client, name: Union[Path, str]) -> str:
"""Runs the given function."""
return self.run('function', name)
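# Hedged usage sketch (illustration only): invoking the command over RCON,
# assuming the usual mcipc Client connect/login flow. Host, port, password and
# the datapack function name are placeholders.
if __name__ == '__main__':
    with Client('127.0.0.1', 25575) as client:
        client.login('secret')
        print(function(client, 'mypack:greet'))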
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-09-23 09:01
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Invoice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('number', models.CharField(max_length=17, unique=True, verbose_name='\u041d\u043e\u043c\u0435\u0440 \u0441\u0447\u0435\u0442\u0430')),
('description', models.CharField(max_length=256, verbose_name='\u041d\u0430\u0437\u043d\u0430\u0447\u0435\u043d\u0438\u0435 \u043f\u043b\u0430\u0442\u0435\u0436\u0430')),
('amount', models.DecimalField(decimal_places=2, max_digits=11, verbose_name='\u0421\u0443\u043c\u043c\u0430 \u043f\u043b\u0430\u0442\u0435\u0436\u0430, \u0437\u0430\u043a\u0430\u0437\u0430\u043d\u043d\u0430\u044f \u043f\u0440\u043e\u0434\u0430\u0432\u0446\u043e\u043c')),
('currency', models.CharField(max_length=3, verbose_name='\u0412\u0430\u043b\u044e\u0442\u0430 \u043f\u043b\u0430\u0442\u0435\u0436\u0430, \u0437\u0430\u043a\u0430\u0437\u0430\u043d\u043d\u0430\u044f \u043f\u0440\u043e\u0434\u0430\u0432\u0446\u043e\u043c')),
('paid_amount', models.DecimalField(decimal_places=2, max_digits=11, verbose_name='\u0421\u0443\u043c\u043c\u0430 \u043f\u043b\u0430\u0442\u0435\u0436\u0430 \u0432 \u0432\u0430\u043b\u044e\u0442\u0435, \u0432 \u043a\u043e\u0442\u043e\u0440\u043e\u0439 \u043f\u043e\u043a\u0443\u043f\u0430\u0442\u0435\u043b\u044c \u043f\u0440\u043e\u0438\u0437\u0432\u043e\u0434\u0438\u0442 \u043f\u043b\u0430\u0442\u0435\u0436')),
('paid_currency', models.CharField(max_length=3, verbose_name='\u0412\u0430\u043b\u044e\u0442\u0430, \u0432 \u043a\u043e\u0442\u043e\u0440\u043e\u0439 \u043f\u0440\u043e\u0438\u0437\u0432\u043e\u0434\u0438\u0442\u0441\u044f \u043f\u043b\u0430\u0442\u0435\u0436')),
('payment_method', models.CharField(max_length=50, verbose_name='\u0418\u0434\u0435\u043d\u0442\u0438\u0444\u0438\u043a\u0430\u0442\u043e\u0440 \u043f\u043b\u0430\u0442\u0435\u0436\u043d\u043e\u0439 \u0441\u0438\u0441\u0442\u0435\u043c\u044b, \u0432\u044b\u0431\u0440\u0430\u043d\u043d\u043e\u0439 \u043f\u043e\u043a\u0443\u043f\u0430\u0442\u0435\u043b\u0435\u043c')),
('payment_system', models.CharField(max_length=50, verbose_name='\u0418\u0434\u0435\u043d\u0442\u0438\u0444\u0438\u043a\u0430\u0442\u043e\u0440 \u043f\u043b\u0430\u0442\u0435\u0436\u043d\u043e\u0433\u043e \u043c\u0435\u0442\u043e\u0434\u0430')),
('payment_id', models.CharField(blank=True, max_length=50, null=True, verbose_name='\u041d\u043e\u043c\u0435\u0440 \u043f\u043b\u0430\u0442\u0435\u0436\u0430 \u0432 \u0441\u0438\u0441\u0442\u0435\u043c\u0435 PayMaster')),
('payer_id', models.CharField(blank=True, max_length=50, null=True, verbose_name='\u0418\u0434\u0435\u043d\u0442\u0438\u0444\u0438\u043a\u0430\u0442\u043e\u0440 \u043f\u043b\u0430\u0442\u0435\u043b\u044c\u0449\u0438\u043a\u0430 \u0432 \u043f\u043b\u0430\u0442\u0435\u0436\u043d\u043e\u0439 \u0441\u0438\u0441\u0442\u0435\u043c\u0435')),
('payment_date', models.DateTimeField(blank=True, null=True, verbose_name='\u0414\u0430\u0442\u0430 \u043f\u043b\u0430\u0442\u0435\u0436\u0430')),
('creation_date', models.DateTimeField(auto_now_add=True, verbose_name='\u0414\u0430\u0442\u0430 \u0441\u043e\u0437\u0434\u0430\u043d\u0438\u044f \u0437\u0430\u043f\u0438\u0441\u0438')),
('edition_date', models.DateTimeField(auto_now=True, verbose_name='\u0414\u0430\u0442\u0430 \u043f\u043e\u0441\u043b\u0435\u0434\u043d\u0435\u0433\u043e \u0438\u0437\u043c\u0435\u043d\u0435\u043d\u0438\u044f')),
],
options={
'verbose_name': '\u0421\u0447\u0435\u0442',
'verbose_name_plural': '\u0421\u0447\u0435\u0442\u0430',
},
),
]
|
# -*- coding: utf-8 -*-
# Python script to remove class from cs:style in dependents
# Author: Rintze M. Zelle
# Version: 2013-03-29
# * Requires lxml library (http://lxml.de/)
import os, glob, re, inspect
from lxml import etree
# http://stackoverflow.com/questions/50499
folderPath = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentFolderPath = os.path.dirname (folderPath)
path = os.path.join(parentFolderPath, 'styles')
styles = []
for stylepath in glob.glob( os.path.join(path, 'dependent', '*.csl') ):
styles.append(os.path.join(stylepath))
for style in styles:
parser = etree.XMLParser(remove_blank_text=True)
parsedStyle = etree.parse(style, parser)
styleElement = parsedStyle.getroot()
fixedStyle = False
if "class" in styleElement.attrib:
del styleElement.attrib["class"]
fixedStyle = True
    if not fixedStyle:
continue
try:
parsedStyle = etree.tostring(parsedStyle, pretty_print=True, xml_declaration=True, encoding="utf-8")
parsedStyle = parsedStyle.replace("'", '"', 4)
parsedStyle = parsedStyle.replace(" ", " ")#no-break space
parsedStyle = parsedStyle.replace("ᵉ", "ᵉ")
parsedStyle = parsedStyle.replace("‑", "‑")#non-breaking hyphen
parsedStyle = parsedStyle.replace("–", "–")#en dash
parsedStyle = parsedStyle.replace("—", "—")#em dash
f = open(style, 'w')
f.write ( parsedStyle )
f.close()
except:
pass
|
from .base import BaseRedisClient
class SMSCodeRedis(BaseRedisClient):
"""full key: sms_code:{key}"""
DB = 1
PREFIX_KEY = 'sms_code'
|
# pylint: disable=too-many-function-args
import os
import numpy as np
import pickle
from PIL import Image
import torchvision as V
from .. import configs
def getDataset(kind: str = 'classification'):
if kind == 'classification':
return __get_classification_dataset()
else:
raise NotImplementedError
def __get_classification_dataset():
'''
https://github.com/pytorch/examples/blob/master/imagenet/main.py
'''
normalize = V.transforms.Normalize(
mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
train = V.datasets.ImageFolder(
os.path.join(configs.ilsvrc.path, 'train'),
V.transforms.Compose([
V.transforms.RandomResizedCrop(224),
V.transforms.RandomHorizontalFlip(),
V.transforms.ToTensor(),
normalize,
]))
test = V.datasets.ImageFolder(
os.path.join(configs.ilsvrc.path, 'val-symlink'),
V.transforms.Compose([
V.transforms.Resize(256),
V.transforms.CenterCrop(224),
V.transforms.ToTensor(),
normalize,
]))
return (train, None, test)
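# Hedged usage sketch (illustration only): wrapping the training split in a
# DataLoader, assuming configs.ilsvrc.path points at an ImageNet-style layout
# with train/ and val-symlink/ subdirectories.
def _example_loader():
    import torch
    train, _, test = getDataset('classification')
    loader = torch.utils.data.DataLoader(train, batch_size=64, shuffle=True, num_workers=4)
    images, labels = next(iter(loader))
    print(images.shape, labels.shape)  # e.g. torch.Size([64, 3, 224, 224]) and torch.Size([64])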
|
#!/usr/bin/env python
from __future__ import print_function
import subprocess
import json
import os
import dateutil.parser
import requests
from datetime import datetime
from itertools import izip
import pytz
import argparse
parser = argparse.ArgumentParser(description="")
parser.add_argument('--all', action='store_true', dest='refresh_all')
args = parser.parse_args()
from tsd_tools import JWTWrapper
auto_token = JWTWrapper()
STORAGE_FORMAT = "./data/tsd.{trip_pk}.jsonl"
print("Fetching list of available TSDs ...")
items = json.loads(subprocess.check_output("ssh -Cqt rideserver@rideserver-backend-1 ./manage.py list_misclassifications --out=json", shell=True))
for item in items:
item['created'] = dateutil.parser.parse(item['created'])
path = STORAGE_FORMAT.format(**item)
item['local_path'] = path
if not args.refresh_all and os.path.isfile(path):
mtime_epoch = os.path.getmtime(path)
mdate = datetime.utcfromtimestamp(os.path.getmtime(path))
filedate = pytz.utc.localize(mdate)
try:
with open(path) as f:
json.load(f)
valid = True
except:
valid = False
if filedate > item['created'] and valid:
item['load'] = False
print("Got {} TSDs from server".format(len(items)))
items = [item for item in items if item.get('load', True)]
print("Fetching {} TSDs".format(len(items)))
def futures(session, items):
for item in items:
url = 'https://ride.report/__tools/inspect/tripsensordata_raw/{trip_pk}'.format(**item)
headers = {
'Authorization': 'JWT {}'.format(auto_token),
}
# TODO: write session wrapper that auto-refreshes jwt every few minutes
yield session.get(url, headers=headers)
from requests_futures.sessions import FuturesSession
with FuturesSession(max_workers=6) as session:
for future, item in izip(list(futures(session, items)), items):
response = future.result()
try:
response.raise_for_status()
except:
print('Failed on TSD {trip_pk}'.format(**item))
print(response.text)
continue
with open(item['local_path'], 'wb') as outfile:
outfile.write(response.text.encode('utf-8'))
# cmd = "ssh -Cqt rideserver@rideserver-backend-1 ./manage.py export_tsd {trip_pk}".format(**item)
# outfile = open(item['local_path'], 'wb')
# subprocess.call(cmd, shell=True, stdout=outfile)
print("Finished export TSD {trip_pk} {created}".format(**item))
|
# -*- coding: utf-8 -*-
from model.group import Group
from sys import maxsize
def test_test_add_group(app):
old_groups = app.group.get_group_list()
group = Group(name="Python Group", header="This is the Logo", footer="Here we have a group footer")
app.group.create_g(group)
assert len(old_groups) + 1 == app.group.count()
new_groups = app.group.get_group_list()
old_groups.append(group)
assert sorted(old_groups, key=Group.id_or_max) == sorted(new_groups, key=Group.id_or_max)
#def test_add_empty_group(app):
# old_groups = app.group.get_group_list()
# group = Group(name="Python Group", header="This is the Logo", footer="Here we have a group footer")
# app.group.create_g(group)
# new_groups = app.group.get_group_list()
# assert len(old_groups) + 1 == len(new_groups)
# old_groups.append(group)
# assert sorted(old_groups, key=Group.id_or_max) == sorted(new_groups, key=Group.id_or_max)
|
import networkx as nx
import matplotlib.pyplot as plt
# BA scale-free network
# generate a BA network with 20 nodes and attachment parameter m = 1
BA = nx.random_graphs.barabasi_albert_graph(20, 1)
# spring layout
pos = nx.spring_layout(BA)
nx.draw(BA, pos, with_labels = False, node_size = 30)
# nx.draw(BA,pos)
plt.show()
|
salario = float(input("Digite seu salário: "))
aumento = salario * 0.15
salarioAumento = salario + aumento
print(f'O valor do seu aumento é R${aumento} logo o valor do seu novo salário é R${salarioAumento}')
|
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
from mindspore import Tensor, nn
from mindspore.common import dtype as mstype
class CaseNet(nn.Cell):
def __init__(self):
super(CaseNet, self).__init__()
self.conv = nn.Conv2d(1, 3, 3)
self.relu = nn.ReLU()
self.softmax = nn.Softmax()
self.layers1 = (self.relu, self.softmax)
self.layers2 = (self.conv, self.relu)
def construct(self, x, index1, index2):
x = self.layers1[index1](x)
x = self.layers2[index2](x)
return x
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_switch_layer():
context.set_context(mode=context.GRAPH_MODE)
net = CaseNet()
data = Tensor(np.ones((1, 1, 224, 224)), mstype.float32)
idx = Tensor(0, mstype.int32)
idx2 = Tensor(-1, mstype.int32)
value = net(data, idx, idx2)
relu = nn.ReLU()
true_value = relu(data)
ret = np.allclose(value.asnumpy(), true_value.asnumpy())
assert ret
idx3 = Tensor(3, mstype.int32)
with pytest.raises(RuntimeError):
value = net(data, idx3, idx2)
|
class Node:
def __init__(self,data):
self.data = data
self.next = None
class LinkedList:
def __init__(self):
self.head = None
def print_llist(self):
if self.head == None:
print("Linked list is empty")
else:
temp = self.head
while temp != None:
print(temp.data,end="->")
temp = temp.next
def insert_start(self,data):
new_node = Node(data)
new_node.next = self.head
self.head = new_node
    def insert_end(self,data):
        new_node = Node(data)
        if self.head is None:
            self.head = new_node
            return
        temp = self.head
        while temp.next != None:
            temp = temp.next
        temp.next = new_node
    def insert_pos(self,data,prevnode):
        head = self.head
        while head:
            if head.data == prevnode:
                break
            head = head.next
        if head is None:
            print("Previous node", prevnode, "not found")
            return
        new_node = Node(data)
        new_node.next = head.next
        head.next = new_node
def reverse_ll(self):
prev = None
current = self.head
while current != None:
next = current.next
current.next = prev
prev = current
current = next
self.head = prev
# Driver's Code
LL = LinkedList()
LL.insert_start(10)
LL.insert_start(20)
LL.insert_start(30)
LL.insert_end("A")
LL.insert_end("B")
LL.insert_end("C")
LL.insert_start(40)
LL.insert_pos("a", "A")
LL.insert_pos("b","B")
print("Linked List:")
LL.print_llist()
print("\n")
print("Reversed Linked List:")
LL.reverse_ll()
LL.print_llist()
|
# Definition for a binary tree node.
from typing import Optional

class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right
class Solution:
def isCousins(self, root: Optional[TreeNode], x: int, y: int) -> bool:
# condition to be cousin: (1) diff.parents (2) same level
        queue = [(root, 0, -1)]  # BFS queue of (node, depth, parent value)
        xlevel, ylevel = -1, -1
        xparent, yparent = -1, -1
        while queue:
            cur, depth, parent = queue.pop(0)
            if cur.val == x:
                xlevel, xparent = depth, parent
            if cur.val == y:
                ylevel, yparent = depth, parent
            if cur.left:
                queue.append((cur.left, depth + 1, cur.val))
            if cur.right:
                queue.append((cur.right, depth + 1, cur.val))
        return xlevel == ylevel and xparent != yparent
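# Hedged usage sketch (illustration only): in the tree [1, 2, 3, None, 4, None, 5]
# the nodes 4 and 5 sit on the same level under different parents, so they are cousins.
if __name__ == "__main__":
    root = TreeNode(1, TreeNode(2, None, TreeNode(4)), TreeNode(3, None, TreeNode(5)))
    print(Solution().isCousins(root, 4, 5))  # True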
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Plan'
db.create_table('plans_plan', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('order', self.gf('django.db.models.fields.PositiveIntegerField')()),
('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
('description', self.gf('django.db.models.fields.TextField')(blank=True)),
('default', self.gf('django.db.models.fields.BooleanField')(default=False, db_index=True)),
('available', self.gf('django.db.models.fields.BooleanField')(default=False, db_index=True)),
('created', self.gf('django.db.models.fields.DateTimeField')(db_index=True)),
('customized', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
('url', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
))
db.send_create_signal('plans', ['Plan'])
# Adding model 'BillingInfo'
db.create_table('plans_billinginfo', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['auth.User'], unique=True)),
('tax_number', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=200, blank=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=200, db_index=True)),
('street', self.gf('django.db.models.fields.CharField')(max_length=200)),
('zipcode', self.gf('django.db.models.fields.CharField')(max_length=200)),
('city', self.gf('django.db.models.fields.CharField')(max_length=200)),
('country', self.gf('django_countries.fields.CountryField')(max_length=2)),
('shipping_name', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
('shipping_street', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
('shipping_zipcode', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
('shipping_city', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
))
db.send_create_signal('plans', ['BillingInfo'])
# Adding model 'UserPlan'
db.create_table('plans_userplan', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['auth.User'], unique=True)),
('plan', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['plans.Plan'])),
('expire', self.gf('django.db.models.fields.DateField')(default=None, null=True, db_index=True, blank=True)),
('active', self.gf('django.db.models.fields.BooleanField')(default=True, db_index=True)),
))
db.send_create_signal('plans', ['UserPlan'])
# Adding model 'Pricing'
db.create_table('plans_pricing', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
('period', self.gf('django.db.models.fields.PositiveIntegerField')(default=30, null=True, db_index=True, blank=True)),
('url', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
))
db.send_create_signal('plans', ['Pricing'])
# Adding model 'Quota'
db.create_table('plans_quota', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('order', self.gf('django.db.models.fields.PositiveIntegerField')()),
('codename', self.gf('django.db.models.fields.CharField')(unique=True, max_length=50, db_index=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
('unit', self.gf('django.db.models.fields.CharField')(max_length=100, blank=True)),
('description', self.gf('django.db.models.fields.TextField')(blank=True)),
('is_boolean', self.gf('django.db.models.fields.BooleanField')(default=False)),
('url', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
))
db.send_create_signal('plans', ['Quota'])
# Adding model 'PlanPricing'
db.create_table('plans_planpricing', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('plan', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['plans.Plan'])),
('pricing', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['plans.Pricing'])),
('price', self.gf('django.db.models.fields.DecimalField')(max_digits=7, decimal_places=2, db_index=True)),
))
db.send_create_signal('plans', ['PlanPricing'])
# Adding model 'PlanQuota'
db.create_table('plans_planquota', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('plan', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['plans.Plan'])),
('quota', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['plans.Quota'])),
('value', self.gf('django.db.models.fields.IntegerField')(default=1, null=True, blank=True)),
))
db.send_create_signal('plans', ['PlanQuota'])
# Adding model 'Order'
db.create_table('plans_order', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('flat_name', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
('plan', self.gf('django.db.models.fields.related.ForeignKey')(related_name='plan_order', to=orm['plans.Plan'])),
('pricing', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['plans.Pricing'], null=True, blank=True)),
('created', self.gf('django.db.models.fields.DateTimeField')(db_index=True)),
('completed', self.gf('django.db.models.fields.DateTimeField')(db_index=True, null=True, blank=True)),
('amount', self.gf('django.db.models.fields.DecimalField')(max_digits=7, decimal_places=2, db_index=True)),
('tax', self.gf('django.db.models.fields.DecimalField')(db_index=True, null=True, max_digits=4, decimal_places=2, blank=True)),
('currency', self.gf('django.db.models.fields.CharField')(default='EUR', max_length=3)),
('status', self.gf('django.db.models.fields.IntegerField')(default=1)),
('order_id', self.gf('django.db.models.fields.CharField')(max_length=40, unique=True, null=True, blank=True)),
))
db.send_create_signal('plans', ['Order'])
# Adding model 'Invoice'
db.create_table('plans_invoice', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('order', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['plans.Order'])),
('number', self.gf('django.db.models.fields.IntegerField')(db_index=True)),
('full_number', self.gf('django.db.models.fields.CharField')(max_length=200)),
('type', self.gf('django.db.models.fields.IntegerField')(default=1, db_index=True)),
('issued', self.gf('django.db.models.fields.DateField')(db_index=True)),
('issued_duplicate', self.gf('django.db.models.fields.DateField')(db_index=True, null=True, blank=True)),
('selling_date', self.gf('django.db.models.fields.DateField')(db_index=True, null=True, blank=True)),
('payment_date', self.gf('django.db.models.fields.DateField')(db_index=True)),
('unit_price_net', self.gf('django.db.models.fields.DecimalField')(max_digits=7, decimal_places=2)),
('quantity', self.gf('django.db.models.fields.IntegerField')(default=1)),
('total_net', self.gf('django.db.models.fields.DecimalField')(max_digits=7, decimal_places=2)),
('total', self.gf('django.db.models.fields.DecimalField')(max_digits=7, decimal_places=2)),
('tax_total', self.gf('django.db.models.fields.DecimalField')(max_digits=7, decimal_places=2)),
('tax', self.gf('django.db.models.fields.DecimalField')(db_index=True, null=True, max_digits=4, decimal_places=2, blank=True)),
('rebate', self.gf('django.db.models.fields.DecimalField')(default='0', max_digits=4, decimal_places=2)),
('currency', self.gf('django.db.models.fields.CharField')(default='EUR', max_length=3)),
('item_description', self.gf('django.db.models.fields.CharField')(max_length=200)),
('buyer_name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('buyer_street', self.gf('django.db.models.fields.CharField')(max_length=200)),
('buyer_zipcode', self.gf('django.db.models.fields.CharField')(max_length=200)),
('buyer_city', self.gf('django.db.models.fields.CharField')(max_length=200)),
('buyer_country', self.gf('django_countries.fields.CountryField')(default='PL', max_length=2)),
('buyer_tax_number', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
('shipping_name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('shipping_street', self.gf('django.db.models.fields.CharField')(max_length=200)),
('shipping_zipcode', self.gf('django.db.models.fields.CharField')(max_length=200)),
('shipping_city', self.gf('django.db.models.fields.CharField')(max_length=200)),
('shipping_country', self.gf('django_countries.fields.CountryField')(default='PL', max_length=2)),
('require_shipment', self.gf('django.db.models.fields.BooleanField')(default=False, db_index=True)),
('issuer_name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('issuer_street', self.gf('django.db.models.fields.CharField')(max_length=200)),
('issuer_zipcode', self.gf('django.db.models.fields.CharField')(max_length=200)),
('issuer_city', self.gf('django.db.models.fields.CharField')(max_length=200)),
('issuer_country', self.gf('django_countries.fields.CountryField')(default='PL', max_length=2)),
('issuer_tax_number', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
))
db.send_create_signal('plans', ['Invoice'])
def backwards(self, orm):
# Deleting model 'Plan'
db.delete_table('plans_plan')
# Deleting model 'BillingInfo'
db.delete_table('plans_billinginfo')
# Deleting model 'UserPlan'
db.delete_table('plans_userplan')
# Deleting model 'Pricing'
db.delete_table('plans_pricing')
# Deleting model 'Quota'
db.delete_table('plans_quota')
# Deleting model 'PlanPricing'
db.delete_table('plans_planpricing')
# Deleting model 'PlanQuota'
db.delete_table('plans_planquota')
# Deleting model 'Order'
db.delete_table('plans_order')
# Deleting model 'Invoice'
db.delete_table('plans_invoice')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'plans.billinginfo': {
'Meta': {'object_name': 'BillingInfo'},
'city': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'shipping_city': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'shipping_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'shipping_street': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'shipping_zipcode': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'street': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'tax_number': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'plans.invoice': {
'Meta': {'object_name': 'Invoice'},
'buyer_city': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'buyer_country': ('django_countries.fields.CountryField', [], {'default': "'PL'", 'max_length': '2'}),
'buyer_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'buyer_street': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'buyer_tax_number': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'buyer_zipcode': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'EUR'", 'max_length': '3'}),
'full_number': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issued': ('django.db.models.fields.DateField', [], {'db_index': 'True'}),
'issued_duplicate': ('django.db.models.fields.DateField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'issuer_city': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'issuer_country': ('django_countries.fields.CountryField', [], {'default': "'PL'", 'max_length': '2'}),
'issuer_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'issuer_street': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'issuer_tax_number': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'issuer_zipcode': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'item_description': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'number': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['plans.Order']"}),
'payment_date': ('django.db.models.fields.DateField', [], {'db_index': 'True'}),
'quantity': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'rebate': ('django.db.models.fields.DecimalField', [], {'default': "'0'", 'max_digits': '4', 'decimal_places': '2'}),
'require_shipment': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'selling_date': ('django.db.models.fields.DateField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'shipping_city': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'shipping_country': ('django_countries.fields.CountryField', [], {'default': "'PL'", 'max_length': '2'}),
'shipping_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'shipping_street': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'shipping_zipcode': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'tax': ('django.db.models.fields.DecimalField', [], {'db_index': 'True', 'null': 'True', 'max_digits': '4', 'decimal_places': '2', 'blank': 'True'}),
'tax_total': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
'total': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
'total_net': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '1', 'db_index': 'True'}),
'unit_price_net': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'plans.order': {
'Meta': {'ordering': "('-created',)", 'object_name': 'Order'},
'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2', 'db_index': 'True'}),
'completed': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'EUR'", 'max_length': '3'}),
'flat_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'plan_order'", 'to': "orm['plans.Plan']"}),
'pricing': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['plans.Pricing']", 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'tax': ('django.db.models.fields.DecimalField', [], {'db_index': 'True', 'null': 'True', 'max_digits': '4', 'decimal_places': '2', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'plans.plan': {
'Meta': {'ordering': "('order',)", 'object_name': 'Plan'},
'available': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'customized': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'default': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {}),
'quotas': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['plans.Quota']", 'through': "orm['plans.PlanQuota']", 'symmetrical': 'False'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
},
'plans.planpricing': {
'Meta': {'ordering': "('pricing__period',)", 'object_name': 'PlanPricing'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['plans.Plan']"}),
'price': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2', 'db_index': 'True'}),
'pricing': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['plans.Pricing']"})
},
'plans.planquota': {
'Meta': {'object_name': 'PlanQuota'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['plans.Plan']"}),
'quota': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['plans.Quota']"}),
'value': ('django.db.models.fields.IntegerField', [], {'default': '1', 'null': 'True', 'blank': 'True'})
},
'plans.pricing': {
'Meta': {'ordering': "('period',)", 'object_name': 'Pricing'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'period': ('django.db.models.fields.PositiveIntegerField', [], {'default': '30', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
},
'plans.quota': {
'Meta': {'ordering': "('order',)", 'object_name': 'Quota'},
'codename': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_boolean': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {}),
'unit': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
},
'plans.userplan': {
'Meta': {'object_name': 'UserPlan'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'expire': ('django.db.models.fields.DateField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['plans.Plan']"}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['plans']
|
# ------------------------------------------------------------------------
# Class for reading/writing files
# ------------------------------------------------------------------------
import bpy
import csv
from os import listdir
from os.path import isfile, join
from . import calc
from . import data
# ------------------------------------------------------------------------
# Helper functions
# ------------------------------------------------------------------------
# turns a number string into a float
def str_to_float(string: str) -> float:
string = string.strip()
if not len(string):
return 0.0
return float(string)
# ------------------------------------------------------------------------
# Camera GUI Parameters IO
# ------------------------------------------------------------------------
# writes camera parameters to csv file at specified location
def write_cam_params(filepath: str):
cg = bpy.data.scenes[0].camera_generator
# create/open file and save parameters to it
with open(filepath, 'w', newline='') as csvfile:
writer = csv.writer(csvfile, delimiter=';', quotechar='&', quoting=csv.QUOTE_MINIMAL)
lens_file = ""
for objective_entry in data.objective_list:
if objective_entry[0] == cg.prop_objective_list:
lens_file = objective_entry[2]
break
writer.writerow(['objective_file_name', lens_file])
writer.writerow(['prop_objective_scale', cg.prop_objective_scale])
writer.writerow(['prop_lens_creation_method', cg.prop_lens_creation_method])
writer.writerow(['prop_lens_patch_size', cg.prop_lens_patch_size])
writer.writerow(['prop_vertex_count_radial', cg.prop_vertex_count_radial])
writer.writerow(['prop_vertex_count_height', cg.prop_vertex_count_height])
writer.writerow(['prop_aperture_blades', cg.prop_aperture_blades])
writer.writerow(['prop_aperture_size', cg.prop_aperture_size])
writer.writerow(['prop_aperture_angle', cg.prop_aperture_angle])
writer.writerow(['prop_sensor_width', cg.prop_sensor_width])
writer.writerow(['prop_sensor_height', cg.prop_sensor_height])
writer.writerow(['prop_pixel_size', cg.prop_pixel_size])
writer.writerow(['prop_wavelength', cg.prop_wavelength])
writer.writerow(['prop_focal_distance', cg.prop_focal_distance])
writer.writerow(['prop_sensor_mainlens_distance', cg.prop_sensor_mainlens_distance])
writer.writerow(['prop_mla_enabled', cg.prop_mla_enabled])
writer.writerow(['prop_mla_type', cg.prop_mla_type])
writer.writerow(['prop_microlens_diam', cg.prop_microlens_diam])
writer.writerow(['prop_mla_sensor_dist', cg.prop_mla_sensor_dist])
writer.writerow(['prop_three_ml_types', cg.prop_three_ml_types])
writer.writerow(['prop_ml_type_1_f', cg.prop_ml_type_1_f])
writer.writerow(['prop_ml_type_2_f', cg.prop_ml_type_2_f])
writer.writerow(['prop_ml_type_3_f', cg.prop_ml_type_3_f])
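# The resulting file is a ';'-delimited CSV with one "parameter_name;value" row per property;
# read_cam_params below parses it back (note that the objective is stored by its file name).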
# reads camera parameters from csv file at specified location
def read_cam_params(filepath: str):
cg = bpy.data.scenes[0].camera_generator
# open file and load parameters
read_data = []
with open(filepath, newline='') as csvfile:
reader = csv.reader(csvfile, delimiter=';', quotechar='&')
for row in reader:
read_data.append(row)
if read_data[0][0] != 'prop_objective_list':
objective_file_name = read_data[0][1]
read_data[0] = []
for objective_entry in data.objective_list:
if objective_entry[2] == objective_file_name:
read_data[0] = ['prop_objective_list', objective_entry[0]]
break
if len(read_data[0]) < 1:
print('Could not find objective in list.')
return
for read_property in read_data:
        # check the property type and cast the value accordingly before setting it
property_type = type(getattr(cg, read_property[0]))
if property_type == float:
setattr(cg, read_property[0], float(read_property[1]))
elif property_type == int:
setattr(cg, read_property[0], int(read_property[1]))
elif property_type == bool:
setattr(cg, read_property[0], (read_property[1] == 'True'))
else:
setattr(cg, read_property[0], read_property[1])
# ------------------------------------------------------------------------
# Lenses IO
# ------------------------------------------------------------------------
# reads lens parameters from csv file
def read_lens_file(filepath: str):
objective = []
reader = csv.reader(open(filepath, 'r'), delimiter=';')
glass_data_known = True
for row_idx, row in enumerate(reader):
# ignore the first line since it contains a parameter description
if row_idx < 1:
continue
ior = str_to_float(row[3])
if ior == 0.0:
ior = 1.0
# get objective scale
scale = bpy.data.scenes[0].camera_generator.prop_objective_scale
# add leading zero for surface names
name_part = "_"
if len(objective) < 10:
name_part = "_0"
objective.append({
'radius': scale * str_to_float(row[0]) / 1000,
'thickness': scale * str_to_float(row[1]) / 1000,
'material': row[2].strip(),
'ior': ior,
'ior_wavelength': ior,
'ior_ratio': ior,
'semi_aperture': scale * str_to_float(row[5]) / 1000,
'position': 0.0,
'name': "Surface"+name_part+str(len(objective)+1)+"_"+row[2].strip()
})
lens = objective[len(objective)-1]
if lens['material'] != 'air' and lens['material'] != 'Air':
            if calc.ior(lens['material'], 0.5) is None:
glass_data_known = False
return objective, glass_data_known
# loads the lens file matching the objective selected in the GUI from the given directory
def load_lens_file(lens_directory):
cg = bpy.data.scenes[0].camera_generator
objective_id = int(cg.prop_objective_list[10:])
# create a list of available lens files
lensfiles = [f for f in listdir(lens_directory) if isfile(join(lens_directory, f))]
lensfiles.sort()
file = ''
for counter, lensfile in enumerate(lensfiles):
# check if file ends with .csv
file_ending = lensfile[-3:]
if file_ending == 'csv' and counter == objective_id:
file = lensfile
break
# read lens parameters
return read_lens_file(join(lens_directory,file))
# read dispersion parameters for Sellmeier and Cauchy equation from given files
def read_dispersion_data(dispersion_file: str):
# Sellmeier type data:
dispersion_data = {}
with open(dispersion_file, newline='') as csvfile:
reader = csv.reader(csvfile, delimiter=',', quotechar='&')
for row in reader:
dispersion_data[row[0]] = (float(row[1]),float(row[2]),float(row[3]),float(row[4]),float(row[5]),float(row[6]),float(row[7]))
return dispersion_data
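# Assumed CSV layout (inferred from the parsing above, not specified in the original):
# each row holds a material name followed by seven numeric dispersion coefficients, e.g.
#   <material_name>,c1,c2,c3,c4,c5,c6,c7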
# ------------------------------------------------------------------------
# Additional Blender resources IO
# ------------------------------------------------------------------------
# loads raw camera model and materials from given resource file
def load_basic_camera(path: str):
bpy.context.view_layer.active_layer_collection = bpy.context.view_layer.layer_collection
for filename in ['Camera Collection', 'Glass Material', 'MLA Hex Material', 'Calibration Pattern Material']:
bpy.ops.wm.append(filename=filename, directory=f'{path}resources.blend/{filename.split()[-1]}')
for materials in ['Glass Material', 'MLA Hex Material', 'MLA Rect Material', 'Calibration Pattern Material']:
bpy.data.materials[materials].use_fake_user = True
bpy.context.view_layer.active_layer_collection = bpy.context.view_layer.layer_collection.children['Camera Collection']
|
from zipline.api import attach_pipeline, pipeline_output
from zipline.pipeline import Pipeline
from zipline.pipeline.data import USEquityPricing
from zipline.pipeline.factors import SimpleMovingAverage
def initialize(context):
pipe = Pipeline()
attach_pipeline(pipe, 'example')
sma_short = SimpleMovingAverage(inputs=[USEquityPricing.close], window_length=30)
sma_long = SimpleMovingAverage(inputs=[USEquityPricing.close], window_length=100)
    # Combine factors to create a new factor
sma_val = sma_short/sma_long
# Create and apply a screen to remove penny stocks
remove_penny_stocks = sma_short > 1.0
pipe.set_screen(remove_penny_stocks)
pipe.add(sma_short, 'sma_short')
pipe.add(sma_long, 'sma_long')
pipe.add(sma_val, 'sma_val')
# Rank a factor using a mask to ignore the values we're
# filtering out by passing mask=remove_penny_stocks to rank.
pipe.add(sma_val.rank(mask=remove_penny_stocks), 'sma_rank')
def before_trading_start(context, data):
context.output = pipeline_output('example')
# Set the list of securities to short
context.short_list = context.output.sort(['sma_rank'], ascending=True).iloc[:200]
# Set the list of securities to long
context.long_list = context.output.sort(['sma_rank'], ascending=True).iloc[-200:]
# Update your universe with the SIDs of long and short securities
update_universe(context.long_list.index.union(context.short_list.index))
def handle_data(context, data):
    print("SHORT LIST")
    log.info("\n" + str(context.short_list.sort(['sma_rank'], ascending=True).head()))
    print("LONG LIST")
    log.info("\n" + str(context.long_list.sort(['sma_rank'], ascending=False).head()))
|
from time import sleep
from PyQt5.QtCore import QThread, pyqtSlot, pyqtSignal
from PyQt5.QtWidgets import QComboBox
class TSpacecraftSelect(QThread):
selection_changed = pyqtSignal(str, int)
def __init__(self):
super().__init__()
self.spacecraftCBs = []
def add_sc(self, combobox: QComboBox):
self.spacecraftCBs.append(combobox)
@pyqtSlot()
def run(self):
img_src = ""
        while True:
            for idx, item in enumerate(self.spacecraftCBs):
if item.currentText() == "SILVER_X 177p":
img_src = "images/silver.png"
elif item.currentText() == "purpleZ AAx9":
img_src = "images/purple.png"
elif item.currentText() == "military-aircraft-POWER":
img_src = "images/military.png"
elif item.currentText() == "SpaceX-air4p66":
img_src = "images/spacex.png"
self.selection_changed.emit(img_src, idx)
sleep(0.05)
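# Hypothetical usage sketch (widget and label names are assumptions, not part of the original):
# selector = TSpacecraftSelect()
# selector.add_sc(main_window.spacecraft_combobox)
# selector.selection_changed.connect(
#     lambda img_path, idx: preview_labels[idx].setPixmap(QPixmap(img_path)))
# selector.start()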
|
# pyright: reportMissingImports=false
import os
import clr
import sys
import time
import platform
from PIL import Image
from .CypressFX import FX2
from .definitions import RSC_DIR, LIB_DIR, LED_WHITE
# load appropriate dll
(bits, linkage) = platform.architecture()
if bits == "64bit":
sys.path.append(os.path.join(LIB_DIR, 'x64'))
else:
sys.path.append(os.path.join(LIB_DIR, 'x86'))
LUMAUSB_DLL = clr.AddReference('LumaUSB')
from LumaUSB_ns import LumaUSB
from LumaUSB_ns import VideoParameters
class CameraError(Exception):
pass
class LumaScope(object):
'''
This class provides access to the microscope and LEDs. For interacting with
the stage, including focusing, see :class:`EtalumaStage`.
    If the microscope is in an uninitialized state, i.e., its firmware has not
    been uploaded, the firmware will be uploaded upon instantiation.
    This class supports the context manager protocol, and thus the following code
    will ensure that resources are properly disposed of::
with LumaScope() as lumascope:
# perform microscope operations
Although the camera can also be closed manually using the :meth:`close`
method::
lumascope = LumaScope()
try:
        # perform microscope operations
finally:
lumascope.close()
For convenience, this object can be instantiated using the following
optional parameters:
* *resolution* -- the resolution of the image. See :attr:`resolution`.
* *gain* -- the global gain. See :attr:`gain`.
* *shutter* -- the shutter speed in milliseconds. See :attr:`shutter`.
'''
MAX_GLOBAL_GAIN = LumaUSB.MAX_GLOBAL_GAIN_PARAMETER_VALUE
MIN_GLOBAL_GAIN = LumaUSB.RECOMMENDED_MIN_GLOBAL_GAIN_PARAMETER_VALUE
_shutter = 0
_gain = MIN_GLOBAL_GAIN
def __init__(self, resolution: int = 1600, gain: int = MIN_GLOBAL_GAIN, shutter: int = 150) -> None:
# check for initialized microscope
fx2 = FX2.with_vid_pid(LumaUSB.VID_CYPRESS, LumaUSB.PID_LSCOPE)
if fx2 is None:
# uninitialized microscope found; upload firmware
fx2 = FX2.with_vid_pid(LumaUSB.VID_CYPRESS, LumaUSB.PID_FX2_DEV)
t = 0
try:
fx2.load_intelhex_firmware(os.path.join(RSC_DIR, 'Lumascope600.hex'))
except IOError as e:
raise CameraError('Unable to upload firmware to device: ' + str(e))
            while (fx2 := FX2.with_vid_pid(LumaUSB.VID_CYPRESS, LumaUSB.PID_LSCOPE)) is None:
time.sleep(0.5)
if (t := t + 1) > 20:
raise CameraError('Timeout while initializing microscope')
self.hw = LumaUSB(LumaUSB.VID_CYPRESS, LumaUSB.PID_LSCOPE, resolution, resolution)
self.vid_params = VideoParameters()
# set everything up and start video streaming
self.hw.InitImageSensor()
self.resolution = resolution
self.gain = gain
self.shutter = shutter
self.hw.ISOStreamStart()
self.hw.StartStreaming()
# wait for image stream to become available
while self.get_raw_image_buffer() is None:
time.sleep(0.1)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def close(self):
'''
Turn off LEDs and stop streaming.
'''
for led in range(0x41, 0x45):
self.set_led(brightness=0, led_no=led)
self.hw.StopStreaming()
self.hw.ISOStreamStop()
@property
def gain(self) -> int:
'''
Sets or returns the global gain of the image sensor. Valid values are
between ``MIN_GLOBAL_GAIN`` and ``MAX_GLOBAL_GAIN``. The minimum value
is derived from the corresponding value in the Etaluma SDK, which is
described like this: "If the gain goes below this value it was
empirically observed that the image sensor cannot saturate no matter
        the intensity of the light source."
'''
return self._gain
@gain.setter
def gain(self, value: int):
if value < self.MIN_GLOBAL_GAIN or value > self.MAX_GLOBAL_GAIN:
raise CameraError('Global gain outside allowed range')
else:
if not self.hw.SetGlobalGain(value):
raise CameraError('Could not set global gain')
self._gain = value
@property
def resolution(self):
'''
Sets or returns the camera resolution. The image is always square so
only a single integer is given or returned. Valid values are 100-1900 in
multiples of 4.
'''
return self.vid_params.width
@resolution.setter
def resolution(self, resolution: int):
self.vid_params.width = self.vid_params.height = resolution
if not self.hw.SetWindowSize(resolution):
raise CameraError('Could not set resolution')
@property
def shutter(self) -> int:
'''
Sets or retrieves the shutter speed in milliseconds.
If the shutter speed cannot be determined, this attribute is 0.
'''
return self._shutter
# XXX: figure out why below code doesn't work
# ret, val = self.hw.ImageSensorRegisterRead(LumaUSB.IMAGE_SENSOR_SHUTTER_WIDTH_LOWER, int())
# if ret:
# return val
# else:
# return 0
@shutter.setter
    def shutter(self, speed: int) -> None:
if speed > 0 and speed <= LumaUSB.MAX_IMAGE_SENSOR_EXPOSURE:
if not self.hw.ImageSensorRegisterWrite(LumaUSB.IMAGE_SENSOR_SHUTTER_WIDTH_LOWER, speed):
raise CameraError('Unable to set desired shutter speed')
else:
# XXX: temporary workaround for failing to read register
self._shutter = speed
else:
raise CameraError('Exposure speed out of range')
def set_led(self, brightness: int, led_no: int = LED_WHITE) -> bool:
'''
Set brightness of selected LED. Returns ``False`` if this fails.
Arguments:
* *led_no* -- LED number. Valid values are 0x41-0x44, where 0x41-0x43
are LEDs F1-F3 and 0x44 is white (A-D in ASCII hexadecimal).
* *brightness* -- desired brightness (0-255).
'''
if led_no >= 0x41 and led_no <= 0x44 and \
brightness >= 0 and brightness <= 255:
return self.hw.LedControllerWrite(led_no, brightness)
else:
return False
def get_image(self):
'''
Return the current buffer as a ``PIL`` (i.e., ``Pillow``) Image object.
If an image cannot be retrieved, returns ``None``.
'''
if (buffer := self.get_raw_image_buffer()) is not None:
return Image.frombytes('RGB', (self.resolution, self.resolution), buffer)
else:
return None
def get_raw_image_buffer(self):
'''
Return the contents of the image buffer as bytes, or ``None``.
'''
        (status, buffer) = self.hw.GetLatest24bppBuffer(None)
        if not status:
            return None
        buffer = bytes(buffer)
        # there's a bug in certain versions of the SDK that causes the last line
        # of the buffer to not be returned. add a black line to the bottom of
        # the image if this is the case.
        if len(buffer) < self.resolution**2 * 3:
            buffer += bytes(self.resolution * 3)
        return buffer
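# Minimal usage sketch (parameter values and file name are assumptions, not from the original):
# with LumaScope(resolution=1200, gain=LumaScope.MIN_GLOBAL_GAIN, shutter=100) as scope:
#     scope.set_led(brightness=200)            # white LED (LED_WHITE) by default
#     image = scope.get_image()                # PIL Image or None
#     if image is not None:
#         image.save('capture.png')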
|
import xmltodict
import logging
import itertools
import sys
import os
import json
import pandas as pd
import numpy as np
import pywaterml.waterML as pwml
from datetime import datetime
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.core import serializers
from django.conf import settings
from django.template import Context, Template
from django.template.loader import render_to_string, get_template
from sqlalchemy import create_engine
from sqlalchemy import Table, Column, Integer, String, MetaData
from sqlalchemy.orm import mapper
from .model import Base, Groups, HydroServer_Individual
from tethys_sdk.gizmos import TimeSeries, SelectInput, DatePicker, TextInput, GoogleMapView
from tethys_sdk.permissions import permission_required, has_permission
from .auxiliary import *
import xml.etree.ElementTree as ET
import psycopg2
from owslib.waterml.wml11 import WaterML_1_1 as wml11
from suds.client import Client # For parsing WaterML/XML
from suds.xsd.doctor import Import, ImportDoctor
from json import dumps, loads
from pyproj import Proj, transform # Reprojecting/Transforming coordinates
from django.http import JsonResponse, HttpResponse
from .app import WaterDataExplorer as app
Persistent_Store_Name = 'catalog_db'
logging.getLogger('suds.client').setLevel(logging.CRITICAL)
def get_values_hs(request):
"""
    Get metadata of a given site using the WaterOneFlow GetSiteInfo function.
    Args:
        request object containing
            - service url from site
            - network from site
            - site code
    Returns:
        json object containing the following information:
            - country: array containing country of origin of site.
            - variables: array containing variable names of site.
            - units: array containing unit names.
            - codes: array containing variable codes of site.
            - organization: array containing the organization of the given sites.
            - times_series: array containing time series of a given site.
            - geolo: array containing geolocation of the given site.
            - timeUnitName: array containing time units used for the time series.
            - timeSupport: array containing booleans that indicate whether the variables support time.
            - dataType: array containing the data type of each variable.
"""
list_catalog={}
return_obj={}
hs_url = request.POST.get('hs_url')
# print(hs_url)
site_code = request.POST.get('code')
network = request.POST.get('network')
site_desc = network + ':' + site_code
SessionMaker = app.get_persistent_store_database(Persistent_Store_Name, as_sessionmaker=True)
session = SessionMaker() # Initiate a session
client = Client(hs_url)
try:
response_info = GetSiteInfo(client,site_desc)['siteInfo']
df = pd.DataFrame.from_dict(response_info)
if df.empty:
return_obj['country'] = []
return_obj['variables'] =[]
return_obj['units'] = []
return_obj['codes'] = []
return_obj['organization'] = []
return_obj['times_series'] = []
return_obj['geolo'] = []
return_obj['timeUnitName'] = []
return_obj['TimeSupport'] = []
return_obj['dataType'] = []
return JsonResponse(return_obj)
pd.set_option('display.max_columns', None)
return_obj['country'] = df['country'].tolist()[0]
return_obj['variables'] = df['variableName'].tolist()
return_obj['units'] = df['unitAbbreviation'].tolist()
return_obj['codes'] = df['variableCode'].tolist()
        return_obj['timeUnitName'] = df['timeUnitName'].tolist()
        return_obj['timeSupport'] = df['timeSupport'].tolist()
        return_obj['dataType'] = df['dataType'].tolist()
obj_var_desc = {}
obj_var_times_s = {}
for vari, desc, times_s in zip(df['variableCode'].tolist(),df['organization'].tolist(),df['variableTimeInterval'].tolist()):
obj_var_desc[vari] = desc
obj_var_times_s[vari] = times_s
return_obj['organization'] = obj_var_desc
return_obj['times_series'] = obj_var_times_s
return_obj['geolo'] = df['geolocation'].tolist()[0]
return JsonResponse(return_obj)
except Exception as e:
return JsonResponse(return_obj)
def get_values_graph_hs(request):
"""
    Get the time series of a given site using the WaterOneFlow GetValues function.
Args:
request object containing
- service url from site
- network from site
- variable code from site
- timeframe from site
- site code
Returns:
json object containing the following information:
- graphs: array containing time series values.
- interpolation: array containing interpolation timeseries.
- unit_name: array containing unit names.
        - variablename: array containing variable names.
- timeUnitName: array containing time unit names.
"""
list_catalog={}
return_obj={}
hs_url = request.POST.get('hs_url')
site_code = request.POST.get('code')
network = request.POST.get('network')
    code_variable = request.POST.get('code_variable')
dates_request = request.POST.getlist('timeFrame[]')
start_date = dates_request[0]
    end_date = dates_request[1]
variable_desc = network + ':' + code_variable
site_desc = network + ':' + site_code
water = pwml.WaterMLOperations(url = hs_url)
values = water.GetValues(site_desc, variable_desc, start_date, end_date, format = 'json')
# print(values)
df = pd.DataFrame.from_dict(values['values'])
# print(df)
if df.empty:
return_obj['graphs'] = []
return_obj['interpolation'] = []
return_obj['unit_name'] = []
return_obj['variablename'] = []
return_obj['timeUnitName'] = []
return JsonResponse(return_obj)
variable_name = df['variableName'].tolist()[0]
unit_name = df['unitAbbreviation'].tolist()[0]
time_unit_name = df['timeUnitName'].tolist()[0]
time_series_vals = df['dataValue'].tolist()
time_series_timeUTC = df['dateTime'].tolist()
return_obj['graphs'] = list(zip(time_series_timeUTC,time_series_vals))
return_obj['interpolation'] = water.GetInterpolation(values)
return_obj['unit_name'] = unit_name
return_obj['variablename'] = variable_name
return_obj['timeUnitName'] = time_unit_name
dict_xml = []
for gps_ in return_obj['graphs']:
chunk_xml = {}
chunk_xml['DateTimeUTC']=gps_[0]
chunk_xml['DataValue']=gps_[1]
dict_xml.append(chunk_xml)
current_date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
my_vals = values['values'][0]
context = {
"data_values": dict_xml,
"current_date": current_date,
"init_date": time_series_timeUTC[0],
"final_date": time_series_timeUTC[-1],
"network": network,
"code_variable": code_variable,
"code_site": site_code,
"site_name": my_vals["siteName"],
"unitAbbreviation": my_vals["unitAbbreviation"],
"latitude_longitude": f'{my_vals["latitude"]} {my_vals["longitude"]}',
"site_id": my_vals["siteID"],
"dataType": my_vals["dataType"],
}
template_renderizado = render_to_string('water_data_explorer/wml2_values_template.xml', context)
return_obj['template_renderizado'] = template_renderizado
return JsonResponse(return_obj)
def get_xml(request):
"""
Get the xml in WaterML.1.0 format of a given site.
Args:
request object containing
- service url from site
- network from site
- variable code from site
- timeframe from site
Returns:
        waterML text containing the following data of the selected site from the WaterOneFlow web service
"""
list_catalog={}
return_obj={}
hs_url = request.POST.get('hs_url')
site_code = request.POST.get('code')
network = request.POST.get('network')
    code_variable = request.POST.get('code_variable')
dates_request = request.POST.getlist('timeFrame[]')
start_date = dates_request[0]
    end_date = dates_request[1]
variable_desc = network + ':' + code_variable
site_desc = network + ':' + site_code
water = pwml.WaterMLOperations(url = hs_url)
return_obj['waterml'] = water.GetValues(site_desc, variable_desc, start_date, end_date,format = 'waterml')
return JsonResponse(return_obj)
"""
Extracted from the WaterML.py source code in the pywaterML package, but customized to meet the requirements of the WDE
"""
def GetSiteInfo(client,site_full_code, format ="json"):
"""
Get the information of a given site. GetSiteInfo() function is similar to the GetSiteInfo() WaterML function.
Args:
site_full_code: A string representing the full code of the given site following the structure
- site_full_code = site network + ":" + site code
format: format of the response (json, csv or waterML)
Returns:
        A json, csv or waterML file containing the following data of the selected site from the WaterOneFlow web service:
- siteName: Name of the site.
- siteCode: Code of the site.
- network: observation network that the site belongs to
- fullVariableCode: The full variable code, for example: SNOTEL:SNWD.Use this value as the variableCode parameter in GetValues().
- siteID: ID of the site
- latitude: latitude of the site
- longitude: longitude of the site
- variableName: Name of the variable
- unitName: Name of the units of the values associated to the given variable and site
- unitAbbreviation: unit abbreviation of the units from the values associated to the given variable and site
- dataType: Type of data
- noDataValue: value associated to lack of data.
        - isRegular: Boolean to indicate whether the observation measurements and collections are regular
- timeSupport: Boolean to indicate whether the values support time
- timeUnitName: Time Units associated to the observation
- timeUnitAbbreviation: Time units abbreviation
- sampleMedium: the sample medium, for example water, atmosphere, soil.
- speciation: The chemical sample speciation (as nitrogen, as phosphorus..)
- beginningDateTimeUTC: The UTC date and time of the first available
- EndDateTimeUTC: The UTC date and time of the last available
- beginningDateTime: The local date and time of the first available
- EndDateTime: The local date and time of the last available
- censorCode: The code for censored observations. Possible values are nc (not censored), gt(greater than), lt (less than), nd (non-detect), pnq (present but not quantified)
- methodCode: The code of the method or instrument used for the observation
- methodID: The ID of the sensor or measurement method
        - qualityControlLevelCode: The code of the quality control level. Possible values are -9999 (Unknown), 0 (Raw data), 1 (Quality controlled data), 2 (Derived products), 3 (Interpreted products), 4 (Knowledge products)
- qualityControlLevelID: The ID of the quality control level. Usually 0 means raw data and 1 means quality controlled data.
- sourceCode: The code of the data source.
- timeOffSet: The difference between local time and UTC time in hours.
Example::
url_testing = "http://hydroportal.cuahsi.org/para_la_naturaleza/cuahsi_1_1.asmx?WSDL"
water = WaterMLOperations(url = url_testing)
sites = water.GetSites()
firstSiteFullSiteCode = sites[0]['fullSiteCode']
siteInfo = water.GetSiteInfo(firstSiteFullSiteCode)
"""
return_array = []
try:
site_info_Mc = client.service.GetSiteInfo(site_full_code)
        if format == 'waterml':
return site_info_Mc
site_info_Mc_dict = xmltodict.parse(site_info_Mc)
site_info_Mc_json_object = json.dumps(site_info_Mc_dict)
site_info_Mc_json = json.loads(site_info_Mc_json_object)
try:
object_methods = site_info_Mc_json['sitesResponse']['site']['seriesCatalog']['series']
object_siteInfo = site_info_Mc_json['sitesResponse']['site']['siteInfo']
return_array = []
if(isinstance(object_methods,(dict))):
return_obj = _getSiteInfoHelper(object_siteInfo,object_methods)
return_array.append(return_obj)
else:
for object_method in object_methods:
return_obj = _getSiteInfoHelper(object_siteInfo,object_method)
return_array.append(return_obj)
            if format == "json":
json_response = {
'siteInfo':return_array
}
return json_response
            elif format == "csv":
df = pd.DataFrame.from_dict(return_array)
csv_siteInfo = df.to_csv(index=False)
return csv_siteInfo
else:
return print("the only supported formats are json, csv, and waterml")
except KeyError as ke:
return_array = []
            if format == "json":
json_response = {
'siteInfo':return_array
}
return json_response
            elif format == "csv":
df = pd.DataFrame.from_dict(return_array)
csv_siteInfo = df.to_csv(index=False)
return csv_siteInfo
else:
return print("the only supported formats are json, csv, and waterml")
    except Exception as error:
        print(error)
        return return_array
"""
Extracted from the AuxiliaryMod in the pywaterML package.
"""
def _getSiteInfoHelper(object_siteInfo,object_methods):
"""
Helper function to parse and store the content of two dictionaries:
- object_methods = GetSiteInfoResponse ['sitesResponse']['site']['seriesCatalog']['series']
- object_siteInfo = GetSiteInfoResponse ['sitesResponse']['site']['siteInfo']
Both dictionaries containing the response from the GetSiteInfo at store the following content into a new dictionary:
- siteName: Name of the site.
- siteCode: Code of the site.
- network: observation network that the site belongs to
- fullVariableCode: The full variable code, for example: SNOTEL:SNWD.Use this value as the variableCode parameter in GetValues().
- siteID: ID of the site
- latitude: latitude of the site
- longitude: longitude of the site
- variableName: Name of the variable
- unitName: Name of the units of the values associated to the given variable and site
- unitAbbreviation: unit abbreviation of the units from the values associated to the given variable and site
- dataType: Type of data
- noDataValue: value associated to lack of data.
        - isRegular: Boolean to indicate whether the observation measurements and collections are regular
- timeSupport: Boolean to indicate whether the values support time
- timeUnitName: Time Units associated to the observation
- timeUnitAbbreviation: Time units abbreviation
- sampleMedium: the sample medium, for example water, atmosphere, soil.
- speciation: The chemical sample speciation (as nitrogen, as phosphorus..)
- beginningDateTimeUTC: The UTC date and time of the first available
- EndDateTimeUTC: The UTC date and time of the last available
- beginningDateTime: The local date and time of the first available
- EndDateTime: The local date and time of the last available
- censorCode: The code for censored observations. Possible values are nc (not censored), gt(greater than), lt (less than), nd (non-detect), pnq (present but not quantified)
- methodCode: The code of the method or instrument used for the observation
- methodID: The ID of the sensor or measurement method
        - qualityControlLevelCode: The code of the quality control level. Possible values are -9999 (Unknown), 0 (Raw data), 1 (Quality controlled data), 2 (Derived products), 3 (Interpreted products), 4 (Knowledge products)
- qualityControlLevelID: The ID of the quality control level. Usually 0 means raw data and 1 means quality controlled data.
- sourceCode: The code of the data source.
- timeOffSet: The difference between local time and UTC time in hours.
Args:
object_siteInfo: Contains metadata associated to the site.
object_methods: Contains a list of <series>, which are unique combinations of site, variable and time intervals that specify a sequence of observations.
Returns:
return_obj: python dictionary containing data from the GetSiteInfo response.
"""
return_obj = {}
try:
        site_property_info = object_siteInfo['siteProperty']
        return_obj['country'] = "No Data was Provided"
        if type(site_property_info) is list:
            for props in site_property_info:
                if props['@name'] == 'Country':
                    return_obj['country'] = props['#text']
        else:
            if str(site_property_info['@name']) == 'Country':
                return_obj['country'] = str(site_property_info['#text'])
# print(return_obj['country'])
except Exception as e:
print(e)
return_obj['country'] = "No Data was Provided"
try:
# return_obj['siteName'] = object_siteInfo['siteName']
siteName = object_siteInfo['siteName'].encode("utf-8")
# return_object['siteName'] = siteName
return_obj['siteName'] = siteName.decode("utf-8")
except KeyError as ke:
return_obj['siteName'] = "No Data was Provided"
try:
return_obj['latitude'] = object_siteInfo['geoLocation']['geogLocation']['latitude']
except KeyError as ke:
return_obj['latitude'] = "No Data was Provided"
try:
return_obj['longitude'] = object_siteInfo['geoLocation']['geogLocation']['longitude']
except KeyError as ke:
return_obj['longitude'] = "No Data was Provided"
try:
return_obj['geolocation'] = object_siteInfo['geoLocation']['geogLocation']
except KeyError as ke:
return_obj['geolocation'] = "No Data was Provided"
try:
return_obj['network'] = object_siteInfo['siteCode']['@network']
except KeyError as ke:
return_obj['network'] = "No Data was Provided"
try:
return_obj['siteCode'] = object_siteInfo['siteCode']['#text']
except KeyError as ke:
return_obj['siteCode'] = "No Data was Provided"
try:
return_obj['fullSiteCode'] = return_obj['network'] + ":" + return_obj['siteCode']
except KeyError as ke:
return_obj['fullSiteCode'] = "No Data was Provided"
try:
return_obj['variableName'] = object_methods['variable']['variableName']
except KeyError as ke:
return_obj['variableName'] = "No Data was Provided"
try:
return_obj['variableCode'] = object_methods['variable']['variableCode']['#text']
except KeyError as ke:
return_obj['variableCode'] = "No Data was Provided"
try:
return_obj['fullVariableCode'] = return_obj['network'] + ":" + return_obj['variableCode']
except KeyError as ke:
return_obj['fullVariableCode'] = "No Data was Provided"
try:
return_obj['variableCount'] = object_methods['valueCount']
except KeyError as ke:
return_obj['variableCount'] = "No Data was Provided"
try:
return_obj['dataType'] = object_methods['variable']['dataType']
except KeyError as ke:
return_obj['dataType'] = "No Data was Provided"
try:
return_obj['valueType'] = object_methods['variable']['valueType']
except KeyError as ke:
return_obj['valueType'] = "No Data was Provided"
try:
return_obj['generalCategory'] = object_methods['variable']['generalCategory']
except KeyError as ke:
return_obj['generalCategory'] = "No Data was Provided"
try:
return_obj['noDataValue'] = object_methods['variable']['noDataValue']
except KeyError as ke:
return_obj['noDataValue'] = "No Data was Provided"
try:
return_obj['sampleMedium'] = object_methods['variable']['sampleMedium']
except KeyError as ke:
return_obj['sampleMedium'] = "No Data was Provided"
try:
return_obj['speciation'] = object_methods['variable']['speciation']
except KeyError as ke:
return_obj['speciation'] = "No Data was Provided"
try:
return_obj['timeUnitAbbreviation'] = object_methods['variable']['timeScale']['unit']['unitAbbreviation']
except KeyError as ke:
return_obj['timeUnitAbbreviation'] = "No Data was Provided"
try:
return_obj['timeUnitName'] = object_methods['variable']['timeScale']['unit']['unitName']
except KeyError as ke:
return_obj['timeUnitName'] = "No Data was Provided"
try:
return_obj['timeUnitType'] = object_methods['variable']['timeScale']['unit']['unitType']
except KeyError as ke:
return_obj['timeUnitType'] = "No Data was Provided"
try:
return_obj['timeSupport'] = object_methods['variable']['timeScale']['timeSupport']
except KeyError as ke:
return_obj['timeSupport'] = "No Data was Provided"
try:
return_obj['isRegular'] = object_methods['variable']['timeScale']['@isRegular']
except KeyError as ke:
return_obj['isRegular'] = "No Data was Provided"
try:
return_obj['unitAbbreviation'] = object_methods['variable']['unit']['unitAbbreviation']
except KeyError as ke:
return_obj['unitAbbreviation'] = "No Data was Provided"
try:
return_obj['unitName'] = object_methods['variable']['unit']['unitName']
except KeyError as ke:
return_obj['unitName'] = "No Data was Provided"
try:
return_obj['unitType'] = object_methods['variable']['unit']['unitType']
except KeyError as ke:
return_obj['unitType'] = "No Data was Provided"
if 'method' in object_methods:
return_obj['methodID'] = object_methods['method']['@methodID']
return_obj['methodDescription'] = object_methods['method']['methodDescription']
else:
return_obj['methodID'] = "No Method Id was provided"
return_obj['methodDescription'] = "No Method Description was provided"
try:
return_obj['qualityControlLevelID'] = object_methods['qualityControlLevel']['@qualityControlLevelID']
except KeyError as ke:
return_obj['qualityControlLevelID'] = "No Data was Provided"
try:
return_obj['definition'] = object_methods['qualityControlLevel']['definition']
except KeyError as ke:
return_obj['definition'] = "No Data was Provided"
try:
return_obj['qualityControlLevelCode'] = object_methods['qualityControlLevel']['qualityControlLevelCode']
except KeyError as ke:
return_obj['qualityControlLevelCode'] = "No Data was Provided"
try:
return_obj['citation'] = object_methods['source']['citation']
except KeyError as ke:
return_obj['citation'] = "No Data was Provided"
try:
return_obj['organization'] = object_methods['source']['organization']
except KeyError as ke:
return_obj['organization'] = "No Data was Provided"
try:
return_obj['description'] = object_methods['source']['sourceDescription']
except KeyError as ke:
return_obj['description'] = "No Data was Provided"
try:
return_obj['beginDateTime'] = object_methods['variableTimeInterval']['beginDateTime']
except KeyError as ke:
return_obj['beginDateTime'] = "No Data was Provided"
try:
return_obj['endDateTime'] = object_methods['variableTimeInterval']['endDateTime']
except KeyError as ke:
return_obj['endDateTime'] = "No Data was Provided"
try:
return_obj['beginDateTimeUTC'] = object_methods['variableTimeInterval']['beginDateTimeUTC']
except KeyError as ke:
return_obj['beginDateTimeUTC'] = "No Data was Provided"
try:
return_obj['endDateTimeUTC'] = object_methods['variableTimeInterval']['endDateTimeUTC']
except KeyError as ke:
return_obj['endDateTimeUTC'] = "No Data was Provided"
try:
return_obj['variableTimeInterval'] = object_methods['variableTimeInterval']
except KeyError as ke:
return_obj['variableTimeInterval'] = "No Data was Provided"
return return_obj
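# Sketch of a possible refactor (not part of the original code): the repeated
# try/except KeyError blocks above could be collapsed by a small helper that
# walks a chain of keys and falls back to the default message. The name
# `safe_get` is hypothetical.
def safe_get(obj, keys, default="No Data was Provided"):
    """Return obj[k0][k1]... or `default` if any key along the path is missing."""
    for key in keys:
        try:
            obj = obj[key]
        except (KeyError, TypeError):
            return default
    return obj
# Example: return_obj['latitude'] = safe_get(
#     object_siteInfo, ['geoLocation', 'geogLocation', 'latitude'])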
|
import sys
if __name__ == "__main__":
    inputfile = sys.argv[1]
    outputfile = sys.argv[2]
with open(inputfile, "r") as f:
l, r = f.readlines()
l = float(l)
r = float(r)
b = r - l
with open(outputfile, "w") as f:
f.writelines([str(b)])
|
# coding: utf-8
# flake8: noqa
from __future__ import absolute_import
# import models into model package
from swagger_server.models.result import Result
from swagger_server.models.result_chains import ResultChains
from swagger_server.models.result_in_complex_with import ResultInComplexWith
from swagger_server.models.result_interacting_pdb_residues import ResultInteractingPDBResidues
from swagger_server.models.result_residue import ResultResidue
from swagger_server.models.result_residues import ResultResidues
from swagger_server.models.result_segments import ResultSegments
from swagger_server.models.result_seqres import ResultSeqres
from swagger_server.models.result_structures import ResultStructures
from swagger_server.models.result_template import ResultTemplate
from swagger_server.models.result_uniprot import ResultUniprot
from swagger_server.models.result_uniprot_entries import ResultUniprotEntries
|
from dataclasses import dataclass
from collections import Counter
from typing import ClassVar, TypeAlias
ELEMENT: TypeAlias = str  # represents a one-character string
PAIR: TypeAlias = tuple[ELEMENT, ELEMENT]
RULES: TypeAlias = dict[PAIR, ELEMENT]
POLYMER: TypeAlias = Counter[PAIR]
@dataclass
class Polymer:
template: str
data: POLYMER
rules: RULES
step_count: ClassVar = 0
@classmethod
def from_string(cls, input_text: str):
data: POLYMER = Counter()
data_str, rules_str = input_text.split("\n\n")
for e1, e2 in zip(data_str, data_str[1:]):
data[(e1, e2)] += 1
rules: RULES = dict()
for rule_str in rules_str.splitlines():
e1e2, e3 = rule_str.split(" -> ")
e1, e2 = e1e2
rules[(e1, e2)] = e3
return cls(data_str, data, rules)
def step(self):
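        # Pair-count trick: a pair (e1, e2) occurring n times spawns n copies of
        # (e1, e3) and (e3, e2), where e3 is the inserted element, so each step
        # costs O(number of distinct pairs) rather than O(polymer length).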
new_data: POLYMER = Counter()
for e1, e2 in self.data:
if self.data[(e1, e2)] != 0:
count = self.data[(e1, e2)]
e3 = self.rules[(e1, e2)] # new middle element
new_data[(e1, e3)] += count
new_data[(e3, e2)] += count
self.data = new_data
Polymer.step_count += 1
def apply_steps(self, step_count: int):
for _ in range(step_count):
self.step()
def get_answer(self) -> int:
element_counter: Counter[ELEMENT] = Counter()
for e1, e2 in self.data:
element_counter[e1] += self.data[(e1, e2)]
last_element = self.template[-1]
element_counter[last_element] += 1
most_commons = element_counter.most_common()
most_common_quantity = most_commons[0][1]
least_common_quantity = most_commons[-1][1]
return most_common_quantity - least_common_quantity
def part_1(self) -> int:
self.apply_steps(10)
return self.get_answer()
def part_2(self) -> int:
self.apply_steps(40)
return self.get_answer()
if __name__ == "__main__":
# with open("day14/sample.txt") as file:
with open("day14/input.txt") as file:
input_text = file.read()
polymer = Polymer.from_string(input_text)
part_1_answer = polymer.part_1()
print(f"{part_1_answer = }")
polymer = Polymer.from_string(input_text)
part_2_answer = polymer.part_2()
print(f"{part_2_answer = }")
|
"""Decorators that simplify interacting with the FriendlyArgs framework"""
from functools import update_wrapper
import sys
import logging
def main():
"""Decorates the primary entry-point function for your command line
application"""
def _main_decorator(func):
log = logging.getLogger(__name__)
log.debug("MainDecorator: {0}".format(func.__name__))
def _nested_main():
retval = func()
if isinstance(retval, int):
return retval
return 0
return update_wrapper(_nested_main, func)
return _main_decorator
def count(short_name, long_name):
"""Decorates a command function, providing an optional parameter which
counts the occurrences of a character.
Useful for things like verbosity flags which look like '-vvvv'
Args:
short_name (str):
single character parameter short name, of the form "-a" or "-b"
long_name (str):
verbose descriptive name for the count parameter
"""
log = logging.getLogger(__name__)
assert len(short_name) == 2 and short_name[0] == "-"
assert long_name.startswith("--")
def _count_decorator(func):
log.debug("CountDecorator: {0}".format(func.__name__))
def _nested_count(*args):
parts = sys.argv
param_count = 0
for cur_part in parts:
if cur_part == long_name:
param_count += 1
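                # A run like "-vvv" counts as 3: rebuild a candidate flag from the
                # short letter and only count it if it matches the argument verbatim.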
if cur_part.startswith(short_name):
num_vals = len(cur_part) - 1
sample = "-" + (short_name[1] * num_vals)
if sample == cur_part:
param_count += num_vals
return func(param_count, *args)
return update_wrapper(_nested_count, func)
return _count_decorator
if __name__ == "__main__":
pass
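# Hypothetical usage sketch (not part of the original module); the function name
# and flag names below are illustrative only:
#
#     @main()
#     @count("-v", "--verbose")
#     def cli(verbosity):
#         print("verbosity:", verbosity)
#         return 0
#
# Calling cli() would then count occurrences of "-v"/"--verbose" (including runs
# such as "-vvv") in sys.argv and pass that count as the first positional argument.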
|
import numpy as np
from numpy.random import randn
import unittest
from niwqg import CoupledModel
from niwqg import QGModel
class QGNIWTester(unittest.TestCase):
""" A class for testing the QGNIW kernel (real and complex 2d ffts)
Note: 1d plane wave pass test with machine precision
2d (slanted) plane wave has an error O(10^{-13})
"""
def setUp(self):
self.m = CoupledModel.Model(use_filter=False,nu4=1e14,nu4w=0.)
self.m.tmax = 10*self.m.dt
k, l = 2*np.pi*5/self.m.L, 2*np.pi*9/self.m.L
self.qi = np.sin(k*self.m.x + l*self.m.y)
self.m.set_q(self.qi)
self.m.set_phi(self.qi*0)
def test_hyperviscosity(self, rtol=1e-15):
""" Test if the hyperviscosity implementation simply damps
the Fourier coefficiants individualy. """
self.m.run()
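        # Expected result: pure hyperviscous decay multiplies each Fourier
        # coefficient by exp(-nu4 * wv4 * tmax), where wv4 is presumably the
        # fourth power of the total wavenumber (an assumption about the model).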
qfh = self.m.fft(self.qi)*np.exp(-self.m.nu4*self.m.wv4*self.m.tmax)
        self.assertTrue(np.allclose(qfh,self.m.qh,rtol=rtol), 'Implementation of\
                hyperviscosity is broken in CoupledModel')
class QGTester(unittest.TestCase):
""" A class for testing the QG model (rffts)
Note: 1d plane wave pass test with machine precision
2d (slanted) plane wave has an error O(10^{-13})
"""
def setUp(self):
self.m = QGModel.Model(use_filter=False, nu4 = 1e14)
self.m.tmax = 100*self.m.dt
k, l = 2*np.pi*5/self.m.L, 2*np.pi*9/self.m.L
        self.qi = np.sin(k*self.m.x + l*self.m.y)
self.m.set_q(self.qi)
def test_hyperviscosity(self, rtol=1e-15):
""" Test if the hyperviscosity implementation simply damps
the Fourier coefficiants individualy. """
self.m.run()
qfh = self.m.fft(self.qi)*np.exp(-self.m.nu4*self.m.wv4*self.m.tmax)
        self.assertTrue(np.allclose(qfh,self.m.qh,rtol=rtol), 'Implementation of\
                hyperviscosity is broken in QGModel')
if __name__ == "__main__":
unittest.main()
|
import socket
import threading
from os import system
from datetime import date
PORT = 65432
SERVER = socket.gethostbyname(socket.gethostname())
ADDR = (SERVER, PORT)
clients = []
def start():
server.listen()
print(f"[Server] Listening on {SERVER}\n")
while True:
conn, addr = server.accept() # waits for new connection, stores object in conn and address in addr
thread = threading.Thread(target=handle_client, args=(conn, addr)) # new thread with function handle_client as target and conn object and addr as arguments
thread.start() # starts new thread
print(f"[{get_time()}] [Server] Client with id #{addr[1]} has connected to the server.")
print(f"[{get_time()}] [Server] {threading.activeCount()-1} client(s) currently connected.")
print(f'just dont die!')
print("━━━━━━━━━━━━━━━━━━━━━━━━\n")
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print(f"\n[Server] Socket created succesfully. Starting server.")
server.bind(ADDR)
start()
|
from django.urls import path, re_path
from . import views
urlpatterns = [
path('invoice-list-api/', views.invoice_list_api ),
path('invoice-list/', views.InvoiceList.as_view(), name='invoice_list' ),
path('invoice-list-unpaid/', views.UnpaidInvoice.as_view(), name='unpaid_invoice_list' ),
path('invoice-list-partially-paid/', views.PartiallyPaidInvoice.as_view(), name='partially_paid_invoice_list' ),
path('invoice-list-batch/', views.BatchInvoice.as_view(), name='batch_invoice_list' ),
re_path(r'^invoice/(?P<pk>[0-9]+)/$', views.invoice_detail, name='invoice_detail' ),
re_path(r'^invoice-pdf/(?P<pk>[0-9]+)/$', views.generate_pdf, name='generate_pdf'),
re_path(r'^shipments-list-pdf/(?P<pk>[0-9]+)/$', views.generate_shipment_pdf, name='generate_shipment_pdf'),
path('invoice-create/', views.CreateInvoice.as_view(), name='create_invoice'),
re_path(r'^invoice-update/(?P<pk>[0-9]+)/$', views.UpdateInvoice.as_view(), name='update_invoice'),
path('invoice-items-ship/', views.ShipmentView.as_view(), name='ship_items'),
re_path(r'^invoice-items-ship-update/(?P<pk>[0-9]+)/$', views.ShipmentUpdate.as_view(), name='ship_items_update'),
re_path(r'^invoice-items-ship-simple-update/(?P<pk>[0-9]+)/$', views.ShipmentSimpleUpdate.as_view(), name='ship_items_simple_update'),
path('invoice-items-to-ship/', views.ItemsToSendList.as_view(), name='items_to_send'),
path('invoice-items-sent/', views.ItemsSent.as_view(), name='items_sent'),
path('shipments-list/', views.shipment_lists, name='shipment_lists'),
re_path(r'^shipment-detail/(?P<pk>[0-9]+)/$', views.shipment_detail, name='shipment_detail'),
]
|
import sys
import logging
import requests
from PIL import Image
def load_pil_image(path):
    if path.startswith("http"):
        path = requests.get(path, stream=True).raw
image = Image.open(path).convert("RGB")
return image
def setup_logging(log_level: str = "INFO"):
stdout = logging.StreamHandler(stream=sys.stdout)
stdout.setLevel(log_level)
logging.basicConfig(
format="[{asctime}][{levelname}]: {message}",
style="{",
level=log_level,
handlers=[stdout],
)
def generate_repr(obj: object, attrs):
lines = [f"{obj.__class__.__name__}("]
for attr in attrs:
attr_line = f" {attr}={getattr(obj, attr, None)},"
lines.append(attr_line)
lines.append(")")
return "\n".join(lines)
|