text stringlengths 8 6.05M |
|---|
from pymol import cmd
from pymol.cgo import *
from math import *
#
# Some functions to allow drawing arrows (vectors) in Pymol
# In need of proper documentation...
#
# Please don't distribute (parts of) this file, without credits
#
# (c)2006 Tsjerk A. Wassenaar, PhD, University of Utrecht
#
# t s j e r k w .at. g m a i l .dot. c o m
# http://nmr.chem.uu.nl/~tsjerk/
#
# BY Documentation
# Copy this file into the folder with the command script(s). Change the colour designations below. In the command script, insert for each arrow the following command after running this script.
# Syntax: cgo_arrow([startx, starty, startz], [endx, endy, endz], thickness of arrow, width of the wide end of the arrowhead, length of arrowhead, name of object (must be an integer))
# The start is the arrow tail, and the end is the arrow head.
# Arrows with arrowhead length one quarter of the vector length and with arrowhead width one seventh of the arrowhead length look reasonable. Vector length is: SQRT[(x1-x2)^2 + (y1-y2)^2 + (z1-z2)^2]
# Example with arrowhead: cgo_arrow( [ -5.07 , 28.02 , 3.06 ], [ -4.11 , 29.57 , 5.42 ],0.1, 0.11 , 0.74 , 1 )
# Example without arrowhead: cgo_arrow( [ -5.07 , 28.02 , 3.06 ], [ -4.11 , 29.57 , 5.42 ],0.1, 0 , 0 , 1 )
def t( X ):
    """Transpose X, a list of equal-length rows; returns a list of columns."""
    if not X:
        return X
    return [ [ row[col] for row in X ] for col in range(len(X[0])) ]
def v_add( a, b ):
    """Component-wise sum of two 3-vectors; returns a tuple."""
    return tuple( a[k] + b[k] for k in range(3) )
def v_sub( a, b ):
    """Component-wise difference a - b of two 3-vectors; returns a tuple."""
    return tuple( a[k] - b[k] for k in range(3) )
def vecprod( a, b ):
    """Cross product a x b of two 3-vectors; returns a tuple."""
    return tuple( a[(k + 1) % 3] * b[(k + 2) % 3] - a[(k + 2) % 3] * b[(k + 1) % 3]
                  for k in range(3) )
def inprod( a, b=None ):
    """Dot product of 3-vectors a and b; squared length of a when b is omitted.

    Uses an explicit identity check for the optional argument instead of
    truthiness, so a falsy second operand can never be mistaken for "absent".
    """
    if b is not None:
        return a[0]*b[0] + a[1]*b[1] + a[2]*b[2]
    return a[0]*a[0] + a[1]*a[1] + a[2]*a[2]
def svmult( s, a ):
    """Scale the 3-vector a by the scalar s; returns a tuple."""
    return tuple( s * a[k] for k in range(3) )
def norm( a ):
    """Return a divided by its Euclidean length (unit vector)."""
    scale = 1 / sqrt( inprod( a ) )
    return svmult( scale, a )
def mvmult( R, x ):
    """Multiply matrix R (sequence of row 3-vectors) by vector x; returns a tuple."""
    return tuple( row[0]*x[0] + row[1]*x[1] + row[2]*x[2] for row in R )
def mv_add( X, a ):
    """Translate every row vector of X by the 3-vector a; returns a list of tuples."""
    return [ ( row[0] + a[0], row[1] + a[1], row[2] + a[2] ) for row in X ]
def mmmult( R, X ):
    """For each row x of X compute the vector R.x; returns a list of tuples."""
    return [ tuple( row[0]*x[0] + row[1]*x[1] + row[2]*x[2] for row in R )
             for x in X ]
def smatrix( v ):
    """Diagonal scaling matrix with the components of v on the diagonal."""
    return [ [ v[r] if r == c else 0 for c in range(3) ] for r in range(3) ]
def rmatrix( v ):
    """Rotation matrix for the Euler angles v = (rx, ry, rz): Rx . Ry . Rz."""
    cosx, sinx = cos( v[0] ), sin( v[0] )
    cosy, siny = cos( v[1] ), sin( v[1] )
    cosz, sinz = cos( v[2] ), sin( v[2] )
    Rx = [ [ 1, 0, 0 ], [ 0, cosx, -sinx ], [ 0, sinx, cosx ] ]
    Ry = [ [ cosy, 0, -siny ], [ 0, 1, 0 ], [ siny, 0, cosy ] ]
    Rz = [ [ cosz, -sinz, 0 ], [ sinz, cosz, 0 ], [ 0, 0, 1 ] ]
    return mmmult( mmmult( Rx, Ry ), Rz )
def block( i, dphi ):
    """One triangle-fan slice of the arrowhead cone: an averaged normal,
    the apex (1,0,0), then five rim points spanning sector i of width dphi."""
    sqrt2 = sqrt(2)
    step = 0.25 * dphi
    # Five evenly spaced angles across this sector, accumulated exactly as
    # phi0 .. phi4 = i*dphi, +step, +step, ...
    phis = []
    phi = i * dphi
    for _ in range(5):
        phis.append( phi )
        phi = phi + step
    mid = phis[2]
    result = [ ( -0.5*sqrt2, -0.5*sqrt2*cos(mid), 0.5*sqrt2*sin(mid) ),
               ( 1, 0, 0 ) ]
    result.extend( ( 0, cos(p), -sin(p) ) for p in phis )
    return result
def cgo_triangle_fan( X ):
    """Consume X (repeating groups of 1 normal + 6 vertices) into a flat
    CGO NORMAL/VERTEX stream.  X is emptied as a side effect."""
    Y = []
    while X:
        nx, ny, nz = X.pop(0)
        Y.extend( [ NORMAL, nx, ny, nz ] )
        for _ in range( 6 ):  # BY: changing the 6 to 0 will eliminate the arrowhead
            vx, vy, vz = X.pop(0)
            Y.extend( [ VERTEX, vx, vy, vz ] )
    return Y
def cgo_arrow1( S, E, r=0.2, hr=0.4, hl=1.0 ):
    # Build the CGO primitive list for one arrow from S (tail) to E (head):
    # a cylinder for the shaft plus a triangle-fan cone for the arrowhead.
    #   r  : shaft radius;  hr : arrowhead base radius;  hl : arrowhead length
    P0 = S
    D = v_sub( E, S )
    # NOTE(review): inprod(D, D) is the *squared* length of D, but it is used
    # below as if it were the length, so the cone base sits hl/|D| (not hl)
    # short of E.  The BY parameter recipes above appear tuned around this
    # behaviour -- confirm before "fixing".
    DL = inprod( D, D )
    P1 = v_add( S, svmult( (DL-hl)/DL, D ) )  # base centre of the arrowhead cone
    P2 = E
    # Define a vector orthogonal to P1-P0
    V = v_sub( P1, P0 )
    V = norm( V )
    if V[2] != 0:
        A = ( 1, 1, -(V[0]+V[1])/V[2] )
    elif V[1] != 0:
        A = ( 1, -V[0]/V[1], 0 )
    else:
        A = ( 0, -V[0], 0 )
    A = norm( A )
    B = vecprod( V, A )  # completes the local orthonormal frame (V, A, B)
    print (inprod(V), inprod(B), inprod(A))  # debug: each should be ~1.0
    # Frame matrix scaled to the arrowhead dimensions (columns hl*V, hr*A, hr*B).
    R = t([ svmult( hl,V ), svmult( hr,A ), svmult( hr,B ) ])
    # Define the transformation matrix (scale and rotation)
    #C = v_sub( P2, P1 )
    #scale = ( hl, hr, hr )
    #rotate = ( 0, acos( C[0]/sqrt(C[0]**2+C[2]**2) ), acos( C[0]/sqrt(C[0]**2+C[1]**2) ) )
    #R = mmmult( smatrix( scale ), rmatrix( rotate ) )
    obj = [
        CYLINDER, S[0], S[1], S[2], P1[0], P1[1], P1[2], r, 0, 1, 0, 0, 0, 1, #BY: after the r, the first three are the RGB numbers for the arrow tail, and the last three are the RGB near the arrow head.
        COLOR, 0, 0, 1, #BY: after COLOR, the three numbers are the RGB numbers for the arrow head itself
        BEGIN, TRIANGLE_FAN ]
    # Approximate the cone with N angular segments of the unit fan, then
    # transform them into the arrow's local frame anchored at P1.
    N = 10
    dphi = 2*pi/N
    crds = []
    for i in range(N+1):
        crds.extend( block( i, dphi ) )
    crds = mv_add( mmmult( R, crds ), P1 )
    obj.extend( cgo_triangle_fan( crds ) )
    obj.extend( [ END, ] )
    return obj
def cgo_arrow( S, E, r=0.2, hr=0.4, hl=1.0, name="arrow", state=1 ):
    """Build one arrow from S (tail) to E (head) and load it into PyMOL as `name`."""
    cmd.load_cgo( cgo_arrow1( S, E, r=r, hr=hr, hl=hl ), name, state )
def cgo_arrows( X, r=0.2, hr=0.4, hl=1.0, name="arrow", state=1 ):
    """Load one CGO object containing an arrow per row of X, where each row
    is (startx, starty, startz, endx, endy, endz)."""
    obj = []
    for row in X:
        obj.extend( cgo_arrow1( tuple(row[:3]), tuple(row[3:6]), r=r, hr=hr, hl=hl ) )
    cmd.load_cgo( obj, name, state )
|
# IanNolon
# 4/25/18
# warmup13.py
from random import randint

# Twenty random integers in [1, 100].  The original bound this to the name
# `list`, shadowing the builtin -- renamed, and built with a comprehension.
numbers = [randint(1, 100) for _ in range(1, 21)]

print(sum(numbers))
print(min(numbers))
print(max(numbers))
|
#! /usr/bin/python
import corr,socket,array
import socket,math,corr,array  # NOTE(review): duplicates the imports above.
import struct
import time
import matplotlib.pyplot as plt
import numpy as np

# Spectrometer geometry: 4096 channels spanning the Nyquist bandwidth.
N_Chans = 4096
SAMP_RATE = 1024 # MHz
BW = SAMP_RATE / 2.0  # Nyquist bandwidth in MHz
Ch_BW = BW / N_Chans  # per-channel bandwidth in MHz
def plot_spectrum():
    # Receive one spectrum (two UDP packets) on the module-global sock1,
    # split it into the xx/yy polarisations and redraw both panels of the
    # module-global figure `fig`.  Re-arms itself via the Tk `after` timer,
    # so once started it refreshes roughly once per second.
    data1, addr1 = sock1.recvfrom(4104)  # 8-byte header + 4096 payload bytes
    data2, addr2 = sock1.recvfrom(4104)
    header1 = struct.unpack('<Q',data1[0:8])[0]
    header2 = struct.unpack('<Q',data2[0:8])[0]
    # NOTE(review): np.fromstring is deprecated; np.frombuffer is the
    # drop-in replacement for this read-only use.
    data1_tmp=np.fromstring(data1[8:],dtype=np.uint8)
    data2_tmp=np.fromstring(data2[8:],dtype=np.uint8)
    # Payload bytes interleave the two polarisations: even -> xx, odd -> yy.
    xx_tmp1 = data1_tmp[0::2]
    yy_tmp1 = data1_tmp[1::2]
    xx_tmp2 = data2_tmp[0::2]
    yy_tmp2 = data2_tmp[1::2]
    # Low 56 bits of the header carry the sequence number; top byte is the
    # source ID.
    seq1 = header1 & 0x00ffffffffffffff
    seq2 = header2 & 0x00ffffffffffffff
    # An even/odd sequence pair tells us which packet holds the first half
    # of the spectrum.
    if((seq1%2==0) & (seq2%2==1)):
        xx = xx_tmp1.tolist() + xx_tmp2.tolist()
        yy = yy_tmp1.tolist() + yy_tmp2.tolist()
    elif((seq1%2==1) & (seq2%2==0)):
        xx = xx_tmp2.tolist() + xx_tmp1.tolist()
        yy = yy_tmp2.tolist() + yy_tmp1.tolist()
    else:
        # Both packets are from the same half -- draw an empty spectrum.
        xx = N_Chans*[0]
        yy = N_Chans*[0]
    print "seq is %lu, ID source is %X" % (seq1,header1>>56)
    #print "header1 is %lu, ID source is %X" % (header2,header2>>56)
    #print "seq1 is %lu, seq2 is %lu" % (seq1,seq2)
    freq = np.arange(0,N_Chans*Ch_BW,Ch_BW)  # channel frequencies in MHz
    plt.clf()
    #print paa
    plt.subplot(211)
    #plt.title('SEQ is '+str(seq),bbox=dict(facecolor='red', alpha=0.5))
    #plt.title('SEQ is '+str(seq1))
    plt.title('Spectrum monitor from 10GbE port')
    #plt.title('xx')
    #plt.plot(np.log10(paa),color="g")
    plt.plot(freq,xx,color="g")
    #plt.xlim(0,freq)
    plt.ylabel('xx')
    plt.ylim(0,256)  # raw uint8 sample range
    #plt.ylabel('Power(dBm)')
    #print pbb
    plt.subplot(212)
    #plt.title('yy')
    #plt.plot(np.log10(pbb),color="b")
    plt.plot(freq,yy,color="b")
    #plt.xlim(0,freq)
    plt.ylim(0,256)
    plt.ylabel('yy')
    plt.xlabel('Freq(MHz)')
    #plt.ylabel('Power(dBm)')
    print ('total power of xx and yy are: %f and %f') % (np.average(xx),np.average(yy))
    fig.canvas.draw()
    # Schedule the next refresh in 1000 ms (Tk backend timer).
    fig.canvas.manager.window.after(1000,plot_spectrum)
    return True
if __name__ == '__main__':
    # Pre-size the spectra so the globals exist before the first packet
    # pair has been parsed.
    xx = 4096*[0]
    yy = 4096*[0]
    xx_tmp1=2048*[0]
    yy_tmp1=2048*[0]
    xx_tmp2=2048*[0]
    yy_tmp2=2048*[0]
    IP1 = "192.168.16.11" #bind on IP addresses
    #IP1 = "192.168.1.127" #bind on IP addresses
    #IP1 = "10.10.12.2" #bind on IP addresses
    PORT = 12345
    file_name = "fast-test.dat"  # NOTE(review): unused in this script.
    sock1 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock1.bind((IP1, PORT))
    if PORT != -1:  # NOTE(review): always true -- PORT is hard-coded above.
        print "10GbE port connect done!"
    fig = plt.figure()
    # Kick off the 1 Hz refresh loop, then hand control to the GUI loop.
    fig.canvas.manager.window.after(1000,plot_spectrum)
    plt.show()
|
"""Intelligence Services Classes."""
import logging

# Emitted at import time; only visible when the root logger is configured
# for DEBUG before this package is imported.
logging.debug("In the intelligence_services __init__.py file.")
|
from sys import stdin
import math

# Read the number of queries and the rectangle dimensions w x h.
n, w, h = [int(x) for x in input().split()]
# The longest segment that fits inside the rectangle is its diagonal.
maximum = math.sqrt((w*w+h*h))
for i in range(0, n):
    l = int(stdin.readline().strip())
    # "DA" (yes) if the segment fits, "NE" (no) otherwise.
    if l <= maximum:
        print("DA")
    else:
        print("NE")
import psycopg2 as pg2  # Labeling psycopg2 as pg2 here.
import db_column_cons as col  # Same thing here.

# Module-level connection state shared by every function below.
cur = None
conn = None
try:
    # NOTE(review): non-default port 5433; the password is read from the
    # db_column_cons module rather than being hard-coded here.
    conn = pg2.connect("host=127.0.0.1 dbname=postgres user=postgres password={} port=5433".format(col.password))
    cur = conn.cursor()
    print("Connected to {} database.".format("cut_log"))  # Lets us know we are connected to the database.
    print()
except pg2.DatabaseError as e:
    print(e.pgerror)
def get_records():
    """Print every row currently stored in the cut_log table."""
    try:
        cur.execute("SELECT * FROM cut_log;")
        for row in cur.fetchall():  # One tuple per table row.
            print(row)
    except pg2.DatabaseError as e:
        print(e.pgerror)
def collect_info():
    """Prompt for a weld id and a cut length; return them as [weld_id, cut_length].

    Raises ValueError if the entered length is not a valid float.
    """
    print()  # Blank line to separate the prompts from previous output.
    # input() already returns str in Python 3 -- the original wrapped it in a
    # redundant str() call.
    weld_id = input("Enter the Weld Id to update: ")
    cut_length = float(input("Enter the length of the cut: "))
    return [weld_id, cut_length]
def update_cuts(pipe_id_name, cut_length):
    # Record a cut against a pipe and recompute its remaining length.
    # NOTE(review): `cuts` is overwritten with this cut's length and
    # `new_lt` is always `length - <this cut>` -- if these are meant to
    # accumulate across calls, the SQL should add to the previous values.
    # Confirm against the cut_log schema.
    sql_statement = """UPDATE cut_log
SET cuts = %s,
new_lt = length - %s
WHERE pipe_id = %s;"""
    values_to_insert = ( cut_length, cut_length, pipe_id_name)
    try:
        cur.execute(sql_statement, values_to_insert)
        conn.commit()  # Persist the update.
        print("Record updated successfully.")  # Will print this message if our update was successful.
    except pg2.DatabaseError as e:
        print(e.pgerror)
def main():
    """Show the current table, gather user input, then apply the update."""
    get_records()
    weld_id, cut_length = collect_info()
    update_cuts(weld_id, cut_length)


if __name__ == '__main__':
    main()
|
from PyQt5 import QtWidgets
from PyQt5.QtWidgets import QMessageBox
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
class PlotWindow(QtWidgets.QFrame):
    """Standalone frame hosting a matplotlib canvas plus its navigation toolbar."""

    def __init__(self, width=600, height=800, parent=None):
        super(PlotWindow, self).__init__(parent)
        # One figure / one axes; tight_layout avoids clipped axis labels.
        self.figure = Figure(tight_layout=True)
        self.canvas = FigureCanvas(self.figure)
        self.toolbar = NavigationToolbar(self.canvas, self)
        self.ax = self.figure.subplots()
        self.setWindowTitle('Plot window')
        layout = QtWidgets.QVBoxLayout()
        layout.addWidget(self.toolbar)
        layout.addWidget(self.canvas)
        self.setLayout(layout)
        self.resize(width, height)

    def drawSafe(self):
        """Redraw the canvas, reporting any RuntimeError in a modal dialog."""
        try:
            self.canvas.draw()
        except RuntimeError as e:
            msg = QMessageBox()
            msg.setIcon(QMessageBox.Critical)
            # BUGFIX: RuntimeError has no .strerror attribute (that is an
            # OSError field), so building the dialog itself used to raise
            # AttributeError.  str(e) shows the actual error text.
            msg.setText(str(e))
            msg.setWindowTitle('Runtime Error')
            msg.setStandardButtons(QMessageBox.Ok)
            msg.exec_()
|
import os
import time
import json
import copy
import logging
import threading
import psycopg2
from psycopg2 import extras
class DBConnectionError(Exception):
    """Raised when establishing a psycopg2 connection fails."""
    pass


class DBQueryError(Exception):
    """Raised when executing a SQL statement fails."""
    pass


class DBOfflineError(Exception):
    """Raised when a health-check query shows a connection is dead."""
    pass


class UnconfiguredGroupError(Exception):
    """Raised when a requested connection group is missing from the config."""
    pass
def conn_iter(connection_group):
    # Infinite round-robin generator over the group's pool slots.
    # Yields (connection_id, conn_ref) for a slot it has just marked
    # 'occupied', or None when every slot in the pool is busy.
    logger = logging.getLogger(__name__)
    connection_id = 0
    max_pool_size = Connection.get_max_pool_size(connection_group)
    while True:
        connection = (connection_group, connection_id)
        (conn_ref, status) = Connection.get_connection(connection)
        logger.debug('iterator group:{} id:{} conn_ref:{} status:{}'.format(
            connection_group,
            connection_id,
            conn_ref,
            status
            )
        )
        if status == 'free':
            # Claim the slot before handing it out to the caller.
            Connection.set_connection_status(connection, 'occupied')
            yield (connection_id, conn_ref)
            connection_id += 1
        else:
            connection_id += 1
            # Pool exhausted: signal the caller to back off and retry.
            if Connection.get_connection_count(connection) == max_pool_size:
                yield None
        # Wrap around to the first slot.
        if connection_id == max_pool_size:
            connection_id = 0
def conn_iter_locked(iterator):
    # Wrap `iterator` so that each next() call happens under a lock.
    # NOTE(review): the Lock is created inside this generator, so it is
    # private to this wrapper instance, and a generator's next() cannot run
    # re-entrantly anyway -- the lock therefore provides no cross-thread
    # protection.  Confirm whether a shared module-level lock was intended.
    lock = threading.Lock()
    while True:
        try:
            with lock:
                value = next(iterator)
            yield value
        except StopIteration:
            return
class Connection(object):
    """
    Class-level psycopg2 connection pool; call init(config) once at startup.
    Expected config layout:

    config = {
        'db': {
            'host': 'hostname',
            'name': 'database',
            'user': 'dbuser',
            'pass': 'dbpass',
            'ssl': False,
            'connect_timeout': 30,
            'connection_retry_sleep': 1,
            'query_timeout': 120,
            'session_tmp_buffer': 128
        },
        'groups': {
            'group1': {
                'connection_count': 20,
                'autocommit': False,
                'connections': [
                    (conn, status),
                ],
                'connection_iter': None
            }
        }
    }
    """
    @classmethod
    def init(cls, config):
        # Entry point: store the config, then build every group's pool.
        cls.logger = logging.getLogger(__name__)
        cls._config = config
        cls._init_class()

    @classmethod
    def _init_class(cls):
        db_config = cls._config['db']
        # Session limits are injected through libpq's PGOPTIONS environment
        # variable so they apply to every connection opened below.
        statement_timeout = 'statement_timeout={}'.format(db_config['query_timeout'])
        temp_buffers = 'temp_buffers={}MB'.format(db_config['session_tmp_buffer'])
        os.environ['PGOPTIONS'] = '-c {timeout} -c {buffers}'.format(
            timeout = statement_timeout,
            buffers = temp_buffers
        )
        cls._setup_groups()

    @classmethod
    def _setup_groups(cls):
        # Give each group a locked round-robin iterator, then open its pool.
        for group in cls._config['groups']:
            cls._config['groups'][group]['connection_iter'] = conn_iter_locked(
                conn_iter(group)
            )
            cls._setup_connections(group)

    @classmethod
    def _setup_connections(cls, group):
        # Create `connection_count` placeholder slots and connect each one.
        group_container = cls._config['groups'][group]
        group_container['connections'] = []
        connection_container = group_container['connections']
        for id in range(0, group_container['connection_count']):
            connection_container.append(
                (None, 'connecting')
            )
            cls.connect((group, id))
        cls.logger.debug(cls._config)

    @classmethod
    def get_max_pool_size(cls, group):
        # Configured slot count for the group.
        return cls._config['groups'][group]['connection_count']

    @classmethod
    def get_connection_iter_container(cls, group):
        # The group's shared round-robin slot iterator.
        return cls._config['groups'][group]['connection_iter']

    @classmethod
    def get_connection_container(cls, connection):
        # (conn_ref, status) tuple for a (group, id) slot.
        (group, id) = connection
        return cls._config['groups'][group]['connections'][id]

    @classmethod
    def get_connection(cls, connection):
        # Alias for get_connection_container.
        return cls.get_connection_container(connection)

    @classmethod
    def get_connection_count(cls, connection):
        # Number of slots currently marked 'occupied' in the slot's group.
        connection_count = 0
        (group, id) = connection
        connections = cls._config['groups'][group]['connections']
        for (conn_ref, status) in connections:
            if status == 'occupied':
                connection_count += 1
        return connection_count

    @classmethod
    def set_connection_status(cls, connection, status):
        # Replace the slot's (conn_ref, status) tuple with the new status.
        # NOTE(review): the Lock is created per call, so it synchronises
        # nothing across threads -- a shared class-level lock may have been
        # intended.
        assert status in ['occupied', 'free'], 'status must be free or occupied'
        lock = threading.Lock()
        with lock:
            (group, id) = connection
            connections = cls._config['groups'][group]['connections']
            connection = connections[id]
            new_connection = (connection[0], status)
            #del(connections[id])
            connections[id] = new_connection
            cls.logger.debug('set status id:{} status:{} con_ref:{}'.format(
                id,
                status,
                new_connection[0]
                )
            )

    @classmethod
    def get_next_connection(cls, group):
        # Next (id, conn_ref) from the group's iterator; None when exhausted.
        try:
            return next(cls.get_connection_iter_container(group))
        except KeyError:
            raise UnconfiguredGroupError

    @classmethod
    def connect(cls, connection):
        # (Re)open the psycopg2 connection for one pool slot, mark it 'free',
        # and apply the group's autocommit / prepared-query options.
        (conn_group, conn_id) = connection
        try:
            lock = threading.Lock()  # NOTE(review): per-call lock, see above.
            db_container = cls._config['db']
            group_container = cls._config['groups'][conn_group]
            with lock:
                group_container['connections'][conn_id] = (
                    psycopg2.connect(
                        dbname = db_container['name'],
                        user = db_container['user'],
                        host = db_container['host'],
                        password = db_container['pass'],
                        sslmode = db_container['ssl'],
                        connect_timeout = db_container['connect_timeout']
                    ),
                    'free'
                )
            conn_container = group_container['connections'][conn_id]
            connection = conn_container[0]
            if 'autocommit' in group_container and group_container['autocommit'] is True:
                extension = psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
                connection.set_isolation_level(extension)
            if 'sqlprepare' in group_container and group_container['sqlprepare'] is True:
                # Warm the session by pre-preparing the application's queries.
                tmpCursor = connection.cursor(
                    cursor_factory = psycopg2.extras.DictCursor
                )
                tmpCursor.callproc('"SQLPrepare"."PrepareQueries"')
        except:
            # NOTE(review): bare except discards the original error details;
            # chaining it onto DBConnectionError would aid debugging.
            raise DBConnectionError

    @classmethod
    def reconnect(cls, connection):
        # Health-check the slot; if it is offline, retry connecting forever
        # with the configured sleep between attempts.
        try:
            Query.check_db(connection)
        except DBOfflineError:
            while True:
                try:
                    Connection.connect(connection)
                    return
                except Exception as e:
                    time.sleep(cls._config['db']['connection_retry_sleep'])
class Query(object):
    """Stateless helpers that run SQL against a pooled connection slot."""

    @staticmethod
    def execute_prepared(connection, sql_params):
        """Call the "SQLPrepare"."ExecuteQuery" procedure and return its first
        column; on failure return a JSON error document instead of raising.

        connection: (group, id) pool-slot tuple.
        sql_params: parameters for the prepared query (required).
        """
        assert sql_params is not None, "sql_params must be given."
        Connection.reconnect(connection)
        (conn_ref, status) = Connection.get_connection(connection)
        try:
            tmpCursor = conn_ref.cursor(cursor_factory=psycopg2.extras.DictCursor)
            tmpCursor.callproc('"SQLPrepare"."ExecuteQuery"', sql_params)
            rec = tmpCursor.fetchone()
            return rec[0]
        except Exception as e:
            ErrorJSON = {}
            ErrorJSON['error'] = True
            ErrorJSON['exception'] = type(e).__name__
            # BUGFIX: BaseException.message does not exist on Python 3 (and
            # not on every exception in Python 2), so building the error body
            # used to raise AttributeError and mask the real failure.
            ErrorJSON['exceptionCause'] = getattr(e, 'message', str(e))
            return json.dumps(ErrorJSON)

    @staticmethod
    def execute(connection, sql_statement, sql_params=None):
        """Execute sql_statement (with optional params) and return the live
        cursor; raises DBQueryError wrapping the original error on failure."""
        Connection.reconnect(connection)
        (conn_ref, status) = Connection.get_connection(connection)
        try:
            tmpCursor = conn_ref.cursor(cursor_factory=psycopg2.extras.DictCursor)
            tmpCursor.execute(sql_statement, sql_params)
            return tmpCursor
        except Exception as e:
            raise DBQueryError(repr(e))

    @staticmethod
    def check_db(connection):
        """Probe the slot with a trivial query; raise DBOfflineError if dead."""
        (conn_ref, status) = Connection.get_connection(connection)
        try:
            tmpCursor = conn_ref.cursor(cursor_factory=psycopg2.extras.DictCursor)
            tmpCursor.execute("SELECT to_char(now(), 'HH:MI:SS') AS result")
            rec = tmpCursor.fetchone()
        except Exception as e:
            raise DBOfflineError
class Handler(object):
    """Context-manager handle over one pooled connection.

    __init__ blocks until a free slot is available; query()/query_prepared()
    run SQL on it; on exit the transaction is committed (best effort) and
    the slot is released back to the pool.
    """
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._cleanup()

    def query(self, statement, params=None):
        # Run a plain SQL statement on this handler's connection.
        return Query.execute(self._connection, statement, params)

    def query_prepared(self, params):
        # Run a prepared query via the SQLPrepare stored procedures.
        return Query.execute_prepared(self._connection, params)

    def _cleanup(self):
        self.logger.debug('cleanup connection:{}'.format(self._connection))
        try:
            # Best-effort commit of whatever the caller did.
            # NOTE(review): this commits even when __exit__ saw an exception
            # -- confirm a rollback-on-error isn't wanted here.
            self.conn_ref.commit()
        except Exception as e:
            pass
        Connection.set_connection_status(
            (self._group, self._conn_id),
            'free'
        )
        return

    def __init__(self, group):
        self.logger = logging.getLogger(__name__)
        self._group = group
        # Spin until the pool iterator hands us a slot; it yields None when
        # the pool is exhausted, which makes the tuple unpack raise TypeError.
        while True:
            try:
                (self._conn_id, self.conn_ref) = Connection.get_next_connection(group)
                self._connection = (self._group, self._conn_id)
                self.logger.debug('handler connection:{}'.format(self._connection))
                return
            except TypeError:
                time.sleep(0.1)
|
# -*- coding: utf-8 -*-
import sys
import os
import random
import time
import json
import copy
from pprint import pprint
from errors import err
from const import *
#<class 'model.BaseModel.BaseModel'>
from model.BaseModel import BaseModel
from model.RoomModel import RoomModel
from model.CardModel import CardModel
from model.FightModel import FightModel
from pymongo.errors import AutoReconnect
class EffectHelperModel(BaseModel):
    """Helpers for resolving card-effect targets, values and durations."""

    def __init__(self):
        super(EffectHelperModel, self).__init__()
'''
@classmethod
def __getEffectPointerTarget(self, cards, pointer, uniqids, object):
if isinstance(cards, list)== False or isinstance(pointer, dict)== False or isinstance(uniqids, list)== False:
return []
all_uniqids= RoomModel.getRoomCardAllUniqid(cards)
uniqids= [int(val) for val in uniqids]
pointer_type= pointer.get('pointerType')
pointer_value= pointer.get('pointerValue')
#fix bug
if pointer_type== None:
return []
if str(pointer_type)=='' or (pointer_value!= None and pointer_value<= 0):
self.log.error('doEffectAddAtk, Configuration error')
return []
if pointer_type== 'playerPickX':
print 'here', uniqids, all_uniqids
tmp= [val for val in uniqids if val in all_uniqids]
if len(tmp)!= len(uniqids):
return []
elif pointer_type== 'single':
if len(uniqids)>1:
return []
if all_uniqids.count(uniqids[0])== 0:
return []
elif pointer_type== 'all':
#tmp= [val for val in uniqids if val in all_uniqids]
uniqids= all_uniqids
elif pointer_type== 'randomX':
pointer_value= int(pointer_value)
count= min(len(all_uniqids), pointer_value)
count= max(1, pointer_value)
if count>= len(all_uniqids):
uniqids= all_uniqids
else:
uniqids= random.sample(all_uniqids, count)
elif pointer_type== 'others':
uniqids = [val for val in all_uniqids if val != object]
else:
return []
return uniqids
@classmethod
def getEffectTarget(self, room, pos, effect, uniqids= []):
if isinstance(room, dict)== False or isinstance(effect, dict)== False or (pos!=1 and pos!= 2):
return []
range= effect['targetA'].get('range')
pointer= effect['targetA'].get('pointer')
attribute= effect['targetA'].get('attribute')
object= effect['object']
print pointer, range, object, uniqids
if range.get('thisCard')== 1:
return [object]
if pointer== None:
pointer= {'pointerType':'all'}
elif isinstance(pointer, dict)== True:
field= range.get('field')
role= range.get('role')
camp= range.get('camp')
pointer_type= pointer.get('pointerType')
pointer_value= pointer.get('pointerValue')
if role== None:
role= []
if isinstance(role, list)== False:
role= [role]
print type(role), HERO_HERO, camp
if pos== 1:
if camp== 'me' and len(role)==1 and role[0]== HERO_HERO:
return [room['cardPlay1'][0]['uniqid']]
if field== 'battleField' or field== None:
if camp== None or camp== 'all':
cards= []
cards.extend(room['cardPlay1'])
cards.extend(room['cardPlay2'])
elif camp== 'me':
k= 'cardPlay1'
cards= copy.deepcopy(room[k])
elif camp== 'enemy':
k= 'cardPlay2'
cards= copy.deepcopy(room[k])
else:
return []
else:
if camp== 'me' and len(role)==1 and role[0]== HERO_HERO:
return [room['cardPlay2'][0]['uniqid']]
if field== 'battleField' or field== None:
if camp== None or camp== 'all':
cards= []
cards.extend(room['cardPlay1'])
cards.extend(room['cardPlay2'])
elif camp== 'me':
k= 'cardPlay2'
cards= copy.deepcopy(room[k])
elif camp== 'enemy':
k= 'cardPlay1'
cards= copy.deepcopy(room[k])
else:
return []
print range, pointer, camp
cards= [val for val in cards if role.count(val.get('type'))> 0]
print '__getEffectPointerTarget', cards, role
cards= self.__doEffectTargetAttributeFilter(room, pos, effect, cards, attribute)
return self.__getEffectPointerTarget(cards, pointer, uniqids, object)
return []
@classmethod
def __doEffectTargetAttributeFilter(self, room, pos, effect, cards, attribute):
if attribute== None or isinstance(cards, list)== False:
return cards
#受过伤的
if attribute== "injured":
cards= [val for val in cards if (val.get('maxHp')> 0 and val.get('maxHp')> val.get('hp'))]
else:
pass
return cards
'''
@classmethod
def getEffectValue(self, room, pos, effect, uniqids= []):
    '''
    Distribute the effect's value over the target cards.
    modified by tm at 2014-01-28
    1. add summary
    2. cleaned up the logic; supports the nForRandom type
    @return: dict keyed by uniqid, value = amount assigned to that card
    '''
    if isinstance(room, dict)== False or isinstance(effect, dict)== False or (pos!=1 and pos!= 2):
        return {}
    values = effect.get('value', {})
    if not values:
        return {}
    nvalue = int(values.get('value', 0))
    # NOTE(review): both dict entries are built eagerly, so the random
    # distribution below runs (and consumes randomness) even when the 'n'
    # branch is selected; an unknown valueType raises KeyError here.
    return {
        'n' : (lambda x: {val:nvalue for val in uniqids})(uniqids),
        'nForRandom' : self.__effectValueNForRandom(nvalue, uniqids),
    }[values.get('valueType')]
    # return value, tp
@classmethod
def __effectValueNForRandom(cls, nvalue, uniqids):
    '''
    Randomly distribute the value, one point at a time, over the targets.
    @author: tm 2014-01-28
    '''
    returnDict = {}
    for i in xrange(nvalue):
        # Draw one target at random (with replacement) per point.
        uniqid = random.choice(uniqids)
        # Python 2 idiom: increment if already present, else start at 1.
        returnDict[uniqid] = returnDict.has_key(uniqid) and returnDict[uniqid] + 1 or 1
    return returnDict
@classmethod
def getEffectContinuous(self, room, pos, effect, uniqids= []):
    # Map the effect's `continuous` config to a turn counter:
    #   -2 while the card exists, -3 until the match ends, 1 until this
    #   turn ends, otherwise an explicit number of turns.
    # NOTE(review): the 'once' branch returns True (not the -1 it assigns)
    # and a malformed config returns False -- callers must cope with the
    # mixed bool/int return type.
    if isinstance(room, dict)== False or isinstance(effect, dict)== False or (pos!=1 and pos!= 2):
        return 0
    continuous= effect.get('continuous')
    if isinstance(continuous, dict)== False:
        return False
    continuousType= continuous.get('continuous')
    continuousValue= continuous.get('continuousValue')
    if continuousType== "cardExistence":
        value= -2
    elif continuousType== "once":
        value= -1
        return True
    elif continuousType== "xTurnsFromNext":
        value= int(continuousValue)+1
    elif continuousType== "xTurnsFromNow":
        value= int(continuousValue)
    elif continuousType== "thisMatchEnd":
        value= -3
    elif continuousType== "thisTurnEnd":
        value= 1
    else:
        value= 0
    return value
@classmethod
def getEffectPointer(self, pos, pointer, targetRange):
    '''
    Work out which card collections in the room (mine/enemy, hand/board)
    an effect applies to.
    modified by tm 2013-01-27
    1. add comment
    2. restructured the logic
    @return: list of affected room keys ('cardPlay1', 'cardHand2', ...)
    '''
    if isinstance(targetRange, dict) == False or (pos!=1 and pos!= 2):
        return []
    returnList = []
    if targetRange.get('thisCard') == 1:
        # Effect is bound to the casting card's own side of the board.
        if pos == 1:
            returnList.append('cardPlay1')
        else:
            returnList.append('cardPlay2')
    else:
        field = targetRange.get('field', 'battleField')
        camp = targetRange.get('camp', 'all')
        # battleField -> cards in play; anything else -> cards in hand.
        card_type_key = (field == 'battleField' and 'cardPlay' or 'cardHand')
        #k = card_type_key + '1'
        #k2 = card_type_key + '2'
        # 'me'/'enemy' are relative to pos; raises KeyError on unknown camp.
        if pos == 1:
            returnList = {
                'me' : [card_type_key + '1'],
                'enemy' : [card_type_key + '2'],
                'all' : [card_type_key + '1', card_type_key + '2']
            }[camp]
        else:
            returnList = {
                'me' : [card_type_key + '2'],
                'enemy' : [card_type_key + '1'],
                'all' : [card_type_key + '1', card_type_key + '2']
            }[camp]
    return returnList
@classmethod
def getEffectTurnRedisKey(self, room, uniqid, turn, type= ''):
    # Map an effect type to its redis sorted-set key template.
    # Only `type` is inspected; room/uniqid/turn are accepted for signature
    # symmetry with the callers but are unused.  Returns None for unknown
    # types.
    if type== EFFECT_TYPE_STUN:
        key= REDIS_KEY_EFFECT_STUN_TURN
    elif type== EFFECT_TYPE_TAUNT:
        key= REDIS_KEY_EFFECT_TAUNT_TURN
    elif type== EFFECT_TYPE_WINDFURY:
        key= REDIS_KEY_EFFECT_WINDFURY_TURN
    else:
        return None
    return key
@classmethod
def setEffectTurn(self, room, uniqid, turn, type= ''):
    # Register `turn` remaining turns of the given effect on card `uniqid`
    # in the room's redis sorted set.  Returns False on bad arguments.
    if isinstance(room, dict)== False or isinstance(uniqid, int)== False or isinstance(turn, int)== False or type== '':
        return False
    room_id= room['roomId']
    key= self.getEffectTurnRedisKey(room, uniqid, turn, type)
    if key== None:
        return False
    key= key % (room_id)
    # Clamp the duration to [0, 20] turns.
    turn= min(20, turn)
    turn= max(0, turn)
    # NOTE(review): (key, member, score) argument order matches redis-py < 3;
    # redis-py 3 changed zadd to take a mapping -- confirm the client version.
    self.redis.zadd(key, uniqid, turn)
    # Give the per-room key a TTL so stale rooms expire on their own.
    if self.redis.ttl(key)== None:
        self.redis.expire(key, 3600)
    return True
@classmethod
def checkEffectTurn(self, room, uniqid, type= ''):
    '''
    Return True when card `uniqid` still has turns left on the given effect
    in this room's redis sorted set, False otherwise (including on bad
    arguments or an unknown effect type).
    '''
    if isinstance(room, dict)== False or isinstance(uniqid, int)== False or type== '':
        return False
    room_id= room['roomId']
    # BUGFIX: the original passed an undefined local `turn` here, raising
    # NameError on every call.  getEffectTurnRedisKey only inspects `type`,
    # so 0 is a safe placeholder for its unused turn parameter.
    key= self.getEffectTurnRedisKey(room, uniqid, 0, type)
    if key== None:
        return False
    key= key % (room_id)
    turn= self.redis.zscore(key, uniqid)
    # A missing member (None) or an expired counter (0) means no effect.
    if turn!=0 and turn!= None:
        return True
    else:
        return False
@classmethod
def updateEffectTurn(self, room, pos, uniqid, type= ''):
    '''
    Tick down the remaining-turns counter for the given effect on card
    `uniqid`; when it reaches zero, remove the redis entry and clear the
    card's status flag.  Returns False on bad arguments or when the card
    has no such effect.
    '''
    if isinstance(room, dict)== False or isinstance(uniqid, int)== False or type== '' or (pos!=1 and pos!= 2):
        return False
    room_id= room['roomId']
    # BUGFIX: the original passed an undefined local `turn` here (NameError);
    # getEffectTurnRedisKey only inspects `type`, so 0 is a safe placeholder.
    key= self.getEffectTurnRedisKey(room, uniqid, 0, type)
    if key== None:
        return False
    key= key % (room_id)
    # BUGFIX: the original clamped `turn` to [0, 20] *before* it was ever
    # assigned (another NameError) and the value was immediately overwritten
    # by the zscore below, so the clamp is dropped entirely.
    turn= self.redis.zscore(key, uniqid)
    if turn== None:
        return False
    elif turn== 1 or turn== 0:
        self.redis.zrem(key, uniqid)
        # BUGFIX: the original passed an extra positional 0 here, a
        # TypeError -- removeEffectTurn takes (room, pos, uniqid, type).
        self.removeEffectTurn(room, pos, uniqid, type)
        return True
    else:
        # NOTE(review): (key, member, amount) order matches redis-py < 3;
        # redis-py 3 swapped zincrby's argument order -- confirm version.
        new_turn= int(self.redis.zincrby(key, uniqid, -1))
        return True
@classmethod
def removeEffectTurn(self, room, pos, uniqid, type= ''):
    # Clear the status flag of an expired effect on card `uniqid` on the
    # board belonging to seat `pos`.  Returns False on bad arguments, an
    # unknown effect type, or a missing card.
    if isinstance(room, dict)== False or isinstance(uniqid, int)== False or type== '' or (pos!=1 and pos!= 2):
        return False
    if pos== 1:
        k= 'cardPlay1'
    else:
        k= 'cardPlay2'
    if type== EFFECT_TYPE_STUN:
        status= STATUS_STUN
    elif type== EFFECT_TYPE_TAUNT:
        status= STATUS_TAUNT
    elif type== EFFECT_TYPE_WINDFURY:
        status= STATUS_WINDFURY
    else:
        return False
    p, v= RoomModel.getRoomCardById(room[k], uniqid)
    if p== None:
        return False
    # XOR toggles the status bit.
    # NOTE(review): if the bit is already clear this SETS it instead of
    # clearing it -- an unconditional clear would be `status & ~bit`;
    # confirm the flag is guaranteed set whenever this runs.
    room[k][p]['status']= room[k][p]['status'] ^ status
    return True
@classmethod
def turnEndEffectCallBack(self, room, pos):
    # End-of-turn hook: tick down every timed effect tracked in redis for
    # this room.
    if isinstance(room, dict)== False or (pos!=1 and pos!= 2):
        return False
    keys= [REDIS_KEY_EFFECT_WINDFURY_TURN, REDIS_KEY_EFFECT_TAUNT_TURN, REDIS_KEY_EFFECT_STUN_TURN]
    types= [STATUS_WINDFURY, STATUS_TAUNT, STATUS_STUN]
    # NOTE(review): `key` is used as the raw template -- every other method
    # substitutes the room id first (key % room_id); confirm this is intended.
    # NOTE(review): updateEffectTurn compares its `type` argument against the
    # EFFECT_TYPE_* constants, but STATUS_* values are passed here -- verify
    # the two constant sets are interchangeable.
    def doEffectCallBack(key, tp):
        list= self.redis.zrange(key, 0, -1, withscores=False, score_cast_func=int)
        for uniqid in list:
            uniqid= int(uniqid)
            self.updateEffectTurn(room, pos, uniqid, tp)
    # Python 2: map() is eager; under Python 3 this would be a lazy no-op.
    map(doEffectCallBack, keys, types)
@classmethod
def getEffectTarget(cls, room, pos, effect, uniqids=[]):
    '''
    Work out the cards an effect should act on.
    modified by tm at 2014-01-22
    @param room: room state
    @param pos: requesting player's seat in the room (1: top, 2: bottom)
    @param effect: effect configuration
    @param uniqids: uniqids of the cards picked by the player
    @return: list of card uniqids matching the rules
    '''
    if isinstance(room, dict)==False or isinstance(effect, dict)==False or (pos!=1 and pos!=2):
        return []
    targetRange = effect['targetA'].get('range')
    pointer = effect['targetA'].get('pointer', {})
    targetObject = effect['object']
    if targetRange.get('thisCard') == 1:
        # Effect targets the casting card itself.
        return [int(targetObject)]
    returnCards = []
    cardIds = targetRange.get('cardID')
    # Branch for effects that name explicit card ids.
    if cardIds:
        enemyPos = (pos == 1 and 2 or 1)
        returnCards.extend(room['cardPlay'+str(pos)])
        returnCards.extend(room['cardHand'+str(enemyPos)])
        # NOTE(review): appending uniqids onto the very list being iterated
        # re-visits the appended ints (which lack ['_id']), and the pointer
        # filter below expects a dict, so this branch likely always ends in
        # [] -- needs a second look.
        for card in returnCards:
            if card['_id'] in cardIds:
                returnCards.append(card['uniqid'])
        return cls.__effectTargetPointerFilter(returnCards, pointer, uniqids, targetObject)
    tgCamp = targetRange.get('camp', 'all')
    tgField = targetRange.get('field', 'battleField')
    tgRole = targetRange.get('role', '')
    tgClass = targetRange.get('class', '')
    tgRace = targetRange.get('race', '')
    # NOTE(review): both range filters run eagerly even though only one
    # result is selected by tgField.
    methodDict = {
        'hands' : cls.__effectTargetRangeFilter(room, pos, 'cardHand', tgCamp, tgRole, tgClass, tgRace),
        'battleField' : cls.__effectTargetRangeFilter(room, pos, 'cardPlay', tgCamp, tgRole, tgClass, tgRace),
    }
    returnCards = methodDict.get(tgField, {})
    return cls.__effectTargetPointerFilter(returnCards, pointer, uniqids, targetObject)
@classmethod
def __effectTargetPointerFilter(cls, cards, pointer, uniqids, targetObject):
    '''
    Pick the final targets out of the in-range card set.
    @author: tm
    @param cards: dict of candidate cards keyed by uniqid
    @param pointer: pointer configuration
    @param targetObject: the casting card's uniqid
    @param uniqids: cards picked by the player
    @return: list of card uniqids matching the rules
    '''
    if isinstance(cards, dict) == False or isinstance(pointer, dict) == False or isinstance(uniqids, list) == False:
        return []
    all_uniqids = [int(val) for val in cards.keys()]
    pointer_type = pointer.get('pointerType', '')
    pointer_value = pointer.get('pointerValue', 0)
    if not pointer or not pointer_type:
        return all_uniqids
    # The client sends picked uniqids as strings; normalise them to int.
    uniqids = [int(val) for val in uniqids]
    # NOTE(review): every entry is computed eagerly, so random.sample runs
    # (consuming randomness) whatever the pointer type turns out to be.
    methodDict = {
        'playerPickX': (lambda x, y: [] if x < len(y) else y)(int(pointer_value), uniqids),
        'all': all_uniqids,
        'randomX': (lambda x, y: y if x >= len(y) else random.sample(y, x))(int(pointer_value), all_uniqids),
        'others': [val for val in all_uniqids if val != targetObject],
        'adjacent': cls.__pointerAdjacentFilter(cards, targetObject)
    }
    return methodDict.get(pointer_type, [])
@classmethod
def __effectTargetRangeFilter(cls, room, pos, tgFiledKey, tgCamp, tgRole, tgClass, tgRace):
    '''
    Filter the candidate cards by the effect's `range` settings.
    @author: tm 2014-01-22
    @param room: room state
    @param pos: requesting player's seat in the room (1: top, 2: bottom)
    @param tgFiledKey: key prefix of the zone to search (cardPlay or cardHand)
    The remaining parameters mirror the keys of the range config.
    @return: dict of matching cards keyed by uniqid
    '''
    targetCards = []
    originCards = {}
    returnCards = {}
    enemyPos = (pos == 1 and 2 or 1)
    if not tgCamp or tgCamp == 'all':
        # Both sides: merge the two zones and both origin-card maps.
        targetCards.extend(room.get(tgFiledKey+str(pos), []))
        targetCards.extend(room.get(tgFiledKey+str(enemyPos), []))
        originCards = dict(room.get('cardOrigin'+str(pos), {}), **(room.get('cardOrigin'+str(enemyPos), {})))
    else:
        targetCards = copy.deepcopy(tgCamp == 'me' and room.get(tgFiledKey+str(pos), []) or room.get(tgFiledKey+str(enemyPos), []))
        originCards = copy.deepcopy(tgCamp == 'me' and room.get('cardOrigin'+str(pos), {}) or room.get('cardOrigin'+str(enemyPos), {}))
    for card in targetCards:
        oriCard = originCards.get(str(card['uniqid']), {})
        if not oriCard:  # original card record is missing
            cls.log.error('the card with uniqid:<'+str(card['uniqid'])+'> is not exist in originCards')
            return {}
        # Each configured criterion must match the card's origin record.
        if tgRole:
            oType = oriCard.get('type', '')
            if not oType or oType not in tgRole:
                # targetCards.remove(card)
                continue
        if tgClass:
            oJob = oriCard.get('job', '')
            if not oJob or oJob not in tgClass:
                # targetCards.remove(card)
                continue
        if tgRace:
            oRace = oriCard.get('race', '')
            if not oRace or oRace not in tgRace:
                # targetCards.remove(card)
                continue
        # Keep locaX so the pointer stage can do adjacency checks.
        returnCards[card['uniqid']] = dict(oriCard, **{'locaX':card.get('locaX', 0)})
    return returnCards
@classmethod
def __pointerAdjacentFilter(cls, cards, targetObject):
    '''
    Collect the minions adjacent (left/right neighbour) to targetObject.
    @author: tm
    @param cards: dict of candidate cards keyed by uniqid
    @param targetObject: the casting card's uniqid
    @return: list of adjacent card uniqids
    '''
    # `in` replaces the Python-2-only has_key (same semantics, py3-safe).
    if targetObject not in cards:
        return []
    # BUGFIX: the range filter stores the board position under 'locaX'
    # (see __effectTargetRangeFilter); the original read 'localX' and
    # therefore always saw the default 0.
    localX = cards[targetObject].get('locaX', 0)
    # BUGFIX: the original guard was inverted (`not localX <= 0`), which
    # rejected every card with a valid (positive) position.
    if localX <= 0:
        return []
    leftX, rightX = localX - 1, localX + 1
    returnUniqids = []
    for k, card in cards.items():
        cardX = card.get('locaX', 0)
        if (leftX > 0 and cardX == leftX) or cardX == rightX:
            returnUniqids.append(card.get('uniqid'))
    return returnUniqids
@classmethod
def __effectTargetAttributeFilter(cls, cards, attribute=''):
    '''
    Filter the candidate cards by the effect's `attribute` setting.
    @author: tanming 2014-01-23
    '''
    if not attribute or not isinstance(cards, list):
        return cards
    if attribute == "injured":
        # Keep only damaged cards: positive max HP that exceeds current HP.
        return [card for card in cards
                if card.get('maxHp') > 0 and card.get('maxHp') > card.get('hp')]
    # Unknown attributes leave the card set untouched.
    return cards
|
import os
import random
from itertools import chain, product
from sys import stdout
from typing import List
import pygame as pg
from pygame import Surface
from pygame.rect import Rect
from asset import get_sprite
from config import MAP_WIDTH, MAP_HEIGHT, ROOM_WIDTH, ROOM_HEIGHT, TILE_WIDTH, TILE_HEIGHT, DOOR_POSITION, SCREEN_WIDTH, \
SCREEN_HEIGHT, MIN_DISTANCE_WC_BED, MAX_DISTANCE_WC_BED, CLOSING_DOORS_SWAPS, \
MAX_CLOSING_DOORS
from game_screen.furniture import BPaint, Furniture, GPaint, OPaint
from game_screen.lighting import draw_light_source, get_light_source_dampening
from game_screen.tile import WestWall, SouthWestCorner, WestOpenDoor, NorthOpenDoor, SouthOpenDoor, Floor, \
NorthEastCorner, \
EastOpenDoor, \
SouthEastCorner, EastWall, NorthWall, SouthWall, NorthWestCorner, NorthClosedDoor, SouthClosedDoor, WestClosedDoor, \
EastClosedDoor, BedsideLamp, BedTop, BedBottom, Tile, PorcelainFloor, PorcelainSouthWall, PorcelainNorthWall, \
PorcelainEastWall, PorcelainSouthEastCorner, PorcelainNorthEastCorner, PorcelainWestWall, PorcelainSouthWestCorner, \
PorcelainNorthWestCorner, PorcelainNorthClosedDoor, PorcelainSouthClosedDoor, PorcelainWestClosedDoor, \
PorcelainEastClosedDoor, PorcelainWestOpenDoor, PorcelainEastOpenDoor, PorcelainSouthOpenDoor, \
PorcelainNorthOpenDoor
# --- Module-level map state, populated by init()/_create() ---
rooms = None                    # 2-D grid [x][y] of Room instances
h_edges = None                  # horizontal edges: MAP_WIDTH x (MAP_HEIGHT + 1)
v_edges = None                  # vertical edges: (MAP_WIDTH + 1) x MAP_HEIGHT
initial_room = None             # the Bedroom the player starts in
final_room = None               # the WC room the player must reach
map_surface = None              # pre-rendered Surface of the whole map
closing_door_sound = None       # pygame Sound played when a door closes
closing_door_sequence = None    # edges, in the order they will close
closed_door_count = 10          # how many doors of the sequence count as closed
light_sources = None            # list of (x, y, radius) tuples
OutsideMap = object()           # sentinel returned for neighbours beyond the map
DIRECTIONS = ['north', 'east', 'south', 'west']
def get_dir(obj, dir):
    """Call *obj*'s accessor named by *dir* ('north'/'east'/'south'/'west')."""
    assert dir in DIRECTIONS
    accessor = getattr(obj, dir)
    return accessor()
def _outside_map(x, y):
    """Return True when the room coordinate (x, y) falls off the map grid."""
    inside = 0 <= x < MAP_WIDTH and 0 <= y < MAP_HEIGHT
    return not inside
class Room:
    """A single room cell on the map grid.

    Rooms live in the module-level ``rooms`` grid and reference the shared
    ``h_edges``/``v_edges`` grids for their four boundaries.
    """

    def __init__(self, x, y):
        # Grid coordinates in room units (not pixels).
        self.x = x
        self.y = y
        # BFS distance from the bedroom; filled in by _bfs_scan_creation().
        self.distance_to_bed = None
        self.furnitures: List[Furniture] = []

    def replace(self, class_name):
        """Swap this grid cell for a new room of type *class_name*, keeping distance_to_bed."""
        dtb = self.distance_to_bed
        rooms[self.x][self.y] = class_name(self.x, self.y)
        if dtb is not None:
            rooms[self.x][self.y].distance_to_bed = dtb

    @property
    def rect(self):
        """Pixel-space bounding rectangle of the room."""
        return Rect(self.x * ROOM_WIDTH * TILE_WIDTH, self.y * ROOM_HEIGHT * TILE_HEIGHT,
                    ROOM_WIDTH * TILE_WIDTH, ROOM_HEIGHT * TILE_HEIGHT)

    # Edges are shared between adjacent rooms: h_edges[x][y] is this room's
    # north edge, h_edges[x][y+1] its south; v_edges likewise for west/east.
    def north(self):
        return h_edges[self.x][self.y]

    def south(self):
        return h_edges[self.x][self.y + 1]

    def east(self):
        return v_edges[self.x + 1][self.y]

    def west(self):
        return v_edges[self.x][self.y]

    def add_furniture(self, furniture):
        self.furnitures.append(furniture)

    def get_tile(self, tile_x, tile_y) -> List[Tile]:
        """Return the tile stack at room-local (tile_x, tile_y): base tile first,
        then at most one furniture tile."""
        furniture_tiles = []
        for f in self.furnitures:
            if f.in_furniture(tile_x, tile_y):
                furniture_tiles.append(f.get_tile(*f.room_coords_to_furniture_coords(tile_x, tile_y)))
                break  # only the first matching furniture contributes
        # Base tile: corners/walls on the border, door tiles at DOOR_POSITION
        # (delegated to the edge objects), floor everywhere inside.
        if tile_x == 0:
            if tile_y == 0:
                base_tile = NorthWestCorner
            elif tile_y == DOOR_POSITION:
                base_tile = v_edges[self.x][self.y].get_tile(right=True)
            elif tile_y == ROOM_HEIGHT - 1:
                base_tile = SouthWestCorner
            else:
                base_tile = WestWall
        elif tile_x == DOOR_POSITION:
            if tile_y == 0:
                base_tile = h_edges[self.x][self.y].get_tile(bottom=True)
            elif tile_y == ROOM_HEIGHT - 1:
                base_tile = h_edges[self.x][self.y + 1].get_tile(bottom=False)
            else:
                base_tile = Floor
        elif tile_x == ROOM_WIDTH - 1:
            if tile_y == 0:
                base_tile = NorthEastCorner
            elif tile_y == DOOR_POSITION:
                base_tile = v_edges[self.x + 1][self.y].get_tile(right=False)
            elif tile_y == ROOM_HEIGHT - 1:
                base_tile = SouthEastCorner
            else:
                base_tile = EastWall
        else:
            if tile_y == 0:
                base_tile = NorthWall
            elif tile_y == ROOM_HEIGHT - 1:
                base_tile = SouthWall
            else:
                base_tile = Floor
        return [base_tile] + furniture_tiles

    def __str__(self):
        return '.'

    def __repr__(self):
        return "Room(%s, %s)" % (self.x, self.y)
class Bedroom(Room):
    """The starting room: distance 0, contains the bed and the bedside lamp."""

    # Fixed furniture tiles at room-local coordinates.
    _FIXED_TILES = {(1, 0): BedsideLamp, (2, 0): BedTop, (2, 1): BedBottom}

    def __init__(self, x, y):
        super().__init__(x, y)
        self.distance_to_bed = 0

    def get_tile(self, tile_x, tile_y):
        fixed = self._FIXED_TILES.get((tile_x, tile_y))
        if fixed is not None:
            return [fixed]
        return super().get_tile(tile_x, tile_y)

    def get_initial_position(self):
        """Pixel position where the player spawns (just beside the bed)."""
        px = self.x * ROOM_WIDTH * TILE_WIDTH + int(2.2 * TILE_WIDTH)
        py = self.y * ROOM_HEIGHT * TILE_HEIGHT + int(1.8 * TILE_HEIGHT)
        return (px, py)

    def __str__(self):
        return 'B'

    def __repr__(self):
        return "Bedroom(%s, %s)" % (self.x, self.y)
class WC(Room):
    """The goal room (bathroom), rendered entirely with porcelain tile variants.

    get_tile mirrors Room.get_tile but substitutes the porcelain tiles and
    passes porcelain=True to the shared edges.
    """

    def __str__(self):
        return 'W'

    def __repr__(self):
        return "WC(%s, %s)" % (self.x, self.y)

    def get_tile(self, tile_x, tile_y) -> List[Tile]:
        """Return the porcelain-styled tile stack at room-local coordinates."""
        furniture_tiles = []
        for f in self.furnitures:
            if f.in_furniture(tile_x, tile_y):
                furniture_tiles.append(f.get_tile(*f.room_coords_to_furniture_coords(tile_x, tile_y)))
                break  # at most one furniture tile per position
        # West border column.
        if tile_x == 0:
            if tile_y == 0:
                base_tile = PorcelainNorthWestCorner
            elif tile_y == DOOR_POSITION:
                base_tile = v_edges[self.x][self.y].get_tile(right=True, porcelain=True)
            elif tile_y == ROOM_HEIGHT - 1:
                base_tile = PorcelainSouthWestCorner
            else:
                base_tile = PorcelainWestWall
        # Doorway column: the north/south edges decide the tile at top/bottom.
        elif tile_x == DOOR_POSITION:
            if tile_y == 0:
                base_tile = h_edges[self.x][self.y].get_tile(bottom=True, porcelain=True)
            elif tile_y == ROOM_HEIGHT - 1:
                base_tile = h_edges[self.x][self.y+1].get_tile(bottom=False, porcelain=True)
            else:
                base_tile = PorcelainFloor
        # East border column.
        elif tile_x == ROOM_WIDTH - 1:
            if tile_y == 0:
                base_tile = PorcelainNorthEastCorner
            elif tile_y == DOOR_POSITION:
                base_tile = v_edges[self.x+1][self.y].get_tile(right=False, porcelain=True)
            elif tile_y == ROOM_HEIGHT - 1:
                base_tile = PorcelainSouthEastCorner
            else:
                base_tile = PorcelainEastWall
        # Interior columns.
        else:
            if tile_y == 0:
                base_tile = PorcelainNorthWall
            elif tile_y == ROOM_HEIGHT - 1:
                base_tile = PorcelainSouthWall
            else:
                base_tile = PorcelainFloor
        return [base_tile] + furniture_tiles
class Edge:
    """A boundary between two rooms (or between a room and the outside).

    Horizontal edges separate a room from the one above it, vertical edges
    from the one to its left: h_edges[x][y] is the north edge of room (x, y),
    v_edges[x][y] its west edge.
    """
    HORIZ = 1
    VERT = 2

    def __init__(self, x, y, dir):
        self.x = x
        self.y = y
        self.dir = dir  # Edge.HORIZ or Edge.VERT

    def replace(self, edge_class):
        """Swap this edge in the shared grid for a new instance of *edge_class*."""
        # Border edges must never be replaced (they stay outer walls).
        assert self.dir == Edge.HORIZ or 0 < self.x < MAP_WIDTH
        assert self.dir == Edge.VERT or 0 < self.y < MAP_HEIGHT
        if self.dir == Edge.HORIZ:
            target = h_edges
        elif self.dir == Edge.VERT:
            target = v_edges
        else:
            raise Exception("Unknown dir: %d" % self.dir)
        target[self.x][self.y] = edge_class(self.x, self.y, self.dir)

    def get_pixel_coords(self):
        """Pixel position at the midpoint of this edge (the doorway centre)."""
        if self.dir == Edge.HORIZ:
            return int((self.x + 0.5) * ROOM_WIDTH * TILE_WIDTH), self.y * ROOM_HEIGHT * TILE_HEIGHT
        else:
            return self.x * ROOM_WIDTH * TILE_WIDTH, int((self.y + 0.5) * ROOM_HEIGHT * TILE_HEIGHT)

    def north(self):
        """Room above a horizontal edge, or OutsideMap."""
        assert self.dir == Edge.HORIZ
        if _outside_map(self.x, self.y - 1):
            return OutsideMap
        return rooms[self.x][self.y - 1]

    def south(self):
        """Room below a horizontal edge, or OutsideMap."""
        assert self.dir == Edge.HORIZ
        if _outside_map(self.x, self.y):
            return OutsideMap
        return rooms[self.x][self.y]

    def east(self):
        """Room to the right of a vertical edge, or OutsideMap."""
        assert self.dir == Edge.VERT
        if _outside_map(self.x, self.y):
            return OutsideMap
        return rooms[self.x][self.y]

    def west(self):
        """Room to the left of a vertical edge, or OutsideMap."""
        assert self.dir == Edge.VERT
        if _outside_map(self.x - 1, self.y):
            return OutsideMap
        return rooms[self.x - 1][self.y]
class Wall(Edge):
    """A solid, impassable edge between two rooms (or the map border)."""
    passable = False

    def __str__(self):
        return '+'

    def get_tile(self, bottom=None, right=None, porcelain=False):
        """Return the wall tile class as seen from one side of the edge.

        For horizontal edges *bottom* selects the room below (True) or above;
        for vertical edges *right* selects the room to the right (True) or
        left.  *porcelain* picks the bathroom-styled variant.
        """
        if self.dir == Edge.HORIZ:
            assert bottom is not None
            if porcelain:
                return PorcelainNorthWall if bottom else PorcelainSouthWall
            return NorthWall if bottom else SouthWall
        assert right is not None
        if porcelain:
            return PorcelainWestWall if right else PorcelainEastWall
        return WestWall if right else EastWall
class ClosedDoor(Edge):
    """A door that is currently shut; it blocks movement like a wall."""
    passable = False

    def get_tile(self, bottom=None, right=None, porcelain=False):
        """Tile class for this door as seen from one adjacent room (see Wall.get_tile)."""
        if self.dir == Edge.HORIZ:
            assert bottom is not None
            if porcelain:
                return PorcelainNorthClosedDoor if bottom else PorcelainSouthClosedDoor
            return NorthClosedDoor if bottom else SouthClosedDoor
        assert right is not None
        if porcelain:
            return PorcelainWestClosedDoor if right else PorcelainEastClosedDoor
        return WestClosedDoor if right else EastClosedDoor

    def __str__(self):
        if self.dir == Edge.HORIZ:
            return '-'
        if self.dir == Edge.VERT:
            return '|'
        raise Exception("Unknown dir: %d" % self.dir)
def ClosingDoor(closing_priority):
    """Factory: build an Edge subclass that flips from open to closed over time.

    The returned class compares its fixed *closing_priority* against the
    module-global ``closed_door_count``: once enough doors have closed
    (priority < count), the edge presents (and behaves) as a ClosedDoor,
    otherwise as an OpenDoor.
    """
    class _ClosingDoor(Edge):
        @property
        def passable(self):
            return self.visible_state.passable

        @property
        def visible_state(self):
            # A throwaway OpenDoor/ClosedDoor supplying look & passability.
            if self.closing_priority >= closed_door_count:
                return OpenDoor(self.x, self.y, self.dir)
            else:
                return ClosedDoor(self.x, self.y, self.dir)

        def get_tile(self, *args, **kwargs):
            return self.visible_state.get_tile(*args, **kwargs)

        def __init__(self, x, y, dir):
            super().__init__(x, y, dir)
            # Captured from the factory argument; lower closes sooner.
            self.closing_priority = closing_priority

        def __str__(self):
            return str(self.visible_state)
    return _ClosingDoor
class OpenDoor(Edge):
    """A passable doorway between two rooms."""
    passable = True

    def __str__(self):
        return ' '

    def get_tile(self, bottom=None, right=None, porcelain=False):
        """Tile class for this doorway as seen from one adjacent room (see Wall.get_tile)."""
        if self.dir == Edge.HORIZ:
            assert bottom is not None
            if porcelain:
                return PorcelainNorthOpenDoor if bottom else PorcelainSouthOpenDoor
            return NorthOpenDoor if bottom else SouthOpenDoor
        assert right is not None
        if porcelain:
            return PorcelainWestOpenDoor if right else PorcelainEastOpenDoor
        return WestOpenDoor if right else EastOpenDoor
class MapCreationFailed(Exception):
    """Raised when a generated map is unusable (e.g. no candidate for the WC room)."""
    pass
def _fill_initial_surface():
    """Pre-render the entire map (every room's tile stack) onto map_surface."""
    global map_surface
    map_surface = Surface((MAP_WIDTH * ROOM_WIDTH * TILE_WIDTH, MAP_HEIGHT * ROOM_HEIGHT * TILE_HEIGHT))
    room_x = 0  # running pixel offset of the current room column
    for room_i in range(0, MAP_WIDTH):
        room_y = 0  # running pixel offset of the current room row
        for room_j in range(0, MAP_HEIGHT):
            for tile_i in range(0, ROOM_WIDTH):
                for tile_j in range(0, ROOM_HEIGHT):
                    x_coord = room_x + tile_i * TILE_WIDTH
                    y_coord = room_y + tile_j * TILE_HEIGHT
                    # Blit the whole stack in order: base tile, then furniture.
                    for t in rooms[room_i][room_j].get_tile(tile_i, tile_j):
                        map_surface.blit(get_sprite(t.sprite_id), (x_coord, y_coord))
            room_y += ROOM_HEIGHT * TILE_HEIGHT
        room_x += ROOM_WIDTH * TILE_WIDTH
def _replace_door_bitmap():
    """Re-blit the doorway tiles on both sides of the door that just closed.

    Called after closed_door_count is incremented; the affected edge is the
    one whose closing_priority equals the new count - 1.
    """
    global map_surface
    global closed_door_count
    global closing_door_sequence
    newly_closed_door: Edge = closing_door_sequence[closed_door_count - 1]
    if newly_closed_door.dir == Edge.VERT:
        # Redraw the doorway tile in each adjacent room (i = -1 left, 0 right).
        for i in range(-1, 1):
            room: Room = rooms[newly_closed_door.x + i][newly_closed_door.y]
            # NOTE(review): 6 is presumably ROOM_WIDTH - 1 — confirm config.
            tile_i = 6 if i == -1 else 0
            tile_j = DOOR_POSITION
            for tile in room.get_tile(tile_i, tile_j):
                sprite_id = tile.sprite_id
                x_coord = (room.x * ROOM_WIDTH * TILE_WIDTH) + tile_i * TILE_WIDTH
                y_coord = (room.y * ROOM_HEIGHT * TILE_HEIGHT) + tile_j * TILE_HEIGHT
                map_surface.blit(get_sprite(sprite_id), (x_coord, y_coord))
    if newly_closed_door.dir == Edge.HORIZ:
        # Same for a horizontal edge: rooms above (i = -1) and below (i = 0).
        for i in range(-1, 1):
            room: Room = rooms[newly_closed_door.x][newly_closed_door.y + i]
            tile_i = DOOR_POSITION
            # NOTE(review): 6 is presumably ROOM_HEIGHT - 1 — confirm config.
            tile_j = 6 if i == -1 else 0
            for tile in room.get_tile(tile_i, tile_j):
                sprite_id = tile.sprite_id
                x_coord = (room.x * ROOM_WIDTH * TILE_WIDTH) + tile_i * TILE_WIDTH
                y_coord = (room.y * ROOM_HEIGHT * TILE_HEIGHT) + tile_j * TILE_HEIGHT
                map_surface.blit(get_sprite(sprite_id), (x_coord, y_coord))
def _determine_initial_room():
    """Pick a uniformly random grid cell and turn it into the Bedroom (start room)."""
    global initial_room
    # randrange replaces the wasteful random.choice(tuple(range(...))) idiom.
    x = random.randrange(MAP_WIDTH)
    y = random.randrange(MAP_HEIGHT)
    rooms[x][y].replace(Bedroom)
    # replace() rebuilt the cell, so re-read the new Bedroom from the grid.
    initial_room = rooms[x][y]
def _bfs_scan_creation():
    """BFS from the bedroom: assign distances, pick the WC room, place the hint
    paint, and build the (partially shuffled) closing-door sequence.

    Raises MapCreationFailed when no room lies within the allowed WC distance.
    """
    global closing_door_sequence, final_room
    from collections import deque  # local import: keeps the module's import block untouched

    closing_door_sequence = []
    wc_candidates = []
    first_room_candidates = []
    # 1 -- Breadth First Search over passable edges, starting at the bedroom.
    # PERF: a deque gives O(1) pops; the old `queue[0], queue[1:]` re-sliced
    # the whole list on every iteration.
    queue = deque([initial_room])
    visited = {initial_room}
    while queue:
        room = queue.popleft()
        if MIN_DISTANCE_WC_BED <= room.distance_to_bed <= MAX_DISTANCE_WC_BED:
            wc_candidates.append(room)
        if room.distance_to_bed == 1:
            first_room_candidates.append(room)
        for d in DIRECTIONS:
            edge = get_dir(room, d)
            adj_room = get_dir(edge, d)
            if adj_room == OutsideMap:
                continue
            if not edge.passable:
                # NOTE(review): an interior wall is appended once from each
                # side, so the sequence can contain duplicates — kept as-is.
                closing_door_sequence.append(edge)
            elif adj_room not in visited:
                queue.append(adj_room)
                visited.add(adj_room)
                adj_room.distance_to_bed = room.distance_to_bed + 1
    # 2 -- Determine final room (WC Room)
    if not wc_candidates:
        raise MapCreationFailed('No candidates for final room.')
    final_room = random.choice(wc_candidates)
    final_room.replace(WC)
    final_room = rooms[final_room.x][final_room.y]  # replace() rebuilt the cell
    # Add paint to an adjacent room as a hint for the player.
    adjacent_room = random.choice(first_room_candidates)
    adjacent_room.add_furniture(BPaint(1, 4))
    # 3 -- Partially shuffle the closing-door sequence with bounded swaps.
    closing_doors_count = len(closing_door_sequence)
    for _ in range(CLOSING_DOORS_SWAPS):
        i = random.randrange(closing_doors_count)
        j = random.randrange(closing_doors_count)
        if i != j:
            closing_door_sequence[i], closing_door_sequence[j] = closing_door_sequence[j], closing_door_sequence[i]
    for i in range(min(closing_doors_count, MAX_CLOSING_DOORS)):
        closing_door_sequence[i].replace(ClosingDoor(i))
        # replace() installed a fresh edge object; re-read it from the grid.
        if closing_door_sequence[i].dir == Edge.HORIZ:
            closing_door_sequence[i] = h_edges[closing_door_sequence[i].x][closing_door_sequence[i].y]
        if closing_door_sequence[i].dir == Edge.VERT:
            closing_door_sequence[i] = v_edges[closing_door_sequence[i].x][closing_door_sequence[i].y]
def _init_sound():
    """Load the door-closing sound effect from the assets folder."""
    global closing_door_sound
    closing_door_sound = pg.mixer.Sound(os.path.join('assets', 'sfx_footsteps.wav'))
def _create():
    """Build the room/edge grids, then knock open doors until the map connects.

    Every edge starts as a Wall and every room as its own connected component;
    random walls between different components are replaced by OpenDoors and the
    components merged (randomised spanning-tree style) until one remains.
    """
    global rooms, h_edges, v_edges
    rooms = [
        [
            Room(i, j) for j in range(0, MAP_HEIGHT)
        ]
        for i in range(0, MAP_WIDTH)
    ]
    # One extra row/column of edges so every room has all four boundaries.
    h_edges = [
        [
            Wall(i, j, dir=Edge.HORIZ) for j in range(0, MAP_HEIGHT + 1)
        ]
        for i in range(0, MAP_WIDTH)
    ]
    v_edges = [
        [
            Wall(i, j, dir=Edge.VERT) for j in range(0, MAP_HEIGHT)
        ]
        for i in range(0, MAP_WIDTH + 1)
    ]

    def _get_component(r):
        # Map a room to its current component; OutsideMap passes through.
        if r == OutsideMap:
            return OutsideMap
        return [c for c in connex_components if r in c][0]

    # Each room starts as its own singleton component (frozensets are hashable).
    connex_components = {frozenset((rooms[i][j],)) for (i, j) in product(range(0, MAP_WIDTH),
                                                                         range(0, MAP_HEIGHT))}
    # For each component, which component lies across each (room, direction).
    connex_edges = {
        c: {
            (list(c)[0], dir): _get_component(get_dir(get_dir(list(c)[0], dir), dir))
            for dir in DIRECTIONS
        }
        for c in connex_components
    }
    while len(connex_components) > 1:
        c1 = random.choice(tuple(connex_components))
        room, dir = random.choice(tuple(connex_edges[c1].keys()))
        edge = get_dir(room, dir)
        adj_room = get_dir(edge, dir)
        if adj_room == OutsideMap:
            continue
        c2 = _get_component(adj_room)
        # NOTE(review): this relies on connex_edges never holding an edge back
        # into its own component (pruned at merge time below).
        assert c1 != c2
        edge.replace(OpenDoor)
        # Merge c1 and c2 into a single component.
        connex_components.remove(c1)
        connex_components.remove(c2)
        merged_c = frozenset.union(c1, c2)
        connex_components.add(merged_c)
        # Repoint every reference to the two old components at the merged one.
        for c in connex_edges:
            for (room, dir), cc in connex_edges[c].items():
                if cc in (c1, c2):
                    connex_edges[c][room, dir] = merged_c
        # The merged component's outgoing edges: union of both, minus
        # edges that now point back into itself.
        connex_edges[merged_c] = {
            (room, dir): cc
            for ((room, dir), cc) in chain(connex_edges[c1].items(), connex_edges[c2].items())
            if cc != merged_c
        }
def _position_light_sources():
    """Create the light sources: bedside lamp, WC room light, and the WC doorways."""
    global light_sources
    light_sources = []
    # Bedside lamp in the initial room.
    light_sources.append((initial_room.x * ROOM_WIDTH * TILE_WIDTH + 96,
                          initial_room.y * ROOM_HEIGHT * TILE_HEIGHT + 36,
                          128))
    # WC room light.
    # BUGFIX: the y coordinate previously used ROOM_WIDTH * TILE_WIDTH (mixed
    # up axes); every other y computation in this module uses height constants.
    x = final_room.x * ROOM_WIDTH * TILE_WIDTH + 228
    y = final_room.y * ROOM_HEIGHT * TILE_HEIGHT + 228
    light_sources.append((x, y, 278))
    # Small lights at each passable edge (doorway) of the WC.
    for d in DIRECTIONS:
        edge = get_dir(final_room, d)
        if edge.passable:
            x, y = edge.get_pixel_coords()
            light_sources.append((x, y, 96))
def _position_furniture():
    """Scatter paint cans into random rooms, skipping the start/goal rooms.

    Makes 20 placement attempts; attempts that land on the bedroom, the WC,
    or an already furnished room are silently skipped, so fewer than 20
    pieces may actually be placed.
    """
    for f in [BPaint, OPaint, OPaint, GPaint, GPaint] * 4:
        x = random.randint(0, MAP_WIDTH-1)
        y = random.randint(0, MAP_HEIGHT-1)
        if rooms[x][y] in (initial_room, final_room):
            continue
        if rooms[x][y].furnitures:
            continue
        # Random interior tile position (1..4 keeps it off the walls).
        i = random.randint(1, 4)
        j = random.randint(1, 4)
        rooms[x][y].add_furniture(f(i, j))
def init():
    """Generate a fresh map: layout, rooms, furniture, pre-rendered surface, lights."""
    _create()
    _init_sound()
    _determine_initial_room()
    _bfs_scan_creation()       # needs the bedroom, so runs after the line above
    _position_furniture()
    _fill_initial_surface()    # renders rooms + furniture, so placement comes first
    _position_light_sources()
def draw(screen, light_mask):
    """Blit the pre-rendered map and stamp every non-dampened light source."""
    screen.blit(map_surface, to_screen_coords(0, 0))
    for source_x, source_y, source_radius in light_sources:
        damp = get_light_source_dampening(source_x, source_y)
        if not damp:
            continue  # fully dampened sources emit no light at all
        draw_light_source(light_mask, source_x, source_y, int(source_radius*damp))
def close_door():
    """Close the next door in the sequence: bump the count, redraw, play the sound."""
    global closed_door_count
    global closing_door_sound
    closed_door_count += 1
    _replace_door_bitmap()  # redraw the tiles around the newly closed door
    pg.mixer.Channel(2).play(closing_door_sound)  # channel 2 reserved for door sfx
def get_room(coord_x, coord_y):
    """Map a pixel coordinate to the Room that contains it."""
    room_i = int(coord_x) // (ROOM_WIDTH * TILE_WIDTH)
    room_j = int(coord_y) // (ROOM_HEIGHT * TILE_HEIGHT)
    return rooms[room_i][room_j]
def get_tile(player_x, player_y) -> List[Tile]:
    """Return the tile stack under a map pixel position (see Room.get_tile)."""
    room = get_room(player_x, player_y)
    room_pixel_width = ROOM_WIDTH * TILE_WIDTH
    room_pixel_height = ROOM_HEIGHT * TILE_HEIGHT
    tile_x = (player_x % room_pixel_width) // TILE_WIDTH
    tile_y = (player_y % room_pixel_height) // TILE_HEIGHT
    return room.get_tile(tile_x, tile_y)
def random_point():
    """Return a uniformly random pixel coordinate strictly inside the map.

    BUGFIX: randint's *inclusive* upper bound could yield a point exactly on
    the right/bottom map edge, which would index one past the end of the
    ``rooms`` grid in get_room(); randrange excludes the bound.
    """
    return (random.randrange(MAP_WIDTH * ROOM_WIDTH * TILE_WIDTH),
            random.randrange(MAP_HEIGHT * ROOM_HEIGHT * TILE_HEIGHT))
def to_screen_coords(*args):
    """Convert map pixel coordinates (a Rect, or an x, y pair) to screen space.

    The player is always drawn at the screen centre, so the transform is a
    translation by (SCREEN/2 - player position).
    """
    from game_screen import player  # local import avoids a circular dependency
    if isinstance(args[0], Rect):
        return args[0].move(SCREEN_WIDTH / 2 - player.get_x(),
                            SCREEN_HEIGHT / 2 - player.get_y())
    assert len(args) == 2
    return args[0] + (SCREEN_WIDTH / 2 - player.get_x()), args[1] + SCREEN_HEIGHT / 2 - player.get_y()
def print_map():
    """Dump an ASCII picture of the map to stdout (debug helper)."""
    for j in range(0, MAP_HEIGHT):
        # North edges row: '+' corners with the edge glyph between them.
        for i in range(0, MAP_WIDTH):
            stdout.write('+')
            stdout.write('%s' % h_edges[i][j])
        stdout.write('+\n')
        # Rooms row: west edge glyph, then the room glyph, per column.
        for i in range(0, MAP_WIDTH):
            stdout.write('%s' % v_edges[i][j])
            stdout.write('%s' % rooms[i][j])
        stdout.write('%s\n' % v_edges[MAP_WIDTH][j])
    # Bottom border (the extra row of horizontal edges).
    for i in range(0, MAP_WIDTH):
        stdout.write('+')
        stdout.write('%s' % h_edges[i][MAP_HEIGHT])
    stdout.write('+\n')
|
from itertools import groupby
def run_length_encoding(string):
    """Encode *string* as [count, char] pairs, one per run of equal characters."""
    encoded = []
    for char, run in groupby(string):
        run_length = 0
        for _ in run:
            run_length += 1
        encoded.append([run_length, char])
    return encoded
|
from datetime import datetime
from sqlalchemy.inspection import inspect
from flask import request
from flask_login import current_user
from typing import Callable, Dict, Any
from waitlist.base import app
from waitlist.data.version import version
from waitlist.permissions import perm_manager
from waitlist.utility.config import cdn_eveimg, cdn_eveimg_webp, cdn_eveimg_js, influence_link, title
from waitlist.utility.settings import sget_insert
from waitlist.utility.i18n.locale import get_locale, get_langcode_from_locale
from waitlist.utility.mainmenu import main_nav
def eve_image(browser_webp: bool) -> Callable[[str, str], str]:
    """Build the image-URL macro used in templates.

    When the browser supports WebP and a WebP CDN pattern is configured, the
    returned function ignores the requested suffix and always serves 'webp';
    otherwise it formats the CDN URL with the caller-supplied suffix.
    """
    use_webp = browser_webp and cdn_eveimg_webp

    def _eve_image(path: str, suffix: str) -> str:
        return cdn_eveimg.format(path, 'webp' if use_webp else suffix)

    return _eve_image
def get_header_insert():
    """Return the admin-configured HTML snippet injected into the page header."""
    return sget_insert('header')
# set if it is the igb
@app.context_processor
def inject_data() -> Dict[str, Any]:
    """Expose common globals (version, permissions, CDN helpers, i18n) to all templates."""
    is_account = False
    if hasattr(current_user, 'type'):
        # current_user may lack a 'type' attribute (e.g. anonymous user objects).
        is_account = (current_user.type == "account")
    # Content negotiation: serve WebP images only to browsers advertising support.
    req_supports_webp = 'image/webp' in request.headers.get('accept', '')
    eve_image_macro: Callable[[str, str], str] = eve_image(req_supports_webp)
    return dict(version=version,
                perm_manager=perm_manager, get_header_insert=get_header_insert,
                eve_proxy_js=cdn_eveimg_js, eve_cdn_webp=cdn_eveimg_webp,
                browserSupportsWebp=req_supports_webp, eve_image=eve_image_macro,
                influence_link=influence_link, is_account=is_account,
                title=title, lang_code=get_langcode_from_locale(get_locale(app)),
                main_nav=main_nav
                )
def get_pk(obj):
    """Return the primary-key identity tuple of a SQLAlchemy model instance."""
    return inspect(obj).identity

# Make get_pk callable from every Jinja template.
app.jinja_env.globals.update(get_pk=get_pk)
|
import team, player, random
class Tournament:
    """Six-team tournament: fills teams from a roster and finds the top scorer.

    BUGFIX: the old defaults ``teams=[[], ...]`` and ``validator={}`` were
    mutable default arguments, shared by every instance constructed without
    explicit arguments; fresh containers are now created per instance.
    """

    def __init__(self, teams=None, validator=None, winner=None):
        self._winner = winner
        # Fresh containers per instance (behaviourally what callers expect).
        self._teams = teams if teams is not None else [[], [], [], [], [], []]
        self._validator = validator if validator is not None else {}

    @property
    def winner(self):
        return self._winner

    @property
    def teams(self):
        return self._teams

    @property
    def validator(self):
        return self._validator

    def make_teams(self, roster):
        """Randomly assign 30 distinct roster members round-robin across the teams.

        Returns the teams list.  (Removed a leftover debug print of the roster
        size.)  Stops early if the roster runs out of unused members, which
        previously caused an infinite loop.
        """
        placed = 1
        team_index = 0
        while placed <= 30:
            # Guard: no unused members left -> stop instead of spinning forever.
            if len(self.validator) >= len(set(roster)):
                break
            p = random.choice(roster)
            if p not in self.validator and len(self.teams[team_index]) < 6:
                self.validator[p] = "used"
                self.teams[team_index].append(p)
                placed += 1
                team_index += 1
                if team_index > 5:
                    team_index = 0
        return self.teams

    def compute_winner(self):
        """Return the team whose members' scores sum highest (first wins ties at 0)."""
        best_score = 0
        best_index = 0
        # enumerate() avoids the old index-loop and the `team` name that
        # shadowed the module-level `team` import.
        for index, members in enumerate(self.teams):
            total = sum(p.score for p in members)
            if total > best_score:
                best_score = total
                best_index = index
        return self.teams[best_index]
|
# Generated by Django 2.1.2 on 2018-11-28 10:39
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: constrain comment.rating to the 0-10 range."""

    dependencies = [
        ('blogs', '0006_comment_rating'),
    ]

    operations = [
        migrations.AlterField(
            model_name='comment',
            name='rating',
            field=models.IntegerField(blank=True, help_text='Оцінка продукту: залишає клієнт, макс. значення - 10 од.', null=True, validators=[django.core.validators.MaxValueValidator(10), django.core.validators.MinValueValidator(0)], verbose_name='Оцінка продукту'),
        ),
    ]
|
import os
import re
from decimal import Decimal
from datetime import datetime
import boto3
#import botocore
from botocore.exceptions import ClientError
from dynamodb_json import json_util
from openpyxl import load_workbook
# Environment-driven configuration (set on the Lambda function).
SHEET_NAME = str(os.environ['SHEET_NAME'])
RETAIL_TABLE = str(os.environ['RETAIL_TABLE'])
RETAIL_INFO_TABLE = str(os.environ['RETAIL_INFO_TABLE'])
# Matches report dates embedded in file names, e.g. "01-Jan-2020".
REGEX = "\\d{2}-\\w{3}-\\d{4}"
print('** Loading function **')
# AWS clients created once per container and reused across invocations.
s3 = boto3.client('s3')
dynamodb = boto3.client('dynamodb')
def parse_xlsx(event, context):
    """Lambda entry point: parse an uploaded xlsx report into DynamoDB.

    Skips files already recorded in the info table; otherwise parses rows
    newer than the last processed report date, saves the readings, then
    records the file as processed.
    """
    # S3 put-event shape: first record carries the bucket and object key.
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = event['Records'][0]['s3']['object']['key']
    # Was already processed?
    if was_already_processed(key):
        return "INFO: {} was already processed... nothing to do here...".format(key)
    last_report_date = get_last_report_date()
    # Parse xlsx file to data
    data = parse_xlsx_to_data(bucket, key, last_report_date)
    # Save items
    save_retail_data(data)
    # Save meta info
    save_retail_info_details(key)
    return "All tasks done!"
def was_already_processed(key):
    """Return True if *key* already has an entry in the info table.

    Best-effort: on a DynamoDB client error the error is printed and False is
    returned explicitly (the old code fell through to an implicit None), so
    the file gets processed rather than silently dropped.
    """
    print("Checking if it was already processed before")
    try:
        response = dynamodb.get_item(
            Key={
                'file_name': {
                    'S': key,
                },
            },
            TableName=RETAIL_INFO_TABLE,
        )
    except ClientError as client_exception:
        print(client_exception)
        return False
    # Membership test on the dict directly, not on .keys().
    if 'Item' in response:
        print('INFO: File was already processed!')
        return True
    return False
def get_last_report_date():
    """Return the report_date of the most recently processed file, or '19700101'.

    BUGFIX: the old code sorted ascending by processed_at and took index 0,
    i.e. it returned the *oldest* entry's report date.  It also returned an
    implicit None on ClientError, which crashed the caller's date comparison;
    now it fails open with the default date so everything gets reprocessed.
    """
    print("Getting the last report date")
    default_date = '19700101'
    try:
        response = dynamodb.scan(
            TableName=RETAIL_INFO_TABLE
        )
    except ClientError as client_exception:
        print(client_exception)
        return default_date
    if 'Items' in response and response['Items']:
        result = json_util.loads(response)
        # Latest processed entry ('YYYY-MM-DD HH:MM:SS' strings sort chronologically).
        item = max(result['Items'], key=lambda i: i['processed_at'])
        if 'report_date' in item:
            return item['report_date']
    return default_date
def parse_xlsx_to_data(bucket, key, report_date):
    """Download the workbook from S3 and parse the rows newer than *report_date*."""
    local_path = get_s3_file(bucket, key)
    print("Parsing file {} filtering by date greater than {}".format(key, report_date))
    return parse_sheet(local_path, report_date)
def parse_sheet(file, report_date):
    """Parse the configured sheet into [{loc_retail, date_retail, kwh}, ...].

    Layout assumption (TODO confirm on a real workbook): row 1 holds dates
    from column 2 onward, column 1 holds locations from row 2 downward, and
    each cell holds a kWh reading.  Only columns whose date is greater than
    *report_date* are kept; dates are YYYYMMDD strings, so lexicographic
    order matches chronological order.
    """
    book = load_workbook(file)
    sheet = book[SHEET_NAME]
    #dates = [(sheet.cell(1, col_index).value).strftime("%Y%m%d") for col_index in range(2, sheet.max_column)]
    # Collect header dates until the first empty header cell.
    dates = []
    col_index = 2
    while True:
        cell_value = sheet.cell(1, col_index).value
        if (cell_value is None):
            break
        dates.append(cell_value.strftime("%Y%m%d"))
        col_index += 1
    row = 2
    data = []
    while True:
        location = sheet.cell(row, 1).value
        if location is None:
            break  # first empty location cell ends the data rows
        # NOTE(review): iterating to sheet.max_column - 1 may skip the last
        # data column when max_column == len(dates) + 1 — verify with a file.
        for col in range(1, sheet.max_column - 1):
            if dates[col - 1] > report_date:
                d = {
                    'loc_retail':location,
                    'date_retail':dates[col - 1],
                    'kwh': Decimal(str(sheet.cell(row, col + 1).value))
                }
                data.append(d)
        row = row + 1
    return data
def get_s3_file(bucket, key):
    """Download s3://<bucket>/<key> into Lambda's /tmp storage and return the path."""
    print("Download file from S3 - File name: "+key)
    # NOTE: keys containing '/' would need the directory to exist under /tmp.
    destination = '/tmp/' + key
    s3.download_file(bucket, key, destination)
    return destination
def save_retail_data(retails):
    """Upsert each reading's kWh value keyed by (loc_retail, date_retail)."""
    print("Saving items to dynamoDB")
    for retail in retails:
        # update_item creates the item when absent, so this is an upsert.
        response = dynamodb.update_item(
            TableName=RETAIL_TABLE,
            ExpressionAttributeNames={
                '#KWH': 'kwh',
            },
            ExpressionAttributeValues={
                ':kwh': {
                    'N':str(retail['kwh']),
                },
            },
            Key={
                'loc_retail': {
                    'S':retail['loc_retail'],
                },
                'date_retail': {
                    'S':retail['date_retail']
                },
            },
            UpdateExpression='SET #KWH = :kwh',
        )
def save_retail_info_details(key):
    """Record that *key* was processed, with the report date parsed from its name."""
    print("Saving info data for {} in table {}".format(key, RETAIL_INFO_TABLE))
    # NOTE(review): match is None (AttributeError below) when the file name
    # lacks a 'DD-MMM-YYYY' date — confirm upstream guarantees the pattern.
    match = re.search(REGEX, str(key))
    report_date_obj = datetime.strptime(match.group(0), "%d-%b-%Y")
    report_date = report_date_obj.strftime("%Y%m%d")
    response = dynamodb.put_item(
        TableName=RETAIL_INFO_TABLE,
        Item={
            'file_name': {
                'S':str(key),
            },
            'processed_at': {
                'S':datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
            },
            'report_date': {
                'S':report_date,
            },
        }
    )
    print(response)
    return response
# Generated by Django 3.2.2 on 2021-05-07 09:40
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Auto-generated migration: set pub_date defaults on comment and post.

    NOTE(review): the default is the fixed timestamp captured when the
    migration was generated, not "now" — the model probably wanted
    auto_now_add or a timezone.now callable.  Confirm before relying on it.
    """

    dependencies = [
        ('blog_app', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='comment',
            name='pub_date',
            field=models.DateTimeField(default=datetime.datetime(2021, 5, 7, 9, 40, 51, 25366, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='post',
            name='pub_date',
            field=models.DateTimeField(default=datetime.datetime(2021, 5, 7, 9, 40, 51, 25366, tzinfo=utc)),
        ),
    ]
|
#!/usr/bin/env python3
"""thread.py: The threading manager file for the CarSoft project."""
__author__ = "Rhys Read"
__copyright__ = "Copyright 2019, Rhys Read"
import logging
import threading
class ThreadManager(object):
    """Tracks daemon worker threads for the application.

    Intended to be used as a single shared instance; constructing a second
    one logs a warning but is not prevented.
    """
    instance = None  # last-created instance (informal singleton)

    def __init__(self):
        if ThreadManager.instance is not None:
            logging.warning('ThreadManager repeat instance occurrence. Please check as this is undesirable.')
        ThreadManager.instance = self
        self.__threads = []

    def add_task(self, func, args: tuple, start=True):
        """Register func(*args) as a daemon thread; optionally start it immediately."""
        thread = threading.Thread(target=func, args=args, daemon=True)
        self.__threads.append(thread)
        if start:
            thread.start()
        return thread

    def check_threads(self, remove_complete=True):
        """Drop finished threads from the registry.

        BUGFIX: the original called list.remove() while iterating the same
        list, which skips the element after each removal.  Threads that were
        never started (ident is None) are kept so start() can still run them.
        """
        if remove_complete:
            self.__threads = [t for t in self.__threads
                              if t.is_alive() or t.ident is None]

    def start(self):
        """Start every registered thread that has not been started yet.

        BUGFIX: the original tested is_alive(), but a *finished* thread is
        also not alive, and calling start() on it raises RuntimeError.
        A thread that was never started has ident None.
        """
        for thread in self.__threads:
            if thread.ident is None:
                thread.start()
|
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from dataclasses import dataclass
from pants.backend.terraform.target_types import TerraformModuleTarget
from pants.backend.terraform.tool import TerraformTool
from pants.core.goals.tailor import (
AllOwnedSources,
PutativeTarget,
PutativeTargets,
PutativeTargetsRequest,
)
from pants.engine.fs import PathGlobs, Paths
from pants.engine.internals.selectors import Get
from pants.engine.rules import collect_rules, rule
from pants.engine.unions import UnionRule
from pants.util.dirutil import group_by_dir
from pants.util.logging import LogLevel
@dataclass(frozen=True)
class PutativeTerraformTargetsRequest(PutativeTargetsRequest):
    """Marker request type so tailor can dispatch to the Terraform rule below."""
    pass
@rule(level=LogLevel.DEBUG, desc="Determine candidate Terraform targets to create")
async def find_putative_terrform_module_targets(
    request: PutativeTerraformTargetsRequest,
    terraform: TerraformTool,
    all_owned_sources: AllOwnedSources,
) -> PutativeTargets:
    """Propose one `terraform_module` target per directory of unowned *.tf files.

    (The function name keeps the historical 'terrform' typo; renaming would
    change the registered rule identifier.)
    """
    if not terraform.tailor:
        # Tailoring for Terraform can be disabled via the subsystem option.
        return PutativeTargets()
    all_terraform_files = await Get(Paths, PathGlobs, request.path_globs("*.tf"))
    # Only files not covered by an existing target need a new one.
    unowned_terraform_files = set(all_terraform_files.files) - set(all_owned_sources)
    putative_targets = [
        PutativeTarget.for_target_type(
            TerraformModuleTarget,
            path=dirname,
            name=None,
            triggering_sources=sorted(filenames),
        )
        for dirname, filenames in group_by_dir(unowned_terraform_files).items()
    ]
    return PutativeTargets(putative_targets)
def rules():
    """Register this backend's rules plus the tailor union entry."""
    tailor_registration = UnionRule(PutativeTargetsRequest, PutativeTerraformTargetsRequest)
    return [*collect_rules(), tailor_registration]
|
from django.contrib import admin
from properties.models import Address, Property
@admin.register(Address)
class AddressAdmin(admin.ModelAdmin):
    """Default admin interface for Address; no customisation yet."""
    pass
@admin.register(Property)
class PropertyAdmin(admin.ModelAdmin):
    """Default admin interface for Property; no customisation yet."""
    pass
|
from django.shortcuts import render
import requests
from datetime import datetime
from bs4 import BeautifulSoup
URL_BASE = "https://www.nbrb.by/api/exrates/rates?ondate="
def core_kurs_dollar(request):
    """Return the NBRB official USD rate formatted as '$<rate>'."""
    # BUGFIX: the NBRB API's ondate parameter is an ISO date (YYYY-MM-DD);
    # '%y' produced a two-digit year.  Also dropped params=locals(), which
    # leaked every local variable (url, date, ...) into the query string.
    date = datetime.now().strftime("%Y-%m-%d")
    periodicity = "&periodicity=0"
    data = requests.get(URL_BASE + date + periodicity).json()
    # Look the currency up by abbreviation instead of a fragile positional index.
    kurs_dollar = next(c['Cur_OfficialRate'] for c in data if c['Cur_Abbreviation'] == 'USD')
    return f'${kurs_dollar}'
def currency(request):
    """Render the currency page: NBRB official rates plus myfin buy/sell rates."""
    # BUGFIX: the NBRB API needs a 4-digit year ('%Y'); '%y' gave e.g. '24-01-05'.
    # Dropped params=locals(), which leaked all locals into the query string.
    date = datetime.now().strftime("%Y-%m-%d")
    date_html = datetime.now().strftime("%d.%m.%y")
    periodicity = "&periodicity=0"
    data = requests.get(URL_BASE + date + periodicity).json()
    # Look currencies up by abbreviation instead of fragile hard-coded indices.
    rates = {c['Cur_Abbreviation']: c['Cur_OfficialRate'] for c in data}
    dollar_nb = rates['USD']
    euro_nb = rates['EUR']
    rosrubl_nb = rates['RUB']
    link = "https://myfin.by/currency/minsk"
    text = requests.get(link, headers={'User-agent': 'Super Bot Power Level Over 9000'}).text
    soup = BeautifulSoup(text, 'html.parser')
    parser_all = soup.find_all('td')
    # NOTE(review): the <td> indices below depend on myfin.by's markup and
    # will silently shift on any site redesign.
    dollar_pok = parser_all[1].text.replace('\n', '')
    dollar_prod = parser_all[2].text.replace('\n', '')
    euro_pok = parser_all[6].text.replace('\n', '')
    euro_prod = parser_all[7].text.replace('\n', '')
    rosrubl_pok = parser_all[11].text.replace('\n', '')
    rosrubl_prod = parser_all[12].text.replace('\n', '')
    context = {'dollar_nb': dollar_nb, 'euro_nb': euro_nb,
               'rosrubl_nb': rosrubl_nb, 'date': date, 'date_html': date_html,
               'dollar_pok': dollar_pok, 'dollar_prod': dollar_prod,
               'euro_pok': euro_pok, 'euro_prod': euro_prod,
               'rosrubl_pok': rosrubl_pok, 'rosrubl_prod': rosrubl_prod,
               }
    return render(request, 'currency/currency.html', context)
|
from .cbam import (
resnet18_cbam,
resnet34_cbam,
resnet50_cbam,
resnet101_cbam,
resnet152_cbam,
)
from .efficientnet import (
efficientnet_b0,
efficientnet_b1,
efficientnet_b2,
efficientnet_b3,
efficientnet_b4,
efficientnet_b5,
efficientnet_b6,
efficientnet_b7,
)
from .msv import resnet50_msv
from .wsl import (
resnext101_32x8d_wsl,
resnext101_32x16d_wsl,
resnext101_32x32d_wsl,
resnext101_32x48d_wsl,
)
from .xrv import (
densenet_all_xrv,
densenet_chex_xrv,
densenet_mimic_ch_xrv,
densenet_mimic_nb_xrv,
densenet_nih_xrv,
densenet_pc_xrv,
densenet_rsna_xrv,
)
|
from django.shortcuts import render
from django.http import HttpResponse
from assigment1.settings import BASE_DIR
import os
def frequency(request, filename):
    """Serve a word-frequency report for a text file under static/templates.

    BUGFIX: dict.has_key() was removed in Python 3, so the old code always
    raised AttributeError, which the bare except then misreported as a
    missing file.  The file is now closed deterministically and only
    file-system errors produce the "no file" response.
    """
    try:
        fileroot = os.path.join(os.path.dirname(BASE_DIR), "static", "templates", filename)
        with open(fileroot) as text_file:
            words = text_file.read().split(" ")
        # Count occurrences directly instead of accumulating duplicate lists.
        counts = {}
        for word in words:
            counts[word] = counts.get(word, 0) + 1
        result = "Name: %s <br> Words: <br>" % filename
        for word, count in counts.items():
            result += "%s: %d <br>" % (word, count)
        return HttpResponse(result)
    except OSError:
        # Only file-system errors mean "missing file"; other bugs should surface.
        return HttpResponse("There is no file named as %s" % filename)
|
# -*- coding:utf-8 -*-
# author: will
import datetime
import json
import re
import time
import requests
from flask import request, jsonify, g
from app import db
from app.models import GoodsOrder, Goods, UserAddress, GoodsImage, ExpressInfo, Admin
from common.error_code import ErrorCode
from common.response import build_response
from params_service.goods.pc_goods_order_service import LsParamsCheck, DetailParamsCheck, ExpressParamsCheck
from utils.log_service import Logging
from utils.time_service import unix_time
from utils.to_dict import query_to_dict
from utils.user_service.login import login_required, admin_required
from utils.user_service.wechat_pay import OrderQuery
from view_service.goods.pc_goods_order_service import ls_data_service, detail_data_service, close_data_service, \
express_data_service
from . import api_goods
# PC--订单分类列表
@api_goods.route('/pc_goods_order_ls', methods=['POST'])
@login_required
@admin_required
def pc_goods_order_ls():
    """PC admin endpoint: paged/classified listing of goods orders."""
    try:
        res = request.get_json()
        Logging.logger.info('request_args:{0}'.format(res))
        # Validate the payload first; bail out with the checker's error code.
        params_status, (code, msg) = LsParamsCheck.ls_params_check(res)
        if not params_status:
            return build_response(errno=code, errmsg=msg)
        # Delegate the query to the service layer: returns (orders, total count).
        data_status, data = ls_data_service(res)
        if not data_status:
            return build_response(errno=data[0], errmsg=data[1])
        doc = dict(order_list=data[0], count=data[1])
        return build_response(doc)
    except Exception as e:
        # Catch-all boundary: log and map to the generic internal-error response.
        Logging.logger.error('errmsg:{0}'.format(e))
        return build_response(errno=ErrorCode.Internet_error[0], errmsg=ErrorCode.Internet_error[1])
# PC--订单搜索
@api_goods.route('/pc_goods_order_query', methods=['POST'])
@login_required
@admin_required
def pc_goods_order_query():
    """Search goods orders by order number and/or fuzzy goods name.

    POST JSON: order_num (optional), goods_name (optional, substring match),
    page (default 1), pagesize (default 10).
    Returns errno/errmsg plus order_list (each order joined with its goods
    name, thumbnail url and shipping address) and the total count.
    """
    try:
        res = request.get_json()
        order_num = res.get('order_num')
        goods_name = res.get('goods_name')
        page = res.get('page', 1)
        pagesize = res.get('pagesize', 10)
        Logging.logger.info('request_args:{0}'.format(res))
        # NOTE(review): the sibling handler uses LsParamsCheck.ls_params_check;
        # confirm a `params_check` method actually exists on LsParamsCheck.
        params_status, (code, msg) = LsParamsCheck.params_check(res)
        if not params_status:
            return jsonify(errno=code, errmsg=msg)
        if order_num:
            if goods_name:
                # Both filters: goods whose name fuzzy-matches, then exact order number.
                goods = Goods.query.filter(Goods.name.like("%" + goods_name + "%")).all()
                goods_id_ls = [x.id for x in goods]
                orders = GoodsOrder.query.filter(GoodsOrder.goods_id.in_(goods_id_ls),
                                                 GoodsOrder.order_num == order_num).order_by(
                    GoodsOrder.create_time.desc()).paginate(page, pagesize, False)
            else:
                # Order number only.
                orders = GoodsOrder.query.filter_by(order_num=order_num).order_by(GoodsOrder.create_time.desc()).paginate(page, pagesize, False)
        else:
            if goods_name:
                # Goods name only.
                goods = Goods.query.filter(Goods.name.like("%" + goods_name + "%")).all()
                goods_id_ls = [x.id for x in goods]
                orders = GoodsOrder.query.filter(GoodsOrder.goods_id.in_(goods_id_ls)).order_by(
                    GoodsOrder.create_time.desc()).paginate(page, pagesize, False)
            else:
                # No filters: plain paginated listing.
                orders = GoodsOrder.query.paginate(page, pagesize, False)
        count = orders.total
        # orders = orders[(page - 1) * pagesize:page * pagesize]
        order_list = list()
        for order in orders.items:
            order_dict = dict()
            order_dict['order_id'] = order.id
            order_dict['order_num'] = order.order_num
            order_dict['price'] = order.price
            order_dict['postage'] = order.postage
            order_dict['order_status'] = order.order_status
            order_dict['create_time'] = str(order.create_time)
            goods_id = order.goods_id
            goods_obj = Goods.query.get(goods_id)
            order_dict['goods_name'] = goods_obj.name
            # is_min == 1 appears to select the thumbnail image -- TODO confirm.
            img_obj = GoodsImage.query.filter(GoodsImage.goods_id == goods_id, GoodsImage.is_min == 1).first()
            order_dict['img_url'] = img_obj.img_url
            address_id = order.address_id
            address = UserAddress.query.get(address_id)
            address_dict = dict()
            address_dict['name'] = address.name
            address_dict['phone'] = address.phone
            address_dict['provence'] = address.provence
            address_dict['city'] = address.city
            address_dict['area'] = address.area
            address_dict['detail'] = address.detail
            address_dict['post_num'] = address.post_num
            order_dict['address_data'] = address_dict
            order_list.append(order_dict)
        return jsonify(errno=0, errmsg='ok', order_list=order_list, count=count)
    except Exception as e:
        Logging.logger.error('errmsg:{0}'.format(e))
        return jsonify(errno=-1, errmsg="网络错误")
# PC--查询用户微信支付状态
@api_goods.route('/wechat_pay_query', methods=['POST'])
# @login_required
# @admin_required
def wechat_pay_query():
    """Query WeChat Pay for the payment state of an order by order_num."""
    try:
        payload = request.get_json()
        order_num = payload.get('order_num')
        Logging.logger.info('request_args:{0}'.format(payload))
        order = GoodsOrder.query.filter_by(order_num=order_num).first()
        if order is None:
            return jsonify(errno=-1, errmsg='订单不存在')
        pay = OrderQuery()
        # Build the XML request body for WeChat's order-query endpoint.
        xml_body = pay.get_xml_data(order.transaction_id)
        resp = requests.post(pay.url, xml_body.encode("utf-8"),
                             headers={'Content-Type': 'application/xml'})
        parsed = pay.xmlToArray(resp.content)
        Logging.logger.info('微信订单查询返回数据:{0}'.format(parsed))
        return jsonify(errno=0, errmsg="OK", data=parsed)
    except Exception as e:
        Logging.logger.error('errmsg:{0}'.format(e))
        return jsonify(errno=-1, errmsg='网络异常')
# PC--发货
@api_goods.route('/bulk_send_goods', methods=['POST'])
@login_required
@admin_required
def bulk_send_goods():
    """Mark a paid order (status 2) as shipped (status 3) and record express info."""
    try:
        payload = request.get_json()
        order_id = payload.get('order_id')
        express_num = payload.get('express_num')
        company = payload.get('company')
        remark = payload.get('remark')
        admin_id = g.user_id
        Logging.logger.info('request_args:{0}'.format(payload))
        order = GoodsOrder.query.get(order_id)
        if order is None:
            return jsonify(errno=-1, errmsg='订单不存在', order_id=order_id)
        if order.order_status != 2:
            return jsonify(errno=-1, errmsg='当前订单状态不支持该操作', order_id=order_id)
        # Flip the order to "shipped" and stamp the shipping time.
        order.order_status = 3
        order.send_time = datetime.datetime.now()
        express = ExpressInfo()
        express.admin_id = admin_id
        express.order_id = order_id
        express.express_num = express_num
        express.company = company
        express.remark = remark
        db.session.add(order)
        db.session.add(express)
        db.session.commit()
        return jsonify(errno=0, errmsg="OK")
    except Exception as e:
        Logging.logger.error('errmsg:{0}'.format(e))
        db.session.rollback()
        return jsonify(errno=-1, errmsg='网络异常')
# PC--订单详情
@api_goods.route('/pc_goods_order_detail', methods=['POST'])
@login_required
@admin_required
def pc_goods_order_detail():
    """Return full detail data for a single order, looked up by integer order_id."""
    try:
        payload = request.get_json()
        order_id = payload.get('order_id')
        Logging.logger.info('request_args:{0}'.format(payload))
        if not isinstance(order_id, int):
            return jsonify(errno=-1, errmsg='参数错误')
        if GoodsOrder.query.get(order_id) is None:
            return jsonify(errno=-1, errmsg='订单不存在', order_id=order_id)
        # Assemble the detail payload in the service layer.
        return jsonify(errno=0, errmsg="ok", data=detail_data_service(payload))
    except Exception as e:
        Logging.logger.error('errmsg:{0}'.format(e))
        return jsonify(errno=ErrorCode.Internet_error[0], errmsg=ErrorCode.Internet_error[1])
# PC--订单关闭
@api_goods.route('/pc_goods_order_close', methods=['POST'])
@login_required
@admin_required
def pc_goods_order_close():
    """Close an order on behalf of the logged-in admin."""
    try:
        payload = request.get_json()
        operator_id = g.user_id
        Logging.logger.info('request_args:{0}'.format(payload))
        ok, result = DetailParamsCheck.detail_params_check(payload)
        if not ok:
            return jsonify(errno=result[0], errmsg=result[1])
        close_data_service(payload, operator_id)
        return jsonify(errno=0, errmsg="ok")
    except Exception as e:
        Logging.logger.error('errmsg:{0}'.format(e))
        return jsonify(errno=ErrorCode.Internet_error[0], errmsg=ErrorCode.Internet_error[1])
# PC--订单的物流信息
@api_goods.route('/pc_express_info', methods=['POST'])
@login_required
@admin_required
def pc_express_info():
    """Return the express (shipping) record of a shipped order, or None."""
    try:
        payload = request.get_json()
        order_id = payload.get('order_id')
        Logging.logger.info('request_args:{0}'.format(payload))
        if not isinstance(order_id, int):
            return jsonify(errno=ErrorCode.params_type_error[0], errmsg=ErrorCode.params_type_error[1])
        order = GoodsOrder.query.get(order_id)
        if order is None:
            return jsonify(errno=ErrorCode.order_not_exist[0], errmsg=ErrorCode.order_not_exist[1])
        # Statuses 1 (unpaid) and 2 (paid, not shipped) have no express record yet.
        if order.order_status in (1, 2):
            return jsonify(errno=ErrorCode.order_status_error[0], errmsg=ErrorCode.order_status_error[1])
        express = ExpressInfo.query.filter_by(order_id=order_id).first()
        express_dict = query_to_dict(express) if express else None
        return jsonify(errno=0, errmsg="ok", data=express_dict)
    except Exception as e:
        Logging.logger.error('errmsg:{0}'.format(e))
        return jsonify(errno=ErrorCode.Internet_error[0], errmsg=ErrorCode.Internet_error[1])
# PC--订单的物流信息
@api_goods.route('/express_info', methods=['POST'])
def express_info():
    """Return the express (shipping) record for an order (no auth required).

    POST JSON is validated by ExpressParamsCheck; data assembly is delegated
    to express_data_service.
    """
    try:
        res = request.get_json()
        Logging.logger.info('request_args:{0}'.format(res))
        params_status, result = ExpressParamsCheck.express_params_check(res)
        if not params_status:
            return jsonify(errno=result[0], errmsg=result[1])
        # Removed leftover debug statement print("---") that polluted stdout.
        express_dict = express_data_service(res)
        return jsonify(errno=0, errmsg="ok", data=express_dict)
    except Exception as e:
        Logging.logger.error('errmsg:{0}'.format(e))
        return jsonify(errno=ErrorCode.Internet_error[0], errmsg=ErrorCode.Internet_error[1])
# PC退款申请审核
@api_goods.route('/goods_order_refund_check', methods=['POST'])
@login_required
@admin_required
def refund_check():
    """Approve a refund request: set order_status to -1 and record the admin."""
    try:
        res = request.get_json()
        order_id = res.get('order_id')
        # NOTE(review): every other handler in this module reads g.user_id;
        # confirm the auth decorators actually populate g.admin_id, otherwise
        # this line raises and the request always hits the generic error branch.
        admin_id = g.admin_id
        Logging.logger.info('request_args:{0}'.format(res))
        order_obj = GoodsOrder.query.get(order_id)
        if not order_obj:
            return jsonify(errno=-1, errmsg='订单不存在')
        # Refunds are only allowed for statuses 1 and 2.
        if order_obj.order_status not in [1, 2]:
            return jsonify(errno=-1, errmsg='当前订单状态不支持该操作')
        admin = Admin.query.get(admin_id)
        refund_person = admin.username
        # -1 marks a refunded order.
        order_obj.order_status = -1
        order_obj.admin_id = admin_id
        order_obj.refund_person = refund_person
        db.session.add(order_obj)
        db.session.commit()
        return jsonify(errno=0, errmsg="ok")
    except Exception as e:
        Logging.logger.error('errmsg:{0}'.format(e))
        db.session.rollback()
        return jsonify(errno=ErrorCode.Internet_error[0], errmsg=ErrorCode.Internet_error[1])
|
# Print each value in the list, tagged as even or odd.
number = [1, 2, 3, 4, 5, 6]
for i in number:
    if i % 2:
        print("odd no = ", i)
    else:
        print(f'even no = {i}')  # formatted string
motorcycles = ['honda', 'bmw', 'yamaha', 'suzuki']
# List-manipulation demo:
# 0. Replace an element by assigning to its index.
# 1. .append(element) adds an element to the end of the list.
# 1.1 .insert(position, element) inserts an element at position n.
# 2. .pop() removes and returns an element: pass an index to remove that
#    element, or leave the parentheses empty to remove the last one.
print('Список до изменения:', motorcycles)
motorcycles[3] = 'cf motors'          # replace an element
motorcycles.append('ducati')          # append to the end
motorcycles.insert(1, 'm1nsk')        # insert at position 1
print('Список после изменения', motorcycles)
print('Удалил 3 элемент списка:', motorcycles.pop(2))      # remove by index
print('Удалил последний элемент списка', motorcycles.pop())  # remove the last
|
# For each letter a-z, print the index of its first occurrence in the
# input word, or -1 if the letter does not appear.
word = input()
alpha = [-1] * 26
for idx, ch in enumerate(word):
    pos = ord(ch) - ord('a')
    if alpha[pos] == -1:  # only record the first occurrence
        alpha[pos] = idx
for first_idx in alpha:
    print(first_idx, end=" ")
|
from scipy.signal import butter, lfilter, resample,iirnotch
from tqdm import tqdm
from pylab import genfromtxt
import scipy.io as io
import numpy as np
import pandas as pd
import lib.utils as utils
import random
import os
import sys
from mne.decoding import UnsupervisedSpatialFilter
from sklearn.decomposition import PCA, FastICA
sys.path.append('..')
from methods import pulse_noise
def bandpass(sig, band, fs):
    """Forward-filter `sig` along axis 0 with a 5th-order Butterworth band-pass.

    Args:
        sig: signal array, time along axis 0.
        band: (low, high) edge frequencies in Hz.
        fs: sampling frequency in Hz.
    Returns:
        The band-pass-filtered signal, same shape as `sig`.
    """
    # scipy's butter expects edges normalised so that 1.0 == Nyquist.
    normalised_band = np.array(band) / (fs / 2)
    numer, denom = butter(5, normalised_band, btype='bandpass')
    return lfilter(numer, denom, sig, axis=0)
def notch_filtering(wav, fs, w0, Q):
    """Apply a notch (band-stop) IIR filter to the signal.

    Args:
        wav: Waveform.
        fs: Sampling frequency of the waveform in Hz.
        w0: Frequency to remove, in Hz (see scipy.signal.iirnotch).
        Q: Quality factor (see scipy.signal.iirnotch).
    Returns:
        The filtered waveform.
    """
    # iirnotch wants the notch frequency normalised so that 1.0 == Nyquist.
    numer, denom = iirnotch(2 * w0 / fs, Q)
    return lfilter(numer, denom, wav)
# Preprocess motor-imagery EEG recordings: band-pass + notch filter,
# epoch around trigger events, optionally inject pulse noise, normalize,
# trim amplitude outliers, and save everything to a .mat file.
sample_freq = 300.0
epoc_window = 2 * sample_freq  # epoch length in samples (adjustable)
# subjects = ['0001','0002','0003','0004','0005','0006','0007','0008','0009','0010','0011','0012','0013','0014','0015','0016','0017',
#             '0018','0019','0023','0021','0022','0023','0024','0025','0026','0027','0028']
subjects = ['0037','0038']
# npp_params: [amplitude scale, pulse frequency (Hz), duty proportion] -- TODO confirm
npp_params=[0.2, 5, 0.1]
data_file = 'EEG_Data/MI_DR02/raw/data_{}_raw.csv'
# Per-subject trial labels (50 trials each, all labelled 1 in this run;
# the commented variants below are the all-zero alternative).
y1=np.squeeze(np.array([1,1,1,1,1, 1,1,1,1,1, 1,1,1,1,1, 1,1,1,1,1, 1,1,1,1,1,
                        1,1,1,1,1, 1,1,1,1,1, 1,1,1,1,1, 1,1,1,1,1, 1,1,1,1,1]))
y2=np.squeeze(np.array([1,1,1,1,1, 1,1,1,1,1, 1,1,1,1,1, 1,1,1,1,1, 1,1,1,1,1,
                        1,1,1,1,1, 1,1,1,1,1, 1,1,1,1,1, 1,1,1,1,1, 1,1,1,1,1]))
# y1=np.squeeze(np.array([0,0,0,0,0, 0,0,0,0,0, 0,0,0,0,0, 0,0,0,0,0, 0,0,0,0,0,
#                         0,0,0,0,0, 0,0,0,0,0, 0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0]))
# y2=np.squeeze(np.array([0,0,0,0,0, 0,0,0,0,0, 0,0,0,0,0, 0,0,0,0,0, 0,0,0,0,0,
#                         0,0,0,0,0, 0,0,0,0,0, 0,0,0,0,0, 0,0,0,0,0, 0,0,0,0,0]))
# Accumulators for "clean" data; the *_po counterparts are unused in this run.
X_cl=[]
Y_cl=[]
X_po=[]
Y_po=[]
Ek_cl=0
Ek_po=0
q1=0
q2=0
for index in tqdm(range(len(subjects))):
    x = []
    e = []
    s = []
    clean=True  # True: skip the noise-injection branch below
    q1+=1
    file_name = data_file.format(subjects[index])
    sig = np.array(pd.read_csv(file_name).values)  # load the raw CSV recording
    EEG = sig[:, 1:-2]  # channel columns: drop first column and last two
    Trigger = sig[:, -1]  # trigger/event column
    idxFeedBack = np.where(Trigger == 1)[0]  # trial-onset sample indices
    idxFeedBack2 = np.where(Trigger == 1700)[0]  # baseline markers (used only by the disabled correction below)
    if not clean:
        # Inject narrow-period pulse (NPP) noise into every trial window.
        npp = pulse_noise([1, 16, int(epoc_window)], freq=npp_params[1], sample_freq=sample_freq,
                          proportion=npp_params[2])
        amplitude = np.mean(np.std(EEG, axis=0)) * npp_params[0]
        for _, idx in enumerate(idxFeedBack):
            idx = int(idx)
            EEG[idx:int(idx + epoc_window), :] = np.transpose(npp.squeeze() * amplitude,
                                                              (1, 0)) + EEG[idx:int(idx + epoc_window), :]
    sig_F = bandpass(EEG, [8.0, 30.0], sample_freq)
    sig_F= notch_filtering(sig_F, sample_freq, 50, 30)  # suppress 50 Hz mains interference
    for i, idx in enumerate(idxFeedBack):
        idx = int(idx)
        ################ baseline correction (disabled)
        # idx2 = idxFeedBack2[i]
        # #a = sig_F[idx2:int(idx2 + 100), :]
        # mul_mean = np.mean(sig_F[idx2:int(idx2 + 100), :], axis=0)
        # s_sig = sig_F[idx:int(idx + epoc_window), :]
        # b = np.zeros((int(epoc_window), 16))
        # for j in range(16):
        #     b[:, j] = mul_mean[j]
        # s_sig = s_sig - b
        ###############################
        s_sig = sig_F[idx:int(idx + epoc_window), :]
        s_sig = resample(s_sig, int(epoc_window * 128 / sample_freq))  # resample to 128 Hz
        x.append(s_sig)
        s.append(idx)
    x = np.array(x)
    x = np.transpose(x, (0, 2, 1))  # -> (trials, channels, samples)
    # # build the ICA spatial-filter model (disabled)
    # ica = UnsupervisedSpatialFilter(FastICA(16), average=False)
    # # apply it
    # x = ica.fit_transform(x)
    s = np.squeeze(np.array(s))
    # Alternate label vectors between odd/even subjects.
    if q1%2==1:
        y = np.squeeze(np.array(y1))
    else:
        y= np.squeeze(np.array(y2))
    x = utils.standard_normalize(x)
    if Ek_cl==0:  # work around: np.concatenate cannot start from an empty list
        X_cl=x
        Y_cl=y
        Ek_cl=1
    else:
        X_cl= np.concatenate((X_cl, x), axis=0)
        Y_cl= np.concatenate((Y_cl, y), axis=0)
ampl_al=np.std(X_cl, axis=(1,2))  # per-trial overall amplitude
ind=np.argsort(ampl_al)
ind_vaild = ind[5:-5]  # drop the 5 lowest- and 5 highest-amplitude trials as outliers
X_cl2=X_cl[ind_vaild]
Y_cl2=Y_cl[ind_vaild]
X_cl2=X_cl2[:, np.newaxis, :, :]  # add singleton "depth" axis, e.g. for a CNN
save_dir = 'EEG_Data/MI_DR02/'
save_file = save_dir + 'dataPOI0.05.mat'
io.savemat(save_file, {'x_POI': X_cl2,'y_POI': Y_cl2})
# cook your dish here
import sys
from collections import defaultdict
from collections import deque
sys.setrecursionlimit(10**6)
# Sum, over connected components of an undirected graph, the minimum
# node cost within each component.
n, m = map(int, input().split())
c = list(map(int, input().split()))
d = defaultdict(list)
for _ in range(m):
    u, v = map(int, input().split())
    d[u].append(v)
    d[v].append(u)
ans = 0
visited = [0] * (n + 1)
for start in range(1, n + 1):
    if not visited[start]:
        # BFS over this component, tracking its cheapest node.
        queue = deque([start])
        visited[start] = 1
        cheapest = 10 ** 9
        while queue:
            node = queue.popleft()
            cheapest = min(cheapest, c[node - 1])
            for neighbour in d[node]:
                if not visited[neighbour]:
                    visited[neighbour] = 1
                    queue.append(neighbour)
        ans = ans + cheapest
print(ans)
|
import numpy as np
import torch
import os
from matplotlib import pyplot as plt
from Config import GeneralConfig, DynamicsConfig
from Dynamics import VehicleDynamics
from utils import step_relative
class Train(DynamicsConfig):
    """Actor-critic training helper for the vehicle reference-tracking task.

    Holds the batch of absolute ("agent") and reference-relative states,
    performs n-step look-ahead policy evaluation / improvement, and records
    losses.  Hyper-parameters (BATCH_SIZE, FORWARD_STEP, GAMMA_D,
    RESET_ITERATION, y_range, psi_range, ...) come from DynamicsConfig.
    """

    def __init__(self):
        super(Train, self).__init__()
        # agent_batch: absolute states; state_batch: states relative to the reference.
        self.agent_batch = torch.empty([self.BATCH_SIZE, self.DYNAMICS_DIM])
        self.state_batch = torch.empty([self.BATCH_SIZE, self.STATE_DIM])
        self.init_index = np.ones([self.BATCH_SIZE, 1])
        # Rollout buffers; x_forward has one extra slot for the terminal state.
        self.x_forward = []
        self.u_forward = []
        self.L_forward = []
        self.iteration_index = 0
        self.value_loss = np.empty([0, 1])
        self.policy_loss = np.empty([0, 1])
        self.dynamics = VehicleDynamics()
        for i in range(self.FORWARD_STEP):
            self.u_forward.append([])
            self.L_forward.append([])
        for i in range(self.FORWARD_STEP+1):
            self.x_forward.append([])

    def initialize_state(self):
        """Sample an initial batch: Gaussian relative states placed along the reference."""
        # 0.6, 0.4, 0.15, 0.1
        self.state_batch[:, 0] = torch.normal(0.0, 0.6, [self.BATCH_SIZE, ])
        self.state_batch[:, 1] = torch.normal(0.0, 0.4, [self.BATCH_SIZE, ])
        self.state_batch[:, 2] = torch.normal(0.0, 0.15, [self.BATCH_SIZE, ])
        self.state_batch[:, 3] = torch.normal(0.0, 0.1, [self.BATCH_SIZE, ])
        # Spread the batch evenly along the reference-trajectory phase.
        self.agent_batch[:, 4] = torch.linspace(0.0, np.pi, self.BATCH_SIZE)
        init_ref = self.dynamics.reference_trajectory(self.agent_batch[:, 4])
        self.agent_batch[:, 0:4] = self.state_batch + init_ref
        self.init_state = self.agent_batch

    def setInitState(self):
        """Like initialize_state but with tighter noise, built on detached copies."""
        # 0.6,0.4,0.15,0.1
        state_batch = self.state_batch.detach().clone()
        agent_batch = self.agent_batch.detach().clone()
        state_batch[:, 0] = torch.normal(0.0, 0.3, [self.BATCH_SIZE, ])
        state_batch[:, 1] = torch.normal(0.0, 0.2, [self.BATCH_SIZE, ])
        state_batch[:, 2] = torch.normal(0.0, 0.07, [self.BATCH_SIZE, ])
        state_batch[:, 3] = torch.normal(0.0, 0.05, [self.BATCH_SIZE, ])
        agent_batch[:, 4] = torch.linspace(0.0, np.pi, self.BATCH_SIZE)
        init_ref = self.dynamics.reference_trajectory(agent_batch[:, 4])
        agent_batch[:, 0:4] = state_batch + init_ref
        self.init_state = agent_batch

    def check_done(self, state):
        """
        Check if the states reach unreasonable zone and reset them
        Parameters
        ----------
        state: tensor shape: [BATCH_SIZE, STATE_DIMENSION]
            state used for checking.
        Returns
        -------
        The batch with out-of-range rows replaced by their initial states.
        """
        threshold = np.kron(np.ones([self.BATCH_SIZE, 1]), np.array([self.y_range, self.psi_range]))
        threshold = np.array(threshold, dtype='float32')
        threshold = torch.from_numpy(threshold)
        ref_state = self.dynamics.reference_trajectory(state[:, -1])
        state = state[:, 0:4] - ref_state
        # Only lateral position (col 0) and heading (col 2) are range-checked.
        check_state = state[:, [0, 2]].clone()
        check_state.detach_()
        sign_error = torch.sign(torch.abs(check_state) - threshold)  # if abs state is over threshold, sign_error = 1
        self._reset_index, _ = torch.max(sign_error, 1)  # if one state is over threshold, _reset_index = 1
        # Force a full-batch reset every RESET_ITERATION updates.
        if self.iteration_index == self.RESET_ITERATION:
            self._reset_index = torch.from_numpy(np.ones([self.BATCH_SIZE,],dtype='float32'))
            self.iteration_index = 0
            print('AGENT RESET')
        reset_state = self._reset_state(self.agent_batch)
        return reset_state

    def _reset_state(self, state):
        """
        reset state to initial state.
        Parameters
        ----------
        state: tensor shape: [BATCH_SIZE, STATE_DIMENSION]
            state used for checking.
        Returns
        -------
        state: state after reset.
        """
        for i in range(self.BATCH_SIZE):
            if self._reset_index[i] == 1:
                state[i, :] = self.init_state[i, :]
        return state

    def update_state(self, policy, dynamics):
        """
        Update state using policy net and dynamics model.
        Parameters
        ----------
        policy: nn.Module
            policy net.
        dynamics: object dynamics.
        """
        self.agent_batch = self.check_done(self.agent_batch)
        self.agent_batch.detach_()
        ref_trajectory = dynamics.reference_trajectory(self.agent_batch[:, -1])
        self.state_batch = self.agent_batch[:, 0:4] - ref_trajectory
        control = policy.forward(self.state_batch)
        self.agent_batch, self.state_batch = dynamics.step_relative(self.agent_batch, control)
        self.iteration_index += 1

    def policy_evaluation(self, policy, value, dynamics):
        """
        Do n-step look-ahead policy evaluation.
        Parameters
        ----------
        policy: policy net
        value: value net
        dynamics: object dynamics
        Returns
        -------
        The scalar value loss as a numpy array.
        """
        for i in range(self.FORWARD_STEP):
            if i == 0:
                # Keep the absolute agent batch because step_relative needs it.
                self.x_forward[i] = self.agent_batch.detach()
                reference = dynamics.reference_trajectory(self.agent_batch[:,-1])
                self.state_batch = dynamics.relative_state(self.x_forward[i])
                self.u_forward[i] = policy.forward(self.state_batch)
                self.x_forward[i + 1], _, _, _, _, _, _ = dynamics.step(self.x_forward[i], self.u_forward[i])
                ref_state_next = self.x_forward[i + 1][:, 0:4] - reference
                self.L_forward[i] = dynamics.utility(ref_state_next, self.u_forward[i])
            else:
                # NOTE(review): `reference` stays frozen at its step-0 value for
                # the whole rollout -- confirm this is intended.
                ref_state = self.x_forward[i][:, 0:4] - reference
                self.u_forward[i] = policy.forward(ref_state)
                self.x_forward[i + 1], _, _, _, _, _, _ = dynamics.step(self.x_forward[i],
                                                                        self.u_forward[i])
                ref_state_next = self.x_forward[i + 1][:, 0:4] - reference
                self.L_forward[i] = dynamics.utility(ref_state_next, self.u_forward[i])
                # # self.u_forward[i] = policy.forward(self.x_forward[i][:, 0:4])
                # self.state_batch = dynamics.relative_state(self.x_forward[i])
                # self.u_forward[i] = policy.forward(self.state_batch)
                # # self.x_forward[i + 1], _, self.L_forward[i],_, _, _, _ = dynamics.step(self.x_forward[i], self.u_forward[i])
                # self.x_forward[i + 1], self.state_batch_next = step_relative(dynamics, self.x_forward[i], self.u_forward[i])
                # self.L_forward[i] = dynamics.utility(self.state_batch_next, self.u_forward[i])
        self.agent_batch_next = self.x_forward[-1]
        self.state_batch_next = self.agent_batch_next[:, 0:4] - reference
        self.value_next = value.forward(self.state_batch_next)
        self.utility = torch.zeros([self.FORWARD_STEP, self.BATCH_SIZE], dtype=torch.float32)
        for i in range(self.FORWARD_STEP):
            self.utility[i] = self.L_forward[i].clone()
        self.sum_utility = torch.sum(self.utility,0)
        # n-step TD target: accumulated utility + discounted bootstrapped value.
        target_value = self.sum_utility.detach() + self.GAMMA_D * self.value_next.detach()
        value_now = value.forward(self.state_batch)
        # Regularizer pinning the value of the zero-error equilibrium state to 0.
        equilibrium_state = torch.tensor([[0.0, 0.0, 0.0, 0.0]])
        value_equilibrium = value.forward(equilibrium_state)
        value_loss = 1 / 2 * torch.mean(torch.pow((target_value - value_now), 2)) \
                     + 10 * torch.pow(value_equilibrium, 2)
        self.state_batch.requires_grad_(False)
        # for i in range(1):
        value.zero_grad()
        value_loss.backward()
        value.opt.step()
        value.lrScheduler.step()
        # ("PEV learning rate {:3.3e}".format(value.opt.param_groups[0]['lr']))
        self.value_loss = np.append(self.value_loss, value_loss.detach().numpy())
        return value_loss.detach().numpy()

    def policy_improvement(self, policy, value):
        """
        Do n-step look-ahead policy improvement.
        Parameters
        ----------
        policy: policy net
        value: value net
        Returns
        -------
        The scalar policy loss as a numpy array.
        """
        self.value_next = value.forward(self.state_batch_next)
        policy_loss = torch.mean(self.sum_utility + self.value_next)  # Hamilton
        #for i in range(1):
        policy.zero_grad()
        policy_loss.backward()
        policy.opt.step()
        self.policy_loss = np.append(self.policy_loss, policy_loss.detach().numpy())
        return policy_loss.detach().numpy()

    def save_data(self, log_dir):
        """
        Save loss data.
        Parameters
        ----------
        log_dir: str
            directory in ./Results_dir.
        Returns
        -------
        """
        np.savetxt(os.path.join(log_dir, "value_loss.txt"), self.value_loss)
        np.savetxt(os.path.join(log_dir, "policy_loss.txt"), self.policy_loss)
        torch.save(self.agent_batch, os.path.join(log_dir, "agent_buffer.pth"))

    def print_loss_figure(self, iteration, log_dir):
        """
        print figure of loss decent.
        Parameters
        ----------
        iteration: int
            number of iterations.
        log_dir: str
            directory in ./Results_dir.
        Returns
        -------
        """
        plt.figure()
        # log10 scale makes the loss decay visible over many iterations.
        plt.scatter(range(iteration), np.log10(self.value_loss), c='r', marker=".", s=5., label="policy evaluation")
        plt.scatter(range(iteration), np.log10(self.policy_loss), c='b', marker=".", s=5., label="policy improvement")
        plt.legend(loc='upper right')
        plt.xlabel('iteration')
        plt.ylabel('loss')
        plt.savefig(os.path.join(log_dir, "loss.png"))
|
from django.urls import path
from .import views
# URL routes for this app; `name=` values are used by reverse()/templates.
urlpatterns = [
    path('', views.index, name='index'),
    path('login/', views.login, name='login'),
    path('change/', views.change, name='change'),
    path('apply/', views.apply, name='apply'),
    path('approve/', views.approve, name='approve'),
    path('main/', views.main, name='main'),
    # trans_type is captured from the URL and passed to the transfer view.
    path('<str:trans_type>/transfer/', views.transfer, name='transfer'),
    path('schedule/', views.schedule, name='schedule'),
    path('account_manage/', views.account_manage, name='account_manage'),
]
# Create your views here.
|
import numpy as np
# script to load label dictunary to labels
def Dect2Mat(LabelDict, TimeResoutionPerSec=20, RecTime=10):
    """Convert a label dictionary into a binary time-activation matrix.

    Args:
        LabelDict: {name: [start0, end0, start1, end1, ...]} with times in
            seconds, given as alternating start/end pairs.
        TimeResoutionPerSec: number of time bins per second.
        RecTime: total recording length in seconds.
    Returns:
        (matrix, keys): matrix has one row per key and
        TimeResoutionPerSec*RecTime columns, with 1 inside labelled spans;
        keys is the list of dictionary keys in row order.
    """
    keys = list(LabelDict.keys())
    OutputLabel = np.zeros((len(keys), TimeResoutionPerSec * RecTime))
    for row, spans in enumerate(LabelDict.values()):
        # Walk the flat [start, end, start, end, ...] list pairwise.
        for k in range(0, len(spans), 2):
            lo = int(np.round(spans[k] * TimeResoutionPerSec))
            hi = int(np.round(spans[k + 1] * TimeResoutionPerSec))
            OutputLabel[row, lo:hi] = 1
    return OutputLabel, keys
def PlotLabels(OutputLabel,keys,TimeResoutionPerSec = 20 ,RecTime=10) :
    """Visualize a Dect2Mat label matrix as an image (x: seconds, y: keys).

    NOTE(review): `plt` is never imported in this module (only numpy is);
    as written this raises NameError and needs
    `import matplotlib.pyplot as plt` at file level.
    """
    plt.imshow(OutputLabel,aspect='auto')
    # Tick every second on x; label each row with its dictionary key on y.
    plt.xticks(np.arange(0,TimeResoutionPerSec*RecTime+1,TimeResoutionPerSec),np.arange(RecTime))
    plt.yticks(np.arange(OutputLabel.shape[0]),keys)
|
def find_pivot_index(input_list):
    """Return the index of the minimum element of a sorted-then-rotated list.

    Binary search, O(log n). For a non-rotated list it returns 0; for an
    empty list it also returns 0 (no element to point at).
    """
    lo, hi = 0, len(input_list)
    min_index = 0
    while lo < hi:
        mid = (lo + hi) // 2
        if input_list[mid] < input_list[min_index]:
            # Found a smaller element: the minimum is at mid or to its left.
            min_index = mid
            hi = mid
        else:
            lo = mid + 1
    return min_index
print(find_pivot_index([3,4,5,6,7,0,1,2])) |
########################
###### UTILITIES #######
########################
'''
This service provides additional services to users
# 1. Setting image as desktop background
# 2. Uploading image to Imgur
# 3. Uploading image to twitter
# 4. Uploading image to facebook
'''
import os
import sys
import ctypes
from getpass import getpass
from configparser import ConfigParser
import requests
from requests_oauthlib import OAuth1
# import for uploading images to imgur
from imgurpython import ImgurClient
# imports for twitterAPI
from urllib.parse import parse_qs
from TwitterAPI import TwitterAPI
# imports for facebook
from facebook import GraphAPI
# Image tagging API
def tag_image(image_path, limit=5):
    """Return up to `limit` descriptive tags for the image via the Imagga API.

    Args:
        image_path: path of the image file to classify.
        limit: maximum number of tags to return (default 5).
    Returns:
        List of tag strings, most-confident first (Imagga's ordering).
    """
    # NOTE(review): API credentials are hard-coded in source; they should be
    # moved to auth.ini like the other services in this module.
    api_key = 'acc_c3b083251e16281'
    api_secret = '587fcd3f035a8503470f297fc708ca07'
    # Fix: the original opened the file without ever closing it; `with`
    # guarantees the handle is released even if the request fails.
    with open(image_path, 'rb') as image_file:
        response = requests.post('https://api.imagga.com/v2/tags',
                                 auth=(api_key, api_secret),
                                 files={'image': image_file})
    result = response.json()
    tags = result['result']['tags']
    # Each entry looks like {'tag': {'en': 'cat'}, 'confidence': ...}:
    # first pull out the 'tag' dicts, then their (single) translation values.
    tags = [tag['tag'] for tag in tags]
    tags = [list(item.values())[0] for item in tags]
    return tags[:limit]
# Sets meme to desktop background
def set_desktop_background(img_path):
    """Set the image at img_path as the desktop wallpaper (best effort).

    Supported: Windows, and Linux under GNOME or i3; macOS and other
    desktops only print a notice.
    """
    platform = sys.platform
    if platform == 'win32':
        # Windows: SPI_SETDESKWALLPAPER = 20.
        ctypes.windll.user32.SystemParametersInfoW(20, 0, img_path, 0)
    elif platform == 'darwin':
        # Fix: sys.platform reports 'darwin' (lowercase); the original
        # compared against 'Darwin' so macOS never reached this branch.
        print('No support for Mac yet.')
    elif platform == 'linux' or platform == 'linux2':
        sessionName = os.getenv("DESKTOP_SESSION")
        if sessionName == 'gnome':
            # NOTE(review): img_path is interpolated into a shell command;
            # paths with spaces/quotes will break or could inject commands.
            os.system(
                "gsettings set org.gnome.desktop.background picture-uri file:" + img_path)
        elif sessionName == 'kde':
            print('No support for KDE systems yet.')
        elif sessionName == 'xfce':
            print('No support for xfce systems yet.')
        elif sessionName == 'i3':
            os.system("feh --bg-scale " + img_path)
    # Sets it to background
# Sets it to background
# Upload meme to imgur
def upload_to_imgur(img_path):
    """Upload the image at img_path to Imgur, anonymously or to an account.

    Credentials come from auth.ini [imgur_credentials]. When the user opts
    for an authenticated upload, the function scrapes Imgur's PIN-based
    OAuth flow by submitting the login form and extracting the PIN.
    """
    config = ConfigParser()
    config.read('auth.ini')
    client_id = config.get('imgur_credentials', 'client_id')
    client_secret = config.get('imgur_credentials', 'client_secret')
    client = ImgurClient(client_id, client_secret)
    user_res = input("Do you want to upload anonymously to imgur?(yes/no)")
    if user_res.lower() == 'no':
        imgur_username = input("Enter username:")
        imgur_password = getpass("Enter password:")
        login_data = {
            'username':imgur_username,
            'password':imgur_password
        }
        authorization_url = client.get_auth_url('pin')
        with requests.Session() as s:
            headers = {
                'user-agent':'Mozilla/5.0 (X11; Fedora; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36'
            }
            r = s.get(authorization_url, headers=headers)
            # NOTE(review): BeautifulSoup is never imported in this module --
            # this branch raises NameError as written; it needs
            # `from bs4 import BeautifulSoup`.
            soup = BeautifulSoup(r.content, 'html5lib')
            # Carry the hidden "allow" form value through the login POST.
            login_data['allow'] = soup.find('button', attrs={'name':'allow'})['value']
            r = s.post(authorization_url, data=login_data, headers=headers)
            soup = BeautifulSoup(r.content, 'html5lib')
            pin = soup.find('input', attrs={'name':'pin'})['value']
            # print(pin)
        credentials = client.authorize(pin, 'pin')
        client.set_user_auth(
            credentials['access_token'], credentials['refresh_token'])
        config = {
            'album': None,
            'name': 'test name',
            'title': 'test title',
            'description': 'test description'
        }
        print("Uploading image...")
        image = client.upload_from_path(
            img_path, config=config, anon=False)
        print("Done! Check at", image['link'])
    elif user_res.lower() == 'yes':
        try:
            response = client.upload_from_path(img_path)
        except Exception as e:
            print(e)
        else:
            print('Upload Successful! Check at', response['link'])
    # performs upload to imgur and generates verbose information
# performs upload to imgur and generates verbose information
# Uploads meme to twitter
def upload_to_twitter(img_path):
    """Tweet the image at img_path with a user-supplied status message.

    Runs the 3-legged OAuth1 dance: obtain a request token, scrape the
    authorize page (logging in with the user's credentials) for the PIN
    verifier, then exchange it for an access token before uploading.
    """
    headers = {
        'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36'
    }
    config = ConfigParser()
    config.read('auth.ini')
    consumer_key = config.get('twitter_credentials', 'api_key')
    consumer_secret = config.get('twitter_credentials', 'api_secret')
    oauth = OAuth1(consumer_key, consumer_secret)
    # Step 1: request token.
    res = requests.post(
        url='https://api.twitter.com/oauth/request_token', auth=oauth)
    credentials = parse_qs(res.text)
    request_key = credentials.get('oauth_token')[0]
    request_secret = credentials.get('oauth_token_secret')[0]
    authorization_url = 'https://api.twitter.com/oauth/authorize?oauth_token=%s' % request_key
    # Step 2: log in and authorize via the web form to obtain the verifier.
    with requests.Session() as s:
        USERNAME = input('Enter your username:')
        PASSWORD = getpass('Enter your password:')
        login_data = {
            'session[username_or_email]': USERNAME,
            'session[password]': PASSWORD,
            'form_id':'oauth_form'
        }
        r = s.get(authorization_url, headers=headers)
        # NOTE(review): BeautifulSoup is never imported in this module --
        # this raises NameError as written; needs `from bs4 import BeautifulSoup`.
        soup = BeautifulSoup(r.content, 'html5lib')
        login_data['authenticity_token'] = soup.find('input', attrs={'name':'authenticity_token'})['value']
        r = s.post(authorization_url, data=login_data, headers=headers)
        soup = BeautifulSoup(r.content, 'html5lib')
        verifier = soup.findAll('code')[0].string
    # Step 3: exchange verifier for an access token.
    oauth = OAuth1(consumer_key,
                   consumer_secret,
                   request_key,
                   request_secret,
                   verifier=verifier
                   )
    res = requests.post(
        url='https://api.twitter.com/oauth/access_token', auth=oauth)
    credentials = parse_qs(res.text)
    access_token_key = credentials.get('oauth_token')[0]
    access_token_secret = credentials.get('oauth_token_secret')[0]
    api = TwitterAPI(consumer_key,
                     consumer_secret,
                     access_token_key,
                     access_token_secret)
    tweet_text = input('Enter tweet message:')
    # NOTE(review): this handle is never closed -- consider `with open(...)`.
    file = open(img_path, "rb")
    image_data = file.read()
    # Upload media first, then attach its id to the status update.
    res = api.request('media/upload', None, {'media': image_data})
    if res.status_code == 200:
        print('Media uploaded')
    else:
        print('Upload failed: ', res.text)
    if res.status_code == 200:
        media_id = res.json()['media_id']
        res = api.request('statuses/update',
                          {'status': tweet_text, 'media_ids': media_id})
        if res.status_code == 200:
            print('Status upload successful.')
        else:
            print('Status upload failed:', res.text)
    # performs upload to twitter and generates verbose information
# performs upload to twitter and generates verbose information
# Upload meme to facebook
def upload_to_facebook(img_path):
    """Upload the image at img_path to the Facebook page configured in auth.ini.

    Reads [facebook_credentials] from auth.ini, exchanges the user access
    token for the target page's access token, and posts the photo there.
    """
    config = ConfigParser()
    config.read('auth.ini')
    app_key = config.get('facebook_credentials', 'app_key')
    app_key_secret = config.get('facebook_credentials', 'app_key_secret')
    user_access_token = config.get('facebook_credentials', 'user_access_token')
    page_id = config.get('facebook_credentials', 'page_id')
    # Look up the page token among the pages the user manages.
    graph = GraphAPI(user_access_token)
    resp = graph.get('me/accounts')
    page_access_token = None
    for page in resp['data']:
        if page['id'] == page_id:
            page_access_token = page['access_token']
    # Fix: the original constructed the final client with an undefined
    # name `access_token` (NameError on every call); use the page token.
    graph = GraphAPI(access_token=page_access_token, version="3.1")
    print('Uploading photo...')
    # Fix: close the image handle deterministically.
    with open(img_path, 'rb') as image:
        graph.put_photo(image, message='Test caption')
    # performs upload to facebook and generates verbose information
# performs upload to facebook and generates verbose information
# username = input('Enter your email:')
# password = getpass('Enter your password:')
#
# headers = {
# 'user-agent':'Mozilla/5.0 (X11; Fedora; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36'
# }
#
# redirect_url = 'https://localhost:3000/'
# access_url = ('https://www.facebook.com/dialog/'
# + 'oauth?client_id=' + app_key + '&redirect_uri=' + redirect_url + "&scope=user_posts")
# with requests.Session() as s:
# login_data = {
# 'email':email,
# 'pass':password,
# 'form_id':'login_form'
# }
#
# r = s.get(access_url, headers=headers)
# soup = BeautifulSoup(r.content, 'html5lib')
# login_data['lgndim'] = soup.find('input', attrs={'name':'lgndim'})['value']
# login_data['lgnrnd'] = soup.find('input', attrs={'name':'lgnrnd'})['value']
# r = s.post(access_url, data=login_data, headers=headers, allow_redirects=True)
# print('Go to the link and authorize:', access_url)
#
# code = input('Enter code from the redirect URL')
#
# graph_auth_uri = ('https://graph.facebook.com/v2.2/oauth/'
# + 'access_token?client_id=' + app_key + '&redirect_uri='
# + redirect_url + '&client_secret=' + app_key_secret + '&code=%s') % code
#
# res = s.get(graph_auth_uri)
# data = res.json()
# access_token = data['access_token']
if __name__ == '__main__':
    # CLI usage: python utilities.py <image_path> -> set it as wallpaper.
    try:
        set_desktop_background(sys.argv[1])
    except IndexError:
        # Fix: narrowed the bare `except:` (which also hid real errors from
        # set_desktop_background) to the intended missing-argument case.
        pass
|
#recursive method
def hcf(a, b):
    """Return the greatest common divisor of a and b (Euclid, recursive)."""
    return a if b == 0 else hcf(b, a % b)
# Read two integers from stdin and print their greatest common divisor.
a = int(input())
b = int(input())
print(hcf(a,b))
from django.shortcuts import render, redirect
from .models import Courses, Descriptions
def index(request):
    """Render the course list page with every course in the database."""
    course_context = {'courses': Courses.objects.all()}
    return render(request, "main_app/index.html", course_context)
def add_predefined_data(request):
    """Seed the database with four demo courses, then redirect to the index.

    The original repeated the same Descriptions/Courses create pair four
    times verbatim; the seed data is now a table driven by one loop.
    """
    seed = [
        ("How to be a ninja", "Description1"),
        ("How to fly", "Description2"),
        ("How to get more energy in the bootcamp", "Description3"),
        ("How to pair program more efficiently", "Description4"),
    ]
    for name, description in seed:
        # Each course gets its own Descriptions row; the FK field on Courses
        # is named `description_id` (kept as in the original).
        desc = Descriptions.objects.create(description=description)
        Courses.objects.create(name=name, description_id=desc)
    return redirect("/")
def add_course(request):
    """Create a course (and its description) from the submitted form data."""
    new_desc = Descriptions.objects.create(description=request.POST['description'])
    Courses.objects.create(name=request.POST['title'], description_id=new_desc)
    return redirect("/")
def remove_course(request, id):
    """Show the confirmation page for deleting course `id`."""
    course = Courses.objects.get(id=id)
    return render(request, "main_app/remove_course.html", {"course": course})
def remove_course_confirm(request, id):
    """Delete course `id` if the user confirmed, then return to the index.

    The original returned None (a Django 500) when the form action was
    anything other than "continue", and raised KeyError when 'action' was
    absent; every path now redirects, and the lookup uses POST.get().
    """
    if request.POST.get('action') == "continue":
        Courses.objects.get(id=id).delete()
    return redirect("/")
|
#!/usr/bin/env python2
import sys
from PyQt5 import Qt
from PyQt5 import QtCore
from PyQt5.uic import loadUi
# [ms]
TICK_TIME = 2**6
class Timer(Qt.QMainWindow):
    """Simple stopwatch window driven by a QTimer firing every TICK_TIME ms."""

    def __init__(self):
        super(Timer, self).__init__()
        # UI wiring (loadUi + button connects) is commented out upstream:
        # self.reset.clicked.connect(self.do_reset)
        # self.start.clicked.connect(self.do_start)
        self.timer = Qt.QTimer()
        self.timer.setInterval(TICK_TIME)
        self.timer.timeout.connect(self.tick)
        self.running = False
        self.do_reset()

    def __start_stop_reset_event(self, event):
        # Left click toggles start/pause, right click resets.
        # BUG FIX: the original referenced the bound methods without calling
        # them (e.g. `self.do_start` instead of `self.do_start()`), so the
        # clicks did nothing; it also never updated self.running, so the
        # toggle could not work even with the calls in place.
        if event.button() == QtCore.Qt.LeftButton:
            if not self.running:
                self.do_start()
            else:
                self.do_pause()
        elif event.button() == QtCore.Qt.RightButton:
            self.do_reset()

    def display(self):
        # NOTE(review): self.timer is a QTimer, which has no display();
        # this presumably targeted a QLCDNumber widget from the commented-out
        # loadUi setup — confirm before relying on it.
        self.timer.display("%d:%05.2f" % (self.time // 60, self.time % 60))

    @Qt.pyqtSlot()
    def tick(self):
        # Advance elapsed time by one tick (TICK_TIME is in milliseconds).
        self.time += TICK_TIME / 1000
        self.display()

    @Qt.pyqtSlot()
    def do_start(self):
        self.running = True
        self.timer.start()

    @Qt.pyqtSlot()
    def do_pause(self):
        self.running = False
        self.timer.stop()

    @Qt.pyqtSlot()
    def do_reset(self):
        self.time = 0
        self.display()
# Launch the Qt event loop with a single Timer window.
app = Qt.QApplication(sys.argv)
timer = Timer()
timer.show()
app.exec_()
|
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 23 16:27:38 2020
@author: scott
"""
# dataframe imports
import pandas as pd
# datetime imports
from datetime import datetime
import holidays
us_holidays = holidays.UnitedStates()
# database imports
import sqlite3
import os
# custom functions import
from functions import (get_weekdays_df, remove_incomplete_days, day_box, group_days_dict,
day_lines, normalize, day_sums, df_iwf, df_peakyness)
# =============================================================================
# #### CSV Exports for Grow A ###
# =============================================================================
# =============================================================================
# #### Per-site processing (Grow A and Grow B) ###
# =============================================================================
def _site_flows_and_peakyness(csv_name, site_name):
    """Load one site's hourly CSV and return (flows, peakyness) DataFrames.

    The GrowA and GrowB pipelines in the original were identical copy-paste
    blocks; this helper is that shared pipeline.

    flows      -- timestamp-indexed hourly data with incomplete days removed.
    peakyness  -- weekday daily sums with peak stats, values in gallons,
                  tagged with a 'site' column.
    """
    df = pd.read_csv(csv_name)
    df = df.rename(columns={"Total.Flow": "value"})
    # Build one timestamp per row from the 'date' and 'hour' columns.
    df['time stamp'] = [
        datetime.fromisoformat(d).replace(hour=h)
        for d, h in zip(df['date'], df['hour'])
    ]
    df = df.set_index(['time stamp']).sort_index()
    df = remove_incomplete_days(df)
    flows = df.reset_index()
    # Weekday-only daily sums plus three-hour peak volumes/times.
    weekdays = get_weekdays_df(df)
    sums_df = day_sums(weekdays)
    sums_df = df_peakyness(sums_df, weekdays)
    # Convert GPM to gallons; must happen after df_peakyness so the
    # normalization inside it sees the original units.
    sums_df['value'] = sums_df['value'] * 60
    peakyness = sums_df.reset_index()
    peakyness['site'] = site_name
    return flows, peakyness


growA_flows, growA_peakyness = _site_flows_and_peakyness('GrowA_hourly.csv', 'growA')
growB_flows, growB_peakyness = _site_flows_and_peakyness('growB_hourly.csv', 'growB')
# =============================================================================
# #### Convert GPM to Gals
# =============================================================================
growA_volume = growA_flows
growA_volume['value'] = growA_volume['value'] * 60  # convert to gallons
growB_volume = growB_flows
growB_volume['value'] = growB_volume['value'] * 60  # convert to gallons
# =============================================================================
# #### Export data
# =============================================================================
growA_volume.to_csv('growA_volume.csv')
growB_volume.to_csv('growB_volume.csv')
growA_peakyness.to_csv('growA_peakyness.csv')
growB_peakyness.to_csv('growB_peakyness.csv')
|
# This file is part of beets.
# Copyright 2019, Carl Suster
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Test the beets.random utilities associated with the random plugin.
"""
import unittest
from test.helper import TestHelper
import math
from random import Random
from beets import random
class RandomTest(unittest.TestCase, TestHelper):
    """Statistical tests for beets.random._equal_chance_permutation."""

    def setUp(self):
        # One item by artist1, nine by artist2, and a fixed RNG seed so the
        # statistics below are reproducible across runs.
        self.lib = None
        self.artist1 = 'Artist 1'
        self.artist2 = 'Artist 2'
        self.item1 = self.create_item(artist=self.artist1)
        self.item2 = self.create_item(artist=self.artist2)
        self.items = [self.item1, self.item2]
        for _ in range(8):
            self.items.append(self.create_item(artist=self.artist2))
        self.random_gen = Random()
        self.random_gen.seed(12345)

    def tearDown(self):
        pass

    def _stats(self, data):
        """Return (mean, sample stdev, median) of a sequence of numbers."""
        mean = sum(data) / len(data)
        # Sample (n-1) standard deviation.
        stdev = math.sqrt(
            sum((p - mean) ** 2 for p in data) / (len(data) - 1))
        quot, rem = divmod(len(data), 2)
        if rem:
            median = sorted(data)[quot]
        else:
            # Even count: average the two middle values.
            median = sum(sorted(data)[quot - 1:quot + 1]) / 2
        return mean, stdev, median

    def test_equal_permutation(self):
        """We have a list of items where only one item is from artist1 and the
        rest are from artist2. If we permute weighted by the artist field then
        the solo track will almost always end up near the start. If we use a
        different field then it'll be in the middle on average.
        """
        def experiment(field, histogram=False):
            """Permutes the list of items 500 times and calculates the position
            of self.item1 each time. Returns stats about that position.
            """
            positions = []
            for _ in range(500):
                shuffled = list(random._equal_chance_permutation(
                    self.items, field=field, random_gen=self.random_gen))
                positions.append(shuffled.index(self.item1))
            # Print a histogram (useful for debugging).
            if histogram:
                for i in range(len(self.items)):
                    print('{:2d} {}'.format(i, '*' * positions.count(i)))
            return self._stats(positions)
        mean1, stdev1, median1 = experiment('artist')
        mean2, stdev2, median2 = experiment('track')
        # Artist-weighted shuffle pushes the solo track to the front;
        # track-weighted leaves it near the middle with more spread.
        self.assertAlmostEqual(0, median1, delta=1)
        self.assertAlmostEqual(len(self.items) // 2, median2, delta=1)
        self.assertGreater(stdev2, stdev1)
def suite():
    """Build this module's test suite (used by unittest.main below)."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromName(__name__)
if __name__ == '__main__':
    # Run via the suite() factory so the loader above is used.
    unittest.main(defaultTest='suite')
|
# coding: utf-8
'''
模拟其他语言的线程同步锁(函数装饰器),和获取可重复同步锁函数get。
可以看作锁定key对象,也可以把key视作键,使用字符串等。
key必须是hashable的,必须提供。
'''
import threading as _threading
import weakref as _weakref
# Guards every access to the lock registry below.
_dLock = _threading.RLock()
# Registry mapping key (or a weakref to the key) -> its RLock.
_dict = {}
def _recycle(key):
    """Weakref callback: drop a dead key's lock from the registry.

    `key` here is the weakref object itself (passed by the weakref module
    when its referent is collected).  Fixes: `dict.has_key` is Python-2-only
    (removed in 3.x), and the lock leaked if pop() raised — release is now
    in a finally block.
    """
    _dLock.acquire()
    try:
        if key in _dict:
            _dict.pop(key)
    finally:
        _dLock.release()
def synchronized(key):
    '''
    Thread-synchronization decorator, mimicking other languages' locks.

    The operated-on object itself can serve as the key, or use a string
    or any other hashable value.
    '''
    lock = get(key)

    def _decorator(func):
        def _decorated(*args, **kw):
            # `with` on an RLock is equivalent to acquire/try/finally-release.
            with lock:
                return func(*args, **kw)
        return _decorated

    return _decorator
def get(key):
    """Return the (reusable) RLock registered for `key`, creating it on demand.

    Weakly references `key` when possible so the entry is recycled once the
    key object is garbage collected; falls back to using the key directly
    for types that cannot be weakly referenced (str, int, ...).

    Fixes: `dict.has_key` and `except TypeError, e` are Python-2-only forms;
    replaced with `in` and exception syntax valid on 2.6+ and 3.x alike.
    """
    _dLock.acquire()
    try:
        _key = _weakref.ref(key)
        if _key not in _dict:
            # Re-create the ref with a cleanup callback; equal refs hash
            # alike, so later lookups with a plain ref still hit this entry.
            _key = _weakref.ref(key, _recycle)
            _dict[_key] = _threading.RLock()
        return _dict[_key]
    except TypeError:  # key is not weak-referenceable (str, etc.)
        if key not in _dict:
            _dict[key] = _threading.RLock()
        return _dict[key]
    finally:
        _dLock.release()
# Public API of this module.
__all__ = ['synchronized', 'get']
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Example 1: ordinary positional parameters ---------------------------------
print('-' * 30, ' 普通参数 ', '-' * 30)
def func(a, b, c):
    # Simply echo the three arguments in order.
    print(a, b, c)
func(1, 2, 3)
# Example 2: keyword arguments — pass parameters by name
func(a=2, b=1, c=3)
func(3, c=2, b=1,)
# Note: func(3, a=2, b=1) would raise an error — positional arguments are
# bound first, then keyword arguments, so `a` would be assigned twice.
# Example 3: default parameters ---------------------------------------------
print('-' * 30, ' 默认参数 ', '-' * 30)
def func(a, b=2, c=3):
    print(a, b, c)
func(1)        # -> 1 2 3 (both defaults used)
func(a=1)      # -> 1 2 3
func(1, 7)     # -> 1 7 3 (b overridden)
func(1, 3, 5)  # -> 1 3 5 (all overridden)
|
# Udacity Deep Learning Course Notes
# Lecture 1: From Machine Learning to Deep Learning
import numpy as np
import matplotlib.pyplot as plt
#################################
#### 10/31 Softmax Quiz 1 #####
#################################
def softmax(x):
    """Compute softmax values for each set of scores in x.

    Score sets run along axis 0, matching the quiz convention below.
    Subtracting the per-column maximum before exponentiating prevents
    overflow for large scores without changing the result, since softmax
    is invariant under a constant shift of its inputs.
    """
    x = np.asarray(x, dtype=float)
    shifted = x - np.max(x, axis=0)
    exps = np.exp(shifted)
    return exps / np.sum(exps, axis=0)
#################################
####  11/31 Softmax Quiz 2  #####
#################################
# Scale the scores: 90 steps of i in [1, 10) applied to x = [1, 2, 3].
x = [1, 2, 3]
vals = np.zeros((90, 3))
n = 0
for i in np.arange(1, 10, 0.1):
    print(np.multiply(x, i))
    vals[n] = softmax(np.multiply(x, i))
    n += 1
x = np.arange(1, 10, 0.1)
plt.plot(x, vals, linewidth=2)
plt.show()
# If you multiply the scores by a constant, probabilities get closer to 0 and 1!
x = [1, 2, 3]
vals = np.zeros((90, 3))
n = 0
for i in np.arange(1, 10, 0.1):
    print(np.add(x, i))
    vals[n] = softmax(np.add(x, i))
    n += 1
x = np.arange(1, 10, 0.1)
plt.plot(x, vals, linewidth=2)
plt.show()
# If you add a constant to the scores there is no difference!
x = [1, 2, 3]
vals = np.zeros((90, 3))
n = 0
for i in np.arange(1, 10, 0.1):
    print(np.divide(x, i))
    vals[n] = softmax(np.divide(x, i))
    n += 1
x = np.arange(1, 10, 0.1)
plt.plot(x, vals, linewidth=2)
plt.show()
# If you divide the scores by a constant the probabilities get closer to uniform!
#########################################
####  18/31 Numeric Stability Quiz  #####
#########################################
# Demonstrates float round-off: adding a tiny value to a huge one loses bits.
x = 1000000000
for i in range(0, 1000000):
    x = x + 0.000001
x = x - 1000000000
# x = 0.95!? But it should be 1!
# since 0.000001*1000000 = 1
# We want our variables to have 0 mean and equal variance when possible.
# For pixels, between [0, 255], subtract 128 and then divide by 128.
# Generate initial weights from Gaussian with zero mean and small sigma, so it has small peaks i.e. "isn't opinionated"
##############################################
####  26/31 Validation and Test Set Size  ####
##############################################
#Rule of 30: A change that affects 30 examples in your validation set is significant
#Assume you have 3000 examples, Change from 80% to 81% would be significant,
#But change from 80% to 80.5% or 80.1% would not be (only 15 and 3 labels
#would change, respectively)
#If you use 30000 for valivation set, then accuracy becomes significant to the
#1st decimal place ( >0.1% )
#SGD -has following Hyperparameters: Adaptive Learning Rate, Momentum, Batch Size, Weight #Initialization.
#When things don't work, start by lowering your learning rate.
|
class Session:
    """
    Class that handles an individual game session.
    """

    def __init__(self, client_address):
        self.client_address = client_address
        # Set to True once the client authenticates — NOTE(review): nothing
        # in this class currently flips it; callers must update it themselves.
        self.client_authenticated = False

    def authenticate(self, user, password):
        """
        Handles user login and authentication.

        Returns True for a valid user/password pair, otherwise False.
        SECURITY: credentials are hard-coded ("root"/"toor"); replace with a
        real credential store before any non-test use.
        """
        # Simplified from the original if/else that returned True/False.
        return user == "root" and password == "toor"

    def handle_input(self, input):
        """
        Handles a single line of raw text input from a player.
        """
# Read an integer and report whether it is a single digit from 1 to 9.
a2=int(input())
# NOTE(review): range(1,10) excludes 10 — confirm the upper bound is intended.
if a2 in range(1,10):
    print("yes")
else:
    print("no")
|
# telegram bot token. Get it here https://t.me/BotFather
API_TOKEN = 'PUT_TOKEN_HERE'
# Base URL for downloading files uploaded to the bot.
URL_BASE = 'https://api.telegram.org/file/bot' + API_TOKEN + '/'
# violation photos count upper bound in single appeal
MAX_VIOLATION_PHOTOS = 10
# appeal language suffixes (Belarusian / Russian)
BY = '_by'
RU = '_ru'
LANGUAGES = [BY, RU]
# bot config
PREVIOUS_ADDRESS_PREFIX = '/saved_'
PREVIOUS_ADDRESS_REGEX = r'\/saved_\d+'
APPEAL_STORAGE_LIMIT = 3
TEMP_FILES_PATH = '/tmp/temp_files_parkun'
# regionalization: internal identifiers for Minsk districts and the regions
MINSK = 'minsk'
CENTRALNY = 'centralny'
SAVIECKI = 'saviecki'
PIERSAMAJSKI = 'piersamajski'
PARTYZANSKI = 'partyzanski'
ZAVODSKI = 'zavodski'
LENINSKI = 'leninski'
KASTRYCNICKI = 'kastrycnicki'
MASKOUSKI = 'maskouski'
FRUNZIENSKI = 'frunzienski'
BREST_REGION = 'brest_region'
VITSEBSK_REGION = 'vitsebsk_region'
HOMEL_REGION = 'homel_region'
HRODNA_REGION = 'hrodna_region'
MINSK_REGION = 'minsk_region'
MAHILEU_REGION = 'mahileu_region'
# Hierarchy of selectable regions: Minsk city contains its districts,
# the oblast regions have no sub-regions.
REGIONS = {
    MINSK: {CENTRALNY: {},
            FRUNZIENSKI: {},
            KASTRYCNICKI: {},
            LENINSKI: {},
            MASKOUSKI: {},
            PARTYZANSKI: {},
            PIERSAMAJSKI: {},
            SAVIECKI: {},
            ZAVODSKI: {}, },
    BREST_REGION: {},
    VITSEBSK_REGION: {},
    HOMEL_REGION: {},
    HRODNA_REGION: {},
    MINSK_REGION: {},
    MAHILEU_REGION: {},
}
# Region identifier -> OpenStreetMap place name used for geocoding.
OSM_REGIONS = {
    CENTRALNY: 'Centralny raion, Minsk',
    FRUNZIENSKI: 'Frunzienski paion, Minsk',
    KASTRYCNICKI: 'Kastryčnicki raion, Minsk',
    LENINSKI: 'Leninski raion, Minsk',
    MASKOUSKI: 'Maskoŭski raion, Minsk',
    PARTYZANSKI: 'Partyzanski raion, Minsk',
    PIERSAMAJSKI: 'Pieršamajski Rajon, Minsk',
    SAVIECKI: 'Saviecki raion, Minsk',
    ZAVODSKI: 'Zavodski raion, Minsk',
    MINSK: 'Minsk, Belarus',
    BREST_REGION: 'Brest Region, Belarus',
    VITSEBSK_REGION: 'Vitsebsk Region, Belarus',
    HOMEL_REGION: 'Homel Region, Belarus',
    HRODNA_REGION: 'Hrodna Region, Belarus',
    MINSK_REGION: 'Minsk Region, Belarus',
    MAHILEU_REGION: 'Mahilyow Region, Belarus',
}
# redis connection settings
REDIS_HOST = 'localhost'
REDIS_PORT = '16379'
REDIS_PASSWORD = 'redis'
# bot owner's telegram id to receive feedback (placeholder — set a real id)
ADMIN_ID = 00000000
# yandex maps
YANDEX_MAPS_API_KEY = 'UNNECESSARY_FOR_DEV'
BASE_YANDEX_MAPS_URL = 'http://localhost:18080/yandex_maps/?'
ADDRESS_FAIL = 'no_address'
# to post into channel bot needs to be admin there
CHANNEL = '@channel_name'
TRASH_CHANNEL = '@channel_name'
RESPONSE_HASHTAG = '#ответГАИ'
RESPONSE_EXAMPLE = 'https://t.me/parkun/24390'
# email verifier url
MAIL_VERIFIER_URL = 'http://localhost:18080/validate'  # response 111
VERIFYING_FAIL = '42'
# Twitter
TWI_URL = 'twitter.com/SOME_TWITTER_ACCOUNT'
# VK
VK_URL = 'vk.com/SOME_VK_ACCOUNT'
# RabbitMQ connection settings
RABBIT_HOST = 'localhost'
RABBIT_HTTP_PORT = '15672'
RABBIT_AMQP_PORT = '5672'
RABBIT_LOGIN = 'parkun_bot'
RABBIT_PASSWORD = 'parkun_bot'
RABBIT_HTTP_ADDRESS = \
    f'http://{RABBIT_LOGIN}:{RABBIT_PASSWORD}@{RABBIT_HOST}:{RABBIT_HTTP_PORT}'
RABBIT_AMQP_ADDRESS = \
    f'amqp://{RABBIT_LOGIN}:{RABBIT_PASSWORD}@{RABBIT_HOST}:{RABBIT_AMQP_PORT}'
# Exchange / routing-key / queue names
RABBIT_EXCHANGE_MANAGING = 'managing'
RABBIT_EXCHANGE_SENDING = 'sending'
RABBIT_EXCHANGE_SHARING = 'sharing'
RABBIT_ROUTING_VIOLATION = 'violation'
RABBIT_ROUTING_APPEAL_TO_QUEUE = 'appeal_to_queue'
RABBIT_QUEUE_STATUS = 'sending_status'
RABBIT_QUEUE_APPEALS = 'appeal'
# sender messages types
CAPTCHA_TEXT = 'captcha_text'
CAPTCHA_URL = 'captcha_url'
CAPTCHA_FAIL = 'captcha_fail'
GET_CAPTCHA = 'get_captcha'
APPEAL = 'appeal'
CANCEL = 'cancel'
CAPTCHA_OK = 'captcha_ok'
SENDING_CANCELLED = 'sending_cancelled'
FREE_WORKER = 'free_worker'
BUSY_WORKER = 'busy_worker'
# status codes
OK = 'ok'
FAIL = 'fail'
WRONG_INPUT = 'wrong_input'
# Telegra.ph publishing credentials
TPH_ACCESS_TOKEN = "put_token_here"
TPH_SHORT_NAME = "author_nickname"
TPH_AUTHOR_NAME = "author_name"
TPH_AUTHOR_URL = "author_url"
TPH_AUTH_URL = "author_auth_url"
# Pause before task execution
DEFAULT_SCHEDULER_PAUSE = 1  # hours
|
def plus_one(s, j):
    """Return combination string `s` with wheel j rotated up one step (9 wraps to 0).

    The original mutated `s[j]` in place, which raises TypeError for the str
    combinations the BFS feeds it (and corrupted shared state for lists);
    this version is pure and returns a new string.
    """
    digit = '0' if s[j] == '9' else str(int(s[j]) + 1)
    return s[:j] + digit + s[j + 1:]
def minus_one(s, j):
    """Return combination string `s` with wheel j rotated down one step (0 wraps to 9).

    Pure counterpart of plus_one: the original's in-place `s[j] = ...`
    assignment raises TypeError on the str combinations used by the BFS.
    """
    digit = '9' if s[j] == '0' else str(int(s[j]) - 1)
    return s[:j] + digit + s[j + 1:]
def bfs(deadends, target):
    """Minimum number of single-wheel turns to rotate the lock "0000" to `target`.

    Classic "open the lock" breadth-first search; returns -1 when the target
    is unreachable.  Fixes over the original: `dead += (s)` tried to add a
    str to a tuple (TypeError), `visited` was a plain string (substring
    membership), q.pop() removed from the wrong end, and neighbor generation
    mutated the current combination.  Neighbors are generated inline here so
    this function is self-contained.
    """
    dead = set(deadends)
    start = '0000'
    if start in dead:
        return -1
    visited = {start}
    frontier = [start]
    step = 0
    # Level-order expansion: every combination in `frontier` is `step`
    # turns away from the start.
    while frontier:
        next_frontier = []
        for cur in frontier:
            if cur == target:
                return step
            for j in range(4):
                digit = int(cur[j])
                for turned in ((digit + 1) % 10, (digit - 1) % 10):
                    candidate = cur[:j] + str(turned) + cur[j + 1:]
                    if candidate not in visited and candidate not in dead:
                        visited.add(candidate)
                        next_frontier.append(candidate)
        frontier = next_frontier
        step += 1
    return -1
|
import requests,json,os
import win32gui,win32api,win32con
# Bing "image of the day" JSON endpoint (China mirror).
urlbase='https://cn.bing.com/HPImageArchive.aspx?format=js&idx=0&n=1&pid=hp'
r=requests.get(urlbase)
r=r.json()
# Build the full-resolution (1920x1080) image URL from the JSON payload.
url='https://cn.bing.com'+r[u'images'][0][u'urlbase']+'_1920x1080.jpg'
pic=requests.get(url)
# Save the image to a hard-coded local path.
with open(r'C:\Users\chun\Pictures\1.jpg','wb')as f:
    f.write(pic.content)
    f.close()  # redundant: the with-block already closes the file
# Configure the wallpaper registry values and apply the image.
# NOTE(review): WallpaperStyle "2" / TileWallpaper "0" presumably select
# stretched, untiled display — confirm against the Windows documentation.
k=win32api.RegOpenKeyEx(win32con.HKEY_CURRENT_USER,"Control Panel\\Desktop",0,win32con.KEY_SET_VALUE)
win32api.RegSetValueEx(k, "WallpaperStyle", 0, win32con.REG_SZ, "2")
win32api.RegSetValueEx(k, "TileWallpaper", 0, win32con.REG_SZ, "0")
win32gui.SystemParametersInfo(win32con.SPI_SETDESKWALLPAPER,r'C:\Users\chun\Pictures\1.jpg',1+2)
|
#!/usr/bin/env python3
"""Tests for dataset functions"""
import unittest
from pathlib import Path
from evtech import load_dataset
from evtech import Camera
from .test_util import rmtree
def mock_loader(json_path, image_path):
    """Stand-in loader: return a bare Camera that only carries the image path."""
    placeholder_fields = [None] * 6
    return Camera(*placeholder_fields, image_path)
class TestDataset(unittest.TestCase):
    """Tests for evtech.load_dataset using a temporary on-disk fixture."""

    def setUp(self):
        """Set up test fixtures, if any."""
        # Layout expected by load_dataset: temp/nadirs and temp/obliques,
        # each holding one image with a matching JSON metadata file.
        self.tmp = Path("temp/")
        self.nadirs = self.tmp.joinpath("nadirs")
        self.obliques = self.tmp.joinpath("obliques")
        self.nadirs.mkdir(parents=True, exist_ok=True)
        self.obliques.mkdir(parents=True, exist_ok=True)
        # Make fake data
        self.nadirs.joinpath("test.jpg").touch()
        self.nadirs.joinpath("test.json").write_text("{}")
        self.obliques.joinpath("test.jpg").touch()
        self.obliques.joinpath("test.json").write_text("{}")
        pass

    def tearDown(self):
        # Remove the whole fixture tree created in setUp.
        rmtree(self.tmp)
        pass

    def test_load_dataset(self):
        # One camera per directory, each carrying its image path.
        nadirs, obliques = load_dataset(self.tmp, mock_loader)
        self.assertEqual(1,len(nadirs))
        self.assertEqual(1,len(obliques))
        self.assertEqual(self.nadirs.joinpath("test.jpg"), nadirs[0].image_path)
        self.assertEqual(self.obliques.joinpath("test.jpg"), obliques[0].image_path)
from django.db import models
def upload_platform_icon(instance, filename):
    """Build the storage path for a WalletPlatform icon upload.

    The original returned an f-string with no placeholders, so every upload
    mapped to the same literal path; use the uploaded file's own name.
    """
    return f'image/wallets/{filename}'
class WalletPlatform(models.Model):
    """A platform (e.g. an OS or service) a wallet is available on."""

    # Display name of the platform.
    name = models.CharField(
        max_length=50
    )
    # Platform icon; stored under the path built by upload_platform_icon.
    icon = models.FileField(
        upload_to=upload_platform_icon
    )

    def __str__(self):
        return f'{self.name}'
class Wallet(models.Model):
    """A wallet offering, tied to the platform it runs on."""

    # PROTECT: a platform cannot be deleted while wallets reference it.
    platform = models.ForeignKey(
        WalletPlatform,
        on_delete=models.PROTECT
    )
    name = models.CharField(
        max_length=50
    )
    # Homepage / download URL for the wallet.
    url = models.CharField(
        max_length=255
    )
    # Whether the wallet supports cold staking.
    cold_staking = models.BooleanField(default=False)

    def __str__(self):
        return f'{self.name} - {self.platform.name}'
|
"""
These are membership level related models.
"""
from dataclasses import dataclass, field
from typing import List, Optional
from .base import BaseModel
from .common import BaseResource, BaseApiResponse
@dataclass
class MembershipLevelSnippetLevelDetails(BaseModel):
    """
    A class representing the level details of a membership level snippet.
    """
    # Human-readable name of the level.
    displayName: Optional[str] = field(default=None)
@dataclass
class MembershipsLevelSnippet(BaseModel):
    """
    A class representing the membership level snippet.
    Refer: https://developers.google.com/youtube/v3/docs/membershipsLevels#snippet
    """
    # ID of the channel that offers this membership level.
    creatorChannelId: Optional[str] = field(default=None)
    levelDetails: Optional[MembershipLevelSnippetLevelDetails] = field(
        default=None, repr=False
    )
@dataclass
class MembershipsLevel(BaseResource):
    """
    A class representing the membership level.
    Refer: https://developers.google.com/youtube/v3/docs/membershipsLevels
    """
    snippet: Optional[MembershipsLevelSnippet] = field(default=None, repr=False)
@dataclass
class MembershipsLevelListResponse(BaseApiResponse):
    """
    A class representing the memberships level's retrieve response info.
    Refer: https://developers.google.com/youtube/v3/docs/membershipsLevels/list#response
    """
    # The list of membership levels returned by the API call.
    items: Optional[List[MembershipsLevel]] = field(default=None, repr=False)
|
#!/usr/bin/python
import exceptions
import platform
import getopt
import sys
from os.path import exists
import logging
import logging.handlers
from storm.monitoring.sensor.api import converter
from storm.monitoring.sensor.api import publisher
from storm.monitoring.sensor.api import measures
from storm.monitoring.sensor.api import units
from storm.monitoring.sensor.api import metrics
from storm.monitoring.sensor.api import services
from storm.monitoring.sensor.api import sensor
from storm.monitoring.sensor.api import built_sensors
from storm.monitoring.sensor.host.mem import mem_sensor
from storm.monitoring.sensor.host.mem import mem_check
from storm.monitoring.sensor.host.cpu import cpu_sensor
from storm.monitoring.sensor.host.cpu import cpu_check
from storm.monitoring.sensor.service.logsize import logsize_sensor
from storm.monitoring.sensor.service.logsize import file_check
from storm.monitoring.sensor.service.procmem import procmem_sensor
from storm.monitoring.sensor.service.procmem import procmem_check
from storm.monitoring.sensor.common import string_converter
from storm.monitoring.sensor.common import file_publisher
from storm.monitoring.sensor.common import usage
from storm.monitoring.sensor.common import configuration_parser
from storm.monitoring.sensor.common import file_compressor
class ParsingError(exceptions.Exception):
    """Raised when a command-line option cannot be parsed."""
    pass
class InputError(exceptions.Exception):
    """Raised when a supplied metric/service/config value is invalid."""
    pass
class Parser(object):
def __init__(self):
self.parameters = {}
self.parameters['monitoring_version'] = '1.0.0'
self.parameters['publish_file'] = '/var/log/monitoring/'
self.parameters['default_conf'] = '/etc/storm/monitoring/sensor-common/services.ini'
self.parameters['hostname'] = platform.node()
self.parameters['metric'] = ''
self.parameters['services'] = []
def do_parsing(self):
try:
opts, args = getopt.getopt(sys.argv[1:],
"hVc:m:",
["help", "version", "config=", "metric="])
#"hvVs:m:",
#["help", "verbose", "version", "service=", "metric="])
except getopt.GetoptError, err:
print str(err)
usage.get_usage()
sys.exit(2)
for opt, value in opts:
if opt in ("-h", "--help"):
usage.get_usage()
sys.exit(0)
elif opt in ("-V", "--version"):
print self.parameters['monitoring_version']
sys.exit(0)
elif opt in ("-c", "--config"):
self.parameters['default_conf'] = value
elif opt in ("-m", "--metric"):
get_metrics = metrics.Metrics().get_string_metrics()
if value not in get_metrics.values():
msg = 'The specified metric %s does not exist' % value
raise InputError(msg)
for metric_key in get_metrics:
if value == get_metrics[metric_key]:
self.parameters['metric'] = int(metric_key)
else:
msg = 'The specified %s option is not recognized' % str(opt)
raise ParsingError(msg)
if not exists(self.parameters['default_conf']):
msg='Configuration file does not exist'
raise InputError(msg)
node_services = configuration_parser.NodeServices(
conf=self.parameters['default_conf']).get_node_services()
if 'storm_nodes' not in node_services:
msg='Configuration file is not well formed'
usage.usage()
raise InputError(msg)
for key in node_services['storm_nodes']:
if node_services['storm_nodes'][key].lower() == 'true':
get_services = services.Services().get_string_services()
if key.upper() not in get_services.values():
msg = 'The specified service node %s is not supported' % key.upper()
raise InputError(msg)
for service_key in get_services:
if key.upper() == get_services[service_key]:
self.parameters['services'].append(int(service_key))
if len(self.parameters['services']) == 0:
msg = 'In the configuration file set to true the StoRM services that are installed in the node'
raise InputError(msg)
def check_parameters(self):
if self.parameters['metric'] == '':
raise InputError('Metric parameter is not provided')
if self.parameters['services'] == []:
raise InputError('Service parameter is not provided')
def get_parameters(self):
self.do_parsing()
self.check_parameters()
return self.parameters
if __name__ == '__main__':
logger = logging.getLogger('storm_sensor')
log_name = '/var/log/monitoring/storm_sensor.log'
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
file_rotating = file_compressor.CompressedRotatingFileHandler(log_name, maxBytes=1000000000, backupCount=5)
file_rotating.setFormatter(formatter)
file_rotating.setLevel(logging.DEBUG)
logger.addHandler(file_rotating)
try:
logger.info("Get parameters")
parameters = Parser().get_parameters()
build_monitoring = built_sensors.BuiltSensors()
if metrics.Metrics().is_host_metric(parameters['metric']):
logger.info("Set host metric")
if parameters['metric'] == metrics.Metrics.mem:
build_monitoring.add_sensor(mem_sensor.MemSensor(parameters['hostname'], parameters['services']))
logger.info("Set mem sensor")
elif parameters['metric'] == metrics.Metrics.cpu:
build_monitoring.add_sensor(cpu_sensor.CpuSensor(parameters['hostname'], parameters['services']))
logger.info("Set cpu sensor")
else:
raise sensor.SensorError('Not implemented yet')
publish = file_publisher.FilePublisher(parameters['publish_file'] + '/host/')
elif metrics.Metrics().is_service_metric(parameters['metric']):
logger.info("Set service metric")
for service_type in parameters['services']:
if parameters['metric'] == metrics.Metrics.log_size:
build_monitoring.add_sensor(logsize_sensor.LogSizeSensor(parameters['hostname'], service_type))
logger.info("Set log_size sensor")
elif parameters['metric'] == metrics.Metrics.proc_mem:
build_monitoring.add_sensor(procmem_sensor.ProcMemSensor(parameters['hostname'], service_type))
logger.info("Set proc_mem sensor")
else:
raise sensor.SensorError('Not implemented yet')
publish = file_publisher.FilePublisher(parameters['publish_file'] + '/service/')
else:
raise sensor.SensorError('Not implemented yet')
for sen in build_monitoring.get_sensors():
logger.info("Run sensor")
sen.run()
logger.info("of the type %s" % metrics.Metrics().get_string_metric(sen.get_metric_type()))
conver = string_converter.StringConverter()
if metrics.Metrics().is_host_metric(parameters['metric']):
tmp_services = sen.get_storm_service_types()
elif metrics.Metrics().is_service_metric(parameters['metric']):
convert_int_in_list = []
convert_int_in_list.append(sen.get_storm_service_types())
tmp_services = convert_int_in_list
logger.info("for the services %s" % ','.join([services.Services().get_string_service(service) for service in tmp_services]))
for produced_measure in sen.get_measures().get_measures():
output = conver.do_conversion(produced_measure, sen.get_timestamp())
publish.do_publish(tmp_services, sen.get_metric_type(), output)
except converter.ConversionError, err:
logger.error(err)
print '\n\nExecution: ', err
except publisher.PublisherError, err:
logger.error(err)
print '\n\nExecution: ', err
except sensor.SensorError, err:
logger.error(err)
print '\n\nExecution: ', err
except file_check.FileSizeError, err:
logger.error(err)
print '\n\nExecution: ', err
except procmem_check.PsError, err:
logger.error(err)
print '\n\nExecution: ', err
except units.UnitsError, err:
logger.error(err)
print '\n\nExecution: ', err
except services.ServicesError, err:
logger.error(err)
print '\n\nExecution: ', err
except ParsingError,err:
logger.error(err)
print '\n\nExecution: ', err
usage.get_usage()
except InputError,err:
logger.error(err)
print '\n\nExecution: ', err
usage.get_usage()
except KeyboardInterrupt:
logger.error(err)
print '\n\nExecution n!'
sys.exit(1)
|
import sqlite3
import os
from flask import current_app
from flask_sqlalchemy import SQLAlchemy
from init import db
class Calculator(db.Model):
    """One cached weather query result, keyed by the querying user."""
    __tablename__ = 'weather_info'

    id = db.Column(db.Integer, primary_key=True)
    city = db.Column(db.String(80))
    date = db.Column(db.String(120))
    weather = db.Column(db.String(120))
    wind = db.Column(db.String(120))
    temperature = db.Column(db.String(120))
    last_updated_on = db.Column(db.String(120))
    queried_by = db.Column(db.String(120))

    def __init__(self, items):
        # `items` is a dict carrying one value per column below.
        self.city = items['city']
        self.date = items['date']
        self.weather = items['weather']
        self.wind = items['wind']
        self.temperature = items['temperature']
        self.last_updated_on = items['last_updated_on']
        self.queried_by = items['queried_by']

    def as_dict(self):
        """Return this row as a plain {column_name: value} dict."""
        return {col.name: getattr(self, col.name) for col in self.__table__.columns}
class User(db.Model):
    """Application user account."""
    __tablename__ = 'user'

    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(80))
    # SECURITY NOTE(review): password is stored in plain text here — it
    # should be hashed (e.g. werkzeug.security) before any real use.
    password = db.Column(db.String(120))

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def as_dict(self):
        """Return this row as a plain {column_name: value} dict."""
        return {col.name: getattr(self, col.name) for col in self.__table__.columns}
# WARNING: runs at import time and wipes the entire database before
# recreating the schema — every import of this module loses stored data.
db.drop_all()
db.create_all()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from unicodedata import normalize
# sistema
import os
#regex
import re
def remover_acentos(txt,codif='iso-8859-1'):
    # Strip accents (Python 2): decode the byte string with the given codec,
    # decompose to NFKD, then drop the non-ASCII combining marks.
    return normalize('NFKD', txt.decode(codif)).encode('ASCII','ignore')
# Accumulators for the generated accounting output (filled further below).
texto_pronto = ''
texto_auxiliar = ''
# Chart-of-accounts mapping: internal label -> account code.
# Empty strings mark accounts not yet assigned a code.
contas = { 'bb14466': '1.1.1.02.001',
           'bb114466': '1.1.1.02.006',
           'bb8873': '1.1.1.02.005',
           'bb208873': '1.1.1.02.007',
           'caixa': '1.1.1.01.001',
           'clientes_diversos': '1.1.2.01.001',
           'agua': '4.2.1.01.006',
           'energia': '4.1.1.03.003',
           'telefone': '4.2.1.01.005',
           'icms' : '5.1.1.01',
           'ipva': '4.2.1.01.017',
           'iptu': '',
           'ipi': '',
           'cofins': '',
           'inss': '2.1.3.01.007',
           'irrf': '4.2.1.03.008',
           'fgts': '2.1.4.01.007',
           'fornecedores_diversos': '2.1.1.01.001',
           'tarifas_bancarias': '4.2.1.04.005',
           'iof': '4.2.1.04.007',
           'prolabore': '5.1.2.01',
           'pagamento_emprestimo': '5.1.5',
           'definir': 'INDEFINIDO',
           'frete': 'FRETE',
           'sesi': '',
           'tarifas_cartorio': '4.2.1.01.033',
           'juros': '4.2.1.04.001',
           'darf': '5.1.1.02',
           'comunicacao': '4.2.1.02.004',
           'transferencia_diversos': '5.1.2.02',
           'unimed': '4.1.1.02.066',
           'audaces': '4.2.1.01.024',
           'prestaserv': '4.2.1.01.031',
           'outros_debitos': '5.1.4',
           'outros_creditos': '5.1.3',
           'sefaz_sp': '3.1.1.03.009',
           'sefaz_ms': '3.1.1.03.009',
           'sefaz_rj': '3.1.1.03.009'
           }
# Convert every Banco do Brasil statement (*.txt) in the working directory
# into fixed-width accounting records accumulated in `texto_pronto`.
# Field positions (linha[a:b]) follow the bank's fixed-column statement layout.
for file in sorted(os.listdir(os.getcwd())):
    if file.endswith(".txt"):
        # print(file)
        cont_linha = 0
        arq = open(file, 'r')
        texto = arq.readlines()
        for linha in texto :
            # Identify which bank account this statement line belongs to.
            if linha[30:37] == "14466-5":
                conta_banco = contas['bb14466']
            elif linha[30:38] == "114466-9":
                conta_banco = contas['bb114466']
            elif linha[30:38] == "208873-8":
                conta_banco = contas['bb208873']
            elif linha[30:36] == "8873-0":
                conta_banco = contas['bb8873']
            # Only process lines that carry a date.
            if re.match(r"\d{2}/\d{2}/\d{4}", linha[3:13]):
                # Skip operations that are not used (opening balance, closing
                # balance, blocked deposit — the statement has another entry
                # for the release).
                if linha[30:33] != "000" and linha[30:33] != "999" and linha[30:33] != "911" and linha[30:33] != "912" and linha[30:33] != "913":
                    conta_credito = ""
                    conta_debito = ""
                    # Date: dd/mm/yyyy -> yyyymmdd.
                    data = linha[3:13]
                    data_formatada = data[6:10]+data[3:5]+data[0:2]
                    # Amount: Brazilian "1.234,56" -> "1234.56".
                    valor = linha[75:89]
                    valor = valor.replace(".", "")
                    valor = valor.replace(",", ".")
                    # Pad the amount to the fixed field width.
                    valor = valor.rjust(14," ")
                    # Document number (spaces removed).
                    numero_documento = linha[55:74]
                    numero_documento = numero_documento.replace(" ", "")
                    # "D" marks a debit on the bank account, i.e. a credit entry.
                    if linha[90:91] =="D":
                        conta_credito = conta_banco
                    else:
                        conta_debito = conta_banco
                    # The next statement line usually names the counterparty.
                    proxima_linha = str(texto[cont_linha+1])
                    # Accounts and history text.
                    historico = remover_acentos(linha[34:55]).rstrip()
                    linha = remover_acentos(linha)
                    # Cleared cheque.
                    if linha[30:33] == "102" :
                        conta_debito = contas['outros_debitos']
                    # Cheque paid at another branch.
                    elif linha[30:33] == "103" :
                        conta_debito = contas['outros_debitos']
                    # Bill payment.
                    elif linha[30:33] == "109" :
                        historico = historico + ": " + proxima_linha[30:-2]
                        if 'TRANSRAPIDO' in proxima_linha or 'TRANSPAULO' in proxima_linha:
                            conta_debito = contas['fornecedores_diversos']
                        elif 'SESI' in proxima_linha:
                            # conta_debito = contas['sesi']
                            conta_debito = contas['fornecedores_diversos']
                        else:
                            conta_debito = contas['fornecedores_diversos']
                    # Scheduled transfer.
                    elif linha[30:33] == "114" :
                        conta_debito = contas['outros_debitos']
                    # IOF tax.
                    elif linha[30:33] == "118" :
                        conta_debito = contas['iof']
                    # Transferred to savings.
                    elif linha[30:33] == "120" :
                        conta_debito = contas['outros_debitos']
                    # Balance carried over.
                    elif linha[30:33] == "121" :
                        conta_debito = contas['outros_debitos']
                    # Interest charge.
                    elif linha[30:33] == "123" :
                        conta_debito = contas['juros']
                    # Bank fees (several operation codes).
                    elif linha[30:33] == "124" or linha[30:33] == "170" or linha[30:33] == "435" or linha[30:33] == "262" or linha[30:33] == "310" or linha[30:33] == "165" or linha[30:33] == "392" or linha[30:33] == "263":
                        # print linha[34:47]
                        if linha[34:47] == 'Desp Cartorio':
                            conta_debito = contas['tarifas_cartorio']
                        else:
                            conta_debito = contas['tarifas_bancarias']
                    # Identified deposit.
                    elif linha[30:33] == "129" :
                        conta_debito = contas['tarifas_bancarias']
                        historico = historico + ": " + proxima_linha[30:-2]
                    # Debit transfer with no payee.
                    elif linha[30:33] == "144" :
                        conta_debito = contas['outros_debitos']
                    # DOC issued - debit.
                    elif linha[30:33] == "166" :
                        conta_debito = contas['outros_debitos']
                        historico = historico + ": " +proxima_linha[30:-2]
                    # Ourocap PU.
                    elif linha[30:33] == "168" :
                        conta_debito = contas['outros_debitos']
                    # Loan payment.
                    elif linha[30:33] == "177" or linha[30:33] == "500" :
                        conta_debito = contas['pagamento_emprestimo']
                    # INSS (social security).
                    elif linha[30:33] == "196" :
                        conta_debito = contas['inss']
                    # Card purchase.
                    elif linha[30:33] == "234" :
                        conta_debito = contas['outros_debitos']
                    # Water bill.
                    elif linha[30:33] == "240" :
                        conta_debito = contas['agua']
                    # Payroll.
                    elif linha[30:33] == "250" :
                        conta_debito = contas['outros_debitos']
                    # Credit reversal.
                    elif linha[30:33] == "280" :
                        conta_debito = contas['outros_debitos']
                    # Payment via self-service terminal.
                    elif linha[30:33] == "303" :
                        conta_debito = contas['outros_debitos']
                        historico = historico + ": " +proxima_linha[30:-2]
                    # Credit-card bill payment.
                    elif linha[30:33] == "328" :
                        conta_debito = contas['outros_debitos']
                    # BB CDB DI investment.
                    # NOTE(review): "328" is already matched by the branch above,
                    # so only "351" can ever reach this one.
                    elif linha[30:33] == "328" or linha[30:33] == "351" :
                        conta_debito = contas['outros_debitos']
                    # Electricity bill.
                    elif linha[30:33] == "362" :
                        conta_debito = contas['energia']
                    # Phone bill.
                    elif linha[30:33] == "363" :
                        conta_debito = contas['telefone']
                    # Taxes: the specific tax is named on the following line.
                    elif linha[30:33] == "375" :
                        # print proxima_linha
                        historico = historico + ": " +proxima_linha[30:-2]
                        if 'IPVA' in proxima_linha:
                            conta_debito = contas['ipva']
                        elif 'ICMS' in proxima_linha:
                            conta_debito = contas['icms']
                        elif 'DARF' in proxima_linha:
                            conta_debito = contas['darf']
                        elif 'FGTS' in proxima_linha:
                            conta_debito = contas['fgts']
                        elif 'GNRE-SEFAZ-SP' in proxima_linha:
                            conta_debito = contas['sefaz_sp']
                        elif 'SEFAZ RJ GNRE' in proxima_linha:
                            conta_debito = contas['sefaz_rj']
                        elif 'GNRE ELETRONICA-SEFAZ MS' in proxima_linha:
                            conta_debito = contas['sefaz_ms']
                    # TED electronic transfer - debit.
                    elif linha[30:33] == "393" :
                        conta_debito = contas['outros_debitos']
                        historico = historico + ": " +proxima_linha[30:-2]
                    # Online transfer - debit; payee decides the account.
                    elif linha[30:33] == "470" :
                        historico = historico + ": " +proxima_linha[46:-2]
                        texto_auxiliar += proxima_linha
                        if '10548' in proxima_linha or '21728' in proxima_linha:
                            conta_debito = contas['prolabore']
                        elif 'GANDHI' in proxima_linha:
                            # print 'GANDHI'
                            conta_debito = contas['comunicacao']
                        elif 'UNIMED' in proxima_linha:
                            conta_debito = contas['unimed']
                        elif 'TECNOLOGIA' in proxima_linha or 'AUDACES' in proxima_linha:
                            conta_debito = contas['audaces']
                        elif 'PRESTASERV' in proxima_linha:
                            conta_debito = contas['prestaserv']
                        else :
                            conta_debito = contas['transferencia_diversos']
                    # Payment received from customers.
                    elif linha[30:33] == "612" :
                        conta_credito = contas['clientes_diversos']
                        historico = historico + ": " +proxima_linha[30:-2]
                    # Balance carried over.
                    elif linha[30:33] == "621" :
                        conta_credito = contas['definir']
                    # DOC supplier/fees - credit.
                    elif linha[30:33] == "623" :
                        conta_credito = contas['clientes_diversos']
                        historico = historico + ": " +proxima_linha[40:-2]
                    # Registered collection received.
                    elif linha[30:33] == "624" :
                        conta_credito = contas['clientes_diversos']
                    # Deposit release.
                    elif linha[30:33] == "631" :
                        conta_credito = contas['caixa']
                    # Bank order - credit.
                    elif linha[30:33] == "632" :
                        conta_credito = contas['clientes_diversos']
                    # Loan - credit.
                    elif linha[30:33] == "677" :
                        conta_credito = contas['outros_creditos']
                    # Transfer.
                    elif linha[30:33] == "729" :
                        conta_credito = contas['clientes_diversos']
                    # Cielo (card acquirer).
                    elif linha[30:33] == "732" :
                        conta_credito = contas['caixa']
                    # Brasilcap products.
                    elif linha[30:33] == "742" :
                        conta_credito = contas['caixa']
                    # DOC returned.
                    elif linha[30:33] == "776" :
                        conta_credito = contas['outros_creditos']
                    # Post office.
                    elif linha[30:33] == "789" :
                        conta_credito = contas['outros_creditos']
                    # BB CDB DI redemption.
                    elif linha[30:33] == "798" or linha[30:33] == "989" :
                        conta_credito = contas['outros_creditos']
                    # Online deposit - credit.
                    elif linha[30:33] == "830" :
                        conta_credito = contas['caixa']
                    # Online transfer - credit.
                    elif linha[30:33] == "870":
                        conta_credito = contas['clientes_diversos']
                        historico = historico + ": " +proxima_linha[30:-2]
                    # Scheduled transfer.
                    elif linha[30:33] == "874" :
                        conta_debito = contas['outros_debitos']
                        historico = historico + ": " +proxima_linha[30:-2]
                    # Transferred to savings.
                    elif linha[30:33] == "875" :
                        conta_credito = contas['outros_debitos']
                        historico = historico + ": " +proxima_linha[30:-2]
                    # Cheque deposit cleared.
                    elif linha[30:33] == "910" :
                        conta_credito = contas['caixa']
                    # TED credited to the account.
                    elif linha[30:33] == "976" :
                        conta_credito = contas['clientes_diversos']
                        historico = historico + ": " +proxima_linha[54:-2]
                    # TED returned - credit.
                    elif linha[30:33] == "983" :
                        conta_credito = contas['outros_creditos']
                        historico = historico + ": " +proxima_linha[30:-2]
                    # Unknown operation code: flag it in the output.
                    elif conta_debito == "" or conta_credito == "":
                        print "Operação desconhecida: "+linha[30:33]
                        historico += "desconhecida"
                        print(file)
                    # Collapse repeated spaces and pad the history to 50 chars.
                    historico = re.sub(' +',' ',historico)
                    historico = historico.ljust(50," ")
                    historico = historico[0:50]
                    # Pad the account codes to the fixed field width.
                    conta_credito = conta_credito.ljust(17," ")
                    conta_debito = conta_debito.ljust(17," ")
                    # Assemble the fixed-width output record, field by field:
                    # date
                    linha_pronta = data_formatada
                    # document
                    linha_pronta += " "
                    # debit account
                    linha_pronta += conta_debito
                    # credit account
                    linha_pronta += conta_credito
                    # amount
                    linha_pronta += valor
                    # HP
                    linha_pronta += " "
                    # history
                    linha_pronta += historico
                    # debit branch
                    linha_pronta += "00"
                    # credit branch
                    linha_pronta += "00"
                    # event
                    linha_pronta += " "
                    # cont
                    linha_pronta += " "
                    # user
                    linha_pronta += " "
                    # Integ_to
                    linha_pronta += " "
                    # cost center
                    linha_pronta += " "
                    # administrative unit
                    linha_pronta += " "
                    # CP code
                    linha_pronta += " "
                    # XRef
                    linha_pronta += " "
                    # document number
                    linha_pronta += numero_documento
                    linha_pronta += "\n"
                    # print linha_pronta
                    texto_pronto += linha_pronta
            cont_linha += 1
        # print texto_pronto
        arq.close()
#imprime texto auxiliar
# arq_novo = open("auxiliar.TXT","w")
# arq_novo.write(texto_auxiliar)
# arq_novo.close()
# Exporta para o novo aquivo
arq_novo = open("CTBIL109.TXT","w")
arq_novo.write(texto_pronto)
arq_novo.close()
print 'Exportação terminada.'
|
#!/usr/bin/env python
import tensorflow as tf
# Model parameters — NOTE: this uses the TensorFlow 1.x graph/session API.
session = tf.Session()
# Trainable scalars stored as 1-element vectors.
W = tf.Variable([0.3])
b = tf.Variable([-0.3])
# Model inputs and outputs (fed at run time).
x = tf.placeholder(tf.float32)
y = tf.placeholder(tf.float32)
init = tf.global_variables_initializer()
# Initialise W and b, then overwrite them with the values that give
# zero loss on the data below.
session.run(init)
fixW = tf.assign(W, [-1.0])
fixb = tf.assign(b, [1.0])
session.run([fixW, fixb])
# loss = reduce_sum((W*x + b - y)^2)
linear_model = W * x + b
squared_deltas = tf.square(linear_model - y)
loss = tf.reduce_sum(squared_deltas)
# With W=-1, b=1 the model reproduces y exactly, so this prints 0.0.
print(session.run(loss, {
    x: [1, 2, 3, 4],
    y: [0, -1, -2, -3]
}))
|
#!/usr/bin/env python
# Copyright 2017 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup script for Forseti Security tools."""
import os
import subprocess
import sys
from setuptools import find_packages
from setuptools import setup
from setuptools.command.install import install
import google.cloud.security
# Version string comes from the package being installed.
FORSETI_VERSION = google.cloud.security.__version__
# Namespace packages this distribution participates in.
NAMESPACE_PACKAGES = [
    'google',
    'google.cloud',
    'google.cloud.security'
]
# Runtime dependencies (mostly pinned).
INSTALL_REQUIRES = [
    'anytree==2.1.4',
    'futures==3.0.5',
    'google-api-python-client==1.6.1',
    'Jinja2==2.9.5',
    'MySQL-python==1.2.5',
    'netaddr>=0.7.19',
    'protobuf>=3.2.0',
    'PyYAML==3.12',
    'ratelimiter==1.1.0',
    'retrying==1.3.3',
    'requests[security]==2.18.4',
    'sendgrid==3.6.3',
    'SQLAlchemy==1.1.9',
    'pygraph>=0.2.1',
    'unicodecsv==0.14.1',
]
# Build-time dependencies (needed to compile the protos).
SETUP_REQUIRES = [
    'google-apputils==0.4.2',
    'python-gflags==3.1.1',
    'grpcio==1.4.0',
    'grpcio-tools==1.4.0',
    'protobuf>=3.2.0',
]
# Extra dependencies used only by the test suite.
TEST_REQUIRES = [
    'mock==2.0.0',
    'SQLAlchemy==1.1.9',
    'parameterized==0.6.1',
]
# This project targets Python 2.7 exclusively.
if sys.version_info < (2, 7):
    sys.exit('Sorry, Python < 2.7 is not supported.')
if sys.version_info.major > 2:
    sys.exit('Sorry, Python 3 is not supported.')
def build_protos():
    """Regenerate the protobuf/gRPC sources via the helper script.

    Raises subprocess.CalledProcessError if the build script fails.
    """
    subprocess.check_call(['python', 'build_protos.py', '--clean'])
class PostInstallCommand(install):
    """Custom `install` command that builds the protos before installing."""
    def run(self):
        # Generated proto modules must exist before the egg is assembled.
        build_protos()
        install.do_egg_install(self)
# Package definition; installed as `forseti-security`.
setup(
    name='forseti-security',
    version=FORSETI_VERSION,
    description='Forseti Security tools',
    author='Google Inc.',
    author_email='opensource@google.com',
    url='https://github.com/GoogleCloudPlatform/forseti-security',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Console',
        'License :: OSI Approved :: Apache Software License'
    ],
    # Hook proto generation into `pip install` (see PostInstallCommand).
    cmdclass={
        'install': PostInstallCommand,
    },
    install_requires=SETUP_REQUIRES + INSTALL_REQUIRES,
    setup_requires=SETUP_REQUIRES,
    tests_require=INSTALL_REQUIRES + SETUP_REQUIRES + TEST_REQUIRES,
    packages=find_packages(exclude=[
        '*.tests', '*.tests.*', 'tests.*', 'tests']),
    include_package_data=True,
    # Ship the e-mail templates alongside the code.
    package_data={
        '': ['cloud/security/common/email_templates/*.jinja']
    },
    namespace_packages=NAMESPACE_PACKAGES,
    google_test_dir='tests',
    license='Apache 2.0',
    keywords='gcp google cloud platform security tools',
    # Console entry points for the four Forseti tools.
    entry_points={
        'console_scripts': [
            'forseti_inventory = google.cloud.security.stubs:RunForsetiInventory',
            'forseti_scanner = google.cloud.security.stubs:RunForsetiScanner',
            'forseti_enforcer = google.cloud.security.stubs:RunForsetiEnforcer',
            'forseti_notifier = google.cloud.security.stubs:RunForsetiNotifier',
        ]
    },
    zip_safe=False,  # Set to False: apputils doesn't like zip_safe eggs
)
|
from numpy import *
import operator
import os
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
from os import listdir
from mpl_toolkits.mplot3d import Axes3D
import struct
#读取图片
def read_image(file_name):
    """Read an IDX3 (MNIST-style) image file.

    Returns a float ndarray of shape (image_count, rows*cols), one
    flattened image per row.
    """
    # BUGFIX: use a context manager so the handle is always closed.
    with open(file_name, "rb") as file_handle:
        file_content = file_handle.read()
    offset = 0
    # Header: magic number, image count, rows, cols (big-endian uint32).
    head = struct.unpack_from('>IIII', file_content, offset)
    offset += struct.calcsize('>IIII')
    imgNum = head[1]
    rows = head[2]
    cols = head[3]
    image_size = rows * cols  # pixels per image
    # BUGFIX: the row width was hard-coded to 784 (28x28); honour rows*cols
    # from the header so non-28x28 files work too.
    images = np.empty((imgNum, image_size))
    fmt = '>' + str(image_size) + 'B'  # one unsigned byte per pixel
    for i in range(imgNum):
        images[i] = np.array(struct.unpack_from(fmt, file_content, offset))
        offset += struct.calcsize(fmt)
    return images
#读取标签
def read_label(file_name):
    """Read an IDX1 (MNIST-style) label file into a 1-D numpy array."""
    # BUGFIX: use a context manager so the handle is always closed.
    with open(file_name, "rb") as file_handle:
        file_content = file_handle.read()
    # Header: magic number and label count (big-endian uint32).
    head = struct.unpack_from('>II', file_content, 0)
    offset = struct.calcsize('>II')
    labelNum = head[1]
    bitsString = '>' + str(labelNum) + 'B'  # e.g. '>60000B'
    label = struct.unpack_from(bitsString, file_content, offset)
    return np.array(label)
#KNN算法
def KNN(test_data, dataSet, labels, k):
    """k-nearest-neighbour vote over 10 classes (labels 0-9).

    test_data: 1-D feature vector.
    dataSet:   2-D array, one sample per row.
    labels:    integer class label per sample.
    k:         number of neighbours that vote.
    Returns the winning class id (ties resolve to the highest id, as before).
    """
    # BUGFIX: the original tiled/reshaped to a hard-coded (60000, 784) and
    # crashed on any other dataset size; broadcasting handles every shape.
    diff = np.asarray(dataSet) - np.asarray(test_data)
    distances = ((diff ** 2).sum(axis=1)) ** 0.5  # Euclidean distance per row
    sortedDistIndicies = distances.argsort()  # ascending distance
    classCount = np.zeros((10), np.int32)
    for i in range(k):
        voteIlabel = labels[sortedDistIndicies[i]]
        classCount[voteIlabel] += 1
    # Manual arg-max with >= keeps the original tie-break (later id wins);
    # also renamed: the original shadowed the builtins `max` and `id`.
    best_count = 0
    best_id = 0
    for i in range(classCount.shape[0]):
        if classCount[i] >= best_count:
            best_count = classCount[i]
            best_id = i
    print(best_id)
    return best_id
def test_KNN():
    """Evaluate KNN on a slice of the MNIST test set and print error stats."""
    # MNIST idx files, expected in the current working directory.
    train_image = "train-images.idx3-ubyte"
    test_image = "t10k-images.idx3-ubyte"
    train_label = "train-labels.idx1-ubyte"
    test_label = "t10k-labels.idx1-ubyte"
    # Load the images and their labels.
    train_x = read_image(train_image)
    test_x = read_image(test_image)
    train_y = read_label(train_label)
    test_y = read_label(test_label)
    # Only the first `testRatio` fraction of the test set is evaluated.
    testRatio = 0.01
    testNum = int(test_x.shape[0] * testRatio)
    errorCount = 0
    for i in range(testNum):
        # Misclassification check against the ground-truth label.
        if KNN(test_x[i], train_x, train_y, 30) != test_y[i]:
            errorCount += 1.0
    error_rate = errorCount / float(testNum)
    acc = 1.0 - error_rate
    print("\nthe total number of errors is: %d" % errorCount)
    print("\nthe total error rate is: %f" % (error_rate))
    print("\nthe total accuracy rate is: %f" % (acc))
# Entry point: run the MNIST evaluation when executed as a script.
if __name__ == "__main__":
    test_KNN()
|
from django.shortcuts import render
from django.views import generic
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from django.urls import reverse_lazy
from .forms import SignUpForm, EditProfileForm, PasswordChangedForm
from django.contrib.auth.views import PasswordChangeView, PasswordChangeForm
# Create your views here.
class UserRegisterView(generic.CreateView):
    """Sign-up page backed by the project's SignUpForm."""
    form_class = SignUpForm
    template_name = 'registration/registration.html'
    # After a successful registration, send the user to the login page.
    success_url = reverse_lazy('login')
class UserEditView(generic.UpdateView):
    """Let the logged-in user edit their own profile."""
    form_class = EditProfileForm
    template_name = 'registration/edit_profile.html'
    success_url = reverse_lazy('home')
    def get_object(self):
        # Always edit the requesting user's record (no pk taken from the URL).
        return self.request.user
class PasswordsChangeView(PasswordChangeView):
    """Password-change page using the project's PasswordChangedForm."""
    form_class = PasswordChangedForm
    success_url = reverse_lazy('home')
import os
lista = []  # NOTE(review): never used in this module — candidate for removal
def parent_child():
    """Fork; the child computes 8! and sends it to the parent over a pipe.

    Returns (in the parent) the string the child wrote, e.g. '40320'.
    BUGFIXES vs. the original: the child no longer calls os.wait() (it has
    no children, so that raised), the factorial accumulator is actually
    updated, the result is written as text (write() rejects ints), and the
    child terminates with os._exit() instead of falling through.
    """
    r, w = os.pipe()
    n = os.fork()
    if n > 0:
        # Parent: close the unused write end, then read the child's result.
        print("Parent process and id is : ", os.getpid())
        os.close(w)
        r = os.fdopen(r)
        print("Parent reading")
        read = r.read()  # EOF once the child closes its write end
        r.close()
        os.waitpid(n, 0)  # reap the child to avoid a zombie
        print("Parent reads =", read)
        return read
    else:
        # Child: close the unused read end, compute 8! and write it.
        print("Child process and id is : ", os.getpid())
        os.close(r)
        valor = 8
        fatorial = 1
        while valor > 1:
            fatorial = fatorial * valor
            valor = valor - 1
        w = os.fdopen(w, 'w')
        print("Child writing")
        w.write(str(fatorial))
        w.close()
        # _exit: never return into the caller's stack from the child.
        os._exit(0)
# Exit status handed back to the shell by the child process.
exitstat = 0
# Function that is executed after os.fork() that runs in a new process
def child():
    global exitstat
    exitstat += 1
    print('Hello from child', os.getpid(), exitstat)
    # End this process using os._exit() and pass a status code back to the shell
    os._exit(exitstat)
# This is the parent process code
# This is the parent process code
def parent():
    """Repeatedly fork a child and report its exit status; 'q' quits."""
    while True:
        # Fork this program into a child process
        newpid = os.fork()
        # newpid is 0 if we are in the child process
        if newpid == 0:
            # Call child()
            child()
        # otherwise, we are still in the parent process
        else:
            # os.wait() returns the pid and status and status code
            # On unix systems, status code is stored in status and has to
            # be bit-shifted
            pid, status = os.wait()
            print('Parent got', pid, status, (status >> 8))
        # Any input other than 'q' forks another child.
        if input() == 'q':
            break
# Run the fork/wait demo when executed as a script.
if __name__ == '__main__':
    parent()
import csv
from evaluate import pivotal, pivotality, criticality, prob_pivotal, unpacked_pivotality
from fractions import Fraction
from simulate import *
import json
# Load the pilot experiment definitions (Python 2 script: uses xrange).
with open('../json/pilot.json', 'r') as data:
    file = json.load(data)  # NOTE(review): shadows the Python 2 builtin `file`
    data.close()  # redundant — the with-block closes the handle anyway
# list of cases to run
cases = len(file['experiments'])
with open('model.csv', 'w') as csvfile:
    fieldnames = ['trial', 'index', 'rating']
    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
    writer.writeheader()
    for case in xrange(cases):
        cause = file['experiments'][case]['situation']['cause']
        # Default the effect variable to 'o' when the trial omits it.
        if 'effect' in file['experiments'][case]['situation']:
            effect = file['experiments'][case]['situation']['effect']
        else:
            effect = 'o'
        hierarchy = simulate(file['experiments'][case], cause=cause, effect=effect)
        pivr = pivotality(hierarchy, cause, effect, root=True)
        piv = pivotality(hierarchy, cause, effect)
        crit = criticality(hierarchy, cause, effect, e_value=True)
        # One row per model index; pivotality fractions are scaled to 0-100.
        writer.writerow({'trial': case, 'index' : 'criticality', 'rating': crit})
        writer.writerow({'trial': case, 'index' : 'alternative', 'rating': 100*float(Fraction(pivr))})
        writer.writerow({'trial': case, 'index' : 'newmodel', 'rating': 100*float(Fraction(piv))})
# with open('model.csv', 'w') as csvfile:
#     fieldnames = ['first_name', 'last_name']
#     writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
#     writer.writeheader()
#     writer.writerow({'first_name': 'Baked', 'last_name': 'Beans'})
#     writer.writerow({'first_name': 'Lovely', 'last_name': 'Spam'})
#     writer.writerow({'first_name': 'Wonderful', 'last_name': 'Spam'})
import logging
# Module logger; the logger itself accepts DEBUG and above.
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.DEBUG)
# File handler: writes timestamped records to access.log, INFO and above.
handler = logging.FileHandler(filename='access.log')
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
handler.setFormatter(formatter)
handler.setLevel(logging.INFO)
class OnlyInfoFliter:
    """Logging filter that lets through INFO-level records only."""

    def filter(self, logRecord):
        # Keep the record exactly when its numeric level equals INFO
        # (WARNING and above are rejected too, not just DEBUG).
        is_info = (logRecord.levelno == logging.INFO)
        return is_info
# Attach the INFO-only filter so access.log receives exactly INFO records.
handler.addFilter(OnlyInfoFliter())
LOG.addHandler(handler)
|
# coding: utf-8
# In[5]:
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
# In[6]:
def get_distance(v, w=(1.0 / 3, 1.0 / 3, 1.0 / 3)):
    """Return the weighted Euclidean norm of the 3-vector *v*.

    v: sequence of (at least) three components, e.g. an RGB pixel.
    w: per-component weights; the default averages the channels.
    """
    # Cleanup: dropped the unused locals (a,b,c copies and w1,w2,w3) and
    # replaced the mutable list default with a tuple.
    return (v[0] ** 2 * w[0] + v[1] ** 2 * w[1] + v[2] ** 2 * w[2]) ** 0.5
# In[7]:
def convert_rgb_to_gray_level(img):
    """Convert an (m, n, 3+) RGB image to an (m, n) gray-level array.

    Each pixel becomes sqrt(mean(channel^2)) over the first three channels.
    """
    # BUGFIX: cast to float before squaring — squaring uint8 pixel values
    # (the dtype mpimg.imread returns for JPEGs) overflowed modulo 256.
    # Also vectorised: one NumPy expression instead of a per-pixel loop.
    pix = np.asarray(img, dtype=float)
    weights = np.array([1.0 / 3, 1.0 / 3, 1.0 / 3])
    return np.sqrt((pix[..., :3] ** 2 * weights).sum(axis=-1))
# In[8]:
def convert_gray_level_to_BW(img):
    """Threshold a gray-level image: 1.0 where value > 120, else 0.0.

    Returns a float array of the same (m, n) shape, like the original
    per-pixel loop, but computed in one vectorised pass.
    """
    return (np.asarray(img) > 120).astype(float)
# In[12]:
# Load the test image and show original / gray / black-and-white side by side.
img =mpimg.imread("turtle.jpg")
get_ipython().run_line_magic('matplotlib', 'inline')  # notebook-only magic
im_2=convert_rgb_to_gray_level(img)
im_3=convert_gray_level_to_BW(im_2)
plt.imshow(img)
plt.subplot(1,3,1),plt.imshow(img)
plt.subplot(1,3,2),plt.imshow(im_2,cmap='gray')
plt.subplot(1,3,3),plt.imshow(im_3,cmap='gray')
|
'''''
program: csdn博客爬虫2
function: 采用BeautifulSoup技术实现对某个CSDN主页所有博文的日期、主题、访问量、评论个数信息爬取
'''
import gzip
import re
import urllib.request
from bs4 import BeautifulSoup
# 定义保存文件函数
def saveFile(data, i, path=None):
    """Write one page of post summaries to a gbk-encoded text file.

    data: iterable of printable items, one per post.
    i:    zero-based page index (saved as page i+1).
    path: optional output path; defaults to the original hard-coded layout.
    """
    if path is None:
        path = "F:\\Spider\\05_csdn\\papers\\paper_" + str(i + 1) + ".txt"
    # BUGFIX: `with` guarantees the handle is closed even if a write fails.
    with open(path, 'wb') as file:
        page = '当前页:' + str(i + 1) + '\n'
        file.write(page.encode('gbk'))
        # The file content is gbk; encode each post line before writing.
        for d in data:
            d = str(d) + '\n'
            file.write(d.encode('gbk'))
# 解压缩数据
def ungzip(data):
    """Gunzip *data* if it is gzip-compressed; return it unchanged otherwise."""
    try:
        # print("decompressing...")
        data = gzip.decompress(data)
        # print("done...")
    # BUGFIX: narrowed from a bare `except:` (which also swallowed
    # KeyboardInterrupt/SystemExit) to the errors gzip actually raises
    # for non-gzip or truncated input.
    except (OSError, EOFError):
        print("未经压缩,无需解压...")
    return data
# CSDN爬虫类
class CSDNSpider:
def __init__(self, pageIdx=1, url="http://blog.csdn.net/qq_878799579/article/list/1"):
# 默认当前页
self.pageIdx = pageIdx
self.url = url[0:url.rfind('/') + 1] + str(pageIdx)
self.headers = {
"Connection": "keep-alive",
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 "
"(KHTML, like Gecko) Chrome/51.0.2704.63 Safari/537.36",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"Accept-Encoding": "gzip, deflate, sdch",
"Accept-Language": "zh-CN,zh;q=0.8",
"Host": "blog.csdn.net"
}
# 求总页数
def getPages(self):
req = urllib.request.Request(url=self.url, headers=self.headers)
page = urllib.request.urlopen(req)
# 从我的csdn博客主页抓取的内容是压缩后的内容,先解压缩
data = page.read()
data = ungzip(data)
data = data.decode('utf-8')
# 得到BeautifulSoup对象
soup = BeautifulSoup(data, 'html5lib')
# 计算我的博文总页数
tag = soup.find('div', "pagelist")
pagesData = tag.span.get_text()
# 输出条数 总页数,找到其中的数字
pagesNum = re.findall(re.compile(pattern=r'共(.*?)页'), pagesData)[0]
return pagesNum
# 设置要抓取的博文页面
def setPage(self, idx):
self.url = self.url[0:self.url.rfind('/') + 1] + str(idx)
# 读取博文信息
def readData(self):
ret = []
req = urllib.request.Request(url=self.url, headers=self.headers)
res = urllib.request.urlopen(req)
# 从我的csdn博客主页抓取的内容是压缩后的内容,先解压缩
data = res.read()
data = ungzip(data)
data = data.decode('utf-8')
soup = BeautifulSoup(data, "html5lib")
# 找到所有的博文代码模块
items = soup.find_all('div', "list_item article_item")
for item in items:
# 标题、链接、日期、阅读次数、评论个数
title = item.find('span', "link_title").a.get_text()
link = item.find('span', "link_title").a.get('href')
writeTime = item.find('span', "link_postdate").get_text()
readers = re.findall(re.compile(r'(.∗?)'), item.find('span', "link_view").get_text())[0]
comments = re.findall(re.compile(r'(.∗?)'), item.find('span', "link_comments").get_text())[0]
ret.append('日期:' + writeTime + '\n标题:' + title
+ '\n链接:http://blog.csdn.net' + link
+ '\n' + '阅读:' + readers + '\t评论:' + comments + '\n')
return ret
# 定义爬虫对象
cs = CSDNSpider()
# 求取
pagesNum = int(cs.getPages())
print("博文总页数: ", pagesNum)
for idx in range(pagesNum):
cs.setPage(idx)
print("当前页:", idx + 1)
# 读取当前页的所有博文,结果为list类型
papers = cs.readData()
saveFile(papers, idx)
|
from ctypes import CDLL, util, c_char_p, c_int, POINTER, byref, cast, create_string_buffer
import json
# Load the Fortran solver interface library.
neko = CDLL(util.find_library("nekointf"))
# BUGFIX: ctypes spells the return-type attribute `restype`; assigning to
# `resType` silently created an unused attribute and left the default
# c_int return type in effect.
neko.init.restype = None
neko.finalize.restype = None
neko.solve.argtypes = [POINTER(c_char_p), c_int]
neko.solve.restype = None
def init():
    """Initialise the Fortran-side library state."""
    neko.init()
def finalize():
    """Tear down the Fortran-side library state."""
    neko.finalize()
def solve(case_json):
    """Run the solver on *case_json* (a dict), passed as a JSON C string."""
    cp = python_dict_to_fortran(case_json)
    # NOTE(review): the second argument is the length of the JSON *text*;
    # for non-ASCII content this differs from the encoded byte length of
    # the buffer actually passed — verify which length Fortran expects.
    neko.solve(byref(cp), len(json.dumps(case_json)))
#
# https://degenerateconic.com/fortran-json-python.html
#
def python_str_to_fortran(s):
    """Encode *s* and return it as a NUL-terminated C string pointer."""
    # A mutable buffer keeps the bytes alive; the cast exposes it as char*.
    buf = create_string_buffer(s.encode())
    return cast(buf, c_char_p)
def python_dict_to_fortran(d):
    """Serialize *d* to JSON and hand it over as a C string pointer."""
    payload = json.dumps(d)
    return python_str_to_fortran(payload)
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import http.client
# Plain HTTP GET against a cnblogs blog page; prints the raw body bytes.
conn = http.client.HTTPConnection('www.cnblogs.com')
conn.request('GET', '/vamei')
res = conn.getresponse()
content = res.read()
print(type(content))  # bytes — the body is not decoded
print(content)
# Keep the console window open until the user presses Enter.
input()
|
from base import *
import clsTestService
from general import General
from logger import writeToLog
from editEntryPage import EditEntryPage
import enums
from selenium.webdriver.common.keys import Keys
import re
class MyMedia(Base):
    """Page object for the KMS "My Media" page (locators and actions)."""
    # Class-level defaults, replaced per instance in __init__.
    driver = None
    clsCommon = None
    def __init__(self, clsCommon, driver):
        # Keep references to the shared WebDriver and the common helpers.
        self.driver = driver
        self.clsCommon = clsCommon
#=============================================================================================================
#My Media locators:
#=============================================================================================================
MY_MEDIA_SEARCH_BAR = ('id', 'searchBar')
MY_MEDIA_SEARCH_BAR_OLD_UI = ('id', 'searchBar')
MY_MEDIA_ELASTIC_SEARCH_BAR = ('xpath', "//input[@class='searchForm__text']")
MY_MEDIA_NO_RESULTS_ALERT = ('xpath', "//div[@id='myMedia_scroller_alert' and contains(text(),'There are no more media items.')]")
MY_MEDIA_ENRTY_DELETE_BUTTON = ('xpath', '//*[@title = "Delete ENTRY_NAME"]')# When using this locator, replace 'ENTRY_NAME' string with your real entry name
MY_MEDIA_ENRTY_EDIT_BUTTON = ('xpath', '//*[@title = "Edit ENTRY_NAME"]')# When using this locator, replace 'ENTRY_NAME' string with your real entry name
MY_MEDIA_CONFIRM_ENTRY_DELETE = ('xpath', "//a[contains(@id,'delete_button_') and @class='btn btn-danger']")
MY_MEDIA_ENTRY_CHECKBOX = ('xpath', '//*[@title = "ENTRY_NAME"]')
MY_MEDIA_ACTIONS_BUTTON = ('id', 'actionsDropDown')
MY_MEDIA_ACTIONS_BUTTON_PUBLISH_BUTTON = ('id', 'Publish')
MY_MEDIA_ACTIONS_BUTTON_DELETE_BUTTON = ('id', 'tab-Delete')
MY_MEDIA_ACTIONS_BUTTON_ADDTOPLAYLIST_BUTTON = ('id', 'Addtoplaylists')
MY_MEDIA_PUBLISH_UNLISTED = ('id', 'unlisted')
MY_MEDIA_PUBLISH_PRIVATE = ('id', 'private')
MY_MEDIA_PUBLISH_SAVE_BUTTON = ('xpath', "//button[@class='btn btn-primary pblSave' and text()='Save']")
MY_MEDIA_PUBLISHED_AS_UNLISTED_MSG = ('xpath', "//div[contains(.,'Media successfully set to Unlisted')]")
MY_MEDIA_PUBLISHED_AS_PRIVATE_MSG = ('xpath', "//div[contains(.,'Media successfully set to Private')]")
MY_MEDIA_PAGE_TITLE = ('xpath', "//h1[@class='inline' and contains(text(), 'My Media')]")
MY_MEDIA_PUBLISHED_RADIO_BUTTON = ('id', 'published') #This refers to the publish radio button after clicking action > publish
MY_MEIDA_PUBLISH_TO_CATEGORY_OPTION = ('class_name', 'pblTabCategory')
MY_MEIDA_PUBLISH_TO_CHANNEL_OPTION = ('class_name', 'pblTabChannel')
MY_MEDIA_CHOSEN_CATEGORY_TO_PUBLISH = ('xpath', "//span[contains(.,'PUBLISHED_CATEGORY')]")# When using this locator, replace 'PUBLISHED_CATEGORY' string with your real category/channel name
MY_MEDIA_SAVE_MESSAGE_CONFIRM = ('xpath', "//div[@class='alert alert-success ' and contains(text(), 'Media successfully published')]")
MY_MEDIA_DISCLAIMER_MSG = ('xpath', "//div[@class='alert ' and contains(text(), 'Complete all the required fields and save the entry before you can select to publish it to categories or channels.')]")
#MY_MEDIA_ENTRY_PARNET = ('xpath', "//div[@class='photo-group thumb_wrapper' and @title='ENTRY_NAME']")
MY_MEDIA_ENTRY_CHILD = ('xpath', "//p[@class='status_content' and contains(text(), 'ENTRY_PRIVACY')]")
MY_MEDIA_ENTRY_PARNET = ('xpath', "//span[@class='entry-name' and text() ='ENTRY_NAME']/ancestor::a[@class='entryTitle tight']")
#MY_MEDIA_ENTRY_PUBLISHED_BTN_OLD_UI = ('xpath', "//a[@id ='accordion-ENTRY_ID']")
MY_MEDIA_ENTRY_PUBLISHED_BTN_OLD_UI = ('xpath', "//div[@id ='accordion_ENTRY_ID']")
MY_MEDIA_ENTRY_PUBLISHED_BTN = ('xpath', "//a[@id ='accordion-ENTRY_ID']/i[@class='icon-plus-sign kmstooltip']")
MY_MEDIA_ENTRY_CHILD_POPUP = ('xpath', "//strong[@class='valign-top']")
MY_MEDIA_SORT_BY_DROPDOWNLIST_OLD_UI = ('xpath', "//a[@id='sort-btn']")
MY_MEDIA_SORT_BY_DROPDOWNLIST_NEW_UI = ('xpath', "//a[@id='sortBy-menu-toggle']")
MY_MEDIA_FILTER_BY_STATUS_DROPDOWNLIST = ('xpath', "//a[@id='status-btn']")
MY_MEDIA_FILTER_BY_TYPE_DROPDOWNLIST = ('xpath', "//a[@id='type-btn']")
MY_MEDIA_FILTER_BY_COLLABORATION_DROPDOWNLIST = ('xpath', "//a[@id='mediaCollaboration-btn']")
MY_MEDIA_FILTER_BY_SCHEDULING_DROPDOWNLIST = ('xpath', "//a[@id='sched-btn']")
MY_MEDIA_DROPDOWNLIST_ITEM_OLD_UI = ('xpath', "//a[@role='menuitem' and contains(text(), 'DROPDOWNLIST_ITEM')]")
MY_MEDIA_DROPDOWNLIST_ITEM_NEW_UI = ('xpath', "//span[@class='filter-checkbox__label' and contains(text(), 'DROPDOWNLIST_ITEM')]")
MY_MEDIA_ENTRY_TOP = ('xpath', "//span[@class='entry-name' and text()='ENTRY_NAME']")
MY_MEDIA_END_OF_PAGE = ('xpath', "//div[@class='alert alert-info endlessScrollAlert']")
MY_MEDIA_TABLE_SIZE = ('xpath', "//table[@class='table table-condensed table-hover bulkCheckbox mymediaTable mediaTable full']/tbody/tr")
MY_MEDIA_CONFIRM_CHANGING_STATUS = ('xpath', "//a[@class='btn btn-primary' and text()='OK']")
MY_MEDIA_ENTRY_THUMBNAIL = ('xpath', "//img[@class='thumb_img' and @alt='Thumbnail for entry ENTRY_NAME']")
MY_MEDIA_ENTRY_THUMBNAIL_ELASTIC_SEARCH = ("xpath", "//img[@class='entryThumbnail__img']")
MY_MEDIA_REMOVE_SEARCH_ICON_OLD_UI = ('xpath', "//i[@class='icon-remove']")
MY_MEDIA_REMOVE_SEARCH_ICON_NEW_UI = ('xpath', "//a[@class='clear searchForm_icon']")
MY_MEDIA_NO_ENTRIES_FOUND = ('xpath',"//div[@class='alert alert-info no-results' and contains(text(), 'No Entries Found')]")
MY_MEDIA_TABLE = ('xpath', "//table[@class='table table-condensed table-hover bulkCheckbox mymediaTable mediaTable full']")
MY_MEDIA_IMAGE_ICON = ('xpath', "//i[@class='icon-picture icon-white']")
MY_MEDIA_AUDIO_ICON = ('xpath', "//i[@class='icon-music icon-white']")
MY_MEDIA_QUIZ_ICON = ('xpath', "//i[@class='icomoon-quiz icon-white']")
MY_MEDIA_VIDEO_ICON_OLD_UI = ('xpath', "//i[@class='icon-film icon-white']")
MY_MEDIA_EXPEND_MEDIA_DETAILS = ('xpath', "//div[@class='accordion-body in collapse contentLoaded' and @id='collapse_ENTRY_ID']")
MY_MEDIA_COLLAPSED_VIEW_BUTTON = ('xpath', "//button[@id='MyMediaList' and @data-original-title='Collapsed view']")
MY_MEDIA_DETAILED_VIEW_BUTTON = ('xpath', "//button[@id='MyMediaThumbs' and @data-original-title='Detailed view']")
SEARCH_RESULTS_ENTRY_NAME = ('xpath', "//span[@class='results-entry__name']")
MY_MEDIA_FILTERS_BUTTON_NEW_UI = ('xpath', "//button[contains(@class,'toggleButton btn shrink-container__button hidden-phone') and text()='Filters']")
SEARCH_RESULTS_ENTRY_NAME_OLD_UI = ('xpath', '//span[@class="searchTerm" and text()="ENTRY_NAME"]')
#=============================================================================================================
def getSearchBarElement(self):
try:
# Check which search bar do we have: old or new (elastic)
if self.clsCommon.isElasticSearchOnPage():
return self.get_elements(self.MY_MEDIA_ELASTIC_SEARCH_BAR)[1]
else:
return self.wait_visible(self.MY_MEDIA_SEARCH_BAR, 30, True)
except:
writeToLog("INFO","FAILED get Search Bar element")
return False
    # This method, clicks on the menu and My Media
    def navigateToMyMedia(self, forceNavigate = False):
        """Navigate the browser to the My Media page; True on success.

        forceNavigate: when False, skip navigation if the URL already
        matches the My Media page.
        """
        application = localSettings.LOCAL_SETTINGS_APPLICATION_UNDER_TEST
        # Blackboard embeds KMS differently, so it has its own navigation path.
        if application == enums.Application.BLACK_BOARD:
            if self.clsCommon.blackBoard.navigateToMyMediaBlackBoard() == False:
                writeToLog("INFO","FAILED navigate to my media in blackboard")
                return False
        else:
            # Check if we are already in my media page
            if forceNavigate == False:
                if self.verifyUrl(localSettings.LOCAL_SETTINGS_KMS_MY_MEDIA_URL, False, 5) == True:
                    return True
            # Click on User Menu Toggle Button
            if self.click(self.clsCommon.general.USER_MENU_TOGGLE_BUTTON) == False:
                writeToLog("INFO","FAILED to click on User Menu Toggle Button")
                return False
            # Click on My Media
            if self.click(self.clsCommon.general.USER_MENU_MY_MEDIA_BUTTON) == False:
                writeToLog("INFO","FAILED to click on My Media from the user menu")
                return False
            # Confirm the navigation actually landed on My Media.
            if self.verifyUrl(localSettings.LOCAL_SETTINGS_KMS_MY_MEDIA_URL, False) == False:
                writeToLog("INFO","FAILED to navigate to My Media")
                return False
        return True
    # Author: Michal Zomper
    def deleteSingleEntryFromMyMedia(self, entryName):
        """Search for *entryName* in My Media and delete it; True on success."""
        # Search for entry in my media
        if self.searchEntryMyMedia(entryName) == False:
            return False
        # Click on delete button (locator template with the entry name filled in)
        tmp_entry_name = (self.MY_MEDIA_ENRTY_DELETE_BUTTON[0], self.MY_MEDIA_ENRTY_DELETE_BUTTON[1].replace('ENTRY_NAME', entryName))
        if self.click(tmp_entry_name) == False:
            writeToLog("INFO","FAILED to click on delete entry button")
            return False
        sleep(5)
        # Click on confirm delete
        if self.click(self.MY_MEDIA_CONFIRM_ENTRY_DELETE, multipleElements=True) == False:
            writeToLog("INFO","FAILED to click on confirm delete button")
            return False
        self.clsCommon.general.waitForLoaderToDisappear()
        writeToLog("INFO","Entry: '" + entryName + "' Was Deleted")
        return True
# @Author: Tzachi Guetta
# The following method can handle list of entries and a single entry:
# in order to delete list of entries pass a List[] of entries name, for single entry - just pass the entry name
# also: the method will navigate to My media
# Known limitation: entries MUST be presented on the first page of my media
def deleteEntriesFromMyMedia(self, entriesNames, showAllEntries=False):
    """Select one or many entries in My Media and delete them via Actions -> Delete.

    entriesNames   -- a single entry name (str) or a list of names.
    showAllEntries -- when True (list case only), expand the page first.
    Returns True only if every entry was checked and the delete flow ran.
    """
    if self.navigateToMyMedia(forceNavigate = True) == False:
        writeToLog("INFO","FAILED Navigate to my media page")
        return False
    success = True
    # Checking if entriesNames list type
    if type(entriesNames) is list:
        if showAllEntries == True:
            if self.showAllEntries() == False:
                return False
        for entryName in entriesNames:
            # A failed checkbox marks overall failure but does not abort:
            # the remaining entries are still checked and deleted.
            if self.checkSingleEntryInMyMedia(entryName) == False:
                writeToLog("INFO","FAILED to CHECK the entry in my-media page")
                success = False
            writeToLog("INFO","Going to delete Entry: " + entryName)
    else:
        if self.checkSingleEntryInMyMedia(entriesNames) == False:
            writeToLog("INFO","FAILED to CHECK the entry in my-media page")
            success = False
        writeToLog("INFO","Going to delete Entry: " + entriesNames)
    # Delete everything that was successfully checked.
    if self.clickActionsAndDeleteFromMyMedia() == False:
        writeToLog("INFO","FAILED to click Action -> Delete")
        return False
    sleep(1)
    self.clsCommon.general.waitForLoaderToDisappear()
    if self.click(self.MY_MEDIA_CONFIRM_ENTRY_DELETE) == False:
        writeToLog("INFO","FAILED to click on confirm delete button")
        return False
    sleep(1)
    self.clsCommon.general.waitForLoaderToDisappear()
    # Printing the deleted entries
    if success == True:
        if type(entriesNames) is list:
            entries = ", ".join(entriesNames)
            writeToLog("INFO","The following entries were deleted: " + entries + "")
        else:
            writeToLog("INFO","The following entry was deleted: " + entriesNames + "")
    else:
        writeToLog("INFO","Failed, Not all entries were deleted")
    return success
def searchEntryMyMedia(self, entryName, forceNavigate=True, exactSearch=False):
    """Navigate to My Media (optionally reusing the open page) and search for entryName.

    The term is wrapped in double quotes for exact search, and always when
    the elastic search UI is active.
    """
    if self.navigateToMyMedia(forceNavigate) == False:
        return False
    sleep(5)
    # Grab the search bar and focus it.
    searchBarElement = self.getSearchBarElement()
    if searchBarElement == False:
        writeToLog("INFO","FAILED to get search bar element")
        return False
    searchBarElement.click()
    # Exact searches and elastic search both quote the term; otherwise raw.
    if exactSearch == True or self.clsCommon.isElasticSearchOnPage():
        searchLine = '"' + entryName + '"'
    else:
        searchLine = entryName
    self.getSearchBarElement().send_keys(searchLine + Keys.ENTER)
    sleep(1)
    self.clsCommon.general.waitForLoaderToDisappear()
    return True
def clickEntryAfterSearchInMyMedia(self, entryName):
    """Click the search-result row matching entryName; return False on failure."""
    if localSettings.LOCAL_SETTINGS_IS_NEW_UI == False:
        # Old UI: click the entry-name span directly by its text.
        entryLocator = ('xpath', "//span[@class='entry-name' and text()='" + entryName + "']")
        if self.click(entryLocator, 10) == False:
            writeToLog("INFO","FAILED to click on Entry: '" + entryName + "'")
            return False
        return True
    # New UI: resolve the result element first, then click it.
    result = self.getResultAfterSearch(entryName)
    if result == False:
        return False
    if self.clickElement(result) == False:
        writeToLog("INFO","FAILED to click on Entry: '" + entryName + "'")
        return False
    return True
# This method for Elastic Search (new UI), returns the result element.
def getResultAfterSearch(self, searchString):
    """Return the search-result element whose text equals searchString, else False."""
    if self.clsCommon.isElasticSearchOnPage() == True:
        # New UI (elastic): collect all result-name spans.
        results = self.wait_elements(self.SEARCH_RESULTS_ENTRY_NAME, 30)
    else:
        # Old UI: the locator is templated with the searched name.
        oldUiLocator = (self.SEARCH_RESULTS_ENTRY_NAME_OLD_UI[0],
                        self.SEARCH_RESULTS_ENTRY_NAME_OLD_UI[1].replace('ENTRY_NAME', searchString))
        results = self.wait_elements(oldUiLocator, 30)
    if results == False:
        writeToLog("INFO","No entries found")
        return False
    # Return the first element with an exact text match.
    for candidate in results:
        if candidate.text == searchString:
            return candidate
    writeToLog("INFO","No entries found after search entry: '" + searchString + "'")
    return False
# This method for Elastic Search (new UI), clicks on the returned result element.
def clickResultEntryAfterSearch(self, entryName):
    """Locate the search result for entryName and click it; False on any failure."""
    result = self.getResultAfterSearch(entryName)
    # Both failure modes (not found / click failed) log the same message.
    if result == False or self.clickElement(result) == False:
        writeToLog("INFO","FAILED to click on Entry: '" + entryName + "'")
        return False
    return True
# Author: Michal Zomper
def clickEditEntryAfterSearchInMyMedia(self, entryName):
    """Click the Edit button of the entry named entryName in My Media.

    NOTE(review): this method returns True even when the click fails or the
    entry is missing -- callers may depend on that; verify before changing.
    """
    # Click on the Edit Entry button
    tmp_entry_name = (self.MY_MEDIA_ENRTY_EDIT_BUTTON[0], self.MY_MEDIA_ENRTY_EDIT_BUTTON[1].replace('ENTRY_NAME', entryName))
    if self.click(tmp_entry_name) == False:
        # If entry not found, search for 'No Entries Found' alert
        if self.wait_for_text(self.MY_MEDIA_NO_RESULTS_ALERT, 'No Entries Found', 5) == True:
            writeToLog("INFO","No Entry: '" + entryName + "' was found")
        else:
            writeToLog("INFO","FAILED search for Entry: '" + entryName + "' something went wrong")
    return True
# @Author: Tzachi Guetta
def serachAndCheckSingleEntryInMyMedia(self, entryName):
    """Search for entryName in My Media and tick its check-box."""
    if self.searchEntryMyMedia(entryName) == False:
        writeToLog("INFO","FAILED to find: '" + entryName + "'")
        return False
    # Tick the entry's check-box in MyMedia page; propagate its True/False result.
    return self.checkSingleEntryInMyMedia(entryName)
# @Author: Tzachi Guetta
def checkSingleEntryInMyMedia(self, entryName):
    """Click the check-box of the entry named entryName on the My Media page."""
    checkboxLocator = (self.MY_MEDIA_ENTRY_CHECKBOX[0],
                       self.MY_MEDIA_ENTRY_CHECKBOX[1].replace('ENTRY_NAME', entryName))
    if self.click(checkboxLocator) == False:
        writeToLog("INFO","FAILED to Check for Entry: '" + entryName + "' something went wrong")
        return False
    return True
# @Author: Tzachi Guetta
def checkEntriesInMyMedia(self, entriesNames):
    """Tick the check-box for every name in entriesNames (must be a list)."""
    # Guard clause: only a list is accepted here.
    if type(entriesNames) is not list:
        writeToLog("INFO","FAILED, Entries list was not provided")
        return False
    for entryName in entriesNames:
        if self.checkSingleEntryInMyMedia(entryName) == False:
            writeToLog("INFO","FAILED to CHECK the entry: " + entryName + ", in my media page")
            return False
    return True
# @Author: Tzachi Guetta
def clickActionsAndPublishFromMyMedia(self):
    """Open the Actions menu and choose Publish; False if either click fails."""
    # (locator, button label for the log, post-click pause) in click order.
    for locator, label, pause in ((self.MY_MEDIA_ACTIONS_BUTTON, "Action", 2),
                                  (self.MY_MEDIA_ACTIONS_BUTTON_PUBLISH_BUTTON, "Publish", 1)):
        if self.click(locator) == False:
            writeToLog("INFO","FAILED to click on " + label + " button")
            return False
        sleep(pause)
    return True
# @Author: Tzachi Guetta
def clickActionsAndDeleteFromMyMedia(self):
    """Open the Actions menu and choose Delete; False if either click fails."""
    if self.click(self.MY_MEDIA_ACTIONS_BUTTON) == False:
        writeToLog("INFO","FAILED to click on Action button")
        return False
    if self.click(self.MY_MEDIA_ACTIONS_BUTTON_DELETE_BUTTON) == False:
        # Bug fix: this log previously said "Publish" (copy-paste error).
        writeToLog("INFO","FAILED to click on Delete button")
        return False
    sleep(1)
    return True
# @Author: Tzachi Guetta
def clickActionsAndAddToPlaylistFromMyMedia(self):
    """Open the Actions menu and choose 'Add to playlist'; False if either click fails."""
    if self.click(self.MY_MEDIA_ACTIONS_BUTTON) == False:
        writeToLog("INFO","FAILED to click on Action button")
        return False
    if self.click(self.MY_MEDIA_ACTIONS_BUTTON_ADDTOPLAYLIST_BUTTON) == False:
        # Bug fix: this log previously said "Publish" (copy-paste error).
        writeToLog("INFO","FAILED to click on Add to playlist button")
        return False
    sleep(1)
    return True
# @Author: Tzachi Guetta
def publishSingleEntryPrivacyToUnlistedInMyMedia(self, entryName):
    """Search an entry, open Actions -> Publish and set its privacy to Unlisted.

    Returns True once the 'published as unlisted' message is visible.
    """
    if self.serachAndCheckSingleEntryInMyMedia(entryName) == False:
        writeToLog("INFO","FAILED to Check for Entry: '" + entryName + "' something went wrong")
        return False
    if self.clickActionsAndPublishFromMyMedia() == False:
        writeToLog("INFO","FAILED to click on Action button, Entry: '" + entryName + "' something went wrong")
        return False
    if self.click(self.MY_MEDIA_PUBLISH_UNLISTED, 30) == False:
        writeToLog("INFO","FAILED to click on Unlisted button")
        return False
    if self.click(self.MY_MEDIA_PUBLISH_SAVE_BUTTON) == False:
        # Bug fix: this log previously said "Unlisted button" (copy-paste error).
        writeToLog("INFO","FAILED to click on Save button")
        return False
    sleep(1)
    self.clsCommon.general.waitForLoaderToDisappear()
    # Confirm the success banner before declaring victory.
    if self.wait_visible(self.clsCommon.myMedia.MY_MEDIA_PUBLISHED_AS_UNLISTED_MSG, 20) == False:
        writeToLog("INFO","FAILED to Publish Entry: '" + entryName + "' something went wrong")
        return False
    writeToLog("INFO","Success, Entry '" + entryName + "' was set to unlisted successfully")
    return True
# @Author: Tzachi Guetta
def handleDisclaimerBeforePublish(self, entryName):
    """Accept the 'before publish' disclaimer for an entry via its edit page.

    Flow: trigger publish to surface the disclaimer alert, then open the
    edit-entry page, tick the disclaimer check-box, save, and return to
    My Media. Returns True on success, False on any failed step.
    Precondition: the Disclaimer module must be enabled in KMS admin.
    """
    if self.serachAndCheckSingleEntryInMyMedia(entryName) == False:
        writeToLog("INFO","FAILED to Check for Entry: '" + entryName + "' something went wrong")
        return False
    if self.clickActionsAndPublishFromMyMedia() == False:
        writeToLog("INFO","FAILED to click on Action button, Entry: '" + entryName + "' something went wrong")
        return False
    # The disclaimer alert must appear before publish can proceed.
    if self.wait_visible(self.MY_MEDIA_DISCLAIMER_MSG) == False:
        writeToLog("INFO","FAILED, Disclaimer alert (before publish) wasn't presented although Disclaimer module is turned on")
        return False
    # Accept the disclaimer from the edit-entry page.
    if self.clsCommon.editEntryPage.navigateToEditEntryPageFromMyMedia(entryName) == False:
        writeToLog("INFO","FAILED to navigate to Edit entry page, Entry: '" + entryName + "' something went wrong")
        return False
    if self.click(self.clsCommon.upload.UPLOAD_ENTRY_DISCLAIMER_CHECKBOX) == False:
        writeToLog("INFO","FAILED to click on disclaimer check-box")
        return False
    if self.click(self.clsCommon.editEntryPage.EDIT_ENTRY_SAVE_BUTTON) == False:
        writeToLog("INFO","FAILED to click on save button at edit entry page")
        return False
    self.clsCommon.general.waitForLoaderToDisappear()
    # Return to My Media so the caller can continue the publish flow.
    if self.navigateToMyMedia() == False:
        writeToLog("INFO","FAILED to navigate to my media")
        return False
    return True
# Author: Michal Zomper
# publishFrom - enums.Location
# in categoryList / channelList will have all the names of the categories / channels to publish to
def publishSingleEntry(self, entryName, categoryList, channelList, publishFrom = enums.Location.MY_MEDIA, disclaimer=False):
    """Publish a single entry to the given categories and/or channels.

    entryName    -- entry to publish.
    categoryList -- category names to publish to (may be empty).
    channelList  -- channel names to publish to (may be empty).
    publishFrom  -- enums.Location: MY_MEDIA, ENTRY_PAGE or UPLOAD_PAGE.
    disclaimer   -- when True, handle the 'before publish' disclaimer first.
    Returns True on success, False on failure (None if the upload-page save
    click fails -- preserved historical behavior).
    """
    #checking if disclaimer is turned on for "Before publish"
    if disclaimer == True:
        if self.handleDisclaimerBeforePublish(entryName) == False:
            writeToLog("INFO","FAILED, Handle disclaimer before Publish failed")
            return False
    if publishFrom == enums.Location.MY_MEDIA:
        if self.navigateToMyMedia() == False:
            writeToLog("INFO","FAILED to navigate to my media")
            return False
        sleep(1)
        if self.serachAndCheckSingleEntryInMyMedia(entryName) == False:
            writeToLog("INFO","FAILED to check entry '" + entryName + "' check box")
            return False
        if self.clickActionsAndPublishFromMyMedia() == False:
            writeToLog("INFO","FAILED to click on action button")
            return False
        sleep(7)
        if self.click(self.MY_MEDIA_PUBLISHED_RADIO_BUTTON, 30) == False:
            writeToLog("DEBUG","FAILED to click on publish button")
            return False
    elif publishFrom == enums.Location.ENTRY_PAGE:
        sleep(1)
        # Click on action tab
        if self.click(self.clsCommon.entryPage.ENTRY_PAGE_ACTIONS_DROPDOWNLIST, 30) == False:
            writeToLog("INFO","FAILED to click on action button in entry page '" + entryName + "'")
            return False
        sleep(5)
        # Click on publish button
        if self.click(self.clsCommon.entryPage.ENTRY_PAGE_PUBLISH_BUTTON, 30) == False:
            writeToLog("INFO","FAILED to click on publish button in entry page '" + entryName + "'")
            return False
        if self.click(self.MY_MEDIA_PUBLISHED_RADIO_BUTTON, 45) == False:
            writeToLog("DEBUG","FAILED to click on publish button")
            return False
    elif publishFrom == enums.Location.UPLOAD_PAGE:
        writeToLog("INFO","Publishing from Upload page, Entry name: '" + entryName + "'")
        sleep(2)
        # When the radio is disabled, it still clickable, self.click() wont return false
        # The solution is to check button attribute "disabled" == "disabled"
        # Bug fix: wait_visible() may return False, which has no get_attribute();
        # check that before dereferencing instead of crashing with AttributeError.
        publishRadio = self.wait_visible(self.MY_MEDIA_PUBLISHED_RADIO_BUTTON, 45)
        if publishRadio == False:
            writeToLog("DEBUG","FAILED to find publish radio button")
            return False
        if publishRadio.get_attribute("disabled") == 'true':
            writeToLog("DEBUG","FAILED to click on publish button - button is disabled")
            return False
        if self.click(self.MY_MEDIA_PUBLISHED_RADIO_BUTTON, 45) == False:
            writeToLog("DEBUG","FAILED to click on publish button")
            return False
    sleep(2)
    # Click if category list is empty
    if len(categoryList) != 0:
        # Click on Publish in Category
        if self.click(self.MY_MEIDA_PUBLISH_TO_CATEGORY_OPTION, 30) == False:
            writeToLog("INFO","FAILED to click on Publish in Category")
            return False
        # choose all the categories to publish to
        for category in categoryList:
            tmoCategoryName = (self.MY_MEDIA_CHOSEN_CATEGORY_TO_PUBLISH[0], self.MY_MEDIA_CHOSEN_CATEGORY_TO_PUBLISH[1].replace('PUBLISHED_CATEGORY', category))
            if self.click(tmoCategoryName, 30) == False:
                writeToLog("INFO","FAILED to select published category '" + category + "'")
                return False
        sleep(2)
    # Click if channel list is empty
    if len(channelList) != 0:
        # Click on Publish in Channel
        if self.click(self.MY_MEIDA_PUBLISH_TO_CHANNEL_OPTION, 30) == False:
            writeToLog("INFO","FAILED to click on Publish in channel")
            return False
        sleep(2)
        # choose all the channels to publish to
        for channel in channelList:
            tmpChannelName = (self.MY_MEDIA_CHOSEN_CATEGORY_TO_PUBLISH[0], self.MY_MEDIA_CHOSEN_CATEGORY_TO_PUBLISH[1].replace('PUBLISHED_CATEGORY', channel))
            if self.click(tmpChannelName, 20, multipleElements=True) == False:
                writeToLog("INFO","FAILED to select published channel '" + channel + "'")
                return False
        sleep(1)
    # Save: My Media / entry page use the publish dialog's save; upload page uses its own.
    if publishFrom == enums.Location.MY_MEDIA or publishFrom == enums.Location.ENTRY_PAGE:
        if self.click(self.MY_MEDIA_PUBLISH_SAVE_BUTTON, 30) == False:
            writeToLog("INFO","FAILED to click on save button")
            return False
        if self.wait_visible(self.MY_MEDIA_SAVE_MESSAGE_CONFIRM, 45) == False:
            writeToLog("INFO","FAILED to find confirm save message")
            return False
    else:
        if self.click(self.clsCommon.upload.UPLOAD_ENTRY_SAVE_BUTTON) == False:
            writeToLog("DEBUG","FAILED to click on 'Save' button")
            return None
        sleep(2)
    # Wait for loader to disappear
    self.clsCommon.general.waitForLoaderToDisappear()
    sleep(3)
    writeToLog("INFO","Success, publish entry '" + entryName + "' was successful")
    return True
# Author: Tzachi Guetta
def verifyEntryPrivacyInMyMedia(self, entryName, expectedEntryPrivacy, forceNavigate=True):
    """Verify the privacy label shown for entryName on the My Media page.

    expectedEntryPrivacy -- enums.EntryPrivacyType (UNLISTED, PRIVATE,
    REJECTED, PENDING or PUBLISHED). UNLISTED/PRIVATE are read from the
    entry row; the other states require opening the 'published' pop-up.
    NOTE(review): when the label does not match (and no exception fires)
    the method falls through and implicitly returns None, not False --
    callers comparing '== False' will treat that as success; verify.
    """
    try:
        if forceNavigate == True:
            if self.navigateToMyMedia() == False:
                writeToLog("INFO","FAILED to navigate to my media")
                return False
        if expectedEntryPrivacy == enums.EntryPrivacyType.UNLISTED:
            # Look for an 'Unlisted' child label under the entry's row element.
            parent = self.wait_visible(self.replaceInLocator(self.MY_MEDIA_ENTRY_PARNET, "ENTRY_NAME", entryName))
            child = self.replaceInLocator(self.MY_MEDIA_ENTRY_CHILD, "ENTRY_PRIVACY", 'Unlisted')
            if self.clsCommon.base.get_child_element(parent, child) != None:
                writeToLog("INFO","As Expected: The privacy of: '" + entryName + "' in My-media page is: '" + str(enums.EntryPrivacyType.UNLISTED) + "'")
                return True
        elif expectedEntryPrivacy == enums.EntryPrivacyType.PRIVATE:
            # Same pattern with a 'Private' child label.
            parent = self.wait_visible(self.replaceInLocator(self.MY_MEDIA_ENTRY_PARNET, "ENTRY_NAME", entryName))
            child = self.replaceInLocator(self.MY_MEDIA_ENTRY_CHILD, "ENTRY_PRIVACY", 'Private')
            if self.clsCommon.base.get_child_element(parent, child) != None:
                writeToLog("INFO","As Expected: The privacy of: '" + entryName + "' in My-media page is: '" + str(enums.EntryPrivacyType.PRIVATE) + "'")
                return True
        elif expectedEntryPrivacy == enums.EntryPrivacyType.REJECTED or expectedEntryPrivacy == enums.EntryPrivacyType.PENDING or expectedEntryPrivacy == enums.EntryPrivacyType.PUBLISHED:
            # These states live in the 'published' pop-up, addressed by entry id.
            tmpEntry = self.replaceInLocator(self.MY_MEDIA_ENTRY_PARNET, "ENTRY_NAME", entryName)
            entryId = self.clsCommon.upload.extractEntryID(tmpEntry)
            tmpBtn = (self.MY_MEDIA_ENTRY_PUBLISHED_BTN[0], self.MY_MEDIA_ENTRY_PUBLISHED_BTN[1].replace('ENTRY_ID', entryId))
            if self.click(tmpBtn) == False:
                writeToLog("INFO","FAILED to click on the 'published' pop-up of: " + entryName)
                return False
            sleep(3)
            # Old and new UI expose the state text under different nodes.
            if localSettings.LOCAL_SETTINGS_IS_NEW_UI == False:
                tmpBtn = (self.MY_MEDIA_ENTRY_PUBLISHED_BTN_OLD_UI[0], self.MY_MEDIA_ENTRY_PUBLISHED_BTN_OLD_UI[1].replace('ENTRY_ID', entryId))
            else:
                tmpBtn = (self.MY_MEDIA_ENTRY_PUBLISHED_BTN_OLD_UI[0], self.MY_MEDIA_ENTRY_PUBLISHED_BTN_OLD_UI[1].replace('ENTRY_ID', entryId) + "/descendant::strong[@class='valign-top']")
            if str(expectedEntryPrivacy) in self.get_element_text(tmpBtn):
                writeToLog("INFO","As Expected: The privacy of: '" + entryName + "' in My-media page is: '" + str(expectedEntryPrivacy) + "'")
                return True
    except NoSuchElementException:
        writeToLog("INFO","FAILED to verify that entry '" + entryName + "' label is " + expectedEntryPrivacy.value)
        return False
# Author: Michal Zomper
def verifyEntriesPrivacyInMyMedia(self, entriesList):
    """Verify the privacy label of every entry in entriesList (dict: name -> privacy)."""
    for entryName, expectedPrivacy in entriesList.items():
        if self.verifyEntryPrivacyInMyMedia(entryName, expectedPrivacy, forceNavigate=False) == False:
            writeToLog("INFO","FAILED to verify entry '" + entryName + "' label")
            return False
    writeToLog("INFO","Success, All entries label were verified")
    return True
# Author: Tzachi Guetta
def SortAndFilter(self, dropDownListName='' ,dropDownListItem=''):
    """Apply one sort/filter drop-down selection on the My Media page.

    dropDownListName -- enums.SortAndFilter member selecting which drop-down.
    dropDownListItem -- the item to pick. NOTE(review): this method mixes
    '.value' access and raw usage of dropDownListItem (see below), so the
    expected argument type (enum vs. str) differs per branch -- confirm
    against callers before refactoring.
    """
    if self.clsCommon.isElasticSearchOnPage() == True:
        if dropDownListName == enums.SortAndFilter.SORT_BY:
            tmpSortlocator = self.MY_MEDIA_SORT_BY_DROPDOWNLIST_NEW_UI
            if self.click(tmpSortlocator, multipleElements=True) == False:
                writeToLog("INFO","FAILED to click on :" + dropDownListItem.value + " filter in my media")
                return False
            # only sort filter use the locater of the dropdownlist_item_old_ui
            # tmp_sort_by = (self.MY_MEDIA_DROPDOWNLIST_ITEM_OLD_UI[0], self.MY_MEDIA_DROPDOWNLIST_ITEM_OLD_UI[1].replace('DROPDOWNLIST_ITEM', dropDownListItem.value))
            # NOTE(review): raw dropDownListItem here vs '.value' in the log lines -- assumes a str is passed in this branch; TODO confirm
            tmpSortBy = (self.MY_MEDIA_DROPDOWNLIST_ITEM_OLD_UI[0], self.MY_MEDIA_DROPDOWNLIST_ITEM_OLD_UI[1].replace('DROPDOWNLIST_ITEM', dropDownListItem))
            if self.click(tmpSortBy, multipleElements=True) == False:
                writeToLog("INFO","FAILED to click on sort by :" + dropDownListItem.value + " filter in my media")
                return False
        else:
            # Non-sort filters live behind the 'Filters' button in the new UI.
            if self.click(self.MY_MEDIA_FILTERS_BUTTON_NEW_UI, 20, multipleElements=True) == False:
                writeToLog("INFO","FAILED to click on filters button in my media")
                return False
            sleep(2)
            tmpEntry = (self.MY_MEDIA_DROPDOWNLIST_ITEM_NEW_UI[0], self.MY_MEDIA_DROPDOWNLIST_ITEM_NEW_UI[1].replace('DROPDOWNLIST_ITEM', dropDownListItem.value))
            if self.click(tmpEntry, multipleElements=True) == False:
                writeToLog("INFO","FAILED to click on the drop-down list item: " + dropDownListItem.value)
                return False
        self.clsCommon.general.waitForLoaderToDisappear()
        # NOTE(review): string concatenation with dropDownListItem assumes str here -- TODO confirm
        writeToLog("INFO","Success, " + dropDownListName.value + " - " + dropDownListItem + " was set successfully")
        return True
    else:
        # Old UI: pick the drop-down locator by name.
        if dropDownListName == enums.SortAndFilter.SORT_BY:
            tmplocator = self.MY_MEDIA_SORT_BY_DROPDOWNLIST_OLD_UI
        elif dropDownListName == enums.SortAndFilter.PRIVACY:
            tmplocator = self.MY_MEDIA_FILTER_BY_STATUS_DROPDOWNLIST
        elif dropDownListName == enums.SortAndFilter.MEDIA_TYPE:
            tmplocator = self.MY_MEDIA_FILTER_BY_TYPE_DROPDOWNLIST
        elif dropDownListName == enums.SortAndFilter.COLLABORATION:
            tmplocator = self.MY_MEDIA_FILTER_BY_COLLABORATION_DROPDOWNLIST
        elif dropDownListName == enums.SortAndFilter.SCHEDULING:
            tmplocator = self.MY_MEDIA_FILTER_BY_SCHEDULING_DROPDOWNLIST
        else:
            writeToLog("INFO","FAILED, drop-down-list name was not provided")
            return False
        if self.click(tmplocator, multipleElements=True) == False:
            writeToLog("INFO","FAILED to click on: " + str(dropDownListName) + " in my media")
            return False
        tmpEntry = self.replaceInLocator(self.MY_MEDIA_DROPDOWNLIST_ITEM_OLD_UI, "DROPDOWNLIST_ITEM", str(dropDownListItem))
        if self.click(tmpEntry, multipleElements=True) == False:
            writeToLog("INFO","FAILED to click on the drop-down list item: " + str(dropDownListItem))
            return False
        self.clsCommon.general.waitForLoaderToDisappear()
        writeToLog("INFO","Success, sort by " + dropDownListName.value + " - " + str(dropDownListItem) + " was set successfully")
        return True
# Author: Tzachi Guetta
def sortAndFilterInMyMedia(self, sortBy='', filterPrivacy='', filterMediaType='', filterCollaboration='', filterScheduling='', resetFields=False):
    """Apply any combination of sort/filter settings on the My Media page.

    Empty-string arguments are skipped. resetFields forces a fresh navigation.
    """
    try:
        if self.navigateToMyMedia(forceNavigate = resetFields) == False:
            writeToLog("INFO","FAILED to navigate to my media")
            return False
        # (drop-down, requested value, log label) applied in the original order.
        selections = ((enums.SortAndFilter.SORT_BY, sortBy, "sortBy: "),
                      (enums.SortAndFilter.PRIVACY, filterPrivacy, "filter: "),
                      (enums.SortAndFilter.MEDIA_TYPE, filterMediaType, "filter: "),
                      (enums.SortAndFilter.COLLABORATION, filterCollaboration, "filter: "),
                      (enums.SortAndFilter.SCHEDULING, filterScheduling, "filter: "))
        for dropDown, requested, label in selections:
            if requested != '':
                if self.SortAndFilter(dropDown, requested) == False:
                    writeToLog("INFO","FAILED to set " + label + str(requested) + " in my media")
                    return False
    except NoSuchElementException:
        return False
    return True
# Author: Tzachi Guetta
# MUST: enableLoadButton must be turned off in KMS admin
def scrollToBottom(self, retries=5):
    """Press END repeatedly until the My Media end-of-page marker appears.

    Returns True when the marker is reached (or fewer than 4 rows exist),
    False after 'retries' attempts or on a missing element.
    """
    try:
        # Fewer than 4 rows -> nothing to scroll.
        if len(self.get_elements(self.MY_MEDIA_TABLE_SIZE)) < 4:
            return True
        for attempt in range(retries):
            self.driver.find_element_by_xpath('//body').send_keys(Keys.END)
            writeToLog("INFO","Scrolling to bottom, retry #: " + str(attempt+1))
            sleep(2)
            if self.wait_visible(self.MY_MEDIA_END_OF_PAGE, 1) != False:
                writeToLog("INFO","*** Reached the End of page ***")
                return True
        writeToLog("INFO","FAILED, scrolled " + str(retries) + " times and didn't reached the bottom of the page, maybe you need add more retries")
    except NoSuchElementException:
        return False
    return False
# Author: Tzachi Guetta
def getTop(self, entryName, location):
    """Return the 'top' attribute of the entry element at the given location.

    location -- enums.Location.MY_MEDIA, MY_PLAYLISTS or PENDING_TAB.
    Returns the top value, or False if the element cannot be located or the
    location is unsupported.
    """
    try:
        if location == enums.Location.MY_MEDIA:
            tmpEntry = self.replaceInLocator(self.clsCommon.myMedia.MY_MEDIA_ENTRY_TOP, "ENTRY_NAME", entryName)
        elif location == enums.Location.MY_PLAYLISTS or location == enums.Location.PENDING_TAB:
            tmpEntry = self.replaceInLocator(self.clsCommon.myPlaylists.PLAYLIST_ENTRY_NAME_IN_PLAYLIST, "ENTRY_NAME", entryName)
        else:
            # Bug fix: an unsupported location previously raised NameError
            # (tmpEntry unbound) instead of failing cleanly.
            writeToLog("INFO","FAILED, unsupported location: " + str(location))
            return False
        entrytop = self.get_element_attributes(tmpEntry, multipleElements=True)['top']
        writeToLog("INFO","The top of: '" + entryName + "' is: " + str(entrytop))
        return entrytop
    except NoSuchElementException:
        writeToLog("INFO","The top of: '" + entryName + "' could not be located")
        return False
# Author: Tzachi Guetta
def verifyEntriesOrder(self, expectedEntriesOrder, location = enums.Location.MY_MEDIA):
    """Verify entries appear top-to-bottom in the order of expectedEntriesOrder.

    Compares each entry's CSS 'top' value against the previous one; every
    entry must sit strictly lower on the page than its predecessor.
    NOTE(review): entries whose top is exactly 0 are skipped without
    updating the baseline -- presumably hidden rows; confirm.
    """
    try:
        if location == enums.Location.MY_MEDIA:
            if self.clsCommon.myMedia.navigateToMyMedia() == False:
                return False
            # All entries must be loaded before their positions are compared.
            if self.scrollToBottom() == False:
                writeToLog("INFO","FAILED to scroll to bottom in my-media")
                return False
        elif location == enums.Location.MY_PLAYLISTS:
            if self.clsCommon.myPlaylists.navigateToMyPlaylists(True) == False:
                return False
        # Sentinel lower than any real 'top' value.
        tmpTop = -9999
        for entry in expectedEntriesOrder:
            currentEntryTop = self.getTop(entry, location)
            if currentEntryTop != False and currentEntryTop!= 0:
                if currentEntryTop <= tmpTop:
                    writeToLog("INFO","FAILED, the location of: '" + entry + "' is not as expected. (the top is '" + str(currentEntryTop) + "' and it should be higher than: '" + str(tmpTop) + "')")
                    return False
            else:
                return False
            tmpTop = currentEntryTop
    except NoSuchElementException:
        return False
    return True
# Author: Tzachi Guetta
def isEntryPresented(self, entryName, isExpected):
    """Check whether entryName is present in My Media and compare with isExpected."""
    try:
        entryLocator = self.replaceInLocator(self.MY_MEDIA_ENTRY_TOP, "ENTRY_NAME", entryName)
        isPresented = self.is_present(entryLocator, 5)
        strPresented = "Presented" if isPresented == True else "Not Presented"
        if isPresented == isExpected:
            writeToLog("INFO","Passed, As expected, Entry: '" + entryName + "' is " + strPresented)
            return True
        writeToLog("INFO","FAILED, Not expected, Entry: '" + entryName + "' is " + strPresented)
        return False
    except NoSuchElementException:
        return False
# Author: Tzachi Guetta
def areEntriesPresented(self, entriesDict):
    """Verify presence/absence for every entry in entriesDict (name -> expected bool)."""
    try:
        for entryName, expected in entriesDict.items():
            if self.isEntryPresented(entryName, expected) == False:
                writeToLog("INFO","FAILED to verify if entry presented for entry: " + str(entryName))
                return False
    except NoSuchElementException:
        return False
    return True
# @Author: Inbar Willman
def publishSingleEntryToUnlistedOrPrivate(self, entryName, privacy, alreadyPublished=False, publishFrom=enums.Location.MY_MEDIA):
    """Set an entry's privacy to Unlisted or Private from My Media or the upload page.

    privacy          -- enums.ChannelPrivacyType.UNLISTED or .PRIVATE.
    alreadyPublished -- when True, a confirmation modal about changing the
                        status is expected and confirmed.
    publishFrom      -- enums.Location.MY_MEDIA or .UPLOAD_PAGE (upload
                        supports Unlisted only).
    Returns True on success; None if the upload-page save click fails
    (preserved historical behavior).
    NOTE(review): several failure logs below say "Unlisted button" for the
    Save/Private clicks -- apparent copy-paste in the messages, left as-is.
    """
    if publishFrom == enums.Location.MY_MEDIA:
        if self.serachAndCheckSingleEntryInMyMedia(entryName) == False:
            writeToLog("INFO","FAILED to Check for Entry: '" + entryName + "' something went wrong")
            return False
        if self.clickActionsAndPublishFromMyMedia() == False:
            writeToLog("INFO","FAILED to click on Action button, Entry: '" + entryName + "' something went wrong")
            return False
        sleep(3)
        if privacy == enums.ChannelPrivacyType.UNLISTED:
            if self.click(self.MY_MEDIA_PUBLISH_UNLISTED) == False:
                writeToLog("INFO","FAILED to click on Unlisted button")
                return False
            if self.click(self.MY_MEDIA_PUBLISH_SAVE_BUTTON) == False:
                writeToLog("INFO","FAILED to click on Unlisted button")
                return False
            if alreadyPublished == True:
                #Click on confirm modal
                if self.click(self.MY_MEDIA_CONFIRM_CHANGING_STATUS) == False:
                    writeToLog("INFO","FAILED to click on confirm button")
                    return False
            sleep(1)
            self.clsCommon.general.waitForLoaderToDisappear()
            # Success is confirmed by the 'published as unlisted' banner.
            if self.wait_visible(self.clsCommon.myMedia.MY_MEDIA_PUBLISHED_AS_UNLISTED_MSG, 20) == False:
                writeToLog("INFO","FAILED to Publish Entry: '" + entryName + "' something went wrong")
                return False
        elif privacy == enums.ChannelPrivacyType.PRIVATE:
            if self.click(self.MY_MEDIA_PUBLISH_PRIVATE) == False:
                writeToLog("INFO","FAILED to click on Unlisted button")
                return False
            if self.click(self.MY_MEDIA_PUBLISH_SAVE_BUTTON) == False:
                writeToLog("INFO","FAILED to click on Unlisted button")
                return False
            if alreadyPublished == True:
                #Click on confirm modal
                if self.click(self.MY_MEDIA_CONFIRM_CHANGING_STATUS) == False:
                    writeToLog("INFO","FAILED to click on confirm button")
                    return False
            sleep(1)
            self.clsCommon.general.waitForLoaderToDisappear()
            # Success is confirmed by the 'published as private' banner.
            if self.wait_visible(self.clsCommon.myMedia.MY_MEDIA_PUBLISHED_AS_PRIVATE_MSG, 20) == False:
                writeToLog("INFO","FAILED to Publish Entry: '" + entryName + "' something went wrong")
                return False
        else:
            writeToLog("INFO","FAILED to get valid privacy:" + str(privacy))
            return False
    elif publishFrom == enums.Location.UPLOAD_PAGE:
        writeToLog("INFO","Publishing from Upload page, Entry name: '" + entryName + "'")
        sleep(2)
        if self.click(self.MY_MEDIA_PUBLISH_UNLISTED) == False:
            writeToLog("INFO","FAILED to click on Unlisted button")
            return False
        if self.click(self.clsCommon.upload.UPLOAD_ENTRY_SAVE_BUTTON) == False:
            writeToLog("DEBUG","FAILED to click on 'Save' button")
            return None
        sleep(2)
        # Wait for loader to disappear
        self.clsCommon.general.waitForLoaderToDisappear()
        if self.wait_visible(self.clsCommon.myMedia.MY_MEDIA_PUBLISHED_AS_UNLISTED_MSG, 20) == False:
            writeToLog("INFO","FAILED to Publish Entry: '" + entryName + "' something went wrong")
            return False
    return True
# @Author: Michal Zomper
def navigateToEntryPageFromMyMediaViaThubnail(self, entryName):
    """Open an entry's page by clicking its thumbnail in My Media search results.

    Returns True immediately if the entry page is already open.
    NOTE(review): the "FAILD" spelling below is in the runtime log string and
    is left unchanged here.
    """
    tmp_entry_name = (self.clsCommon.entryPage.ENTRY_PAGE_ENTRY_TITLE[0], self.clsCommon.entryPage.ENTRY_PAGE_ENTRY_TITLE[1].replace('ENTRY_NAME', entryName))
    #Check if we already in edit entry page
    if self.wait_visible(tmp_entry_name, 5) != False:
        writeToLog("INFO","Already in edit entry page, Entry name: '" + entryName + "'")
        return True
    if self.clsCommon.myMedia.searchEntryMyMedia(entryName) == False:
        writeToLog("INFO","FAILD to search entry name: '" + entryName + "' in my media")
        return False
    # Old UI thumbnails are located per entry name; elastic UI uses a shared locator.
    if self.clsCommon.isElasticSearchOnPage() == False:
        tmp_entryThumbnail = (self.MY_MEDIA_ENTRY_THUMBNAIL[0], self.MY_MEDIA_ENTRY_THUMBNAIL[1].replace('ENTRY_NAME', entryName))
        if self.click(tmp_entryThumbnail, 20) == False:
            writeToLog("INFO","FAILED to click on entry thumbnail: " + entryName)
            return False
    else:
        if self.click(self.MY_MEDIA_ENTRY_THUMBNAIL_ELASTIC_SEARCH, 20) == False:
            writeToLog("INFO","FAILED to click on entry thumbnail: " + entryName)
            return False
    # Confirm the entry page actually opened by waiting for its title.
    if self.wait_visible(tmp_entry_name, 30) == False:
        writeToLog("INFO","FAILED to enter entry page: '" + entryName + "'")
        return False
    sleep(2)
    writeToLog("INFO","Success, entry was open successfully")
    return True
# @Author: Michal Zomper
def verifyEntriesExistInMyMedia(self, searchKey, entriesList, entriesCount):
    """Search My Media for searchKey and verify the expected entries are listed.

    entriesList  -- a single entry name (str) or a list of names.
    entriesCount -- exact number of rows expected after the search.
    Clears the search box before returning True.
    """
    if self.searchEntryMyMedia(searchKey) == False:
        writeToLog("INFO","FAILED to search entry: '" + searchKey + "' in my media")
        return False
    try:
        searchedEntries = self.get_elements(self.MY_MEDIA_TABLE_SIZE)
    except NoSuchElementException:
        writeToLog("INFO","FAILED to get entries list")
        return False
    # Check that number of entries that display is correct
    if len(searchedEntries) != entriesCount:
        writeToLog("INFO","FAILED, number of entries after search is '" + str(len(self.get_elements(self.MY_MEDIA_TABLE_SIZE))) + "' but need to be '" + str(entriesCount) + "'")
        return False
    if self.clsCommon.isElasticSearchOnPage() == False:
        if type(entriesList) is list:
            # Rows are matched from the end of the result list backwards:
            # entriesList[0] is expected in the last row, and so on --
            # presumably newest-first display order; TODO confirm.
            i=1
            for entry in entriesList:
                if (entry in searchedEntries[len(searchedEntries)-i].text) == False:
                    writeToLog("INFO","FAILED to find entry: '" + entry + "' after search in my media")
                    return False
                i = i+1
        # only one entry
        else:
            if (entriesList in searchedEntries[0].text) == False:
                writeToLog("INFO","FAILED to find entry: '" + entriesList + "' after search in my media")
                return False
    else:
        # Elastic UI: each entry is matched via the result-element lookup.
        if type(entriesList) is list:
            for entry in entriesList:
                if (self.getResultAfterSearch(entry)) == False:
                    writeToLog("INFO","FAILED to find entry: '" + entry + "' after search in my media")
                    return False
        # only one entry
        else:
            if (self.getResultAfterSearch(entriesList)) == False:
                writeToLog("INFO","FAILED to find entry: '" + entriesList + "' after search in my media")
                return False
    # Leave the page with an empty search box.
    if self.clearSearch() == False:
        writeToLog("INFO","FAILED to clear search textbox")
        return False
    sleep(1)
    writeToLog("INFO","Success, All searched entries were found after search")
    return True
# Author: Michal Zomper
def clearSearch(self):
    """Clear the current search term via the remove-search icon (new or old UI)."""
    if self.clsCommon.isElasticSearchOnPage() == True:
        try:
            clearButtons = self.get_elements(self.MY_MEDIA_REMOVE_SEARCH_ICON_NEW_UI)
        except NoSuchElementException:
            writeToLog("INFO","FAILED to find clear search icon")
            return False
        # NOTE(review): index [1] assumes at least two matching icons -- TODO confirm
        if self.clickElement(clearButtons[1]) == False:
            writeToLog("INFO","FAILED click on the remove search icon")
            return False
    else:
        if self.click(self.MY_MEDIA_REMOVE_SEARCH_ICON_OLD_UI, 15, multipleElements=True) == False:
            writeToLog("INFO","FAILED click on the remove search icon")
            return False
    self.clsCommon.general.waitForLoaderToDisappear()
    writeToLog("INFO","Success, search was clear from search textbox")
    return True
# @Author: Michal Zomper
# The following method can handle list of entries and a single entry:
# in order to publish list of entries pass a List[] of entries name, for single entry - just pass the entry name
# also: the method will navigate to My media
# in categoryList / channelList will have all the names of the categories / channels to publish to
# Known limitation: entries MUST be presented on the first page of my media
def publishEntriesFromMyMedia(self, entriesName, categoryList, channelList='', disclaimer=False, showAllEntries=False):
    """Publish one entry (str) or several entries (list) from My Media into the
    given categories and/or channels.

    entriesName: entry name or list of entry names to publish.
    categoryList: category names to publish into (may be empty).
    channelList: channel names to publish into (may be empty; default '').
    disclaimer: when True, handle the disclaimer dialog per entry before publishing.
    showAllEntries: when True, scroll until all media is loaded before selecting.
    Returns True on success, False on the first failed step (failure is logged).
    """
    if self.navigateToMyMedia(forceNavigate = True) == False:
        writeToLog("INFO","FAILED Navigate to my media page")
        return False
    if showAllEntries == True:
        if self.showAllEntries() == False:
            return False
    # Checking if entriesNames list type
    if type(entriesName) is list:
        if disclaimer == True:
            for entryName in entriesName:
                sleep(2)
                if self.handleDisclaimerBeforePublish(entryName) == False:
                    writeToLog("INFO","FAILED, Handle disclaimer before Publish failed for entry:" + entryName)
                    return False
        # tick every entry's checkbox so the bulk publish action applies to all of them
        for entryName in entriesName:
            if self.checkSingleEntryInMyMedia(entryName) == False:
                writeToLog("INFO","FAILED to CHECK the entry in my-media page")
                return False
    else:
        if disclaimer == True:
            if self.handleDisclaimerBeforePublish(entriesName) == False:
                writeToLog("INFO","FAILED, Handle disclaimer before Publish failed")
                return False
        if self.checkSingleEntryInMyMedia(entriesName) == False:
            writeToLog("INFO","FAILED to CHECK the entry in my-media page")
            return False
    if self.clickActionsAndPublishFromMyMedia() == False:
        writeToLog("INFO","FAILED to click on action button")
        return False
    # fixed wait for the publish dialog to load -- TODO confirm an explicit wait would suffice
    sleep(10)
    if self.click(self.MY_MEDIA_PUBLISHED_RADIO_BUTTON, 40, True) == False:
        writeToLog("INFO","FAILED to click on publish button")
        return False
    sleep(2)
    # Click if category list is empty
    if len(categoryList) != 0:
        # Click on Publish in Category
        if self.click(self.MY_MEIDA_PUBLISH_TO_CATEGORY_OPTION, 30) == False:
            writeToLog("INFO","FAILED to click on Publish in Category")
            return False
        # choose all the categories to publish to
        for category in categoryList:
            tmoCategoryName = (self.MY_MEDIA_CHOSEN_CATEGORY_TO_PUBLISH[0], self.MY_MEDIA_CHOSEN_CATEGORY_TO_PUBLISH[1].replace('PUBLISHED_CATEGORY', category))
            if self.click(tmoCategoryName, 30) == False:
                writeToLog("INFO","FAILED to select published category '" + category + "'")
                return False
            sleep(2)
    # Click if channel list is empty
    if len(channelList) != 0:
        # Click on Publish in Channel
        if self.click(self.MY_MEIDA_PUBLISH_TO_CHANNEL_OPTION, 30) == False:
            writeToLog("INFO","FAILED to click on Publish in channel")
            return False
        sleep(2)
        # choose all the channels to publish to
        for channel in channelList:
            # NOTE(review): channels reuse the category locator template -- presumably intentional; confirm
            tmpChannelName = (self.MY_MEDIA_CHOSEN_CATEGORY_TO_PUBLISH[0], self.MY_MEDIA_CHOSEN_CATEGORY_TO_PUBLISH[1].replace('PUBLISHED_CATEGORY', channel))
            if self.click(tmpChannelName, 20, multipleElements=True) == False:
                writeToLog("INFO","FAILED to select published channel '" + channel + "'")
                return False
            sleep(1)
    if self.click(self.MY_MEDIA_PUBLISH_SAVE_BUTTON, 30) == False:
        writeToLog("INFO","FAILED to click on save button")
        return False
    if self.wait_visible(self.MY_MEDIA_SAVE_MESSAGE_CONFIRM, 45) == False:
        writeToLog("INFO","FAILED to find confirm save message")
        return False
    sleep(3)
    if type(entriesName) is list:
        entries = ", ".join(entriesName)
        writeToLog("INFO","Success, The entries '" + entries + "' were published successfully")
    else:
        writeToLog("INFO","Success, The entry '" + entriesName + "' was publish successfully")
    return True
# @Author: Michal Zomper
# The function check the the entries sort in my media is correct
def verifySortInMyMediaOldUi(self, sortBy, entriesList):
    """Verify that the old-UI my-media table lists 'entriesList' in 'sortBy' order.

    sortBy: enums sort option (its .value is used for logging).
    entriesList: entry names in the expected order.
    Returns True when every entry is found at a non-decreasing position.
    """
    if self.SortAndFilter(enums.SortAndFilter.SORT_BY,sortBy) == False:
        writeToLog("INFO","FAILED to sort entries by: " + sortBy.value)
        return False
    if self.showAllEntries() == False:
        writeToLog("INFO","FAILED to show all entries in my media")
        return False
    try:
        entriesInMyMedia = self.get_element(self.MY_MEDIA_TABLE).text.lower()
    except NoSuchElementException:
        writeToLog("INFO","FAILED to get entries list in galley")
        return False
    entriesInMyMedia = entriesInMyMedia.split("\n")
    prevEntryIndex = -1
    for entry in entriesList:
        # FIX: list.index raises ValueError when the entry is missing; fail
        # gracefully instead of propagating (consistent with verifySortInMyMedia).
        try:
            currentEntryIndex = entriesInMyMedia.index(entry.lower())
        except ValueError:
            writeToLog("INFO","FAILED , entry '" + entry + "' was not found in my media" )
            return False
        if prevEntryIndex > currentEntryIndex:
            writeToLog("INFO","FAILED ,sort by '" + sortBy.value + "' isn't correct. entry '" + entry + "' isn't in the right place" )
            return False
        prevEntryIndex = currentEntryIndex
    writeToLog("INFO","Success, My media sort by '" + sortBy.value + "' was successful")
    return True
def verifySortInMyMedia(self, sortBy, entriesList):
    """Verify that my media lists 'entriesList' in the order implied by 'sortBy'.

    sortBy: enums sort option; on the elastic-search (new) UI its raw string
            value is what SortAndFilter expects.
    entriesList: entry names in the expected order.
    Returns True when every entry is found at a non-decreasing position.
    """
    # Query the UI flavor once instead of twice (behavior unchanged).
    isElastic = self.clsCommon.isElasticSearchOnPage()
    sortName = sortBy.value   # used for logging in both UI flavors
    if isElastic == True:
        # the new UI's SortAndFilter takes the raw string value
        sortBy = sortBy.value
    if self.SortAndFilter(enums.SortAndFilter.SORT_BY,sortBy) == False:
        writeToLog("INFO","FAILED to sort entries")
        return False
    if self.showAllEntries() == False:
        writeToLog("INFO","FAILED to show all entries in my media")
        return False
    sleep(10)
    try:
        entriesInMyMedia = self.wait_visible(self.MY_MEDIA_TABLE).text.lower()
    except NoSuchElementException:
        writeToLog("INFO","FAILED to get entries list in galley")
        return False
    entriesInMyMedia = entriesInMyMedia.split("\n")
    # Single verification loop: the elastic and old-UI loops were duplicates;
    # the old-UI path now also fails gracefully on a missing entry instead of
    # raising ValueError from list.index.
    prevEntryIndex = -1
    for entry in entriesList:
        try:
            currentEntryIndex = entriesInMyMedia.index(entry.lower())
        except ValueError:
            writeToLog("INFO","FAILED , entry '" + entry + "' was not found in my media" )
            return False
        if prevEntryIndex > currentEntryIndex:
            writeToLog("INFO","FAILED ,sort by '" + sortName + "' isn't correct. entry '" + entry + "' isn't in the right place" )
            return False
        prevEntryIndex = currentEntryIndex
    writeToLog("INFO","Success, My media sort by '" + sortName + "' was successful")
    return True
def showAllEntries(self, searchIn = enums.Location.MY_MEDIA, timeOut=60, afterSearch=False):
    """Scroll to the bottom of a media list until the 'no more results' message
    appears, so every entry of an infinite-scroll page is loaded.

    searchIn: which page we are on (MY_MEDIA / CATEGORY_PAGE / MY_HISTORY).
    timeOut: maximum seconds to keep scrolling before giving up.
    afterSearch: category page only - use the post-search locators.
    Returns True when all media is displayed, False on timeout or bad location.
    """
    # Check if we are in My Media page
    if searchIn == enums.Location.MY_MEDIA:
        tmp_table_size = self.MY_MEDIA_TABLE_SIZE
        no_entries_page_msg = self.MY_MEDIA_NO_RESULTS_ALERT
    # Check if we are in category page
    elif searchIn == enums.Location.CATEGORY_PAGE:
        if self.clsCommon.isElasticSearchOnPage() == True:
            if afterSearch == False:
                tmp_table_size = self.clsCommon.category.CATEGORY_TABLE_SIZE_NEW_UI
                no_entries_page_msg = self.clsCommon.category.CATEGORY_NO_MORE_MEDIA_ITEMS_MSG
            else:
                tmp_table_size = self.clsCommon.category.CATEGORY_TABLE_SIZE_AFTER_SEARCH
                no_entries_page_msg = self.clsCommon.category.CATEGORY_NO_MORE_MEDIA_FOUND_NEW_UI_MSG
        else:#TODO OLD UI
            # NOTE(review): both branches below are identical -- the old UI has
            # no dedicated after-search locators yet (see TODO above).
            if afterSearch == False:
                tmp_table_size = self.clsCommon.category.CATEGORY_TABLE_SIZE
                no_entries_page_msg = self.clsCommon.category.CATEGORY_NO_MORE_MEDIA_FOUND_MSG
            else:
                tmp_table_size = self.clsCommon.category.CATEGORY_TABLE_SIZE
                no_entries_page_msg = self.clsCommon.category.CATEGORY_NO_MORE_MEDIA_FOUND_MSG
    elif searchIn == enums.Location.MY_HISTORY:
        tmp_table_size = self.clsCommon.myHistory.MY_HISTORY_TABLE_SIZE
        no_entries_page_msg = self.clsCommon.myHistory.MY_HISTORY_NO_MORE_RESULTS_ALERT
    else:
        writeToLog("INFO","Failed to get valid location page")
        return False
    # fewer than 4 size elements means there is nothing to scroll for -- TODO confirm threshold
    if len(self.get_elements(tmp_table_size)) < 4:
        writeToLog("INFO","Success, All media is display")
        return True
    # jump to the page bottom to trigger lazy loading, then poll for the end-of-list message
    self.clsCommon.sendKeysToBodyElement(Keys.END)
    wait_until = datetime.datetime.now() + datetime.timedelta(seconds=timeOut)
    while wait_until > datetime.datetime.now():
        if self.is_present(no_entries_page_msg, 2) == True:
            writeToLog("INFO","Success, All media is display")
            sleep(1)
            # go back to the top of the page
            self.clsCommon.sendKeysToBodyElement(Keys.HOME)
            return True
        self.clsCommon.sendKeysToBodyElement(Keys.END)
    writeToLog("INFO","FAILED to show all media")
    return False
# @Author: Michal Zomper
# The function check the the entries in my media are filter correctly
def verifyFiltersInMyMedia(self, entriesDict):
    """Verify filter results in my media.

    entriesDict: maps entry name -> expected visibility (True = must appear in
                 the table, False = must not appear).
    Returns True when every entry matches its expected visibility.
    """
    if self.showAllEntries() == False:
        writeToLog("INFO","FAILED to show all entries in my media")
        return False
    try:
        entriesInMyMedia = self.get_element(self.MY_MEDIA_TABLE).text.lower()
    except NoSuchElementException:
        writeToLog("INFO","FAILED to get entries list in galley")
        return False
    # Iterate name/expectation pairs once (the original looked the value up
    # twice and kept commented-out tuple-based legacy code).
    for entry, shouldBeVisible in entriesDict.items():
        isVisible = entry.lower() in entriesInMyMedia
        if shouldBeVisible == True and isVisible == False:
            writeToLog("INFO","FAILED, entry '" + entry + "' wasn't found in my media although he need to be found")
            return False
        if shouldBeVisible == False and isVisible == True:
            writeToLog("INFO","FAILED, entry '" + entry + "' was found in my media although he doesn't need to be found")
            return False
    writeToLog("INFO","Success, Only the correct media display in my media")
    return True
#@Author: Michal Zomper
# The function going over the entries list and check that the entries icon that display on the thumbnail are match the 'entryType' parameter
def verifyEntryTypeIcon(self, entriesList, entryType):
    """Verify each entry's thumbnail shows the icon matching entryType
    (enums.MediaType), then verify no other type's icon is present.
    Returns True when every entry carries the expected icon.
    """
    for entry in entriesList:
        tmpEntry = (self.MY_MEDIA_ENTRY_THUMBNAIL[0], self.MY_MEDIA_ENTRY_THUMBNAIL[1].replace('ENTRY_NAME', entry))
        try:
            entryThumbnail = self.get_element(tmpEntry)
        except NoSuchElementException:
            writeToLog("INFO","FAILED to find entry '" + entry + "' element")
            return False
        if entryType == enums.MediaType.IMAGE:
            try:
                self.get_child_element(entryThumbnail, self.MY_MEDIA_IMAGE_ICON)
            except NoSuchElementException:
                writeToLog("INFO","FAILED to find entry '" + entry + "' Image icon")
                return False
        if entryType == enums.MediaType.AUDIO:
            try:
                self.get_child_element(entryThumbnail, self.MY_MEDIA_AUDIO_ICON)
            except NoSuchElementException:
                writeToLog("INFO","FAILED to find entry '" + entry + "' Audio icon")
                return False
        if entryType == enums.MediaType.QUIZ:
            try:
                self.get_child_element(entryThumbnail, self.MY_MEDIA_QUIZ_ICON)
            except NoSuchElementException:
                writeToLog("INFO","FAILED to find entry '" + entry + "' Quiz icon")
                return False
        # video icons only exist on the old UI, per the localSettings flag
        if localSettings.LOCAL_SETTINGS_IS_NEW_UI == False:
            if entryType == enums.MediaType.VIDEO:
                try:
                    self.get_child_element(entryThumbnail, self.MY_MEDIA_VIDEO_ICON_OLD_UI)
                except NoSuchElementException:
                    writeToLog("INFO","FAILED to find entry '" + entry + "' Video icon")
                    return False
    # after all entries matched, make sure no icons of other media types are present
    if self.verifyFilterUniqueIconType(entryType) == False:
        writeToLog("INFO","FAILED entries from different types display although the filter set to " + entryType.value)
        return False
    # NOTE(review): 'entry' here is the last loop variable, so the success log
    # names only the final entry although all entries were verified.
    writeToLog("INFO","Success, All entry '" + entry + "' " + entryType.value + " icon was verify")
    return True
#@Author: Michal Zomper
# The function check that only the entries type with that match the 'iconType' parameter display in the list in my media
def verifyFilterUniqueIconType(self, iconType):
    """Verify that no icon of any media type other than 'iconType' is present."""
    if self.showAllEntries() == False:
        writeToLog("INFO","FAILED to show all entries in my media")
        return False
    # (other-type, locator, label) probes; video is only checked on the old UI
    probes = [
        (enums.MediaType.IMAGE, self.MY_MEDIA_IMAGE_ICON, "Image"),
        (enums.MediaType.AUDIO, self.MY_MEDIA_AUDIO_ICON, "Audio"),
    ]
    if localSettings.LOCAL_SETTINGS_IS_NEW_UI == False:
        probes.append((enums.MediaType.VIDEO, self.MY_MEDIA_VIDEO_ICON_OLD_UI, "Video"))
    for otherType, locator, label in probes:
        if iconType != otherType:
            if self.wait_elements(locator) != False:
                writeToLog("INFO","FAILED, " + label + " icon display in the list although only " + iconType.value + "need to be display")
                return False
    writeToLog("INFO","Success, only " + iconType.value + " type entries display")
    return True
#@Author: Michal Zomper
def expendAndVerifyPublishedEntriesDetails(self, entryName, categoris, channels):
    """Verify an entry is PUBLISHED and its expanded details list the given
    categories and channels."""
    privacyOk = self.verifyEntryPrivacyInMyMedia(entryName, enums.EntryPrivacyType.PUBLISHED, forceNavigate=False)
    if privacyOk == False:
        writeToLog("INFO","FAILED to verify entry '" + entryName + "' privacy")
        return False
    detailsOk = self.verifyPublishedInExpendEntryDeatails(entryName, categoris, len(categoris), channels, len(channels))
    if detailsOk == False:
        writeToLog("INFO","FAILED to verify entry '" + entryName + "' published categories/channels")
        return False
    writeToLog("INFO","Success, Entry published details were verified successfully")
    return True
#@Author: Michal Zomper
# pre condition to this function : need to open the publish option (the '+' button) in order to see all the publish details
# the function verifyEntryPrivacyInMyMedia have the option to open the details
def verifyPublishedInExpendEntryDeatails(self, entryName, categories="", categoryCount="", channels="", channelCount=""):
    """Verify the expanded publish details of an entry list the expected
    categories/channels and their counts.

    BUG FIX: the original tests used 'X in tmpDetails == False', which Python
    chains to '(X in tmpDetails) and (tmpDetails == False)' -- always False, so
    none of the verifications ever fired. They now use 'not in'.
    Returns True when all expected details are present.
    """
    tmpEntry = self.replaceInLocator(self.MY_MEDIA_ENTRY_PARNET, "ENTRY_NAME", entryName)
    entryId = self.clsCommon.upload.extractEntryID(tmpEntry)
    detailsBody = (self.MY_MEDIA_EXPEND_MEDIA_DETAILS[0], self.MY_MEDIA_EXPEND_MEDIA_DETAILS[1].replace('ENTRY_ID', entryId))
    tmpDetails = self.get_element(detailsBody).text
    tmpDetails = tmpDetails.split("\n")
    if len(categories) > 0:
        # verify number of published categories (singular/plural label)
        if len(categories) == 1:
            countLabel = str(categoryCount) + " Category:"
        else:
            countLabel = str(categoryCount) + " Categories:"
        if countLabel not in tmpDetails:
            writeToLog("INFO","FAILED to verify entry '" + entryName + "' have " + str(categoryCount) + " categories")
            return False
        # Verify categories names: the details pane shows all names on one line
        # separated by spaces -- presumably; confirm against the UI.
        listOfCategories = " ".join(categories).strip()
        if listOfCategories not in tmpDetails:
            writeToLog("INFO","FAILED to find categories '" + listOfCategories + "' under the entry '" + entryName + "' published in option")
            return False
    if len(channels) > 0:
        # verify number of published channels (singular/plural label)
        if len(channels) == 1:
            countLabel = str(channelCount) + " Channel:"
        else:
            countLabel = str(channelCount) + " Channels:"
        if countLabel not in tmpDetails:
            writeToLog("INFO","FAILED to verify entry '" + entryName + "' have " + str(channelCount) + " channels")
            return False
        # Verify channels names
        listOfChannels = " ".join(channels).strip()
        if listOfChannels not in tmpDetails:
            writeToLog("INFO","FAILED to find channels '" + listOfChannels + "' under the entry '" + entryName + "' published in option")
            return False
    writeToLog("INFO","Success, All entry '" + entryName + "' categories/channels display under published in option")
    return True
#@Author: Michal Zomper
def verifyMyMediaView(self, entryName, view=enums.MyMediaView.DETAILED):
    """Verify the my-media row widgets for 'entryName' match the given view type."""
    # (locator template, label) pairs that must be visible in every view,
    # checked in the same order as before
    widgets = [
        (self.MY_MEDIA_ENTRY_CHECKBOX, "checkbox"),
        (self.MY_MEDIA_ENTRY_TOP, "title"),
        (self.MY_MEDIA_ENRTY_EDIT_BUTTON, "edit Button"),
        (self.MY_MEDIA_ENRTY_DELETE_BUTTON, "delete button"),
    ]
    # the detailed view additionally shows the thumbnail
    if view == enums.MyMediaView.DETAILED:
        widgets.append((self.MY_MEDIA_ENTRY_THUMBNAIL, "thumbnail"))
    for locator, label in widgets:
        tmpLocator = (locator[0], locator[1].replace('ENTRY_NAME', entryName))
        if self.is_visible(tmpLocator) == False:
            writeToLog("INFO","FAILED to find entry '" + entryName + "' " + label)
            return False
    writeToLog("INFO","Success, my media '" + view.value + " view' was verify for entry '" + entryName + "'")
    return True
#@Author: Michal Zomper
def verifyMyMediaViewForEntris(self, entrisList, viewType):
    """Verify the my-media view type for a single entry (str) or a list of entries.

    Returns True when every entry passes verifyMyMediaView with 'viewType'.
    """
    # Checking if entriesNames list type
    if type(entrisList) is list:
        for entry in entrisList:
            if self.verifyMyMediaView(entry, viewType) == False:
                writeToLog("INFO","FAILED to verify my media view '" + viewType.value + "' for entry" + entry + "'")
                return False
    else:
        # BUG FIX: viewType was not forwarded here, so a single entry was always
        # verified against the default DETAILED view regardless of 'viewType'.
        if self.verifyMyMediaView(entrisList, viewType) == False:
            writeToLog("INFO","FAILED to verify my media view '" + viewType.value + "' for entry" + entrisList + "'")
            return False
    writeToLog("INFO","Success, my media '" + viewType.value + " view' was verified")
    return True
|
from matplotlib import animation
import matplotlib.pyplot as plt
import numpy as np
def save_frames_as_gif(frames, fps=60, filename='gym_animation.gif'):
    """Save a sequence of RGB frames (H x W x C arrays) as an animated GIF.

    frames: non-empty list of image arrays, all the same shape.
    fps: playback frame rate passed to the GIF writer.
    filename: output file name, written to the current directory.
    Requires ImageMagick to be installed (matplotlib 'imagemagick' writer).
    """
    # size the figure so one frame pixel maps to one figure pixel at 72 dpi
    fig = plt.figure(figsize=(frames[0].shape[1] / 72.0, frames[0].shape[0] / 72.0), dpi=72)
    patch = plt.imshow(frames[0])
    plt.axis('off')

    def animate(i):
        # swap the displayed image in place for frame i
        patch.set_data(frames[i])

    anim = animation.FuncAnimation(fig, animate, frames=len(frames), interval=50)
    anim.save('./' + filename, writer='imagemagick', fps=fps)
    # BUG FIX: close the figure so repeated calls do not accumulate open figures
    plt.close(fig)
def get_state_rep_func(maze_size):
    """Return a function mapping a maze observation to a flat integer state.

    maze_size: (rows, cols) pair; cols is the row stride of the flat index.
    """
    num_cols = maze_size[1]

    def get_state(observation):
        """
        Converts observation (position in maze) into state.
        """
        # an int observation is already a flat state index
        if type(observation) is int:
            return observation
        col, row = observation[0], observation[1]
        return int(row * num_cols + col)

    return get_state
def argmax(values, seed=None):
    """
    Takes in a list of values and returns the index of the item
    with the highest value. Breaks ties randomly.
    returns: int - the index of the highest value in values
    Raises ValueError when values is empty.
    """
    # FIX: use a local RandomState instead of np.random.seed so the *global*
    # RNG stream is not clobbered as a side effect. RandomState(seed) draws the
    # same MT19937 sequence the seeded global generator would, so seeded
    # results are unchanged.
    rng = np.random.RandomState(seed)
    top_value = float("-inf")
    ties = []
    for i, v in enumerate(values):
        if v > top_value:
            top_value = v
            ties = [i]        # new maximum starts a fresh tie group
        elif v == top_value:
            ties.append(i)
    if not ties:
        # np.random.choice([]) would raise ValueError anyway; make it explicit
        raise ValueError("argmax() arg is an empty sequence")
    return rng.choice(ties)
import json
from django.shortcuts import render
from django.http import JsonResponse
from .circuitmaker import makecircuit
import json
import time
# from django.views.csrf import csrf_exempt
def home(request):
    """Render the circuit-builder landing page."""
    template_name = "index.html"
    return render(request, template_name)
def getsvg(request):
    """Return the SVG rendering of a circuit description as JSON.

    Reads a 'circuitlst' parameter (JSON-encoded circuit list) from the POST
    body or, for any other method, the query string, and responds with
    {"circuitsvg": "<svg ...>"}.
    """
    if request.method == "POST":
        circuit = request.POST.get('circuitlst')
    else:
        circuit = request.GET.get('circuitlst')
    # ROBUSTNESS: .get() returns None when the parameter is missing, and
    # json.loads(None) raises TypeError -- report a clear client error instead.
    if circuit is None:
        return JsonResponse({"error": "missing 'circuitlst' parameter"}, status=400)
    circuit = json.loads(circuit)
    circuitsvg = makecircuit(circuit)
    circuitsvg = str(circuitsvg)
    return JsonResponse({"circuitsvg": circuitsvg})
|
from time import time
def timestamp() -> int:
    """Return the current Unix time in whole milliseconds."""
    seconds = time()
    return int(seconds * 1000)
from celery.decorators import task
from celery.utils.log import get_task_logger
from architect.monitor.models import Monitor
log = get_task_logger(__name__)
@task(name="get_monitor_status_task")
def get_monitor_status_task(monitor_name):
    """Probe the named monitor's backend and persist 'active'/'error' status."""
    monitor = Monitor.objects.get(name=monitor_name)
    monitor.status = 'active' if monitor.client().check_status() else 'error'
    monitor.save()
    return True
@task(name="sync_monitor_resources_task")
def sync_monitor_resources_task(monitor_name):
    """Refresh and persist the resource inventory of the named monitor."""
    monitor = Monitor.objects.get(name=monitor_name)
    log.info('Updating monitor {}'.format(monitor_name))
    monitor_client = monitor.client()
    monitor_client.update_resources()
    monitor_client.save()
    return True
|
#region Import Modules
from fluid_properties import *
import numpy as np
import pandas as pd
import math
from pyXSteam.XSteam import XSteam
import matplotlib.pyplot as plt
import pprint
#endregion
#region Inputs:
# Geometry of the enclosure [m]
geometry={}
geometry['L']=0.69   # length
geometry['W']=0.4    # width
geometry['H']=2      # height
# Ambient Conditions
external={}
external['emissivity']=0.94   # surface emissivity for the radiative term
T_amb=np.array([-50,-40,-30,-20,-10,0])
# NOTE(review): the line below overrides the sweep above, so only -30 is
# evaluated -- looks like a debugging leftover; confirm which range is intended.
T_amb=np.array([-30])
# Internal Volume
internal={}
internal['T_in']=1   # internal set-point temperature -- presumably degC; confirm units
#endregion
#region Calculations
fig = plt.figure()
ax1 = fig.add_subplot(111)
# per-ambient-temperature heat-flow results
Q_conv=np.zeros(len(T_amb))      # convective loss
Q_rademit=np.zeros(len(T_amb))   # radiative loss
Q_balance=np.zeros(len(T_amb))   # total = convection + radiation
i=0
j=0   # NOTE(review): j is never used below -- confirm it can be removed
for T_ambi in T_amb:
    # Ambient Conditions
    external['A_sup'] = 2 * ( geometry['L'] * geometry['W'] + geometry['L'] * geometry['H'] + geometry['W'] * geometry['H']) # [m^2]
    external['T_amb'] = T_ambi
    sigma = 5.67 * 10 ** (-8)   # Stefan-Boltzmann constant [W/(m^2*K^4)]
    # Internal Volume
    internal['V_total'] = geometry['H'] * geometry['L'] * geometry['W']
    # Conduction
    # Calculate convection coefficient from air film properties (free convection)
    g = 9.80665 # [m/s^2]
    external['T_film'] = (internal['T_in'] + external['T_amb']) * 0.5
    external['beta'] = 1 / (273.15 + external['T_film'])   # thermal expansion coefficient [1/K]
    external['vu'] = thermal_properties('vu', 'air', external['T_film']) # [m^2/(s)]
    external['k'] = thermal_properties('k', 'air', external['T_film']) # [W/(m*ºK)]
    external['Pr'] = thermal_properties('Pr', 'air', external['T_film'])
    # Grashof number, with the box height as the characteristic length
    external['Gr'] = (g * external['beta'] * (geometry['H'] ** 3) * (internal['T_in'] - external['T_amb'])) / (
        external['vu'] ** 2)
    external = nusselt_external_free(external)
    external['h_ext'] = (external['Nu'] * external['k']) / (geometry['H'])
    # Calculate ventilation needs:
    ventilation = {}
    external['Q_rademit'] = sigma * external['A_sup'] * external['emissivity'] * (
        ((internal['T_in'] + 273.15) ** 4) - ((external['T_amb'] + 273.15) ** 4))
    # convection acts on the side walls only: top/bottom faces (2*W*L) excluded
    external['Q_conv'] = external['h_ext'] * (external['A_sup']-2*geometry['W']*geometry['L']) * (internal['T_in'] - external['T_amb'])
    Q_balance[i]=(external['Q_conv']+external['Q_rademit'])
    Q_conv[i] = external['Q_conv']
    Q_rademit[i] = external['Q_rademit']
    i+=1
ax1.scatter(T_amb, Q_balance, label=str('Internal Temperature = '+ str(internal['T_in']) + ' [℃]'))
i=0
#endregion
#region Plots
# Plot1
ax1.set_xlabel('Ambient Temperature - T_{amb} - [ºC]')
ax1.set_xticks(T_amb)
# NOTE(review): Q_balance is computed in W above but the label says kW -- confirm
ax1.set_ylabel('Thermal Power transferred to the exterior - [kW]')
ax1.set_title('Thermal Power Required to maintain internal temperature at T= '+str(internal['T_in'])+' [ºC]')
ax1.legend()
fig.show()
#endregion
|
import curses
import time
# Minimal curses demo: draw a greeting, keep it on screen, then restore the terminal.
stdscr = curses.initscr()
stdscr.addstr(4, 3, 'Hello !')
stdscr.refresh()
time.sleep(10)
curses.endwin()
|
from audio.models import AudioBook, Podcast, Song
import json
from django.test import TestCase
from django.urls import reverse
from pprint import pprint
class TestEndpoints(TestCase):
    """Integration tests for the audio-server REST endpoints.

    setUp seeds one AudioBook, two Podcasts and two Songs so the create /
    list / fetch / update / delete endpoints have records to act on.
    """
    def setUp(self):
        # setting up some test record entry in tables of database namely AudioBook, Podcast, Song
        # to test for the End Point
        AudioBook.objects.create(
            title="test_title",
            author="test_author",
            narrator="test_narrator",
            duration=223
        )
        Podcast.objects.create(
            name="test_podcast_name",
            host="test_host",
            duration=3423
        )
        Podcast.objects.create(
            name="test_podcast_name_2",
            host="test_host_2",
            duration=33
        )
        Song.objects.create(
            name="tset_song_name",
            duration=8457
        )
        Song.objects.create(
            name="test_song_name_2",
            duration=73868
        )
        return super().setUp()
    def test_audioCreate(self):
        """
        Used to test Create API
        """
        url = reverse("audioCreate")
        print ("\n########## Create API ##########")
        # GET method for audio file creation
        get_resp = self.client.get(url)
        print ("status_code : {}, get content : ".format(get_resp.status_code), end="\n")
        pprint(json.loads(get_resp.content))
        # NOTE(review): '400 or 404 or 406' evaluates to 400, so only status
        # 400 is actually asserted here -- confirm intent.
        self.assertEqual(get_resp.status_code, 400 or 404 or 406)
        # POST method for audio file creation without any metadata
        post_resp = self.client.post(url)
        print ("status_code : {}, post content : ".format(post_resp.status_code), end="\n")
        pprint(json.loads(post_resp.content))
        self.assertEqual(post_resp.status_code, 500)
        # POST method for audio file creation with incomplete metadata (empty name)
        post_data = {
            "audioFileType": "song",
            "audioFileMetadata" : {
                "name": "",
                "duration": 90
            }
        }
        post_resp = self.client.post(url, data=post_data, content_type="application/json")
        print ("status_code : {}, post content : ".format(post_resp.status_code), end="\n")
        pprint(json.loads(post_resp.content))
        self.assertEqual(post_resp.status_code, 500)
        # POST method for audio file creation with improper metadata (over-long author field)
        post_data = {
            "audioFileType": "audiobook",
            "audioFileMetadata" : {
                "title": "test_case_title",
                "duration": 90,
                "author": "Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum",
                "narrator": "test_narrator_name"
            }
        }
        post_resp = self.client.post(url, data=post_data, content_type="application/json")
        print ("status_code : {}, post method(audiobook) : ".format(post_resp.status_code), end="\n")
        pprint(json.loads(post_resp.content))
        self.assertEqual(post_resp.status_code, 500)
        # POST method for audio file creation with proper metadata
        post_data = {
            "audioFileType": "song",
            "audioFileMetadata" : {
                "name": "test_case_name",
                "duration": 90
            }
        }
        post_resp = self.client.post(url, data=post_data, content_type="application/json")
        print ("status_code : {}, post method(song) : ".format(post_resp.status_code), end="\n")
        pprint(json.loads(post_resp.content))
        self.assertEqual(post_resp.status_code, 200)
        # POST method for audio file creation with complete and proper metadata
        post_data = {
            "audioFileType": "audiobook",
            "audioFileMetadata" : {
                "title": "test_case_title_1",
                "duration": 90,
                "author": "Lorem Ipsum is simply dummy text of the printing and typesetting industry.",
                "narrator": "test_narrator_name_1"
            }
        }
        post_resp = self.client.post(url, data=post_data, content_type="application/json")
        print ("status_code : {}, post method(audiobook) : ".format(post_resp.status_code), end="\n")
        pprint(json.loads(post_resp.content))
        self.assertEqual(post_resp.status_code, 200)
    def test_audioFiles(self):
        """
        Used to test GET API for all audio file of a audio file type.
        """
        print ("\n\n########## GET API ##########")
        audiobook_url = reverse("audioFiles", args=["audiobook"])
        song_url = reverse("audioFiles", args=["song"])
        podcast_url = reverse("audioFiles", args=["podcast"])
        # Tests for GET Method to fetch all the audiobooks files
        # while fetching audio files it is also checking for the returned data's datatype
        # NOTE(review): assertEquals is a deprecated alias of assertEqual.
        get_resp = self.client.get(audiobook_url)
        print ("status_code : {}, get method(audiobook): ".format(get_resp.status_code), end="\n")
        pprint(json.loads(get_resp.content))
        self.assertEquals(get_resp.status_code, 200)
        self.assertIsInstance(json.loads(get_resp.content), dict)
        self.assertIsInstance(json.loads(get_resp.content)["audiobook"], list)
        self.assertNotIsInstance(json.loads(get_resp.content)["audiobook"], (str,int,dict,set))
        # Tests for GET Method to fetch all the songs files
        # while fetching audio files it is also checking for the returned data's datatype
        get_resp = self.client.get(song_url)
        print ("status_code : {}, get method(song): ".format(get_resp.status_code), end="\n")
        pprint(json.loads(get_resp.content))
        self.assertEquals(get_resp.status_code, 200)
        self.assertIsInstance(json.loads(get_resp.content), dict)
        self.assertIsInstance(json.loads(get_resp.content)["song"], list)
        self.assertNotIsInstance(json.loads(get_resp.content)["song"], (str,int,dict,set))
        # Tests for GET Method to fetch all the podcasts files
        # while fetching audio files it is also checking for the returned data's datatype
        get_resp = self.client.get(podcast_url)
        print ("status_code : {}, get method(podcast): ".format(get_resp.status_code), end="\n")
        pprint(json.loads(get_resp.content))
        self.assertEquals(get_resp.status_code, 200)
        self.assertIsInstance(json.loads(get_resp.content), dict)
        self.assertIsInstance(json.loads(get_resp.content)["podcast"], list)
        self.assertNotIsInstance(json.loads(get_resp.content)["podcast"], (str,int,dict,set))
        # Tests for POST Method at wrong EndPoint
        post_resp = self.client.post(audiobook_url)
        print ("status_code : {}, post method(podcast): ".format(post_resp.status_code), end="\n")
        pprint(json.loads(post_resp.content))
        self.assertNotEquals(post_resp.status_code, 200)
        # Tests for PUT Method without metadata
        put_resp = self.client.put(song_url)
        print ("status_code : {}, put method(podcast): ".format(put_resp.status_code), end="\n")
        pprint(json.loads(put_resp.content))
        self.assertNotEquals(put_resp.status_code, 200)
        # Tests for DELETE Method without an audio file ID
        del_resp = self.client.delete(podcast_url)
        print ("status_code : {}, del method(podcast): ".format(del_resp.status_code), end="\n")
        pprint(json.loads(del_resp.content))
        self.assertNotEquals(del_resp.status_code, 200)
    def test_audioFileFetch(self):
        """
        Used to test GET/PUT/DELETE API for particular audio file.
        """
        print ("\n\n########## GET/PUT/DELETE API ##########")
        # ID 10 does not exist (only one audiobook was seeded) -- used for the 404 path
        audiobook_url = reverse("audioFileFetch", args=["audiobook", "10"])
        song_url = reverse("audioFileFetch", args=["song", "1"])
        podcast_url = reverse("audioFileFetch", args=["podcast", "2"])
        # Tests for GET Method to fetch a particular audiobook
        get_resp = self.client.get(audiobook_url)
        print ("status_code : {}, get method(audiobook): ".format(get_resp.status_code), end="\n")
        pprint(json.loads(get_resp.content))
        self.assertEquals(get_resp.status_code, 404)
        self.assertEquals(json.loads(get_resp.content)["error"], "AudioBook matching query does not exist.")
        # Tests for GET Method to fetch a particular song
        get_resp = self.client.get(song_url)
        print ("status_code : {}, get method(song): ".format(get_resp.status_code), end="\n")
        pprint(json.loads(get_resp.content))
        self.assertEquals(get_resp.status_code, 200)
        # Tests for PUT Method to update an audio file
        # here audio file type (Song) with ID(1) is being updated
        put_data = {
            "audioFileMetadata": {
                "name": "Himanshu Shende",
                "duration": 6759
            }
        }
        put_resp = self.client.put(song_url, data=put_data, content_type='application/json')
        print ("status_code : {}, put method(song): ".format(put_resp.status_code), end="\n")
        pprint(json.loads(put_resp.content))
        self.assertEquals(put_resp.status_code, 200)
        # Tests for DELETE Method
        # here audio file type (Podcast) with ID(2) is being deleted
        del_resp = self.client.delete(podcast_url)
        print ("status_code : {}, del method(podcast): ".format(del_resp.status_code), end="\n")
        pprint(json.loads(del_resp.content))
        self.assertEquals(del_resp.status_code, 200)
|
"""
The python function, find_average() given below, is written to accept a list of marks and return the average marks. On execution, the program is resulting in an error.
1: Add code to handle the exception occurring in the code.
2: Make the necessary correction in the program to remove the error.
3: Make the following changes in the code, execute, observe the results. Add code to handle the errors occurring in each case.
Case – 1: Initialize m_list as ["1",2,3,4]
Case – 2: Initialize m_list as given below
mark1="A"
mark1=int("A")
m_list=[mark1,2,3,4]
Case – 3: Initialize m_list as []
Case – 4: Make the following change in the for loop statement
for i in range(0, len(mark_list)+1):
"""
#PF-Exer-30
def find_average(mark_list):
    """Return the average of the numeric marks in mark_list.

    Returns None (after printing a diagnostic) when the marks cannot be
    averaged: non-numeric items raise TypeError, and an empty list raises
    ZeroDivisionError from the division by len. Other exceptions propagate
    (the original bare 'except' silently swallowed everything).
    """
    try:
        total = 0
        # iterate the items directly instead of indexing with range(len(...))
        for mark in mark_list:
            total += mark
        marks_avg = total / len(mark_list)
        return marks_avg
    except TypeError:
        # e.g. m_list = ["1", 2, 3, 4] -- int + str is not defined
        print("some error occur")
        return None
    except ZeroDivisionError:
        # e.g. m_list = [] -- the average of nothing is undefined
        print("some error occur")
        return None

m_list = [1, 2, 3, 4]
print("Average marks:", find_average(m_list))
|
# get the middle 2 characters of social security numbers
# in an array
s_n_numbers = ['738-38-4838', '638-28-3838', '538-18-2838',
               '438-08-1838', '338-48-0838']

def getMid2_digits_ssn(ssn_list=None):
    """Print and return the middle two digits of each SSN ('xxx-YY-xxxx').

    ssn_list: optional list of SSN strings; defaults to the module-level
    s_n_numbers, so the original no-argument call keeps working.
    Returns the list of middle-digit strings (the original returned None).
    """
    if ssn_list is None:
        ssn_list = s_n_numbers
    mids = []
    for snn in ssn_list:
        mid = snn[4:-5]   # the two chars between the dashes of 'xxx-YY-xxxx'
        print(mid)
        mids.append(mid)
    return mids

getMid2_digits_ssn()
|
# Print a simple login menu and demonstrate %-style number formatting.
print()
print("="*20)
print('''
1-新用户注册
2-用户登录
3-忘记密码
4-退出
''')
print("="*20)
# select = input("pls select:")
# name = input("pls input your name:")
# age = input("pls input your age:")
#
# print(name, "is", age, "years old!", sep=" ")
# print("%s is %s years old!" % (name, age))
# print("{} is {} years old!".format(name, age))
number = 111222333444
print('number is %09d' % number) # %09d zero-pads the value to 9 digits (no effect here: the number already has 12)
number2 = 0.33
print('number2 is %.3f' % number2) # %f defaults to 6 decimal places; %.3f keeps 3
print('number is %.2f%%' % (number2*100)) # %% prints a literal percent sign
|
#import packages
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn import preprocessing
from sklearn.ensemble import RandomForestClassifier
from sklearn.compose import ColumnTransformer
from sklearn.impute import SimpleImputer
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import OneHotEncoder
from sklearn.inspection import permutation_importance
from sklearn.linear_model import Ridge
from decimal import *
getcontext().prec = 10
# load data
data = pd.read_csv('merged_final.csv')
data_copy = data.copy()
print("Data loaded")
# split in target and train
X, y = data.drop(['TARGET', 'SK_ID_CURR'], axis=1), data['TARGET']
# make a list of categorical features and separate from numeric in lists
categorical_feature_mask = X.dtypes == object
categorical_features = X.columns[categorical_feature_mask].tolist()
numeric_feature_mask = X.dtypes != object
numeric_features = X.columns[numeric_feature_mask].tolist()
print(categorical_features)
print(numeric_features)
# rearrange: categorical columns first, then numeric
X = data[categorical_features + numeric_features]
print(X)
# train_test_split, stratified on the target
X_train, X_test, y_train, y_test = train_test_split(
    X, y, stratify=y, random_state=42)
# categorical pipeline: impute a 'missing' token, then one-hot encode
categorical_pipe = Pipeline([
    ('imputer', SimpleImputer(strategy='constant', fill_value='missing')),
    ('onehot', OneHotEncoder(handle_unknown='ignore'))
])
# strategy for numeric missing: mean
numerical_pipe = Pipeline([
    ('imputer', SimpleImputer(strategy='mean'))
])
# preprocessing for cat to onehot dummies.
# BUG FIX: renamed from `preprocessing`, which shadowed the
# `sklearn.preprocessing` module imported above.
preprocessor = ColumnTransformer(
    [('cat', categorical_pipe, categorical_features),
     ('num', numerical_pipe, numeric_features)])
# select classifier for pipeline
rf = Pipeline([
    ('preprocess', preprocessor),
    ('classifier', RandomForestClassifier(random_state=42))
])
# fit model
rf.fit(X_train, y_train)
# print accuracy
print("RF train accuracy: %0.3f" % rf.score(X_train, y_train))
print("RF test accuracy: %0.3f" % rf.score(X_test, y_test))
# make prediction on test_set
print(rf.predict(X_test))
# possible plot for casual feature importaces
"""ohe = (rf.named_steps['preprocess']
.named_transformers_['cat']
.named_steps['onehot'])
feature_names = ohe.get_feature_names(input_features=categorical_features)
feature_names = np.r_[feature_names, numeric_features]
tree_feature_importances = (
rf.named_steps['classifier'].feature_importances_)
sorted_idx = tree_feature_importances.argsort()
y_ticks = np.arange(0, len(feature_names))
fig, ax = plt.subplots()
ax.barh(y_ticks, tree_feature_importances[sorted_idx])
ax.set_yticklabels(feature_names[sorted_idx])
ax.set_yticks(y_ticks)
ax.set_title("Random Forest Feature Importances (MDI)")
fig.tight_layout()
plt.show()"""
# make Permutation sorted Importance Ranking
result = permutation_importance(rf, X_test, y_test, n_repeats=10,
                                random_state=42, n_jobs=1)
sorted_idx = result.importances_mean.argsort()
# possible plot for Permutation Importances
"""fig, ax = plt.subplots()
ax.boxplot(result.importances[sorted_idx].T,
vert=False, labels=X_test.columns[sorted_idx])
ax.set_title("Permutation Importances (test set)")
fig.tight_layout()
plt.show()"""
# Dataframe with sorted Permutation Importance per feature
df = pd.DataFrame(columns=['Feature', 'Importance_Mean', 'S_dev'])
# BUG FIX: the original comparison spliced placeholder strings into the
# expression ( >= """here"""0.000 ), which is a SyntaxError.  The two
# selection thresholds are now named variables — tune them here.
upper_threshold = 0.000  # keep features with mean importance >= this
lower_threshold = 0.000  # ...or with mean importance below this
rows = []
for i in result.importances_mean.argsort()[::-1]:
    if result.importances_mean[i] >= upper_threshold or result.importances_mean[i] < lower_threshold:
        rows.append({'Feature': X_test.columns[i],
                     'Importance_Mean': result.importances_mean[i],
                     'S_dev': result.importances_std[i]})
# Build the frame in one go: DataFrame.append is deprecated and was
# removed in pandas 2.0; pd.concat is the supported replacement.
if rows:
    df = pd.concat([df, pd.DataFrame(rows)], ignore_index=True)
print(df)
# create new DataFrame with selected features by threshold
selected_features = pd.DataFrame(data=data_copy, columns=['TARGET'])
selected_features[df['Feature'].tolist()] = pd.DataFrame(data=data_copy, columns=df['Feature'].tolist())
# save as csv
df.to_csv('Perm_Imp_Score.csv')
selected_features.to_csv('Selected_Data_Perm_Imp.csv', index=False)
print(selected_features)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# concatena.py
#
# Copyright 2015 Cristian <cristian@cristian>
"""
Concatenate n files into a destination file.  Example:
    python3 concatena.py <file1> <file2> ... <concatenated file>
"""
import sys
def main():
    """Append the contents of every input file (argv[1..n-2]) to the
    output file named by the last argument.

    BUG FIX: the original reopened the destination in append mode on
    every loop iteration and never closed it (leaked handles); both
    files are now managed with `with`, and the destination is opened
    exactly once.

    :return: 0 on success (Unix convention).
    """
    with open(sys.argv[len(sys.argv) - 1], 'a') as arq_concat:
        for i in range(1, len(sys.argv) - 1):
            print(i)  # progress: index of the file being copied
            with open(sys.argv[i], 'r') as arquivo:
                arq_concat.write(arquivo.read())
    #
    return 0
if __name__ == '__main__':
    main()
|
from .config import Config
from .consts import *
from .standard_logger import Logger
from quart_openapi import Pint
from .db import DBSettings, ODMMapper, ORMMapper
import importlib
import traceback
import asyncio
from multiprocessing import Process
from hypercorn.config import Config as HyperConfig
from hypercorn.asyncio import serve
from sqlalchemy.engine import create_engine
from .models import patch_umongo_meta, patch_sqlalchemy_meta
from sqlalchemy.ext.declarative import declarative_base
class RestService(object):
    """REST service built on a Quart (Pint) application.

    Wires configured views into the app and, when enabled, prepares the
    Mongo (ODM) and SQLAlchemy (ORM) back-ends.  The HTTP server runs in
    a background multiprocessing.Process (see run()/stop()).
    """
    DEFAULT_VALUES = REST_SERVICE_CONFIGS
    NAME = REST_SERVICE_BLOCK

    def __init__(self, **kwargs):
        # Remember which keys came from the caller (vs. the defaults).
        self.init_keys = set()
        for k, v in kwargs.items():  # TODO figure out how to distinguish between Models in Mongo and Postgres
            setattr(self, k, v)
            self.init_keys.add(k)
        for k, v in self.DEFAULT_VALUES.items():
            if k not in kwargs:
                setattr(self, k, v)
            else:
                setattr(self, k, kwargs.get(k))
        self.logger = Logger(self.NAME)
        self.app = Pint(self.NAME)
        self.app.config.update(DBSettings.get_mongo_config(**kwargs))
        self.app.config.update(DBSettings.get_sqlalchemy_config(**kwargs))
        self.orm_mappings = {}
        self.odm_mappings = {}
        self.default_mongo_settings = {}
        self.default_sqlalchemy_settings = {}
        self.default_mongo_engine = None
        self.default_sqla_engine = None
        self.views = []
        if isinstance(kwargs.get(VIEWS, list()), list):
            for v in kwargs.get(VIEWS, list()):
                self.import_add_view(v)
        if self.get_using_mongo():
            self.default_mongo_settings = DBSettings.get_mongoclient_kargs(**kwargs)
            self.default_mongo_settings[TLS_INSECURE] = True
            if self.default_mongo_settings.get(SSL_KEYFILE, None) is None:
                self.default_mongo_settings[SSL_KEYFILE] = self.get_key_pem()
            if self.default_mongo_settings.get(SSL_CERTFILE, None) is None:
                # BUG FIX: this branch previously overwrote SSL_KEYFILE with
                # the certificate path, leaving SSL_CERTFILE unset.
                self.default_mongo_settings[SSL_CERTFILE] = self.get_cert_pem()
        if self.get_using_postgres():
            self.default_sqlalchemy_settings = DBSettings.get_sqlalchemy_config(**kwargs)
            # NOTE(review): self.pgc is never assigned in this class —
            # presumably set via kwargs/config; confirm.
            uri = self.pgc.get(SQLALCHEMY_DATABASE_URI)
            self.default_sqla_engine = create_engine(uri)
            # BUG FIX: iterate .items() — iterating the dict directly yields
            # only the keys and the tuple unpacking would fail.
            for name, kargs in self.get_sqlalchemy_orms().items():
                classname = kargs.get(ORM_CLASS, None)
                tablename = kargs.get(TABLE, None)
                if classname is None or tablename is None:
                    continue
                self.import_add_orms(classname, tablename)
        # BUG FIX: get_odms()/get_orms() were never defined on this class;
        # the defined getters are get_mongo_odms()/get_sqlalchemy_orms().
        self.load_odm_configurations(self.get_mongo_odms())
        self.load_orm_configurations(self.get_sqlalchemy_orms())
        self.bg_thread = None

    def load_odm_configurations(self, odm_configs):
        """Create an ODMMapper per configured Mongo ODM block."""
        for name, configs in odm_configs.items():
            odm = ODMMapper(name, default_engine_settings=self.default_mongo_settings, **configs)
            self.odm_mappings[name] = odm
        # TODO finalize mappings

    def load_orm_configurations(self, orm_configs):
        """Create an ORMMapper per configured SQLAlchemy ORM block.

        BUG FIX: this previously instantiated ODMMapper objects and stored
        them in self.odm_mappings, clobbering the Mongo mappings.
        (Assumes ORMMapper shares ODMMapper's signature — confirm.)
        """
        for name, configs in orm_configs.items():
            orm = ORMMapper(name, default_engine_settings=self.default_sqlalchemy_settings, **configs)
            self.orm_mappings[name] = orm
        self.Base = declarative_base()

    def load_class(self, classname):
        """Resolve a dotted `module.Class` path to the class object.

        Returns None (after logging) when either the module cannot be
        imported or the attribute does not exist.
        """
        parts = classname.split('.')
        if len(parts) <= 1:
            raise Exception("Expecting a python_module.Class got {}".format(classname))
        mn = '.'.join(parts[:-1])
        cn = parts[-1]
        mi = None
        python_class = None
        try:
            mi = importlib.import_module(mn)
        except Exception:
            msg = "{} is not a valid Python module: \n{}".format(mn, traceback.format_exc())
            self.logger.exception(msg)
            return None
        try:
            python_class = getattr(mi, cn, None)
        except Exception:
            msg = "{} is not a valid Python class in {}: \n{}".format(cn, mn, traceback.format_exc())
            self.logger.exception(msg)
            return None
        return python_class

    def import_add_view(self, fq_python_class_view: str) -> bool:
        '''
        Import a module and load the class for a provided view
        :param fq_python_class_view: Python module in dot'ted notation, e.g. `foo.views.ViewX`
        :return: bool
        '''
        self.logger.debug("Adding view ({}) to rest-service".format(fq_python_class_view))
        parts = fq_python_class_view.split('.')
        if len(parts) <= 1:
            raise Exception("Expecting a python_module.Class got {}".format(fq_python_class_view))
        python_class = self.load_class(fq_python_class_view)
        if python_class is not None:
            self.views.append(python_class)
            python_class.bind_application(self.app)
            self.logger.debug("Finished adding view ({}) to rest-service".format(fq_python_class_view))
            return True
        self.logger.debug("Failed to add view ({}) to rest-service".format(fq_python_class_view))
        return False

    def import_add_odms(self, fq_python_class_odm: str, database_name, collection_name) -> bool:
        '''
        Import a module and patch the umongo meta of a provided ODM class
        :param fq_python_class_odm: Python class in dot'ted notation, e.g. `foo.models.ModelX`
        :param database_name: Mongo database the documents live in
        :param collection_name: Mongo collection the documents live in
        :return: bool
        '''
        self.logger.debug("Adding ODM ({}) to rest-service".format(fq_python_class_odm))
        python_class = self.load_class(fq_python_class_odm)
        if python_class is not None and self.default_mongo_engine is not None:
            kargs = {ODM_DATABASE: database_name,
                     ODM_COLLECTION: collection_name,
                     ODM_CONNECTION: self.default_mongo_engine
                     }
            r = patch_umongo_meta(python_class, **kargs)
            self.logger.debug("Finished adding ODM ({}) to rest-service".format(fq_python_class_odm))
            return r
        self.logger.debug("Failed to add ODM ({}) to rest-service".format(fq_python_class_odm))
        return False

    def import_add_orms(self, fq_python_class_orm: str, table_name) -> bool:
        '''
        Import a module and patch the SQLAlchemy meta of a provided ORM class
        :param fq_python_class_orm: Python class in dot'ted notation, e.g. `foo.models.ModelX`
        :param table_name: database table backing the ORM class
        :return: bool
        '''
        self.logger.debug("Adding ORM ({}) to rest-service".format(fq_python_class_orm))
        python_class = self.load_class(fq_python_class_orm)
        if python_class is not None and self.default_sqla_engine is not None:
            kargs = {ORM_TABLE: table_name, }
            r = patch_sqlalchemy_meta(python_class, **kargs)
            self.logger.debug("Finished adding ORM ({}) to rest-service".format(fq_python_class_orm))
            return r
        self.logger.debug("Failed to add ORM ({}) to rest-service".format(fq_python_class_orm))
        return False

    def add_odm(self, database_name, collection_name, odm_class):
        """Patch `odm_class` to use this service's Mongo client."""
        # NOTE(review): `mongddb_client` looks like a typo for
        # `mongodb_client`, and it is never assigned in this class —
        # confirm where it is set before renaming.
        if self.mongddb_client is None:
            return False
        kargs = {'mongo_database': database_name,
                 'mongo_collection': collection_name,
                 'mongo_connection': self.mongddb_client
                 }
        patch_umongo_meta(odm_class, **kargs)
        return True

    @classmethod
    def from_config(cls):
        """Build a service from the REST_SERVICE_BLOCK config section,
        falling back to DEFAULT_VALUES for missing keys."""
        cdict = Config.get_value(REST_SERVICE_BLOCK)
        if cdict is None:
            cdict = {}
        kwargs = {}
        for k, v in cls.DEFAULT_VALUES.items():
            kwargs[k] = cdict.get(k, v)
        return cls(**kwargs)

    def run(self, debug=False):
        """Start the HTTP server in a background process."""
        self.logger.debug("Preparing to start rest-service")
        # BUG FIX: the log previously reported `ssl_context is None`,
        # which always read True here; report the configured flag instead.
        self.logger.info("Starting the application {}:{} using ssl? {}".format(self.get_listening_host(),
                                                                               self.get_listening_port(),
                                                                               self.get_use_ssl()))
        # Created a separate process so that the application will run in the background
        # this lets me manage it from a distance if need be
        self.bg_thread = Process(target=self.start_app, args=(debug,))
        self.bg_thread.start()
        return True

    def start_app(self, debug):
        """Serve the Quart app with Hypercorn (runs in the child process)."""
        ssl_context = None
        self.logger.debug("Preparing to start rest-service")
        if self.get_use_ssl() and self.get_key_pem() is not None and \
                self.get_cert_pem() is not None:
            self.logger.debug("Preparing ssl_context with cert:{} and key:{}".format(self.get_cert_pem(),
                                                                                     self.get_key_pem()))
            ssl_context = (self.get_cert_pem(), self.get_key_pem())
        # BUG FIX: `ssl_context is None` inverted the reported ssl state.
        self.logger.info("Starting the application {}:{} using ssl? {}".format(self.get_listening_host(),
                                                                               self.get_listening_port(),
                                                                               ssl_context is not None))
        # looked at the hypercorn and quart Python project to figure out
        # how to start the application separately, without going through
        # the Quart.app.run APIs
        self.app.debug = debug
        config = HyperConfig()
        config.debug = debug
        config.access_log_format = "%(h)s %(r)s %(s)s %(b)s %(D)s"
        config.accesslog = self.logger.logger
        config.bind = ["{host}:{port}".format(**{'host': self.get_listening_host(),
                                                 'port': self.get_listening_port()})]
        config.certfile = self.get_cert_pem() if self.get_use_ssl() else None
        config.keyfile = self.get_key_pem() if self.get_use_ssl() else None
        config.errorlog = config.accesslog
        config.use_reloader = True
        scheme = "https" if config.ssl_enabled else "http"
        self.logger.info("Running on {}://{} (CTRL + C to quit)".format(scheme, config.bind[0]))
        loop = asyncio.get_event_loop()
        if loop is not None:
            loop.set_debug(debug or False)
            loop.run_until_complete(serve(self.app, config))
        else:
            asyncio.run(serve(self.app, config), debug=config.debug)

    def stop(self):
        """Terminate the background server process, if running."""
        if self.bg_thread is not None:
            self.bg_thread.terminate()
            self.bg_thread.join()

    def has_view(self, view):
        """Return True when a view with the same name is registered."""
        if view is not None:
            return view.name in set([i.name for i in self.views])
        return False

    def get_json(self, the_request):
        """Return the request's JSON payload, or None when unparsable."""
        try:
            return the_request.json()
        except Exception:
            # Narrowed from a bare `except:`; malformed payloads yield None.
            return None

    def add_view(self, view):
        """Register an already-imported view class with the app."""
        self.views.append(view)
        view.bind_application(self.app)

    # --- config-backed accessors -------------------------------------
    def get_using_postgres(self):
        return getattr(self, USING_SQLALCHEMY, False)

    def get_using_mongo(self):
        return getattr(self, USING_MONGO, False)

    def get_listening_host(self):
        return getattr(self, HOST)

    def get_listening_port(self):
        return getattr(self, PORT)

    def get_validate_ssl(self):
        return getattr(self, VALIDATE_SSL)

    def get_cert_pem(self):
        return getattr(self, CERT_PEM)

    def get_key_pem(self):
        return getattr(self, KEY_PEM)

    def get_use_uwsgi(self):
        return getattr(self, USE_UWSGI)

    def get_host(self):
        return getattr(self, HOST)

    def get_port(self):
        return getattr(self, PORT)

    def get_use_ssl(self):
        return getattr(self, USE_SSL)

    def get_mongo_odms(self):
        return getattr(self, MONGO_ODMS, {})

    def get_sqlalchemy_orms(self):
        return getattr(self, SQLALCHEMY_ORMS, {})

    def get_heartbeat(self, *args, **kargs):
        # Kept as a plain Exception (not NotImplementedError) so existing
        # callers that catch Exception keep working.
        raise Exception("Not implemented")
|
# Report which known value the string matches, via a lookup table
# instead of an if/elif chain.
a = "abcde"
_messages = {"abc": "a=abc", "abcd": "a=abcd"}
print(_messages.get(a, "a!=abcd"))
import math
# Brute-force search for simple square-root expressions approximating π.
# Two candidate forms are tried for every n up to 1,000,000, and every
# new best approximation (smallest absolute error so far) is printed.
number=math.pi
g=float(10)   # best (smallest) absolute error seen so far; 10 = "worse than anything"
x=int(0)
y=int(0)      # NOTE(review): y is declared global in both functions but never read or updated — appears unused
"""sqrt(n)-number"""
def sqr_n(n):
    # Candidate: frac(√n) + 3, i.e. √n minus its integer part, shifted by 3.
    # The printed form "√n - (⌊√n⌋ - 3)" is the same value rearranged.
    global y,g
    ans=abs((math.sqrt(n)-math.sqrt(n)//1)+3)
    if abs(ans-math.pi)<g :
        g=abs(ans-math.pi)
        print("√"+str(n)+"-"+str(int(math.sqrt(n)//1-3))+"="+str(ans))
"""sqrt(n)-sqrt(m)"""
def sqr_sqr(n):
    # Candidate: √n - √m, where m = ⌊(√n - π)²⌋ so that √m ≈ √n - π.
    global y,g
    m=int(pow(math.sqrt(n)-number,2))
    ans=math.sqrt(n)-math.sqrt(m)
    if abs(ans-math.pi)<g :
        g=abs(ans-math.pi)
        print("√"+str(n)+"-"+"√"+str(m)+"="+str(ans))
# Try both candidate forms for every n in [1, 1,000,000].
for x in range(1,1000001):
    sqr_n(x)
    sqr_sqr(x)
|
###############################################################################
# flag complexes for graphs:
# This is used e.g. for computing the nerve of a cover.
import numpy as np
###############################################################################
def _lower_neighbours(G, u):
"""Given a graph `G` and a vertex `u` in `G`, we return a list with the
vertices in `G` that are lower than `u` and are connected to `u` by an
edge in `G`.
Parameters
----------
G : :obj:`Numpy Array(no. of edges, 2)`
Matrix storing the edges of the graph.
u : int
Vertex of the graph.
Returns
-------
lower_neighbours : :obj:`list`
List of lower neighbours of `u` in `G`.
"""
lower_neighbours = []
for e in G:
if max(e) == u:
lower_neighbours.append(min(e))
return np.unique(lower_neighbours)
###############################################################################
# Main function
def flag_complex(G, no_vertices, max_dim):
    """Compute the flag complex of a graph `G` up to a maximum dimension `max_dim`.

    Parameters
    ----------
    G : :obj:`Numpy Array(no. of edges, 2)`
        Matrix storing the edges of the graph.
    no_vertices : int
        Number of vertices in graph `G`
    max_dim : int
        Maximum dimension

    Returns
    -------
    fl_cpx : :obj:`list(Numpy Array)`
        Flag complex of `G`. The `0` entry stores the number of vertices.
        For a higher entry `i`, `fl_cpx[i]` stores a :obj:`Numpy Array`
        matrix with the `i` simplices from `fl_cpx`.

    Raises
    ------
    ValueError
        If `max_dim` is negative.
    """
    if max_dim < 0:
        # BUG FIX: raise with the message attached instead of
        # print-then-bare-raise, so callers see the reason in the exception.
        raise ValueError("Cannot compute a complex of dimension: {}".format(max_dim))
    fl_cpx = []
    for i in range(max_dim + 1):
        fl_cpx.append([])
    fl_cpx[0] = no_vertices
    if max_dim == 0:
        return fl_cpx
    fl_cpx[1] = np.copy(G)
    # Build flag complex inductively: a d-simplex extends to a
    # (d+1)-simplex by any common lower neighbour of all its vertices.
    for d in range(1, max_dim):
        for simplex in fl_cpx[d]:
            N = _lower_neighbours(fl_cpx[1], simplex[0])
            for v in simplex[1:]:
                N = np.intersect1d(N, _lower_neighbours(fl_cpx[1], v))
            # find simplices containing simplex and add them
            if np.size(N) > 0:
                simplices = np.ones((np.size(N), 1 + np.size(simplex)))
                simplices = np.multiply(np.append([1], simplex), simplices)
                simplices[:, 0] = N
                if isinstance(fl_cpx[d+1], list):
                    # first batch for this dimension replaces the placeholder list
                    fl_cpx[d + 1] = simplices
                else:
                    fl_cpx[d+1] = np.append(fl_cpx[d + 1], simplices, 0)
    return fl_cpx
|
from graph_db.access.execute import QueryExecutor
from graph_db.access.parser import Parser
from graph_db.access.result import ResultSet
from graph_db.engine.api import EngineAPI
class Cursor:
    """Database-cursor facade over a graph engine: execute a textual
    query, then fetch results from the retained ResultSet."""

    def __init__(self, graph_engine: EngineAPI):
        self.graph_engine = graph_engine
        self.parser = Parser()
        self.query_executor = QueryExecutor()
        # Populated by execute(); None until the first query runs.
        self.result_set = None

    def execute(self, query: str):
        """Parse `query` and run it against the engine, storing the result.

        BUG FIX: the original wrapped this in
        `except SyntaxError as e: raise e`, a no-op catch-and-rethrow;
        SyntaxError from the parser still propagates to the caller.
        """
        func, params = self.parser.parse_query(query)
        self.result_set = self.query_executor.execute(self.graph_engine, func, **params)

    def fetch_all(self) -> ResultSet:
        """Return the full result set of the last executed query."""
        return self.result_set

    def fetch_one(self):
        """Return the first row of the last result set."""
        return self.result_set[0]

    def count(self):
        """Return the number of rows in the last result set."""
        return len(self.result_set)
|
import msgpack
# ------------------------------------------------------------------
# ------------------------------------------------------------------
class Serialisation():
    """Serialisation interface: msgpack packing plus a recursive
    unpacker that walks containers and decodes bytes payloads.
    """
    # Class-level default so the classmethods below can fall back to
    # cls.log even when no instance was ever created.
    # BUG FIX: previously `log` existed only on instances, so the
    # fallback in pack()/unpack() raised AttributeError.
    log = None

    # ------------------------------------------------------------------
    def __init__(self, log=None):
        self.log = log
        return

    # ------------------------------------------------------------------
    @classmethod
    def pack(cls, data_in, log=None):
        """msgpack-encode `data_in`; log and re-raise on failure."""
        try:
            data = msgpack.packb(data_in)
        except Exception as e:
            if log is None:
                log = cls.log
            if log is not None:
                log.error([
                    ['r', ' - could not do pack() for ',
                     str(data_in)],
                    ['r', '\n', e],
                ])
            raise e
        return data

    # ------------------------------------------------------------------
    @classmethod
    def unpack(cls, data_in, log=None):
        """Recursively decode `data_in`.

        bytes are msgpack-decoded (falling back to utf-8 text); lists,
        sets, dicts and tuples are rebuilt element-wise; primitives and
        None pass through unchanged.  Unknown types raise.
        """
        try:
            if isinstance(data_in, str):
                data = data_in
            elif isinstance(data_in, bytes):
                try:
                    # NOTE(review): the `encoding` kwarg was removed in
                    # msgpack >= 1.0; on a modern msgpack this raises and
                    # we fall through to the plain utf-8 decode below —
                    # confirm the pinned msgpack version.
                    data = msgpack.unpackb(data_in, encoding="utf-8")
                except Exception:
                    try:
                        data = data_in.decode("utf-8")
                    except Exception as e:
                        if data_in == b'':
                            data = ''
                        else:
                            raise e
            elif isinstance(data_in, list):
                data = []
                for data_now_0 in data_in:
                    data += [cls.unpack(data_now_0, log)]
            elif isinstance(data_in, set):
                data = set()
                for data_now_0 in data_in:
                    data.add(cls.unpack(data_now_0, log))
            elif isinstance(data_in, dict):
                data = dict()
                for k, v in data_in.items():
                    data[cls.unpack(k, log)] = cls.unpack(v, log)
            elif isinstance(data_in, tuple):
                data = ()
                for v in data_in:
                    data += (cls.unpack(v, log), )
            elif isinstance(data_in, (int, float, complex, bool)) or (data_in is None):
                data = data_in
            else:
                raise Exception('unknown data type', data_in)
        except Exception as e:
            if log is None:
                log = cls.log
            if log is not None:
                log.error([
                    ['r', ' - could not do unpack() for ',
                     str(data_in)],
                    ['r', '\n', e],
                ])
            raise e
        return data

    # ------------------------------------------------------------------
    @classmethod
    def is_empty_obj(cls, data):
        """Return True when `data` is None or an empty string/bytes/container;
        numbers are never 'empty'.  Unknown types raise."""
        if data is None:
            is_empty = True
        elif isinstance(data, str):
            is_empty = (data == '')
        elif isinstance(data, bytes):
            is_empty = (data == b'')
        elif isinstance(data, (list, set, tuple)):
            is_empty = (len(data) == 0)
        elif isinstance(data, dict):
            is_empty = (len(data.keys()) == 0)
        elif isinstance(data, (int, float, complex, bool)):
            is_empty = False
        else:
            raise Exception('unknown data type', data)
        return is_empty

s = Serialisation()
"""
arquivo = open('arquivo.txt', 'w') #read write a-arquivo+atualizacao b-binario +atualização
arquivo.write('machines')
arquivo.write('aprende python')
arquivo.close()
arquivo = open('arquivo.txt', 'a') #texto + atualizacao
arquivo.write("\n\n\lololo")
arquivo.close()
texto = '''Tres aspas simples
significam que o texto ocupa mais de uma
linha'''
with open('arquivo.txt', 'a') as f:
    f.write(texto)
"""
# Read 'arquivo.txt' back and echo each line (the commented-out string
# above shows earlier write/append experiments from the tutorial).
# NOTE(review): readlines() loads the whole file into memory; iterating
# the file object directly would stream it — behaviour kept as-is.
with open('arquivo.txt', 'r') as arquivo:
    for linha in arquivo.readlines():
        print(linha)
#!/usr/bin/env python
#
# Copyright (c) 2019 Opticks Team. All Rights Reserved.
#
# This file is part of Opticks
# (see https://bitbucket.org/simoncblyth/opticks).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Regenerate the cpp for each mesh with::
gdml2gltf.py # writes .gltf and extras beside the .gdml in standard IDFOLD location
See::
opticks-nnt LVID
opticks-nnt-vi LVID
"""
import logging, os
log = logging.getLogger(__name__)
# Opening section of the generated C++ translation unit: includes plus
# the start of main() with Opticks logging initialisation.
template_head = r"""
// regenerate with gdml2gltf.py
#include <vector>
#include "SSys.hh"
#include "NGLMExt.hpp"
#include "NCSG.hpp"
#include "NSceneConfig.hpp"
#include "NBBox.hpp"
#include "NNode.hpp"
#include "NPrimitives.hpp"
#include "PLOG.hh"
#include "NPY_LOG.hh"
int main(int argc, char** argv)
{
PLOG_(argc, argv);
NPY_LOG__ ;
"""
# Closing section of the translation unit; %(root)s is interpolated with
# the variable name of the root CSG node.
template_tail = r"""
%(root)s.update_gtransforms();
unsigned verbosity = SSys::getenvint("VERBOSITY", 1) ;
%(root)s.verbosity = verbosity ;
//%(root)s.dump() ;
const char* boundary = "Rock//perfectAbsorbSurface/Vacuum" ;
%(root)s.set_boundary(boundary);
const char* gltfconfig = "" ;
const NSceneConfig* config = new NSceneConfig(gltfconfig);
NCSG* csg = NCSG::FromNode(&%(root)s, config);
csg->dump();
csg->dump_surface_points("dsp", verbosity > 2 ? 500 : 20 );
return 0 ;
}
"""
# Node-construction body supplied by the caller via the 'body' key.
template_body = r"""%(body)s"""
# Minimal example body used by the __main__ self-test below.
test_body = r"""
nsphere a = make_nsphere(0,0,0,100) ;
"""
from opticks.ana.base import now_
def indent_(s):
    """Return *s* with the template indent prefixed to every line."""
    return "\n".join(" %s" % line for line in s.split("\n"))
def trim_(s):
    """Drop empty lines from *s* and rejoin the remainder."""
    non_empty = [line for line in s.split("\n") if line]
    return "\n".join(non_empty)
class NNodeTestCPP(dict):
    """Template-parameter dict (expects keys: name, root, body) that
    renders a standalone C++ NNode test program via str(self)."""

    def __init__(self, *args, **kwa):
        dict.__init__(self, *args, **kwa)

    def _get_stamp(self):
        # Generation timestamp comment for the emitted source.
        return "// generated by nnode_test_cpp.py : %s " % ( now_() )
    stamp = property(_get_stamp)

    def _get_runline(self):
        # Comment lines showing how to build/run the generated test.
        return "\n".join([
            "// opticks-;opticks-nnt %(name)s " % self,
            "// opticks-;opticks-nnt-vi %(name)s " % self,""])
    runline = property(_get_runline)

    head = property(lambda self:template_head % self)
    body = property(lambda self:template_body % self)
    tail = property(lambda self:template_tail % self)
    path = property(lambda self:os.path.expandvars("$TMP/tbool%(name)s.cc" % self))

    def save(self):
        """Write the rendered translation unit to self.path."""
        log.info("saving to %s " % self.path)
        # BUG FIX: the Python 2 builtin `file()` does not exist in
        # Python 3 (this module already uses print()); use open() in a
        # context manager so the handle is flushed and closed.
        with open(self.path, "w") as fp:
            fp.write(str(self))

    def test(self):
        """Print the individual template sections for inspection."""
        print(self.stamp)
        print(self.head)
        print(self.body)
        print(self.tail)

    def __str__(self):
        # Assemble: stamp, runline, head, indented stamp+runline+body, tail.
        return "\n".join([self.stamp, self.runline, self.head, indent_("\n".join([self.stamp,self.runline])), indent_(self.body), self.tail])
if __name__ == '__main__':
    # Self-test: render the example translation unit (name "0", root
    # node variable "a", body from test_body) to stdout.
    logging.basicConfig(level=logging.INFO)
    ntc = NNodeTestCPP(name="0", root="a", body=test_body)
    #ntc.test()
    print(ntc)
    #ntc.save()
|
import nltk
from nltk.corpus import stopwords
import re
def clean_str(string):
    """
    Tokenization/string cleaning for all datasets except for SST.
    from https://github.com/harvardnlp/sent-conv-torch/blob/master/preprocess.py
    """
    # Ordered (pattern, replacement) table applied left to right; the
    # final pass collapses the extra whitespace introduced above.
    substitutions = [
        (r"[^A-Za-z0-9(),!?\'\`]", " "),
        (r"\'s", " \'s"),
        (r"\'ve", " \'ve"),
        (r"n\'t", " n\'t"),
        (r"\'re", " \'re"),
        (r"\'d", " \'d"),
        (r"\'ll", " \'ll"),
        (r",", " , "),
        (r"!", " ! "),
        (r"\(", " ( "),
        (r"\)", " ) "),
        (r"\?", " ? "),
        (r"\s{2,}", " "),
    ]
    for pattern, replacement in substitutions:
        string = re.sub(pattern, replacement, string)
    return string.strip().lower()
# Run every English stop word through clean_str() and print each
# distinct resulting token.
a = set(stopwords.words('english'))
total_stop_word = [tok for word in a for tok in clean_str(word).split(" ")]
for word in set(total_stop_word):
    print(word)
|
# BUG FIX: `from student1.py import ...` is wrong — module names never
# include the .py extension (Python would look for a submodule `py`
# inside a package `student1`).
from student1 import mydivisible7not5
print(mydivisible7not5(10))
|
from tensorflow import keras
from tensorflow.keras import layers
class Seq2Seq():
    """LSTM encoder–decoder (sequence-to-sequence) model.

    Builds a teacher-forcing training model and, by reusing the same
    layers, a pair of step-wise inference models (encoder, decoder).
    """
    def __init__(
            self, num_encoder_tokens: int=64,
            num_decoder_tokens: int=64,
            latent_dim: int=300,
            model_weights: str=None
    ):
        """
        :param num_encoder_tokens: size of the encoder's per-step input vector
        :param num_decoder_tokens: size of the decoder's per-step input/output vector
        :param latent_dim: LSTM hidden/cell state width
        :param model_weights: optional path to pre-trained weights to load
        """
        self.latent_dim = latent_dim
        self.num_decoder_tokens = num_decoder_tokens
        self.num_encoder_tokens = num_encoder_tokens
        # Encoder: consume the input sequence; only the final h/c states
        # are kept (outputs discarded) and passed to the decoder.
        self.encoder_input = layers.Input(shape=(None, self.num_encoder_tokens), name="encoder_input")
        self.encoder_lstm = layers.LSTM(self.latent_dim, return_state=True, name="encoder_lstm")
        _, state_h, state_c = self.encoder_lstm(self.encoder_input)
        self.encoder_state = [state_h, state_c]
        # Decoder: full output sequence, seeded with the encoder state.
        self.decoder_input = layers.Input(shape=(None, self.num_decoder_tokens), name="decoder_input")
        self.decoder_lstm = layers.LSTM(self.latent_dim, return_sequences=True, return_state=True, name="decoder_lstm")
        decoder_lstm_output, _, _ = self.decoder_lstm(self.decoder_input, initial_state=self.encoder_state)
        # Softmax over the decoder vocabulary at every time step.
        self.decoder_dense = layers.Dense(num_decoder_tokens, activation="softmax", name="decoder_dense")
        dense_output = self.decoder_dense(decoder_lstm_output)
        self.training_model = keras.Model([self.encoder_input, self.decoder_input], dense_output)
        if model_weights:
            self.training_model.load_weights(model_weights)
        self.encoder_model, self.decoder_model = self.create_inference_models()

    def create_inference_models(self):
        """Build the step-wise inference pair from the trained layers.

        :return: [encoder_model, decoder_model]; the decoder takes the
            previous token plus (h, c) state and returns the next-token
            distribution plus the updated state.
        """
        encoder_model = keras.Model(self.encoder_input, self.encoder_state, name="encoder_model")
        self.decoder_state_input_h = layers.Input(shape=(self.latent_dim,), name="decoder_state_input_h")
        self.decoder_state_input_c = layers.Input(shape=(self.latent_dim,), name="decoder_state_input_c")
        decoder_states_inputs = [self.decoder_state_input_h, self.decoder_state_input_c]
        decoder_outputs, state_h, state_c = self.decoder_lstm(
            self.decoder_input, initial_state=decoder_states_inputs)
        self.decoder_states = [state_h, state_c]
        decoder_output = self.decoder_dense(decoder_outputs)
        decoder_model = keras.Model(
            [self.decoder_input] + decoder_states_inputs,
            [decoder_output] + self.decoder_states,
            name="decoder_model")
        return [encoder_model, decoder_model]

# model.create_inference_models()
# enc_model = model.encoder_model
# enc_model.summary()
# plot_model(enc_model, show_shapes=True)
|
# Read n and a list of n integers, then print a 1-based position related
# to the maximum element (contest-style snippet).
n = int(input())
arr = list(map(int,input().strip().split()))[:n]
m = max(arr)   # maximum value in the array
for j in range(len(arr)-1):
    if arr[j] == arr[j+1]:
        # adjacent duplicates -> answer forced to -1 for this j
        ans = -1
    elif m == arr[-1]:
        # maximum sits at the end -> position is the array length
        ans = len(arr)
    else:
        # otherwise locate the maximum and report its 1-based index
        # when it is a local peak
        for i in range(len(arr)-1):
            if arr[i] == m:
                if arr[i] > arr[i+1]:
                    ans = i+1
                elif arr[i] > arr[i-1]:
                    ans = i+1
# NOTE(review): `ans` is rewritten on every outer iteration, so only the
# final j decides the output, and for a single-element array the loop
# never runs and this print raises NameError — confirm intended logic.
print(ans)
|
# The ProcessTable class contains all of the processes in the system.
# The internal table of processes is represented as a circular buffer
# It will be a list of a fixed number of processes which will be filled
# in sequentially. When the last process in the list is populated,
# the structure wraps around to add values from the beginning
from process import Process
import threading
# TODO: Throughout this class, add locking where needed
class ProcessTable:
    """Fixed-size table of Process entries managed as a circular buffer.

    A slot whose process ID is -1 is free.  Process IDs are 1-based
    (pid == slot index + 1).  add() and remove() are guarded by a lock.
    """
    # Constructor
    # The constructor takes the size that the table should be.
    def __init__(self, tableSize=100):
        self.table_size = tableSize
        # Index of the most recently allocated slot; -1 means none yet.
        self.current_index = -1
        self.table = []
        # Initialize the list containing the process entries
        # with default values
        for _ in range(0, tableSize):
            self.table.append(Process())
        # This lock will be used to synchronize access to the
        # Process table data structure
        self.lock = threading.Lock()

    # Return the table size
    def getSize(self):
        return self.table_size

    # Add a process to the table by name
    # Returns the process ID of the table, or
    # -1 if the process can not be added.
    def add(self, p):
        self.lock.acquire()
        # We start looking for an empty spot one above the location
        # of the current process, and continue incrementing until we either
        # find an empty slot or wrap around back to the current location.
        # Note the use of the remainder operator (%) to wrap around to the beginning as needed.
        new_index = (self.current_index + 1) % self.table_size
        while (new_index != self.current_index) and (self.table[new_index].processId() != -1) :
            new_index = (new_index + 1) % self.table_size
        if new_index == self.current_index:
            # new_index wrapped around to the current index, which means we are out of space
            self.lock.release()
            return -1
        # Found the place to put the new entry
        # The process ID is one greater than the index, because the list starts at 0 but we
        # want process ID's to start at 1
        new_process_id = new_index + 1
        self.table[new_index].setProcessName(p.processName())
        self.table[new_index].setProcessId(new_process_id)
        self.table[new_index].setPriority(p.priority())
        self.table[new_index].setCpuNeeded(p.cpuNeeded())
        self.current_index = new_index
        self.lock.release()
        return new_process_id

    # Remove the indicated process ID from the table
    # Returns True if successfully removed, or False if the
    # process ID is not found
    # NOTE(review): only name and id are reset — priority/cpuNeeded keep
    # stale values, and an out-of-range process_id raises IndexError;
    # confirm whether that is acceptable.
    def remove(self, process_id):
        self.lock.acquire()
        # The index will be one less than the process ID
        index = process_id -1
        if (self.table[index].processId() != -1):
            # the process is there. Remove it.
            self.table[index].setProcessName("")
            self.table[index].setProcessId(-1)
            return_value = True
        else :
            return_value = False
        self.lock.release()
        return return_value

    # Find the indicated process by process ID.
    # Returns the stored Process entry itself (NOT a copy, despite the
    # original comment) — callers mutating it mutate the table.
    # NOTE(review): this read is not protected by self.lock (see the
    # class TODO about adding locking where needed).
    def find(self, process_id):
        return self.table[process_id - 1]

    # Method that is called when someone calls print() on a ProcessTable.
    # This invokes the __str__ method for each active process (i.e. each
    # process with a pid that isn't -1), to print that process on its own line
    def __str__(self):
        s = ""
        for i in range(0, self.table_size):
            if self.table[i].processId() != -1 :
                if s:
                    s += "\n"
                s += self.table[i].__str__()
        return s
|
import datetime as dt
import matplotlib.pyplot as plt
from matplotlib import style
import pandas as pd
import pandas_datareader.data as web

style.use('ggplot')
#validate the date
#validate the symbol
company_name = input('please enter the company\'s symbol: ')
startdate_entry = input('please choose your start date (YYYY,MM,DD): ').split(',')
enddate_entry = input('please choose your end date (YYYY,MM,DD): ').split(',')
start = dt.datetime(int(startdate_entry[0]), int(startdate_entry[1]), int(startdate_entry[2]))
end = dt.datetime(int(enddate_entry[0]), int(enddate_entry[1]), int(enddate_entry[2]))
df = web.DataReader(company_name, 'yahoo', start, end)
print(df.head())

def date_validation(date_parts):
    """Return True when `date_parts` is a [YYYY, MM, DD] triple forming
    a real calendar date.

    BUG FIX: the original `def date_validation:` was a SyntaxError
    (missing parameter list and body).  Completed as a validator for the
    split input above — TODO: wire it into the prompts so bad dates are
    re-asked instead of crashing.
    """
    try:
        dt.datetime(int(date_parts[0]), int(date_parts[1]), int(date_parts[2]))
        return True
    except (ValueError, IndexError, TypeError):
        return False
#MatthewMascoloQuiz2
#I pledge my honor that I have abided
#by the Stevens Honor System. Matthew Mascolo
#
#Coding segment to Quiz #2
def addition(num1, num2):
    """Return the sum of num1 and num2 after coercing both to int."""
    return int(num1) + int(num2)
def subtraction(num1, num2):
    """Return num1 minus num2 after coercing both to int."""
    return int(num1) - int(num2)
def multiplication(num1, num2):
    """Return the product of num1 and num2 after coercing both to int."""
    return int(num1) * int(num2)
def division(num1, num2):
    """Return num1 divided by num2 (true division) after coercing both
    to int.  A zero divisor raises ZeroDivisionError, as before."""
    return int(num1) / int(num2)
def vowelCount(sentence):
    """Count the vowels (a, e, i, o, u — either case) in sentence."""
    return sum(1 for ch in sentence if ch in "AEIOUaeiou")
def encrypt(string):  # Encrypts a string using the encryption code we learned in class
    """Shift each character's code point up by 5 and join the decimal
    codes, each followed by a single space (note the trailing space)."""
    return "".join("%d " % (ord(ch) + 5) for ch in string)
def main():
    """Interactive menu: math operations (add/sub/mul/div) or string
    operations (vowel count / encrypt).  Invalid or out-of-range entries
    raise Exception with the same messages as before."""
    def ask_number(prompt):
        # Shared guard: every numeric entry must consist of digits only.
        entry = input(prompt)
        if not entry.isdigit():
            raise Exception("Please enter a numerical digit.")
        return entry

    answer = input("For Mathematical Functions Please Enter the Number 1\nFor String Operations Please Enter the Number 2\n")
    if answer == "1":
        answerMath = ask_number("For Addition, Please Enter the Number 1\nFor Subtraction, Please Enter the Number 2\nFor Multiplication, Please Enter the Number 3\nFor Division, Please Enter the Number 4\n")
        choice = int(answerMath)
        if choice == 1:  # Addition
            number1 = ask_number("Enter first number:")
            number2 = ask_number("Enter second number:")
            print("The sum of", number1, "and", number2, "is:", addition(number1, number2))
        elif choice == 2:  # Subtraction
            subnum1 = ask_number("Enter first number:")
            subnum2 = ask_number("Enter second number:")
            print("The difference between", subnum1, "and", subnum2, "is:", subtraction(subnum1, subnum2))
        elif choice == 3:  # Multiplication
            num1 = ask_number("Enter first number:")
            num2 = ask_number("Enter second number:")
            print("The product of", num1, "and", num2, "is:", multiplication(num1, num2))
        elif choice == 4:  # Division
            divnum1 = ask_number("Enter first number:")
            divnum2 = ask_number("Enter second number:")
            print("The qoutient of", divnum1, "and", divnum2, "is:", division(divnum1, divnum2))
        else:
            raise Exception("Out of range error. Please enter numbers 1, 2, 3, or 4.")
    elif answer == "2":
        answerString = ask_number("To Determine How Many Vowels Are In A String, Enter 1.\nTo Encrypt A String, Enter 2.\n")
        choice = int(answerString)
        if choice == 1:  # Vowel counting
            enteredString = input("Enter a string:")
            print("There amount of vowels in the string you entered is:", vowelCount(enteredString))
        elif choice == 2:  # Encryption
            encryptString = input("Enter a string:")
            print("The Encrypted String is:", encrypt(encryptString))
        else:
            raise Exception("Out of range error. Please enter numbers 1, or 2.")
    elif answer.isdigit():
        raise Exception("Out of range error. Please enter number 1 or 2.")
    elif isinstance(answer, str):
        raise Exception("Please enter a numerical digit.")
main()
|
from django.apps import AppConfig
class FaceLoginConfig(AppConfig):
    """Django application configuration for the `face_login` app."""
    name = 'face_login'
|
from django.db import models
from django.conf import settings
from django.urls import reverse
from django.utils.text import slugify
from django.utils.crypto import get_random_string
# (stored value, human-readable label) pairs for the CharField `choices` below.
ROLE_CHOICE = (
    ('Admin', 'Admin'),
    ('Drinker', 'Drinker'),
    ('Lover', 'Lover'),
    ('Pro', 'Pro'),
)
PUBLISH_CHOICE = (
    ('Private', 'Private'),
    ('Public', 'Public'),
)
# presumably OB = official bottling, IB = independent bottling — TODO confirm
BOTTLING_CHOICE = (
    ('OB', 'OB'),
    ('IB', 'IB'),
)
BOOLEAN_CHOICE = (
    ('Yes', 'Yes'),
    ('No', 'No'),
)
def path_and_rename(instance, filename):
    """Build the storage path for a user's profile picture.

    The original extension is kept; the base name is the part of the
    username before any '@' plus a random 15-character suffix, so repeated
    uploads never collide.
    """
    # BUGFIX: `os` is not imported at module level in this file, so the
    # os.path.join below raised NameError at upload time; import it locally.
    import os
    upload_to = 'user'
    ext = filename.split('.')[-1]
    # get filename
    filename = 'profile/{}.{}'.format(instance.user.username.split('@')[0]+'_'+get_random_string(length=15), ext)
    # return the whole path to the file
    return os.path.join(upload_to, filename)
# Create your models here.
class UserRole(models.Model):
    # A single role per Django user (see ROLE_CHOICE for the allowed values).
    user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, unique=True, related_name="role")
    role = models.CharField(max_length=10, choices=ROLE_CHOICE)
    class Meta:
        ordering = ['-id']
class UserProfile(models.Model):
    """Per-user profile: nickname, avatar and denormalised activity counters."""
    user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, unique=True, related_name="profile")
    nickname = models.TextField(blank=True, null=True)
    profilepic = models.ImageField(upload_to=path_and_rename, blank=True, null=True)
    # Counters cached on the profile — presumably kept in sync by the views
    # that create wishlist entries / ratings; confirm against that code.
    wishlist = models.IntegerField(default=0)
    bottle_num = models.IntegerField(default=0)
    rate_num = models.IntegerField(default=0)
    distillery_num = models.IntegerField(default=0)
    class Meta:
        ordering = ['-id']
    def __str__(self):
        # BUGFIX: was `return user`, which raised NameError (and __str__ must
        # return a string anyway) — render the related user explicitly.
        return str(self.user)
class Menu(models.Model):
    # Self-referential navigation entry; `parent` links build the hierarchy.
    parent = models.ForeignKey('self', blank=True, null=True, on_delete=models.CASCADE)
    title = models.CharField(max_length=255)
    url = models.CharField(max_length=255, blank=True)
    icon = models.CharField(max_length=255, default='fa fa-search', blank=True, null=True)
    status = models.BooleanField(default=True)  # True = entry is visible
    lvl = models.IntegerField(blank=True)  # depth level; not null=True, so a value must be supplied
    def __str__(self):
        return self.title
    def get_children(self):
        """Return the visible direct children via the implicit reverse FK manager."""
        return self.menu_set.filter(status=True)
    def has_children(self):
        """True when at least one visible child exists."""
        if self.get_children():
            return True
        return False
    def get_absolute_url(self):
        # NOTE(review): reverses by title rather than a slug field — titles
        # must therefore be URL-safe; confirm against the URLconf.
        return reverse('selected_category', kwargs={"slug": self.title})
class Country(models.Model):
    # A country, with an optional image.
    name = models.CharField(max_length=20)
    image = models.ImageField(upload_to='country/uploads/%Y/%m/%d/', blank=True)
    def __str__(self):
        return self.name
class Region(models.Model):
    # A region within a Country, with an optional image.
    name = models.CharField(max_length=20)
    country = models.ForeignKey(Country, related_name='region_country', on_delete=models.CASCADE)
    image = models.ImageField(upload_to='region/uploads/%Y/%m/%d/', blank=True)
    def __str__(self):
        return self.name
class Distillery(models.Model):
    # A whisky distillery; `slug` is derived from `name` on every save.
    is_active = models.BooleanField(default=False)
    name = models.CharField(max_length=100, unique=True)
    region = models.ForeignKey(Region, related_name='distillery_region', on_delete=models.CASCADE)
    country = models.ForeignKey(Country, related_name='distillery_country', on_delete=models.CASCADE)
    year_founded = models.IntegerField(blank=True, null=True)
    owner = models.CharField(max_length=100)
    description = models.TextField(blank=True, null=True)
    photo = models.ImageField(upload_to='distillery/uploads/%Y/%m/%d/', blank=True, null=True)
    # Geographic coordinates for map display.
    lon = models.FloatField(blank=True, null=True)
    lat = models.FloatField(blank=True, null=True)
    # presumably the SMWS distillery code; 0 = none assigned — TODO confirm
    smws_code = models.IntegerField(default=0)
    official_site = models.CharField(max_length=150, blank=True, null=True)
    slug = models.SlugField(
        default='',
        editable=False,
        max_length=200,
    )
    def get_absolute_url(self):
        # NOTE(review): this reverses 'article-pk-slug-detail' while
        # WhiskyInfo reverses 'distillery-pk-slug-detail' — the two names
        # look swapped; verify against the URLconf.
        kwargs = {
            'pk': self.id,
            'slug': self.slug
        }
        return reverse('article-pk-slug-detail', kwargs=kwargs)
    def save(self, *args, **kwargs):
        # Regenerate the slug from the (unique) name on every save.
        value = self.name
        self.slug = slugify(value, allow_unicode=True)
        super().save(*args, **kwargs)
    def __str__(self):
        return self.name
class WhiskyInfo(models.Model):
    # A specific whisky bottling; `slug` is derived from `name` on save.
    name = models.CharField(max_length=100, unique=True)
    brand_series = models.CharField(max_length=100, blank=True)
    company = models.CharField(max_length=100, blank=True)
    distillery = models.ForeignKey(Distillery, related_name='whisky_distillery', on_delete=models.CASCADE)
    year = models.IntegerField(default=0)
    abv = models.FloatField(default=0)  # alcohol by volume
    general_desc = models.TextField(blank=True, null=True)
    casktype = models.CharField(max_length=100, blank=True, null=True)
    bottler = models.CharField(max_length=2, choices=BOTTLING_CHOICE)  # OB/IB
    photo = models.ImageField(upload_to='whisky/uploads/%Y/%m/%d/', blank=True, null=True)
    chill_filtration = models.CharField(max_length=3, choices=BOOLEAN_CHOICE)
    artificial_coloring = models.CharField(max_length=3, choices=BOOLEAN_CHOICE)
    cask_num = models.CharField(max_length=15, blank=True)
    bottle_num = models.CharField(max_length=15, blank=True)
    # Aggregate rating plus the number of ratings it is based on.
    rating = models.FloatField(default=0)
    num_rating = models.IntegerField(default=0)
    slug = models.SlugField(
        default='',
        editable=False,
        max_length=200,
    )
    def get_absolute_url(self):
        # NOTE(review): reverses 'distillery-pk-slug-detail' while Distillery
        # reverses 'article-pk-slug-detail' — looks swapped; verify URLconf.
        kwargs = {
            'pk': self.id,
            'slug': self.slug
        }
        return reverse('distillery-pk-slug-detail', kwargs=kwargs)
    def save(self, *args, **kwargs):
        # Regenerate the slug from the (unique) name on every save.
        value = self.name
        self.slug = slugify(value, allow_unicode=True)
        super().save(*args, **kwargs)
    def __str__(self):
        return self.name
class PersonalWhiskyNote(models.Model):
    # One user's flavour-profile scores for one whisky; every dimension is
    # optional (null when not rated).
    user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='per_user', on_delete=models.CASCADE)
    whisky = models.ForeignKey(WhiskyInfo, related_name='per_whisky', on_delete=models.CASCADE)
    flora = models.IntegerField(blank=True, null=True)
    fruity = models.IntegerField(blank=True, null=True)
    creamy = models.IntegerField(blank=True, null=True)
    nutty = models.IntegerField(blank=True, null=True)
    malty = models.IntegerField(blank=True, null=True)
    spicy = models.IntegerField(blank=True, null=True)
    smoky = models.IntegerField(blank=True, null=True)
    peaty = models.IntegerField(blank=True, null=True)
class GeneralWhiskyNote(models.Model):
    # Aggregated flavour profile per whisky — presumably averaged from the
    # PersonalWhiskyNote rows, with total_notes_num counting contributors;
    # confirm against the code that updates it.
    whisky = models.OneToOneField(WhiskyInfo, on_delete=models.CASCADE, unique=True, related_name="gen_whisky")
    flora = models.FloatField(default=0)
    fruity = models.FloatField(default=0)
    creamy = models.FloatField(default=0)
    nutty = models.FloatField(default=0)
    malty = models.FloatField(default=0)
    spicy = models.FloatField(default=0)
    smoky = models.FloatField(default=0)
    peaty = models.FloatField(default=0)
    total_notes_num = models.IntegerField(default=0)
class Comment(models.Model):
    # A user's tasting note on a whisky; visibility is controlled by
    # publish_choice (Private/Public).
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name='note_user')
    whisky = models.ForeignKey(WhiskyInfo, on_delete=models.CASCADE)
    note = models.TextField()
    publish_choice = models.CharField(max_length=10, choices=PUBLISH_CHOICE)
    rating = models.FloatField(default=0)
    created_at = models.DateTimeField(auto_now_add=True)
class Wishlist(models.Model):
    # Marks a whisky as wished-for by a user (one row per user/whisky pair).
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    whisky = models.ForeignKey(WhiskyInfo, on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True)
class Bar(models.Model):
    # A whisky bar; `slug` is derived from `name` on save, following the
    # same pattern used by Distillery and WhiskyInfo above.
    name = models.CharField(max_length=100, unique=True)
    region = models.ForeignKey(Region, related_name='bar_region', on_delete=models.CASCADE)
    country = models.ForeignKey(Country, related_name='bar_country', on_delete=models.CASCADE)
    description = models.TextField(blank=True, null=True)
    photo = models.ImageField(upload_to='bar/uploads/%Y/%m/%d/', blank=True, null=True)
    # Geographic coordinates for map display.
    lon = models.FloatField(blank=True, null=True)
    lat = models.FloatField(blank=True, null=True)
    slug = models.SlugField(
        default='',
        editable=False,
        max_length=200,
    )
    def save(self, *args, **kwargs):
        # Regenerate the slug from the (unique) name on every save.
        value = self.name
        self.slug = slugify(value, allow_unicode=True)
        super().save(*args, **kwargs)
    def __str__(self):
        return self.name
|
from django.db import models
class loginModel (models.Model):
    # Phone-number/password credential pair.
    # NOTE(review): the password is stored in PLAIN TEXT with no hashing —
    # use Django's auth framework or make_password() before production use.
    phone = models.CharField(max_length=20, unique=True)
    password = models.CharField(max_length=20)
|
# Generated by Django 3.0.8 on 2020-07-07 08:42
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration for the `college` app: creates
    # Professor, Student and Lecture.  Do not edit by hand — generate a new
    # migration for schema changes instead.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Professor',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Student',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
            ],
        ),
        # Lecture is created last because it references both models above.
        migrations.CreateModel(
            name='Lecture',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('attendees', models.ManyToManyField(related_name='lectures', to='college.Student')),
                ('lecturer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lectures', to='college.Professor')),
            ],
        ),
    ]
|
import operator
import re
import logging
import pickle
import utils.clean as cl
from utils import recuperate_punct
class Map(object):
    '''
    Post processes the word sequence with time stamps to add the
    target speaker and punctuation information using the original
    cleaned text of the intervention
    '''
    def __init__(self, intervention, alignment):
        # intervention: dict whose 'text' entry is a list of [speaker, text]
        # pairs; alignment: list of token dicts, each with at least a 'word'
        # key holding the cleaned, lower-cased token.
        self.intervention = intervention
        self.alignment = alignment
        self.full_text = ' '
        # Per-speaker word counts, used later to pick the target speaker.
        self.speaker_stats = {}
        corpus = []
        for speaker, text in intervention['text']:
            corpus.append(text)
            if not self.speaker_stats.get(speaker):
                self.speaker_stats[speaker] = 0
            self.speaker_stats[speaker] += len(text.split())
        self.full_text = ' '.join(corpus)
        self.target_speaker = None
        # Sentence-final punctuation vs pauses; they receive different
        # weights in enrich_alignment().
        self.re_stop = re.compile('\.|:|\?|!')
        self.re_comma = re.compile(',|;')
        # ASCII hyphen plus non-breaking hyphen and en-dash variants.
        self.dash = re.compile('-|‑|–')
        self.non_std_dash = re.compile('‑|–')
        # NOTE(review): hard-coded relative path — breaks when run from a
        # different working directory; confirm deployment layout.
        lexicon = '../parlament-scrape/utils/lexicon_set_ca3.bin'
        with open(lexicon, 'rb') as lexicon_file:
            self.lexicon_set = pickle.load(lexicon_file)
    def prepare(self):
        '''Run the full pipeline.  Order matters: check() may rewrite the
        intervention text that the later steps consume.'''
        self.check()
        self.find_speaker()
        self.enrich_alignment()
        self.align()
    def check(self):
        # Verify token parity between the joined original text and the
        # alignment; on mismatch, drop original tokens that cannot be
        # matched against the aligned (cleaned) tokens.
        if len(self.full_text.split()) != len(self.alignment):
            msg = 'the original and cleaned text do not have equal tokens'\
                  '\ncleaning the tokens not present in cmu cleaned text'
            logging.warning(msg)
            # remove the tokens not appearing in full_text
            # assumes len(self.full_text.split()) > len(self.alignment)
            # this might happen when symbols surrounded by white spaces
            # appear in the full_text
            i = 0
            skip = 0
            new_int_text = []
            for speaker, text in self.intervention['text']:
                int_text = ''
                #TODO pre alignment standardized cleaning for the original text
                text = self.non_std_dash.sub('-', text)
                text = re.sub('(\xad| (?=,)|)', '', text)
                text = cl.dash.sub(' ', text)
                text = cl.hyphenfix(text, self.lexicon_set)
                for word in text.split():
                    # skip is non-positive; i+skip points at the alignment
                    # token expected to correspond to this original word.
                    al_word = self.alignment[i+skip]['word']
                    if re.search(al_word, word.lower()) or\
                       re.search(self.dash.sub('',al_word),
                                 self.dash.sub('',word.lower())):
                        int_text += ' %s'%word
                    else:
                        if skip > -5:
                            # only warn about the first few mismatches
                            msg = "%s does not appear in clean text"%word
                            logging.warning(msg)
                        skip -= 1
                    i += 1
                msg = 'had to skip %i steps'%abs(skip)
                logging.warning(msg)
                new_int_text.append([speaker, text])
            self.intervention['text'] = new_int_text
            # NOTE(review): int_text holds only the LAST speaker's matched
            # words at this point — presumably full_text should be rebuilt
            # from all speakers; confirm intended behavior.
            self.full_text = int_text
    def find_speaker(self):
        '''
        Finds the main (desired) speaker in the dictonary which can have the
        mesa or interruptions
        Currently operates under the assumption that there are no interruptions
        Extracts only the speaker with the most word tokens
        TODO mesa needs to be carried from past steps
        '''
        # Ascending sort by word count: the last entry talks the most.
        speakers_sorted = sorted(self.speaker_stats.items(), key=operator.itemgetter(1))
        self.target_speaker = speakers_sorted[-1][0]
        if 'president' in self.target_speaker.lower():
            msg = 'could the target speaker be mesa?\n' + \
                  str(list(self.speaker_stats.keys()))
            logging.warning(msg)
    def enrich_alignment(self):
        '''
        Enriches the alignment dictionary with speaker and punctuation
        information.
        '''
        # create equivalent alignment dictionary from intervention dict
        reference_dicts = []
        if self.target_speaker == None:
            msg = 'non existent target speaker in mapping'
            logging.error(msg)
            raise ValueError(msg)
        for speaker, text in self.intervention['text']:
            # TODO cleaning needs to be treated consistently
            # this is a quick fix due to skips
            #text = re.sub('(\xad| - |,)', '', text)
            for word in text.split():
                token = {'word': word}
                if speaker == self.target_speaker:
                    token['target_speaker'] = True
                # Punctuation weights are used in segmentation search algorithm
                # commas get lower preference
                if self.re_stop.search(word):
                    token['punctuation'] = 1.0
                elif self.re_comma.search(word):
                    token['punctuation'] = 0.9
                reference_dicts.append(token)
        # assuming they are of the same length
        for reference, target in zip(reference_dicts, self.alignment):
            if not re.search(self.dash.sub('',target['word']),
                             self.dash.sub('',reference['word'].lower())):
                msg = '%s vs %s target not in reference'\
                      %(target['word'], reference['word'])
                logging.warning(msg)
            # Copy speaker/punctuation marks onto the alignment token.
            for key in ['target_speaker', 'punctuation']:
                if reference.get(key):
                    target[key] = reference[key]
    def align(self):
        # Map the decoded/aligned words back onto the original full text so
        # original casing and punctuation can be recovered downstream.
        alignment_words = [token['word'] for token in self.alignment]
        # create a list of tuples cleaned vs original word
        # NOTE(review): the loop variable `cl` below shadows the imported
        # `utils.clean` alias within this method.
        text_clean_tuples = recuperate_punct.clean(self.full_text)
        clean_words = [cl for cl, tx in text_clean_tuples]
        diff_wc = abs(len(text_clean_tuples)-len(alignment_words))
        if diff_wc > 5:
            logging.warning('word count difference is large: %i'%diff_wc)
        # align words from decode with the clean_words
        # first do the alignment with the clean versions in the text_clean_tuples
        align_seq = recuperate_punct.needle(clean_words, alignment_words)
        clean_aligned, full_text_aligned = align_seq
        # create the aligned version with the original words using text_clean_tuples
        original_aligned = recuperate_punct.get_original(clean_aligned,
                                                         text_clean_tuples)
        # no of decoded words will always be greater or equal to the original
        # hence it is necessary to find where the decode starts and ends
        # TODO might be unnecessary bcs alignment already has non decoded words
        i_start, i_end = recuperate_punct.get_start_end_indices(full_text_aligned)
        self.original_align_seq = list(zip(original_aligned[i_start:i_end+1],
                                           full_text_aligned[i_start:i_end+1]))
        recuperate_punct.get_original_alignment(self.alignment, self.original_align_seq)
|
# Demo: shortening small functions with lambda expressions.
# (Original Korean comment, translated: "I don't like tedious things =>
# trying to shorten long code...")
def SUM(a, b):
    return a + b
# The lambda below rebinds SUM, shadowing the def above; MUL is lambda-only.
SUM = lambda a, b:a + b
MUL = lambda a, b:a * b
print(SUM(10, 20))
print(MUL(10, 20))
# Lambdas can also be stored in a list and invoked by index.
funclist = [lambda a, b:a + b, lambda a, b:a * b]
print(funclist[0](10, 20))
print(funclist[1](10, 20))
|
"""
Created on September 14, 2015
:Program: WAFNinja
:ModuleName: fuzzer
:Version: 1.0
:Revision: 1.0.0
:Author: Khalil Bijjou
:Description: The purpose of the fuzz function is to automate the reverse-engineering of the WAF's rule set by sending various fuzzing strings and see what is blocked and what not.
In contrast to reverse-engineer the rule set manually, this function saves time, enhances the result by using a very broad amount of symbols and keywords and displays
results in a clear and concise way. The result is either displayed in form of a table directly in the CLI or written to a HTML file if the '-o' argument is provided.
"""
import urllib
import urllib2
import copy
import string
import random
from time import sleep
from progressbar import *
from prettytable import PrettyTable
def fireFuzz(type, fuzz, url, params, header, delay, outputFile):
"""
:Description: This function iterates through a list of fuzzing strings retrieved from the database, sends them to the target site and displays a progress bar of this process.
:param type: Type of the fuzzing strings to send [sql | xss].
:type type: String
:param fuzz: Fuzzing strings
:type fuzz: List
:param url: Target URL
:type url: String
:param params: POST Parameter
:type params: String
:param header: Cookie header
:type header: String
:param delay: Delay between requests
:type delay: Float
:param outputFile: Name of Output file
:type outputFile: String
:note: This function calls the showOutput() file with the saved outputs as argument.
:todo: Add threads in order to send requests simultaneously.
"""
print '''
___ ______________________ ______ ________
__ | / /__ |__ ____/__ | / /__(_)____________(_)_____ _
__ | /| / /__ /| |_ /_ __ |/ /__ /__ __ \____ /_ __ `/
__ |/ |/ / _ ___ | __/ _ /| / _ / _ / / /___ / / /_/ /
____/|__/ /_/ |_/_/ /_/ |_/ /_/ /_/ /_/___ / \__,_/
/___/
WAFNinja - Penetration testers favorite for WAF Bypassing
'''
pbar = ProgressBar(widgets=[SimpleProgress(), ' Fuzz sent! ', Percentage(), Bar()])
opener = urllib2.build_opener()
for h in header:
opener.addheaders.append(h)
result = []
for fuzz in pbar(fuzz):
expected = fuzz[1]
fuzz = fuzz[0]
try:
sleep(float(delay))
if params is None: # GET parameter
randomString, url_with_fuzz = insertFuzz(url, fuzz)
response = opener.open(url_with_fuzz)
else: # POST parameter
randomString, params_with_fuzz = setParams(params, fuzz)
response = opener.open(url, urllib.urlencode(params_with_fuzz))
content = response.read()
occurence = content.find(randomString)+len(randomString) # get position of the randomString + length(randomString) to get to the fuzz
result.append({
'fuzz' : fuzz,
'expected' : expected,
'httpCode' : response.getcode(),
'contentLength': response.headers.get('Content-Length'),
'output' : content[occurence:occurence+len(expected)]}) # take string from occurence to occurence+len(expected)
except urllib2.HTTPError, error: # HTTP Status != 200
if error.code == 404:
print 'ERROR: Target URL not reachable!'
sys.exit()
else: # HTTP Status != 404
result.append({
'fuzz' : fuzz,
'expected' : expected,
'httpCode' : error.code,
'contentLength': '-',
'output' : '-'})
showOutput(type, result, outputFile)
def showOutput(type, result, outputFile):
"""
:Description: This function prints the result of the fireFuzz() function in a nice fashion.
:param type: Type of the fuzzing strings that were sent
:type type: String
:param result: Contains the sent Fuzz, HTTP Code, Content-Length, expected string and the response's output
:type result: List
:param outputFile: Name of Output file
:type outputFile: String
:note: This function saves the output in a HTML file or prints the output directly in the CLI.
"""
table = PrettyTable(['Fuzz', 'HTTP Status', 'Content-Length', 'Expected', 'Output', 'Working'])
for value in result:
if (value['httpCode'] != 200):
table.add_row([value['fuzz'], value['httpCode'], value['contentLength'], value['expected'], value['output'].strip(), 'No'])
else:
if(value['expected'] in value['output']):
table.add_row([value['fuzz'], value['httpCode'], value['contentLength'], value['expected'], value['output'], 'Yes'])
else:
table.add_row([value['fuzz'], value['httpCode'], value['contentLength'], value['expected'], value['output'], 'Probably'])
if outputFile is not None:
table = table.get_html_string(attributes={"class":"OutputTable"})
table = '<link rel="stylesheet" href="style.css">' + table
table = table.replace('<td>Yes</td>', '<td class="Yes">Yes</td>')
table = table.replace('<td>No</td>', '<td class="No">No</td>')
table = table.replace('<td>Probably</td>', '<td class="Probably">Probably</td>')
file = open(outputFile,'w')
file.write(table)
file.close()
print 'Output saved to ' + outputFile + '!'
else:
print table
def insertFuzz(url, fuzz):
    """
    :Description: This function inserts the Fuzz as GET Parameter in the URL
    :param url: Target URL
    :type type: String
    :param fuzz: Fuzzing string
    :type fuzz: String
    :return: The URL with a concatenated string consisting of a random string and the fuzz.
    :note: Some fuzzing symbols can be part of a normal response. In order to distinctly find the fuzz that was sent, a random string is added before the fuzz.
    """
    encoded = urllib.quote_plus(fuzz)  # url encoding
    rng = random.SystemRandom()
    alphabet = string.ascii_uppercase + string.digits
    randomString = ''.join(rng.choice(alphabet) for _ in range(6))
    return randomString, url.replace('FUZZ', randomString + str(encoded))
def setParams(params, fuzz):
    """
    :Description: This function sets the Fuzz in the POST Parameter.
    :param params: POST parameters whose placeholder value is 'FUZZ'
    :type params: Dict
    :param fuzz: Fuzzing string
    :type fuzz: String
    :return: Tuple (randomString, parameters) where every 'FUZZ' value has been replaced by randomString + fuzz
    :note: Some fuzzing symbols can be part of a normal response. In order to distinctly find the fuzz that was sent, a random string is added before the fuzz.
    """
    randomString = ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(6))
    # Deep copy so the caller's dict keeps its 'FUZZ' placeholders intact
    # for the next request.
    parameter = copy.deepcopy(params)
    for param in parameter:
        if parameter[param] == 'FUZZ':
            parameter[param] = randomString + str(fuzz)
    return randomString, parameter
|
__author__ = 'CRUCIFIX'
# Names of the available template themes.
templates = ['dark', 'social', 'unit']
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from xml.etree.ElementTree import Element
import os
# Python 2/3 compatibility shims: make `str`, `unicode` and `basestring`
# exist with consistent meanings on both interpreters.
try:
    unicode = unicode
except NameError:
    # Python 3: there is no `unicode` type — alias it to `str`.
    str = str
    unicode = str
    basestring = (str, bytes)
else:
    # Python 2: the names already exist; re-export them unchanged.
    str = str
    unicode = unicode
    basestring = basestring
# Absolute directory of this module, independent of the working directory.
__location__ = os.path.realpath(
    os.path.join(os.getcwd(), os.path.dirname(__file__)))
# XSLT assets used to build the "cadena original" for CFDI 3.2 documents.
CADENA_ORIGINAL_3_2_PATH = os.path.join(__location__, 'assets/cadenaoriginal_3_2.xslt')
CADENA_ORIGINAL_3_2_NOMINA_1_2_PATH = os.path.join(__location__, 'assets/cadenaoriginal_3_2_nomina_1_2.xslt')
class CfdiNode(object):
    """Dynamic node built from nested dicts, convertible to ElementTree elements.

    Keyword arguments become attributes; dict values become child CfdiNode
    instances and list values become lists of CfdiNode instances.  Keys with
    a leading underscore (_namespace, _tag, _order) are node metadata, not
    XML attributes.
    """
    def __init__(self, tag='', **kwargs):
        self.__dict__.update(kwargs)
        self.__namespace__ = kwargs.get('_namespace', 'cfdi')
        self.__tag__ = kwargs.get('_tag', tag)
        self.__order__ = int(kwargs.get('_order', 0))
        for k, v in self.__dict__.items():
            if isinstance(v, dict):
                setattr(self, k, CfdiNode(tag=k, **v))
            if isinstance(v, list):
                setattr(self, k, [CfdiNode(**i) for i in v])
    @staticmethod
    def _as_dict(obj):
        # Recursively convert a CfdiNode tree back to plain dicts/lists,
        # dropping metadata keys (those starting with '_').
        if not hasattr(obj, '__dict__'):
            return obj
        result = {}
        for k, v in obj.__dict__.items():
            if k.startswith('_'):
                continue
            if isinstance(v, list):
                element = [CfdiNode._as_dict(item) for item in v]
            else:
                element = CfdiNode._as_dict(v)
            result[k] = element
        return result
    def as_dict(self):
        """Return this node and its children as plain dicts/lists."""
        return CfdiNode._as_dict(self)
    def get_attr(self, attr):
        """Return the attribute rendered as ' name="value"', or '' when absent."""
        if hasattr(self, attr):
            return ' {}="{}"'.format(attr, getattr(self, attr))
        return ''
    def print_attributes(self):
        """Return all scalar attributes rendered as space-separated 'k="v"' pairs."""
        output = ''
        for k, v in self.as_dict().items():
            if type(v) not in (dict, list):
                output += '{}="{}" '.format(k, v)
        return output.strip()
    def get_attributes(self):
        """Return only the scalar (non-dict, non-list) attributes."""
        attributes = {}
        for k, v in self.as_dict().items():
            if k.startswith('_'):
                continue
            if type(v) in (dict, list):
                continue
            attributes[k] = v
        return attributes
    def get_children(self):
        """Return only the nested (dict or list valued) attributes."""
        children = {}
        for k, v in self.as_dict().items():
            if k.startswith('as_'):
                continue
            if k.startswith('get_'):
                continue
            if k.startswith('print_'):
                continue
            if type(v) not in (dict, list):
                continue
            children[k] = v
        return children
    @staticmethod
    def _as_etree_node(obj, extra_attrs=None):
        # Recursively build an ElementTree Element for obj and its children.
        # BUGFIX: extra_attrs defaults to None instead of a shared mutable {}.
        if not hasattr(obj, '__dict__'):
            return obj
        tag = '{}:{}'.format(obj.__namespace__, obj.__tag__)
        attributes = obj.get_attributes()
        attributes.update(extra_attrs or {})
        children = obj.get_children()
        element = Element(tag)
        for k, v in attributes.items():
            # u'' literal keeps the value unicode on Python 2 and is plain
            # str on Python 3 — no module-level `unicode` alias needed here.
            element.set(k, u'{}'.format(v))
        for k, v in children.items():
            if isinstance(v, list):
                # BUGFIX: sort the actual child nodes by their `_order`
                # metadata.  The previous code sorted the plain-dict copies
                # (whose '_order' keys were already stripped by _as_dict)
                # and then discarded the sorted result entirely.
                items = sorted(getattr(obj, k),
                               key=lambda node: getattr(node, '__order__', 0))
                for item in items:
                    element.append(obj._as_etree_node(item))
            else:
                element.append(obj._as_etree_node(getattr(obj, k)))
        return element
    def as_etree_node_recursive(self, extra_attrs={}):
        """Build the full Element tree.

        BUGFIX: extra_attrs is now forwarded to the builder; it was
        previously accepted and silently ignored.
        """
        return self._as_etree_node(self, extra_attrs)
    def as_etree_node(self, extra_attrs={}):
        """Build an Element for this node only (children are not rendered)."""
        tag = '{}:{}'.format(self.__namespace__, self.__tag__)
        attributes = self.get_attributes()
        attributes.update(extra_attrs)
        element = Element(tag)
        for k, v in attributes.items():
            element.set(k, u'{}'.format(v))
        return element
class XmlBuilder(object):
    """Assemble a CFDI 3.2 invoice XML document as an ElementTree Element.

    Walks a tree of node objects rooted at *root_node* (the Comprobante),
    relying on each node's as_etree_node() / as_etree_node_recursive() to
    produce Elements, and wires the required/optional CFDI sections together
    in schema order.
    """

    def __init__(self, root_node):
        # Root "Comprobante" node of the document to serialize.
        self.root_node = root_node

    def get_cfdi_3_2(self):
        """Build and return the cfdi:Comprobante Element (CFDI version 3.2).

        Optional sections (DomicilioFiscal, Retenciones, Traslados, and every
        Complemento variant) are emitted only when the corresponding attribute
        exists on the source nodes; unknown complement versions are ignored.
        """
        xml_schema_data = {
            'xmlns:xsi': [
                'http://www.w3.org/2001/XMLSchema-instance',
            ],
            'xmlns:cfdi': [
                'http://www.sat.gob.mx/cfd/3'
            ],
            'xsi:schemaLocation': [
                'http://www.sat.gob.mx/cfd/3',
                'http://www.sat.gob.mx/sitio_internet/cfd/3/cfdv32.xsd',
            ],
        }
        # - Comprobante
        comprobante_node = self.root_node.as_etree_node()
        # Add schema data (multi-valued entries are space-joined per XSD rules)
        for k, v in xml_schema_data.items():
            comprobante_node.set(k, ' '.join(v))
        # -- Emisor
        Emisor = self.root_node.Emisor
        emisor_node = Emisor.as_etree_node()
        if hasattr(Emisor, 'DomicilioFiscal'):
            domicilio_node = Emisor.DomicilioFiscal.as_etree_node()
            emisor_node.append(domicilio_node)
        expedidoen_node = Emisor.ExpedidoEn.as_etree_node()
        emisor_node.append(expedidoen_node)
        for RegimenFiscal in Emisor.RegimenFiscal:
            regimen_node = RegimenFiscal.as_etree_node_recursive()
            emisor_node.append(regimen_node)
        comprobante_node.append(emisor_node)
        # -- Receptor
        Receptor = self.root_node.Receptor
        receptor_node = Receptor.as_etree_node()
        if hasattr(Receptor, 'Domicilio'):
            domicilio_node = Receptor.Domicilio.as_etree_node()
            receptor_node.append(domicilio_node)
        comprobante_node.append(receptor_node)
        # -- Conceptos
        conceptos_node = Element('cfdi:Conceptos')
        for Concepto in self.root_node.Conceptos:
            concepto_node = Concepto.as_etree_node()
            if hasattr(Concepto, 'CuentaPredial'):
                CuentaPredial = Concepto.CuentaPredial
                cuenta_predial_node = CuentaPredial.as_etree_node()
                concepto_node.append(cuenta_predial_node)
            conceptos_node.append(concepto_node)
        comprobante_node.append(conceptos_node)
        # -- Impuestos
        Impuestos = self.root_node.Impuestos
        impuestos_node = Impuestos.as_etree_node()
        # --- Retenciones
        if hasattr(Impuestos, 'Retenciones'):
            Retenciones = Impuestos.Retenciones
            retenciones_node = Element('cfdi:Retenciones')
            for Retencion in Retenciones:
                retencion_node = Retencion.as_etree_node()
                retenciones_node.append(retencion_node)
            impuestos_node.append(retenciones_node)
        # --- Traslados
        if hasattr(Impuestos, 'Traslados'):
            Traslados = Impuestos.Traslados
            traslados_nodes = Element('cfdi:Traslados')
            for Traslado in Traslados:
                traslado_node = Traslado.as_etree_node()
                traslados_nodes.append(traslado_node)
            impuestos_node.append(traslados_nodes)
        comprobante_node.append(impuestos_node)
        # -- Complemento
        if hasattr(self.root_node, 'Complemento'):
            Complemento = self.root_node.Complemento
            complemento_node = Complemento.as_etree_node()
            # --- Impuestos locales
            implocales_version = None
            has_implocales = hasattr(Complemento, 'ImpuestosLocales')
            if has_implocales:
                implocales_version = Complemento.ImpuestosLocales.version
            # ----- Impuestos locales 1.0
            if has_implocales and implocales_version == '1.0':
                ImpuestosLocales = Complemento.ImpuestosLocales
                implocales_node = ImpuestosLocales.as_etree_node()
                implocales_node.set('xmlns:implocal', 'http://www.sat.gob.mx/implocal')
                implocales_node.set('xsi:schemaLocation', 'http://www.sat.gob.mx/implocal http://www.sat.gob.mx/sitio_internet/cfd/implocal/implocal.xsd')
                if hasattr(ImpuestosLocales, 'traslados'):
                    for traslado in ImpuestosLocales.traslados:
                        implocales_node.append(traslado.as_etree_node())
                if hasattr(ImpuestosLocales, 'retenciones'):
                    for retencion in ImpuestosLocales.retenciones:
                        implocales_node.append(retencion.as_etree_node())
                complemento_node.append(implocales_node)
            # --- Donatarias
            donatarias_version = None
            has_donatarias = hasattr(Complemento, 'Donatarias')
            if has_donatarias:
                donatarias_version = Complemento.Donatarias.version
            # ---- Donatarias 1.1
            if has_donatarias and donatarias_version == '1.1':
                Donatarias = Complemento.Donatarias
                donatarias_node = Donatarias.as_etree_node()
                donatarias_node.set('xmlns:donat', 'http://www.sat.gob.mx/donat')
                donatarias_node.set('xsi:schemaLocation', 'http://www.sat.gob.mx/donat http://www.sat.gob.mx/sitio_internet/cfd/donat/donat11.xsd')
                complemento_node.append(donatarias_node)
            # --- Complemento vehiculo
            vehiculousado_version = None
            has_vehiculousado = hasattr(Complemento, 'VehiculoUsado')
            if has_vehiculousado:
                vehiculousado_version = Complemento.VehiculoUsado.Version
            # ---- Complemento vehiculo 1.0
            if has_vehiculousado and vehiculousado_version == '1.0':
                VehiculoUsado = Complemento.VehiculoUsado
                vehiculousado_node = VehiculoUsado.as_etree_node()
                vehiculousado_node.set('xmlns:vehiculousado', 'http://www.sat.gob.mx/vehiculousado')
                vehiculousado_node.set('xsi:schemaLocation', 'http://www.sat.gob.mx/vehiculousado http://www.sat.gob.mx/sitio_internet/cfd/vehiculousado/vehiculousado.xsd')
                complemento_node.append(vehiculousado_node)
            # --- Servicios parciales constr.
            serviciosparciales_version = None
            has_serviciosparciales = hasattr(Complemento, 'parcialesconstruccion')
            if has_serviciosparciales:
                serviciosparciales_version = Complemento.parcialesconstruccion.Version
            # ---- Servicios parciales constr. 1.0
            if has_serviciosparciales and serviciosparciales_version == '1.0':
                parcialesconstruccion = Complemento.parcialesconstruccion
                parcialesconstruccion_node = parcialesconstruccion.as_etree_node()
                parcialesconstruccion_node.set('xmlns:servicioparcial', 'http://www.sat.gob.mx/servicioparcialconstruccion')
                parcialesconstruccion_node.set('xsi:schemaLocation', 'http://www.sat.gob.mx/servicioparcialconstruccion http://www.sat.gob.mx/sitio_internet/cfd/servicioparcialconstruccion/servicioparcialconstruccion.xsd')
                if hasattr(parcialesconstruccion, 'servicios'):
                    for servicio in parcialesconstruccion.servicios:
                        parcialesconstruccion_node.append(servicio.as_etree_node())
                complemento_node.append(parcialesconstruccion_node)
            # --- Divisas
            divisas_version = None
            has_divisas = hasattr(Complemento, 'Divisas')
            if has_divisas:
                divisas_version = Complemento.Divisas.version
            # ---- Divisas 1.0
            if has_divisas and divisas_version == '1.0':
                Divisas = Complemento.Divisas
                divisas_node = Divisas.as_etree_node()
                divisas_node.set('xmlns:divisas', 'http://www.sat.gob.mx/divisas')
                divisas_node.set('xsi:schemaLocation', 'http://www.sat.gob.mx/cfd/3 http://www.sat.gob.mx/sitio_internet/cfd/3/cfdv32.xsd http://www.sat.gob.mx/divisas http://www.sat.gob.mx/sitio_internet/cfd/divisas/divisas.xsd')
                complemento_node.append(divisas_node)
            # Complemento INE - TODO
            ine_version = None
            has_ine = hasattr(Complemento, 'INE')
            if has_ine:
                ine_version = Complemento.INE.Version
            if has_ine and ine_version == '1.1':
                INE = Complemento.INE
                ine_node = INE.as_etree_node()
                ine_node.set('xmlns:ine', 'http://www.sat.gob.mx/ine')
                ine_node.set('xsi:schemaLocation', 'http://www.sat.gob.mx/ine http://www.sat.gob.mx/sitio_internet/cfd/ine/ine11.xsd')
                if hasattr(INE, 'entidades'):
                    for entidad in INE.entidades:
                        entidad_node = entidad.as_etree_node()
                        if hasattr(entidad, 'contabilidades'):
                            for contabilidad in entidad.contabilidades:
                                contabilidad_node = contabilidad.as_etree_node()
                                entidad_node.append(contabilidad_node)
                        ine_node.append(entidad_node)
                complemento_node.append(ine_node)
            # --- Nomina
            nomina_version = None
            has_nomina = hasattr(Complemento, 'Nomina')
            if has_nomina:
                nomina_version = Complemento.Nomina.Version
            # ---- Nomina 1.1
            if has_nomina and nomina_version == '1.1':
                Nomina = Complemento.Nomina
                nomina_node = Nomina.as_etree_node()
                nomina_node.set('xmlns:nomina', 'http://www.sat.gob.mx/nomina')
                nomina_node.set('xsi:schemaLocation', 'http://www.sat.gob.mx/nomina http://www.sat.gob.mx/sitio_internet/cfd/nomina/nomina11.xsd')
                if hasattr(Nomina, 'Percepciones'):
                    Percepciones = Nomina.Percepciones
                    percepciones_node = Percepciones.as_etree_node_recursive()
                    nomina_node.append(percepciones_node)
                if hasattr(Nomina, 'Deducciones'):
                    Deducciones = Nomina.Deducciones
                    deducciones_node = Deducciones.as_etree_node_recursive()
                    nomina_node.append(deducciones_node)
                complemento_node.append(nomina_node)
            # ---- Nomina 1.2 - TODO
            # Attach the Complemento only when at least one complement was
            # generated.  FIX: Element.getchildren() was removed in Python 3.9;
            # len(element) is the equivalent, portable way to count children.
            if len(complemento_node) > 0:
                comprobante_node.append(complemento_node)
        return comprobante_node
# NOTE: trailing dataset-viewer residue removed — the lines below were not
# part of the module source ("Subsets and Splits" HTML scrape artifacts).