# text stringlengths 8 6.05M |
# |---|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Alex Buzunov <alex_buz@yahoo.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# Title: Batch Status
# Description:
# Batch Status Browser.
# Environment:
# Python 3.5 and wxPython 3.0
# set NLS_LANGUAGE=AMERICAN_AMERICA.WE8ISO8859P1
# set NLS_LANGUAGE=AMERICAN_AMERICA.WE8MSWIN1252
# set NLS_LANGUAGE=AMERICAN_AMERICA.AL16UTF16
#
##64 bit client
# set ORACLE_HOME=C:\app\abuzunov-local\product\11.2.0\client_1
# set PATH=%PATH%;%ORACLE_HOME%
##db password
# set table-hunter0connectors0DEVdb=manage
#
#git clone --progress --recursive -- "https://github.com/alexbuz/BatchStatusBrowser.git"
#
from __future__ import print_function
__author__ = "Alex Buzunov"
__copyright__ = "Copyright 2017, KCG"
__credits__ = []
__appname__='BatchStatusBrowser'
__license__ = "GPL"
__title__ = "Batch Status Browser"
__version__ = "0.1.0"
__maintainer__ = "Alex Buzunov"
__email__ = "alex_buz@yahoo.com"
__github__= ''
__status__ = "Development"
if 0:
#import wxversion
import wxversion as wv
wv.select("3.0")
import wx
from wx import adv
import wx.lib.newevent
import os, sys
import time, imp, datetime
#from six.moves import _thread
e=sys.exit
import win32gui
import atexit
import shutil
import pprint as pp
import cx_Oracle
from libs.clip import Paste
import getpass
import win32gui, win32con
import webbrowser
from wx import html
import wx.lib.agw.pybusyinfo as PBI
from six.moves import _thread, queue as Queue
from wx.lib.combotreebox import ComboTreeBox
from win32com.client import gencache
import wx.lib.inspection
import wx.lib.mixins.inspection
from multiprocessing import freeze_support
import argparse
from tc_lib import sub, send
from copy import deepcopy
from pprint import pprint
import wx.lib.mixins.listctrl as listmix
from locale import getdefaultlocale, setlocale, LC_ALL
from wx.aui import AuiManager, AuiPaneInfo, AuiToolBar, \
AUI_TB_DEFAULT_STYLE, AUI_TB_VERTICAL, AUI_TB_OVERFLOW
from wx.py import shell, version
import libs.EnhancedStatusBar as ESB
[wxID_MAIN_WINDOWSTATUSBAR1, wxID_MAIN_WINDOWSTATUSIMAGE, wxID_TIMER, wxID_EXIT] = [wx.NewId() for __init_ids in range(4)]
import images
from wx.lib import sized_controls
#import ListCtrl_StatusLog
import win32com.client as win32
from simplecrypt import decrypt
from binascii import unhexlify
con=None
from libs.PyBusyFrame import MyPyBusyFrame
import wx.lib.newevent
update_evt = (UpdateListEvent, EVT_UPDATE_LIST) = wx.lib.newevent.NewEvent()
exit_evt = (ExitAppEvent, wxID_EVT_EXIT) = wx.lib.newevent.NewCommandEvent()
log_evt = (LogEvent, wxID_EVT_LOG) = wx.lib.newevent.NewCommandEvent()
import builtins
builtins.update_evt = update_evt
builtins.exit_evt=exit_evt
builtins.log_evt=log_evt
from libs.db import DbThread2
#home=os.path.dirname(os.path.abspath(__file__))
# Application home: directory of the launch script; falls back to this file's
# directory when sys.argv[0] carries no path component (e.g. interactive run).
home=os.path.dirname(sys.argv[0])
if not home :
    home=os.path.dirname(os.path.abspath(__file__))
#print (home)
# Published through builtins so every module in the app can reach it.
builtins.home = home
from libs.startupframe import BusyFrame
try:
from urllib.parse import unquote
except ImportError:
#Python 2.7 and before
from urllib import unquote
import builtins
script_name=os.path.splitext(os.path.basename(__file__))[0]
builtins.pid = os.getpid()
builtins.home = home
builtins.script_name=script_name
if 1:
    # Job bootstrap: timestamp, job name, logger and the shared logging
    # "extra" dict come from the project-local init helper.
    import libs.init_job as init
    ts, JOB_NAME, IMP_DATE, HOME, log,_ = init.init()
    d=init.d  # extra-dict passed to every logging call (log.info(..., extra=d))
    d['script']=''
    ts_out_dir=init.ts_out_dir
    app_title='%s %s' % (__title__,__version__)
    # Per-run status file, made unique by job name and PID.
    job_status_file=os.path.join(init.ts_dir,'%s.%s.status_%d.py' % (os.path.splitext(__file__)[0],JOB_NAME,os.getpid()))
    job_status={}
    default_fullscreen_style = wx.FULLSCREEN_NOSTATUSBAR | wx.FULLSCREEN_NOBORDER | wx.FULLSCREEN_NOCAPTION
import gettext
# NOTE(review): this rebinds _, clobbering the value unpacked from init.init()
# a few lines above — confirm that value is intentionally discarded.
_ = gettext.gettext
try:
# Python 3.4+
if sys.platform.startswith('win'):
import multiprocessing.popen_spawn_win32 as forking
else:
import multiprocessing.popen_fork as forking
except ImportError:
import multiprocessing.forking as forking
if sys.platform.startswith('win'):
    # PyInstaller --onefile workaround: child processes spawned by
    # multiprocessing must inherit _MEIPASS2 so the bootloader can find the
    # unpacked bundle. First define a modified version of Popen.
    class _Popen(forking.Popen):
        def __init__(self, *args, **kw):
            if hasattr(sys, 'frozen'):
                # We have to set original _MEIPASS2 value from sys._MEIPASS
                # to get --onefile mode working.
                os.putenv('_MEIPASS2', sys._MEIPASS)
            try:
                super(_Popen, self).__init__(*args, **kw)
            finally:
                if hasattr(sys, 'frozen'):
                    # On some platforms (e.g. AIX) 'os.unsetenv()' is not
                    # available. In those cases we cannot delete the variable
                    # but only set it to the empty string. The bootloader
                    # can handle this case.
                    if hasattr(os, 'unsetenv'):
                        os.unsetenv('_MEIPASS2')
                    else:
                        os.putenv('_MEIPASS2', '')
    # Second override 'Popen' class with our modified version.
    forking.Popen = _Popen
import multiprocessing
#from functools import cmp_to_key
try:
    cmp  # exists only on Python 2
except NameError:
    # Python 3 dropped the builtin; restore the classic three-way comparison.
    def cmp(x, y):
        """Return -1, 0 or 1 as x is less than, equal to, or greater than y."""
        return (x > y) - (x < y)
#----------------------------------------------------------------------
def _join_lingering_threads():
    """Ask every registered worker thread to stop (used as an atexit hook)."""
    for worker in _active_threads:
        worker.stop_thread()
class LogStatus:
    r"""\brief Needed by the wxdemos.

    A minimal file-like log sink: every message is routed to the status bar
    of the application's top-level frame.
    """

    def write(self, text_string):
        wx.GetApp().GetTopWindow().SetStatusText(text_string)

    def WriteText(self, text_string):
        # Alias kept for wx demo Log-object compatibility.
        self.write(text_string)
#----------------------------------------------------------------------
# The panel you want to test (TestVirtualList)
#----------------------------------------------------------------------
def cmp_to_key(mycmp):
    """Convert a cmp= function into a key= function.

    The original hand-rolled K class duplicated the standard library;
    functools.cmp_to_key provides the same wrapper (C-accelerated in
    CPython) with identical comparison semantics.
    """
    from functools import cmp_to_key as _std_cmp_to_key
    return _std_cmp_to_key(mycmp)
def reverse_numeric(x, y):
    """cmp-style comparator yielding descending numeric order."""
    difference = y - x
    return difference
def chunks(cur):  # 65536
    """Yield successive fetchmany() batches from a DB cursor until empty."""
    global log, d
    batch = cur.fetchmany()
    while batch:
        yield batch
        batch = cur.fetchmany()
class AboutDlg2(wx.Frame):
    """Borderless 'About' popup that destroys itself when it loses focus.

    NOTE(review): 'wxHTML' is not defined in this chunk — presumably a
    project HtmlWindow wrapper; confirm it is imported elsewhere in the file.
    """
    def __init__(self, parent):
        wx.Frame.__init__(self, parent, wx.ID_ANY, style=wx.CLIP_CHILDREN|wx.RESIZE_BORDER|wx.CLOSE_BOX|wx.FRAME_SHAPED|wx.FRAME_NO_TASKBAR|wx.NO_BORDER|wx.CAPTION |wx.FRAME_FLOAT_ON_PARENT, title="About %s" % __title__, size=(300,300))
        p=wx.Panel(self,-1)
        html = wxHTML(p)
        page="""
<h2>%s</h2>
<p>Batch Status Browser (<a href="http://theloop.kcg.com/pages/viewpage.action?pageId=73130973" target="_blank">loop docs</a>).</p>
<p><b>Authors:</b><br>Alex B (abuzunov@kcg.com),<br>Bhasker Parsi (bparsi@kcg.com)</p>
<p><b>Software used in making this tool:</h3></p>
<p><b><a href="http://www.python.org" target="_blank">Python 3.5</a></b></p>
<p><b><a href="http://www.wxpython.org" target="_blank">wxPython 3.0</a></b></p>
""" % (__title__)
        html.SetPage(
            page
        )
        sizer = wx.BoxSizer(wx.VERTICAL)
        sizer.Add(html, 1, wx.EXPAND)
        p.SetSizer(sizer)
        #self.Center()
        #self.SetSize((300,300))
        #self.Fit()
        self.Layout()
        #self.Refresh()
        # Any activation change (click elsewhere) dismisses the dialog.
        self.Bind(wx.EVT_ACTIVATE, self.Close)

    def Close(self, evt):
        """Destroy the frame once it is deactivated (focus moved away)."""
        if evt.GetActive() != True:
            self.Destroy()
#
# MAIN LIST
#
class JobListCtrl(wx.ListCtrl, listmix.ListCtrlAutoWidthMixin, listmix.ColumnSorterMixin):
    """Virtual (LC_VIRTUAL) report-mode list of batch jobs.

    Row data arrives asynchronously from a DbThread2 worker via custom wx
    events (EVT_UPDATE_LIST / wxID_EVT_LOG); cell text, status icons and row
    colours are served on demand through the OnGetItem* virtual callbacks.

    NOTE(review): indentation in this block was reconstructed from a
    whitespace-mangled source; nesting of the 'if 1:'/'if 0:' grouping blocks
    should be confirmed against the original file (behaviour is unaffected
    for 'if 1:' groups since they always execute).
    """
    def __init__(self, win, parent,id, view,log):
        # win: main frame (supplies config, status bar, Freeze/Thaw);
        # view: initial view name from config; log: logging-style object.
        global home
        wx.ListCtrl.__init__( self, parent, id, style=wx.LC_REPORT|wx.LC_VIRTUAL|wx.LC_HRULES|wx.LC_VRULES)
        self.ID=id
        if id<0:
            self.ID=wx.NewId()
        self.log=log
        self.parent=parent
        self.win=win
        self.view=view
        self.filter_history={}
        self.running=[]  # pending "running elapsed/percent" stats from the DB thread
        self.current_list='TableList'
        #adding some art
        self.il = wx.ImageList(16, 16)
        # Status icon indices keyed by status keyword.
        self.idx={}
        self.idx['FAILED'] = self.il.Add(wx.Bitmap(os.path.join(home, "images","exclamation_16.png"),wx.BITMAP_TYPE_PNG))
        self.idx['SUCCESS'] = self.il.Add(wx.Bitmap(os.path.join(home,"images","bullet_green_16.png"),wx.BITMAP_TYPE_PNG))
        self.idx['NOT RUN'] = self.il.Add(wx.Bitmap(os.path.join(home,"images","bullet_yellow_16.png"),wx.BITMAP_TYPE_PNG))
        self.idx['RUNNING'] = self.il.Add(wx.Bitmap(os.path.join(home,"images","bullet_blue_16.png"),wx.BITMAP_TYPE_PNG))
        self.SetImageList(self.il, wx.IMAGE_LIST_SMALL)
        if 1:
            # Column-sort arrows and misc art from the stock art provider;
            # exec() assigns self.sm_up, self.sm_dn, etc.
            a={"sm_up":"GO_UP","sm_dn":"GO_DOWN","w_idx":"WARNING","e_idx":"ERROR","i_idx":"QUESTION"}
            for k,v in a.items():
                s="self.%s= self.il.Add(wx.ArtProvider.GetBitmap(wx.ART_%s,wx.ART_TOOLBAR,(16,16)))" % (k,v)
                exec(s)
            self.SetImageList(self.il, wx.IMAGE_LIST_SMALL)
        #integer columns
        #select column_id||',' from all_tab_columns where table_name='ALL_TABLES' and data_type in ('NUMBER','INTEGER');
        # Per-status row background colours come from the job_list config.
        self.attr={}
        self.colors=self.win.config.cfg['queries']['job_list']['status_colors']
        for k, v in self.colors.items():
            self.attr[k]= wx.ListItemAttr()
            self.attr[k].SetBackgroundColour(v)
        if 0:
            # Dead code: hard-coded palette superseded by the config colours above.
            self.attr1 = wx.ListItemAttr()
            self.attr1.SetBackgroundColour('#FFCCCC')
            font = wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD)
            self.attr1.SetFont(font)
            self.attr2 = wx.ListItemAttr()
            self.attr2.SetBackgroundColour("#CCE5FF")
            self.attr3 = wx.ListItemAttr()
            self.attr3.SetBackgroundColour("#FFFFCC")
            self.attr4 = wx.ListItemAttr()
            self.attr4.SetBackgroundColour("#CCFFCC")
            self.attr_long_running = wx.ListItemAttr()
            self.attr_long_running.SetBackgroundColour("#FF99CC")
            self.attr_not_started = wx.ListItemAttr()
            self.attr_not_started.SetBackgroundColour("#FFB266")
        qdef =self.win.config.cfg['queries']['job_list']
        if 1:
            # Resolve the active view name, falling back to the configured default.
            self.view=self.win.getJobViewName()
            self.info('Setting view to %s' % self.view)
            if not self.view:
                self.view=qdef['default_view']
            assert self.view in qdef['views'].keys(), 'ERROR: view "%s" in not in configured view names: \n"%s"' % (self.view, ','.join(qdef['views'].keys()))
            self.info('Job view: %s' % self.view)
            cols=qdef['views'][self.view]['columns']
            n=1
            self.itemDataMap={}
            # Wire DB-thread events and kick off the initial data load.
            self.Bind(EVT_UPDATE_LIST, self.OnUpdateFromDb)
            self.Bind(wxID_EVT_LOG, self.OnLogFromDb)
            self.RefreshListData(self.view)
            sub(self.onUpdateList, "update_list")
        if 1:
            #These two should probably be passed to init more cleanly
            #setting the numbers of items = number of elements in the dictionary
            self.itemIndexMap = self.itemDataMap.keys()
            self.SetItemCount(len(self.itemDataMap))
            self.data={}
            self.data[self.current_list]= self.itemDataMap
            #mixins
            self.setMixins()
            self.col_id=1
            #sort by genre (column 2), A->Z ascending order (1)
            self.if_reverse=True
            self.SortListItems(self.col_id, 1)
        #events
        self.gen_bind(wx.EVT_LIST_ITEM_SELECTED,self, self.OnItemSelected,(self.ID,))
        self.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self.OnItemActivated)
        self.Bind(wx.EVT_LIST_ITEM_DESELECTED, self.OnItemDeselected)
        self.Bind(wx.EVT_LIST_COL_CLICK, self.OnColClick)
        # NOTE(review): EVT_LIST_ITEM_ACTIVATED is bound twice (OnItemActivated
        # above and OnDoubleClick here) — confirm both handlers are intended.
        self.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self.OnDoubleClick)
        data= None
        #Log popup
        #############################################################################################
        self.logwin=StatusLogReport(self,win,wx.ID_ANY, (self.log,d),data, 'Job Details', size=(1400,800))
        #############################################################################################
        self.logwin.Center()
        self.logwin.SetTitle('Job Details')
        self.logwin.Hide()
        self.gen_bind(wx.EVT_CHAR_HOOK,self, self.OnKeyUP,(self.ID,))
        self.info(type(self.logwin).__name__+' Creaded.')

    def OnUpdateFromDb(self, evt):
        """Dispatch an UpdateListEvent posted by the DB thread by update type."""
        (_, _, _, update_type) = evt.value
        if update_type in ['update_list']:
            self.UpdateList(evt.value)
        elif update_type in ['update_running']:
            self.info('Updating running stats')
            self.SetRunning(evt.value)

    def OnLogFromDb(self, evt):
        """Forward a log message posted by the DB thread to the status log."""
        (list_id, msg) = evt.value
        self.sinfo('DbThread','%s: %s' % (list_id, msg))

    def SetRunning(self,data):
        """Store elapsed/percent stats for running jobs and repaint if rows exist."""
        (list_id, rows, _,_) = data
        if self.ID==list_id:
            self.running=rows
            if self.itemIndexMap:
                self.UpdateRunning()

    def UpdateRunning(self):
        """Append '[+Nm]' elapsed and '[+N%]' progress markers to the status
        column (index 3) of currently-running rows, then clear pending stats."""
        if self.running:
            running_seq=self.running.keys()
            running_ids= [i for i,row in self.itemDataMap.items() if row[0] in running_seq]
            for id in running_ids:
                if self.running[self.itemDataMap[id][0]][1]:
                    self.itemDataMap[id][3] += '[%+dm]' % (self.running[self.itemDataMap[id][0]][1])
                if self.running[self.itemDataMap[id][0]][2]>0:
                    self.itemDataMap[id][3] += '[%+d' % self.running[self.itemDataMap[id][0]][2]+'%]'
            self.running=[]

    def RefreshListData(self, view=None):
        """(Re)build columns for the given view and start a DbThread2 worker
        that fetches the main job query plus the running-elapsed query."""
        global con,update_evt,exit_evt, log_evt
        send("start_timer", (self.ID,) )
        self.busy = wx.BusyInfo("One moment please, retrieving data...",self.win)
        wx.Yield()
        self.win.Freeze()
        self.threads=[]
        self.dsbWindow=None
        self.busy =None
        self.qdef =self.win.config.cfg['queries']['job_list']
        if not view:
            view=self.view
        if not view or not (view in self.qdef['views'].keys()):
            # Unknown view: log and ask the app to exit via the exit event.
            self.error('ERROR: view "%s" in not in configured view names: \n"%s"' % (view, ','.join(self.qdef['views'].keys())))
            wx.PostEvent(self.GetEventHandler(), ExitAppEvent(self.GetId()))
        self.info('Setting view to: %s' % view)
        cols=self.qdef['views'][view]['columns']
        if 1:
            self.ClearAll()
            if 1:
                # Rebuild columns: c = (db_expr, header, width).
                for i,c in enumerate(cols):
                    self.InsertColumn(i, c[1])
                for i,c in enumerate(cols):
                    self.SetColumnWidth(i, c[2])
        n=1
        self.info('Retrieving data from database.')
        # Main query: the view SQL template is filled with "expr header" pairs.
        self.query=self.qdef['views'][view]['sql'] % ','.join(['%s %s' %(x[0],x[1]) for x in cols])
        #secondary
        self.running_query=self.qdef['views']['_running_elapsed']
        # NOTE(review): relies on the module-global Oracle connection 'con'
        # being established elsewhere before this is called (it is None at import).
        con.autocommit = True
        cur = con.cursor()
        self.threads.append(DbThread2(self, self.ID, [self.query,self.running_query], cur ,update_evt=update_evt, exit_evt=exit_evt, log_evt=log_evt))
        for t in self.threads:
            t.start()

    def setInts(self, col_desc):
        """Record indices of NUMBER columns so sort can substitute -1 for NULLs."""
        self.ints=[]
        for i,d in enumerate(col_desc):
            if 'NUMBER' in str(d[1]):
                self.ints.append(i)

    def OnKeyUP(self, event, params):
        """Keyboard shortcuts: Ctrl+L raise log, Ctrl+A select all, Ctrl+C copy."""
        controlDown = event.CmdDown()
        keyCode = event.GetKeyCode()
        if controlDown and keyCode == ord('L'):
            send('raise_log_window', (True,))
        elif controlDown and keyCode == ord('A'):
            self.SelectAll()
        elif controlDown and keyCode == ord('C'):
            self.CopySelectedToClipboard()
        event.Skip()

    def CopySelectedToClipboard(self):
        """Copy selected rows to the clipboard as pipe-delimited lines."""
        out=''
        j=0
        for i in self.GetSelectedItems():
            j +=1
            out +='|'.join([str(x) for x in self.itemDataMap[i]])+os.linesep
        Paste(out)
        self.info('Copy to clipboard done for %d records.' % j)

    def GetSelectedItems(self):
        """ Gets the selected items for the list control.
        Selection is returned as a list of selected indices,
        low to high.

        NOTE(review): when nothing is selected GetFirstSelected() returns -1,
        which is still appended, and the while condition (len != count with
        count == 0) never becomes true — potential infinite loop; verify
        callers only invoke this with a non-empty selection.
        """
        selection = []
        index = self.GetFirstSelected()
        selection.append(index)
        while len(selection) != self.GetSelectedItemCount():
            index = self.GetNextSelected(index)
            selection.append(index)
        return selection

    def SelectAll(self):
        """Mark every row in the control as selected."""
        for idx in range(self.GetItemCount()):
            self.SetItemState(idx, wx.LIST_STATE_SELECTED, wx.LIST_STATE_SELECTED)

    def onUpdateList(self, data, extra1, extra2=None):
        """pubsub callback mirroring OnUpdateFromDb for 'update_list' messages."""
        (_, _, _, update_type) = data
        print (update_type)
        if update_type in ['update_list']:
            self.UpdateList(data)
        else:
            print ('unknown event type')

    def UpdateList(self, data):
        """Install a fresh itemDataMap fetched by the DB thread, repaint the
        virtual list, then join and discard the worker threads."""
        (list_id, itemDataMap, col_desc,_) = data
        self.setInts(col_desc)
        self.itemIndexMap=[]
        self.itemDataMap={}
        if self.ID==list_id:
            self.info('Rendering data for %s.' % list_id)
            wx.Yield()
            cols=self.qdef['views'][self.view]['columns']
            if 0:
                # Dead code: columns are created once in RefreshListData.
                self.ClearAll()
                if 1:
                    for i,c in enumerate(cols):
                        self.InsertColumn(i, c[1])
                    for i,c in enumerate(cols):
                        self.SetColumnWidth(i, c[2])
            self.itemDataMap=itemDataMap
            self.itemIndexMap = self.itemDataMap.keys()
            self.SetItemCount(len(self.itemDataMap))
            if self.running:
                # Stats arrived before the row data — apply them now.
                self.UpdateRunning()
            self.setMixins()
            if self.dsbWindow:
                self.dsbWindow=None
            if self.busy:
                self.busy =None
            if 0:
                self.busybox.Iconize()
                self.busybox.setIconizedPos()
            self.win.RecreateList(None,(self, self.win.filter))
            self.win.Thaw()
            self.win.Layout()
            self.win.Show()
        else:
            pass
        for t in self.threads:
            t.join()
            del t
        self.threads=[]
        send("stop_timer", (list_id,) )
        self.info('Done.')
        del self.busy

    def sinfo(self, sender, text):
        """Log an INFO entry attributed to an explicit sender name."""
        ts=datetime.datetime.now().strftime("%H:%M:%S.%f")
        self.win.status(text)
        send('add_log',(['INFO',sender,text,ts],))

    def info(self, text):
        """Log an INFO entry attributed to this class; also show in status bar."""
        ts=datetime.datetime.now().strftime("%H:%M:%S.%f")
        self.win.status(text)
        send('add_log',(['INFO',type(self).__name__,text,ts],))

    def error(self, text):
        """Log an ERROR entry attributed to this class (no status-bar echo)."""
        ts=datetime.datetime.now().strftime("%H:%M:%S.%f")
        send('add_log',(['ERROR',type(self).__name__,text,ts],))

    def setImage(self, status):
        """Replace the image list with the single icon matching a status string
        of the form '<n>-<STATUS>[...]'."""
        self.il = wx.ImageList(16, 16)
        if '-' in str(status):
            status=status.split('-')[1].split('[')[0]
        self.idx1=self.il.Add(self.idx[status])
        self.SetImageList(self.il, wx.IMAGE_LIST_SMALL)

    def setMixins (self):
        """(Re)initialise the auto-width and column-sorter mixins after a data load."""
        self.SetItemCount(len(self.itemIndexMap))
        listmix.ListCtrlAutoWidthMixin.__init__(self)
        listmix.ColumnSorterMixin.__init__(self, 55)

    def set_data(self):
        """Build an index->row dict describing files in self.save_to_dir.

        NOTE(review): relies on OrderedDict and self.save_to_dir, neither of
        which is defined in this chunk — appears carried over from another
        list class; confirm before calling.
        """
        flist=OrderedDict()
        i=0
        os.chdir(self.save_to_dir)
        for f in filter(os.path.isfile,os.listdir(self.save_to_dir)):
            d= datetime.datetime.fromtimestamp(os.path.getmtime(f))
            dt= d.strftime('%Y-%m-%d %H:%M:%S')
            # File naming convention: '<cv>;<tmpl>;<name>.<ext>'.
            cv, tmpl,name= f.split(';')
            name=name.split('.')[0]
            type='Copy'
            if tmpl.startswith('CSV'):
                type='Load'
            if '.CSV_' in tmpl:
                type='Spool'
            flist[i] = [name.strip(' '),dt,tmpl.split('.')[1],cv.split('.')[0],cv.split('.')[1],type,tmpl.split('.')[0],self.save_to_dir,f]
            i +=1
        self.data[self.current_list]= flist
        self.parent.itemDataMap=self.data[self.current_list]

    def get_second_elem(self,iterable):
        """Sort key helper: second element of (index, value), NULL-coerced."""
        return self.sub(iterable[1], self.col_id)

    def nvl(self,val, col_id):
        """Replace a falsy value with '' (string NVL)."""
        if val: return val
        else:
            return ''

    def sub(self, val, col_id):
        """Replace a falsy value with -1 for numeric columns, '' otherwise,
        so mixed None/value columns sort without TypeError."""
        if val: return val
        else:
            if col_id in self.ints:
                return -1
            else:
                return ''

    def OnColClick(self,evt):
        """Track the clicked column and toggle sort direction on repeat clicks."""
        colid=evt.GetColumn()
        if colid == self.col_id:
            self.if_reverse=not self.if_reverse
        else:
            self.if_reverse=False
            self.col_id=colid
        evt.Skip()

    def gen_bind(self, type, instance, handler, *args, **kwargs):
        """Bind an event to a handler that also receives extra args via closure."""
        self.Bind(type, lambda event: handler(event, *args, **kwargs), instance)

    def OnItemSelected(self, event, params):
        """Show the selected row's id/name in the status bar; enable e-mail button."""
        item=event.GetItem()
        self.currentItem=item.Id
        self.win.status("[%s] %s " % (item.Id,self.GetItem(item.Id, 1).GetText()))
        self.win.b_email.Enable(True)

    def OnDoubleClick(self, event):
        """Open (or reuse) the StatusLogReport window for the activated row."""
        # Collect the full row as {column header: cell text}.
        row={}
        for i in range(self.GetColumnCount()):
            c=self.GetColumn(i)
            cname=c.GetText()
            cval=self.getColumnText(self.currentItem,i)
            row[cname]=cval
        data=[row,int(self.getColumnText(self.currentItem,0)),None]
        if not self.logwin:
            self.logwin=StatusLogReport(self,self.win, wx.ID_ANY, (self.log,d),data, 'Log Report', size=(1400,800))
        else:
            self.Disable()
            send("reset_log_window", (data,) )
        if not self.logwin.IsShown() :
            self.logwin.Center()
            self.logwin.Show()
        self.logwin.SetFocus()
        event.Skip()

    def OnItemActivated(self, event):
        """No-op; activation is handled by OnDoubleClick."""
        pass

    def getColumnText(self, index, col):
        """Return the text of cell (index, col)."""
        item = self.GetItem(index, col)
        return item.GetText()

    def OnItemDeselected(self, evt):
        # NOTE(review): evt.m_itemIndex is classic-wx style and self.log here
        # is expected to accept extra= — confirm the logger type.
        self.log.write("OnItemDeselected: %s" % evt.m_itemIndex,extra=d)

    #---------------------------------------------------
    # These methods are callbacks for implementing the
    # "virtualness" of the list...
    def OnGetItemText(self, item, col):
        """Virtual-list callback: text for cell (item, col)."""
        index=list(self.itemIndexMap)[item]
        s = self.itemDataMap[index][col]
        if 0:
            # Dead experiment: derive column 3 display from a flag column.
            if col == 3:
                flag= self.itemDataMap[index][10]
                if flag:
                    s = '2-%s' % flag
                else:
                    s = self.itemDataMap[index][col]
            else:
                s = self.itemDataMap[index][col]
        else:
            s = self.itemDataMap[index][col]
        return str(s)

    def OnGetItemImage(self, item):
        """Virtual-list callback: status icon index for the row."""
        index=list(self.itemIndexMap)[item]
        if len(self.itemDataMap[index])>3:
            # Status strings look like '<n>-<STATUS>[...]'.
            status=self.itemDataMap[index][3]
            if '-' in str(status) and status.split('-')[1].split('[')[0] in self.idx.keys():
                return self.idx[status.split('-')[1].split('[')[0]]
            else:
                return self.idx['FAILED']
        else:
            # NOTE(review): missing 'return' — short rows fall through and
            # yield None instead of the SUCCESS image index.
            self.idx['SUCCESS']

    def OnGetItemAttr(self, item):
        """Virtual-list callback: row attribute (background colour) by status."""
        index=list(self.itemIndexMap)[item]
        st=self.itemDataMap[index][3]
        #http://www.rapidtables.com/web/color/RGB_Color.htm
        if '-' in str(st):
            status=st.split('-')[1].split('[')[0]
            if status.upper() in self.colors.keys():
                return self.attr[status.upper()]
            else:
                return self.attr['FAILED']
            if 0:
                # Dead code: hard-coded palette chain, unreachable after returns.
                if status=="FAILED":
                    return self.attr_not_started
                elif status=="RUNNING":
                    return self.attr_long_running
                elif status=="NOT RUN":
                    return self.attr3
                elif status=="SUCCESS":
                    return self.attr4
                elif status=="NOT STARTED":
                    return self.attr_not_started
                elif status=="LONG RUNNING":
                    return self.attr_long_running
                else:
                    return self.attr1
        else:
            return self.attr['SUCCESS']

    #---------------------------------------------------
    # Matt C, 2006/02/22
    # Here's a better SortItems() method --
    # the ColumnSorterMixin.__ColumnSorter() method already handles the ascending/descending,
    # and it knows to sort on another column if the chosen columns have the same value.
    def Sort(self):
        """NOTE(review): sorts into sorted_x but never uses the result —
        appears vestigial; SortItems() is the live implementation."""
        import operator
        items=[(x,v[self.col_id]) for x,v in self.itemDataMap.items()]
        sorted_x = sorted(items, key=operator.itemgetter(1), reverse=self.if_reverse)
        self.Refresh()

    def SortItems(self,sorter=cmp):
        """Re-derive itemIndexMap ordered by the current sort column.

        The sorter argument is unused; ordering uses self.col_id /
        self.if_reverse with NULLs coerced by self.sub()."""
        import operator
        items=[(x,v[self.col_id]) for x,v in self.itemDataMap.items()]
        sorted_x = sorted(items, key=self.get_second_elem, reverse=self.if_reverse)
        self.itemIndexMap=[x[0] for x in sorted_x]
        if 0:
            # Dead alternative implementations kept for reference.
            self.itemIndexMap=[x[0] for x in sorted(self.itemDataMap.items(), key=operator.itemgetter(self.col_id-1), reverse= self.if_reverse)]
            pprint(sorted(self.itemDataMap.items(), key=operator.itemgetter(self.col_id-1), reverse= self.if_reverse))
            print([x[0] for x in sorted(self.itemDataMap.items(), key=operator.itemgetter(self.col_id-1), reverse= self.if_reverse)])
            pprint([self.itemDataMap[x][self.col_id] for x in [x[0] for x in sorted(self.itemDataMap.items(), key=operator.itemgetter(1,self.col_id), reverse= self.if_reverse)]])
        self.Refresh()

    # Used by the ColumnSorterMixin, see wx/lib/mixins/listctrl.py
    def SortListItems(self, col=-1, ascending=1):
        """Deliberate no-op: sorting is driven by SortItems() instead."""
        pass

    def GetListCtrl(self):
        """ColumnSorterMixin hook: this control sorts itself."""
        return self

    # Used by the ColumnSorterMixin, see wx/lib/mixins/listctrl.py
    def GetSortImages(self):
        """ColumnSorterMixin hook: (descending, ascending) arrow image indices."""
        return (self.sm_dn, self.sm_up)

    def get_source_db_connect_string(self,cfg, spool_spec):
        """Build 'user/password@dbserver/SID' for spool_spec['from'], reading
        the password from the %JOB0connectors0<name>% environment variable."""
        global JOB_NAME
        assert 'connectors' in cfg.keys(), "'connectors' section is missing in config."
        assert 'from' in spool_spec.keys(), "'from' definition is missing in spool specification."
        assert spool_spec['from'] in cfg['connectors'].keys(), 'database "%s" is missing in "connectors" configuration.' % spool_spec['from']
        cli_var_name='%s0%s0%s' % (JOB_NAME,'connectors', spool_spec['from'])
        assert cli_var_name.upper() in [x.upper() for x in os.environ.keys()] , 'Source db password is not set.\nUse "set %s=<passwd>".' % cli_var_name
        conn = cfg['connectors'][spool_spec['from']].split('@')
        assert len(conn)==2, 'Wrong connector format. Should be "user@dbserver/SID"'
        pwd=os.environ[cli_var_name]
        return ('/%s@' % pwd). join (conn)
class TableSpooler:
    """Spools whole tables to CSV files using a multiprocessing pool.

    Runs ExtractData() on a background thread (Start) so the GUI stays
    responsive; each (db, schema, table) triple becomes one SELECT * query
    executed by a pool worker via extract_query_data().
    """
    def __init__(self, win, table_list, out_dir, log):
        # win: owning frame; table_list: iterable of (db, schema, table);
        # out_dir: destination directory for CSV files.
        self.win = win
        self.table_list = table_list
        self.out_dir = out_dir
        self.log=log

    def Start(self):
        """Begin extraction on a new worker thread."""
        self.keepGoing = self.running = True
        _thread.start_new_thread(self.Run, ())

    def Stop(self):
        """Request the worker loop to stop (cooperative flag)."""
        self.keepGoing = False

    def IsRunning(self):
        """True while the worker thread has not finished."""
        return self.running

    def Run(self):
        """Thread body: run the extraction, then mark the spooler idle."""
        self.ExtractData()
        if 0:
            # Dead code from the wx threading demo this was based on.
            while self.keepGoing:
                # We communicate with the UI by sending events to it. There can be
                # no manipulation of UI objects from the worker thread.
                evt = UpdateBarEvent(barNum = 1, value = 1)
                wx.PostEvent(self.win, evt)
        self.running = False

    def i(self,msg):
        """Shorthand INFO logging with the shared extra-dict."""
        global d
        self.log.info(msg, extra=d)

    def get_nls_params(self, cfg, spool_spec):
        """Return the NLS parameter string named by spool_spec['nls_params']."""
        assert 'nls_param_sets' in cfg.keys(), "'nls_param_sets' section is missing in config."
        assert 'nls_params' in spool_spec.keys(), "'nls_params' definition is missing in spool specification."
        assert spool_spec['nls_params'] in cfg['nls_param_sets'].keys(), 'nls_param_set "%s" is missing in nls_param_sets configuration.' % spool_spec['nls_params']
        return cfg['nls_param_sets'][spool_spec['nls_params']]

    def get_source_db_connect_string(self,cfg, spool_spec):
        """Build 'user/password@dbserver/SID' for spool_spec['from'] with the
        password taken from the %JOB0connectors0<name>% environment variable.
        (Duplicates JobListCtrl.get_source_db_connect_string.)"""
        global JOB_NAME
        assert 'connectors' in cfg.keys(), "'connectors' section is missing in config."
        assert 'from' in spool_spec.keys(), "'from' definition is missing in spool specification."
        assert spool_spec['from'] in cfg['connectors'].keys(), 'database "%s" is missing in "connectors" configuration.' % spool_spec['from']
        cli_var_name='%s0%s0%s' % (JOB_NAME,'connectors', spool_spec['from'])
        assert cli_var_name.upper() in [x.upper() for x in os.environ.keys()] , 'Source db password is not set.\nUse "set %s=<passwd>".' % cli_var_name
        conn = cfg['connectors'][spool_spec['from']].split('@')
        assert len(conn)==2, 'Wrong connector format. Should be "user@dbserver/SID"'
        pwd=os.environ[cli_var_name]
        return ('/%s@' % pwd). join (conn)

    def ExtractData(self):
        """Build one query per table and fan them out over a process pool.

        NOTE(review): several names here are not defined in this chunk —
        'unlink' (presumably os.unlink), 'traceback' (not imported in view),
        'config', 'pool_size', 'opt', 'extract_query_data' and
        'start_process' — confirm they exist at module level. Also note
        pool_size is clamped to len(queries) and then immediately
        overwritten by cpu_count()*2-2 below, making the clamp ineffective.
        """
        global pool_size, config
        self.i('start')
        queries=[]
        assert self.out_dir, 'out_dir is not set'
        for k,v in enumerate(self.table_list):
            db, schema, table= v
            if db and schema and table:
                q= "SELECT * FROM %s.%s" % (schema, table);
                fn=os.path.join(self.out_dir,'%s.%s.%s.csv' % ( db, schema, table))
                #delete file if exists
                #next step wait for IN_CREATE event for ts_out_dir
                if os.path.isfile(fn):
                    try:
                        unlink(fn)
                    except Exception as err:
                        tb = traceback.format_exc()
                        exc_type, exc_obj, exc_tb = sys.exc_info()
                        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
                        log.error('%s %s %s' % (exc_type, fname, exc_tb.tb_lineno), extra=d)
                        raise
                nls=self.get_nls_params(config.cfg,config.cfg['profile']['default'])
                #get password from environment
                conn = self.get_source_db_connect_string(config.cfg,config.cfg['profile']['default'])
                queries.append([conn,fn,q,nls])
            else:
                log.error('Table name is not set: %s, %s, %s' % (db, schema, table), extra=d)
        if len(queries):
            m= multiprocessing.Manager()
            if pool_size>len(queries):
                pool_size=len(queries)
            inputs = list([(i,q, opt) for i,q in enumerate(queries)])
            pool_size = multiprocessing.cpu_count()*2-2
            self.pool = m.Pool(processes=pool_size,
                initializer=start_process,
                )
            pool_outputs = self.pool.map(extract_query_data, inputs)
            self.pool.close() # no more tasks
            self.pool.join()  # wrap up current tasks
            print ('Total rows extracted : %d' % sum([r[0] for r in pool_outputs]))
            job_status={'spool_status':[r[2] for r in pool_outputs],'spool_files':[r[1] for r in pool_outputs]}
            print ('-'*60)
            for r in pool_outputs:
                log.info('Status: %s' % (r[2]),extra=d)
            for r in pool_outputs:
                print('%s' % (r[1]))
            print ('-'*60)
        else:
            log.error('Table list is empty',extra=d)
def import_module(filepath):
    """Load and return a Python module from an explicit .py/.pyc file path.

    The original left py_mod unbound for any other extension, turning the
    final 'return py_mod' into a NameError; now raises ValueError instead.

    Raises:
        AssertionError: if filepath does not exist.
        ValueError: if the extension is neither .py nor .pyc.
    """
    mod_name, file_ext = os.path.splitext(os.path.split(filepath)[-1])
    assert os.path.isfile(filepath), 'File %s does not exists.' % filepath
    ext = file_ext.lower()
    if ext == '.py':
        py_mod = imp.load_source(mod_name, filepath)
    elif ext == '.pyc':
        py_mod = imp.load_compiled(mod_name, filepath)
    else:
        raise ValueError('Unsupported module extension: %s' % file_ext)
    return py_mod
def open_settings(filename):
    """Open the wx.FileConfig settings file, seeding any missing entries
    with their defaults, and flush to disk only if something was added."""
    conf = wx.FileConfig(localFilename = filename)

    def create_entry(entry_name, entry_value):
        # Write a typed default for a missing entry; True if it was written.
        # (int is tested before bool on purpose, matching the original order —
        # bool values therefore go through WriteInt.)
        if conf.HasEntry(entry_name):
            return False
        if isinstance(entry_value, (str, bytes)):
            conf.Write(entry_name, entry_value)
        elif isinstance(entry_value, int):
            conf.WriteInt(entry_name, entry_value)
        elif isinstance(entry_value, bool):
            conf.WriteBool(entry_name, entry_value)
        else:
            conf.Write(entry_name, repr(entry_value))
        return True

    defaults = (
        ('Language/Catalog', getdefaultlocale()[0]),
        ('GUI/load_default_perspective_on_start', True),
        ('GUI/save_default_perspective_on_exit', True),
        ('GUI/perspective', ''),
        ('GUI/load_default_state_on_start', True),
        ('GUI/save_default_state_on_exit', True),
        ('GUI/fullscreen_style', default_fullscreen_style),
        ('GUI/centre_on_screen', repr((False, wx.BOTH))),
        ('GUI/default_open_path', '.'),
    )
    flag_flush = False
    for entry_name, entry_value in defaults:
        if create_entry(entry_name, entry_value):
            flag_flush = True
    if flag_flush:
        conf.Flush()
    return conf
def chunks(cur):
    """Yield rows from DB cursor *cur* one at a time.

    Each fetched row is wrapped in a single-element list; iteration stops
    at the first falsy fetch result (cx_Oracle returns None at end of set).
    """
    global log, d
    fetch = cur.fetchone
    row = fetch()
    while row:
        yield [row]
        row = fetch()
#----------------------------------------------------------------------
# The main window
#----------------------------------------------------------------------
# This is where you populate the frame with a panel from the demo.
# original line in runTest (in the demo source):
# win = TestPanel(nb, log)
# this is changed to:
# self.win=TestPanel(self,log)
#----------------------------------------------------------------------
def start_process():
    """Pool initializer: log the name of the worker process being started."""
    global log
    worker_name = multiprocessing.current_process().name
    log.info('Starting ' + worker_name, extra=d)
def extract_query_data(data):
    """Worker-pool task: run one spool query and write its rows to a file.

    Parameters (packed into *data* as ``(id, query, opt)``):
        id:    ordinal of this task within the pool inputs (unused here).
        query: ``(conn, fn, q, nls)`` -- connect string, output file name,
               SELECT text, and NLS session settings string.
        opt:   options object; this function reads ``.column_delimiter``,
               ``.array_size`` and ``.compress``.

    Returns:
        ``[cnt, fn, status]`` -- rows written, output file name, and 0 on
        success; all three are -1 when any exception was caught.
    """
    global log, d
    #d = {'iteration': 0,'pid':os.getpid(), 'rows':0}
    id, query, opt=data
    status=1
    conn,fn,q, nls = query
    #evt = UpdateBarEvent(barNum = 1, value = 1)
    #wx.PostEvent(win, evt)
    try:
        #print(conn)
        con = cx_Oracle.connect(conn)
        log.info('Connected.', extra=d)
        cur = con.cursor()
        # Apply the (whitespace-normalized) NLS settings to this session so
        # date/number formatting matches what the spool consumers expect.
        nls_cmd="ALTER SESSION SET %s" % ' '.join(nls.split())
        cur.execute(nls_cmd)
        #print ('SELECt * FROM (%s) WHERE 1=2' % q)
        # WHERE 1=2 fetches no rows -- executed only to populate
        # cur.description with the query's column names.
        cur.execute('SELECt * FROM (%s) WHERE 1=2' % q)
        # Build a single-column SELECT that concatenates all columns with the
        # configured delimiter, so every fetched row is one pre-joined string.
        sel= 'SELECT "' + ("\"||'%s'||\"" % opt.column_delimiter[0]). join([k[0] for k in cur.description]) + '" data FROM ( %s)' % q
        header = opt.column_delimiter[0].join([k[0] for k in cur.description])
        cur.arraysize=opt.array_size
        cur.execute(sel)
        #print('done')
        cnt=0
        if opt.compress:
            # gzip output path; NOTE(review): the header line is written only
            # in the uncompressed branch below -- confirm that is intentional.
            fn='%s.gz' % fn
            with gzip.open(fn, 'wb') as f_out:
                log.info('Strating data extract.', extra=d)
                for i, chunk in enumerate(chunks(cur)):
                    d['iteration']=i
                    cnt+=len(chunk)
                    d['rows']=cnt
                    try:
                        #log.info('Starting ' + multiprocessing.current_process().name, extra=d)
                        f_out.write('\n'.join([row[0] for row in chunk]))
                        f_out.write('\n')
                        log.info('extracted into %s' % os.path.basename(fn), extra=d )
                    except Exception as err:
                        tb = traceback.format_exc()
                        #print (tb)
                        exc_type, exc_obj, exc_tb = sys.exc_info()
                        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
                        #print(exc_type, fname, exc_tb.tb_lineno)
                        # Re-raise so the outer handler records the failure.
                        raise
        else:
            with open(fn, 'wb') as fh:
                fh.seek(0)
                #print (header)
                # Plain file output: UTF-16-encoded header line first.
                fh.write (header.encode('utf-16'))
                for i, chunk in enumerate(chunks(cur)):
                    d['iteration']=i
                    cnt+=len(chunk)
                    d['rows']=cnt
                    fh.write(('\n'.join([row[0] for row in chunk])).encode('utf-16'))
                    fh.write('\n'.encode('utf-16'))
                    #log.info('%d rows added to %s' % (cnt,os.path.basename(fn)), extra=d )
        log.info('%d rows extracted' % cnt, extra=d )
        log.info('Finished extract.', extra=d)
        status=0
        cur.close()
        con.close()
    except Exception as err:
        #import pickle
        #pickled = pickle.dumps(err)
        #pickle.loads(pickled)
        err=str(err)
        # NOTE(review): this rebinds the name 'traceback', shadowing the
        # traceback module for the remainder of this handler.
        exc, er, traceback = sys.exc_info()
        print ('#'*50)
        print ('#'*20, 'EXCEPTION', '#'*19)
        print ('#'*50)
        print(exc, traceback.tb_frame.f_code.co_filename,traceback.tb_lineno, er)
        print(err)
        print ('#'*50)
        print ('#'*50)
        print ('#'*50)
        # Sentinel result: the caller sums r[0] and prints r[1]/r[2], so -1
        # marks a failed task without raising across the pool boundary.
        (cnt, fn, status) = (-1,-1,-1)
        #print (exc, traceback.tb_frame.f_code.co_filename, 'ERROR ON LINE', traceback.tb_lineno, '\n', err)
        #estr= '|'.join([exc, traceback.tb_frame.f_code.co_filename, 'ERROR ON LINE', traceback.tb_lineno, er])
        #log.error(estr, extra=d)
    log.info('Done.' , extra=d)
    return [cnt, fn, status]
def unlink(dirname):
    """Remove the file at *dirname*; log an error on unsupported platforms."""
    platform = os.name
    if platform == "posix":
        os.unlink(dirname)
    elif platform == "nt":
        os.remove(dirname)
    else:
        log.error('Cannot unlink. Unknown OS.', extra=d)
def delete_file(filename):
    """Delete *filename*; log an error when the OS is neither posix nor nt."""
    removers = {"posix": os.unlink, "nt": os.remove}
    remover = removers.get(os.name)
    if remover is not None:
        remover(filename)
    else:
        log.error('Cannot unlink. Unknown OS.', extra=d)
class ListCtrlPanel(wx.Panel):
    """Panel hosting a single list control plus the sorter-mixin state maps.

    The panel is created empty; the list control is attached afterwards via
    SetListCtrl(), which also initializes column sorting.
    """
    def __init__(self, *args ):
        # Unique widget id for this panel instance.
        self.ID=wx.NewId()
        wx.Panel.__init__(self,*args, style=wx.WANTS_CHARS)
        # Attached later through SetListCtrl().
        self.list=None
        # itemDataMap / itemIndexMap are the data structures
        # ColumnSorterMixin expects on the sorting host.
        self.itemDataMap={}
        self.itemIndexMap = []
    #def setColSorter(self):
    def GetListCtrl(self):
        """Return the attached list control (required by ColumnSorterMixin)."""
        return self.list
    def SetListCtrl(self, list):
        """Attach *list* and initialize column sorting for its columns.

        NOTE(review): ColumnSorterMixin.__init__ is invoked on self even
        though this class does not inherit the mixin -- presumably relying
        on the mixin storing its state as plain attributes; confirm.
        """
        self.list=list
        listmix.ColumnSorterMixin.__init__(self,self.list.GetColumnCount())
def scale_bitmap(bitmap, width, height):
    """Return a new wx.Bitmap: *bitmap* resampled to *width* x *height*
    with high-quality scaling."""
    scaled = bitmap.ConvertToImage().Scale(width, height, wx.IMAGE_QUALITY_HIGH)
    return wx.Bitmap(scaled)
class wxHTML(html.HtmlWindow):
    """HtmlWindow that opens clicked links in the system web browser
    instead of navigating in place."""
    def OnLinkClicked(self, link):
        href = link.GetHref()
        webbrowser.open(href)
class StatusNB(wx.Notebook):
    """Notebook with four job-detail tabs: Message Log, Parent Jobs,
    Child Jobs and Exec history.

    Each tab hosts a project list-control panel plus a refresh button wired
    through gen_bind; the panels are collected in self.ctlist in tab order.
    Communicates with the rest of the app via pubsub send()/sub().
    """
    def __init__(self, parent, id, data, log):
        wx.Notebook.__init__(self, parent, id, size=(21,21), style=
                             wx.BK_DEFAULT
                             #wx.BK_TOP
                             #wx.BK_BOTTOM
                             #wx.BK_LEFT
                             #wx.BK_RIGHT
                             # | wx.NB_MULTILINE
                             )
        self.log = log
        self.parent=parent
        #self.parent.timer.Start(100)
        # List-control panels, one per tab, in tab order.
        self.ctlist=[]
        if 1:
            # --- Tab 0: Message Log (with minutes-lookback dropdown) ---
            p = wx.Panel(self, -1)
            p.nb=self
            obj = ListCtrl_MessageLog.MessageLogListCtrlPanel(p, data, log)
            sizer = wx.BoxSizer(wx.VERTICAL)
            h_sizer = wx.BoxSizer(wx.HORIZONTAL)
            self.mins=['5 min','10 min','15 min','25 min','50 min']
            self.dropdown=wx.ComboBox(p,choices=self.mins,value='5 min',style=wx.CB_READONLY)
            h_sizer.Add(self.dropdown, 0, wx.ALIGN_LEFT|wx.LEFT)
            imageFile = os.path.join(home,"images/refresh_icon_16_grey2.png")
            image1 = wx.Image(imageFile, wx.BITMAP_TYPE_ANY).ConvertToBitmap()
            r_btn=wx.BitmapButton(p, id=-1, bitmap=image1,size = (image1.GetWidth()+6, image1.GetHeight()+6))
            #self.Bind(wx.EVT_BUTTON, self.OnRefreshLog, r_btn)
            self.gen_bind(wx.EVT_BUTTON,r_btn, self.OnRefreshMessageLog,(obj.getListID(),))
            h_sizer.Add(r_btn, 0, wx.ALIGN_LEFT|wx.LEFT)
            sizer.Add(h_sizer, 0, wx.ALIGN_LEFT|wx.LEFT)
            sizer.Add(obj, 1, wx.EXPAND)
            self.ctlist.append(obj)
            p.SetSizer(sizer)
            self.AddPage(p, 'Message Log')
            # Shared image list for all tab icons; reused by later tabs.
            il = wx.ImageList(16, 16)
            idx1 = il.Add(wx.Bitmap(os.path.join(home,"images","file_extension_log_16.png"),wx.BITMAP_TYPE_PNG))
            #self.AssignImageList(il)
            # now put an image on the first tab we just created:
            self.SetPageImage(0, idx1)
        if 1:
            # --- Tab 1: Parent Jobs ---
            p = wx.Panel(self, -1)
            p.nb=self
            obj = ListCtrl_ParentsOf.ParentsOfListCtrlPanel(p, data, log)
            sizer = wx.BoxSizer(wx.VERTICAL)
            h_sizer = wx.BoxSizer(wx.HORIZONTAL)
            #self.mins=['5 min','10 min','15 min','25 min','50 min']
            #self.dropdown=wx.ComboBox(p,choices=self.mins,value='5 min',style=wx.CB_READONLY)
            #h_sizer.Add(self.dropdown, 0, wx.ALIGN_LEFT|wx.LEFT)
            imageFile = os.path.join(home,"images/refresh_icon_16_grey2.png")
            image1 = wx.Image(imageFile, wx.BITMAP_TYPE_ANY).ConvertToBitmap()
            r_btn=wx.BitmapButton(p, id=-1, bitmap=image1,size = (image1.GetWidth()+6, image1.GetHeight()+6))
            #self.Bind(wx.EVT_BUTTON, self.OnRefreshLog, r_btn,)
            self.gen_bind(wx.EVT_BUTTON,r_btn, self.OnRefreshParentList,(obj.getListID(),))
            h_sizer.Add(r_btn, 0, wx.ALIGN_LEFT|wx.LEFT)
            sizer.Add(h_sizer, 0, wx.ALIGN_LEFT|wx.LEFT)
            sizer.Add(obj, 1, wx.EXPAND)
            self.ctlist.append(obj)
            p.SetSizer(sizer)
            self.AddPage(p, 'Parent Jobs')
            #il = wx.ImageList(16, 16)
            idx1 = il.Add(wx.Bitmap(os.path.join(home,"images","node_tree_16.png"),wx.BITMAP_TYPE_PNG))
            #self.AssignImageList(il)
            # now put an image on the first tab we just created:
            self.SetPageImage(1, idx1)
        if 1:
            # --- Tab 2: Child Jobs ---
            p = wx.Panel(self, -1)
            p.nb=self
            obj = ListCtrl_ChildrenOf.ChildrenOfListCtrlPanel(p, data, log)
            sizer = wx.BoxSizer(wx.VERTICAL)
            h_sizer = wx.BoxSizer(wx.HORIZONTAL)
            #self.mins=['5 min','10 min','15 min','25 min','50 min']
            #self.dropdown=wx.ComboBox(p,choices=self.mins,value='5 min',style=wx.CB_READONLY)
            #h_sizer.Add(self.dropdown, 0, wx.ALIGN_LEFT|wx.LEFT)
            imageFile = os.path.join(home,"images","refresh_icon_16_grey2.png")
            image1 = wx.Image(imageFile, wx.BITMAP_TYPE_ANY).ConvertToBitmap()
            r_btn=wx.BitmapButton(p, id=-1, bitmap=image1,size = (image1.GetWidth()+6, image1.GetHeight()+6))
            #self.Bind(wx.EVT_BUTTON, self.OnRefreshLog, r_btn,)
            self.gen_bind(wx.EVT_BUTTON,r_btn, self.OnRefreshParentList,(obj.getListID(),))
            h_sizer.Add(r_btn, 0, wx.ALIGN_LEFT|wx.LEFT)
            sizer.Add(h_sizer, 0, wx.ALIGN_LEFT|wx.LEFT)
            sizer.Add(obj, 1, wx.EXPAND)
            self.ctlist.append(obj)
            p.SetSizer(sizer)
            self.AddPage(p, 'Child Jobs')
            #il = wx.ImageList(16, 16)
            idx1 = il.Add(wx.Bitmap(os.path.join(home,"images","node_tree_16.png"),wx.BITMAP_TYPE_PNG))
            #idx1 = il.Add(images.Smiles.GetBitmap())
            self.AssignImageList(il)
            # now put an image on the first tab we just created:
            self.SetPageImage(2, idx1)
        if 1:
            # --- Tab 3: Exec history ---
            p = wx.Panel(self, -1)
            p.nb=self
            #pprint(data)
            #e(0)
            obj = ListCtrl_ExecHistory.ExecHistoryListCtrlPanel(p, data, log)
            sizer = wx.BoxSizer(wx.VERTICAL)
            h_sizer = wx.BoxSizer(wx.HORIZONTAL)
            #self.mins=['5 min','10 min','15 min','25 min','50 min']
            #self.dropdown=wx.ComboBox(p,choices=self.mins,value='5 min',style=wx.CB_READONLY)
            #h_sizer.Add(self.dropdown, 0, wx.ALIGN_LEFT|wx.LEFT)
            imageFile = os.path.join(home,"images","refresh_icon_16_grey2.png")
            image1 = wx.Image(imageFile, wx.BITMAP_TYPE_ANY).ConvertToBitmap()
            r_btn=wx.BitmapButton(p, id=-1, bitmap=image1,size = (image1.GetWidth()+6, image1.GetHeight()+6))
            #self.Bind(wx.EVT_BUTTON, self.OnRefreshLog, r_btn,)
            self.gen_bind(wx.EVT_BUTTON,r_btn, self.OnRefreshParentList,(obj.getListID(),))
            h_sizer.Add(r_btn, 0, wx.ALIGN_LEFT|wx.LEFT)
            sizer.Add(h_sizer, 0, wx.ALIGN_LEFT|wx.LEFT)
            sizer.Add(obj, 1, wx.EXPAND)
            self.ctlist.append(obj)
            p.SetSizer(sizer)
            self.AddPage(p, 'Exec history')
            #il = wx.ImageList(16, 16)
            idx1 = il.Add(wx.Bitmap(os.path.join(home,"images","node_tree_16.png"),wx.BITMAP_TYPE_PNG))
            #idx1 = il.Add(images.Smiles.GetBitmap())
            self.AssignImageList(il)
            # now put an image on the first tab we just created:
            self.SetPageImage(3, idx1)
        if 0:
            # Disabled experimental status-log tab.
            win = ListCtrl_StatusLog.StatusLogListCtrlPanel(self, data, log)
            self.AddPage(win, 'st_log')
            il = wx.ImageList(16, 16)
            idx1 = il.Add(images.Smiles.GetBitmap())
            self.AssignImageList(il)
            # now put an image on the first tab we just created:
            self.SetPageImage(1, idx1)
        # Stop the owning frame's timers once all tabs are built.
        self.parent.stopTimers()
        self.Bind(wx.EVT_NOTEBOOK_PAGE_CHANGED, self.OnPageChanged)
        self.Bind(wx.EVT_NOTEBOOK_PAGE_CHANGING, self.OnPageChanging)
        # Pubsub subscription: reset state when the log window is reset.
        sub(self.ResetList, "reset_log_window")
    def info(self, text):
        """Publish *text* as an INFO entry to the shared application log."""
        ts=datetime.datetime.now().strftime("%H:%M:%S.%f")
        send('add_log',(['INFO',type(self).__name__,text, ts],))
    def ResetList(self,data, extra1, extra2=None):
        """Pubsub handler: reset the minutes dropdown and retitle the parent
        frame from the selected job row (expects keys SEQ, STATUS,
        PROCEDURE_NAME)."""
        self.dropdown.SetValue('5 min')
        ([row,_,_],) = data
        self.parent.SetTitle('Job %s | %s | %s ' % (row['SEQ'],row['STATUS'].split('-')[1].split('[')[0], row['PROCEDURE_NAME']))
    def gen_bind(self, type, instance, handler, *args, **kwargs):
        """Bind *handler* to *instance* for event *type*, forwarding any
        extra positional/keyword arguments to the handler."""
        self.Bind(type, lambda event: handler(event, *args, **kwargs), instance)
    def OnRefreshMessageLog(self, event, params):
        """Refresh the message-log list for the selected lookback window."""
        [list_id]= params
        # '5 min' -> '5'
        min= self.getMinutes().split(' ')[0]
        #print (list_id, min)
        send("refresh_log", [list_id, min])
    def OnRefreshParentList(self, event, params):
        """Shared refresh handler for the parent/child/history tabs."""
        self.info ('OnRefreshParentList')
        send("refresh_log", params)
    def getMinutes(self):
        """Return the dropdown's current value, e.g. '5 min'."""
        return self.dropdown.GetValue()
    def makeColorPanel(self, color):
        """Return a panel containing a colored child panel that tracks
        resizes of its container."""
        p = wx.Panel(self, -1)
        win = ColorPanel.ColoredPanel(p, color)
        p.win = win
        def OnCPSize(evt, win=win):
            win.SetPosition((0,0))
            win.SetSize(evt.GetSize())
        p.Bind(wx.EVT_SIZE, OnCPSize)
        return p
    def OnPageChanged(self, event):
        """Log (asynchronously) that the active tab changed."""
        old = event.GetOldSelection()
        new = event.GetSelection()
        sel = self.GetSelection()
        #self.log.info('OnPageChanged, old:%d, new:%d, sel:%d\n' % (old, new, sel),extra=d)
        wx.CallAfter(self.info,('Tab changed.'))
        event.Skip()
    def OnPageChanging(self, event):
        """Allow the page change to proceed (no veto logic)."""
        old = event.GetOldSelection()
        new = event.GetSelection()
        sel = self.GetSelection()
        #self.log.info('OnPageChanging, old:%d, new:%d, sel:%d\n' % (old, new, sel),extra=d)
        event.Skip()
#POP UP
class StatusLogReport(sized_controls.SizedFrame):
    """Pop-up frame showing the StatusNB job-detail notebook plus an
    enhanced status bar (status icon, progress gauge, cancel/close buttons).

    The frame is hidden rather than destroyed on close/Escape so it can be
    re-shown cheaply; per-list wx.Timers drive the busy gauge while DB
    requests are in flight. Communicates via pubsub send()/sub().
    """
    def __init__(self, parent, win, id, log,data,title, pos=wx.DefaultPosition,
                 size=wx.DefaultSize, style=wx.CLIP_CHILDREN|wx.RESIZE_BORDER|wx.CLOSE_BOX|wx.FRAME_FLOAT_ON_PARENT|wx.FRAME_SHAPED|wx.FRAME_NO_TASKBAR|wx.NO_BORDER|wx.CAPTION|wx.FRAME_FLOAT_ON_PARENT): #
        wx.Frame.__init__(self, parent, id, title, pos, size, style)
        #self.SetIcon(images.Mondrian.GetIcon())
        self.parent=parent
        # *win* is the window to re-enable when this popup hides.
        self.win=win
        wx.SystemOptions.SetOption("msw.remap", "0")
        self.statusImages = {
            u'failed': wx.Bitmap(os.path.join(home,'images','no_exit.ico'), wx.BITMAP_TYPE_ICO),
            u'eyes': wx.Bitmap(os.path.join(home,'images','eyes.png'), wx.BITMAP_TYPE_PNG),
        }
        #panel = wx.Panel(self, -1)
        self.Center()
        self.SetFocus()
        # Escape-to-hide handling (see OnKeyUP).
        self.Bind(wx.EVT_CHAR_HOOK, self.OnKeyUP)
        self.statusBar = ESB.EnhancedStatusBar(id=wxID_MAIN_WINDOWSTATUSBAR1, name=u'statusBar', parent=self)
        #self.SetStatusBar(self.statusBar)
        self.statusBar.SetSize((-1, 35))
        self.statusBar.SetFieldsCount(5)
        self.SetStatusBar(self.statusBar)
        self.statusBar.SetStatusWidths([20, 205,120, 950,-1])
        if 1:
            self.statusImage = wx.StaticBitmap(self.statusBar,-1,self.statusImages[u'eyes'],
                name=u'statusImage',
                size=wx.Size(-1, 16), style=0)
            self.statusBar.AddWidget(self.statusImage, ESB.ESB_ALIGN_LEFT)
        self.cnt=10
        # Busy gauge pulsed by per-list timers while a DB request runs.
        self.gauge = gauge = wx.Gauge(self.statusBar, -1, 100, size=(200,18))
        self.gauge.SetValue(0)
        self.statusBar.AddWidget(self.gauge, ESB.ESB_ALIGN_LEFT)
        b_cancel_db = wx.Button(self.statusBar, wxID_EXIT, "Cancel DB request.")
        b_cancel_db.Bind(wx.EVT_BUTTON, self.OnCancelDbRequest)
        self.statusBar.AddWidget(b_cancel_db, ESB.ESB_ALIGN_LEFT)
        if 0:
            # Disabled scrolling-ticker experiment.
            ticker = Ticker(self.statusbar, -1)
            ticker.SetText("Hello World!")
            ticker.SetBackgroundColour(wx.BLUE)
            ticker.SetForegroundColour(wx.NamedColour("YELLOW"))
            ticker.SetFont(wx.Font(9, wx.SWISS, wx.NORMAL, wx.BOLD, False))
            statictext = wx.StaticText(self.statusbar, -1, "Welcome To %s!" % prog)
            self.ticker = ticker
        self.statustext = wx.StaticText(self.statusBar, -1, "Welcome To %s!" % __appname__)
        self.statusBar.AddWidget(self.statustext, ESB.ESB_ALIGN_LEFT)
        b_exit = wx.Button(self.statusBar, wxID_EXIT, "Close")
        b_exit.Bind(wx.EVT_BUTTON, self.OnHide)
        self.statusBar.AddWidget(b_exit, ESB.ESB_ALIGN_RIGHT)
        # Gauge range; AdjustGauge wraps back to 10 past this value.
        self.g_range=80
        self.gauge.SetRange(self.g_range)
        self.gauge.SetValue(0)
        self.gauge.SetToolTip("Retrieving Data...")
        self.gauge.Show()
        #self.Bind(wx.EVT_TIMER, lambda event, i=1: self.TimerHandler(event, the_id=1), id=wxID_TIMER)
        #self.timer=wx.Timer(self, id=wxID_TIMER)
        # One wx.Timer and one gauge value per list id.
        self.timer = {} #wx.Timer(self)
        self.gauge_values={}
        #self.gauge.Freeze()
        #self.gauge[pos].Hide()
        #self.gauge.Pulse()
        #self.timer.Stop()
        #self.timer.Start(1000)
        self.Bind(wx.EVT_CLOSE, self.OnClose)
        ######################################
        self.nb = StatusNB(self, -1, data,log)
        ######################################
        #self.threads=self.nb.mlog.list.threads
        #self.timer.Start(100)
        sub(self.StartTimer, "start_timer")
        sub(self.StopTimer, "stop_timer")
        #self.Bind(EVT_UPDATE_TIMER, self.OnUpdate)
        #sub(self.GaugeTest, "gauge_test")
        #btm=wx.Bitmap(,wx.BITMAP_TYPE_PNG)
        #ico=wx.Image(os.path.join("images","bullet_blue_16.png"), wx.BITMAP_TYPE_PNG).ConvertToIcon()
        ico=wx.Icon(os.path.join(home,"images","text_list_bullets_16.png"), wx.BITMAP_TYPE_PNG)
        #ico = wx.Icon('py.ico', wx.BITMAP_TYPE_ICO)
        self.SetIcon(ico)
        #sub(self.onSetTitle, "set_title")
        #self.MakeModal(True)
        #disableAll = wx.WindowDisabler()
        # Per-list stop watches, keyed like self.timer.
        self.sw={} # stop watch
        #self.Bind(wx.EVT_SET_FOCUS, self.onFocus)
        sub(self.onCancelDbRequest,'cancel_db_request')
    def onCancelDbRequest(self, data, extra1, extra2=None):
        """Pubsub handler: stop all gauge timers while worker threads wind
        down, showing a busy cursor meanwhile."""
        #send("start_timer", () )
        #time.sleep(5)
        #self.UpdateList(data)
        #busy = PBI.PyBusyInfo('One moment please, waiting for threads to die...',parent=self.win, title='Cancel DB requests.')
        busy = wx.BusyInfo("One moment please, waiting for threads to die...", self.win)
        #wx.Yield()
        self.stopTimers()
        del busy
        #self.parent.parent.Enable()
    def OnCancelDbRequest(self, evt):
        """Button handler: broadcast the cancel request via pubsub."""
        #self.stopTimers()
        send('cancel_db_request',())
    def onFocus1(self, evt):
        """(Unbound) focus handler kept for reference."""
        #print ('onFocusonFocusonFocus')
        #wx.CallAfter (send, 'raise_log_window', (False,))
        #self.info('Focused.')
        #
        wx.CallAfter(self.info,('Focused'))
        #self.info,('Focused')
        #evt.Skip()
    def info(self, text):
        """Show *text* in the status bar and publish it to the app log."""
        ts=datetime.datetime.now().strftime("%H:%M:%S.%f")
        self.status(text)
        send('add_log',(['INFO',type(self).__name__,text, ts],))
    def status(self, msg):
        """Set the status-bar static text to *msg*."""
        self.statustext.SetLabel(str(msg) )
    def onSetTitle_delete(self, data, extra1, extra2=None):
        """(Unused) pubsub handler that sets the frame title."""
        #print ('onSetTitle')
        [title]= data
        self.SetTitle(title)
    def stopTimers(self):
        """Stop every per-list gauge timer and zero the gauge."""
        self.info('Stopping timers.')
        for t in self.timer:
            self.timer[t].Stop()
        if self.gauge:
            self.gauge.SetValue(0)
        #self.parent.Enable()
    def GaugeTest(self, data, extra1, extra2=None):
        """Pubsub test hook: set the gauge to the supplied value."""
        #print ('GaugeTest', data)
        (value,)=data
        self.gauge.SetValue(value)
    def OnUpdate(self, evt):
        """(Unbound) event handler that mirrors evt.value onto the gauge."""
        #print ('EVT_UPDATE_TIMER')
        self.gauge.SetValue(evt.value)
        #self.gauge.Refresh(False)
    def StopTimer(self, data, extra1, extra2=None):
        """Pubsub handler: stop the timer for one list id, fill the gauge,
        and discard that list's stop watch."""
        #print ('StopTimer')
        #self.win.Enable()
        [list_id]= data
        #self.nb.Enable()
        # Create-on-demand so a stop for an unknown id never raises.
        if not list_id in self.timer.keys():
            self.timer[list_id]=wx.Timer(self)
        if self.timer[list_id].IsRunning():
            self.timer[list_id].Stop()
        if self.gauge:
            self.gauge.SetValue(self.g_range)
            #self.gauge.Freeze()
        if list_id in self.sw.keys():
            del self.sw[list_id]
        #if not sum([self.timer[x].IsRunning() for x in self.timer]):
        #    self.parent.Enable()
    def addTimer(self,list_id):
        """Lazily create and bind the gauge timer for *list_id*."""
        if not list_id in self.timer.keys():
            self.timer[list_id]=wx.Timer(self)
            #self.Bind(wx.EVT_TIMER, self.TimerHandler, self.timer[list_id])
            self.gen_bind(wx.EVT_TIMER,self.timer[list_id], self.TimerHandler,(list_id,))
    def StartTimer(self, data, extra1, extra2=None):
        """Pubsub handler: start the busy gauge + stop watch for one list id."""
        #self.win.Disable()
        #self.nb.Disable()
        [list_id] =data
        self.sw[list_id] = wx.StopWatch()
        #print ('StartTimer')
        self.addTimer(list_id)
        if 1: # self.gauge:
            self.gauge.SetRange(self.g_range)
            self.gauge_SetValue(list_id,10)
            self.gauge.Show()
        self.timer[list_id].Start(200)
    def gauge_SetValue(self, list_id, val):
        """Record *val* for *list_id* and push it onto the shared gauge."""
        #if list_id in self.gauge_values.keys():
        self.gauge_values[list_id]=val
        self.gauge.SetValue(val)
    def gen_bind(self, type, instance, handler, *args, **kwargs):
        """Bind *handler* to *instance* for event *type*, forwarding extras."""
        self.Bind(type, lambda event: handler(event, *args, **kwargs), instance)
    def IsInFocus(self, list_id):
        """Focus gate for gauge updates; currently always True."""
        return True
    def TimerHandler(self, event, params):
        """Per-tick timer callback: advance the gauge for one list id."""
        [list_id]=params
        #print (the_id)
        if self.IsInFocus(list_id):
            self.AdjustGauge(list_id)
            #self.gauge.Pulse()
    def AdjustGauge(self, list_id):
        """Advance the gauge by 10, wrapping past g_range, and publish the
        elapsed stop-watch time."""
        self.gauge_values[list_id] +=10
        if self.gauge_values[list_id]>self.g_range: self.gauge_values[list_id]=10
        #print (time.ctime())
        send("update_message", [self.sw[list_id].Time()])
        wx.CallAfter(self.gauge.SetValue, self.gauge_values[list_id])
    def OnKeyUP(self, event):
        """Hide the popup (and re-enable its owners) when Escape is pressed."""
        #print ("KEY UP!")
        keyCode = event.GetKeyCode()
        if keyCode == wx.WXK_ESCAPE:
            self.min=5
            self.win.Enable()
            self.parent.Enable()
            self.Hide()
            #self.Destroy()
            #send("on_parent_close", () )
        event.Skip()
        #event.Veto()
    def OnClose(self,e):
        """Intercept frame close: hide instead of destroying, then veto."""
        #pprint(self.timer)
        #print('onClose')
        if 0:
            for t in self.timer:
                if self.timer[t].IsRunning():
                    self.timer[t].Stop()
        if 0:
            busy = wx.BusyInfo("One moment please, waiting for threads to die...")
            wx.Yield()
            for t in self.threads:
                t.Stop()
            running = 1
            while running:
                running = 0
                for t in self.threads:
                    running = running + t.IsRunning()
                time.sleep(0.1)
        #send("on_parent_close", () )
        self.win.Enable()
        self.parent.Enable()
        self.Hide()
        # Veto keeps the frame alive for re-use.
        e.Veto()
        #e.Skip()
        #self.Destroy()
    def OnHide(self,e):
        """Close-button handler: hide the popup and re-enable its owners."""
        #pprint(self.timer)
        #print('OnHide')
        if 0:
            for t in self.timer:
                if self.timer[t].IsRunning():
                    self.timer[t].Stop()
        if 0:
            busy = wx.BusyInfo("One moment please, waiting for threads to die...")
            wx.Yield()
            for t in self.threads:
                t.Stop()
            running = 1
            while running:
                running = 0
                for t in self.threads:
                    running = running + t.IsRunning()
                time.sleep(0.1)
        #send("on_parent_close", () )
        self.Hide()
        self.win.Enable()
        self.parent.Enable()
        #e.Veto()
        #e.Skip()
        #self.Destroy()
#
# MAIN FRAME
#
class JobMonitor(sized_controls.SizedFrame):
#----------------------------------------------------------------------
def __init__(self, *args, **kwargs):
wx.Frame.__init__(self, *args)
self.Hide()
#self.Freeze()
self.timer={}
self.sw={}
sub(self.StartTimer, "start_timer")
sub(self.StopTimer, "stop_timer")
self.panel= panel = wx.Panel(self)
self.app = kwargs.pop('app', None)
self.log = kwargs.pop('log', None)
self.ts_out_dir = kwargs.pop('ts_out_dir', None)
self.config = kwargs.pop('config', None)
self.sct='Status color test'
#self.config.cfg['queries']['job_list']['views'][self.sct]= self.config.cfg['queries']['job_list']['views']['All-jobs']
#self.busybox = kwargs.pop('busybox', None)
#wx.FRAME_FLOAT_ON_PARENT
#self.busybox.SetParent(self)
if 0:
self.busybox = busybox=BusyFrame(self,-1,'Applog.',app = self.app)
#msg('Extracting data')
#self.busybox.Refresh()
#self.busybox.Update()
self.app.SetTopWindow(busybox)
self.pos=[(0,0)]
if 0:
self.busybox = BusyFrame(self,-1,'Log messages.')
self.busybox.Refresh()
self.busybox.Update()
self.busybox.Show()
#threading.Timer(10.0, msgbox.Hide).start()
#self.Hide()
ico=wx.Icon(os.path.join(home,"images","categories_32.png"), wx.BITMAP_TYPE_PNG)
#ico = wx.Icon('py.ico', wx.BITMAP_TYPE_ICO)
self.SetIcon(ico)
views=[x for x in self.config.cfg['queries']['job_list']['views'].keys() if not x.startswith('_')]
self.default_view=self.config.cfg['queries']['job_list']['default_view']
#self.cb_view= wx.ComboBox(panel, id=wx.NewId(), value=self.default_list, choices=views, size=(150,-1), style=0, name='job_view')
#self.cb_view.Bind(wx.EVT_COMBOBOX, self.onViewChanged)
self.cb_view = self._createComboTreeBox(panel,0) #wx.CB_READONLY)
#self.gen_bind(wx.EVT_COMBOBOX,self.cb_view, self.OnRefreshMessageLog,(obj.getListID(),))
self.statusBar = ESB.EnhancedStatusBar(id=wxID_MAIN_WINDOWSTATUSBAR1, name=u'mainSB', parent=self)
self.SetStatusBar(self.statusBar)
#self.statusBar.SetSize((-1, 30))
self.statusBar.SetFieldsCount(7)
self.statusBar.SetStatusWidths([23, 920,75,200,100,240, -1])
self.statustext = wx.StaticText(self.statusBar, -1, "Welcome To %s!" % __appname__)
self.info('Creating main frame.')
self.info('Creating job list.')
###################################################################
self.table_list = table_list= JobListCtrl(self, panel,-1,view= self.getJobViewName(), log=self.log)
###################################################################
#panel.SetListCtrl(table_list)
self.filter_history={}
table_list.Bind(wx.EVT_LIST_BEGIN_DRAG, self.onDrag)
sizer = wx.BoxSizer(wx.VERTICAL)
self.filter =self.getFilter(panel,self.table_list)
self.currentItem = 0
navig = wx.BoxSizer(wx.HORIZONTAL)
sizer.Add((5,5)) # Make sure there is room for the focus ring
navig.Add(self.filter, 0, wx.LEFT|wx.BOTTOM)
#imageFile = os.path.join(home,'images','exec.png')
imageFile = os.path.join(home,"images","refresh_icon_32_grey2.png")
image1 = wx.Bitmap(imageFile)
#image1= scale_bitmap(image1, image1.GetWidth()*1.3, image1.GetHeight()*1.3)
self.btn_refresh=wx.BitmapButton(panel, id=-1, bitmap=image1,size = (image1.GetWidth()+5, image1.GetHeight()+5))
self.gen_bind(wx.EVT_BUTTON,self.btn_refresh, self.OnBtnRefreshList,(self.pos))
navig.Add(self.btn_refresh, 0, wx.LEFT)
if 1:
#sb = wx.StaticBox(panel, label='Report view')
boxsizer = wx.BoxSizer(wx.HORIZONTAL)
boxsizer.Add((25,5))
text = wx.StaticText(panel, -1, 'View:', (20, 120))
boxsizer.Add(text, flag=wx.ALIGN_CENTER, border=5)
boxsizer.Add((5,5))
boxsizer.Add(self.cb_view, flag=wx.ALIGN_CENTER, border=1)
navig.Add(boxsizer, flag=wx.ALIGN_CENTER, border=5)
navig.Add((5,5), 1, wx.EXPAND)
if 0: #show contacts
self.b_putty = wx.Button(panel, -1, "Putty", size=(100,40))
self.b_putty.Enable(True)
navig.Add(self.b_putty, 0, flag=wx.RIGHT|wx.ALIGN_RIGHT,border=5)
self.gen_bind(wx.EVT_BUTTON,self.b_putty, self.OnPutty,())
#print(self.cb_view.GetValue())
navig.Add((5,5), 1, wx.EXPAND)
if 0: #show contacts
self.jabber = wx.Button(panel, -1, "Jabber", size=(100,40))
self.jabber.Enable(True)
navig.Add(self.jabber, 0, flag=wx.RIGHT|wx.ALIGN_RIGHT,border=5)
self.gen_bind(wx.EVT_BUTTON,self.jabber, self.OnJabber,())
#navig.Add((5,5), 1)
if 0: #show contacts
self.b_conv = wx.Button(panel, -1, "Conversations", size=(100,40))
self.b_conv.Enable(True)
navig.Add(self.b_conv, 0, flag=wx.RIGHT|wx.ALIGN_RIGHT,border=5)
self.gen_bind(wx.EVT_BUTTON,self.b_conv, self.OnConv,())
navig.Add((5,15), 1)
if 1: #show contacts
email_config=self.config.cfg['email_to']
self.b_email = wx.Button(panel, -1, "Email", size=(100,40))
self.b_email.Enable(False)
navig.Add(self.b_email, 0, flag=wx.RIGHT|wx.ALIGN_RIGHT,border=5)
self.gen_bind(wx.EVT_BUTTON,self.b_email, self.OnEmail,(email_config,))
if 1: #show contacts
url="http://theloop.kcg.com/display/DWRPT/Data+Warehouse+and+Reporting+Home"
b_show_contacts = wx.Button(panel, -1, "DW Team\ncontacts", size=(100,40))
navig.Add(b_show_contacts, 0, flag=wx.ALIGN_CENTER|wx.RIGHT|wx.ALIGN_RIGHT,border=5)
self.gen_bind(wx.EVT_BUTTON,b_show_contacts, self.OnShowContacts,(url,))
if 0:
if 1:
imageFile = os.path.join(home,'images','thumb_14670028780Explorer.png')
image1 = wx.Bitmap(imageFile)
image1= scale_bitmap(image1, image1.GetWidth()/7, image1.GetHeight()/7.4)
btn = wx.BitmapButton(panel, id=-1, bitmap=image1,size = (image1.GetWidth()+6, image1.GetHeight()+6))
navig.Add(btn, 0, flag=wx.ALIGN_CENTER|wx.RIGHT|wx.ALIGN_RIGHT,border=5)
#btn.Bind(wx.EVT_BUTTON, self.OnMainMenu)
btn.SetName('explore')
else:
imageFile = os.path.join(home,'images','thumb_14670028780Explorer.png')
image1 = wx.Bitmap(imageFile)
#image1= scale_bitmap(image1, image1.GetWidth()/8, image1.GetHeight()/7)
btn = wx.BitmapButton(panel, id=-1, bitmap=image1,size = (image1.GetWidth()+6, image1.GetHeight()+6))
navig.Add((5,5), 1)
navig.Add(btn, 0, flag=wx.ALIGN_CENTER|wx.RIGHT|wx.ALIGN_RIGHT,border=5)
#btn.Bind(wx.EVT_BUTTON, self.OnMainMenu)
btn.SetName('explore')
if 1:
imageFile = os.path.join(home,'images','exec.png')
image1 = wx.Bitmap(imageFile)
btn = wx.BitmapButton(panel, id=-1, bitmap=image1,size = (image1.GetWidth()+6, image1.GetHeight()+6))
#navig.Add((5,5), 1, wx.EXPAND)
navig.Add(btn, 0, flag=wx.ALIGN_CENTER|wx.RIGHT|wx.ALIGN_RIGHT,border=5)
#btn.Bind(wx.EVT_BUTTON, self.OnShowPopup)
btn.SetName('menu')
sizer.Add(navig, 0, wx.EXPAND)
sizer.Add(table_list, 1, wx.EXPAND)
panel.SetSizer(sizer)
self.pane_captions ={}
self.pane_captions_0={
'main_toolbar':('main_toolbar', _('main toolbar')),
'svg_panel':('svg_panel', _('svg panel')),
'app_log_ctrl':('log', _('log')),
'shell':('shell', _('shell'))
}
if 1:
self.Bind(wx.EVT_CLOSE, self.OnClose)
if 1:
self.info('Loading application settings.')
if self.app.settings.ReadBool('GUI/load_default_state_on_start', True):
self.method_load_default_state()
self.default_open_path = self.app.settings.Read('GUI/default_open_path', os.getcwd())
#self.spooler=TableSpooler( win=self, log=self.log, ts_out_dir=self.ts_out_dir)
self.aui_manager = AuiManager()
self.aui_manager.SetManagedWindow(self)
#self.Bind(EVT_UPDATE_BARGRAPH, self.OnUpdate)
self.threads = []
self.popupmenu = wx.Menu()
item = wx.MenuItem(self.popupmenu, wx.ID_ANY,
"Edit Config\tCtrl+F4",
"CEdit Config")
item2 = wx.MenuItem(self.popupmenu, wx.ID_ANY,
"Documentation\tCtrl+F5",
"Documentation")
item3 = wx.MenuItem(self.popupmenu, wx.ID_ANY,
"wxPython howto\tCtrl+F6",
"wxPython howto")
item4 = wx.MenuItem(self.popupmenu, wx.ID_ANY,
"About\tCtrl+F7",
"About")
self.Bind(wx.EVT_MENU, self.onEditConfig, item)
self.Bind(wx.EVT_MENU, self.onDocs, item2)
self.Bind(wx.EVT_MENU, self.onHowTo, item3)
self.Bind(wx.EVT_MENU, self.onAboutDlg, item4)
#
self.popupmenu.Append(item)
self.popupmenu.Append(item2)
self.popupmenu.Append(item3)
self.popupmenu.AppendSeparator()
self.popupmenu.Append(item4)
#self.Bind(wx.EVT_MENU, self.OnPopupItemSelected, item)
self.Center()
#panel.Bind(wx.EVT_CONTEXT_MENU, self.OnShowPopup)
self.gen_bind(wx.EVT_BUTTON,btn, self.OnShowPopup,(btn.GetScreenPosition()))
self.statusImages = {
u'failed': wx.Bitmap(os.path.join(home,'images','no_exit.ico'), wx.BITMAP_TYPE_ICO),
u'eyes': wx.Bitmap(os.path.join(home,'images','eyes.png'), wx.BITMAP_TYPE_PNG),
}
if 1:
self.statusImage = wx.StaticBitmap(self.statusBar,-1,self.statusImages[u'eyes'],
name=u'statusImage',
size=wx.Size(-1, 16), style=0)
self.statusBar.AddWidget(self.statusImage, ESB.ESB_ALIGN_LEFT)
#self._accel = wx.AcceleratorTable([(wx.ACCEL_NORMAL, wx.WXK_ESCAPE, wx.ID_CLOSE)])
#self.SetAcceleratorTable(self._accel)
if 0:
from wx.lib.ticker import Ticker
ticker = Ticker(self.statusBar, -1)
ticker.SetText("1 min")
ticker.SetBackgroundColour(wx.BLUE)
ticker.SetForegroundColour(wx.NamedColour("YELLOW"))
ticker.SetFont(wx.Font(9, wx.SWISS, wx.NORMAL, wx.BOLD, False))
#self.statusBar.AddWidget(statustext, ESB.ESB_ALIGN_LEFT)
self.ticker = ticker
self.statusBar.AddWidget(self.statustext, ESB.ESB_ALIGN_LEFT)
if 1:
wxID_SHOW_APPLOG=wx.NewId()
b_show_applog = wx.Button(self.statusBar, wxID_SHOW_APPLOG, "Applog", size=(70,50))
#b_exit.Bind(wx.EVT_BUTTON, self.OnHide)
self.Bind(wx.EVT_BUTTON, lambda evt: self._ShowApplog(), id=wxID_SHOW_APPLOG)
self.statusBar.AddWidget(b_show_applog, ESB.ESB_ALIGN_RIGHT)
if 1:
self.default_rrf=300 #seconds
self.rrf_elapsed=self.default_rrf
self.rrf_text = wx.TextCtrl(self.statusBar, wx.TE_READONLY, "Refresh [%s] %s " % (self.default_rrf,self.rrf_elapsed))
#self.rrf_text.Bind(wx.EVT_TEXT, self.ontextchange)
self.rrf_timer=None
self.rrf_slider = wx.Slider(self.statusBar, value=self.default_rrf/60, minValue=1, maxValue=30,
size=(150,-1),
style=wx.SL_HORIZONTAL|wx.SL_AUTOTICKS|wx.SL_LABELS)
#fps.SetTickFreq(5)
self.Bind(wx.EVT_SCROLL_CHANGED , self.ChangeFrequency, self.rrf_slider)
self.report_refresh_frequency=self.default_rrf
self.setReportRefreshFrequency()
self.statusBar.AddWidget(self.rrf_text, ESB.ESB_ALIGN_RIGHT)
self.statusBar.AddWidget(self.rrf_slider, ESB.ESB_ALIGN_LEFT)
if 1:
wxID_SHOW_APPLOG=wx.NewId()
b_sql = wx.Button(self.statusBar, wxID_SHOW_APPLOG, "sql", size=(35,23))
#b_exit.Bind(wx.EVT_BUTTON, self.OnHide)
self.Bind(wx.EVT_BUTTON, lambda evt: self._SaveSQLToClipboard(), id=wxID_SHOW_APPLOG)
self.statusBar.AddWidget(b_sql, ESB.ESB_ALIGN_RIGHT)
b_exit = wx.Button(self.statusBar, wx.ID_CLOSE, "Exit", size=(70,50))
#b_exit.Bind(wx.EVT_BUTTON, self.OnHide)
self.Bind(wx.EVT_BUTTON, lambda evt: self._Exit(), id=wx.ID_CLOSE)
self.statusBar.AddWidget(b_exit, ESB.ESB_ALIGN_RIGHT)
self.Bind(wxID_EVT_EXIT, self.OnClose)
self.filter.SetFocus()
sub(self.OnSetFocusOnSearch, 'set_focus_on_search')
self.SetStatusBar(self.statusBar)
sub(self.onCancelDbRequest,'cancel_db_request')
def _bindEventHandlers(self, comboBox):
for eventType, handler in [(wx.EVT_COMBOBOX, self.OnItemSelected),
(wx.EVT_TEXT, self.OnItemEntered)]:
comboBox.Bind(eventType, handler)
def _createComboTreeBox(self,panel, style):
comboBox = ComboTreeBox(panel, style=style, size=(170,-1))
self._bindEventHandlers(comboBox)
cfg=self.config.cfg['queries']['job_list']
view_list=[v for v in cfg['views'] if not v.startswith('_')]
tabs={}
for view in view_list:
vtype= cfg['views'][view]['type']
if vtype not in tabs.keys():
tabs[vtype]=[]
tabs[vtype].append(view)
for vtype in tabs.keys():
child = comboBox.Append(vtype)
for view in tabs[vtype]:
grandChild = comboBox.Append(view, child)
comboBox.SetValue(self.default_view)
return comboBox
def OnItemSelected(self, event):
print('You selected: %s\n'%event.GetString())
#pprint(dir(self.comboBox))
val=event.GetString().split('-')
if len(val)==1:
self.cb_view.SetValue(self.default_view)
self.view=view=self.getJobViewName()
send('raise_log_window', (False,))
self.table_list.RefreshListData(view)
event.Skip()
def OnItemEntered(self, event):
print('You entered: %s\n'%event.GetString())
def _SaveSQLToClipboard(self):
Paste(self.table_list.query)
self.info('SQL is saved to clipboard')
def onCancelDbRequest(self, data, extra1, extra2=None):
#send("start_timer", () )
#time.sleep(5)
#self.UpdateList(data)
#busy = wx.BusyInfo("One moment please, waiting for threads to die...")
#wx.Yield()
self.info('Stopping timers.')
for t in self.timer.values():
t.Stop()
def open_outlook(self):
    """Launch Outlook via its Start Menu shortcut (Windows-only).

    BUG FIX: the original had `raise` before the failure print, making the
    message unreachable; it is now printed before re-raising.
    NOTE(review): the .lnk path is hard-coded to an Office 2013 install --
    confirm on target machines.
    """
    try:
        # os.system blocks until the shell returns
        os.system(r'"C:\ProgramData\Microsoft\Windows\Start Menu\Programs\Microsoft Office 2013\Outlook 2013.lnk"')
    except Exception:
        print("Outlook didn't open successfully")
        raise
def windowEnumerationHandler(self,hwnd, top_windows):
    """EnumWindows callback: record (hwnd, window title) for each window.

    BUG FIX: appends to the *top_windows* list passed as the EnumWindows
    extra argument instead of reaching back through self.top_windows.
    Callers here pass self.top_windows, so behavior is unchanged for them,
    but the callback now works with any target list.
    """
    top_windows.append((hwnd, win32gui.GetWindowText(hwnd)))
def OnPutty(self, event, params):
    """Launch PuTTY from the app home directory (Windows-only)."""
    global home
    putty_path = os.path.join(home, 'PuTTY', 'putty.exe')
    print(putty_path)
    os.startfile(putty_path)
def OnJabber(self, event, params):
    """Find and restore the first on-screen Cisco Jabber top-level window.

    Enumerates all top-level windows, restores any whose title contains
    'jabber', and stops at the first one positioned on-screen (left > 0).
    Cleanup: removed unused locals (`results`, `w`, `h`) and dead
    commented-out positioning experiments from the original.
    """
    self.top_windows = []
    win32gui.EnumWindows(self.windowEnumerationHandler, self.top_windows)
    for hwnd, title in self.top_windows:
        if "jabber" in title.lower():
            left, top, right, bottom = win32gui.GetWindowRect(hwnd)
            win32gui.ShowWindow(hwnd, win32con.SW_RESTORE)
            # only stop at a window actually on-screen; off-screen
            # matches keep the scan going
            if left > 0:
                break
def OnConv(self, event, params):
    """Find and restore the first on-screen 'Conversations' window.

    Same scan as OnJabber but keyed on 'conversations' in the title.
    Cleanup: removed unused locals (`results`, `w`, `h`) and dead
    commented-out positioning experiments from the original.
    """
    self.top_windows = []
    win32gui.EnumWindows(self.windowEnumerationHandler, self.top_windows)
    for hwnd, title in self.top_windows:
        if "conversations" in title.lower():
            left, top, right, bottom = win32gui.GetWindowRect(hwnd)
            win32gui.ShowWindow(hwnd, win32con.SW_RESTORE)
            # only stop at a window actually on-screen
            if left > 0:
                break
def OnEmail(self, event, params):
    """Send default body plus selected rows (as HTML) to the local emailer
    service listening on <hostname>:12348.

    params is a one-element list holding the email config dict; the
    'email_list'/'default_subject'/'default_body' keys are required
    (KeyError otherwise), though only the body text is transmitted here.

    BUG FIX: the original ended with `s.close` (attribute access, no
    call) so the socket was never closed; now closed in a finally block.
    Also removed the unused `e=sys.exit` alias and dead timing code.
    """
    [email_config] = params
    email_to, subject, body = email_config['email_list'], email_config['default_subject'], email_config['default_body']
    import socket
    s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM, proto=0)
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    s.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    host = socket.gethostname()
    port = 12348  # port of the companion emailer service
    try:
        s.connect((host, port))
        print('Sending..')
        s.send((body + os.linesep + self.GetSelectedItemsAsHTML()).encode('utf-8'))
        print("Done Sending")
        s.shutdown(socket.SHUT_WR)
    finally:
        s.close()
def OnEmail_old(self, event, params):
    """Legacy emailer: compose an Outlook message with the selected rows.

    Everything below the config unpack sits inside `if 0:` -- dead code
    kept for reference; the active path is OnEmail() (TCP emailer).
    NOTE(review): `win32` (win32com.client alias) is not among the visible
    imports; this would NameError if re-enabled -- confirm before reviving.
    """
    [email_config] = params
    email_to, subject,body = email_config['email_list'], email_config['default_subject'],email_config['default_body']
    if 0:
        if self.table_list.GetSelectedItemCount()>0:
            outlook = win32.Dispatch('outlook.application')
            mail = outlook.CreateItem(0)
            mail.To = email_to
            mail.Subject = subject
            mail.HtmlBody = body+os.linesep+self.GetSelectedItemsAsHTML()
            self.b_email.Enable(False)
            mail.Display(True)
        else:
            self.warn('Cannot email. None of the jobs are selected.')
def GetSelectedItemsAsHTML0(self):
    """Legacy v0: selected rows as one flat HTML table (superseded).

    NOTE(review): str.join only inserts <th>/<td> *between* cells (first
    cell untagged, no closing tags), so the markup is not well-formed;
    kept as-is since the active variant is GetSelectedItemsAsHTML().
    """
    out="""<html itemscope itemtype="http://schema.org/QAPage"> <head>
<style type="text/css">
.nowrap { white-space: nowrap; }
a:hover,a:active{color:#4CAF50}
table.w3-table-all{margin:20px 0}
</style></head>
<body>
"""
    j=0
    # header row, then one row per selected item
    out +='<tr>'+'<th class="nowrap">'.join([self.table_list.GetColumn(i).Text for i in range(self.table_list.GetColumnCount())])
    for i in self.table_list.GetSelectedItems():
        j +=1
        out +='<tr>'+'<td class="nowrap">'.join([str(x) for x in self.table_list.itemDataMap[i]])
    return '<table>'+out+'</table></body></html>'
def GetSelectedItemsAsHTML1(self):
    """Legacy v1: selected rows as a transposed table (columns as rows).

    Header cells get a red background; like v0 the row markup omits
    closing </td>/</tr> tags. Superseded by GetSelectedItemsAsHTML().
    """
    out="""<html itemscope itemtype="http://schema.org/QAPage"> <head>
<style type="text/css">
.nowrap { white-space: nowrap; }
a:hover,a:active{color:#4CAF50}
table.w3-table-all{margin:20px 0}
</style></head>
<body>
"""
    j=0
    data=[]
    # data[0] = column headers, data[1:] = one entry per selected row
    data.append([self.table_list.GetColumn(i).Text for i in range(self.table_list.GetColumnCount())])
    for i in self.table_list.GetSelectedItems():
        j +=1
        data.append([str(x) for x in self.table_list.itemDataMap[i]])
    # transpose: one <tr> per column; first cell is the bolded header
    for i in range(len(data[0])):
        out +='<tr>'
        for j in range(len(data)):
            if j==0:
                out +='<td class="nowrap" bgcolor="#FF0000"><b>%s</b>' % data[j][i]
            else:
                out +='<td class="nowrap">'+data[j][i]
    return '<table border=1 class="w3-table-all notranslate">'+out+'</table></body></html>'
def GetSelectedItemsAsHTML(self):
    """Render each selected row as its own 2-column HTML table.

    For every selected row a vertical table (column name | value) is
    emitted; the header cell is colored by job status. The status is
    parsed from column 3, assumed shaped like '...-STATUS[...' -- TODO
    confirm against the job_list query. Unknown statuses fall back to the
    FAILED color.
    """
    out="""<html itemscope itemtype="http://schema.org/QAPage"> <head>
<style type="text/css">
.nowrap { white-space: nowrap; }
a:hover,a:active{color:#4CAF50}
table.w3-table-all{margin:20px 0}
</style></head>
<body>
"""
    j=0
    data=[]
    # pastel backgrounds per job status
    colors={'FAILED':'#FFCCCC','RUNNING':'#CCE5FF','NOT RUN':'#FFFFCC','SUCCESS':'#CCFFCC'}
    # data[0] = headers; data[1:] = selected rows (stringified)
    data.append([self.table_list.GetColumn(i).Text for i in range(self.table_list.GetColumnCount())])
    for i in self.table_list.GetSelectedItems():
        j +=1
        data.append([str(x) for x in self.table_list.itemDataMap[i]])
    # one vertical table per selected row
    for j in range(1,len(data)):
        out +='<br><br>'
        out +='<table border=1>'
        for i in range(len(data[0])):
            status=data[j][3].split('-')[1].split('[')[0]
            if status in colors.keys():
                colour=colors[status]
            else:
                colour=colors['FAILED']
            out +='<tr>'
            out +='<td class="nowrap" bgcolor="%s"><b>%s</b></td>' % (colour,data[0][i])
            out +='<td class="nowrap">%s</td>' % data[j][i]
        out +='</table>'
    return out+'</body></html>'
def OnShowContacts(self, event, params):
    """Open the support-contacts page in the default web browser."""
    (url,) = params
    webbrowser.open(url)
def ChangeFrequency(self, evt):
    """Slider moved: re-apply the report refresh frequency."""
    self.setReportRefreshFrequency()
def setReportRefreshFrequency(self):
    """(Re)arm the report auto-refresh timer from the slider (minutes).

    First call creates rrf_timer (fires the refresh) and rrf_counter
    (1-second countdown display); later calls stop the running timer and
    adopt the new slider value. Both are then (re)started and the
    countdown text reset.
    """
    if self.rrf_timer and self.rrf_timer.IsRunning():
        self.rrf_timer.Stop()
        # slider is minutes; internal frequency is kept in seconds
        self.report_refresh_frequency=self.rrf_slider.GetValue()*60
        self.info('Setting RRF frequency to %s min' % self.rrf_slider.GetValue() )
    else:
        self.rrf_timer=wx.Timer(self)
        self.rrf_counter=wx.Timer(self)
        self.gen_bind(wx.EVT_TIMER,self.rrf_timer, self.rrf_TimerHandler,())
        self.gen_bind(wx.EVT_TIMER,self.rrf_counter, self.rrf_counter_TimerHandler,())
    self.rrf_timer.Start(self.report_refresh_frequency*1000)
    self.rrf_elapsed=self.report_refresh_frequency
    self.rrf_counter.Start(1000)
    self.rrf_text.SetValue("Refresh [%sm] %s " % (round(self.report_refresh_frequency/60),self.rrf_elapsed))
def rrf_counter_TimerHandler(self, event, params):
    """1-second countdown tick: decrement and redraw the refresh countdown."""
    self.rrf_elapsed -= 1
    minutes = round(self.report_refresh_frequency / 60)
    self.rrf_text.SetValue("Refresh [%sm] %s " % (minutes, self.rrf_elapsed))
def rrf_TimerHandler(self, event, params):
    """Auto-refresh tick: reload the current view and reset the countdown."""
    rrf=self.report_refresh_frequency*1000
    if self.rrf_timer.IsRunning():
        self.info('Refreshing data in RRF timer [%ssec]' % self.report_refresh_frequency)
        self.table_list.RefreshListData(view=self.getJobViewName())
        self.rrf_elapsed=self.report_refresh_frequency
    else:
        self.info('RRF timer is not running.')
def _ShowApplog(self):
    """Raise (show) the application log window."""
    show = True
    send('raise_log_window', (show,))
def Close(self):
    """Hide the frame instead of destroying it (tray-style close).

    NOTE(review): this overrides wx.Window.Close(force=False) with an
    incompatible signature -- confirm no caller passes arguments.
    Both `if 0:` blocks (timer stop, thread shutdown) are disabled.
    """
    if 0:
        for t in self.timer:
            if self.timer[t].IsRunning():
                self.timer[t].Stop()
    if 0:
        busy = wx.BusyInfo("One moment please, waiting for threads to die...")
        wx.Yield()
        for t in self.threads:
            t.Stop()
        running = 1
        while running:
            running = 0
            for t in self.threads:
                running = running + t.IsRunning()
            time.sleep(0.1)
    # only hide -- the frame is destroyed later via _Exit()
    self.Hide()
def onViewChanged(self, evt):
    """View combo changed: remember the view, hide the log, reload the list."""
    view = self.getJobViewName()
    self.view = view
    send('raise_log_window', (False,))
    self.table_list.RefreshListData(view)
def getJobViewName(self):
    """Name of the currently selected job view (combo box value)."""
    view_combo = self.cb_view
    return view_combo.GetValue()
def OnSetFocusOnSearch(self, data, extra1, extra2=None):
    """pubsub handler: give keyboard focus to the search box."""
    search_box = self.filter
    search_box.SetFocus()
def TimerHandler(self, event, params):
    """Once-a-second ticker: report elapsed fetch time for a list."""
    (list_id,) = params
    if self.timer[list_id].IsRunning():
        elapsed_ms = self.sw[list_id].Time()
        self.info('Receiving data for %s [%sms]' % (list_id, elapsed_ms))
def addTimer(self, list_id):
    """Lazily create the per-list elapsed-time ticker and bind its handler."""
    if list_id in self.timer.keys():
        return
    self.timer[list_id] = wx.Timer(self)
    self.gen_bind(wx.EVT_TIMER, self.timer[list_id], self.TimerHandler, (list_id,))
def StartTimer(self, data, extra1, extra2=None):
    """pubsub handler: begin timing a list's data fetch.

    Creates a stopwatch for the list and (via addTimer) a 1-second ticker
    that reports elapsed fetch time in the status bar.
    """
    [list_id] =data
    self.sw[list_id] = wx.StopWatch()
    self.addTimer(list_id)
    if 1: # self.gauge:
        self.timer[list_id].Start(1000)
def StopTimer(self, data, extra1, extra2=None):
#print ('StopTimer')
[list_id]= data
#self.nb.Enable()
if not list_id in self.timer.keys():
self.timer[list_id]=wx.Timer(self)
#print(self.timer, list_id)
if self.timer[list_id].IsRunning():
self.timer[list_id].Stop()
if list_id in self.sw.keys():
del self.sw[list_id]
def info(self, text):
    """Show *text* in the status bar and append an INFO row to the app log."""
    stamp = datetime.datetime.now().strftime("%H:%M:%S.%f")
    self.status(text)
    send('add_log', (['INFO', type(self).__name__, text, stamp],))
def warn(self, text):
    """Show *text* in the status bar and append a WARN row to the app log."""
    stamp = datetime.datetime.now().strftime("%H:%M:%S.%f")
    self.status(text)
    send('add_log', (['WARN', type(self).__name__, text, stamp],))
def status(self, msg):
    """Render *msg* (stringified) in the status bar label."""
    text = str(msg)
    self.statustext.SetLabel(text)
def GetWindow(self):
    """Return this frame itself (wx helper-compatibility accessor)."""
    return self
def OnShowPopup(self, event, params):
    """Show the context menu at *params* (screen coords) over the panel."""
    screen_pos = params
    client_pos = self.panel.ScreenToClient(screen_pos)
    self.panel.PopupMenu(self.popupmenu, client_pos)
def OnPopupItemSelected(self, event):
    """Echo the chosen popup-menu entry in a message box."""
    menu_item = self.popupmenu.FindItemById(event.GetId())
    label = menu_item.GetText()
    wx.MessageBox("You selected item '%s'" % label)
def onEditConfig(self, evt):
    """Open the active batch config file in the bundled Notepad++.

    BUG FIX: the config file path is now quoted so paths containing
    spaces survive the shell (the original concatenated it unquoted).
    The editor path itself contains no spaces, so it stays unquoted.
    """
    global opt, home
    conf_path = os.path.dirname(os.path.abspath(__file__))
    config_file = os.path.join(home, 'config', opt.batch_config[0])
    print(config_file)
    os.system(r'..\notepad++\notepad++.exe "%s"' % config_file)
def onDocs(self, evt):
    """Open the tool's documentation wiki page."""
    docs_url = 'http://theloop.kcg.com/pages/viewpage.action?pageId=73130973'
    webbrowser.open(docs_url)
def onHowTo(self, evt):
    """Open the tool's how-to wiki page."""
    howto_url = 'http://theloop.kcg.com/pages/viewpage.action?pageId=73133843'
    webbrowser.open(howto_url)
def onAboutDlg(self, event):
    """Show the wx About box built from module metadata (__appname__ etc.)."""
    from wx.lib.wordwrap import wordwrap
    info = wx.adv.AboutDialogInfo()
    info.Name = __appname__
    info.Version = __version__
    if __copyright__:
        info.Copyright = __copyright__
    info.Description = wordwrap(
        'Monitoring tool for night batch support.',
        350, wx.ClientDC(self.panel))
    if __github__:
        # __github__ is currently '' so this branch is inert
        info.WebSite = (__github__, "%s Github" % __appname__)
    info.Developers = ['Alex B <Abuzunov@kcg.com>\n Bhasker Parsi <bparsi@kcg.com>']
    info.License = wordwrap(__copyright__, 250, wx.ClientDC(self.panel))
    # Show the wx.AboutBox
    adv.AboutBox(info)
def OnAbout(self, event):
    """Show the standard wx About box with app name/version metadata."""
    about = adv.AboutDialogInfo()
    about.SetName(__appname__)
    about.SetVersion(__version__)
    about.SetDescription("Batch status browser.")
    about.SetCopyright("(C) 2017")
    about.AddDeveloper(":\nAlex B <Abuzunov@kcg.com>,\nBhasker Parsi <bparsi@kcg.com>")
    adv.AboutBox(about)
def onAboutHtmlDlg(self, event):
    """Show a minimal HTML window (error placeholder).

    BUG FIX: the original page string had lost its angle brackets
    ("htmlbody", "h1Error/h1", "/body/hmtl" -- note also the 'hmtl'
    typo) and rendered as plain text; restored well-formed markup.
    """
    html = wx.html.HtmlWindow(self)
    html.SetPage("<html><body>"
                 "<h1>Error</h1>"
                 "Some error occurred :-("
                 "</body></html>")
    html.Show()
    if 0:
        # dead alternative: centred custom About dialog (AboutDlg2 not
        # visible in this file)
        aboutDlg = AboutDlg2(self)
        x, y = self.GetPosition()
        xs, ys = self.GetSize()
        myx, myy = aboutDlg.GetSize()
        aboutDlg.SetPosition((x + xs/2 - myx/2, y + ys/2 - myy/2))
        aboutDlg.Show()
def gen_bind(self, type, instance, handler, *args, **kwargs):
    """Bind *handler* to *instance* for event *type*, forwarding extra args."""
    def _dispatch(event):
        return handler(event, *args, **kwargs)
    self.Bind(type, _dispatch, instance)
def OnBtnRefreshList(self, event, params):
    """Toolbar refresh: re-query the job list for the current view."""
    current_view = self.getJobViewName()
    self.table_list.RefreshListData(view=current_view)
def getFilter(self,parent,list):
    """Build the job-list search box with its canned status-filter menu.

    Each keystroke refilters the list (EVT_TEXT -> RecreateList); the
    radio menu stuffs a canned status string into the box via
    OnSearchMenu. Returns the configured wx.SearchCtrl.
    NOTE(review): the parameter named `list` shadows the builtin; left
    unchanged since callers pass it positionally by this shape.
    """
    self.searchItems={}
    filter = wx.SearchCtrl(parent, style=wx.TE_PROCESS_ENTER, size=(380,35))
    font = wx.Font(14, wx.DECORATIVE, wx.NORMAL,wx.NORMAL ) # wx.FONTWEIGHT_BOLD)
    filter.SetFont(font)
    filter.ShowCancelButton(True)
    # live filtering on every keystroke
    self.gen_bind(wx.EVT_TEXT,filter, self.RecreateList,(list,filter))
    self.gen_bind(wx.EVT_SEARCHCTRL_CANCEL_BTN,filter, self.OnSearchCancelBtn,(list, filter))
    # radio menu of canned status filters
    searchMenu = wx.Menu()
    item = searchMenu.AppendRadioItem(-1, "All")
    self.gen_bind(wx.EVT_MENU, item,self.OnSearchMenu,(list, ''))
    searchMenu.AppendSeparator()
    item = searchMenu.AppendRadioItem(-1, "FAILED")
    self.gen_bind(wx.EVT_MENU, item,self.OnSearchMenu,(list, 'FAILED' ))
    item = searchMenu.AppendRadioItem(-1, "RUNNING")
    self.gen_bind(wx.EVT_MENU, item,self.OnSearchMenu,(list, 'RUNNING' ))
    item = searchMenu.AppendRadioItem(-1, "NOT_RUN")
    self.gen_bind(wx.EVT_MENU, item,self.OnSearchMenu,(list, 'NOT RUN'))
    item = searchMenu.AppendRadioItem(-1, "SUCCESS")
    self.gen_bind(wx.EVT_MENU, item,self.OnSearchMenu,(list, 'SUCCESS'))
    searchMenu.AppendSeparator()
    item = searchMenu.AppendRadioItem(-1, "MONITORED")
    self.gen_bind(wx.EVT_MENU, item,self.OnSearchMenu,(list, 'MONITORED'))
    item = searchMenu.AppendRadioItem(-1, "UNMONITORED")
    self.gen_bind(wx.EVT_MENU, item,self.OnSearchMenu,(list, 'UNMONITORED'))
    searchMenu.AppendSeparator()
    id=wx.NewId()
    item = searchMenu.AppendRadioItem(id, "NOT STARTED")
    self.gen_bind(wx.EVT_MENU, item,self.OnSearchMenu,(list, 'NOT STARTED'))
    item = searchMenu.AppendRadioItem(-1, "LONG RUNNING")
    self.gen_bind(wx.EVT_MENU, item,self.OnSearchMenu,(list, 'LONG RUNNING'))
    if 0:
        item = searchMenu.AppendRadioItem(-1, "Files")
        self.gen_bind(wx.EVT_MENU, item,self.OnSearchMenu,(list, filter))
    filter.SetMenu(searchMenu)
    # un-check the explicitly-id'd radio item so no canned filter starts active
    searchMenu.Check(id,False)
    return filter
def RecreateList(self, evt=None, tf=None):
    """Re-apply the search filter to the job list and redraw it.

    tf is (list_ctrl, search_ctrl). The comma-separated filter text is
    matched case-insensitively as a substring against row columns
    0, 1, 3, 5 and 6 of list.itemDataMap; matching keys become the
    virtual list's itemIndexMap. Much of the body is disabled legacy
    tree/list population code retained under `if 0:` guards (Python 2
    era -- it still uses dict.has_key).
    """
    dsbWindow = wx.WindowDisabler()
    send('raise_log_window', (False,))
    (list, filter) = tf
    fltr = filter.GetValue()
    favs={}
    if 1:
        searchMenu = filter.GetMenu().GetMenuItems()
        fullSearch = searchMenu[1].IsChecked()
        searchItems=self.searchItems
        if evt:
            if fullSearch:
                # full-content search is delegated via pubsub
                send("force_search", (self.pos,fltr))
        current = None
        if 0:
            item = list.GetSelection()
            if item:
                prnt = list.GetItemParent(item)
                if prnt:
                    current = (list.GetItemText(item),
                               list.GetItemText(prnt))
        treeFont = list.GetFont()
        catFont = list.GetFont()
        # The old native treectrl on MSW has a bug where it doesn't
        # draw all of the text for an item if the font is larger than
        # the default. It seems to be clipping the item's label as if
        # it was the size of the same label in the default font.
        if 'wxMSW' not in wx.PlatformInfo or wx.GetApp().GetComCtl32Version() >= 600:
            treeFont.SetPointSize(treeFont.GetPointSize()+2)
            treeFont.SetWeight(wx.BOLD)
            catFont.SetWeight(wx.BOLD)
        firstChild = None
        selectItem = None
        count = 0
        if fltr:
            # remember the filter per list so it survives list switches
            self.filter_history[list.current_list]=fltr
        item_mask='%s'
        keys=[]
        if 1:
            count += 1
            if fltr.lower():
                self.info('Applying filter "%s"' % fltr.lower())
            else:
                self.info('Removing filter.')
            if fltr:
                # OR across comma-separated terms; substring match on
                # columns 0,1,5,6 per term. NOTE(review): column 3 is
                # matched against the whole filter string (fltr), not the
                # current term (subfltr) -- possibly unintended; confirm.
                for subfltr in fltr.split(','):
                    if subfltr:
                        keys += [key for key in list.itemDataMap if subfltr.lower() in str(list.itemDataMap[key][0]).lower() or subfltr.lower() in str(list.itemDataMap[key][1]).lower() or fltr.lower() in str(list.itemDataMap[key][3]).lower()\
                            or subfltr.lower() in str(list.itemDataMap[key][5]).lower()\
                            or subfltr.lower() in str(list.itemDataMap[key][6]).lower()]
            else:
                keys = list.itemDataMap.keys()
            list.DeleteAllItems()
            # virtual list: the index map alone decides what is shown
            self.table_list.itemIndexMap = keys
            if keys:
                self.table_list.setMixins()
                self.table_list.Refresh()
            if 0 and keys:
                # disabled legacy population loop (py2: has_key)
                j=0
                for key in keys:
                    i= list.data[list.current_list][key]
                    list.Refresh()
                    if 0:
                        index=list.InsertStringItem(sys.maxsize, item_mask % i[0])
                        for idx in range(1,len(i)-2):
                            list.SetStringItem(index, idx, str(i[idx]))
                        list.SetItemData(index,key)
                        keycolid=0
                        if favs.has_key(i[keycolid]):
                            item = list.GetItem(index)
                            font = item.GetFont()
                            font.SetWeight(wx.FONTWEIGHT_BOLD)
                            item.SetFont(font)
                            # This does the trick:
                            list.SetItem(item)
                        imgs= { 'default':'images/database_green_16.png',
                                'DEV':'images/database_green_16.png',
                                'PROD':'images/database_red_16.png',
                                'UAT':'images/database_blue_16.png',
                                'QA':'images/database_black_16.png'}
                        imgs={k:os.path.join(home,v) for k,v in imgs.items()}
                        img_type_col_id= self.list.img_col
                        img_type = i[img_type_col_id]
                        img_name=None
                        if imgs.has_key(img_type):
                            img_name=imgs[img_type]
                        else:
                            img_name=imgs['default']
                        img_id=self.list.image_refs[img_name]
                        list.img[key]=img_id
                        list.SetItemImage(index, list.img[key])
                    if 0:
                        if (j % 2) == 0:
                            list._bg='#e6f1f5'
                            list.SetItemBackgroundColour(index, list._bg)
                    j += 1
            if 0:
                # disabled legacy tree population (references names not
                # defined here: category, items, DoesModifiedExist, _tP)
                child = list.AppendItem(self.root, category, image=count)
                list.SetItemFont(child, catFont)
                list.SetItemPyData(child, count)
                if not firstChild: firstChild = child
                for childItem in items:
                    image = count
                    if DoesModifiedExist(childItem):
                        image = len(_tP)
                    theDemo = list.AppendItem(child, childItem, image=image)
                    list.SetItemPyData(theDemo, count)
                    self.treeMap[ttitle][childItem] = theDemo
        searchItems = {}
        out=''
        max_len=15
        dots=''
        if 0: #not sb:
            sb=cl
            if not sb:
                sb='Double click on pipeline file.'
    # re-enable the UI
    del dsbWindow
def OnSearchCancelBtn(self, event, tf):
    """Search 'x' button: forget the stored filter for the active list."""
    lst, _search_ctrl = tf
    self.filter_history[lst.current_list] = ''
def OnSearch0(self, event, tf):
    """Legacy explicit-search entry: record the filter, rebuild the list.

    An empty filter short-circuits to a plain rebuild; otherwise the
    rebuild runs behind a BusyInfo and busy cursor.
    """
    #search in every list
    (list, filter) = tf
    fltr = filter.GetValue()
    self.filter_history[list.current_list]=fltr
    searchItems=self.searchItems
    if not fltr:
        self.RecreateList(None,(list, filter))
        return
    dsbWindow = wx.WindowDisabler()
    infBusy = wx.BusyInfo("Please wait...", self)
    wx.BeginBusyCursor()
    self.RecreateList(None,(list, filter))
    wx.EndBusyCursor()
    del infBusy
    del dsbWindow
def OnSearchMenu(self, event, tparams):
    """Canned-status menu pick: stuff the search box and run the search.

    NOTE(review): calls self.OnSearch, but only OnSearch0 is visible in
    this file -- confirm OnSearch is defined elsewhere in the class.
    """
    (list, filter_str)=tparams
    self.filter.SetValue(filter_str)
    # Catch the search type (name or content)
    searchMenu = self.filter.GetMenu().GetMenuItems()
    fullSearch = searchMenu[1].IsChecked()
    fltr=self.filter.GetValue()
    if 1:
        if 0: #fullSearch:
            send("force_search", (self.pos,fltr) )
        self.OnSearch(None,(list,self.filter))
def OnUpdate(self, evt):
    """Debug handler: print a banner with the current process id.

    BUG FIX: the original printed the function object `os.getpid`
    (e.g. '<built-in function getpid>') instead of calling it.
    """
    print('@'*20)
    print('@'*20)
    print('update', os.getpid())
    print('@'*20)
    print('@'*20)
def onDrag0(self, event):
    """Legacy drag handler: drag a single row out as a file.

    NOTE(review): the source directory is derived from the first entry of
    os.listdir('.') -- fragile and order-dependent; superseded by onDrag().
    """
    data = wx.FileDataObject()
    obj = event.GetEventObject()
    id = event.GetIndex()
    filename = obj.GetItem(id).GetText()
    dirname = os.path.dirname(os.path.abspath(os.listdir(".")[0]))
    fullpath = str(os.path.join(dirname, filename))
    data.AddFile(fullpath)
    dropSource = wx.DropSource(obj)
    dropSource.SetData(data)
    result = dropSource.DoDragDrop(wx.Drag_AllowMove)
def method_save_default_perspective(self):
    """Persist the AUI layout string to settings if it changed since last save."""
    self.method_set_default_pane_captions()
    layout = self.aui_manager.SavePerspective()
    self.method_set_translation_pane_captions()
    settings = self.app.settings
    if settings.Read('GUI/perspective', '') != layout:
        settings.Write('GUI/perspective', layout)
        settings.Flush()
def method_set_default_pane_captions(self):
    """Apply the default (index 0) caption to every managed AUI pane."""
    for pane_name, captions in self.pane_captions.items():
        self.aui_manager.GetPane(pane_name).Caption(captions[0])
def method_set_translation_pane_captions(self):
    """Apply the translated (index 1) caption to every managed AUI pane."""
    for pane_name, captions in self.pane_captions.items():
        self.aui_manager.GetPane(pane_name).Caption(captions[1])
def method_save_default_state(self):
    """Persist geometry and window state to app settings; Flush once if
    anything changed.

    NOTE(review): stored tuples are read back via eval() -- settings text
    is executed. Not swapped to ast.literal_eval here because the repr of
    wx point/size objects may not be literal-parseable; treat the
    settings file as trusted input.
    """
    flag_flush = False
    position = self.GetPosition()
    if position != eval(self.app.settings.Read('GUI/position', '()')):
        self.app.settings.Write('GUI/position', repr(position))
        flag_flush = True
    size = self.GetSize()
    if size != eval(self.app.settings.Read('GUI/size', '()')):
        self.app.settings.Write('GUI/size', repr(size))
        flag_flush = True
    font = self.GetFont().GetNativeFontInfo().ToString()
    if font != self.app.settings.Read('GUI/font', ''):
        self.app.settings.Write('GUI/font', font)
        flag_flush = True
    is_maximized = self.IsMaximized()
    if is_maximized != self.app.settings.ReadBool('GUI/maximized', False):
        self.app.settings.WriteBool('GUI/maximized', is_maximized)
        flag_flush = True
    is_iconized = self.IsIconized()
    if is_iconized != self.app.settings.ReadBool('GUI/iconized', False):
        self.app.settings.WriteBool('GUI/iconized', is_iconized)
        flag_flush = True
    is_fullscreen = self.IsFullScreen()
    if is_fullscreen != self.app.settings.ReadBool('GUI/fullscreen', False):
        self.app.settings.WriteBool('GUI/fullscreen', is_fullscreen)
        flag_flush = True
    if flag_flush:
        self.app.settings.Flush()
def method_load_default_state(self):
    """Restore geometry and window state from app settings.

    NOTE(review): uses eval() on settings text (see the save counterpart
    for why literal_eval is not a drop-in); `default_fullscreen_style`
    must be defined at module level elsewhere -- confirm.
    """
    self.SetSize(eval(self.app.settings.Read('GUI/size', '(100,100)')))
    self.SetPosition(eval(self.app.settings.Read('GUI/position', '(100,100)')))
    centre_on_screen = eval(self.app.settings.Read('GUI/centre_on_screen', repr((False, wx.BOTH))))
    if centre_on_screen[0]:
        self.CentreOnScreen(centre_on_screen[1])
    self.Maximize(self.app.settings.ReadBool('GUI/maximized', False))
    self.Iconize(self.app.settings.ReadBool('GUI/iconized', False))
    self.ShowFullScreen(self.app.settings.ReadBool('GUI/fullscreen', False), self.app.settings.ReadInt('GUI/fullscreen_style', default_fullscreen_style))
def _Exit(self):
    """Tear down the frame: AUI manager, worker threads, child windows,
    then close and destroy.

    The settings-save branch is disabled under `if 0:`. self.Close()
    resolves to the overridden Close() defined above (hides the frame)
    before the final Destroy().
    """
    if 0:
        if self.app.settings.ReadBool('GUI/save_default_state_on_exit', True):
            self.method_save_default_state()
        if False or self.app.settings.ReadBool('GUI/save_default_perspective_on_exit', True):
            self.method_save_default_perspective()
    self.aui_manager.UnInit()
    self._StopThreads()
    del self.panel
    self.busybox.Destroy()
    del self.busybox
    del self.table_list.logwin
    self.Close()
    self.Destroy()
def OnClose(self, event):
    """Exit-confirmation handler (bound to the custom wxID_EVT_EXIT event).

    NOTE(review): event.Skip() runs regardless of the answer, so default
    event processing continues even when the user picks No -- confirm
    this is intended.
    """
    dlg = wx.MessageDialog(self, "Are you sure you want to exit?", "Exit", wx.YES_NO | wx.ICON_QUESTION)
    if dlg.ShowModal() == wx.ID_YES:
        print ('Kill signal.')
        self._Exit()
    dlg.Destroy()
    event.Skip()
def _StopThreads(self):
    """Signal all worker threads to stop and busy-wait until none report
    running, with the UI disabled behind a BusyInfo."""
    dsbWindow = wx.WindowDisabler()
    infBusy = wx.BusyInfo("Please wait...", self)
    wx.Yield()
    for t in self.threads:
        t.Stop()
    running = 1
    while running:
        running = 0
        for t in self.threads:
            # count of threads still alive; loop until zero
            running = running + t.IsRunning()
        time.sleep(0.1)
    del infBusy
    del dsbWindow
#----------------------------------------------------------------------
def get_selected_items(self):
    """
    Return the selected row indices of the list control, low to high.

    Cleanup: renamed the local `next` (shadowed the builtin) and removed
    the unreachable `return selection` after the infinite loop.
    """
    selection = []
    # start at -1 so GetNextItem returns the first selected item
    current = -1
    while True:
        item = self.table_list.GetNextItem(current, wx.LIST_NEXT_ALL, wx.LIST_STATE_SELECTED)
        if item == -1:
            return selection
        selection.append(item)
        current = item
def GetNextSelected(self, current):
    """Index of the next selected row after *current*, or -1 when exhausted."""
    return self.table_list.GetNextItem(
        current, wx.LIST_NEXT_ALL, wx.LIST_STATE_SELECTED)
def onDrag(self, event):
    """Drag selected rows onto an Explorer window and spool those tables
    into the dropped-on folder.

    The (empty) FileDataObject drag is used only to discover the drop
    target: after DoDragDrop, the foreground window is matched against
    Shell.Application windows to obtain the Explorer folder URL, which
    becomes the spool output directory.
    NOTE(review): `unquote` is not among the visible imports (presumably
    urllib.parse.unquote) and `outdir` stays None when the drop target is
    not an Explorer window -- confirm spool_tables tolerates that.
    """
    data = wx.FileDataObject()
    obj = event.GetEventObject()
    dropSource = wx.DropSource(obj)
    id = event.GetIndex()
    db, schemaname, tablename= obj.GetItem(id,0).GetText(), obj.GetItem(id,1).GetText(), obj.GetItem(id,2).GetText()
    items=self.get_selected_items()
    tl=self.table_list
    tables_to_spool=[]
    for oid in items:
        # (db, schema, table) triple per selected row
        tables_to_spool.append ((tl.GetItem(oid).GetText(), tl.GetItem(oid,1).GetText(), tl.GetItem(oid,2).GetText()))
    dropSource.SetData(data)
    #next line will make the drop target window come to top, allowing us
    #to get the info we need to do the work, if it's Explorer
    result = dropSource.DoDragDrop(0)
    #get foreground window hwnd
    h = win32gui.GetForegroundWindow()
    #get explorer location
    s = gencache.EnsureDispatch('Shell.Application')
    loc, outdir = None, None
    for w in s.Windows():
        if int(w.Hwnd) == h:
            loc = w.LocationURL
    if loc:
        # 'file:///C:/...' -> 'C:/...', URL-decoded
        outdir = loc.split('///')[1]
        outdir = unquote(outdir)
    self.spool_tables(tables_to_spool, outdir)
    return
def spool_tables(self,table_list, out_dir):
    """Register a TableSpooler thread for the given tables, then start threads.

    NOTE(review): the start loop iterates *all* of self.threads, so
    previously-registered threads get Start() called again -- confirm
    TableSpooler.Start tolerates repeated calls.
    """
    print ('in spooler')
    if 1:
        self.threads.append(TableSpooler(self, table_list, out_dir, self.log))
        for t in self.threads:
            t.Start()
def save_status():
    """Block on stdin, then close the global DB connection.

    The original status-dump logic (pretty-printing config and job status
    to job_status_file) is disabled under `if 0:`; it references names
    not visible here (deepcopy, config, job_status, log, d) -- confirm
    before re-enabling.
    """
    global job_status_file, con
    data=input()
    con.close()
    if 0:
        p = pp.PrettyPrinter(indent=4)
        with open(job_status_file, "w") as text_file:
            cfg= deepcopy(config.cfg)
            text_file.write('cfg=%s\nstatus=%s' % (p.pformat(cfg),p.pformat(job_status)))
            log.info (job_status_file, extra=d)
# Worker-pool cap: twice the CPU count.
# NOTE(review): `multiprocessing` is not among the visible imports -- confirm
# it is imported elsewhere in this file.
max_pool_size=multiprocessing.cpu_count() * 2
def main(argv=None):
    """Create the wx application, show the job-monitor frame, run the loop."""
    if argv is None:
        argv = sys.argv
    application = wx.App()
    frame = JobMonitor(None, -1, "ColumnSorterMixin used with a Virtual ListCtrl",wx.Size(500,300))
    frame.Show()
    application.MainLoop()
import threading
def decrypt_ora_password(cypher):
    """Decrypt a hex-encoded Oracle password with the hard-coded secret.

    NOTE(review): `unhexlify` and `decrypt` are not among the visible
    imports -- presumably binascii.unhexlify and a project crypto helper;
    confirm. The hard-coded secret is obfuscation, not real security.
    """
    secret = 'OATS'
    cyphertext=unhexlify(cypher)
    # now decrypt the plaintext (using the same salt and secret)
    plaintext = decrypt(secret, cyphertext)
    return plaintext.decode()
# OS usernames with developer privileges: they bypass the bicadmin check in
# MyApp.OnInit and are audited as 'developer' by log_access_attempt.
DEVELOPERS=['abuzunov']
class MyApp(wx.App,wx.lib.mixins.inspection.InspectionMixin):
    """Application object: settings discovery, single-instance and user
    checks, and access-attempt auditing.

    The class-level code runs at import time: it resolves the settings
    file under init.HOME/cfg, falls back to the script directory, and
    asserts the file exists. NOTE(review): asserts are stripped under -O,
    and the message contains the typo 'Coannot' (a runtime string, left
    unchanged here).
    """
    app_version = __version__
    app_path = os.path.dirname(sys.argv[0])
    app_name = os.path.basename(sys.argv[0].split('.')[0])
    help_file = app_name + '.htb'
    settings_name = os.path.join(init.HOME,'cfg', app_name + '.cfg')
    if not os.path.isfile(settings_name):
        # fall back to a settings file beside the script
        settings_name = os.path.join(app_path, app_name + '.cfg')
    assert os.path.isfile(settings_name), 'Coannot find app settings file\n%s.' % settings_name
    def CreateBusyFrame(self):
        # Floating "applog" window used as a busy/progress surface.
        self.busybox = BusyFrame(None,-1,'Applog.',app = self)
    def log_access_attempt(self,opt,con):
        """Audit one tool start into OATS.BATCH_STATUS_TOOL_LOG.

        Developers are logged with osuser='developer'; everyone else logs
        the second element of --username.
        """
        if 1:
            cur = con.cursor()
            tool=__file__
            q="INSERT INTO OATS.BATCH_STATUS_TOOL_LOG values(systimestamp,:1, :2, :3)"
            cur.prepare(q)
            runas=opt.username[0]
            if runas in DEVELOPERS:
                osuser='developer'
            else:
                osuser=opt.username[1][0]
            rows=[(runas,osuser,tool)]
            cur.executemany(None, rows)
            con.commit()
            cur.close()
    def OnInit(self):
        """wx entry point: enforce single instance and allowed user,
        audit the start, and return True to run the event loop."""
        global log, init
        self.Init()
        if 1:
            self.settings = open_settings(self.settings_name)
            name_user = wx.GetUserId()
            name_instance = self.app_name + '::'
            # one running copy per Windows user
            self.instance_checker = wx.SingleInstanceChecker(name_instance + name_user)
            if self.instance_checker.IsAnotherRunning():
                wx.MessageBox('Software is already running.', 'Warning')
                return False
            if opt.username[0] in ['bicadmin'] or opt.username[0] in DEVELOPERS:
                pass
            else:
                wx.MessageBox('You have to run this code as bicadmin.', 'Warning')
                return False
            if opt.username[0] in DEVELOPERS or len(opt.username)==2:
                pass
            else:
                pass
            self.log_access_attempt(opt, con)
            if 0:
                # disabled: busy frame as top window
                self.busybox = busybox=BusyFrame(None,-1,'Log messages.',app = self)
                self.SetTopWindow(busybox)
            if 0:
                # disabled: create the main monitor frame here
                self.frame = JobMonitor(None, -1,__appname__,app = self, log=log, ts_out_dir=init.ts_out_dir, config=config) #main_frame(None, app = self, title = 'Name/version')
        return True
#con=None
if __name__ == '__main__':
    freeze_support()
    # CLI options.  NOTE(review): action='append' combined with scalar
    # defaults means a supplied option yields a list while the default may
    # not be one; the code below indexes [0] accordingly -- confirm intended.
    p = argparse.ArgumentParser()
    p.add_argument("--batch_config", action='append', default='batch_status.config.py',type=lambda kv: kv.split("="), dest='batch_config')
    p.add_argument("--pool_size", action='append',default=6, type=lambda kv: kv.split("="), dest='pool_size')
    p.add_argument("--array_size", action='append',default=10000, type=lambda kv: kv.split("="), dest='array_size')
    p.add_argument("--compress", action='append',default=0, type=lambda kv: kv.split("="), dest='compress')
    p.add_argument("--column_delimiter", action='append',default='|', type=lambda kv: kv.split("="), dest='column_delimiter')
    p.add_argument("--username", action='append', default=[getpass.getuser()],type=lambda kv: kv.split("="), dest='username')
    #import getpass
    #print(getpass.getuser())
    #pprint(dict(os.environ))
    opt = p.parse_args()
    optd = vars(opt)
    #pprint(optd)
    # Locate the batch config: prefer <config_home>/config/<name>, then
    # <config_home>/<name>.
    conf_path = init.config_home #os.path.dirname(os.path.abspath(__file__))
    config_file = os.path.join(conf_path,'config', opt.batch_config[0])
    if not os.path.isfile(config_file):
        config_file = os.path.join(conf_path, opt.batch_config[0])
    assert os.path.isfile(config_file), '111, Cannot find config file\n%s.' % config_file
    #print (1,config_file)
    config=import_module(config_file)
    if 1:
        # Resolve the Oracle connect string "user@host:port/service" from config.
        #print (config)
        db_profile=config.cfg['profile'][config.cfg['queries']['profile']]['from']
        connector=config.cfg['connectors'][db_profile]
        #print(db_profile,connector)
        #ip = 'jc1lbiorc1'
        #port = 1521
        #service_name = 'oradb1p'
        dbuser, s = connector.split('@')
        ip, s = s.split(':')
        port, service_name= s.split('/')
        try:
            dsn = cx_Oracle.makedsn(ip, port, service_name=service_name)
            # The password lives encrypted on disk; decrypt just-in-time.
            with open(os.path.join(home,'libs','cypher.txt'), 'rb') as fh:
                c= fh.read()
            #print (c)
            if 0:
                pwd=decrypt_ora_password(c)
                #print(pwd)
                #e(0)
            # threaded=True: connection is shared across GUI worker threads.
            con = cx_Oracle.connect(dbuser, decrypt_ora_password(c), dsn,threaded=True)
        except cx_Oracle.DatabaseError as e:
            import itertools
            error, = e.args
            print ('#'*80)
            # NOTE(review): `self` does not exist at module level -- this line
            # raises NameError if this handler ever runs; confirm and fix.
            print ('#'*71+type(self).__name__)
            if error.code == 955:
                print('Table already exists')
            if error.code == 1031:
                print("Insufficient privileges - are you sure you're using the owner account?")
            print(error.code)
            print(error.message)
            print(error.context)
            print ('#'*80)
            print ('#'*80)
            print(formatExceptionInfo())
            raise
    app = MyApp(redirect=False) #=True,filename="applogfile.txt")
    busy = MyPyBusyFrame(app,'One moment please, initializing application...', title='Warning')
    # The ListCtrl_* modules read these names from builtins at import time,
    # so they are (re)published before each import.
    builtins.home = home
    builtins.update_evt = update_evt
    builtins.exit_evt=exit_evt
    builtins.log_evt=log_evt
    builtins.con=con
    import libs.ListCtrl_MessageLog as ListCtrl_MessageLog
    builtins.home = home
    builtins.update_evt = update_evt
    builtins.exit_evt=exit_evt
    builtins.log_evt=log_evt
    builtins.con=con
    import libs.ListCtrl_ParentsOf as ListCtrl_ParentsOf
    builtins.home = home
    builtins.update_evt = update_evt
    builtins.exit_evt=exit_evt
    builtins.log_evt=log_evt
    builtins.con=con
    import libs.ListCtrl_ChildrenOf as ListCtrl_ChildrenOf
    builtins.home = home
    builtins.update_evt = update_evt
    builtins.exit_evt=exit_evt
    builtins.log_evt=log_evt
    builtins.con=con
    import libs.ListCtrl_ExecHistory as ListCtrl_ExecHistory
    def start_gui(data):
        """Alternative entry point: build the app, the JobMonitor frame and
        the log window, then run the wx main loop.  Currently unused."""
        #e(0)
        app = MyApp(redirect=False) #=True,filename="applogfile.txt")
        #busy = MyPyBusyFrame(app,'One moment please, initializing application...111', title='Warning')
        #print(log)
        #e(0)
        frame = JobMonitor(None, -1,__appname__,app = app, log=log, ts_out_dir=init.ts_out_dir, config=config)
        #e(0)
        busybox = BusyFrame(frame,-1,'Applog.',app = app)
        busybox.info([__file__,'Initializing application.'])
        frame.busybox=busybox
        busybox.Show()
        try:
            busybox.info([__file__,'Starting application loop.'])
            app.MainLoop()
        except Exception as err:
            print('#'*80)
            traceback.print_exc();
            print('#'*80)
            raise
    # Clamp the requested pool size to twice the CPU count.
    if opt.pool_size> max_pool_size:
        pool_size=max_pool_size
        log.warn('pool_size value is too high. Setting to %d (cpu_count() * 2)' % max_pool_size)
    else:
        pool_size=opt.pool_size
    #parser = argparse.ArgumentParser(description=app_title)
    #parser.add_argument('-s','--session',default='',type=str, help='Session file to open')
    #args = parser.parse_args()
    #default_session=None
    #if hasattr(args, 'session') and args.session:
    #    default_session=args.session
    #e(0)
    if 1:
        #print(log)
        #e(0)
        frame = JobMonitor(None, -1,__appname__,app = app, log=log, ts_out_dir=init.ts_out_dir, config=config)
        #e(0)
        busybox = BusyFrame(frame,-1,'Applog.',app = app)
        busybox.info([__file__,'Initializing application.'])
        frame.busybox=busybox
        busybox.Show()
        try:
            busybox.info([__file__,'Starting application loop.'])
            # Drop the "initializing" splash right before entering the loop.
            del busy
            app.MainLoop()
        except Exception as err:
            print('#'*80)
            traceback.print_exc();
            print('#'*80)
            raise
    if 0:
        app = wx.App(False)
        JobMonitor(None, -1, 'File Hunter')
        app.MainLoop()
    # Persist status when the interpreter exits.
    atexit.register(save_status)
from rest_framework import pagination
class StandardResultSetPagination(pagination.PageNumberPagination):
    """
    Pagination class used in views returning a lot of data
    """
    # Default number of items per page.
    page_size = 100
    # Clients may override the page size via the ?limit= query parameter...
    page_size_query_param = 'limit'
    # ...but never beyond this cap.
    max_page_size = 1000
import os
import shutil
print("python start1")
def runCmd(cmd):
    """Run *cmd* in a shell, echo its output, and return the decoded output.

    check_output is used (and the output re-printed) because a plain
    subprocess.check_call printed nothing under Azure CI.
    Raises subprocess.CalledProcessError when the command exits non-zero.
    """
    print(("runCmd", cmd))
    import subprocess
    output = subprocess.check_output(cmd, shell=True)
    output = output.decode('utf8')
    print("output:")
    print(output)
    # Improvement: return the captured output so callers can inspect it
    # (previously the function returned None).
    return output
def buildNimCsources():
    # Clone the Nim compiler's C sources (network access required).
    # The actual build step is left disabled below.
    print("in buildNimCsources")
    runCmd("git clone --depth 1 https://github.com/nim-lang/csources.git")
    # runCmd("cd csources && sh build.sh")
buildNimCsources()
print("after buildNimCsources")
|
# Map ISO weekday numbers (1..7) to their English names.
week = {
    1: "Monday", 2: "Tuesday", 3: "Wednesday", 4: "Thursday",
    5: "Friday", 6: "Saturday", 7: "Sunday",
}
# Read a weekday number from stdin and print its name, or "Not Found".
n = int(input())
print(week.get(n, "Not Found"))
import tensorflow as tf
import numpy as np
tf.set_random_seed(777)  # for reproducibility

# Load the diabetes CSV; the last column is the 0/1 label.
# The final 250 rows are held out as the test set.
xy = np.loadtxt('data-03-diabetes.csv', delimiter=',', dtype=np.float32)
x_data = xy[:-250, 0:-1]
y_data = xy[:-250, [-1]]
x_test = xy[-250:, 0:-1]
y_test = xy[-250:, [-1]]

batch_size = 20
training_epochs = 20
print(x_data.shape, y_data.shape)

# placeholders for a tensor that will be always fed.
X = tf.placeholder(tf.float32, shape=[None, 8])
Y = tf.placeholder(tf.float32, shape=[None, 1])

W = tf.Variable(tf.random_normal([8, 1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')

# Hypothesis using sigmoid: tf.div(1., 1. + tf.exp(-tf.matmul(X, W)))
hypothesis = tf.sigmoid(tf.matmul(X, W) + b)

# cost/loss function: binary cross-entropy.
cost = -tf.reduce_mean(Y * tf.log(hypothesis) + (1 - Y) *
                       tf.log(1 - hypothesis))
train = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(cost)

# Accuracy computation: True if hypothesis > 0.5 else False.
predicted = tf.cast(hypothesis > 0.5, dtype=tf.float32)
accuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, Y), dtype=tf.float32))

## batch train ##
# Fix: enqueue_many=True makes tf.train.batch treat each ROW of
# x_data/y_data as one example.  Without it the whole matrix is enqueued
# as a single example and the batch shapes no longer match the
# [None, 8] / [None, 1] placeholders above.
train_x_batch, train_y_batch = tf.train.batch([x_data, y_data],
                                              batch_size=batch_size,
                                              enqueue_many=True)

# Launch graph
with tf.Session() as sess:
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)

    # Initialize TensorFlow variables
    sess.run(tf.global_variables_initializer())

    for epoch in range(training_epochs):
        cost_sum = 0
        total_batch = int(len(x_data) / batch_size)
        for i in range(total_batch):
            x_batch, y_batch = sess.run([train_x_batch, train_y_batch])
            cost_val, _ = sess.run([cost, train], feed_dict={X: x_batch, Y: y_batch})
            cost_sum += cost_val / total_batch
        # Mean batch cost for this epoch.
        print(cost_sum)

    # Accuracy report on the held-out test rows.
    h, c, a = sess.run([hypothesis, predicted, accuracy],
                       feed_dict={X: x_test, Y: y_test})
    print("\nAccuracy: ", a)

    coord.request_stop()
    coord.join(threads)
# Function definition: add two numbers.
def add(arg1, arg2):
    """Return the sum of *arg1* and *arg2*."""
    return arg1 + arg2

# Backwards-compatible alias: the original bound (and shadowed) the
# builtin name `sum` with a lambda; kept so existing callers still work.
sum = add

# Now you can call add/sum like any other function.
# (Fixed: the original used Python-2-only `print` statements.)
print("Value of Total: ", add(10, 20))
print("Value of Total: ", add(20, 20))
|
from app import db
from .news_models import NewsEntry, NewsComment
from .news_views import bp_news
from app.models import User
import os
# Plugin metadata consumed by the host application's plugin loader.
IDENTIFIER = "news"
NAME = "News Plugin"
VERSION = "v0.1"
AUTHOR = "Asyks @ EU-Blackhand"
def init():
    # Load-time hook: attach the news relationships to the User model so
    # user.news / user.news_comments become queryable.
    setattr(User, "news", db.relationship("NewsEntry", backref="user", lazy="dynamic"))
    setattr(User, "news_comments", db.relationship("NewsComment", backref="user", lazy="dynamic"))
def install():
    """One-time install hook for the plugin API.

    Currently identical to init(), so delegate instead of duplicating the
    relationship wiring.
    """
    init()
def get_blueprint():
    # Expose the plugin's Flask blueprint to the host application.
    return bp_news
|
#! /usr/bin/python3
# tip : command pinout gives the pinout of the raspberry pi
import json
from time import sleep
import pigpio
from flask import Flask, request
app = Flask(__name__)
def angle_to_duty_cycle(angle: float) -> float:
    """
    Convert a servo angle (degrees) to a PWM duty cycle.

    Calibrated for the Tower Pro SG92R servo; does not work with the SG90.
    Returns None when *angle* is not a number.
    """
    try:
        return (1.0/18.0 * angle) + 2
    except TypeError:
        return None
def set_angle(angle: float) -> dict:
    """
    Rotate the servo motor to *angle* degrees.

    Converts the angle to a PWM duty cycle and applies it.
    :param angle: the angle to rotate to
    :return: response dictionary
    :rtype: dict
    """
    return set_duty_cycle(angle_to_duty_cycle(angle))
def set_duty_cycle(duty_cycle: float) -> dict:
    """
    Change the pwm duty cycle of the servo motor

    :param duty_cycle: the duty cycle to be set on the servo pin
    :return: response dictionary
    :rtype: dict
    """
    if duty_cycle is None:
        response: dict = {
            "duty_cycle": duty_cycle,
            "status": 400,
            "reason": "duty cycle is None"
        }
    else:
        frequency = 50  # Hz -- standard hobby-servo PWM frequency
        # NOTE(review): pigpio.hardware_PWM expects the duty cycle in
        # millionths (0..1_000_000); int() of a value like 7.0 gives 7,
        # i.e. effectively 0% -- confirm the intended unit conversion.
        duty_cycle = int(duty_cycle)
        pi.hardware_PWM(SERVO_PIN, frequency, duty_cycle)
        response: dict = {
            "duty_cycle": duty_cycle,
            "status": 200
        }
    return response
@app.route('/')
def hello_world():
    # Simple liveness-check endpoint.
    return 'Hello, World!'
@app.route("/servo")
def set_servo_angle():
    """GET /servo?angle=... or /servo?duty_cycle=...

    `angle` wins when both are supplied; when neither parses as a float,
    a 400-style JSON body is returned.
    """
    angle = request.args.get("angle", type=float)
    duty_cycle = request.args.get("duty_cycle", type=float)
    if angle is not None:
        response: dict = set_angle(angle)
    elif duty_cycle is not None:
        response: dict = set_duty_cycle(duty_cycle)
    else:
        response = {
            "status": 400,
            "reason": "not satisfied with url parameter"
        }
    return json.dumps(response)
if __name__ == "__main__":
    # setup gpio pin for servo
    pi = pigpio.pi()
    SERVO_PIN = 18  # BCM pin layout. Not BOARD pin layout
    # ALT5 on BCM 18 selects the hardware PWM function.
    pi.set_mode(SERVO_PIN, pigpio.ALT5)
    # setup server
    PORT = 5001
    print(f"Server is listening on port {PORT}")
    app.run(debug=True, host="0.0.0.0", port=PORT)
    # tear down -- only reached after the Flask server stops.
    pi.write(SERVO_PIN, 0)
    pi.stop()
|
from sklearn.model_selection import KFold, GridSearchCV
from sklearn.tree import DecisionTreeClassifier
from sklearn.cluster import DBSCAN
from sklearn.metrics import adjusted_rand_score, make_scorer
def Decision_Tree(X, Y, kfold=5):
    """Grid-search a DecisionTreeClassifier over criterion/splitter/depth.

    Labels are taken from the second column of Y, cast to int.
    Returns the fitted GridSearchCV object.
    """
    splitter = KFold(n_splits=kfold, random_state=1, shuffle=True)
    search_space = {
        'criterion': ["gini", "entropy"],
        'splitter': ["best", "random"],
        'max_depth': [2, 3, 5, 8, 10, 15, 20],
        'random_state': [1],
    }
    search = GridSearchCV(estimator=DecisionTreeClassifier(),
                          param_grid=search_space, cv=splitter)
    search.fit(X, Y[:, 1].astype(int))
    return search
def DB_Scan(X, Y, kfold=5):
    """Grid-search DBSCAN hyper-parameters, scored by adjusted Rand index.

    Labels are taken from the second column of Y, cast to int.
    Fix: adjusted_rand_score is a similarity (higher is better), so the
    scorer must use greater_is_better=True; the previous False negated the
    scores and made the search select the *worst* clustering.
    """
    cv = KFold(n_splits=kfold, random_state=1, shuffle=True)
    params = {
        'eps': [1, 2, 3, 5, 15, 20, 25],
        'min_samples': [2, 3, 5, 8]
    }
    # NOTE(review): GridSearchCV scoring assumes an estimator with
    # predict(); DBSCAN only provides fit_predict, so this wiring may need
    # a custom scorer -- verify it runs on real data.
    scorer = make_scorer(adjusted_rand_score, greater_is_better=True)
    gs_dbscan = GridSearchCV(estimator=DBSCAN(), param_grid=params, cv=cv,
                             scoring=scorer)
    gs_dbscan.fit(X, Y[:, 1].astype(int))
    return gs_dbscan
#!/usr/bin/env python
# from distutils.core import setup
from setuptools import setup
import multiprocessing # nopep8
import os
# Directory containing this setup.py, resolved to an absolute path.
__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))

VERSION = '0.3.1'

# Record the version where the package can read it at runtime.
# Fix: both files were previously opened without being reliably closed;
# `with` guarantees the handles are released even on error.
with open(os.path.join(__location__, 'orlo', '_version.py'), 'w') as version_file:
    version_file.write("__version__ = '{}'".format(VERSION))

with open(os.path.join(__location__, 'README.md')) as readme_file:
    long_description = readme_file.read()

setup(
    name='orlo',
    version=VERSION,
    description='Deployment data capture API',
    author='Alex Forbes',
    author_email='alforbes@ebay.com',
    license='GPL',
    long_description=long_description,
    url='https://github.com/eBayClassifiedsGroup/orlo',
    packages=[
        'orlo',
    ],
    include_package_data=True,
    install_requires=[
        'Flask',
        'Flask-Migrate',
        'Flask-SQLAlchemy',
        'Flask-HTTPAuth',
        'Flask-TokenAuth',
        'arrow',
        'gunicorn',
        'psycopg2',
        'pyldap',
        'pytz',
        'sphinxcontrib-httpdomain',
        'sqlalchemy-utils',
    ],
    tests_require=[
        'Flask-Testing',
        'orloclient>=0.1.1',
    ],
    test_suite='tests',
    # Creates a script in /usr/local/bin
    entry_points={
        'console_scripts': ['orlo=orlo.cli:main']
    }
)
|
# coding=utf-8
# LIFX product id -> marketing name, per the published LIFX products list.
# Several ids share a name because hardware revisions keep the branding.
product_map = {
    1: "Original 1000",
    3: "Color 650",
    10: "White 800 (Low Voltage)",
    11: "White 800 (High Voltage)",
    18: "White 900 BR30 (Low Voltage)",
    20: "Color 1000 BR30",
    22: "Color 1000",
    27: "LIFX A19",
    28: "LIFX BR30",
    29: "LIFX+ A19",
    30: "LIFX+ BR30",
    31: "LIFX Z",
    32: "LIFX Z 2",
    36: "LIFX Downlight",
    37: "LIFX Downlight",
    38: "LIFX Beam",
    43: "LIFX A19",
    44: "LIFX BR30",
    45: "LIFX+ A19",
    46: "LIFX+ BR30",
    49: "LIFX Mini",
    50: "LIFX Mini Warm to White",
    51: "LIFX Mini White",
    52: "LIFX GU10",
    55: "LIFX Tile",
    57: "LIFX Candle",
    59: "LIFX Mini Color",
    60: "LIFX Mini Warm to White",
    61: "LIFX Mini White",
    62: "LIFX A19",
    63: "LIFX BR30",
    64: "LIFX+ A19",
    65: "LIFX+ BR30",
    68: "LIFX Candle",
    81: "LIFX Candle Warm to White",
    82: "LIFX Filament"
}
# Identifies which products are lights.
# Currently all LIFX products that speak the LAN protocol are lights.
# However, the protocol was written to allow addition of other kinds
# of devices, so it's important to be able to differentiate.
# Since every known product is a light, this is simply every product id
# (in the same order the table above declares them).
light_products = list(product_map)
def _feature_set(color, temperature, infrared, multizone, chain,
                 min_kelvin, max_kelvin):
    """Build one per-product capability record; keeps the table readable."""
    return {"color": color,
            "temperature": temperature,
            "infrared": infrared,
            "multizone": multizone,
            "chain": chain,
            "min_kelvin": min_kelvin,
            "max_kelvin": max_kelvin}

# LIFX product id -> capability flags and white-temperature range (Kelvin).
# Columns: color, temperature, infrared, multizone, chain, min_k, max_k.
features_map = {
    1:  _feature_set(True,  True,  False, False, False, 2500, 9000),
    3:  _feature_set(True,  True,  False, False, False, 2500, 9000),
    10: _feature_set(False, True,  False, False, False, 2700, 6500),
    11: _feature_set(False, True,  False, False, False, 2700, 6500),
    18: _feature_set(False, True,  False, False, False, 2700, 6500),
    20: _feature_set(True,  True,  False, False, False, 2500, 9000),
    22: _feature_set(True,  True,  False, False, False, 2500, 9000),
    27: _feature_set(True,  True,  False, False, False, 2500, 9000),
    28: _feature_set(True,  True,  False, False, False, 2500, 9000),
    29: _feature_set(True,  True,  True,  False, False, 2500, 9000),
    30: _feature_set(True,  True,  True,  False, False, 2500, 9000),
    31: _feature_set(True,  True,  False, True,  False, 2500, 9000),
    32: _feature_set(True,  True,  False, True,  False, 2500, 9000),
    36: _feature_set(True,  True,  False, False, False, 2500, 9000),
    37: _feature_set(True,  True,  False, False, False, 2500, 9000),
    38: _feature_set(True,  True,  False, True,  False, 2500, 9000),
    43: _feature_set(True,  True,  False, False, False, 2500, 9000),
    44: _feature_set(True,  True,  False, False, False, 2500, 9000),
    45: _feature_set(True,  True,  True,  False, False, 2500, 9000),
    46: _feature_set(True,  True,  True,  False, False, 2500, 9000),
    49: _feature_set(True,  True,  False, False, False, 2500, 9000),
    50: _feature_set(False, True,  False, False, False, 1500, 4000),
    51: _feature_set(False, False, False, False, False, 2700, 2700),
    52: _feature_set(True,  True,  False, False, False, 2500, 9000),
    55: _feature_set(True,  True,  False, False, True,  2500, 9000),
    57: _feature_set(True,  True,  False, False, False, 1500, 9000),
    59: _feature_set(True,  True,  False, False, False, 2500, 9000),
    60: _feature_set(False, True,  False, False, False, 1500, 4000),
    61: _feature_set(False, False, False, False, False, 2700, 2700),
    62: _feature_set(True,  True,  False, False, False, 2500, 9000),
    63: _feature_set(True,  True,  False, False, False, 2500, 9000),
    64: _feature_set(True,  True,  True,  False, False, 2500, 9000),
    65: _feature_set(True,  True,  True,  False, False, 2500, 9000),
    68: _feature_set(True,  True,  False, False, False, 1500, 9000),
    81: _feature_set(False, True,  False, False, False, 2200, 6500),
    82: _feature_set(False, False, False, False, False, 2000, 2000),
}
|
import random
def gen_small(len = 6):
    """Return a numeric PIN of *len* digits (at most 6).

    Bug fix: the original used chr(random.randint(0, 9)), which produces
    ASCII control characters (codes 0-9), not the digit characters
    '0'-'9' (codes 48-57) that the sibling gen() uses.
    """
    assert len <= 6
    password = ""
    for _ in range(len):
        password += chr(random.randint(48, 57))  # '0'..'9'
    return password
def gen(len = 16):
    """Return a random password of *len* (>= 12) characters.

    Guarantees at least 3 digits, 3 uppercase, 3 lowercase and 3
    punctuation characters, then pads with random printable characters
    (excluding '/', '\\' and '`') and shuffles the result.
    """
    assert len >= 12
    chars = []
    for _ in range(3):
        chars.append(chr(random.randint(48, 57)))   # digits
    for _ in range(3):
        chars.append(chr(random.randint(65, 90)))   # uppercase letters
    for _ in range(3):
        chars.append(chr(random.randint(97, 122)))  # lowercase letters
    chars.append(chr(random.randint(35, 46)))       # punctuation: # .. .
    chars.append(chr(random.randint(58, 64)))       # punctuation: : .. @
    chars.append(chr(random.randint(93, 95)))       # punctuation: ] ^ _
    for _ in range(len - 12):
        v = random.randint(35, 122)
        while v in (47, 92, 96):                    # skip / \ `
            v = random.randint(35, 122)
        chars.append(chr(v))
    random.shuffle(chars)
    return "".join(chars)
|
from django.forms import ModelForm, PasswordInput, TextInput
from django import forms
from ...models import User
class UserForm(forms.ModelForm):
    """User registration form with a password-confirmation field."""
    confirm_password = forms.CharField(widget=forms.PasswordInput, required=True, label='Confirmar Contraseña')

    class Meta:
        model = User
        #fields = '__all__'
        fields = ['first_name','last_name','email','password']
        widgets = {
            # Fix: 'password' appeared twice in this dict, so the first
            # (PasswordInput) entry was silently discarded.  One entry now
            # combines the password widget with its placeholder.
            'password' : PasswordInput(attrs={'placeholder': '<contraseña>'}),
            'first_name' : TextInput(attrs={'placeholder': '<nombres>'}),
            'last_name' : TextInput(attrs={'placeholder': '<apellidos>'}),
            'email' : TextInput(attrs={'placeholder': '<email>'}),
        }
        labels = {
            'first_name' : 'Nombres',
            'last_name' : 'Apellidos',
            'password' : 'Contraseña',
        }

    def clean(self):
        """Validate that both password fields match."""
        cleaned_data = super(UserForm, self).clean()
        password = cleaned_data.get("password")
        confirm_password = cleaned_data.get("confirm_password")
        if password != confirm_password:
            raise forms.ValidationError(
                "Las contraseñas no coinciden"
            )
        # Django convention: clean() returns the cleaned data.
        return cleaned_data
class UserAdminForm(forms.ModelForm):
    """Admin-side user form: also exposes user_type."""
    confirm_password = forms.CharField(widget=forms.PasswordInput, required=True, label='Confirmar Contraseña')

    class Meta:
        model = User
        #fields = '__all__'
        fields = ['first_name','last_name','email','user_type','password']
        widgets = {
            'password' : PasswordInput,
            'first_name' : TextInput(attrs={'placeholder': '<nombres>'}),
            'last_name' : TextInput(attrs={'placeholder': '<apellidos>'}),
            'birth_date' : TextInput(attrs={'placeholder': '<fecha de nacimiento>', 'type': 'date'}),
        }
        labels = {
            'first_name' : 'Nombres',
            'last_name' : 'Apellidos',
            'password' : 'Contraseña',
            'user_type' : 'Tipo Usuario',
        }

    def clean(self):
        """Validate that both password fields match.

        Fix: the original called super(UserForm, self).clean() -- a
        copy-paste error that raises TypeError because UserAdminForm is
        not a subclass of UserForm.
        """
        cleaned_data = super(UserAdminForm, self).clean()
        password = cleaned_data.get("password")
        confirm_password = cleaned_data.get("confirm_password")
        if password != confirm_password:
            raise forms.ValidationError(
                "Las contraseñas no coinciden"
            )
        # Django convention: clean() returns the cleaned data.
        return cleaned_data
class UserLoginForm(forms.Form):
    """Simple email/password login form."""
    email = forms.EmailField(max_length=100, required=True)
    password = forms.CharField(max_length=20, widget=forms.PasswordInput, required=True)

    def login(self, request):
        # Delegate credential checking to User.authenticate; returns the
        # matching user, or whatever falsy value authenticate yields on
        # failure.  Must be called after is_valid().
        email= self.cleaned_data.get('email')
        password = self.cleaned_data.get('password')
        user = User.authenticate(email, password)
        print ("=====> User: ", user)
        return user
'''
def clean(self):
email = self.cleaned_data.get('email')
password = self.cleaned_data.get('password')
user = User.authenticate(email, password)
if not user:
raise forms.ValidationError("Lo sentimos, login fallido, vuelva a intentar")
return self.cleaned_data
''' |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2019-07-11 13:57
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: introduces RecurrenceEvent and a
    # nullable Event.recurrence foreign key pointing at it.

    dependencies = [
        ('api', '0024_auto_20190626_1650'),
    ]

    operations = [
        migrations.CreateModel(
            name='RecurrenceEvent',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('frequency', models.CharField(max_length=50)),
                ('start_date', models.CharField(max_length=50)),
                ('end_date', models.CharField(max_length=50)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.AddField(
            model_name='event',
            name='recurrence',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='events', related_query_name='event', to='api.RecurrenceEvent'),
        ),
    ]
|
import dump
import gevent
from comm import ishonest, isdishonest, isadversary, setFunctionality
from utils import gwrite, print
from queue import Queue as qqueue
from hashlib import sha256
from collections import defaultdict
from gevent.event import AsyncResult
from gevent.queue import Queue, Channel
class BD_SEC_Functionality(object):
    """Ideal functionality: bounded-delay secure channel from a fixed
    sender to a fixed receiver.  The adversary may delay delivery up to
    `delta` rounds and can pull the leak buffer (which here includes the
    full message on each send)."""
    def __init__(self, sid, pid, f2p, p2f, f2a, a2f, f2z, z2f):
        self.sid = sid
        # sid encodes (ssid, sender, receiver, round).
        self.ssid, self.sender, self.receiver, self.round = sid
        #print('\tSender={}, receiver={}'.format(self.sender,self.receiver))
        self.pid = pid
        # M: message in transit; D: rounds left until delivery;
        # Dhat: cumulative adversarial delay; delta: max allowed delay.
        self.M = None; self.D = 1; self.Dhat = 1
        self.delta = 1
        # Channel pairs to/from parties (p), adversary (a), environment (z).
        self.f2p=f2p; self.p2f=p2f
        self.f2a=f2a; self.a2f=a2f
        self.f2z=f2z; self.z2f=z2f
        #print('\033[1m[{}]\033[0m new bd_sec with M:{}'.format( self.sid, self.M ))
        self.leaks = []
    def leak(self, msg):
        # Buffer leaked information until the adversary asks for it.
        self.leaks.append( msg )
    def input_send(self, msg):
        """Accept the sender's message: reset the delivery countdown, leak
        the send event, and notify the adversary."""
        #if self.M is not None: assert False
        self.D = 1; self.M = msg
        self.leak( ('send', self.M) )
        #print('\033[1m[Leak]\033[0m', 'message: {}'.format(self.M))
        self.f2a.write( (self.sender, ('sent',self.M)) ) # change sender to id of functionality
    def input_fetch(self):
        # Receiver polls: deliver once the countdown reaches zero,
        # otherwise yield the activation.
        self.D -= 1
        if self.D == 0:
            self.f2p.write( (self.receiver, ('sent',self.M)) )
        else: dump.dump()
    def input_msg(self, sender, msg):
        # Dispatch honest-party inputs: 'send' only from the designated
        # sender, 'fetch' only from the designated receiver.
        sid,pid = sender
        if msg[0] == 'send' and sender == self.sender and ishonest(self.sid, self.sender):
            self.input_send(msg[1])
        elif msg[0] == 'fetch' and sender == self.receiver and ishonest(self.sid, self.receiver):
            self.input_fetch()
        else: dump.dump()
    def adv_delay(self, T):
        # Grant the adversary T extra rounds of delay, but only while the
        # cumulative delay stays within delta.
        if self.Dhat+T <= self.delta:
            self.D += T; self.Dhat += T
            self.f2a.write( ('delay-set',) )
            dump.dump()
        else: print('Delay failed with T=', T, 'Dhat=',self.Dhat, 'delta=', self.delta); dump.dump()
    def adv_get_leaks(self):
        # Hand the buffered leaks to the adversary and clear the buffer.
        self.f2a.write( ('leaks', self.leaks) )
        self.leaks = []
    def adversary_msg(self, msg):
        # Adversary interface; corrupted sender/receiver inputs also
        # arrive through here.
        if msg[0] == 'delay':
            self.adv_delay(msg[1])
        elif msg[0] == 'get-leaks':
            self.adv_get_leaks()
        elif msg[0] == 'send' and isdishonest(self.sid, self.sender):
            self.input_send(msg[1])
        elif msg[0] == 'fetch' and isdishonest(self.sid, self.receiver):
            self.input_fetch()
        else: dump.dump()
    def run(self):
        """Event loop: wait on the party/adversary/environment channels
        and handle exactly one ready message per iteration."""
        while True:
            ready = gevent.wait(
                objects=[self.p2f,self.a2f,self.z2f],
                count=1
            )
            assert len(ready)==1
            r = ready[0]
            if r == self.a2f:
                msg = r.read()
                self.a2f.reset()
                self.adversary_msg(msg)
            elif r == self.p2f:
                msg = r.read()
                sender,msg = msg
                self.p2f.reset()
                self.input_msg(sender,msg)
            elif r == self.z2f:
                # Environment messages are ignored: yield the activation.
                self.z2f.reset()
                dump.dump()
            else: dump.dump()
import dump
from comm import setAdversary
from itm import FunctionalityWrapper, PartyWrapper, GenChannel
from syn_katz.adv import KatzDummyAdversary
from utils import z_inputs, z_ainputs, wait_for
def test():
    """Smoke test: wire one F_bd instance to a dummy adversary and drive a
    send / get-leaks / delay / fetch round trip from the environment."""
    sid = ('one',1,2)
    # Channel pairs: functionality/party/adversary/environment plumbing.
    f2p,p2f = GenChannel(),GenChannel()
    f2a,a2f = GenChannel(),GenChannel()
    f2z,z2f = GenChannel(),GenChannel()
    p2a,a2p = GenChannel(),GenChannel()
    p2z,z2p = GenChannel(),GenChannel()
    z2a,a2z = GenChannel(),GenChannel()
    p = PartyWrapper(sid, z2p,p2z, f2p,p2f, a2p,p2a)
    gevent.spawn(p.run)
    f = FunctionalityWrapper(p2f,f2p, a2f,f2a, z2f,f2z)
    gevent.spawn(f.run)
    f.newFID(sid,'F_bd',BD_SEC_Functionality)
    advitm = KatzDummyAdversary('adv',-1, z2a,a2z, p2a,a2p, a2f,f2a)
    setAdversary(advitm)
    gevent.spawn(advitm.run)
    # Sender (pid 1) sends 'hello' through F_bd.
    z2p.write( (1, ((sid,'F_bd'),('send','hello'))) )
    msg = wait_for(p2z)
    print('Party back from f_bd', msg)
    # Adversary pulls the leak buffer, then sets a zero-round delay.
    z2a.write( ('A2F', ((sid,'F_bd'),('get-leaks',))) )
    msg = wait_for(a2z)
    print('Leaks the right way', msg)
    z2a.write( ('A2F', ((sid,'F_bd'), ('delay',0))) )
    msg = wait_for(a2z)
    print('adv message', msg)
    # Receiver (pid 2) fetches the delivered message.
    z2p.write( (2, ((sid,'F_bd'), ('fetch',))))
    fro,(receiver,msg) = wait_for(p2z)
    print('p2z message', msg)
if __name__=='__main__':
    test()
|
# coding: utf-8
import tensorflow as tf
# 创建 W 和 b 节点, 并设置初始值
# Create the W and b parameter nodes with initial values.
W = tf.Variable([.1], dtype=tf.float32)
b = tf.Variable([-.1], dtype=tf.float32)

# Placeholder for the observed inputs.
x = tf.placeholder(tf.float32)

# Linear model y_hat = W*x + b.
# Fix: the original read `W * b - x`, which never applies the parameters
# to x and therefore cannot fit the training data below.
linear_model = W * x + b

# Placeholder for the observed outputs, used by the loss.
y = tf.placeholder(tf.float32)

# Sum-of-squares loss.
loss = tf.reduce_sum(tf.square(linear_model - y))

# Session to evaluate the graph.
sess = tf.Session()

# Initialize variables.
init = tf.global_variables_initializer()
sess.run(init)

print(sess.run(linear_model, {x: [1, 2, 3, 6, 8]}))

# Loss with the initial parameters.
print(sess.run(loss, {x: [1, 2, 3, 6, 8], y: [4.8, 8.5, 10.4, 21.0, 25.3]}))

# Assign new values to W and b ...
fixW = tf.assign(W, [2.])
fixb = tf.assign(b, [1.])
# ... which only take effect once run.
sess.run([fixW, fixb])

# Re-check the loss with the better parameters.
print(sess.run(loss, {x: [1, 2, 3, 6, 8], y: [4.8, 8.5, 10.4, 21.0, 25.3]}))

######################### part3 ################

# Gradient-descent optimizer with learning rate 0.001.
optimizer = tf.train.GradientDescentOptimizer(0.001)
train = optimizer.minimize(loss)

# Training data.
x_train = [1, 2, 3, 6, 8]
y_train = [4.8, 8.5, 10.4, 21.0, 25.3]

# Train for 10000 steps.
for i in range(10000):
    sess.run(train, {x: x_train, y: y_train})

# Print the fitted parameters and final loss.
print('W: %s b: %s loss: %s' % (sess.run(W), sess.run(b), sess.run(loss, {x: x_train , y: y_train})))
|
# -*- coding:utf-8 -*-
# author: will
import datetime
import random
import time
from flask import request, jsonify
from app import db
from app.models import User, UserSteps, UserFriends, Article, UserReadArticle, UserShareArticle, UserSign
from utils.log_service import Logging
from utils.time_service import dif_time
from utils.user_service.login import auth_required
from . import api_article
# 步数兑换
# 步数与钢镚兑换规则:1步=0.001枚钢镚-->春节双倍
# 每日0点用户步数清零
# Step exchange: 1 step = 0.001 coin (doubled rate kept commented out for
# the Spring Festival promotion).  Step counters reset daily at midnight.
@api_article.route('/change_steps', methods=['POST'])
@auth_required
def change_steps():
    """Convert the user's accumulated steps into coins and record the
    exchange.  Expects JSON {user_id}; returns errno/errmsg JSON."""
    try:
        res = request.get_json()
        user_id = res.get('user_id')
        Logging.logger.info('request_args:{0}'.format(res))
        try:
            user_id = int(user_id)
        except Exception as e:
            Logging.logger.error('errmsg:{0}'.format(e))
            return jsonify(errno=-1, errmsg='参数错误')
        user_obj = User.query.get(user_id)
        if not user_obj:
            return jsonify(errno=-1, errmsg='用户不存在')
        available_step = user_obj.available_step
        # 1 step = 0.001 coin, rounded to 2 decimals.
        coins = float('%.2f' % (available_step * 0.001))
        # coins = float('%.2f' % (available_step * 0.002))
        if available_step != 0:
            # Record the exchange and zero out the user's step balance.
            user_step = UserSteps()
            user_step.user_id = user_id
            user_step.change_step = available_step
            user_step.get_coins = coins
            user_step.change_step_date = datetime.datetime.now()
            user_obj.coins += coins
            user_obj.available_step = 0
            user_obj.change_steal_step = user_obj.steal_step
            db.session.add(user_step)
            db.session.add(user_obj)
            db.session.commit()
        return jsonify(errno=0, errmsg="OK", coins=coins, total_coins=user_obj.coins)
    except Exception as e:
        Logging.logger.error('errmsg:{0}'.format(e))
        db.session.rollback()
        return jsonify(errno=-1, errmsg='兑换失败')
# 计算步数兑换结果
# Preview endpoint: compute what a step count would convert to, without
# performing the exchange.
@api_article.route('/step_to_coin', methods=['POST'])
@auth_required
def step_to_coin():
    """Return the coin value of `steps` (1 step = 0.001 coin)."""
    try:
        res = request.get_json()
        steps = res.get('steps')
        Logging.logger.info('request_args:{0}'.format(res))
        try:
            steps = int(steps)
        except Exception as e:
            Logging.logger.error('errmsg:{0}'.format(e))
            return jsonify(errno=-1, errmsg='参数错误')
        coins = float('%.2f' % (steps * 0.001))
        # coins = float('%.2f' % (steps * 0.002))
        return jsonify(errno=0, errmsg="OK", coins=coins)
    except Exception as e:
        Logging.logger.error('errmsg:{0}'.format(e))
        db.session.rollback()
        return jsonify(errno=-1, errmsg='兑换失败')
# 随机奖励--春节翻倍
# Random share bonus (doubled for the Spring Festival promotion).
@api_article.route('/get_random_coins', methods=['POST'])
@auth_required
def get_random_coins():
    """Award 0.5-2 random coins for sharing, at most once per WeChat group
    per day, after a sign-in (get_type=1) or a step exchange (get_type=2)."""
    try:
        res = request.get_json()
        user_id = res.get('user_id')
        get_type = res.get('get_type')  # 1: share after sign-in, 2: share after step exchange
        openGId = res.get('openGId')  # unique id of the WeChat group the user shared to
        Logging.logger.info('request_args:{0}'.format(res))
        if not all([user_id, get_type, openGId]):
            return jsonify(errno=-1, errmsg='参数不完整')
        try:
            user_id = int(user_id)
            get_type = int(get_type)
        except Exception as e:
            Logging.logger.error('errmsg:{0}'.format(e))
            return jsonify(errno=-1, errmsg='参数错误')
        user_obj = User.query.get(user_id)
        if not user_obj:
            return jsonify(errno=-1, errmsg='用户不存在')
        coins = random.uniform(0.5, 2)
        # coins = random.uniform(1, 4)
        coins = float('%.2f' % coins)
        user_obj.coins += coins
        now = datetime.datetime.now()
        # Midnight today, used to scope the once-per-day-per-group checks.
        today = now - datetime.timedelta(hours=now.hour, minutes=now.minute, seconds=now.second,
                                         microseconds=now.microsecond)
        if get_type == 1:
            # Already rewarded for this group today?
            share = UserSign.query.filter(UserSign.user_id == user_id,
                                          UserSign.openGId == openGId,
                                          UserSign.create_time >= today).first()
            if share:
                return jsonify(errno=-1, errmsg='今日已获得当前群奖励')
            else:
                # Attach the bonus to today's sign-in record.
                sign_obj = UserSign.query.filter(UserSign.user_id == user_id, UserSign.create_time >= today).first()
                sign_obj.random_coin = coins
                sign_obj.openGId = openGId
                db.session.add(sign_obj)
        elif get_type == 2:
            share = UserSteps.query.filter(UserSteps.user_id == user_id,
                                           UserSteps.openGId == openGId,
                                           UserSteps.create_time >= today).first()
            if share:
                return jsonify(errno=-1, errmsg='今日已获得当前群奖励')
            else:
                # Attach the bonus to today's step-exchange record.
                change_obj = UserSteps.query.filter(UserSteps.user_id == user_id,
                                                    UserSteps.create_time >= today).first()
                change_obj.random_coin = coins
                change_obj.openGId = openGId
                db.session.add(change_obj)
        else:
            pass
        db.session.add(user_obj)
        db.session.commit()
        return jsonify(errno=0, errmsg="OK", coins=coins, total_coins=user_obj.coins)
    except Exception as e:
        Logging.logger.error('errmsg:{0}'.format(e))
        db.session.rollback()
        return jsonify(errno=-1, errmsg='兑换失败')
# Daily check-in reward (5 coins) -- doubled during the Spring Festival event.
@api_article.route('/sign_today', methods=['POST'])
@auth_required
def sign_today():
    """Daily check-in: credit the user's sign_coin amount once per calendar day.

    Returns errno=0 with the credited amount, or a friendly "already signed
    today" message with coins=0.
    """
    try:
        res = request.get_json()
        user_id = res.get('user_id')
        Logging.logger.info('request_args:{0}'.format(res))
        user_obj = User.query.get(user_id)
        if not user_obj:
            return jsonify(errno=-1, errmsg='用户不存在')
        last_day = user_obj.sign_time
        sign_coin = user_obj.sign_coin
        if not last_day:
            # First-ever check-in for this user.
            user_obj.coins += sign_coin
            user_obj.is_sign = 1
            user_obj.sign_time = datetime.datetime.now()
            # Per-day check-in record.
            sign_obj = UserSign()
            sign_obj.user_id = user_id
            sign_obj.sign_coin = sign_coin
        else:
            today = datetime.date.today()
            today_time = int(time.mktime(today.timetuple()))
            last_time = int(time.mktime(last_day.timetuple()))
            # last_time <= today_time means the last check-in was before today
            # (today_time is midnight of the current day).
            if user_obj.is_sign == 0 or (last_time - today_time) <= 0:
                user_obj.coins += sign_coin
                user_obj.is_sign = 1
                user_obj.sign_time = datetime.datetime.now()
                sign_obj = UserSign()
                sign_obj.user_id = user_id
                sign_obj.sign_coin = sign_coin
            else:
                # Already checked in today: nothing credited.
                coins = 0
                return jsonify(errno=0, errmsg="今天已经完成签到了,明天再来吧", coins=coins, total_coins=user_obj.coins)
        db.session.add(user_obj)
        db.session.add(sign_obj)
        db.session.commit()
        return jsonify(errno=0, errmsg="OK", coins=sign_coin, total_coins=user_obj.coins)
    except Exception as e:
        Logging.logger.error('errmsg:{0}'.format(e))
        db.session.rollback()
        return jsonify(errno=-1, errmsg='网络异常')
# A friend taps the user's shared card: register the friend as stealable today.
@api_article.route('/invite_friend', methods=['POST'])
@auth_required
def invite_friend():
    """Record that `friend_id` opened `user_id`'s share card today.

    At most 5 friends per day may occupy steal slots, and a given friend is
    registered at most once per day.
    """
    try:
        res = request.get_json()
        user_id = res.get('user_id')
        friend_id = res.get('friend_id')
        Logging.logger.info('request_args:{0}'.format(res))
        if not all([user_id, friend_id]):
            return jsonify(errno=-1, errmsg='参数不完整')
        user_obj = User.query.get(user_id)
        friend_obj = User.query.get(friend_id)
        if not user_obj or not friend_obj:
            return jsonify(errno=-1, errmsg='用户不存在')
        if user_id == friend_id:
            # A user cannot invite themselves.
            return jsonify(errno=-1, errmsg='参数错误')
        now = datetime.datetime.now()
        # Midnight today: "today's" records are those created at/after this time.
        today = now - datetime.timedelta(hours=now.hour, minutes=now.minute, seconds=now.second,
                                         microseconds=now.microsecond)
        user_friend = UserFriends.query.filter(UserFriends.user_id == user_id, UserFriends.friend_id == friend_id,
                                               UserFriends.create_time >= today).first()
        if user_friend:
            return jsonify(errno=-1, errmsg='您今天已经被偷取过了')
        else:
            # Only friends with remaining steal chances occupy slots.
            friends_num = UserFriends.query.filter(UserFriends.user_id == user_id, UserFriends.steal_num > 0,
                                                   UserFriends.create_time >= today).count()
            if friends_num < 5:
                obj = UserFriends()
                obj.friend_id = friend_id
                obj.user_id = user_id
                db.session.add(obj)
            else:
                return jsonify(errno=-1, errmsg='您好友的可偷取位置已满')
        db.session.commit()
        return jsonify(errno=0, errmsg="OK")
    except Exception as e:
        Logging.logger.error('errmsg:{0}'.format(e))
        db.session.rollback()
        return jsonify(errno=-1, errmsg='网络异常')
# List of friends whose steps can still be stolen today.
@api_article.route('/steal_friend_list', methods=['POST'])
@auth_required
def steal_friend_list():
    """Return today's friends with remaining steal chances for a user.

    Each entry carries the friend's avatar, remaining steal count, id, and
    the last steal time as Unix epoch seconds (or None if never stolen).
    """
    try:
        res = request.get_json()
        user_id = res.get('user_id')
        Logging.logger.info('request_args:{0}'.format(res))
        user_obj = User.query.get(user_id)
        if not user_obj:
            return jsonify(errno=-1, errmsg='用户不存在')
        now = datetime.datetime.now()
        # Midnight today: "today's" records are those created at/after this time.
        today = now - datetime.timedelta(hours=now.hour, minutes=now.minute, seconds=now.second,
                                         microseconds=now.microsecond)
        friends = UserFriends.query.filter(UserFriends.user_id == user_id, UserFriends.steal_num > 0,
                                           UserFriends.create_time >= today).all()
        friends_num = len(friends)
        friends_info = list()
        for friend in friends:
            friend_obj = User.query.get(friend.friend_id)
            if friend_obj is None:
                # Robustness fix: a dangling friend_id used to raise
                # AttributeError and fail the whole request; skip it instead.
                continue
            friends_dict = dict()
            friends_dict['avatar_url'] = friend_obj.avatar_url
            friends_dict['steal_num'] = friend.steal_num
            friends_dict['friend_id'] = friend.friend_id
            steal_date = friend.steal_date
            if steal_date:
                # Expose the timestamp as Unix epoch seconds for the client.
                friends_dict['steal_date'] = time.mktime(steal_date.timetuple())
            else:
                friends_dict['steal_date'] = None
            friends_info.append(friends_dict)
        return jsonify(errno=0, errmsg="OK", friends_info=friends_info, friends_num=friends_num)
    except Exception as e:
        Logging.logger.error('errmsg:{0}'.format(e))
        return jsonify(errno=-1, errmsg='网络异常')
# Steal a friend's steps; a second steal from the same friend requires a
# 5-minute cooldown.
@api_article.route('/steal_friend_step', methods=['POST'])
@auth_required
def steal_friend_step():
    """Steal 1000 steps from a friend who was registered today.

    Each steal decrements the friend's remaining steal count and stamps
    steal_date, which enforces the 5-minute cooldown between steals.
    """
    try:
        res = request.get_json()
        user_id = res.get('user_id')
        friend_id = res.get('friend_id')
        Logging.logger.info('request_args:{0}'.format(res))
        if not all([user_id, friend_id]):
            return jsonify(errno=-1, errmsg='参数不完整')
        user_obj = User.query.get(user_id)
        friend_obj = User.query.get(friend_id)
        if not user_obj or not friend_obj:
            return jsonify(errno=-1, errmsg='用户不存在')
        now = datetime.datetime.now()
        # Midnight today: only today's friend registrations are eligible.
        today = now - datetime.timedelta(hours=now.hour, minutes=now.minute, seconds=now.second,
                                         microseconds=now.microsecond)
        user_friend = UserFriends.query.filter(UserFriends.user_id == user_id, UserFriends.friend_id == friend_id,
                                               UserFriends.create_time >= today).first()
        if not user_friend:
            return jsonify(errno=-1, errmsg='该好友未被邀请')
        steal_date = user_friend.steal_date
        if not steal_date:
            # First steal from this friend today: no cooldown applies.
            user_friend.steal_num -= 1
            user_obj.steal_step += 1000
            user_friend.steal_date = datetime.datetime.now()
        else:
            if user_friend.steal_num <= 0:
                return jsonify(errno=0, errmsg="请换个好友偷取吧")
            else:
                # dif_time: seconds elapsed since the last steal.
                mistiming = dif_time(str(steal_date))
                if mistiming < 60 * 5:
                    return jsonify(errno=-1, errmsg="请在%s秒后再来偷取吧" % (60 * 5 - mistiming))
                else:
                    user_friend.steal_num -= 1
                    user_obj.steal_step += 1000
                    user_friend.steal_date = datetime.datetime.now()
        db.session.add(user_obj)
        db.session.add(user_friend)
        db.session.commit()
        return jsonify(errno=0, errmsg="OK")
    except Exception as e:
        Logging.logger.error('[api/steal_friend_step] errmsg:{0}'.format(e))
        db.session.rollback()
        return jsonify(errno=-1, errmsg='网络异常')
# Reading reward: originally the first 10 reads/day earned 1 coin each; now
# the first 5 reads/day earn a random (0, 1) amount -- doubled for the
# Spring Festival event.
@api_article.route('/read_article_for_coin', methods=['POST'])
@auth_required
def read_article_for_coin():
    """Reward the first 5 distinct article reads per user per day."""
    try:
        res = request.get_json()
        user_id = res.get('user_id')
        article_id = res.get('article_id')
        Logging.logger.info('request_args:{0}'.format(res))
        if not all([user_id, article_id]):
            return jsonify(errno=-1, errmsg='参数不完整')
        user_obj = User.query.get(user_id)
        article_obj = Article.query.get(article_id)
        if not user_obj or not article_obj:
            return jsonify(errno=-1, errmsg='参数错误')
        now = datetime.datetime.now()
        # Midnight today: "today's" reads are those created at/after this time.
        today = now - datetime.timedelta(hours=now.hour, minutes=now.minute, seconds=now.second,
                                         microseconds=now.microsecond)
        num = UserReadArticle.query.filter(UserReadArticle.user_id == user_id,
                                           UserReadArticle.create_time >= today).count()
        Logging.logger.info('已阅读文章次数={0}'.format(num))
        if num >= 5:
            return jsonify(errno=-1, errmsg='每人每天只有前5次阅读文章才可以拿奖励哦')
        # Each article pays out at most once per day.
        obj = UserReadArticle.query.filter(UserReadArticle.user_id == user_id,
                                           UserReadArticle.article_id == article_id,
                                           UserReadArticle.create_time >= today).first()
        if obj:
            return jsonify(errno=-1, errmsg='今日已获得当前文章奖励')
        else:
            # Random reward in [0, 1), rounded to 2 decimals.
            random_coins = float('%.2f' % random.uniform(0, 1))
            # random_coins = float('%.2f' % random.uniform(1, 2))  # double rate
            user_obj.coins += random_coins
            user_article = UserReadArticle()
            user_article.article_id = article_id
            user_article.user_id = user_id
            user_article.random_coins = random_coins
            db.session.add(user_article)
            db.session.add(user_obj)
            db.session.commit()
            return jsonify(errno=0, errmsg="OK", coins=random_coins, total_coins=user_obj.coins, num=num)
    except Exception as e:
        Logging.logger.error('errmsg:{0}'.format(e))
        db.session.rollback()
        return jsonify(errno=-1, errmsg='网络异常')
# Article-share reward. History: first 10 shares/day earned 2 coins; later
# changed to a random 1-3 with "no share limit" (several earlier per-article
# query variants were removed from this comment block).
@api_article.route('/share_article_for_coin', methods=['POST'])
@auth_required
def share_article_for_coin():
    """Reward a user for sharing an article to a WeChat group.

    NOTE(review): the `share_nums` check below rejects ANY second rewarded
    share in a day, which contradicts the "unlimited shares" note above and
    makes the per-group check before it redundant -- confirm intended limit.
    """
    try:
        res = request.get_json()
        user_id = res.get('user_id')
        article_id = res.get('article_id')
        openGId = res.get('openGId')  # unique id of the WeChat group shared to
        Logging.logger.info('request_args:{0}'.format(res))
        if not all([user_id, article_id, openGId]):
            return jsonify(errno=-1, errmsg='参数不完整')
        user_obj = User.query.get(user_id)
        article_obj = Article.query.get(article_id)
        if not user_obj or not article_obj:
            return jsonify(errno=-1, errmsg='参数错误')
        now = datetime.datetime.now()
        # Midnight today: "today's" shares are those created at/after this time.
        today = now - datetime.timedelta(hours=now.hour, minutes=now.minute, seconds=now.second,
                                         microseconds=now.microsecond)
        # One reward per WeChat group per day.
        share = UserShareArticle.query.filter(UserShareArticle.user_id == user_id,
                                              UserShareArticle.openGId == openGId,
                                              UserShareArticle.create_time >= today).first()
        if share:
            return jsonify(errno=-1, errmsg='今天已经分享过了明天再来吧')
        # One rewarded share per day in total (see NOTE in the docstring).
        share_nums = UserShareArticle.query.filter(UserShareArticle.user_id == user_id,
                                                   UserShareArticle.create_time >= today).count()
        if share_nums:
            return jsonify(errno=-1, errmsg='你分享的次数已达到上限明天再来吧')
        # Random reward in [20, 30), rounded to 2 decimals.
        random_coins = float('%.2f' % random.uniform(20, 30))
        # random_coins = float('%.2f' % random.uniform(2, 6))
        user_obj.coins += random_coins
        user_article = UserShareArticle()
        user_article.article_id = article_id
        user_article.user_id = user_id
        user_article.openGId = openGId
        user_article.random_coins = random_coins
        db.session.add(user_article)
        db.session.add(user_obj)
        db.session.commit()
        return jsonify(errno=0, errmsg="OK", coins=random_coins, total_coins=user_obj.coins)
    except Exception as e:
        Logging.logger.error('errmsg:{0}'.format(e))
        db.session.rollback()
        return jsonify(errno=-1, errmsg='网络异常')
|
import cv2
class FaceModule:
    """Webcam face tracker built on an OpenCV Haar cascade.

    Lifecycle: activate() opens the camera and learns the frame geometry,
    continuous()/single() run detection and draw the overlay, deactivate()
    releases the camera and destroys the preview window.
    """

    def __init__(self):
        cv2.namedWindow("Camera")
        # Performance fix: load the Haar cascade once. The original
        # constructed a CascadeClassifier (re-parsing the XML file) for
        # every processed frame.
        self.face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
        self.vc = None      # cv2.VideoCapture handle
        self.rval = None    # last read() success flag
        self.b = None       # last captured frame (BGR)
        self.bs = None      # frame shape
        self.center = None  # frame center as (x, y)
        print("Facemodule initialised")

    def activate(self):
        """Open camera 0 and grab one frame to learn the frame size/center."""
        print("Activating camera...")
        self.vc = cv2.VideoCapture(0)
        if self.vc.isOpened():  # try to get the first frame
            print("Camera activated")
            self.rval, self.b = self.vc.read()
            self.bs = self.b.shape
            self.center = (self.bs[1]//2, self.bs[0]//2)
        else:
            self.rval = False

    def _detect_faces(self):
        """Run the cascade on the current frame; return (x, y, w, h) boxes."""
        gray = cv2.cvtColor(self.b, cv2.COLOR_BGR2GRAY)
        return self.face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=10)

    def continuous(self):
        """Stream frames, annotating every detected face, until ESC."""
        while self.rval:
            self.rval, self.b = self.vc.read()
            faces = self._detect_faces()
            cv2.circle(self.b, self.center, 8, (0, 255, 0), -1)
            for (x, y, w, h) in faces:
                cv2.rectangle(self.b, (x, y), (x+w, y+h), (0, 255, 0), 5)
                cv2.circle(self.b, (x+w//2, y+h//2), 3, (0, 255, 0), -1)
                cv2.line(self.b, (x+w//2, y+h//2), self.center, (255, 0, 0), 2)
            cv2.imshow("Camera", self.b)
            key = cv2.waitKey(20)
            if key == 27:  # exit on ESC
                break
        print("Deactivating camera...")
        self.deactivate()

    def single(self):
        """Grab one frame, annotate the first detected face, display it."""
        self.rval, self.b = self.vc.read()
        faces = self._detect_faces()
        cv2.circle(self.b, self.center, 8, (0, 255, 0), -1)
        if len(faces) > 0:
            x, y, w, h = faces[0]
            cv2.rectangle(self.b, (x, y), (x+w, y+h), (0, 255, 0), 5)
            cv2.circle(self.b, (x+w//2, y+h//2), 3, (0, 255, 0), -1)
            cv2.line(self.b, (x+w//2, y+h//2), self.center, (255, 0, 0), 2)
            self.printDirection(x, y, w, h)
        self.show()

    def printDirection(self, x, y, w, h):
        """Print where the frame center lies relative to the face box.

        Per axis: "O" when the center falls inside a 75%-of-box tolerance
        window, otherwise L/R (horizontal) or U/D (vertical).
        """
        nx = x + w//2
        ny = y + h//2
        tolx = 0.75
        toly = 0.75
        if self.center[0] > x + (1-tolx)*w and self.center[0] < x + tolx*w:
            print("O", end=" ")
        elif self.center[0] > nx:
            print("L", end=" ")
        elif self.center[0] < nx:
            print("R", end=" ")
        if self.center[1] > y + (1-toly)*h and self.center[1] < y + toly*h:
            print("O")
        elif self.center[1] > ny:
            print("U")
        elif self.center[1] < ny:
            print("D")

    def show(self):
        """Display the current frame briefly (200 ms key wait)."""
        cv2.imshow("Camera", self.b)
        key = cv2.waitKey(200)

    def deactivate(self):
        """Destroy the preview window and release the camera."""
        cv2.destroyWindow("Camera")
        self.vc.release()
        print("Camera Deactivated")
import time
if __name__=='__main__':
    # Demo: run single-frame detection repeatedly for 60 seconds, then
    # release the camera.
    f = FaceModule()
    f.activate()
    target = time.time()+60
    while(time.time()<=target):
        f.single()
    f.deactivate()
|
import os
import json
import math
import itertools
import fiona
import numpy as np
from shapely.geometry import shape
import pandas as pd
import geopandas as gpd
from datetime import datetime
# -----------------------------------------------------------------------------
# Input canal polygons (GeoJSON) and the output path for the point grid.
polygon_path = os.path.expanduser(
    "~/git/afghanistan_gie/canal_data/canal_polygons.geojson")
output_path = os.path.expanduser(
    "~/git/afghanistan_gie/canal_data/canal_point_grid.geojson")
# Grid spacing in decimal degrees -- presumably one imagery pixel; confirm.
pixel_size = 0.0002695
polygon_data = fiona.open(polygon_path, 'r')
# -----------------------------------------------------------------------------
# Build a grid of points covering every canal polygon, snapped to a global
# lattice anchored at (-180, -90) so points from different canals align.
feature_list = []
for canal_feature in polygon_data:
    canal_id = canal_feature['properties']['project_id']
    canal_shape = shape(canal_feature['geometry'])
    # Portability fix: the original used the Python-2-only `print "..."`
    # statement; the single-argument call form runs under both 2 and 3.
    print("Running {0}".format(canal_id))
    xmin, ymin, xmax, ymax = canal_shape.bounds
    # Snap the bounding box outward onto the lattice.
    adj_xmin = math.floor((xmin - -180) / pixel_size) * pixel_size + -180
    adj_ymin = math.floor((ymin - -90) / pixel_size) * pixel_size + -90
    adj_xmax = math.ceil((xmax - -180) / pixel_size) * pixel_size + -180
    adj_ymax = math.ceil((ymax - -90) / pixel_size) * pixel_size + -90
    # Guard against float rounding leaving the range one cell short.
    x_count = (adj_xmax - adj_xmin) / pixel_size
    if x_count < round(x_count):
        adj_xmax += pixel_size
    y_count = (adj_ymax - adj_ymin) / pixel_size
    if y_count < round(y_count):
        adj_ymax += pixel_size
    coords = itertools.product(
        np.arange(adj_xmin, adj_xmax, pixel_size),
        np.arange(adj_ymin, adj_ymax, pixel_size))
    # Flat records (geometry + attributes) rather than full GeoJSON features,
    # since they are loaded straight into a GeoDataFrame below.
    point_list = [
        {
            "geometry": shape({
                "type": "Point",
                "coordinates": c
            }),
            "project_id": canal_id,
            # Lattice coordinates rounded to 9 decimals form a stable key
            # for de-duplicating points shared by overlapping canals.
            "unique": "{0}_{1}".format(round(c[0], 9), round(c[1], 9))
        }
        for c in coords
    ]
    # Keep only grid points that actually fall inside the canal polygon.
    canal_points = [feat for feat in point_list
                    if canal_shape.contains(shape(feat['geometry']))]
    feature_list += canal_points
# -----------------------------------------------------------------------------
gdf = gpd.GeoDataFrame(feature_list)
project_data_path = os.path.expanduser(
    "~/git/afghanistan_gie/canal_data/original_project_data.csv")
project_data = pd.read_csv(project_data_path, quotechar='\"',
                           na_values='', keep_default_na=False,
                           encoding='utf-8')
project_data = project_data[["OFWM Project ID", "Actual end date"]]
project_data.columns = ['project_id', 'actual_end_date']
# Bug fix: fill only the missing end dates. The original
# `.loc[mask] = "Jan/01/9999"` form assigned the sentinel to EVERY column
# of the matched rows, clobbering project_id and breaking the merge below.
project_data.loc[project_data['actual_end_date'].isnull(),
                 'actual_end_date'] = "Jan/01/9999"
def prep_date(datestr):
    """Normalize a 'MonXDDXYYYY' date string to ISO 'YYYY-MM-DD'.

    The delimiter is whatever character sits at position 3 (e.g. '/'
    or '-'); every occurrence is rewritten to '/' before parsing.
    """
    sep = datestr[3]
    normalized = datestr.replace(sep, "/")
    return datetime.strptime(normalized, "%b/%d/%Y").strftime('%Y-%m-%d')
# Normalize end dates to ISO so lexicographic sort == chronological sort.
project_data['actual_end_date_iso'] = project_data['actual_end_date'].apply(
    lambda z: prep_date(z))
gdf = gdf.merge(
    project_data[['project_id', 'actual_end_date_iso']], on='project_id')
# For points shared by several canals, join all project ids with '|'.
grp = gdf.groupby(['unique'], as_index=False).aggregate(
    lambda x: '|'.join(x))
grp['project_list'] = grp['project_id']
gdf = gdf.merge(grp[['unique', 'project_list']], on='unique')
# Keep one row per point: the project with the earliest end date wins.
out_gdf = gdf.sort_values(by='actual_end_date_iso', ascending=True).groupby('unique', as_index=False).first()
out_gdf = gpd.GeoDataFrame(out_gdf)
# Round-trip through json to pretty-print the GeoJSON output.
geo_json = out_gdf.to_json()
geo_file = open(output_path, "w")
json.dump(json.loads(geo_json), geo_file, indent=4)
geo_file.close()
# -----------------------------------------------------------------------------
import tarfile
def make_tarfile(dst, src):
    """Create a gzipped tarball at `dst` containing `src` under its basename."""
    with tarfile.open(dst, "w:gz") as tar:
        tar.add(src, arcname=os.path.basename(src))
# Also ship a compressed copy of the point-grid GeoJSON.
make_tarfile(dst=output_path + ".tar.gz" , src=output_path)
# -----------------------------------------------------------------------------
# def create_geojson(features, path):
# output_geo = {
# "type": "FeatureCollection",
# "features": features
# }
# output_file = open(path, "w")
# json.dump(output_geo, output_file)
# output_file.close()
# create_geojson(feature_list, output_path)
|
import numpy as np
from PIL import Image
import cv2
def draw_bbox(image, info, classes):
    """Placeholder: bounding-box drawing is not implemented yet."""
def draw_segm(image, info, mapped_color):
    """Overlay predicted segmentation masks on `image` and return the result.

    info: dict with "labels" and "masks" (one mask per label; the first
    channel of each mask is used). mapped_color maps a label to its color.
    The input image is not modified.
    """
    ALPHA = 0.6  # weight of the original image inside the blended region
    image_proc = image.copy()
    segm_mask = np.zeros_like(image, np.uint8)
    for i in range(len(info["labels"])):
        segm = np.uint8(info["masks"][i][0])
        label = info["labels"][i]
        # Improving segmentation draw (comment this block to use every pixel)
        segm = cv2.medianBlur(segm, 5)
        _, segm = cv2.threshold(segm, 127, 255, cv2.THRESH_BINARY_INV)
        segm = cv2.bitwise_not(segm)
        contours, _ = cv2.findContours(segm, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
        # Draw each polygon on the segmentation mask.
        for contour in contours:
            cv2.fillPoly(segm_mask, [contour], color=mapped_color[label])
        # Blend the colored mask into the image only where the mask is set.
        mask = segm.astype(bool)
        image_proc[mask] = cv2.addWeighted(image_proc, ALPHA, segm_mask, 1 - ALPHA, 0)[mask]
        # Bug fix: the original called drawContours with the loop variable
        # AFTER the loop, so only the last contour was outlined (and it
        # raised NameError when `contours` was empty). Outline all of them.
        cv2.drawContours(image_proc, contours, -1, mapped_color[label], 1)
    return image_proc
if __name__ == "__main__":
    # Demo: run the segmentation model on one test image and display the overlay.
    from PIL import Image
    import matplotlib.pyplot as plt
    import numpy as np
    import pandas as pd
    from predict import predict
    from utils import map_labels_colors
    image_path = "../data/test/0afb6b28d4583e470c7d0c52268272a7.jpg"
    # image_path = "lixo.jpeg"
    # Class table shipped with the pretrained model; ClassId drives the palette.
    classes = pd.read_csv("./pretrained_models/model_batch4.csv")
    mapped_color = map_labels_colors(classes["ClassId"].values)
    # Resize to 256x256 before inference -- presumably the model's input
    # size; confirm against the training pipeline.
    image = Image.open(image_path).convert('RGB')
    image = image.resize((256, 256), resample=Image.BILINEAR)
    image = np.asarray(image)
    info = predict(image, "model_batch4")
    proc_image = draw_segm(image, info, mapped_color)
    plt.imshow(proc_image)
    plt.show()
|
import newt,sys
location=sys.argv[1]
distance=sys.argv[2]
try:
num=int(sys.argv[3])
except:
num=300
try:
limit=int(sys.argv[4])
except:
limit=1
network=True
api=newt.getTwitterAPI()
tweeters={}
tags={}
print 'Looking for twitterers and tags within',distance,'km of',location
tweeters,tags=newt.twSearchNear(tweeters,tags,num, location, term='', dist=float(distance))
for t in sorted(tags, key=tags.get, reverse=True):
print t,tags[t]
tw=[]
tws={}
for i in tweeters:
tws[i]=tweeters[i]['count']
if tws[i]>=limit:
tw.append(i)
tw=newt.getTwitterUsersDetailsByScreenNames(api,tw)
locfname=location.replace(',','_')
if network:
newt.gephiOutputFile(api,'local-'+locfname, tw)
else:
newt.outputHomepageURLs(api,'local-'+locfname,tw,locfname)
newt.opmlFromCSV('local-'+locfname)
|
from flask import Flask, jsonify, render_template, request
from flask_sqlalchemy import SQLAlchemy
import random
app = Flask(__name__)
##Connect to Database
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///cafes.db'
# Event/modification tracking is unused here; disabling it saves overhead.
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
##Cafe TABLE Configuration
class Cafe(db.Model):
    """SQLAlchemy model for one cafe; to_dict() gives its JSON representation."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(250), unique=True, nullable=False)
    map_url = db.Column(db.String(500), nullable=False)
    img_url = db.Column(db.String(500), nullable=False)
    location = db.Column(db.String(250), nullable=False)
    # Stored as text, not an integer.
    seats = db.Column(db.String(250), nullable=False)
    has_toilet = db.Column(db.Boolean, nullable=False)
    has_wifi = db.Column(db.Boolean, nullable=False)
    has_sockets = db.Column(db.Boolean, nullable=False)
    can_take_calls = db.Column(db.Boolean, nullable=False)
    # The only optional column.
    coffee_price = db.Column(db.String(250), nullable=True)
    def to_dict(self):
        """Map every table column to {column_name: value}."""
        # Use Dictionary Comprehension to do the same thing.
        return {column.name: getattr(self, column.name) for column in self.__table__.columns}
@app.route("/")
def home():
    """Render the landing page."""
    return render_template("index.html")
# HTTP GET - Read Record
@app.route("/random", methods=["GET"])
def get_random_request():
    """Return one randomly chosen cafe as JSON."""
    cafes = db.session.query(Cafe).all()
    # Robustness fix: random.choice([]) raises IndexError (HTTP 500) when
    # the table is empty; answer with a JSON error instead.
    if not cafes:
        return jsonify(error={"Not Found": "Sorry, no cafes are available."})
    cafe = random.choice(cafes)
    return jsonify(cafe=cafe.to_dict())
@app.route("/all", methods=["GET"])
def get_all():
    """Return every cafe in the database as a JSON array."""
    rows = db.session.query(Cafe).all()
    return jsonify(cafe=[row.to_dict() for row in rows])
@app.route("/search")
def search():
    """Find the first cafe at the location given by the ?loc= query arg."""
    target = request.args.get("loc")
    match = Cafe.query.filter_by(location=target).first()
    if not match:
        return jsonify(error={"Not Found": "Sorry, we don't have a cafe at that location."})
    return jsonify(cafe=match.to_dict())
## HTTP POST - Create Record
## HTTP PUT/PATCH - Update Record
## HTTP DELETE - Delete Record
if __name__ == '__main__':
    # Development server only; do not run with debug=True in production.
    app.run(debug=True)
|
import sys
from baselines.run import main
import rl_od
if __name__ == '__main__':
    # Delegate to the Baselines CLI entry point; `import rl_od` above is
    # presumably needed for its registration side effects -- confirm.
    main(sys.argv)
|
class Monitor:
    """Minimal status holder: starts "off" and reports status with empty logs."""

    def __init__(self, conf):
        # `conf` is accepted for interface compatibility but currently unused.
        self.status = "off"

    def update(self):
        """No-op refresh hook."""

    def getStatus(self):
        """Return the current status payload."""
        return {"status": self.status, "logs": []}
|
#!/usr/bin/env python3
# Mark-to-market P&L for a list of option positions, scraped from Yahoo
# Finance quote pages.
# Bug fix: the shebang declares Python 3 but the script imported the
# Python-2-only urllib2 and matched a str regex against a bytes body.
# Use urllib.request and decode the response before matching.
import urllib.request
import re

# (contract symbol, quantity, cost basis per unit)
portfolio = [
    ('SCHW191108C00035500', 100, 1.45),
    ('SCHW191108C00040000', 300, 0.2),
    ('ETFC191108C00036000', 100, 1.5),
    ('AMTD191108C00038500', 300, 0.35),
    ('AMTD191108C00035000', 100, 1.0),
    ('ETFC191018C00040000', 300, 0.18),
]
pnl = 0.0
for name, amount, cost in portfolio:
    url = 'https://finance.yahoo.com/quote/' + name + '?p=' + name
    # urlopen().read() returns bytes on Python 3; decode before the regex.
    content = urllib.request.urlopen(url).read().decode('utf-8', errors='replace')
    match = re.search(r'"regularMarketPrice":\{"raw":(\d+\.\d+),', content)
    if match:
        pnl += (float(match.group(1)) - cost) * amount
    else:
        print("CANNOT FETCH PRICE FOR: " + name)
print('Total PnL: ' + str(pnl))
# NOTE(review): the 7-way split is hard-coded -- confirm the head count.
print('Per Person PnL: ' + str(pnl / 7))
|
[proxy]
proxy_enabled =
proxy_type =
proxy_url =
proxy_port =
proxy_username =
proxy_password =
proxy_rdns =
[additional_parameters]
validate_ssl =
ssl_cert_loc =
[logging]
|
import tkinter as tk
from tkinter import *
class Interface0:
    """Main SoloScan window: title, file-chooser widgets, and a Next button."""

    def __init__(self, master):
        self.master = master
        self.master.title("SoloScan")
        self.master.geometry("400x400")
        self.master.resizable(0, 0)
        self.frame = tk.Frame(self.master, bg="black")
        # Bug fix: `Label(...).pack(...)` returns None, so the original
        # attributes never referenced the widgets. Create, then pack.
        self.text = Label(self.frame, text='SoloScan', fg='yellow', bg='black',
                          font=('Verdana', 15))
        self.text.pack(side=TOP, pady=10)
        self.text3 = Label(self.frame, text="Scan itttttt!!!!!", fg='yellow', bg='black')
        self.text3.pack(side=TOP)
        # Bug fix: keep the PhotoImage on self -- a local reference is
        # garbage-collected and the image silently disappears.
        self.photo1 = PhotoImage(file="/home/thor/Desktop/FYP_Final/images.png")
        self.button = Label(self.frame, text="Choose File", image=self.photo1)
        self.button.pack(side=TOP)
        self.text1 = Label(self.frame, text="Drag files to load images or", fg='yellow', bg='black')
        self.text1.pack(side=TOP, pady=10)
        self.b1 = Button(self.frame, text="Choose File", fg='yellow', bg='black')
        self.b1.pack(side=TOP, pady=10)
        self.butnew("Next", Interface1)
        self.frame.pack()

    def butnew(self, text, _class):
        """Add a button that opens `_class` in a new toplevel window."""
        tk.Button(self.frame, text=text, command=lambda: self.new_window(_class)).pack()

    def new_window(self, _class):
        """Open `_class` in a fresh Toplevel attached to our master."""
        self.Interface1 = tk.Toplevel(self.master)
        _class(self.Interface1)
class Interface1(Interface0):
    """Second SoloScan window offering the processing actions."""

    def __init__(self, master):
        self.master = master
        self.master.title("Second Window")
        self.master.geometry("400x400")
        self.master.resizable(0, 0)
        self.show_Model()

    def show_Model(self):
        """Build the pipeline action buttons and the Exit/Back controls."""
        self.frame = tk.Frame(self.master, bg="black")
        # Bug fix: `Label(...).pack(...)` returns None; create then pack so
        # the attributes actually hold the widgets.
        self.text = Label(self.frame, text='SoloScan', fg='yellow', bg='black',
                          font=('Verdana', 15))
        self.text.pack(side=TOP, pady=10)
        # Note: this label was never packed in the original either.
        self.text3 = Label(self.frame, text="Choose any :", fg='yellow', bg='black')
        self.b1 = Button(self.frame, text="Feature Detection", fg='yellow', bg='black', height=2, width=36)
        self.b1.pack(side=TOP, pady=10)
        self.b2 = Button(self.frame, text="Keypoints matching", fg='yellow', bg='black', height=2, width=36)
        self.b2.pack(side=TOP, pady=10)
        self.b3 = Button(self.frame, text="Show Triangulation", fg='yellow', bg='black', height=2, width=36)
        self.b3.pack(side=TOP, pady=10)
        self.b4 = Button(self.frame, text="Display 3D Point Cloud", fg='yellow', bg='black', height=2, width=36)
        self.b4.pack(side=TOP, pady=10)
        self.text2 = Button(self.frame, text="Exit", command=self.close_window)
        self.text2.pack(side=LEFT)
        self.butnew("Back", Interface0)
        self.frame.pack()

    def butnew(self, text, _class):
        # Bug fix: the original command referenced self.pre_window, which
        # does not exist and raised AttributeError when "Back" was clicked.
        tk.Button(self.frame, text=text,
                  command=lambda: self.Previous_window(_class)).pack(side=RIGHT)

    def Previous_window(self, _class):
        """Open `_class` in a fresh Toplevel attached to our master."""
        self.Interface0 = tk.Toplevel(self.master)
        # Bug fix: the original called _class(self, self.Interface0),
        # passing `self` where the Tk master widget belongs.
        _class(self.Interface0)

    def close_window(self):
        self.master.destroy()
# Application entry point: build the root window and hand it to Interface0.
root = tk.Tk()
app = Interface0(root)
root.mainloop()
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Copyright (c) 2014 by Delphix. All rights reserved.
#
import os
import json
import os.path
import string
import sys
#
# This is a simple dxData JSON Schema converter. Given a directory of
# dxData JSON Schemas (schema, in the current directory), this generates a
# Python class (in schemaTypes) for each dxData schema.
#
# This does only a simple conversion. For instance, it does nothing to assure
# that the resulting Python class instances always have data of the right type.
#
# If a type has operations or root operations, it is assumed that these will
# need to be hand-written. Thus, if a given type, Foo, has operations or root
# operations, this will generate two types: a type called FooOperations, and
# a subtype called Foo which just has the properties. Foo will always be
# regenerated, but FooOperations will only be created if there is not already a
# FooOperations in the schemaTypes directory. This simple scheme allows the
# auto-generated code to be auto-generated, while not affecting any custom code.
#
# Output directory for the generated Python classes: the LAST command-line
# argument (everything before it is ignored here).
TYPE_DIR = sys.argv[len(sys.argv)-1]
# SCHEMA_FILES = sys.argv[1:len(sys.argv)-1]
# Write a description string. Wrap over multiple lines if needed.
def writeDescription(description, prefix, file):
    """Write `description` to `file`, each line prefixed and wrapped to 80 cols.

    Breaks at the last space before the limit; a single over-long word is
    split hard at the limit.
    """
    linelength = 80 - len(prefix)
    while len(description) > linelength:
        # Bug fix: str.rfind replaces the Python-2-only string.rfind module
        # function, and the sentinel is compared with == rather than `is`
        # (identity on small ints is an implementation accident).
        rightIndex = description.rfind(" ", 0, linelength)
        if rightIndex == -1:
            rightIndex = linelength
        file.write(prefix + description[:rightIndex] + '\n')
        description = description[rightIndex + 1:]
    file.write(prefix + description + '\n')
# Look up the name of the schema this one extends.
def getSuperTypeName(schemaDict, allSchemas):
    """Return the `name` of the extended schema, or None for root types."""
    if 'extends' not in schemaDict:
        return None
    return allSchemas[schemaDict['extends']['$ref']]['name']
def getSuperTypeRef(schemaDict, allSchemas):
    """Return the raw '$ref' of the extended schema, or None for root types."""
    if 'extends' not in schemaDict:
        return None
    return schemaDict['extends']['$ref']
# Write the import (if any) and the class declaration line.
def writeClassHeader(file, typeName, superName):
    """Emit the class header for `typeName`, importing/extending `superName`."""
    if superName is None:
        file.write('class ' + typeName + '():\n')
        return
    file.write('from ' + superName + ' import ' + superName + '\n\n')
    file.write('class ' + typeName + '(' + superName + '):\n')
def writeProperties(file, schemaDict, allSchemas):
    """Write class attributes (with defaults) for a schema, supertype first.

    Inherited properties are emitted by recursing up the `extends` chain so
    every generated class repeats its ancestors' attributes.
    """
    # Recurse into the supertype first (supertype properties come first).
    if 'extends' in schemaDict:
        writeProperties(file, allSchemas[schemaDict['extends']['$ref']], allSchemas)
    if 'properties' not in schemaDict:
        return
    for propName in schemaDict['properties']:
        propDef = schemaDict['properties'][propName]
        defaultValue = 'None'
        if 'default' in propDef:
            defaultValue = str(propDef['default'])
            # Bug fix: compare with ==. The original `is 'null'` was an
            # identity test against a fresh str() result and never matched,
            # leaking a bare `null` token into the generated Python.
            if defaultValue == 'null':
                defaultValue = 'None'
        file.write('\n')
        if 'description' in propDef:
            writeDescription(propDef['description'], '    # ', file)
        file.write('    ' + propName + ' = ' + defaultValue + '\n')
def writeOperations(file, schemaDict, opProp, isStatic):
    """Write a stub method for each operation in schemaDict[opProp].

    isStatic selects @staticmethod stubs (root operations) vs instance
    methods. Bodies are left for hand implementation.
    NOTE(review): an operation without a 'description' gets an empty body
    (no docstring, no pass), which is not valid Python in the generated
    file -- confirm whether descriptions are guaranteed.
    """
    if opProp in schemaDict:
        for opName in schemaDict[opProp]:
            opDef = schemaDict[opProp][opName]
            if isStatic:
                file.write('    @staticmethod\n')
            # write the function declaration. Include a parameters or payload
            # argument if the operation takes such
            file.write('    def ' + opName + '(')
            if not isStatic:
                file.write('self, ')
            if 'payload' in opDef:
                if len(opDef['payload'].keys()) > 0:
                    file.write('payload')
            elif 'parameters' in opDef:
                if len(opDef['parameters'].keys()) > 0:
                    file.write('parameters')
            file.write('):\n')
            # write the docsstring for the function
            if 'description' in opDef:
                file.write('        """ \n')
                writeDescription(opDef['description'], '        ', file)
                file.write('        """')
            file.write('\n')
            file.write('\n')
# given a type which has operations, write out a skeleton which can be hand-augmented
def makeOperationSuperType(schemaDict, allSchemas):
    """Create a hand-editable `FooOperations` skeleton for a schema.

    The file is only written when it does not already exist, so custom code
    added by hand survives regeneration. Returns the generated type name.
    """
    typeName = schemaDict['name'] + 'Operations'
    superName = getSuperTypeName(schemaDict, allSchemas)
    pathName = os.path.join(TYPE_DIR, typeName + '.py')
    if not os.path.exists(pathName):
        # `with` guarantees the handle is closed even if a write fails
        # (the original used a manual close and stray semicolons).
        with open(pathName, 'w') as file:
            writeClassHeader(file, typeName, superName)
            file.write('    """ Your implemenation of operations for the ' + schemaDict['name'] + ' type."""\n')
            file.write('\n')
            writeOperations(file, schemaDict, 'operations', False)
            writeOperations(file, schemaDict, 'rootOperations', True)
    return typeName
# Add a toJson method.
# Thanks to stackoverflow here http://stackoverflow.com/questions/11637293/iterate-over-object-attributes-in-python
def writeToJson(file):
    """Append the generated _handleObject/toJson serializers to the class file."""
    file.writelines([
        "\n",
        "    def _handleObject(self, obj):\n",
        '        """ Handler for when JSON writing encounters an object. """\n',
        "\n",
        "        if getattr(obj, 'toJson'):\n",
        "            result = {}\n",
        "            attrNames = [attr for attr in dir(obj) if not attr.startswith('__') and not callable(getattr(obj,attr))]\n",
        "            for name in attrNames:\n",
        "                result[name] = getattr(obj, name)\n",
        "            return result\n",
        "        else:\n",
        "            raise TypeError()\n",
        "\n",
        "    def toJson(self):\n",
        '        """ Write the data attributes of this type to JSON format"""\n',
        "\n",
        "        selfAsDict = self._handleObject(self)\n",
        "        return json.dumps(selfAsDict, default=self._handleObject)\n",
        "\n",
    ])
def writeFromJson(file):
    """Emit source for a fromJson method that bulk-assigns matching attributes."""
    file.write(
        "\n"
        "    def fromJson(self, jsonData):\n"
        '        """ set the properties of this object from the json blob. silently ignore irrelevant props. (bad, bad!)"""\n'
        "\n"
        "        for key in jsonData:\n"
        "            if hasattr(self, key):\n"
        "                setattr(self, key, jsonData[key])\n"
        "\n"
    )
def writeInit(file, typeName):
    """Emit an __init__ that records the instance's schema type name."""
    pieces = [
        '\n',
        '    def __init__(self):\n',
        '        """ initialize the instance """\n',
        '        self.type = "' + typeName + '"\n',
    ]
    for text in pieces:
        file.write(text)
# Given a schema, create a python class for it
def makePythonType(schemaDict, allSchemas):
    """Generate a Python class file for *schemaDict*.

    Returns the list of type names written: the operations superclass
    (when one is generated) plus the data class itself.
    """
    superName = None
    types = []
    # Deal with inheritance: a schema with operations gets a hand-editable
    # operations superclass; otherwise inherit straight from its supertype.
    if 'operations' in schemaDict or 'rootOperations' in schemaDict:
        superName = makeOperationSuperType(schemaDict, allSchemas)
        types.append(superName)
    else:
        superName = getSuperTypeName(schemaDict, allSchemas)
    # create the output file, overwriting if it already exists; 'with'
    # closes the handle even when a write raises (the original leaked it).
    typeName = schemaDict['name']
    types.append(typeName)
    with open(os.path.join(TYPE_DIR, typeName + '.py'), 'w') as out:
        # Root types (no superclass) carry the JSON helpers, so need json.
        if superName is None:
            out.write('import json\n\n')
        writeClassHeader(out, typeName, superName)
        # Generate the docstring
        if 'description' in schemaDict:
            out.write('    """ \n')
            writeDescription(schemaDict['description'], '    ', out)
            out.write('    Auto-generated class from JSON Schema definition\n')
            out.write('    """')
        else:
            out.write('    """ Auto-generated class from JSON Schema definition """\n')
        out.write('\n')
        writeInit(out, typeName)
        # write out each property, and assign a default value if any
        writeProperties(out, schemaDict, allSchemas)
        # Only root classes get the JSON helpers; subclasses inherit them.
        if superName is None:
            writeToJson(out)
            writeFromJson(out)
        out.write('\n')
    return types
# Create the output directory if it doesn't already exist
if not os.path.exists(TYPE_DIR):
    os.mkdir(TYPE_DIR)
# read in all schemas
allJsonSchemas = {}
for root, dirs, files in os.walk('../schema'):
    for fileName in files:
        if fileName.endswith(".json"):
            file = open(root + '/' + fileName, 'r')
            jsonData = json.load(file)
            file.close()
            allJsonSchemas['/' + fileName] = jsonData
# Second pass over a local 'schema' dir; keys are the bare file names, so a
# file of the same name here overwrites the '../schema' copy.
for root, dirs, files in os.walk('schema'):
    for fileName in files:
        if fileName.endswith(".json"):
            file = open(root + '/' + fileName, 'r')
            jsonData = json.load(file)
            file.close()
            allJsonSchemas['/' + fileName] = jsonData
# Convert all schemas to Python types
allTypes = []
for schemaName in allJsonSchemas:
    types = makePythonType(allJsonSchemas[schemaName], allJsonSchemas)
    for type in types:
        allTypes.append(type)
#
# Write out the package file
#
file = open(os.path.join(TYPE_DIR, '__init__.py'), 'w')
allTypes.sort()
# Write __all__
file.write("__all__ = ['" + "', '".join(allTypes) + "']\n")
file.write('\n')
# import all the classes
for type in allTypes:
    file.write('from ' + type + ' import ' + type + '\n')
file.write('\n')
# write out rootedTypeMapping: for each schema, walk up the supertype chain
# until a schema declaring 'root' is found; map name -> rooted ancestor name
# (None when no ancestor declares a root).
file.write('rootedTypeMapping = {\n')
lines = []
for schemaRef in allJsonSchemas:
    root = None
    currentRef = schemaRef
    currentDict = allJsonSchemas[currentRef]
    if 'root' in currentDict:
        root = currentDict['root']
    while root is None and currentRef is not None:
        currentRef = getSuperTypeRef(allJsonSchemas[currentRef], allJsonSchemas)
        if currentRef is not None:
            currentDict = allJsonSchemas[currentRef]
            if 'root' in currentDict:
                root = currentDict['root']
    line = " '" + allJsonSchemas[schemaRef]['name'] + "' : "
    if currentRef is None:
        line += 'None'
    else:
        line += "'" + currentDict['name'] + "'"
    lines.append(line);
lines.sort();
file.write(',\n'.join(lines) + '\n}\n')
file.write('\n')
# Write the root URL mapping: root path -> {'name', 'klass'} for every schema
# that declares a 'root' of its own.
file.write('rootMapping = {\n')
lines = []
for schemaRef in allJsonSchemas:
    dict = allJsonSchemas[schemaRef]
    if 'root' in dict:
        line = " '" + dict['root'] + "' : {\n"
        line += " 'name': '" + dict['name'] + "',\n"
        line += " 'klass': " + dict['name'] + ",\n"
        line += " }"
        lines.append(line)
lines.sort();
file.write(',\n'.join(lines) + '\n}\n')
file.write('\n')
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import cgitb
import sys
from cgi import FieldStorage
from codecs import getwriter
from inspect import currentframe, getfile
from os import path
from jinja2 import Environment, FileSystemLoader
# Import our utilitiy functions
from utilities import password, db_name, engine
from utilities import (get_20_most_popular_sql, get_products_filtered_sql, get_products_ids_sql, get_products_search_sql, get_categories_sql, get_subcategories_sql, write_order_sql)
# Re-wrap stdout so all template output is UTF-8 encoded.
sys.stdout = getwriter("utf-8")(sys.stdout.detach())
cgitb.enable() # Enable debugging
# The CGI response header must precede any body output.
print("Content-Type: text/html; charset=UTF-8\n")
# Absolute directory of this script, so templates resolve regardless of CWD.
cmd_folder = path.realpath(
    path.abspath(path.split(getfile(currentframe()))[0]))
env = Environment(loader=FileSystemLoader(path.join(cmd_folder, 'templates')))
# When served from cgi-bin, static asset links need a '../' prefix.
env.globals = {'path': '../' if 'cgi-bin' in cmd_folder else ''}
def products(limits, filters=None):
    """Render the product listing page.

    limits  -- unused placeholder kept for the call sites
    filters -- optional dict of gender/type/subtype restrictions; when
               None, the 20 most popular products are shown instead
    """
    template = env.get_template('products.html')
    if filters is None:
        data = get_20_most_popular_sql()
    else:
        data = get_products_filtered_sql(filters)
    # Cap the listing at 20 rows, otherwise the page is unwieldy.
    data = data[:20] if len(data) > 20 else data
    try:
        rendered = template.render(title='BestBuy', products=data)
        print(rendered)
    except Exception as e:
        print(e)
def categories(limits):
    """Render the top-level category page (limits is an unused placeholder)."""
    data = get_categories_sql()
    template = env.get_template('categories.html')
    try:
        rendered = template.render(title='BestBuy', categories=data)
        print(rendered)
    except Exception as e:
        print(e)
# Need to do same thing as above but for subcategories. call the get_subcategories()
# function with gender and main category as parameters
def subcategories(limits, gender, category):
    """Render the subcategory listing for *gender*/*category* (limits unused)."""
    data = get_subcategories_sql(gender, category)
    template = env.get_template('subcategories.html')
    try:
        rendered = template.render(title='BestBuy', categories=data)
        print(rendered)
    except Exception as e:
        print(e)
def cart():
    """Render the shopping-cart page from the 'cart' cookie.

    The cookie value is a %2C-separated list of product ids wrapped in
    brackets, e.g. "[1%2C2%2C3]".
    """
    from os import environ
    cart = []
    try:
        if 'HTTP_COOKIE' in environ:
            # Parse the raw cookie header into a dict and pull out 'cart'.
            cart_data = {
                i[0]: '='.join(i[1:])
                for i in [
                    cookie.split('=')
                    for cookie in environ['HTTP_COOKIE'].split('; ')
                ]
            }.get('cart')
            if cart_data:
                # try added to work around the "invalid ... for int base 10" bug in cart
                # cart_data.strip("[]").split("%2C") yields a list of product-id strings
                try:
                    # convert to a list of ints; this should not really be needed...
                    value = [*map(int, cart_data.strip("[]").split("%2C"))]
                    cart, temp_var = get_products_ids_sql(value)
                    # per-product subtotal: price times quantity from the cookie
                    testing = []
                    for i in cart:
                        temp = i["price"]*temp_var[i["id"]]
                        testing.append(temp)
                except:
                    # NOTE(review): bare except silently hides parse/DB errors
                    pass
        # price sum fix: 'testing' is unbound when the cookie was missing/bad
        try:
            value = testing
        except:
            value = [0]
        template = env.get_template('cart.html')
        print(template.render(
            title=f'BestBuy (cart)',
            cart=cart,
            price=sum(value),
        ))
        """print(template.render(title='BestBuy (cart)', cart=[
            {'brand': 'brand', 'name': 'Name', 'size': 'XXXL', 'price': 2323, 'color': "red"},
            {'brand': 'brand', 'name': 'Name', 'size': 'XL', 'price': 2323, 'color': "red"},
        ]))"""
    except Exception as e:
        print(e)
def checkout():
    """Persist the submitted order and render the confirmation page."""
    try:
        fields = ('email', 'name', 'address', 'zipcode', 'town', 'items')
        order = {field: form.getvalue(field) for field in fields}
        write_order_sql(order)
        template = env.get_template('checkout.html')
        print(template.render(
            title='BestBuy',
            address=order['address'],
        ))
    except Exception as e:
        print(e)
def search(words):
    """Render the products matching the given list of search words."""
    try:
        matches = get_products_search_sql(words)
        page = env.get_template('products.html')
        print(page.render(title='BestBuy', products=matches))
    except Exception as e:
        print(e)
# Create instance of FieldStorage
form = FieldStorage()
action = form.getvalue('action')
# Route the request on the 'action' query parameter.  The first positional
# argument of the page functions ('limits') is an unused placeholder.
if action == 'category':
    categories("")
elif action == 'cart':
    cart()
elif action == 'checkout':
    checkout()
elif action == 'subcategory':
    gender = form.getvalue('gender')
    category = form.getvalue('category')
    subcategories("", gender, category)
elif action == 'filtered_products':
    filters = {
        'gender': form.getvalue('gender'),
        'type': form.getvalue('category'),
        'subtype': form.getvalue('subcategory')
    }
    products("", filters)
elif action == 'search': # Not done. Not even started actually :)
    words = form.getvalue('search').split()
    search(words)
else:
    # Default page: the 20 most popular products.
    products("")
|
from flask import Flask, request
from celery import Celery
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.migrate import Migrate
from raven.contrib.flask import Sentry
import logging
app = Flask(__name__)
app.config.from_object('settings')
@app.before_request
def _cache_data():
    # Force the request body to be read and cached before handlers run.
    request.get_data()
# Optional Sentry error reporting.  'sentry' must be bound either way so
# importers can inspect it -- the original left it undefined when
# SENTRY_DSN was absent from the config (KeyError path).
sentry = None
try:
    if app.config['SENTRY_DSN'] is not None:
        sentry = Sentry(app, level=logging.ERROR, wrap_wsgi=True)
except KeyError:
    pass
# Setup database
db = SQLAlchemy(app)
migrate = Migrate(app, db)
# Setup task queue
celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'])
celery.conf.update(app.config)
TaskBase = celery.Task
class ContextTask(TaskBase):
    """Celery task that always runs inside the Flask application context."""
    abstract = True
    def __call__(self, *args, **kwargs):
        with app.app_context():
            return TaskBase.__call__(self, *args, **kwargs)
celery.Task = ContextTask
# Imported late on purpose: these modules need app/db/celery defined above.
import scrapy_settings
import models
import tasks
from views import *
|
# Day 19: Monster Messages
# <ryc> 2021
def inputdata():
    """Parse day_19_2020.input into (grammar, messages).

    grammar maps rule id -> list of alternatives; each alternative is a
    list of rule ids and/or literal characters.  messages are the
    remaining (newline-terminated) lines after the blank separator.
    """
    with open('day_19_2020.input') as stream:
        data = [line for line in stream]
    grammar = dict()
    line = data.pop(0)
    while len(line) != 1:            # stop at the blank separator line ('\n')
        tokens = line.split()
        key = tokens.pop(0)[:-1]     # drop the trailing ':' from the rule id
        alternatives = [[]]
        slot = 0
        for token in tokens:
            if token == '|':
                slot += 1
                alternatives.append([])
            else:
                # strip surrounding quotes from literal terminals
                alternatives[slot].append(token if token[0] != '"' else token[1:-1])
        grammar[key] = alternatives
        line = data.pop(0)
    return grammar, data
def validate(line, grammar, first='0'):
    """Return True when *line* (newline-terminated) derives from rule *first*.

    Depth-first search over pending expansions; each stack entry is
    [remaining symbols, position in line].  BUG FIX: the original returned
    False as soon as one alternative hit a mismatched terminal, abandoning
    every other pending alternative still on the stack.  A dead branch is
    now simply discarded so all alternatives are explored.  Also guards
    line indexing so a line without a trailing newline cannot raise.
    """
    stack = [[[first], 0]]
    while stack:
        tail, ptr = stack.pop()
        if not tail:
            # Fully expanded: valid only if exactly the newline remains.
            if ptr < len(line) and line[ptr] == '\n':
                return True
            continue
        head, rest = tail[0], tail[1:]
        if head in grammar:
            # Non-terminal: branch into every alternative expansion.
            for option in grammar[head]:
                stack.append([option + rest, ptr])
        elif ptr < len(line) and head == line[ptr]:
            # Terminal matched: consume one character.
            stack.append([rest, ptr + 1])
        # else: mismatched terminal -- drop this branch, keep searching
    return False
def run(grammar, data):
    """Count the messages in *data* accepted by *grammar*."""
    # == True mirrors the original: validate may return None for a miss.
    return sum(1 for line in data if validate(line, grammar) == True)
if __name__ == '__main__':
    print('\n19: Monster Messages')
    grammar, data = inputdata()
    # Part 1: count messages matched by the unmodified grammar.
    print('\nvalid lines =', run(grammar, data))
    # Part 2: rules 8 and 11 become recursive (8: 42 | 42 8 and
    # 11: 42 31 | 42 11 31), expressed via helper rules 8p/11p.
    grammar['8'] = [['42', '8p' ]]
    grammar['8p'] = [['8'], []]
    grammar['11'] = [['42', '11p']]
    grammar['11p'] = [['31'], ['11', '31']]
    print('\nmodified grammar, valid lines', run(grammar, data))
|
def map(function, iterable):
    """Apply *function* to each element of *iterable*, returning a list.

    Kata re-implementation of the builtin map (the builtin is disabled).
    """
    result = []
    for element in iterable:
        result.append(function(element))
    return result
'''
In this kata, you need to make your own map function. The way map works is that
it accepts two arguments: the first one is a function, the second one is an
array, a tuple, or a string. It goes through the array, applying the function
to each element of an array and storing the result in a new array. The new array
is the result of the map function. You may read more on the map function here.
You should return a list (python 2 style) instead of a generator (python 3).
Note: as Python already has a built-in map function, that is disabled.
Examples
map(sum, [[1, 2, 3], [4, 5], [6, 7, 8]]) ==> [6, 9, 21]
map(str, [1, 2, 3]) ==> ['1', '2', '3']
map(int, ['34', '23']) ==> [34, 23]
'''
|
from ConceptualDependency import Distance, CSV, histogram, Sort
import numpy as np
import pandas as pd
def main():
    """Compute distance statistics for the babel paraphrase corpus.

    Reads the BERT-classified paraphrase TSV, computes edit and Jaccard
    distances per sentence pair, collects pairs with edit distance >= 8,
    then writes summaries, CSVs and histograms under babel_outputs/.
    """
    print("Reading file...")
    texts = CSV.read_to_list("data/babel_paraphrases_bert_classifications.tsv", "tsv")
    print("++++++++++++++++++++++++++++++\nReading done")
    print("Calculating...")
    output = []        # [id1, id2, jaccard, edit, sent1, sent2, len1, len2]
    output_bert = []   # same rows, with the BERT classification included
    output_E8 = []     # only rows with edit distance >= 8
    original_sentences_len = []  # lengths of the first 432 original sentences
    bert_number_1_g8 = 0  # pairs with ED >= 8 and BERT classification == 1
    ED_number_g8 = 0      # pairs with ED >= 8
    i = 0
    for pairs in texts:
        print(i / 186624)  # progress (186624 = expected number of pairs)
        text_num_1 = pairs[0]
        text_num_2 = pairs[1]
        bert_classification = pairs[2]
        sent1 = pairs[3]
        sent2 = pairs[4]
        # distances
        edit_distance = Distance.editDistance(sent1, sent2)
        jaccard_distance = Distance.jaccardDistance(sent1, sent2)
        # sentence lengths in whitespace-separated tokens
        len_sent1 = len(sent1.split(" "))
        len_sent2 = len(sent2.split(" "))
        lst = [text_num_1, text_num_2, str(jaccard_distance), str(edit_distance), sent1, sent2, len_sent1, len_sent2]
        lst_with_bert = [text_num_1, text_num_2, bert_classification, str(jaccard_distance), str(edit_distance), sent1,
                         sent2, len_sent1, len_sent2]
        output.append(lst)
        output_bert.append(lst_with_bert)
        # -------- collect pairs with edit distance >= 8
        if edit_distance >= 8:
            output_E8.append(lst)
            ED_number_g8 += 1
            if int(bert_classification) == 1:
                bert_number_1_g8 += 1
        # the first 432 rows carry the original sentences
        if i <= 431:
            original_sentences_len.append(len_sent2)
        i += 1
    original_median = np.median(original_sentences_len)
    original_mean = np.mean(original_sentences_len)
    print("Calculation done.")
    # Guard against an empty ED >= 8 bucket (original divided by zero).
    bert_ratio = bert_number_1_g8 / ED_number_g8 if ED_number_g8 else 0.0
    summary = ("original_median: " + str(original_median) + "\n" +
               "original_mean: " + str(original_mean) + "\n" +
               "% of bert == 1: " + str(bert_ratio))
    print(summary)
    print("-------------------------------------\n"
          "Writing results to file...")
    # BUG FIX: the file previously wrote ED_number_g8 / ED_number_g8
    # (always 1.0) instead of the BERT ratio that was printed above.
    with open("babel_outputs/output.txt", "w") as txt:
        txt.write(summary)
    CSV.write_data("babel_outputs/output_with_berts.csv", output_bert, with_bert=True)
    CSV.write_data("babel_outputs/output.csv", output)
    CSV.write_data("babel_outputs/output_e8.csv", output_E8, with_bert=True)
    print("++++++++++++++++++++++++++++++\nWriting done")
    print("Sorting...")
    Sort.sort()
    print("Sorting done")
    edit_hist = histogram.edit_hist()
    jaccard_hist = histogram.jaccard_hist()
    pd.DataFrame(edit_hist).to_csv('babel_outputs/edit_histogram.csv', index=False)
    pd.DataFrame(jaccard_hist).to_csv('babel_outputs/jaccard_histogram.csv', index=False)
main()
|
import fresh_tomatoes
import media
# Import fresh_tomatoes is starter that must be downloaded
# and added to the movie project folder.
# Each media.Movie instance takes: title, storyline, poster image URL,
# and YouTube trailer URL.
# BUG FIX (all movies below): adjacent string literals were concatenated
# without separating spaces, producing run-together words such as
# "asuperhero" and "officerand" in the storylines.
big_hero = media.Movie(
    "Big Hero 6",
    "A story of a young robotics prodigy named Hiro Hamada who forms a "
    "superhero team to combat a masked villain (Wikipedia).",
    "http://t2.gstatic.com/images?q=tbn:ANd9GcQzyu98HxFhB68UKqRKSrTKknXHI-gtSTAAX0CGiKBM980CFhI1",  # noqa
    "https://www.youtube.com/watch?v=z3biFxZIJOQ")  # noqa
# This is the instance of the movie Amelie.
amelie = media.Movie(
    "Amelie",
    "Amelie is an innocent and naive girl in Paris with her own sense of "
    "justice. She decides to help those around her and, along the way, "
    "discovers love (Wikipedia).",
    "http://cdn.miramax.com/media/assets/Amelie1.png",  # noqa
    # NOTE(review): this "trailer" URL points at a JPEG, not a video -- confirm.
    "http://www.magiclanternfilmsociety.org/wp-content/uploads/2012/06/Amelie.jpg")  # noqa
# This is the instance of the movie Zootopia.
zootopia = media.Movie(
    "Zootopia",
    "The film details the unlikely partnership between a rabbit police officer "
    "and a red fox con artist as they uncover a conspiracy that involves the "
    "disappearance of predator civilians within a mammalian "
    "utopia (Wikipedia).",
    "http://vignette4.wikia.nocookie.net/disney/images/2/2f/Zootopia_Poster.jpg/revision/latest?cb=20151210185516",  # noqa
    "https://www.youtube.com/watch?v=bY73vFGhSVk")  # noqa
# This is the instance of the movie The Fifth Element.
fifth_element = media.Movie(
    "The Fifth Element",
    "Korben Dallas (Willis), a taxicab driver and former special forces major, "
    "after a young woman (Jovovich) falls into his cab. Dallas joins forces "
    "with her to recover four mystical stones essential for the defence of "
    "Earth against an impending attack (Wikipedia).",
    "http://www.gstatic.com/tv/thumb/movieposters/19352/p19352_p_v8_af.jpg",  # noqa
    "https://www.youtube.com/watch?time_continue=1&v=7-9mTiBawSM")  # noqa
# This is the instance of the movie Star Wars.
star_wars = media.Movie(
    "Star Wars",
    "Thirty years after the defeat of the Galactic Empire, the galaxy faces a "
    "new threat from the evil Kylo Ren (Adam Driver) and the "
    "First Order (Wikipedia).",
    "http://t0.gstatic.com/images?q=tbn:ANd9GcQZKZtrlY3dnzsjBIGKR_b1QhkgZfM4-FIcH61uHnLQRR3WpNhk",  # noqa
    "https://www.youtube.com/watch?time_continue=3&v=Hyc84zvhbQU")  # noqa
# This is the instance of the movie Dark Knight.
dark_knight = media.Movie(
    "Dark Knight",
    "Bruce Wayne/Batman (Bale), James Gordon (Oldman) and "
    "Harvey Dent (Eckhart) form an alliance to dismantle organised crime in "
    "Gotham City, but are menaced by a criminal mastermind known as the Joker "
    "who seeks to undermine Batman's influence and create chaos (Wikipedia).",
    "http://host.trivialbeing.org/up/tdk-apr28-new-poster-posterexclusivoomelete6.jpg",  # noqa
    "https://www.youtube.com/watch?v=EXeTwQWrcwY")  # noqa
# Here, movies is the ordered list shown on the page.
movies = [big_hero, amelie, zootopia, fifth_element, star_wars, dark_knight]
# Fresh_tomatoes renders the list of movies into a webpage and opens it.
fresh_tomatoes.open_movies_page(movies)
|
def odd_ones_out(numbers):
    """Keep only the values occurring an even number of times, order preserved."""
    survivors = []
    for value in numbers:
        if numbers.count(value) % 2 == 0:
            survivors.append(value)
    return survivors
'''
The town sheriff dislikes odd numbers and wants all odd numbered families out
of town! In town crowds can form and individuals are often mixed with other
people and families. However you can distinguish the family they belong to by
the number on the shirts they wear. As the sheriff's assistant it's your job
to find all the odd numbered families and remove them from the town!
Challenge: You are given a list of numbers. The numbers each repeat a certain number
of times. Remove all numbers that repeat an odd number of times while keeping
everything else the same.
odd_ones_out([1, 2, 3, 1, 3, 3]) = [1, 1]
In the above example:
the number 1 appears twice
the number 2 appears once
the number 3 appears three times
2 and 3 both appear an odd number of times, so they are removed from the list.
The final result is: [1,1]
Here are more examples:
odd_ones_out([1, 1, 2, 2, 3, 3, 3]) = [1, 1, 2, 2]
odd_ones_out([26, 23, 24, 17, 23, 24, 23, 26]) = [26, 24, 24, 26]
odd_ones_out([1, 2, 3]) = []
odd_ones_out([1]) = []
'''
|
#-*- coding: utf-8 -*-
import pygame
import sys
import gtk
import solar_system
def actions():
    """Drain the pygame event queue; exit the process on a QUIT event."""
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            sys.exit(0)
def main():
    """Open a pygame window sized to the desktop (minus a margin) and animate."""
    window = gtk.Window()
    window_screen = window.get_screen()  # used only to query desktop size
    pygame.init()
    screen = pygame.display.set_mode((window_screen.get_width()-50, window_screen.get_height()-50))
    s = solar_system.SolarSystem(window_screen.get_width()-50, window_screen.get_height()-50)
    s.calc_data()
    # Main loop: actions() handles the QUIT event (and exits the process).
    while 1:
        actions()
        s.draw(screen)
        pygame.display.update()
if __name__ == "__main__":
    main()
__author__ = 'lisgein'
|
from abc import ABC, abstractmethod
from scipy import signal
import numpy as np
class FilterError(Exception):
    """Domain-specific error raised by the filtering layer."""
class Filter(ABC):
    """Abstract base class shared by every filter implementation."""

    @abstractmethod
    def process(self, data):
        """Process the filter and return the processed data.

        Parameters
        ----------
        - data: `array like`
            samples to filter

        Returns
        -------
        - array
            processed data
        """
        return np.ndarray
class ButterBandPass(Filter):
    """Zero-phase Butterworth band-pass filter."""
    def __init__(self, lowcut, highcut, fs=256, order=4):
        # Normalise both cut-offs by the Nyquist frequency before design.
        nyquist = fs * 0.5
        self.b, self.a = signal.butter(
            order, [lowcut / nyquist, highcut / nyquist], btype='bandpass')
    def process(self, data):
        """Filter forward and backward (zero phase) along axis 1."""
        return signal.filtfilt(self.b, self.a, data, axis=1)
class ButterLowPass(Filter):
    """Zero-phase Butterworth low-pass filter."""
    def __init__(self, lowcut, fs=256, order=4):
        # Normalise the cut-off by the Nyquist frequency before design.
        nyquist = fs * 0.5
        self.b, self.a = signal.butter(order, lowcut / nyquist, btype='lowpass')
    def process(self, data):
        """Filter forward and backward (zero phase) along axis 1."""
        return signal.filtfilt(self.b, self.a, data, axis=1)
class ButterHighPass(Filter):
    """Zero-phase Butterworth high-pass filter."""
    def __init__(self, highcut, fs=256, order=4):
        # Normalise the cut-off by the Nyquist frequency before design.
        nyquist = fs * 0.5
        self.b, self.a = signal.butter(order, highcut / nyquist, btype='highpass')
    def process(self, data):
        """Filter forward and backward (zero phase) along axis 1."""
        return signal.filtfilt(self.b, self.a, data, axis=1)
class Notch(Filter):
    """Band-stop (notch) Butterworth filter centred on *cutoff* +/- *var* Hz."""
    def __init__(self, cutoff, var=1, fs=256, order=4):
        nyq = fs * 0.5
        low = (cutoff - var) / nyq
        high = (cutoff + var) / nyq
        b, a = signal.iirfilter(
            order, [low, high], btype='bandstop', ftype="butter")
        self.b = b
        self.a = a
    def process(self, data):
        # BUG FIX: signal.sfiltfilt does not exist (AttributeError at
        # runtime); the zero-phase filter is signal.filtfilt.  axis=-1 is
        # equivalent to the siblings' axis=1 for the 2-D arrays used here.
        return signal.filtfilt(self.b, self.a, data, axis=-1)
def apply_filter(dataIter, lo=None, hi=None, **kargs):
    """Yield filtered chunks from *dataIter*.

    lo and hi select band-pass (both), low-pass (lo only) or high-pass
    (hi only); extra kwargs go to the filter constructor.  Chunks are
    transposed for filtering, then transposed back.  Fixes two issues:
    the filter is now built once instead of once per chunk, and
    exhaustion of *dataIter* ends the generator cleanly instead of
    letting StopIteration escape (a RuntimeError under PEP 479).
    """
    if lo and hi:
        filt = ButterBandPass(lo, hi, **kargs)
    elif lo:
        filt = ButterLowPass(lo, **kargs)
    elif hi:
        filt = ButterHighPass(hi, **kargs)
    else:
        return  # no cut-offs given: nothing to yield (matches original)
    for chunk in dataIter:
        yield filt.process(np.array(chunk).T).T
def notch(dataIter, cutoff, **kargs):
    """Yield notch-filtered chunks from *dataIter* (see Notch).

    The filter is built once (not per chunk), and iterator exhaustion ends
    the generator cleanly instead of leaking StopIteration (PEP 479).
    """
    filt = Notch(cutoff, **kargs)
    for chunk in dataIter:
        yield filt.process(np.array(chunk).T).T
|
import bcrypt
from sqlalchemy import *
from sqlalchemy.orm import synonym, relationship
from app.common.extensions import Model
from app.common.utils import *
import urllib.parse
from flask import abort
def datetime_format(dt, f):
    """Format *dt* with strftime pattern *f*; falsy (None) input yields None."""
    if dt:
        return dt.strftime(f)
    return None
class WaterMeter(Model):
    """ORM model for a customer's water-meter reading record."""
    __tablename__ = 'water_meter'
    idx = Column('idx', INT, primary_key=True, autoincrement=True)
    customer_id = Column('customer_id', INT, nullable=False)
    owner_id = Column('owner_id', INT, nullable=False)
    # Both index columns reference the record-index log table.
    previous_record_idx = Column('previous_record_idx', INT, ForeignKey('record_idx_logs.idx'), nullable=False)
    record_idx = Column('record_idx', INT, ForeignKey('record_idx_logs.idx'), nullable=False)
    record_count = Column('record_count', INT, nullable=False)
    _created_at = Column('created_at', DATETIME, nullable=False, server_default=func.current_timestamp())
    _updated_at = Column('updated_at', DATETIME, nullable=True)
    _deleted_at = Column('deleted_at', DATETIME, nullable=True)
    def __init__(self, customer_id='', owner_id=None, previous_record_idx=None, record_idx=None, record_count=None,
                 **kwargs):
        # BUG FIX: the original assignments ended with stray commas, which
        # stored 1-tuples such as (customer_id,) instead of the scalars.
        self.customer_id = customer_id
        self.owner_id = owner_id
        self.previous_record_idx = previous_record_idx
        self.record_idx = record_idx
        self.record_count = record_count
    @property
    def created_at(self):
        """Creation timestamp formatted 'Y.m.d H:M:S' (None when unset)."""
        return datetime_format(self._created_at, '%Y.%m.%d %H:%M:%S')
    created_at = synonym('_created_at', descriptor=created_at)
    @property
    def updated_at(self):
        """Last-update timestamp formatted for display (None when unset)."""
        return datetime_format(self._updated_at, '%Y.%m.%d %H:%M:%S')
    updated_at = synonym('_updated_at', descriptor=updated_at)
    @property
    def deleted_at(self):
        """Soft-delete timestamp formatted for display (None when unset)."""
        return datetime_format(self._deleted_at, '%Y.%m.%d %H:%M:%S')
    @deleted_at.setter
    def deleted_at(self, deleted_at):
        self._deleted_at = deleted_at
    deleted_at = synonym('_deleted_at', descriptor=deleted_at)
    def dict(self, filter=None):
        """Return the row as a plain dict; *filter* optionally whitelists keys."""
        dic = dict(
            idx=self.idx,
            customer_id=self.customer_id,
            owner_id=self.owner_id,
            prev_idx=self.previous_record_idx,
            record_idx=self.record_idx,
            record_count=self.record_count
        )
        if filter is not None:
            dic = {key: dic.get(key, None) for key in dic if key in filter}
        return dic
|
# logging.py --- all code related to the logging of the game
import datetime
class Logger:
    """Appends algebraic-notation move lines to a per-game log file."""
    def __init__(self):
        # NOTE(review): str(datetime.now()) contains ':' and spaces, which
        # is not a valid filename on Windows -- confirm target platform.
        self.filename = "logs/log" + str(datetime.datetime.now()) + ".txt"
    def addLine(self, move, board, moveNo):
        """Append move number *moveNo* for *move* (e.g. '3. Nf3') using *board*."""
        files = "abcdefgh"
        col = files.index(move[3])
        row = 8 - int(move[4])
        piece = board[row][col].name[1].upper()
        if piece == "P":
            piece = ""  # pawns carry no piece letter in algebraic notation
        with open(self.filename, "a") as log:
            log.write(f"{str(moveNo)}. {piece}{move[3:5]}\n")
# Read integers until the sentinel 999, then report their sum and count.
soma = i = 0
while True:
    n = int(input('Digite um número: [999 For Stop]: \n'))
    if n == 999:
        # sentinel value: not added to the sum or the count
        break
    soma += n
    i += 1
print(f'A soma dos números foram: {soma}, e a quantidade de números digitados foram: {i}')
print('FIM!') |
class Stack:
    """List-backed stack that tracks its size and supports value removal."""
    def __init__(self):
        self.stack = []
        self.total = 0
    def push(self, element):
        """Push *element* and return the underlying list."""
        self.stack.append(element)
        self.total = len(self.stack)
        return self.stack
    def pop(self, data):
        """Remove *data* (searched from the top) or, when absent, the top element.

        Returns the underlying list.  BUG FIX: the original located *data*
        at index i but then popped the LAST element instead of element i,
        and did not refresh `total` on the fall-through pop.
        """
        for i in range(self.total-1, -1, -1):
            if self.stack[i] == data:
                self.stack.pop(i)
                self.total = len(self.stack)
                return self.stack
        self.stack.pop()
        self.total = len(self.stack)
        return self.stack
    def size(self):
        """Current number of elements."""
        return self.total
# Demo: push five values, print, then remove the value 3 and print again.
stack = Stack()
stack.push(1)
stack.push(2)
stack.push(3)
stack.push(4)
stack.push(5)
print(stack.stack)
stack.pop(3)
print(stack.stack) |
# “A human is someone whose mother is human”.
# Program by Mitchell Aikens
# No Copyright
# 2010
# Demonstrates unbounded recursion: counter() calls itself forever and
# eventually raises RecursionError (Python has no tail-call elimination).
num = 0
def main():
    counter(num)
def counter(num):
    # print the current depth, then recurse with num + 1 -- no base case
    print(num)
    num += 1
    counter(num)
main()
#====================================================
# Program by Mitchell Aikens
# No copyright
# 2012
def main():
    # ask for the iteration count, then recurse that many times
    loopnum = int(input("How many times would you like to loop?\n"))
    counter = 1
    recurr(loopnum,counter)
def recurr(loopnum,counter):
    """Recursion-as-a-loop: prints *counter* while loopnum counts down to 0."""
    if loopnum > 0:
        print("This is loop iteration",counter)
        recurr(loopnum - 1,counter + 1)
    else:
        print("The loop is complete.")
main()
# ===================================================
#!/usr/bin/env python
def sum(list):
    """Iteratively total a list of numbers (shadows the builtin for teaching)."""
    sum = 0
    # Add every number in the list.
    for i in range(0, len(list)):
        sum = sum + list[i]
    # Return the sum.
    return sum
print(sum([5,7,3,8,10]))
#!/usr/bin/env python
def sum(list):
    """Recursively total a list of numbers (shadows the builtin for teaching).

    Handles the empty list by returning 0 -- the original raised an
    IndexError on list[0] for [].
    """
    if not list:
        return 0
    return list[0] + sum(list[1:])
print(sum([5,7,3,8,10]))
# BUG FIX: this def was pasted with a '>>> ' REPL prompt in front of it,
# which is a SyntaxError in a module; the prompt has been removed.
def sum_digits(n):
    """Return the sum of the digits of positive integer n.

    >>> sum_digits(9)
    9
    >>> sum_digits(18117)
    18
    >>> sum_digits(9437184)
    36
    >>> sum_digits(11408855402054064613470328848384)
    126
    """
    if n < 10:
        return n
    else:
        all_but_last, last = n // 10, n % 10
        return sum_digits(all_but_last) + last
# Write a Python program to calculate the value of 'a' to the power 'b
# (power(3,4) -> 81
# Write a Python program to converting an Integer to a string in any base.
# Write a Python program of recursion list sum.
# Test Data: [1, 2, [3,4], [5,6]]
# Expected Result: 21 |
# Connection settings -- placeholder values; override with real credentials.
SERVER = 'host'  # database/host address
USERNAME = 'username'
PASSWORD = 'password'
|
#!/usr/bin/env python
import dataparser
import systemparser
import utils
import ranking
import sys
import time
test_file=sys.argv[1]
collection='sm'
article=dataparser.load_article_from_naf_file(test_file, collection)
# Labels treated as "no link" in the gold data.
NILS=['--NME--', '*null*']
current=0
# Candidate generation for every mention, computed in parallel up front.
allCands=utils.parallelizeCandidateGeneration(article.entity_mentions)
for m in article.entity_mentions:
    cands=allCands[m.mention]
    # Try to generate extra local candidates
    if current>0:
        # NOTE(review): [:current-1] skips the immediately preceding mention;
        # [:current] would cover all earlier mentions -- confirm intent.
        for m2 in article.entity_mentions[:current-1]:
            if utils.isSubstring(m.mention, m2.mention) or utils.isAbbreviation(m.mention, m2.mention):
                if m2.candidates:
                    cands |= m2.candidates
    # End of local candidates generation
    m.candidates=cands
    # Rank by popularity; fall back to 'NIL' when no candidate survives.
    orderedLinks=ranking.getMostPopularCandidates(m.candidates)
    if len(orderedLinks):
        m.sys_link=orderedLinks[0]
    else:
        m.sys_link='NIL'
    current+=1
print(test_file + ' done')
|
# Generated by Django 3.0.7 on 2020-07-02 08:34
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: adds Student.marks and widens Student.name to 30 chars."""
    dependencies = [
        ('newsApp', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='student',
            name='marks',
            field=models.IntegerField(default=1),
            # default=1 only backfills existing rows; it is not kept on the field
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='student',
            name='name',
            field=models.CharField(max_length=30),
        ),
    ]
|
def func1():
    ''' Read a large line-based text file '''
    with open('largefile.txt', 'rb') as infile:
        # NOTE: readlines() loads the WHOLE file into memory at once -- it
        # does not read one line at a time; iterate `infile` directly for
        # true line-by-line streaming.
        for line in infile.readlines():
            print line
def file_chunk_gen_func(infile, size):
    """Yield successive chunks of at most *size* bytes from *infile* until EOF."""
    chunk = infile.read(size)
    while chunk:
        yield chunk
        chunk = infile.read(size)
def func2():
    ''' Read a large non line-based text file in chunks '''
    # NOTE(review): the handle is never closed; a with-block would be safer.
    infile = open('largefile_no_lines.txt', 'rb')
    chunk_size = 1024
    # chunk_size is defined above but the literal 1024 is passed instead
    reader = file_chunk_gen_func(infile, 1024)
    for chunk in reader:
        print chunk
        print "================"
'''
Question 3
You are given N mobile numbers. Sort them in ascending order after which print them in standard format.
+91 xxxxx xxxxx
The given mobile numbers may have +91 or 91 or 0 written before the actual 10 digit number. Alternatively, there maynot be any prefix at all.
Input Format
An integer N followed by N mobile numbers.
Output Format
N mobile numbers printed in different lines in the required format.
Sample Input
4
07895462130
919875641230
9195969878
+919345563841
Sample Output
+91 78954 62130
+91 91959 69878
+91 98756 41230
'''
def formatter(func):
    """Decorator: normalise 10-digit numbers to '+91 xxxxx xxxxx', sorted.

    BUG FIX: the original sliced n[0:6] / n[6:11] (a 6+5 split) instead of
    the 5+5 split the required format specifies, and never sorted even
    though the problem statement asks for ascending order.
    """
    def wrapper():
        return sorted("+91 " + n[0:5] + " " + n[5:10] for n in func())
    return wrapper
@formatter
def func3():
    """Read raw numbers from mobile_nos.txt, keeping the last 10 digits of each.

    num[::-1][0:11][::-1] keeps the LAST 11 characters of the line -- the
    trailing newline plus 10 digits -- so prefixes like +91/91/0 are dropped.
    """
    valid_num = []
    with open('mobile_nos.txt') as infile:
        for num in infile.readlines():
            _n = num[::-1][0:11]
            valid_num.append(_n[::-1])
    return valid_num
'''
Question 4
Square the numbers in string
'''
import re
def func4():
    """Square every number embedded in a whitespace-separated digit string."""
    string = "1 2 3 4 5 6 7 8 9"
    # re.sub calls `square` once per match; \d+? is non-greedy so multi-digit
    # numbers would be squared digit-by-digit -- fine for this 1..9 input.
    square = lambda match: str(int(match.group())**2)
    print re.sub(r'\d+?', square, string)
def func5():
    """Strip HTML comments from an embedded sample document."""
    htmlstr = """
    <head>
    <title>HTML</title>
    </head>
    <object type="application/x-flash"
        data="your-file.swf"
        width="0" height="0">
        <!-- <param name="movie" value="your-file.swf" /> -->
        <param name="quality" value="high"/>
    </object>
    """
    # Greedy .* does not cross newlines, so only single-line comments match.
    print re.sub("<!--.*-->", "", htmlstr)
'''
Question:
You are given a text of N lines. The text contains && and || symbols.
Your task is to modify :
&& => and
|| => or
Both && and || should have space " " on both sides.
Input Format
First line contains integer, N.
Next N lines contains the text.
Constraints
0<N<100
Neither && nor || occur in start or end of each line.
'''
def func6():
    """Replace space-delimited && with 'and' and || with 'or' in sample text."""
    strtext = """
    a = 1;
    b = input();
    if a + b > 0 && a - b < 0:
        start()
    elif a*b > 10 || a/b < 1:
        stop()
    print set(list(a)) | set(list(b))
    #Note do not change &&& or ||| or & or |
    #Only change those '&&' which has space on both sides.
    #Only change those '|| which has space on both sides.
    """
    # Lookaround asserts a space on each side without consuming it.
    res = re.sub("(?<= )&&(?= )", "and", strtext)
    print re.sub("(?<= )\|\|(?= )", "or", res) # NOTE: | needs escaping unlike &
def main():
    """Driver for the examples above.

    BUG FIX: the original opened a triple-quoted string here that was never
    closed, making the whole module a SyntaxError (EOF in multi-line
    string).  The example calls are preserved below, disabled, inside a
    properly terminated string.
    """
    '''
    # Open & read a large line-based text file
    print "=== Example 1 ==="
    func1()
    # Open & read a large non-line based file in chunks
    print "=== Example 2 ==="
    func2()
    print "=== Example 3 === "
    nums = func3()
    print "\n".join(nums)
    print "=== Example 4 ==="
    func4()
    print "=== Example 5 ==="
    func5()
    print "=== Example 6 ==="
    func6()
    '''
if __name__ == "__main__":
    main()
from vendor.serializers import VendSerializer
from vendor.models import Vendor
from rest_framework.views import APIView
from rest_framework.response import Response
from django.shortcuts import get_object_or_404
class VendorView(APIView):
    """Read-only endpoint returning one serialized vendor."""

    def get(self, request, vendor_id, format=None):
        """Return the vendor with primary key ``vendor_id``, or a 404."""
        instance = get_object_or_404(Vendor, pk=vendor_id)
        payload = VendSerializer(instance, many=False).data
        return Response(payload)
|
from django.utils.translation import gettext_lazy as _

# Choice tuples: (stored value, localized label).
TRANSMISSION = (
    ("automatic", _("Automatic")),
    ("manual", _("Manual"))
)
SEATS = (
    # The material of seats.
    ("leather", _("Leather")),
    ("textile", _("Textile"))
)
CATEGORIES = (
    ("small", _("Small Cars")),  # suitable for carrying up to 4 people
    ("family", _("Family Cars")),  # suitable for carrying up to 7 adults
    ("van", _("Van"))  # Bigger cars; fix: the label was missing the _() translation call
)
|
"""
iou.py
~~~~~~
Calculates the amount of money each person owes for utilites based on the
amount they paid for each utility.
"""
import argparse
import math
import json
import sys
def entrypoint(config):
    """
    Entrypoint into utilities calculations. Reads the utilities from config
    and calculates who owes who.

    Parameters
    ----------
    config : str
        The path to that months config file.
    """
    split_the_difference(read_config(config))
def read_config(config):
    """
    Reads the JSON file that the string ``config`` points to.

    Parameters
    ----------
    config : str
        The path to that months config file.

    Returns
    -------
    dict
        The configuration in a dictionary.

    Exits the process with status 1 when the file is missing.
    """
    try:
        with open(config, "rb") as config_file:
            return json.load(config_file)
    except FileNotFoundError:
        print("Config file could not be loaded in, check the provided path.")
        sys.exit(1)
def split_the_difference(utilities):
    """
    Splits the difference between the parties

    Parameters
    ----------
    utilities : dict
        The utilities and their values in a dictionary.
    """
    grand_total = get_utils_total(utilities)
    banner = f"\n{'#' * 80}\n"
    print(banner)
    print(f"The grand total for utilities this month was: $ {zformat(grand_total)}\n")
    paid_by_person = get_individuals_paid(utilities)
    each_owe = split_total(grand_total, split_by=len(utilities.keys()))
    print(f"Splitting this evenly results in everyone paying {each_owe}\n")
    i_o_u(paid_by_person, each_owe)
    print(banner)
def get_utils_total(utilities):
    """
    Calculates the grand total for that months utilities.

    Parameters
    ----------
    utilities : dict
        The utilities and their values in a dictionary

    Returns
    -------
    float
        The total cost of utilities for a month, rounded to cents.
    """
    # Same accumulation order as the original += loop.
    return round(sum(math.fsum(bills.values()) for bills in utilities.values()), 2)
def get_individuals_paid(utilities):
    """
    Returns how much an invidual paid for their utilities

    Parameters
    ----------
    utilities : dict
        The utilities and their values in a dictionary

    Returns
    -------
    dict
        The individuals and what they paid as the value.
    """
    return {person: sum(bills.values()) for person, bills in utilities.items()}
def get_paid_most(individuals):
    """
    Returns whoever paid the most based on their total for the month

    Parameters
    ----------
    individuals : dict
        A dictionary of individuals and how much money they spent.

    Returns
    -------
    str
        The name of who paid the most.
    """
    return max(individuals, key=individuals.get)
def get_paid_least(individuals):
    """
    Returns whoever paid the least based on their total for the month

    Parameters
    ----------
    individuals : dict
        A dictionary of individuals and how much money they spent.

    Returns
    -------
    str
        The name of who paid the least.
    """
    return min(individuals, key=individuals.get)
def split_total(total, split_by):
    """
    Splits the total by however many are included in split_by.

    Bug fix: the old string-splitting approach dropped leading zeros in the
    decimal part (e.g. 100.05 became 100.5) and its ``decimal > 100`` test
    misclassified values like 10.005. This version works in integer cents;
    the last share absorbs any remainder so shares always sum to ``total``.

    Parameters
    ----------
    total : float
        The grand total for utilities
    split_by : int
        The number of individuals to split the cost by.

    Returns
    -------
    list
        A list of ``split_by`` shares; the last one may carry extra cents.
    """
    total_cents = round(total * 100)
    share_cents = total_cents // split_by
    shares = [share_cents / 100 for _ in range(split_by - 1)]
    # Whatever cents remain after the even split land on the last share.
    shares.append(round((total_cents - share_cents * (split_by - 1)) / 100, 2))
    return shares
def i_o_u(indiv_paid, each_owe):
    """
    Figures out who owes who how much. and prints the results.

    Greedily settles debts: each debtor pays creditors in the order they
    appear in ``owed`` (ascending by amount, per get_owed_indebted's
    sort_by_value) until the debtor's balance reaches zero. Mutates the
    local dicts returned by get_owed_indebted, not the arguments.

    Parameters
    ----------
    indiv_paid : dict
        The individuals and how much they paid.
    each_owe : list
        The equal split for the month.
    """
    owed, indebted = get_owed_indebted(indiv_paid, each_owe)
    print()
    for debtor in indebted:
        print(f"{debtor} owes:")
        for owes, value in owed.items():
            # Skip creditors already paid in full by earlier debtors.
            if value > 0:
                # Debtor has paid off their debt, move on to the next debtor.
                if indebted[debtor] == 0:
                    break
                if indebted[debtor] >= value:
                    # Debtor can cover this creditor entirely.
                    print(f"\t{owes} $ {zformat(round(value,2))} for this month's utilities.")
                    indebted[debtor] = indebted[debtor] - value
                    owed[owes] = value - value
                else:
                    # Partial payment: the debtor's remaining balance is exhausted.
                    print(f"\t{owes} $ {zformat(round(indebted[debtor], 2))} for this months utilites.")
                    owed[owes] = value - indebted[debtor]
                    indebted[debtor] = 0
def get_owed_indebted(indiv_paid, each_owe):
    """
    Returns dictionaries of who is owed money and how much and who owes
    money and how much.

    Anyone who paid more than the even share (``each_owe[0]``) is "owed";
    everyone else is "indebted". The person who paid the least is measured
    against the last share, ``each_owe[-1]``, which carries the rounding
    cent from split_total.
    NOTE(review): someone who paid exactly ``each_owe[0]`` lands in
    ``indebted`` with a 0 balance — harmless for i_o_u, but confirm intended.

    Parameters
    ----------
    indiv_paid : dict
        The individuals and how much they paid.
    each_owe : list
        The equal split for the month.

    Returns
    -------
    dict
        A dictionary sorted by values of who is owed how much.
    dict
        A dictionary sorted by values of who owes how much.
    """
    owed = {}
    indebted = {}
    for indiv, paid in indiv_paid.items():
        if paid > each_owe[0]:
            owed[indiv] = round(paid - each_owe[0], 2)
            print(
                f"{indiv} paid $ {zformat(paid)} and is owed "
                f"$ {zformat(round(paid - each_owe[0], 2))} for this months utilities."
            )
        elif indiv == get_paid_least(indiv_paid):
            # The least payer covers the last (rounded-up) share.
            indebted[indiv] = round(each_owe[-1] - paid, 2)
            print(
                f"{indiv} paid $ {zformat(paid)} and owes "
                f"$ {zformat(round(each_owe[-1] - paid, 2))} for this months utitlities."
            )
        else:
            indebted[indiv] = round(each_owe[0] - paid, 2)
            print(
                f"{indiv} paid $ {zformat(paid)} and owes "
                f"$ {zformat(round(each_owe[0] - paid, 2))} for this months utitlities."
            )
    return sort_by_value(owed), sort_by_value(indebted)
def sort_by_value(dictionary):
    """
    Sorts a dictionary by its values (ascending).

    Parameters
    ----------
    dictionary : dict
        A dictionary to sort by value.

    Returns
    -------
    dict
        The same dictionary sorted by values.
    """
    return dict(sorted(dictionary.items(), key=lambda item: item[1]))
def zformat(num):
    """
    Formats a number to have 2 decimal places for money purposes

    Bug fix: the old str/split version left more than two decimal places
    untouched (violating its own contract) and broke on numbers whose
    ``str`` is scientific notation. Standard fixed-point formatting handles
    padding, truncation/rounding, and integers uniformly.

    Parameters
    ----------
    num : float
        A number to format

    Returns
    -------
    str
        A money formatted version of the number
    """
    return f"{float(num):.2f}"
if __name__ == "__main__":
PARSER = argparse.ArgumentParser()
PARSER.add_argument(
"-c",
"--config",
type=str,
dest="config",
required=True,
help=(
"This argument should be used to provide a JSON config file to the script."
"Config files should have the individuals who pay utilities as keys and the"
"utilities and amounts they paid for the month. Use a single config file for"
"a month."
),
)
ARGS = PARSER.parse_args()
entrypoint(ARGS.config)
|
from odoo import fields, models, api
class HRDepartment(models.Model):
    # Odoo model for HR departments (user-facing labels kept in Spanish).
    _name = "hr.department"
    name = fields.Char('Nombre del departamento', required=True)  # department name, mandatory
    description = fields.Char("Descripción")  # optional free-text description
|
import ctypes
import six
from . import os
# Shared-library handles; None until _load()/_autoload() populates them.
libsgutils2 = None
libc = None
def _impl_check(f):
    """Return *f* unchanged when libsgutils2 exports a symbol of the same
    name; otherwise return a stub that raises NotImplementedError."""
    if hasattr(libsgutils2, f.__name__):
        return f

    def not_implemented(*args, **kwargs):
        raise NotImplementedError(f.__name__)

    not_implemented.__name__ = f.__name__
    return not_implemented
class Buffer(object):
    """Wrapper over a ctypes string buffer, usable directly as a C call
    argument via the ``_as_parameter_`` protocol."""

    def __init__(self, init, size=None):
        # int -> zero-filled buffer of that length; bytes -> NUL-terminated
        # copy; anything else is coerced to bytes and sized exactly (no NUL).
        if not isinstance(init, (six.integer_types, bytes)):
            init = bytes(init)
            size = len(init)
        self._buffer = ctypes.create_string_buffer(init, size)
        self._as_parameter_ = self._buffer

    def resize(self, size):
        # Grow the backing storage only when needed; the view below may
        # also shrink the visible window without touching storage.
        if size > ctypes.sizeof(self._buffer):
            ctypes.resize(self._buffer, size)
        # Re-window the storage to exactly `size` bytes (zero-copy view).
        # noinspection PyUnresolvedReferences
        self._as_parameter_ = (ctypes.c_char * size).from_buffer(self._buffer)
        return self

    def __len__(self):
        return len(self._as_parameter_)

    def __getitem__(self, item):
        return self._as_parameter_[item]

    def __setitem__(self, key, value):
        self._as_parameter_[key] = value

    def __iter__(self):
        return iter(self._as_parameter_)

    def __bytes__(self):
        return bytes(self._as_parameter_)
class AlignedBuffer(Buffer):
    """Buffer whose usable window starts at an address aligned to
    ``alignment`` bytes (required by some direct-I/O style C APIs)."""

    # Class-level default; None means "no alignment" (plain Buffer behavior).
    alignment = None

    # noinspection PySuperArguments
    def __init__(self, init, size=None, alignment=None):
        if alignment is None:
            alignment = self.alignment
        self._alignment = alignment
        if alignment is None:
            super(AlignedBuffer, self).__init__(init, size)
        else:
            # Normalize (init, size) the same way Buffer would.
            if isinstance(init, six.integer_types):
                size = init
                init = b''
            elif size is None:
                if isinstance(init, bytes):
                    init = init
                    size = len(init) + 1  # room for the trailing NUL
                else:
                    init = bytes(init)
                    size = len(init)
            # Over-allocate by `alignment` so an aligned window of `size`
            # bytes is guaranteed to fit somewhere inside the storage.
            super(AlignedBuffer, self).__init__(size + alignment)
            self._as_parameter_ = self._align(self._buffer, size, alignment)
            ctypes.memmove(self._as_parameter_, init, min(len(init), size))

    @staticmethod
    def _align(buffer, size, alignment):
        """Return a `size`-byte window of `buffer` starting at the next
        address that is a multiple of `alignment` (power of 2)."""
        if alignment & (alignment - 1) != 0:
            raise ValueError("Alignment must be a power of 2")
        address = ctypes.addressof(buffer)
        # Offset to the next aligned address (up to `alignment` bytes in).
        offset = alignment - (address & (alignment - 1))
        # noinspection PyUnresolvedReferences
        return (ctypes.c_char * size).from_buffer(buffer, offset)

    def resize(self, size):
        if self._alignment is None:
            return super(AlignedBuffer, self).resize(size)
        previous_aligned_buffer = self._as_parameter_
        previous_unaligned_buffer_address = ctypes.addressof(self._buffer)
        super(AlignedBuffer, self).resize(size + self._alignment)
        self._as_parameter_ = self._align(self._buffer, size, self._alignment)
        # ctypes.resize may have moved the storage; if the aligned window's
        # offset within the storage changed, the old contents must be moved
        # to the new window position.
        if ctypes.addressof(self._as_parameter_) != ctypes.addressof(previous_aligned_buffer):
            if ctypes.addressof(self._as_parameter_) - ctypes.addressof(self._buffer) != \
                    ctypes.addressof(previous_aligned_buffer) - previous_unaligned_buffer_address:
                ctypes.memmove(self._as_parameter_, previous_aligned_buffer, len(previous_aligned_buffer))
def _load(libsgutils2_path, libc_path):
    """Load the sgutils2 and libc shared libraries and declare the ctypes
    signatures for the symbols this module uses."""
    global libsgutils2, libc
    libsgutils2 = ctypes.CDLL(libsgutils2_path)
    libc = ctypes.CDLL(libc_path)

    def set_res_type(mod, fn, tp):
        # Tolerate symbols missing from older library versions.
        try:
            getattr(mod, fn).restype = tp
        except AttributeError:
            pass

    def set_arg_types(mod, fn, *tps):
        try:
            getattr(mod, fn).argtypes = tps
        except AttributeError:
            pass

    # Table-driven declaration of return types.
    for symbol, restype in (
        ("sg_lib_version", ctypes.c_char_p),
        ("sg_get_sense_key_str", ctypes.c_char_p),
        ("sg_get_sense_info_fld", ctypes.c_bool),
        ("sg_get_sense_filemark_eom_ili", ctypes.c_bool),
        ("sg_get_sense_progress_fld", ctypes.c_bool),
        ("sg_get_desig_type_str", ctypes.c_char_p),
        ("sg_get_desig_code_set_str", ctypes.c_char_p),
        ("sg_get_desig_assoc_str", ctypes.c_char_p),
        ("safe_strerror", ctypes.c_char_p),
        ("sg_is_big_endian", ctypes.c_bool),
        ("sg_get_llnum", ctypes.c_int64),
        ("scsi_pt_version", ctypes.c_char_p),
        ("construct_scsi_pt_obj", ctypes.c_void_p),
    ):
        set_res_type(libsgutils2, symbol, restype)
    set_arg_types(libsgutils2, "set_scsi_pt_tag", ctypes.c_void_p, ctypes.c_uint64)
    libc.strerror.restype = ctypes.c_char_p
    # Windows C runtimes expose _fdopen instead of fdopen.
    try:
        stdout = libc.fdopen(1, b'w')
        stderr = libc.fdopen(2, b'w')
    except AttributeError:
        stdout = libc._fdopen(1, b'w')
        stderr = libc._fdopen(2, b'w')
    # Unbuffer both streams so library output interleaves predictably.
    libc.setbuf(stdout, None)
    libc.setbuf(stderr, None)
def _autoload():
    """Import the platform-specific submodule (e.g. ``os.linux``) and load
    the shared-library paths it declares."""
    import sys
    import importlib

    osdep = importlib.import_module("{}.{}".format(os.__name__, sys.platform))
    _load(osdep.libsgutils2, osdep.libc)


_autoload()
|
import sqlite3
from flask import Flask
from flask_mongoalchemy import MongoAlchemy
from flask_bcrypt import Bcrypt
import sys
# Flask app wired to MongoDB via MongoAlchemy (player documents) and
# bcrypt for password-hash verification in authUser below.
app = Flask(__name__)
app.config['MONGOALCHEMY_DATABASE'] = 'repository'
db = MongoAlchemy(app)
bcrypt = Bcrypt(app)
class Player(db.Document):
    """MongoAlchemy Player class"""
    # Player name; used for lookups in addPlayer/removePlayer below.
    username = db.StringField()
def addPlayer(username):
    """Persist a new Player document; failures are printed and swallowed."""
    try:
        Player(username=username).save()
    except Exception as e:
        # Best-effort: report and continue rather than propagate.
        print(e)
def removePlayer(username):
    """Remove the first Player document matching ``username``; failures
    (including no match) are printed and swallowed."""
    try:
        doomed = Player.query.filter(Player.username == username).first()
        doomed.remove()
    except Exception as e:
        print(e)
def insertUser(username, password, email):
    """Insert one row into the sqlite ``users`` table (parameterized)."""
    print("create", file=sys.stderr)
    conn = sqlite3.connect("users.db")
    conn.cursor().execute(
        "INSERT INTO users (username, password, email) VALUES (?,?,?)",
        [username, password, email],
    )
    conn.commit()
    conn.close()
def retrieveUsers():
    """Return every (username, password, email) row from sqlite."""
    conn = sqlite3.connect("users.db")
    rows = conn.execute("SELECT username, password, email FROM users").fetchall()
    conn.close()
    return rows
def authUser(username, password):
    """Authenticate ``username`` against the bcrypt hash stored in sqlite.

    Fixes vs the original: uses a parameterized WHERE clause instead of
    scanning every row of ``SELECT *`` (whose positional indices row[1]/
    row[2] silently depended on column order), and stops printing stored
    credentials to stderr. The connection is always closed.

    Returns True on a matching username/password pair, else False.
    """
    print("auth", file=sys.stderr)
    conn = sqlite3.connect("users.db")
    try:
        cur = conn.cursor()
        cur.execute("SELECT password FROM users WHERE username=?", (username,))
        for (stored_hash,) in cur.fetchall():
            if bcrypt.check_password_hash(stored_hash, password):
                return True
        return False
    finally:
        conn.close()
class Solution(object):
    def isAnagram(self, s, t):
        """Return True when *t* is an anagram of *s* (same characters with
        the same multiplicities).

        Replaces two hand-rolled frequency dicts plus a two-way
        cross-check with a single multiset comparison.
        """
        from collections import Counter
        return Counter(s) == Counter(t)
# Quick smoke checks for isAnagram.
print(Solution().isAnagram('anagram', 'nagaram'))  # expected: True
print(Solution().isAnagram('rat', 'car'))  # expected: False
|
"""Analyse an abstract argumentation framework and output arguments belonging to the grounded extension."""
from argsolverdd.common.misc import parse_cmd_args
from argsolverdd.abstract.ground_extensor import Extensor
# Parse CLI args, build the framework from the given file, compute the
# grounded extension, and print the IN-labelled arguments in sorted order.
pa = parse_cmd_args()
ext = Extensor.from_file(pa.fname, verbose=pa.verbose)
ext.ground()
print(sorted(ext.label_ins))
|
#!/usr/bin/env python
# coding: utf-8
import os
import sys
import inspect
import tempfile
import importlib
import importlib.util
from pkgutil import walk_packages
from dataset_explorer.io import DataFile, FileType
from dataset_explorer.utils import getPluginsPath
from dataset_explorer.plugins import BasePlugin, AudioPlugin, ImagePlugin
from dataset_explorer.plugins.exceptions import ProcessError, OutputFileNotFound, InstantiationError
class PluginManager(object):
    """Discovers dataset-explorer plugins and applies them to data files,
    caching results per (file, parameters) hash."""

    pluginDirectory = os.path.expanduser("~/.DatasetExplorer/plugins")
    # Abstract bases that must not be instantiated during discovery.
    baseClasses = [BasePlugin, AudioPlugin, ImagePlugin]

    def __init__(self):
        sys.path.append(self.pluginDirectory)
        self.plugins = self._discoverPlugins()
        # Directory served at /plugins/static for processed outputs.
        self.staticDirectory = tempfile.mkdtemp()
        self.processedFiles = dict()

    def applyPlugin(self, pluginName, dataFile, params):
        """Run the named plugin on ``dataFile`` with ``params``; results
        are cached and re-served on identical requests."""
        plugin = self.plugins[pluginName]
        fileHash = plugin.getFileHash(dataFile, params)
        if fileHash in self.processedFiles:
            return self.processedFiles.get(fileHash)
        outFilename = self.getTempFile(plugin)
        plugin.setParameterValues(params)
        try:
            plugin(dataFile.path, outFilename)
        except Exception as e:
            raise ProcessError("Error in plugin {} during processing".format(pluginName), e)
        if not os.path.exists(outFilename):
            raise OutputFileNotFound("Output file not written by plugin {}".format(pluginName))
        dataFile = DataFile.fromPath(outFilename, name=plugin.name, urlPrefix="/plugins/static")
        self.processedFiles[fileHash] = dataFile
        return dataFile

    def getTempFile(self, plugin):
        """Reserve a unique output path for a plugin run.

        Bug fix: the previous NamedTemporaryFile was deleted whenever the
        object was garbage-collected, which is interpreter-dependent and
        racy. mkstemp + explicit remove is deterministic: the path is
        unique, does not exist when returned, so the os.path.exists check
        in applyPlugin stays meaningful.
        """
        fd, tempPath = tempfile.mkstemp(suffix=plugin.outExtension, dir=self.staticDirectory)
        os.close(fd)
        os.remove(tempPath)
        return tempPath

    def getAvailablePlugins(self):
        """JSON descriptions of every discovered plugin."""
        return [plugin.toJson() for name, plugin in self.plugins.items()]

    def getAvailablePluginsByInType(self, inType: FileType):
        """JSON descriptions of plugins accepting the given input type."""
        return [plugin.toJson() for name, plugin in self.plugins.items() if plugin.inType == inType]

    def _discoverPlugins(self):
        """Walk local, user, and extra plugin paths; instantiate every
        concrete BasePlugin subclass found in non-package modules."""
        plugins = dict()
        localPath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
        extraPaths = getPluginsPath()
        for path in extraPaths:
            sys.path.append(path)
        for fileFinder, name, isPkg in walk_packages([localPath, self.pluginDirectory] + extraPaths):
            if isPkg:
                continue
            module = importlib.import_module(name)
            for content in dir(module):
                cls = getattr(sys.modules[name], content)
                if inspect.isclass(cls) and cls not in self.baseClasses and issubclass(cls, BasePlugin):
                    try:
                        plugins[cls.__name__] = cls()
                    except TypeError as e:
                        raise InstantiationError("Unable to instantiate plugin {}, makes sure that it doesn't have constructor arguments".format(cls.__name__), e)
        return plugins
|
"""Treadmill master process.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
_LOGGER = logging.getLogger(__name__)
class ObjectNotFoundError(Exception):
    """Storage exception raised if object is not found."""
    pass


class Backend(object):
    """Master storage interface (no-op base implementation)."""

    def __init__(self):
        pass

    def list(self, _path):
        """Return path listing."""
        return []

    def get(self, _path):
        """Return stored object given path."""
        return None

    def get_with_metadata(self, _path):
        """Return stored object with metadata."""
        return None, None

    def get_default(self, path, default=None):
        """Return stored object given path, ``default`` if not found.

        Bug fix: the result of ``self.get`` was previously discarded, so
        this always returned None even when the object existed.
        """
        try:
            return self.get(path)
        except ObjectNotFoundError:
            return default

    def put(self, _path, _value):
        """Store object at a given path."""
        pass

    def exists(self, _path):
        """Check if object exists."""
        pass

    def ensure_exists(self, _path):
        """Ensure storage path exists."""
        pass

    def delete(self, _path):
        """Delete object given the path."""
        pass

    def update(self, _path, _data, check_content=False):
        """Set data into ZK node."""
        pass
|
# Shopping basket: each item carries a comma-separated allergen list.
koszyk = [
    {'produkt': 'mleko smak orzechowy', 'ilosc': 1, 'cena': 1.5, 'alergeny': 'laktoza,orzeszki'},
    {'produkt': 'czekolada', 'ilosc': 2, 'cena': 2.5, 'alergeny': 'orzeszki'},
    {'produkt': 'jogurt', 'ilosc': 4, 'cena': 3.5, "alergeny": 'laktoza'}
]

# Classify every product by its allergens.
# Bug fix: the original indexed koszyk with an undefined `i` and looped
# over an undefined `variable`; iterate the basket directly instead.
for poz in koszyk:
    alergeny = poz['alergeny'].split(',')
    if 'laktoza' in alergeny and 'orzeszki' in alergeny:
        print('bingo, dzwon do szpitlaa')
    elif 'laktoza' in alergeny:
        print('lala')
    elif 'orzeszki' in alergeny:
        print('wwwwwwwwwwww')
    else:
        print('bezpieczne jedzenie')
|
import re
def first_crt_uppercase(string):
    """Return the match object for a leading uppercase ASCII letter,
    or False when the string does not start with one."""
    return re.match(r'[A-Z]', string) or False
def last_crt_number(string):
    """Return the match object covering the string up to its last digit,
    or False when there is no digit at all."""
    return re.match(r'.*\d', string) or False
def is_there_only_love(string):
    """Return the match object when the string contains only the letters
    l, o, v, e (the empty string qualifies); otherwise False."""
    return re.match(r'[love]*$', string) or False
if __name__ == '__main__':
    # Fixed: Python-2-only print statements; print(...) with a single
    # argument behaves identically on Python 2 and 3.
    print(first_crt_uppercase('Aloha'))
    print(last_crt_number('sring 3'))
    print(is_there_only_love('lllooove'))
|
import cv2
import logging
import numpy as np
class CutPhoto(object):
    """Detects faces in an image file and crops each one to a PNG file."""

    def __init__(self, file_path):
        # Source-image path; also used as the prefix of output file names.
        self.file_path = file_path

    def cv_imread(self):
        """
        Read the image, working around cv2.imread's inability to handle
        non-ASCII (e.g. Chinese) paths by decoding the raw file bytes.
        :return: the decoded image
        """
        cv_img = cv2.imdecode(np.fromfile(self.file_path, dtype=np.uint8), -1)  # may decode single-channel images, unlike cv2.imread
        return cv_img  # detection below therefore runs directly, without a grayscale conversion

    def detect_face(self):
        """
        Detect faces with the Haar frontal-face cascade.
        :return: image: the decoded image
                 faces: list of face rectangles as [x, y, w, h]
        """
        logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s: %(message)s')
        logger = logging.getLogger(__name__)
        logger.info('Reading image...')
        image = self.cv_imread()  # handles non-ASCII paths
        logger.info('Detect faces...')
        # XML model shipped with the matching OpenCV release.
        face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
        faces = face_cascade.detectMultiScale(image, scaleFactor=1.15, minNeighbors=10, minSize=(3, 3))  # tuned parameters
        search_info = "Find %d face." % len(faces) if len(faces) <= 1 else "Find %d faces." % len(faces)
        logger.info(search_info)
        return image, faces

    def cut_photo(self):
        """
        Crop every detected face (with a 50% margin) and save it as
        ``<file_path>-photoN.png``.
        :return: list of the file names written
        """
        image, faces = self.detect_face()
        file_name_list = []
        if len(faces) > 0:
            faces_number = 0
            for faceRect in faces:
                x, y, w, h = faceRect
                # Expand the detection box by half its size on every side.
                # NOTE(review): x1/y1 can go negative near image edges, in
                # which case numpy's negative indexing wraps — confirm intended.
                x1, y1 = x - int(0.5 * w), y - int(0.5 * h)
                x2, y2 = x + int(1.5 * w), y + int(1.5 * h)
                photo = image[y1:y2, x1:x2]
                out_name = self.file_path + '-' + 'photo' + str(faces_number) + '.png'
                status = cv2.imwrite(out_name, photo)
                file_name_list.append(out_name)
                # Bug fix: faces_number was never incremented, so multiple
                # faces all overwrote the same photo0 file.
                faces_number += 1
                if status:
                    print('Save succeed')
        else:
            print('Cut 0 photo')
        return file_name_list
if __name__ == '__main__':
    # Batch-crop faces from the 15 test images.
    for idx in range(1, 16):
        CutPhoto('data/data_test/' + str(idx) + '.png').cut_photo()
# # 读取图像,解决imread不能读取中文路径的问题
# def cv_imread(file_path):
# cv_img = cv2.imdecode(np.fromfile(file_path, dtype=np.uint8), -1) # 会出现黑白照片 没有RGB三个通道 与cv2.imread不同
# return cv_img # 所以后面采用直接检测 不转灰度
#
#
# def detect_face(img_path):
# # 日志
# logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s: %(message)s')
# logger = logging.getLogger(__name__)
# logger.info('Reading image...')
# image = cv_imread(img_path) # 调用函数 以读取中文路径
# # gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) # 不考虑灰度 直接判断
# logger.info('Detect faces...')
# face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml') # 在opencv官网中找对应版本的xml文件
# faces = face_cascade.detectMultiScale(image, scaleFactor=1.15, minNeighbors=5, minSize=(3, 3)) # 参数调整
# search_info = "Find %d face." % len(faces) if len(faces) <= 1 else "Find %d faces." % len(faces)
# logger.info(search_info)
#
# return image, faces
#
#
# image, faces = detect_face('5.png')
# if len(faces) > 0:
# for faceRect in faces:
# x, y, w, h = faceRect
# x1, y1 = x - int(0.15 * w), y - int(0.3 * h)
# x2, y2 = x + int(1.15 * w), y + int(1.3 * h)
# # cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2, 8, 0)
# photo = image[y1:y2, x1:x2]
#
# cv2.imshow("img", photo)
# cv2.waitKey(0)
# print(faces)
|
class TimeMap(object):
    """Key -> time-ordered value store: set() records values at strictly
    increasing timestamps; get() retrieves the newest value at or before
    a timestamp."""

    def __init__(self):
        # One append-only, timestamp-ordered ImageStore per key.
        self.m = dict()

    def set(self, key, value, timestamp):
        """
        :type key: str
        :type value: str
        :type timestamp: int
        :rtype: None
        """
        entry = Image(value, timestamp)
        store = self.m.get(key)
        if store is None:
            self.m[key] = ImageStore(entry)
        else:
            store.append(entry)

    def get(self, key, timestamp):
        """
        :type key: str
        :type timestamp: int
        :rtype: str  -- "" when key is unknown or timestamp predates all values
        """
        store = self.m.get(key)
        if store is None:
            return ""
        found = store.search(timestamp)
        return "" if found is None else found.val
class Image(object):
    """A value tagged with the timestamp at which it was stored."""

    def __init__(self, val, ts):
        self.val = val
        self.ts = ts

    def __repr__(self):
        # %d keeps the original integer rendering of the timestamp.
        return "Image[val: %s, ts: %d]" % (self.val, self.ts)
class ImageStore(object):
    """Append-only store of timestamp-ordered items supporting a
    floor-style search (greatest timestamp <= query)."""

    def __init__(self, initial_image):
        self.images = [initial_image]
        self.last_ts = initial_image.ts
        self.first_ts = initial_image.ts

    def append(self, image):
        """Append an item; timestamps must be strictly increasing."""
        if image.ts <= self.last_ts:
            raise ValueError("timestamps must be strictly increasing")
        self.images.append(image)
        self.last_ts = image.ts

    def search(self, ts):
        """Return the item with the greatest timestamp <= ts, or None."""
        if len(self.images) == 0:
            return None
        if ts >= self.last_ts:
            return self.images[-1]
        if ts < self.first_ts:
            return None
        if ts == self.first_ts:
            return self.images[0]
        # Binary search for an exact timestamp match.
        l, r = 0, len(self.images) - 1
        while l <= r:
            # Bug fix: '/' is float division on Python 3 and made
            # self.images[m] raise TypeError; use floor division.
            m = (l + r) // 2
            m_ts = self.images[m].ts
            if ts == m_ts:
                return self.images[m]
            if ts < m_ts:
                r = m - 1
            else:
                l = m + 1
        # No exact match: pick the greatest element still <= ts among the
        # final probe position and its neighbours.
        for idx in (m + 1, m, m - 1):
            if 0 <= idx < len(self.images) and self.images[idx].ts <= ts:
                return self.images[idx]
        return None
# Your TimeMap object will be instantiated and called as such:
# obj = TimeMap()
# obj.set(key,value,timestamp)
# param_2 = obj.get(key,timestamp) |
import numpy as np
def find_best_neighbor_diag(m, r, c):
    """
    find the best neighbor using Moore Neighborhood definition, which contents the diagonal position

    Cleanup: the original also built a `neighbors` list on every call that
    was never read; it has been removed.

    Parameters
    ----------
    m : array shape(rows, cols)
        utility map
    r : int
        row of cell which need to find the best neighbor
    c : int
        col of cell which need to find the best neighbor

    Returns
    -------
    tuple:
        the position of neighbor with smallest util around cell@(r,c) based on UtilMap m
        (ties broken by the last cell visited, row-major over the 3x3 window)
    """
    best_n = (r, c)
    min_u = np.inf
    for i in range(-1, 2):
        new_r = r + i
        if not (0 <= new_r <= len(m) - 1):
            continue
        for j in range(-1, 2):
            new_c = c + j
            if 0 <= new_c <= len(m[0]) - 1 and (new_r, new_c) != (r, c):
                if m[new_r, new_c] <= min_u:
                    best_n = (new_r, new_c)
                    min_u = m[new_r, new_c]
    return best_n
def find_best_neighbor_v_h(m, r, c):
    """
    find the best neighbor using Von Neumann neighborhood definition, which contents only vertically
    and horizontally position

    Parameters
    ----------
    m : array shape(rows, cols)
        utility map
    r : int
        row of cell which need to find the best neighbor
    c : int
        col of cell which need to find the best neighbor

    Returns
    -------
    tuple:
        the position of neighbor with smallest util around cell@(r,c) based on UtilMap m
        (the centre cell itself also competes; ties go to the last visited)
    """
    best_n = (r, c)
    min_u = np.inf
    n_rows, n_cols = m.shape
    for new_c in range(max(0, c - 1), min(c + 1, n_cols - 1) + 1):
        for new_r in range(max(0, r - 1), min(r + 1, n_rows - 1) + 1):
            # Keep the 4-connected cells and the centre; skip diagonals.
            if (abs(new_r - r) != abs(new_c - c)) or (new_r == r and new_c == c):
                if m[new_r, new_c] <= min_u:
                    best_n = (new_r, new_c)
                    min_u = m[new_r, new_c]
    return best_n
def find_best_neighbor_total(m, r, c):
    """
    find the best neighbor in a timestep for 2x speed pedestrian, which search in a wide neighbors

    Candidates are the full (clipped) 3x3 window around (r, c), the centre
    included, plus the four cells exactly two steps away vertically or
    horizontally.

    Parameters
    ----------
    m : array shape(rows, cols)
        utility map
    r : int
        row of cell which need to find the best neighbor
    c : int
        col of cell which need to find the best neighbor

    Returns
    -------
    tuple:
        the position of neighbor with smallest util around cell@(r,c) based on UtilMap m
    """
    best_n = (r, c)
    min_u = np.inf
    n_rows, n_cols = m.shape
    for new_c in range(max(0, c - 1), min(c + 1, n_cols - 1) + 1):
        for new_r in range(max(0, r - 1), min(r + 1, n_rows - 1) + 1):
            if m[new_r, new_c] <= min_u:
                best_n = (new_r, new_c)
                min_u = m[new_r, new_c]
    for step in (-2, 2):
        new_r = r + step
        if 0 <= new_r < n_rows and m[new_r, c] <= min_u:
            best_n = (new_r, c)
            min_u = m[new_r, c]
    for step in (-2, 2):
        new_c = c + step
        if 0 <= new_c < n_cols and m[r, new_c] <= min_u:
            best_n = (r, new_c)
            min_u = m[r, new_c]
    return best_n
def find_best_neighbor_total_v3(m, r, c):
    """
    find the best neighbor in a timestep for 3x speed pedestrian, which search in a wide neighbors

    Bug fixes vs the original: the 5x5-window bounds were tested once for
    the whole window (so cells near an edge skipped every candidate), and
    the row check used ``>`` instead of ``>=``, which allowed an
    out-of-range index. Bounds are now clamped per cell.

    Parameters
    ----------
    m : array shape(rows, cols)
        utility map
    r : int
        row of cell which need to find the best neighbor
    c : int
        col of cell which need to find the best neighbor

    Returns
    -------
    tuple:
        the position of neighbor with smallest util around cell@(r,c) based on UtilMap m
    """
    best_n = (r, c)
    min_u = np.inf
    r_map, c_map = m.shape
    for new_c in range(max(0, c - 2), min(c_map, c + 3)):
        for new_r in range(max(0, r - 2), min(r_map, r + 3)):
            # The four far corners of the 5x5 window are unreachable.
            if abs(new_r - r) == 2 and abs(new_c - c) == 2:
                continue
            if m[new_r, new_c] <= min_u:
                best_n = (new_r, new_c)
                min_u = m[new_r, new_c]
    for new_r in (r - 3, r + 3):
        if 0 <= new_r < r_map and m[new_r, c] <= min_u:
            best_n = (new_r, c)
            min_u = m[new_r, c]
    for new_c in (c - 3, c + 3):
        if 0 <= new_c < c_map and m[r, new_c] <= min_u:
            best_n = (r, new_c)
            min_u = m[r, new_c]
    return best_n
from collections import OrderedDict

# Tally item quantities from stdin, preserving first-seen order.
# Fixes: the import line was corrupted ("m collections import ...") and the
# accumulator shadowed the builtin `dict`.
counts = OrderedDict()
n = int(input())
lines = [input().rpartition(' ') for _ in range(n)]
for item, _space, quant in lines:
    counts[item] = counts[item] + int(quant) if item in counts else int(quant)
print("\n".join(" ".join([key, str(value)]) for key, value in counts.items()))
|
from django.contrib import admin
from .models import Question, Score, Prize
@admin.register(Question)
class QuestionAdmin(admin.ModelAdmin):
    # Admin listing for quiz questions, searchable by question text.
    list_display = ('id', 'question', 'correct_answer',)
    search_fields = ['question']
    list_per_page = 15
@admin.register(Score)
class ScoreAdmin(admin.ModelAdmin):
    # Admin listing for per-user scores. Renamed from a duplicate
    # "QuestionAdmin" that shadowed the class registered for Question.
    list_display = ('username', 'score',)
@admin.register(Prize)
class PrizeAdmin(admin.ModelAdmin):
    # Admin listing for prizes. Renamed from a duplicate "QuestionAdmin"
    # that shadowed the other admin classes in this module.
    list_display = ('amount', 'id',)
|
import glfw
from OpenGL.GL import *
import OpenGL.GL.shaders
import numpy as np
import glm
import math
from PIL import Image
from Carro import *
from Arvore import *
from Chessboard import *
from Casa import *
from Placa import *
from Table import *
from Sword import *
from Chao import *
from Ceu import *
from Luz_Interna import *
from Luz_Externa import *
# Window parameters
polygonal_mode = False
altura = 1600
largura = 1200
# Mouse-look state (first-frame flag, Euler angles, last cursor position)
firstMouse = True
yaw = -90.0
pitch = 0.0
lastX = largura/2
lastY = altura/2
# Initial camera placed inside the house
cameraPos = glm.vec3(3.0, 4.0, -25.0)
cameraFront = glm.vec3(0.0, 0.0, -1.0)
cameraUp = glm.vec3(0.0, 1.0, 0.0)
# Projection-matrix parameters
fov = 85
near = 0.1
far = 65
stop = False
# Light intensities; check_limite_luz() keeps both inside [0, 1]
luz_ambiente_externo_intencidade = 0.3
luz_interno_intencidade = 1.0
'''
Esta funcao faz a verificao se a intencidade da luz
interna e externa esta no intervalo [0,1]
nao permitindo que ultrapasse estes intervalos
.
'''
def check_limite_luz():
    """Clamp the internal and external light intensities to [0, 1]."""
    global luz_ambiente_externo_intencidade
    global luz_interno_intencidade
    luz_ambiente_externo_intencidade = min(1.0, max(0.0, luz_ambiente_externo_intencidade))
    luz_interno_intencidade = min(1.0, max(0.0, luz_interno_intencidade))
'''
Esta funcao faz a verificao se a camera
ainda se mantem dentro dos limites do mundo,
nao permitindo que a camera ultrapasse os limites.
'''
def check_colision_camera():
    """Clamp the camera position to the world's bounding box so the
    camera cannot leave the scene."""
    # Per-axis position limits.
    MAX_X, MAX_Y, MAX_Z = 48, 48, 48
    MIN_X, MIN_Y, MIN_Z = -48, 2, -48
    global cameraPos
    cameraPos.x = min(MAX_X, max(MIN_X, cameraPos.x))
    cameraPos.y = min(MAX_Y, max(MIN_Y, cameraPos.y))
    cameraPos.z = min(MAX_Z, max(MIN_Z, cameraPos.z))
'''
Esta funcao verifica se os parametros da matriz de
projecao estao dentro limites estipulados.
'''
def check_projection_camera():
    """Clamp the projection parameters (fov, near, far) to their
    allowed ranges."""
    global fov, near, far
    # Minimum and maximum values for each parameter.
    MAX_FOV, MAX_NEAR, MAX_FAR = 180, 200, 200
    MIN_FOV, MIN_NEAR, MIN_FAR = 1, 0.1, 0.1
    fov = min(MAX_FOV, max(MIN_FOV, fov))
    near = min(MAX_NEAR, max(MIN_NEAR, near))
    far = min(MAX_FAR, max(MIN_FAR, far))
'''
Funcao de evento do teclado
'''
def key_event(window,key,scancode,action,mods):
    """GLFW keyboard callback.

    W/A/S/D move the camera, Z/X and C/V and F/G adjust near/far/fov,
    U/I and K/L adjust the outdoor/indoor light intensities, P toggles
    wireframe mode and Q requests shutdown.  Key codes are raw GLFW
    keycodes; action 1 is GLFW_PRESS and 2 is GLFW_REPEAT, so held keys
    keep firing.  All state changes are clamped at the end.
    """
    global cameraPos, cameraFront, cameraUp, polygonal_mode
    global fov, near, far
    global stop
    global luz_interno_intencidade
    global luz_ambiente_externo_intencidade
    cameraSpeed = 0.2
    # camera movement
    if key == 87 and (action==1 or action==2): # W key: move forward
        cameraPos += cameraSpeed * cameraFront
    if key == 83 and (action==1 or action==2): # S key: move backward
        cameraPos -= cameraSpeed * cameraFront
    if key == 65 and (action==1 or action==2): # A key: strafe left along the camera's right vector
        cameraPos -= glm.normalize(glm.cross(cameraFront, cameraUp)) * cameraSpeed
    if key == 68 and (action==1 or action==2): # D key: strafe right
        cameraPos += glm.normalize(glm.cross(cameraFront, cameraUp)) * cameraSpeed
    # toggle wireframe rendering with P (the else keeps a single press from toggling twice)
    if key == 80 and action==1 and polygonal_mode==True:
        polygonal_mode=False
    else:
        if key == 80 and action==1 and polygonal_mode==False:
            polygonal_mode=True
    # projection-matrix parameters
    if key == 90 and (action==1 or action==2): # Z key
        near -= cameraSpeed
    if key == 88 and (action==1 or action==2): # X key
        near += cameraSpeed
    if key == 67 and (action==1 or action==2): # C key
        far -= cameraSpeed
    if key == 86 and (action==1 or action==2): # V key
        far += cameraSpeed
    if key == 70 and (action==1 or action==2): # F key
        fov -= cameraSpeed
    if key == 71 and (action==1 or action==2): # G key
        fov += cameraSpeed
    intencidade_speed = 0.02
    if key == 85 and (action==1 or action==2): # U key: dim outdoor ambient light
        luz_ambiente_externo_intencidade -= intencidade_speed
    if key == 73 and (action==1 or action==2): # I key: brighten outdoor ambient light
        luz_ambiente_externo_intencidade += intencidade_speed
    if key == 75 and (action==1 or action==2): # K key: dim indoor light
        luz_interno_intencidade -= intencidade_speed
    if key == 76 and (action==1 or action==2): # L key: brighten indoor light
        luz_interno_intencidade += intencidade_speed
    # Q key: ask the render loop to exit
    if key == 81 and action == 1:
        stop = True
    # clamp everything the handlers above may have pushed out of range
    check_limite_luz()
    check_colision_camera()
    check_projection_camera()
'''
Funcao de evento do mouse
'''
def mouse_event(window, xpos, ypos):
    """GLFW cursor-position callback: converts mouse motion into yaw/pitch
    deltas and rebuilds the camera front vector from them."""
    global firstMouse, cameraFront, yaw, pitch, lastX, lastY
    if firstMouse:
        # the very first event only seeds the reference position,
        # otherwise the camera would jump to the cursor
        lastX = xpos
        lastY = ypos
        firstMouse = False
    xoffset = xpos - lastX
    yoffset = lastY - ypos  # inverted because screen y grows downwards
    lastX = xpos
    lastY = ypos
    sensitivity = 0.3
    xoffset *= sensitivity
    yoffset *= sensitivity
    yaw += xoffset
    pitch += yoffset
    # clamp to 89.9: at pitch = 90 cameraFront would be linearly dependent
    # with cameraUp, which is not allowed and breaks the view computation
    if pitch >= 89.9: pitch = 89.9
    if pitch <= -89.9: pitch = -89.9
    # spherical-to-cartesian conversion of (yaw, pitch) into a unit front vector
    front = glm.vec3()
    front.x = math.cos(glm.radians(yaw)) * math.cos(glm.radians(pitch))
    front.y = math.sin(glm.radians(pitch))
    front.z = math.sin(glm.radians(yaw)) * math.cos(glm.radians(pitch))
    cameraFront = glm.normalize(front)
'''
fluxo principal do programa
'''
def main():
    """Program entry point: create the GLFW window, compile/link the Phong
    shader pair, build all scene objects, upload geometry to the GPU and run
    the render loop until the window closes or Q sets ``stop``.

    NOTE(review): ``view(...)`` and ``projection(...)`` used near the bottom
    are not defined here — presumably provided by one of the star imports;
    verify against the project modules.
    """
    # bind the module-level state this function reads and updates
    global altura, largura
    global cameraPos, cameraFront, cameraUp
    global fov, near, far
    global luz_ambiente_externo_intencidade
    global luz_interno_intencidade
    glfw.init()
    glfw.window_hint(glfw.VISIBLE, glfw.FALSE)  # keep the window hidden until setup finishes
    window = glfw.create_window(largura, altura, "Malhas e Texturas", None, None)
    glfw.make_context_current(window)
    # vertex shader: applies model/view/projection and forwards texture
    # coordinates, world-space position and normal to the fragment stage
    vertex_code = """
        attribute vec3 position;
        attribute vec2 texture_coord;
        attribute vec3 normals;
        varying vec2 out_texture;
        varying vec3 out_fragPos;
        varying vec3 out_normal;
        uniform mat4 model;
        uniform mat4 view;
        uniform mat4 projection;
        void main(){
            gl_Position = projection * view * model * vec4(position,1.0);
            out_texture = vec2(texture_coord);
            out_fragPos = vec3( model * vec4(position, 1.0));
            out_normal = vec3( model *vec4(normals, 1.0));
        }
    """
    # fragment shader: Phong model (ambient + diffuse + specular) modulated by the sampled texture
    fragment_code = """
        // parametro com a cor da(s) fonte(s) de iluminacao
        uniform vec3 lightPos; // define coordenadas de posicao da luz
        uniform vec3 lightAmbiente;
        uniform vec3 lightIncidente;
        // parametros da iluminacao ambiente e difusa
        uniform vec3 ka; // coeficiente de reflexao ambiente
        uniform vec3 kd; // coeficiente de reflexao difusa
        // parametros da iluminacao especular
        uniform vec3 viewPos; // define coordenadas com a posicao da camera/observador
        uniform vec3 ks; // coeficiente de reflexao especular
        uniform float ns; // expoente de reflexao especular
        // parametros recebidos do vertex shader
        varying vec2 out_texture; // recebido do vertex shader
        varying vec3 out_normal; // recebido do vertex shader
        varying vec3 out_fragPos; // recebido do vertex shader
        uniform sampler2D samplerTexture;
        void main(){
            // calculando reflexao ambiente
            vec3 ambient = ka * lightAmbiente;
            // calculando reflexao difusa
            vec3 norm = normalize(out_normal); // normaliza vetores perpendiculares
            vec3 lightDir = normalize(lightPos - out_fragPos); // direcao da luz
            float diff = max(dot(norm, lightDir), 0.0); // verifica limite angular (entre 0 e 90)
            vec3 diffuse = kd * diff * lightIncidente; // iluminacao difusa
            // calculando reflexao especular
            vec3 viewDir = normalize(viewPos - out_fragPos); // direcao do observador/camera
            vec3 reflectDir = normalize(reflect(-lightDir, norm)); // direcao da reflexao
            float spec = pow(max(dot(viewDir, reflectDir), 0.0), ns);
            vec3 specular = ks * spec * lightIncidente;
            // aplicando o modelo de iluminacao
            vec4 texture = texture2D(samplerTexture, out_texture);
            vec4 result = vec4((ambient + diffuse + specular),1.0)*texture; // aplica iluminacao
            gl_FragColor = result;
        }
    """
    # Request a program and shader slots from GPU
    program = glCreateProgram()
    vertex = glCreateShader(GL_VERTEX_SHADER)
    fragment = glCreateShader(GL_FRAGMENT_SHADER)
    # Set shaders source
    glShaderSource(vertex, vertex_code)
    glShaderSource(fragment, fragment_code)
    # Compile shaders, surfacing the driver's info log on failure
    glCompileShader(vertex)
    if not glGetShaderiv(vertex, GL_COMPILE_STATUS):
        error = glGetShaderInfoLog(vertex).decode()
        print(error)
        raise RuntimeError("Erro de compilacao do Vertex Shader")
    glCompileShader(fragment)
    if not glGetShaderiv(fragment, GL_COMPILE_STATUS):
        error = glGetShaderInfoLog(fragment).decode()
        print(error)
        raise RuntimeError("Erro de compilacao do Fragment Shader")
    # Attach shader objects to the program
    glAttachShader(program, vertex)
    glAttachShader(program, fragment)
    # Build program
    glLinkProgram(program)
    if not glGetProgramiv(program, GL_LINK_STATUS):
        print(glGetProgramInfoLog(program))
        raise RuntimeError('Linking error')
    # Make program the default program
    glUseProgram(program)
    glHint(GL_LINE_SMOOTH_HINT, GL_DONT_CARE)
    glEnable(GL_BLEND)
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
    glEnable(GL_LINE_SMOOTH)
    glEnable(GL_TEXTURE_2D)
    qtd_texturas = 10
    texturas = glGenTextures(qtd_texturas)
    # CPU-side geometry accumulators; each cria_* builder below appends its
    # vertices/uvs/normals and advances the shared free-texture-id counter
    vertices_list = []
    textures_coord_list = []
    normals_list = []
    id_tex_livre = [0]  # 1-element list so builders can mutate the next free texture id
    #======================= CREATE THE SCENE OBJECTS ========================
    chao = cria_chao(id_tex_livre, vertices_list, textures_coord_list, normals_list)
    carro = cria_carro(id_tex_livre, vertices_list, textures_coord_list, normals_list)
    casa = cria_casa(id_tex_livre, vertices_list, textures_coord_list, normals_list)
    placa = cria_placa(id_tex_livre, vertices_list, textures_coord_list, normals_list)
    arvore = cria_arvore(id_tex_livre, vertices_list, textures_coord_list, normals_list)
    mesa = cria_table(id_tex_livre, vertices_list, textures_coord_list, normals_list)
    tabuleiro = cria_chessboard(id_tex_livre, vertices_list, textures_coord_list, normals_list)
    espada = cria_sword(id_tex_livre, vertices_list, textures_coord_list, normals_list)
    ceu = cria_ceu(id_tex_livre, vertices_list, textures_coord_list, normals_list)
    luz_interna = cria_luz_interna(id_tex_livre, vertices_list, textures_coord_list, normals_list)
    luz_externa = cria_luz_externa(id_tex_livre, vertices_list, textures_coord_list, normals_list)
    #=========================================================================
    # Request a buffer slot from GPU: positions, uvs, normals
    buffer = glGenBuffers(3)
    vertices = np.zeros(len(vertices_list), [("position", np.float32, 3)])
    vertices['position'] = vertices_list
    # Upload vertex positions
    glBindBuffer(GL_ARRAY_BUFFER, buffer[0])
    glBufferData(GL_ARRAY_BUFFER, vertices.nbytes, vertices, GL_STATIC_DRAW)
    stride = vertices.strides[0]
    offset = ctypes.c_void_p(0)
    loc_vertices = glGetAttribLocation(program, "position")
    glEnableVertexAttribArray(loc_vertices)
    glVertexAttribPointer(loc_vertices, 3, GL_FLOAT, False, stride, offset)
    textures = np.zeros(len(textures_coord_list), [("position", np.float32, 2)]) # two coordinates (u, v)
    textures['position'] = textures_coord_list
    # Upload texture coordinates
    glBindBuffer(GL_ARRAY_BUFFER, buffer[1])
    glBufferData(GL_ARRAY_BUFFER, textures.nbytes, textures, GL_STATIC_DRAW)
    stride = textures.strides[0]
    offset = ctypes.c_void_p(0)
    loc_texture_coord = glGetAttribLocation(program, "texture_coord")
    glEnableVertexAttribArray(loc_texture_coord)
    glVertexAttribPointer(loc_texture_coord, 2, GL_FLOAT, False, stride, offset)
    normals = np.zeros(len(normals_list), [("position", np.float32, 3)]) # three coordinates per normal
    normals['position'] = normals_list
    # Upload per-vertex normals
    glBindBuffer(GL_ARRAY_BUFFER, buffer[2])
    glBufferData(GL_ARRAY_BUFFER, normals.nbytes, normals, GL_STATIC_DRAW)
    stride = normals.strides[0]
    offset = ctypes.c_void_p(0)
    loc_normals_coord = glGetAttribLocation(program, "normals")
    glEnableVertexAttribArray(loc_normals_coord)
    glVertexAttribPointer(loc_normals_coord, 3, GL_FLOAT, False, stride, offset)
    # hook up input callbacks and reveal the window
    glfw.set_key_callback(window,key_event)
    glfw.set_cursor_pos_callback(window, mouse_event)
    glfw.show_window(window)
    glfw.set_cursor_pos(window, lastX, lastY)
    glEnable(GL_DEPTH_TEST) ### essential for correct 3D occlusion
    # car movement control: it bounces between min and max along its track
    min_pos_carro = -450
    max_pos_carro = 450
    speed_carro = 1
    pos_carro = 0
    # external light colour components (white); ambient part is later scaled by intensity
    lightAmbExt = np.zeros((3,))
    lightAmbExt[0] = 1.0
    lightAmbExt[1] = 1.0
    lightAmbExt[2] = 1.0
    lightIncExt = np.zeros((3,))
    lightIncExt[0] = 1.0
    lightIncExt[1] = 1.0
    lightIncExt[2] = 1.0
    # internal light colour components (white)
    lightAmbInt = np.zeros((3,))
    lightAmbInt[0] = 1.0
    lightAmbInt[1] = 1.0
    lightAmbInt[2] = 1.0
    lightIncInt = np.zeros((3,))
    lightIncInt[0] = 1.0
    lightIncInt[1] = 1.0
    lightIncInt[2] = 1.0
    while not glfw.window_should_close(window):
        glfw.poll_events()
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
        glClearColor(1.0, 1.0, 1.0, 1.0)
        if polygonal_mode==True:
            glPolygonMode(GL_FRONT_AND_BACK,GL_LINE)
        if polygonal_mode==False:
            glPolygonMode(GL_FRONT_AND_BACK,GL_FILL)
        loc_view_pos = glGetUniformLocation(program, "viewPos") # location of viewPos on the GPU
        glUniform3f(loc_view_pos, cameraPos[0], cameraPos[1], cameraPos[2]) ### camera/observer position (x,y,z)
        #====================== MOVE THE CAR (bounce between ends) =================
        pos_carro += speed_carro
        if pos_carro >= max_pos_carro:
            pos_carro = max_pos_carro
            speed_carro *= -1
        elif pos_carro <= min_pos_carro:
            pos_carro = min_pos_carro
            speed_carro *= -1
        carro.update_position(0,pos_carro,0, False, program)
        #===========================================================================
        #====================== UPLOAD THE EXTERNAL LIGHT ==========================
        loc_light_amb = glGetUniformLocation(program, "lightAmbiente")
        loc_light_inc = glGetUniformLocation(program, "lightIncidente")
        le = luz_ambiente_externo_intencidade
        # scale the ambient component by the user-controlled outdoor intensity
        glUniform3f(loc_light_amb, le*lightAmbExt[0], le*lightAmbExt[1], le*lightAmbExt[2])
        glUniform3f(loc_light_inc, lightIncExt[0], lightIncExt[1], lightIncExt[2])
        luz_externa.update_position(16.5 - pos_carro,6,0, True, program)
        luz_externa.draw(program)
        #===========================================================================
        #======================= DRAW THE OUTDOOR OBJECTS ==========================
        ceu.draw(program)
        chao.draw(program)
        carro.draw(program)
        casa.draw(program)
        placa.draw(program)
        arvore.draw(program)
        #===========================================================================
        #====================== UPLOAD THE INTERNAL LIGHT ==========================
        loc_light_amb = glGetUniformLocation(program, "lightAmbiente")
        loc_light_inc = glGetUniformLocation(program, "lightIncidente")
        li = luz_interno_intencidade
        # indoor light scales both ambient and incident components
        glUniform3f(loc_light_amb, li*lightAmbInt[0], li*lightAmbInt[1], li*lightAmbInt[2])
        glUniform3f(loc_light_inc, li*lightIncInt[0], li*lightIncInt[1], li*lightIncInt[2])
        luz_interna.update_position(3.3,3.3,-30, True, program)
        luz_interna.draw(program)
        #===========================================================================
        #======================= DRAW THE INDOOR OBJECTS ===========================
        mesa.draw(program)
        tabuleiro.draw(program)
        espada.draw(program)
        #===========================================================================
        # upload this frame's view and projection matrices
        mat_view = view(cameraPos, cameraFront, cameraUp)
        loc_view = glGetUniformLocation(program, "view")
        glUniformMatrix4fv(loc_view, 1, GL_FALSE, mat_view)
        mat_projection = projection(altura, largura, fov, near, far)
        loc_projection = glGetUniformLocation(program, "projection")
        glUniformMatrix4fv(loc_projection, 1, GL_FALSE, mat_projection)
        glfw.swap_buffers(window)
        if stop:
            break
    glfw.terminate()
# Script entry point: start the render loop only when executed directly.
if __name__ == "__main__":
    main()
# Sample inputs: (str1, str2) differ by one substitution, (str3, str4) are unrelated.
str1 = "pale"
str2 = "pals"
str3 = "black"
str4 = "white"
def one_away(str_1, str_2):
    """Return True if the strings are at most one edit apart.

    An edit is a single character insertion, removal or replacement.
    Spaces are stripped and comparison is case-insensitive, matching the
    original preprocessing.

    Bug fixes vs. the original: membership was tested against
    ``dict.values()`` (the counts) instead of the keys, and
    ``for k, v in dict.values()`` tried to unpack integers (TypeError).
    A character-count diff also cannot answer "one edit away" because it
    ignores character order, so this uses the standard two-pointer scan.
    """
    str_1 = str_1.replace(" ", "").lower()
    str_2 = str_2.replace(" ", "").lower()
    # more than one length difference always needs more than one edit
    if abs(len(str_1) - len(str_2)) > 1:
        return False
    # walk the shorter string against the longer one
    shorter, longer = (str_1, str_2) if len(str_1) <= len(str_2) else (str_2, str_1)
    i = j = 0
    found_difference = False
    while i < len(shorter) and j < len(longer):
        if shorter[i] != longer[j]:
            if found_difference:
                return False  # second mismatch -> more than one edit
            found_difference = True
            if len(shorter) == len(longer):
                i += 1  # replacement: advance both; otherwise skip one char of the longer string
        else:
            i += 1
        j += 1
    return True
def one_away2(str_1, str_2):
    """Alternate one-edit-away check via common prefix/suffix stripping.

    The original was an unfinished stub (it built an empty dict and
    implicitly returned None).  This completes it: strip the shared
    prefix and suffix; the strings are one edit apart iff at most one
    unmatched character remains on either side.  Preprocessing (space
    removal, lowercasing) matches ``one_away``.
    """
    str_1 = str_1.replace(" ", "").lower()
    str_2 = str_2.replace(" ", "").lower()
    if abs(len(str_1) - len(str_2)) > 1:
        return False
    # drop the common prefix
    while str_1 and str_2 and str_1[0] == str_2[0]:
        str_1, str_2 = str_1[1:], str_2[1:]
    # drop the common suffix
    while str_1 and str_2 and str_1[-1] == str_2[-1]:
        str_1, str_2 = str_1[:-1], str_2[:-1]
    # one replacement leaves one char in each; one insert/remove leaves one char in one
    return max(len(str_1), len(str_2)) <= 1
# Ad-hoc demo: expect True for the one-substitution pair, False for unrelated words.
print(one_away(str1, str2))
print(one_away(str3, str4))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2019 the HERA Project
# Licensed under the MIT License
"Command-line drive script for hera_cal.delay_filter. Only performs CLEAN Filtering"
from hera_cal import delay_filter
import sys
# Build the CLEAN-mode argument parser shipped with hera_cal and parse argv.
parser = delay_filter.delay_filter_argparser(mode='clean')
a = parser.parse_args()
# Collect the CLEAN filtering options forwarded verbatim to the filter.
filter_kwargs = {'standoff': a.standoff, 'horizon': a.horizon, 'tol': a.tol, 'window': a.window,
                 'skip_wgt': a.skip_wgt, 'maxiter': a.maxiter, 'edgecut_hi': a.edgecut_hi,
                 'edgecut_low': a.edgecut_low, 'min_dly': a.min_dly, 'gain': a.gain}
# The Tukey window takes an extra shape parameter alpha.
if a.window == 'tukey':
    filter_kwargs['alpha'] = a.alpha
spw_range = a.spw_range
# Run the delay filter, recording the exact command line in the output history.
delay_filter.load_delay_filter_and_write(a.infilename, calfile=a.calfile, Nbls_per_load=a.partial_load_Nbls,
                                         res_outfilename=a.res_outfilename, CLEAN_outfilename=a.CLEAN_outfilename,
                                         filled_outfilename=a.filled_outfilename, clobber=a.clobber, spw_range=spw_range,
                                         add_to_history=' '.join(sys.argv), **filter_kwargs)
|
def minimum_steps(numbers, value):
    """Count how many additions of the smallest remaining numbers are needed
    until their running sum reaches at least *value*.

    The first (smallest) element counts as the starting sum, so the result
    is the number of *additions* performed after it.  Returns None if the
    total of all numbers never reaches *value* (the task guarantees it does).
    """
    running = 0
    for additions, smallest in enumerate(sorted(numbers)):
        running += smallest
        if running >= value:
            return additions
'''
Task
Given an array of N integers, you have to find how many times you have to add up
the smallest numbers in the array until their Sum becomes greater or equal to K.
Notes:
List size is at least 3.
All numbers will be positive.
Numbers could occur more than once , (Duplications may exist).
Threshold K will always be reachable.
Input >> Output Examples
minimumSteps({1, 10, 12, 9, 2, 3}, 6) ==> return (2)
Explanation:
We add two smallest elements (1 + 2), their sum is 3 .
Then we add the next smallest number to it (3 + 3) , so the sum becomes 6 .
Now the result is greater or equal to 6 , Hence the output is (2) i.e (2)
operations are required to do this .
minimumSteps({8 , 9, 4, 2}, 23) ==> return (3)
Explanation:
We add two smallest elements (4 + 2), their sum is 6 .
Then we add the next smallest number to it (6 + 8) , so the sum becomes 14 .
Now we add the next smallest number (14 + 9) , so the sum becomes 23 .
Now the result is greater or equal to 23 , Hence the output is (3) i.e (3)
operations are required to do this .
minimumSteps({19,98,69,28,75,45,17,98,67}, 464) ==> return (8)
Explanation:
We add two smallest elements (19 + 17), their sum is 36 .
Then we add the next smallest number to it (36 + 28) , so the sum becomes 64 .
We need to keep doing this until the sum becomes greater or equal
to K (464 in this case), which will require 8 Steps .
Expected Time Complexity
O(n Log n)
'''
|
from .pretrain import UniterForPretraining
from torch import nn
from .layer import BertOnlyMLMHead
from collections import defaultdict
from torch.nn import functional as F
import torch
class UniterForPretrainingForVCR(UniterForPretraining):
    """ 2nd Stage Pretrain UNITER for VCR

    Extends the base pretraining model with VCR-specific embeddings:
    four token-type slots instead of two, and extra word-embedding rows
    for VCR special tokens.  Dispatches MLM / MRFR / MRC pretraining
    tasks from a single forward().
    """
    def init_type_embedding(self):
        """Grow the token-type embedding table from 2 to 4 entries.

        Types 0 and 1 keep their pretrained vectors; the two new types
        (2 and 3) are initialised from type 0's vector.
        """
        new_emb = nn.Embedding(4, self.uniter.config.hidden_size)
        new_emb.apply(self.init_weights)
        for i in [0, 1]:
            emb = self.uniter.embeddings.token_type_embeddings.weight.data[i, :]
            new_emb.weight.data[i, :].copy_(emb)
        # seed the new VCR type slots with the type-0 embedding
        emb = self.uniter.embeddings.token_type_embeddings.weight.data[0, :]
        new_emb.weight.data[2, :].copy_(emb)
        new_emb.weight.data[3, :].copy_(emb)
        self.uniter.embeddings.token_type_embeddings = new_emb
    def init_word_embedding(self, num_special_tokens):
        """Append rows for ``num_special_tokens`` extra tokens to the word
        embeddings, keeping all pretrained rows, and rebuild the tied MLM
        head so it shares the enlarged weight matrix."""
        orig_word_num = self.uniter.embeddings.word_embeddings.weight.size(0)
        new_emb = nn.Embedding(
            orig_word_num + num_special_tokens, self.uniter.config.hidden_size)
        new_emb.apply(self.init_weights)
        emb = self.uniter.embeddings.word_embeddings.weight.data
        new_emb.weight.data[:orig_word_num, :].copy_(emb)
        self.uniter.embeddings.word_embeddings = new_emb
        # MLM head is weight-tied to the (new) word embedding matrix
        self.cls = BertOnlyMLMHead(
            self.uniter.config, self.uniter.embeddings.word_embeddings.weight)
    def forward(self, batch, task, compute_loss=True):
        """Dispatch one pretraining step.

        ``task`` selects the objective: 'mlm', 'mrfr', or anything starting
        with 'mrc'.  Missing batch keys default to None via the defaultdict.
        Raises ValueError for an unknown task.
        """
        batch = defaultdict(lambda: None, batch)
        input_ids = batch['input_ids']
        position_ids = batch['position_ids']
        img_feat = batch['img_feat']
        img_pos_feat = batch['img_pos_feat']
        attention_mask = batch['attn_masks']
        gather_index = batch['gather_index']
        txt_type_ids = batch['txt_type_ids']
        if task == 'mlm':
            txt_labels = batch['txt_labels']
            return self.forward_mlm(input_ids, position_ids,
                                    txt_type_ids, img_feat, img_pos_feat,
                                    attention_mask, gather_index,
                                    txt_labels, compute_loss)
        elif task == 'mrfr':
            img_mask_tgt = batch['img_mask_tgt']
            img_masks = batch['img_masks']
            mrfr_feat_target = batch['feat_targets']
            return self.forward_mrfr(input_ids, position_ids,
                                     txt_type_ids, img_feat, img_pos_feat,
                                     attention_mask, gather_index,
                                     img_masks, img_mask_tgt,
                                     mrfr_feat_target, compute_loss)
        elif task.startswith('mrc'):
            img_mask_tgt = batch['img_mask_tgt']
            img_masks = batch['img_masks']
            mrc_label_target = batch['label_targets']
            return self.forward_mrc(input_ids, position_ids,
                                    txt_type_ids, img_feat, img_pos_feat,
                                    attention_mask, gather_index,
                                    img_masks, img_mask_tgt,
                                    mrc_label_target, task, compute_loss)
        else:
            raise ValueError('invalid task')
    # MLM
    def forward_mlm(self, input_ids, position_ids, txt_type_ids, img_feat,
                    img_pos_feat, attention_mask, gather_index,
                    txt_labels, compute_loss=True):
        """Masked language modeling: predict masked text tokens.

        Positions with txt_labels == -1 are unmasked and skipped.
        Returns per-token cross-entropy losses, or raw scores when
        compute_loss is False.
        """
        sequence_output = self.uniter(input_ids, position_ids,
                                      img_feat, img_pos_feat,
                                      attention_mask, gather_index,
                                      output_all_encoded_layers=False,
                                      txt_type_ids=txt_type_ids)
        # get only the text part (text tokens come first in the sequence)
        sequence_output = sequence_output[:, :input_ids.size(1), :]
        # only compute masked tokens for better efficiency
        masked_output = self._compute_masked_hidden(sequence_output,
                                                    txt_labels != -1)
        prediction_scores = self.cls(masked_output)
        if compute_loss:
            masked_lm_loss = F.cross_entropy(prediction_scores,
                                             txt_labels[txt_labels != -1],
                                             reduction='none')
            return masked_lm_loss
        else:
            return prediction_scores
    # MRFR
    def forward_mrfr(self, input_ids, position_ids, txt_type_ids,
                     img_feat, img_pos_feat,
                     attention_mask, gather_index, img_masks, img_mask_tgt,
                     feat_targets, compute_loss=True):
        """Masked region feature regression: reconstruct masked image
        features; returns element-wise MSE losses, or the predicted
        features when compute_loss is False."""
        sequence_output = self.uniter(input_ids, position_ids,
                                      img_feat, img_pos_feat,
                                      attention_mask, gather_index,
                                      output_all_encoded_layers=False,
                                      img_masks=img_masks,
                                      txt_type_ids=txt_type_ids)
        # only compute masked tokens for better efficiency
        masked_output = self._compute_masked_hidden(sequence_output,
                                                    img_mask_tgt)
        prediction_feat = self.feat_regress(masked_output)
        if compute_loss:
            mrfr_loss = F.mse_loss(prediction_feat, feat_targets,
                                   reduction='none')
            return mrfr_loss
        else:
            return prediction_feat
    # MRC
    def forward_mrc(self, input_ids, position_ids, txt_type_ids,
                    img_feat, img_pos_feat,
                    attention_mask, gather_index, img_masks, img_mask_tgt,
                    label_targets, task, compute_loss=True):
        """Masked region classification: predict the object class of masked
        regions.  Tasks containing 'kl' use KL divergence against soft
        labels; otherwise hard-label cross-entropy (background class 0 is
        excluded from the argmax and ignored by the loss)."""
        sequence_output = self.uniter(input_ids, position_ids,
                                      img_feat, img_pos_feat,
                                      attention_mask, gather_index,
                                      output_all_encoded_layers=False,
                                      img_masks=img_masks,
                                      txt_type_ids=txt_type_ids)
        # only compute masked regions for better efficiency
        masked_output = self._compute_masked_hidden(sequence_output,
                                                    img_mask_tgt)
        prediction_soft_label = self.region_classifier(masked_output)
        if compute_loss:
            if "kl" in task:
                prediction_soft_label = F.log_softmax(
                    prediction_soft_label, dim=-1)
                mrc_loss = F.kl_div(
                    prediction_soft_label, label_targets, reduction='none')
            else:
                # background class should not be the target; +1 restores the
                # original class index after slicing it off
                label_targets = torch.max(label_targets[:, 1:], dim=-1)[1] + 1
                mrc_loss = F.cross_entropy(
                    prediction_soft_label, label_targets,
                    ignore_index=0, reduction='none')
            return mrc_loss
        else:
            return prediction_soft_label
|
# coding=utf-8
"""
题目描述:
亲们!!我们的外国友人YZ这几天总是睡不好,初中奥数里有一个题目一直困扰着他,特此他向JOBDU发来求助信,希望亲们能帮帮他。问题是:求出1~13的整数中1出现的次数,并算出100~1300的整数中1出现的次数?为此他特别数了一下1~13中包含1的数字有1、10、11、12、13因此共出现6次,但是对于后面问题他就没辙了。ACMer希望你们帮帮他,并把问题更加普遍化,可以很快的求出任意非负整数区间中1出现的次数。
输入:
输入有多组数据,每组测试数据为一行。
每一行有两个整数a,b(0<=a,b<=1,000,000,000)。
输出:
对应每个测试案例,输出a和b之间1出现的次数。
样例输入:
0 5
1 13
21 55
31 99
样例输出:
1
6
4
7
按每一位来考虑,
1)此位大于1,这一位上1的个数有 ([n / 10^(b+1) ] + 1) * 10^b
2)此位等于0,为 ([n / 10^(b+1) ] ) * 10^b
3)此位等于1,在0的基础上加上n mod 10^b + 1
举个例子:
30143:
由于3>1,则个位上出现1的次数为(3014+1)*1
由于4>1,则十位上出现1的次数为(301+1)*10
由于1=1,则百位上出现1次数为(30+0)*100+(43+1)
由于0<1,则千位上出现1次数为(3+0)*1000
注:以百位为例,百位出现1为100~199,*100的意思为单步出现了100~199,100次,*30是因为出现了30次100~199,+(43+1)是因为左后一次301**不完整导致。
如果还不懂,自己拿纸和笔大致写下,找下规律,就能推导出来了!
"""
class Solution(object):
    """Count occurrences of the digit 1 in a range of non-negative integers.

    Fixed for Python 3: the original was Python 2 code relying on integer
    ``/`` (now ``//``) and on ``map`` returning a list (now wrapped in
    ``list``); without these fixes ``len`` fails and the arithmetic
    produces floats.
    """
    def count_number_of_one_between_number1_and_number2(self, number1, number2):
        """Return how many times the digit 1 appears in (number2, number1].

        Expects number1 >= number2 >= 0.  E.g. (13, 0) -> 6 because
        1, 10, 11 (twice), 12, 13 contain six 1s.
        """
        def count_one(number):
            # Closed-form digit count of 1s in [1, number], one decimal
            # position at a time: `after` is the number of digits to the
            # right of the current position.
            count = 0
            digits = list(map(int, str(number)))
            for index, each_number in enumerate(digits):
                after = len(digits) - index - 1
                if each_number == 0:
                    count += (number // 10 ** (after + 1)) * 10 ** after
                elif each_number == 1:
                    # full blocks plus the partial block ending at `number`
                    count += (number // 10 ** (after + 1)) * 10 ** after + number % (10 ** after) + 1
                else:
                    count += (number // 10 ** (after + 1) + 1) * 10 ** after
            return count
        return count_one(number1) - count_one(number2)
if __name__ == '__main__':
    # Demo run. Fixed for Python 3: the original used the Python 2 print
    # statement, which is a SyntaxError under Python 3.
    print(Solution().count_number_of_one_between_number1_and_number2(12, 0))
|
import cv2
import face_recognition as fr

# Reference images: one face wearing a mask, one without.
mascarilla = fr.load_image_file("./mascarilla.jpg")
sinmascarilla = fr.load_image_file("./sinmascarilla.jpg")
con_mascarilla = fr.face_encodings(mascarilla)[0]
sin_mascarilla = fr.face_encodings(sinmascarilla)[0]
# Known encodings and the message shown for each, index-aligned.
encodings_conocidos = [con_mascarilla,sin_mascarilla]
nombres_conocidos = ["Gracias por usar su mascarilla","Use su mascarilla por favor"]
webcam = cv2.VideoCapture(0)
font = cv2.FONT_HERSHEY_COMPLEX
reduccion = 4  # downscale factor used only for detection, to speed it up
while 1:
    loc_rostros = []
    encodings_rostros = []
    nombres_rostros = []
    nombre = ""
    valido, img = webcam.read()
    if valido:
        # BGR -> RGB (face_recognition expects RGB), then shrink for detection speed
        img_rgb = img[:, :, ::-1]
        img_rgb = cv2.resize(img_rgb, (0, 0), fx=1.0/reduccion, fy=1.0/reduccion)
        loc_rostros = fr.face_locations(img_rgb)
        encodings_rostros = fr.face_encodings(img_rgb, loc_rostros)
        for encoding in encodings_rostros:
            # BUG FIX: the original compared each detected face against the
            # frame's own detections (encodings_rostros), so every face
            # trivially matched itself and got an arbitrary label.  Faces
            # must be compared against the KNOWN reference encodings.
            coincidencias = fr.compare_faces(encodings_conocidos, encoding)
            if True in coincidencias:
                nombre = nombres_conocidos[coincidencias.index(True)]
            else:
                nombre = "irreconocible"
            nombres_rostros.append(nombre)
        # Draw a labelled box per face, scaling coordinates back to full size.
        for (top, right, bottom, left), nombre in zip(loc_rostros, nombres_rostros):
            top = top*reduccion
            right = right*reduccion
            bottom = bottom*reduccion
            left = left*reduccion
            # green for recognized faces, red otherwise
            if nombre != "irreconocible":
                color = (0,255,0)
            else:
                color = (0,0,255)
            cv2.rectangle(img, (left, top), (right, bottom), color, 2)
            cv2.rectangle(img, (left, bottom - 20), (right, bottom), color, -1)
            cv2.putText(img, nombre, (left, bottom - 6), font, 0.6, (0,0,0), 1)
        cv2.imshow('Examen final inteligencia artificial', img)
    # ESC (27) closes the window and stops the loop
    k = cv2.waitKey(5) & 0xFF
    if k == 27:
        cv2.destroyAllWindows()
        break
webcam.release()
#!/usr/bin/env python3
"""
python3 prepend-cells.py <cells.ipynb> <notebook0.ipynb> [<notebook1.ipynb> ...]
"""
import sys
import nbformat
def msg(s, target=sys.stderr):
    """Emit *s* verbatim on *target* (stderr by default); no newline is added."""
    print(s, end="", file=target)
def open_notebook(filename, as_version=4):
    """Read the Jupyter notebook at *filename* and return the nbformat node."""
    with open(filename, "rt") as handle:
        return nbformat.read(handle, as_version)
def save_notebook(nb, filename):
    """Serialize the notebook node *nb* to *filename* as text."""
    with open(filename, "wt") as handle:
        nbformat.write(nb, handle)
def duplicate_notebook(source_filename, target_filename):
    """Copy a notebook by reading it and writing it back under a new name."""
    notebook = open_notebook(source_filename)
    save_notebook(notebook, target_filename)
# Require at least the cells notebook; print usage to stderr otherwise.
if len(sys.argv) < 2:
    msg(__doc__)
    sys.exit(-1)
cells_filename = sys.argv[1]
msg(f"Reading cells to prepend: '{cells_filename}' ...\n")
cells = open_notebook(cells_filename)
# Prepend the cells to each target notebook, keeping a .orig backup.
for nb_filename in sys.argv[2:]:
    nb_filename_orig = nb_filename + '.orig'
    msg(f"Prepending to: '{nb_filename}' (saving original as: '{nb_filename_orig}')\n")
    duplicate_notebook(nb_filename, nb_filename_orig)
    try:
        nb = open_notebook(nb_filename_orig)
        nb['cells'] = cells['cells'] + nb['cells']
        save_notebook(nb, nb_filename)
    except:
        # bare except is deliberate here: restore the backup on ANY failure,
        # then re-raise so the error is still reported
        msg(f"\n==> Due to an unexpected exception, restoring notebook to its original state!\n\n")
        duplicate_notebook(nb_filename_orig, nb_filename)
        raise
# eof
|
from contextlib import contextmanager
from math import cos, sin, pi, tau
from glm import vec3, ivec2, normalize, vec2
from game.constants import FULL_FOG_DISTANCE, GREEN, DEBUG
from game.entities.ai import CircleAi, CombinedAi
from game.entities.butterfly import Butterfly
from game.entities.buttabomber import ButtaBomber
from game.entities.flyer import Flyer
from game.entities.camera import Camera
from game.entities.powerup import Powerup
from game.scene import Scene
from game.util import random_color
class Level:
sky = "#59ABE3"
ground = GREEN
night_sky = "#00174A"
name = "A Level"
default_ai = None
# Pause times
small = 1
medium = 2
big = 4
huge = 10
# velocities
angular_speed = 2
speed = 60
def __init__(self, app, scene, script):
self.app = app
self.scene: Scene = scene
self.script = script
self.spawned = 0
self._skip = False
self.faster = 1
@property
def terminal(self):
return self.app.state.terminal
@contextmanager
def skip(self):
"""
Use this as a context manager to skip parts of a level with creating it.
Only works when DEBUG is True
Exemple:
self.pause(5) # This will happen
with self.skip():
# This will not happen
self.circle(100, 100, delay=1)
# This neither
self.pause(1000)
self.spawn(0, 0) # This will happen
"""
self._skip += DEBUG
yield
self._skip -= DEBUG
@contextmanager
def set_faster(self, val):
old = self.faster
self.faster = val
yield
self.faster = old
def spawn_powerup(self, letter: str = None, x: float = 0, y: float = 0):
"""
Spawn a powerup at position (x, y) at the current max depth
:param x: float between -1 and 1. 0 is horizontal center of the screen
:param y: float between -1 and 1. 0 is vertical center of the screen
:param letter: str powerup letter, None means random powerup
"""
if self._skip:
return
# Assuming the state is Game
camera: Camera = self.app.state.camera
pos = vec3(
x, y, camera.position.z - camera.screen_dist * FULL_FOG_DISTANCE
) * vec3(*camera.screen_size / 2, 1)
self.scene.add(Powerup(self.app, self.scene, letter, position=pos))
def spawn(self, x: float = 0, y: float = 0, ai=None, Type=Butterfly):
"""
Spawn a butterfly at position (x, y) at the current max depth
:param x: float between -1 and 1. 0 is horizontal center of the screen
:param y: float between -1 and 1. 0 is vertical center of the screen
"""
if self._skip:
return
ai = ai or self.default_ai
# Assuming the state is Game
camera: Camera = self.app.state.camera
pos = vec3(
x, y, camera.position.z - camera.screen_dist * FULL_FOG_DISTANCE
) * vec3(*camera.screen_size / 2, 1)
butt = self.scene.add(
Type(self.app, self.scene, pos, random_color(), num=self.spawned, ai=ai)
)
if DEBUG:
print("Spawned", butt)
self.spawned += 1
return butt
def pause(self, duration):
"""
Spawn nothing for the given duration.
Next spawn will be exactly after `duration` seconds.
"""
if self._skip:
duration = 0
return self.script.sleep(duration / self.faster)
def small_pause(self):
return self.pause(self.small)
def medium_pause(self):
return self.pause(self.medium)
def big_pause(self):
return self.pause(self.big)
def bigg_pause(self):
return self.pause((self.big + self.huge) / 2)
def huge_pause(self):
return self.pause(self.huge)
def engine_boost(self, mult):
self.app.state.player.speed.x *= mult
self.app.state.player.speed.y *= mult
def square(self, c, ai=None, Type=Butterfly):
self.spawn(c, c, ai, Type)
self.spawn(c, -c, ai, Type)
self.spawn(-c, c, ai, Type)
self.spawn(-c, -c, ai, Type)
def wall(self, qte_x, qte_y, w, h, ai=None, Type=Butterfly):
for i in range(qte_x):
for j in range(qte_y):
self.spawn(
(i / (qte_x - 1) - 0.5) * w, (j / (qte_y - 1) - 0.5) * h, ai, Type
)
def circle(self, n, radius, start_angle=0, ai=None, instant=False):
"""Spawn n butterflies in a centered circle of given radius"""
for i in range(n):
angle = i / n * tau + start_angle
self.spawn(radius * cos(angle), radius * sin(angle), ai)
if instant:
yield self.pause(0)
else:
yield self.small_pause()
def rotating_circle(
self,
n,
radius,
speed_mult=1,
center=(0, 0),
simultaneous=True,
ai=None,
Type=Butterfly,
):
"""
radius should be in PIXELS
"""
ai = ai or self.default_ai
aspeed = self.angular_speed * speed_mult
for i in range(n):
angle = i / n * 2 * pi
a = CircleAi(radius, angle, aspeed)
self.spawn(center[0], center[1], CombinedAi(a, ai), Type)
if simultaneous:
yield self.pause(0)
else:
yield self.small_pause()
def v_shape(self, n, dir=(1, 0), ai=None, Type=Butterfly, faster=1):
dir = normalize(vec2(dir)) * 0.4 # *0.4 so it isn't too spread out
self.spawn(0, 0)
yield self.small_pause()
for i in range(1, n):
self.spawn(*dir * i / n, ai, Type)
self.spawn(*dir * -i / n, ai, Type)
yield self.pause(self.small / faster)
def rotating_v_shape(
self, n, start_angle=0, angular_mult=1, ai=None, Type=Butterfly
):
if self._skip:
yield self.pause(0)
return
angular_speed = self.angular_speed * angular_mult
ai = ai or self.default_ai
self.spawn(0, 0)
yield self.small_pause()
angle = start_angle
for i in range(1, n):
# We sync the ai angles
ai1 = CircleAi(i * 20, angle, angular_speed)
ai2 = CircleAi(i * 20, angle + pi, angular_speed)
butt = self.spawn(0, 0, CombinedAi(ai, ai1), Type)
self.spawn(0, 0, CombinedAi(ai, ai2), Type)
yield self.small_pause()
angle = butt.ai_angle
def slow_type(
self, text, line=5, color="white", delay=0.1, clear=False, blink=False
):
if self._skip:
yield self.pause(0)
return
for i, letter in enumerate(text):
self.terminal.write_center(
letter, line, color, char_offset=(i, 0), length=len(text)
)
if letter != " ":
self.scene.play_sound("type.wav")
yield self.pause(delay)
yield self.pause(delay * clear)
terminal = self.terminal
left = ivec2((terminal.size.x - len(text)) / 2, line)
if clear:
for i, letter in enumerate(text):
terminal.clear(left + (i, 0))
self.scene.play_sound("type.wav")
yield self.pause(delay / 4)
def slow_type_lines(
self, text: str, start_line=5, color="white", delay=0.08, clear=True
):
if self._skip:
yield self.pause(0)
return
for i, line in enumerate(text.splitlines()):
yield from self.slow_type(line.strip(), start_line + i, color, delay)
if clear:
for i, line in enumerate(text.splitlines()):
left = ivec2((self.terminal.size.x - len(line)) / 2, start_line + i)
for j in range(len(line)):
self.terminal.clear(left + (j, 0))
self.scene.play_sound("type.wav")
yield self.pause(delay / 4)
def combine(self, *gens):
    """
    Combine the generators so they run at the same time.
    This assumes they only yield pauses and at least one.
    """
    infinity = float("inf")
    # Remaining pause time per generator; 0 means "advance it now".
    pauses = [0] * len(gens)
    while True:
        for i, gen in enumerate(gens):
            if pauses[i] == 0:
                try:
                    pauses[i] = next(gen).t
                except StopIteration:
                    # A finished generator never becomes ready again.
                    pauses[i] = infinity
        m = min(pauses)
        if m == infinity:
            # They have all finished
            return
        # Wait only as long as the soonest generator needs, then charge
        # that time against every generator's outstanding pause.
        yield self.pause(m)
        for i in range(len(pauses)):
            pauses[i] -= m
def __call__(self):
    """Run the level intro: set scenery/music, slow-type the level name,
    blink the "Level N ... Go!" banner ten times, then erase the name."""
    self.scene.sky_color = self.sky
    self.scene.ground_color = self.ground
    self.scene.music = self.music
    if self.name:
        yield from self.slow_type(self.name, 6, "white", 0.1)
        terminal = self.app.state.terminal
        terminal.clear(6)
        self.scene.play_sound("message.wav")
        # blink
        for i in range(10):
            # terminal.write(self.name, left, "green")
            terminal.write_center("Level " + str(self.number), 4, "white")
            terminal.write_center(self.name, 6, "green")
            terminal.write_center("Go!", 8, "white")
            yield self.pause(0.1)
            # Clearing only line 8 ("Go!") between pauses makes it blink.
            terminal.clear(8)
            yield self.pause(0.1)
        for line in (4, 6, 8):
            terminal.clear(line)
        # Erase the centered level name one character at a time.
        left = ivec2((terminal.size.x - len(self.name)) / 2, 5)
        for i in range(len(self.name)):
            terminal.clear(left + (i, 0))
            yield self.pause(0.04)
def __iter__(self):
    """Allow the level object itself to be iterated as its intro script."""
    return self()
|
# Connect to a MongoDB server running on the default local port.
from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017")

# Select (or lazily create) the database.  MongoDB only materialises a
# database once it actually holds some content.
db = client["mydatabase"]

# Select (or lazily create) the collection inside that database.
collection = db["mycollection"]

# Insert a single document (a mapping of field names to values).
record = {"Name": "Alok", "ID": 1002}
result = collection.insert_one(record)

# Every inserted document gets a server-assigned _id; report it.
print(result.inserted_id)

# Insert several documents with one round trip.
batch = [
    {"Name": "Pramesh", "ID": 2001, "Age": 20},
    {"Name": "Partha", "ID": 2002, "Grade": "A"},
    {"Name": "Tapan", "ID": 2003, "Age": 25},
]
collection.insert_many(batch)

# List the databases available on the server.
db_list = client.list_database_names()
print(db_list)

# List the collections available in the selected database.
col_list = db.list_collection_names()
print(col_list)
|
"""
This type stub file was generated by pyright.
"""
import re
from sqlalchemy.ext.declarative import DeclarativeMeta
def should_set_tablename(cls):
    """Determine whether ``__tablename__`` should be automatically generated
    for a model.
    * If no class in the MRO sets a name, one should be generated.
    * If a declared attr is found, it should be used instead.
    * If a name is found, it should be used if the class is a mixin, otherwise
    one should be generated.
    * Abstract models should not have one generated.
    Later, :meth:`._BoundDeclarativeMeta.__table_cls__` will determine if the
    model looks like single or joined-table inheritance. If no primary key is
    found, the name will be unset.
    """
    ...  # implementation omitted in this pyright type stub
# Matches uppercase runs that precede a lowercase letter or digit; used to
# find word boundaries when converting CamelCase names.
camelcase_re = re.compile(r'([A-Z]+)(?=[a-z0-9])')
def camel_to_snake_case(name):
    """Convert a CamelCase model name to a snake_case table name (stub)."""
    ...
class NameMetaMixin(type):
    """Metaclass mixin concerned with ``__tablename__`` handling (stub).
    NOTE(review): behavior inferred from the module docstrings; see the
    flask_sqlalchemy source for the real implementation."""
    def __init__(cls, name, bases, d) -> None:
        ...
    def __table_cls__(cls, *args, **kwargs):
        """This is called by SQLAlchemy during mapper setup. It determines the
        final table object that the model will use.
        If no primary key is found, that indicates single-table inheritance,
        so no table will be created and ``__tablename__`` will be unset.
        """
        ...
class BindMetaMixin(type):
    """Metaclass mixin for database-bind handling (stub).
    NOTE(review): purpose inferred from the name only; confirm against the
    flask_sqlalchemy source."""
    def __init__(cls, name, bases, d) -> None:
        ...
# Default model metaclass: combines tablename generation, bind handling and
# SQLAlchemy's declarative metaclass.
class DefaultMeta(NameMetaMixin, BindMetaMixin, DeclarativeMeta):
    ...
class Model(object):
    """Base class for SQLAlchemy declarative base model.
    To define models, subclass :attr:`db.Model <SQLAlchemy.Model>`, not this
    class. To customize ``db.Model``, subclass this and pass it as
    ``model_class`` to :class:`SQLAlchemy`.
    """
    # Class used to build the per-model ``query`` attribute (stubbed value).
    query_class = ...
    # Query object attached to each model class (stubbed value).
    query = ...
    def __repr__(self):
        ...
|
import sys
import os
from PIL import Image

# Usage: python <script> <image_folder> <new_folder>
# Converts every file in <image_folder> to a PNG inside <new_folder>.
print(sys.argv[0])
print(len(sys.argv))
print(str(sys.argv))
print(sys.argv)

# Grab the first two arguments to use.
image_folder = sys.argv[1]
new_folder = sys.argv[2]
print(image_folder, new_folder)
print(os.path.exists(new_folder))

# Create the destination folder if it does not exist yet.
if not os.path.exists(new_folder):
    os.makedirs(new_folder)
print(os.listdir(image_folder))

for filename in os.listdir(image_folder):
    # BUG FIX: the open() path previously did not include the file name,
    # so no image could ever be opened.  os.path.join also removes the
    # old requirement that the folder argument end in a slash.
    img = Image.open(os.path.join(image_folder, filename))
    clean_name = os.path.splitext(filename)[0]
    print(clean_name)
    img.save(os.path.join(new_folder, f'{clean_name}.png'), 'png')
print('All Done')
from datetime import datetime
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth import get_user_model
from quicklook.models import UserQuickLook
from progress_analyzer.helpers.cumulative_helper import create_cumulative_instance
class Command(BaseCommand):
    """Generate cumulative-sum instances for users.

    Users are selected with ``--email`` or ``--all``; the date range is
    either explicit (``--duration FROM TO``) or derived (``--origin``:
    from the user's oldest quicklook record up to today).
    """
    help = 'Generate Cumulative Sum instances '

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Which flags won; resolved in _validate_options ("all" beats
        # "email", "origin" beats "duration").
        self.email_or_all_flag = 'email'
        self.duration_or_origin_flag = 'duration'

    def _get_origin_date(self, user):
        """Return the created_at of the user's oldest quicklook, or None."""
        # Ascending order: index 0 is the oldest record.
        oldest_first = UserQuickLook.objects.filter(user=user).order_by('created_at')
        if oldest_first.exists():
            return oldest_first[0].created_at
        return None

    def _validate_options(self, options):
        """Validate flag combinations and record which ones take effect."""
        if not options['email'] and not options['all']:
            raise CommandError("Provide either --email or --all")
        if not options['duration'] and not options['origin']:
            raise CommandError("Provide either --duration or --origin")
        # give "all" flag more preference over "email"
        if options['all']:
            self.email_or_all_flag = 'all'
        # give "origin" flag more preference over "duration"
        if options['origin']:
            self.duration_or_origin_flag = 'origin'
        return True

    def _create_cumulative_sum(self, user_qs, from_date, to_date):
        """Create one cumulative-sum instance per user in *user_qs*."""
        for user in user_qs:
            self.stdout.write(self.style.WARNING(
                '\nCreating Cumulative Sum instance for user "%s"' % user.username))
            if self.duration_or_origin_flag == 'origin':
                date_of_oldest_ql_record = self._get_origin_date(user)
                if not date_of_oldest_ql_record:
                    self.stdout.write(self.style.ERROR(
                        '\nNo Quicklook record found for user "%s", cannot create Cumulative Sum instance' % user.username))
                    continue
                from_date = date_of_oldest_ql_record.strftime("%Y-%m-%d")
                to_date = datetime.now().strftime("%Y-%m-%d")
            # BUG FIX: the origin branch previously called
            # create_cumulative_instance itself AND fell through to the
            # unconditional call below, creating every instance twice.
            create_cumulative_instance(user, from_date, to_date)

    def add_arguments(self, parser):
        """Register the command-line flags."""
        parser.add_argument(
            '-e', '--email',
            nargs='+', type=str, dest='email',
            help="Email(s)"
        )
        parser.add_argument(
            '-a', '--all',
            action='store_true', dest='all',
            help='Generate cumulative sum instances for all user'
        )
        parser.add_argument(
            '-d', '--duration',
            type=str, nargs=2, dest='duration',
            help='Range of date [from, to] eg "-d 2018-01-01 2018-01-10"'
        )
        parser.add_argument(
            '-o', '--origin',
            action='store_true', dest='origin',
            help='Create cumulative sum instances from date of first quicklook (including today)'
        )

    def handle(self, *args, **options):
        """Entry point: validate flags, resolve users and date range."""
        if not self._validate_options(options):
            return
        from_date = None
        to_date = None
        if self.duration_or_origin_flag == 'duration':
            from_date, to_date = options['duration']
        if self.email_or_all_flag == 'email':
            user_qs = get_user_model().objects.filter(email__in=options['email'])
        else:
            user_qs = get_user_model().objects.all()
        self._create_cumulative_sum(user_qs, from_date, to_date)
import pyspark as ps
import datetime
import sys
import logging
from pyspark import SQLContext, SparkContext, SparkConf
from pyspark import SparkFiles
from pyspark.sql.functions import col, date_trunc
from pyspark.sql import functions as F
from pyspark.sql.types import IntegerType, FloatType, StringType, StructType, StructField, DateType
# static variables
URL_METADATA = 'http://snap.stanford.edu/data/amazon/productGraph/categoryFiles/meta_Movies_and_TV.json.gz'
URL_RATING = 'http://snap.stanford.edu/data/amazon/productGraph/categoryFiles/ratings_Movies_and_TV.csv'
# Column schema for the ratings CSV (reviewer, item, rating, timestamp).
# NOTE(review): Amazon "asin" ids are usually alphanumeric strings and
# "overall" is a numeric rating -- IntegerType/StringType here look
# swapped; confirm against the actual CSV before relying on them.
RATING_SCHEMA= StructType([
    StructField("reviewerID", StringType(), True),
    StructField("asin", IntegerType(), True),
    StructField("overall", StringType(), True),
    StructField("unixReviewTime", IntegerType(), True)])
# ### create the full data frame for analysis
def download_and_create_dataframe():
    """Download the Amazon Movies & TV ratings CSV and metadata JSON,
    load both into Spark, and return (joined_dataframe, sql_context)."""
    spark_context = SparkContext.getOrCreate(SparkConf())
    # addFile fetches each URL to a node-local path (see SparkFiles.get).
    spark_context.addFile(URL_METADATA)
    spark_context.addFile(URL_RATING)
    sql_context = SQLContext.getOrCreate(spark_context)
    #
    # After successfully downloading the CSV and JSON, raise the log level
    # to ERROR only, to cut down on the many INFO messages.
    #
    log4j = spark_context._jvm.org.apache.log4j
    log4j.LogManager.getRootLogger().setLevel(log4j.Level.ERROR)
    ratings_df = sql_context.read.schema(RATING_SCHEMA).csv("file://"+ SparkFiles.get("ratings_Movies_and_TV.csv"), header=False, inferSchema= True)
    metadata_df = sql_context.read.json("file://"+SparkFiles.get("meta_Movies_and_TV.json.gz"))
    # Join ratings with product metadata on the shared product id.
    return ratings_df.join(metadata_df, on = ['asin']), sql_context
# recive dataFrame, clean the data
# Input: dataFrame
# Outpot: dataFrame
def clean_data(df):
    """Clean the joined ratings+metadata frame.

    Adds a review_date column, drops corrupt/untitled rows, keeps only
    movie categories, and removes duplicates.
    """
    # add a date column from the unixReviewTime column
    df = df.withColumn('review_date', F.from_unixtime('unixReviewTime').cast(DateType()))
    # Drop rows Spark flagged as corrupt while reading the metadata JSON.
    df = df.filter(df['_corrupt_record'].isNull())
    # filter title with null
    df = df.filter(df['title'].isNotNull())
    # 'categories' is a nested array; explode twice to reach the
    # individual category strings, then keep only the movie ones.
    df = df.withColumn('explode_categories', F.explode(df['categories']))
    df = df.withColumn('category', F.explode(df['explode_categories']))
    df = df.filter(df['category'].isin('Movies','Movies & TV'))
    # use distinct to remove duplicates
    df = df.drop('explode_categories','category').distinct()
    return df
# function to recive 1 month dataframe
# Input: dataFrame , date
# Outpot: dataFrame in the corresponding date
def get_one_month(df, date):
    """Filter *df* to reviews in the calendar month containing *date*.

    NOTE(review): ``between`` is inclusive on both ends, so rows dated
    exactly on the first day of the following month are also included.
    """
    month_start = datetime.date(date.year, date.month, 1)
    rollover = month_start + datetime.timedelta(days=31)  # lands in next month
    month_end = datetime.date(rollover.year, rollover.month, 1)
    return df.filter(F.col('review_date').between(month_start, month_end))
# function to recive 2 following month dataframe
# Input: dataFrame , date
# Outpot: dataFrame in the corresponding date
def get_two_following_months(df, date):
    """Filter *df* to reviews in *date*'s month plus the following month.

    NOTE(review): ``between`` is inclusive on both ends, so rows dated
    exactly on the first day of the month after next are also included.
    """
    window_start = datetime.date(date.year, date.month, 1)
    rollover = window_start + datetime.timedelta(days=62)  # two months later
    window_end = datetime.date(rollover.year, rollover.month, 1)
    return df.filter(F.col('review_date').between(window_start, window_end))
# function for anelize and present the first question
def top_bottom(df, date):
    """Show the 5 best- and 5 worst-rated titles for *date*'s month,
    breaking average-rating ties by rating count (descending)."""
    monthly = get_one_month(df, date)
    rated = monthly.groupby('asin', 'title').agg(
        F.avg(F.col('overall')).alias('avg_rating'),
        F.count(F.col('overall')).alias('count_rating'),
    )
    top_5_ratings = rated.orderBy(
        F.col('avg_rating').desc(), F.col('count_rating').desc()).limit(5)
    buttom_5_ratings = rated.orderBy(
        F.col('avg_rating').asc(), F.col('count_rating').desc()).limit(5)
    top_5_ratings.show(5, False)
    buttom_5_ratings.show(5, False)
# function for anelize and present the second question
def max_5_avg_diff(df, date, sql_context):
    """Show the 5 titles whose average rating rose the most versus the
    previous month, using Spark SQL over a two-month slice of *df*."""
    # Compute the boundary dates for the two monthly windows.
    start = datetime.date(date.year, date.month, 1)
    previous_month = start - datetime.timedelta(days=1)  # last day of prev month
    start_previous_month = datetime.date(previous_month.year, previous_month.month, 1)
    date_next_month = start + datetime.timedelta(days=31)
    end_date = datetime.date(date_next_month.year, date_next_month.month, 1)
    df_1 = get_one_month(df, date)
    df_2 = get_one_month(df, start_previous_month)
    two_month_df = df_1.union(df_2)
    # create a temp view for SQL analysis
    two_month_df.createOrReplaceTempView('df')
    # BUG FIX: the previous-month CTE used to filter between the LAST day
    # of the previous month and the 1st of the current month (a two-day
    # window); it must start at start_previous_month, symmetric with the
    # current-month CTE.
    result_2 = sql_context.sql(f"""
    with previous_month as
    (
    SELECT asin,title, avg(overall) as avg_rating_previous_month
    from df
    where review_date between '{start_previous_month.strftime('%Y-%m-%d')}' and '{start.strftime('%Y-%m-%d')}'
    group by asin,title
    ),
    current_month as
    (
    SELECT asin,title, avg(overall) as avg_rating_current_month
    from df
    where review_date between '{start.strftime('%Y-%m-%d')}' and '{end_date.strftime('%Y-%m-%d')}'
    group by asin,title
    )
    SELECT c.asin , c.title, abs(c.avg_rating_current_month - p.avg_rating_previous_month) as max_avg_difference
    , p.avg_rating_previous_month, c.avg_rating_current_month
    FROM previous_month as p join current_month as c
    on p.asin=c.asin and p.title=c.title
    WHERE c.avg_rating_current_month > p.avg_rating_previous_month
    order by max_avg_difference desc
    LIMIT 5
    """)
    result_2.show(5, False)
# # main function
# Input: date string
# Output: 3 tables: for the given month
# 1 - top 5 avg movies reviews
# 2 - bottom 5 avg movies reviews
# 3 - 5 movies whose average monthly ratings increased the most compared with the previous month
def main(argv):
    """Entry point: argv[1] must be a YYYY-MM-DD date string.

    Prints the month's top/bottom 5 titles and the 5 biggest month-over-
    month average-rating increases.
    """
    target_date = datetime.datetime.strptime(argv[1], '%Y-%m-%d').date()
    frame, sql_context = download_and_create_dataframe()
    frame = clean_data(frame)
    top_bottom(frame, target_date)
    max_5_avg_diff(frame, target_date, sql_context)
if __name__ == '__main__':
main(sys.argv) |
import os
import shutil
import time
####Copies files from Branch Office folder to Home Office Folder.
####Script is intended to be run 24 hrs apart on each business day.
####Copies only files from the Branch Office where the modified time is
# less than 24 hrs from the current time
branch = '''insert path of source'''
home = '''insert path of destination'''
def copy(src, dest):
    """Copy every file in *src* modified within the last 24 hours to *dest*.

    Uses os.path.join so *src* works with or without a trailing separator
    (the old string concatenation silently required a trailing slash).
    """
    # 86400 seconds == 24 hours; computed once instead of per file.
    cutoff = int(time.time()) - 86400
    for x, name in enumerate(os.listdir(src)):
        path = os.path.join(src, name)
        if cutoff <= os.path.getmtime(path):
            shutil.copy(path, dest)
            print(x, name)
copy(branch,home)
#### Reference Notes
## https://docs.python.org/2/library/os.path.html?highlight=mtime#os.path.getmtime
#os.path.getmtime(path)
#returns the file modified time in epoch time
#86400 is the number of seconds in 24 hours.
####Possible changes to the code could be to compare the modified times between
# the Home Office Folder and Branch Office Folder. Any time in the Branch Office
#Folder that is greater than the Home Office folder could be copied over. This
#would make it so that the script could be run at any time.
|
#!/usr/bin/env python
import flickrquery
import argparse
import os.path, os
import subprocess, math
import json
from joblib import Parallel, delayed
import pyprind
parser = argparse.ArgumentParser()
parser.add_argument("output_dir", help="output directory where images will be stored")
parser.add_argument("num_images", type=int, help="number of images to be downloaded")
parser.add_argument("query", help="query string")
parser.add_argument("-start_date", help="start date", default="1/1/2005")
parser.add_argument("-end_date", help="end date", default="1/1/2014")
parser.add_argument("-target_max_dim", type=int, help="Target max dimension", default=1024)
parser.add_argument("--split_dirs", help="split downloaded images in directories", dest="split_dirs",action="store_true")
parser.set_defaults(split_dirs=False)
parser.add_argument('--noload_cache', dest='load_cache', action='store_false')
args = parser.parse_args()
def resize_image(filename, width, height):
    """Shrink *filename* in place with ImageMagick's mogrify so its pixel
    count is about args.target_max_dim**2 * 3/4.

    Returns True if the image is already small enough or the resize
    succeeded; False when mogrify fails or the math raises.
    """
    n_pixels = float(width * height)
    n_target_pixels = args.target_max_dim * args.target_max_dim * 3 / 4.0
    # Only resize when the image is noticeably (20%) over target.
    if n_pixels > n_target_pixels * 1.2:
        try:
            ratio = math.sqrt(n_target_pixels / (n_pixels * 1.0))
            target_width = int(width * ratio)
            target_height = int(height * ratio)
            cmd = 'mogrify -resize %dx%d %s' % (target_width, target_height, filename)
            return (os.system(cmd) == 0)
        except Exception:
            # Was a bare `except:`; narrowed so KeyboardInterrupt and
            # SystemExit are no longer swallowed.
            return False
    return True
def check_and_resize_image(filename):
    """Validate *filename* with jhead and resize it if it is a real image.

    Returns False when jhead fails or never reports a positive resolution.
    NOTE(review): on Python 3 check_output returns bytes, so the string
    comparison below would need decoding; this script targets Python 2.
    """
    try:
        jhead_output = subprocess.check_output(['jhead', filename])
    except:
        return False
    else:
        # Look for a line of the form: "Resolution : <w> x <h>".
        for line in jhead_output.splitlines():
            tokens = line.split()
            if len(tokens) == 5 and tokens[0] == 'Resolution' and int(tokens[2]) > 0 and int(tokens[4]) > 0:
                return resize_image(filename, int(tokens[2]), int(tokens[4]))
    return False
def download_image(data, filename):
print 'downloading ', filename
if data['originalsecret'] and data['originalsecret'] != 'null':
url_original = 'http://farm%s.staticflickr.com/%s/%s_%s_o.%s' % (data['farm'], data['server'], data['id'], data['originalsecret'], data['originalformat'])
else:
url_original = 'http://farm%s.staticflickr.com/%s/%s_%s_o.jpg' % (data['farm'], data['server'], data['id'], data['secret'])
url_large = 'http://farm%s.staticflickr.com/%s/%s_%s_b.jpg' % (data['farm'], data['server'], data['id'], data['secret'])
cmd = 'wget -t 1 -T 5 --quiet --max-redirect 0 %s -O %s' % (url_original, filename)
res = os.system(cmd)
if res == 0:
return check_and_resize_image(filename)
# try:
# jhead_output = subprocess.check_output(['jhead', filename])
# except:
# pass
# else:
# for line in jhead_output.splitlines():
# tokens = line.split()
# if len(tokens) == 5 and tokens[0] == 'Resolution' and int(tokens[2]) > 0 and int(tokens[4]) > 0:
# return True
# print 'Trying to look for the large image...'
cmd = 'wget -t 1 -T 5 --quiet --max-redirect 0 %s -O %s' % (url_large, filename)
res = os.system(cmd)
if res == 0:
return check_and_resize_image(filename)
# try:
# jhead_output = subprocess.check_output(['jhead', filename])
# except:
# pass
# else:
# for line in jhead_output.splitlines():
# tokens = line.split()
# if len(tokens) == 5 and tokens[0] == 'Resolution' and int(tokens[2]) > 0 and int(tokens[4]) > 0:
# return True
# return False
# --- Script body: prepare the output dir, then run the queries (or load
# a cached result set from a previous run). ---
if not os.path.exists(args.output_dir):
    os.mkdir(args.output_dir)
if not os.path.exists(args.output_dir):
    print 'Cannot create output directory, exiting.'
    exit()
all_results = {}
query_results_file = os.path.join(args.output_dir, 'query_results.txt')
if not os.path.exists(query_results_file) or not args.load_cache:
    # Semicolon-separated queries are run one by one and merged by photo id.
    queries = args.query.split(';')
    for q in queries:
        print q
        results = flickrquery.run_flickr_query(query_args={'text': q}, max_photos = args.num_images, startDate=args.start_date, endDate=args.end_date)
        print 'Found %d images for query: %s' % (len(results), q)
        for photo_id, data in results.items():
            all_results[photo_id] = data;
    #MAX_IMAGES_DIRECTORY = 1000
    #directory_number = 1
    num_images_in_directory = 0
    num_images = 0
    num_download = 0
    print 'Downloading %d images.' % len(all_results.keys())
    # Cache the merged query results so reruns can skip the API calls.
    print 'Caching results...'
    f = open(query_results_file, 'w')
    json.dump(all_results, f, sort_keys=True, indent=4, separators=(',', ': '))
    f.close()
else:
    print 'Loading cached results...'
    f = open(query_results_file, 'r')
    all_results = json.load(f)
    f.close()
print 'Found %d images for the queries.' % len(all_results.keys())
progress_bar = pyprind.ProgPercent(len(all_results.keys()), title='Photos downloaded')
processed_photos = 0
def downloadPhoto(photo_id, data):
    """Download one photo (and its metadata JSON) into the output dir."""
    global progress_bar, processed_photos
    progress_bar.update(item_id = str(processed_photos))
    processed_photos = processed_photos + 1
    # Optionally shard images into sub-directories by id prefix.
    if args.split_dirs:
        current_directory = os.path.join(args.output_dir, photo_id[0:2])
        if not os.path.exists(current_directory):
            try:
                os.mkdir(current_directory)
            except:
                # Directory may already exist (e.g. created concurrently).
                pass
    else:
        current_directory = args.output_dir
    image_filename = os.path.join(current_directory, '%s.jpg' % photo_id)
    metadata_filename = os.path.join(current_directory, '%s.txt' % photo_id)
    valid = True
    # Skip images downloaded by a previous run.
    if not os.path.exists(image_filename):
        if not download_image(data, image_filename):
            valid = False
    if valid:
        # Write the photo's metadata next to the image.
        f = open(metadata_filename, 'w')
        json.dump(data, f, sort_keys=True, indent=4, separators=(',', ': '))
        f.close()
    else:
        # Remove any partial download.
        try:
            os.system('rm %s' % image_filename)
        except:
            pass
Parallel(n_jobs=1)(delayed(downloadPhoto)(id, data) for id, data in all_results.items())
progress_bar.stop() |
from ciao.contrib.runtool import *
from get_skycoords import get_skycoords
def extract_hltau(srcid, ra, dec):
    '''Extract TG spectrum for HL Tau from a given obs id. Inputs: srcid, ra, dec'''
    srcstring = str(srcid)
    # Set up dmcoords to get sky coordinates of source from RA and DEC.
    dmcoords.punlearn()
    dmcoords.option = 'cel'
    dmcoords.ra = ra
    dmcoords.dec = dec
    # Get skycoords from RA and DEC.
    testdmcoords = dmcoords(srcstring+'/repro/acisf'+srcstring+'_repro_evt2.fits',verbose=2)
    skyx, skyy = get_skycoords(testdmcoords)
    # Hard-code file names for future use.
    evt1filename = srcstring+'/secondary/acisf'+srcstring+'_000N002_evt1.fits'
    tgdetectoutfilename = srcstring+'/acis_'+srcstring+'_hltau.fits'
    maskfilename = srcstring+'/acis_'+srcstring+'_hltau_evt1_L1a.fits'
    evt1afilename = srcstring+'/acis_'+srcstring+'_hltau_evt1a.fits'
    filterfile1name = srcstring+'/acis_'+srcstring+'_hltau_flt1_evt1a.fits'
    evt2filename = srcstring+'/acis_'+srcstring+'_hltau_evt2.fits'
    # Use those coordinates to set up tgdetect2, then run tgdetect2.
    tgdetect2.punlearn()
    tgdetect2.zo_pos_x = skyx
    tgdetect2.zo_pos_y = skyy
    tgdetect2.clobber = True
    tgdetect2.infile = evt1filename
    tgdetect2.outfile = tgdetectoutfilename
    tgdetect2.verbose = 2
    a = tgdetect2()
    print(a)
    # Build the region mask around the detected zeroth order.
    tg_create_mask.punlearn()
    tg_create_mask.infile = evt1filename
    tg_create_mask.outfile = maskfilename
    # BUG FIX: was `tgdetectourfilename` (misspelled, undefined name ->
    # NameError at runtime); must reference the tgdetect2 output file.
    tg_create_mask.input_pos_tab = tgdetectoutfilename
    tg_create_mask.verbose = 2
    tg_create_mask.clobber = True
    b = tg_create_mask()
    print(b)
    # tg_resolve_events
    tg_resolve_events.punlearn()
    tg_resolve_events.infile = evt1filename
    tg_resolve_events.outfile = evt1afilename
    tg_resolve_events.regionfile = maskfilename
    tg_resolve_events.acaofffile = srcstring+'/repro/pcadf'+srcstring+'_000N001_asol1.fits'
    tg_resolve_events.verbose = 2
    tg_resolve_events.clobber = True
    c = tg_resolve_events()
    print(c)
    # Filter events, first for grade and status.
    dmcopy.punlearn()
    dmcopy.infile = evt1afilename+'[EVENTS][grade=0,2,3,4,6,status=0]'
    dmcopy.outfile = filterfile1name
    dmcopy.verbose = 2
    d = dmcopy()
    print(d)
    dmappend.punlearn()
    dmappend.infile = evt1afilename+'[region][subspace -time]'
    dmappend.outfile = filterfile1name
    d1 = dmappend()
    print(d1)
    # Second filter: apply the good-time intervals and drop the phas column.
    dmcopy.punlearn()
    dmcopy.infile = filterfile1name+'[EVENTS][@'+srcstring+'/secondary/acisf'+srcstring+'_000N002_flt1.fits][cols -phas]'
    dmcopy.outfile = evt2filename
    dmcopy.verbose = 2
    d2 = dmcopy()
    print(d2)
    dmappend.punlearn()
    dmappend.infile=evt1afilename+'[region][subspace -time]'
    dmappend.outfile = evt2filename
    dmappend.verbose = 2
    d3 = dmappend()
    print(d3)
    # tgextract: produce the final PHA2 spectrum.
    tgextract.punlearn()
    tgextract.infile = evt2filename
    tgextract.outfile = srcstring+'/acis_'+srcstring+'_hltau_pha2.fits'
    tgextract.verbose = 2
    f = tgextract()
    print(f)
    print('\nDone')
    return
import random
import cal_time
ls = list(range(100000))
random.shuffle(ls)
def sift(ls, low, height):
    """Sift ls[low] down so the subtree rooted at *low* (within indices
    low..height) satisfies the max-heap property."""
    root_val = ls[low]
    parent = low
    child = 2 * parent + 1
    while child <= height:
        # Pick the larger of the two children, if a right child exists.
        if child + 1 <= height and ls[child + 1] > ls[child]:
            child += 1
        if root_val >= ls[child]:
            break
        # Promote the larger child and keep descending.
        ls[parent] = ls[child]
        parent = child
        child = 2 * parent + 1
    ls[parent] = root_val
@cal_time.run_time
def heap_sort(ls):
    """In-place heap sort: build a max-heap, then repeatedly swap the root
    to the end and re-sift the shrunken heap."""
    last = len(ls) - 1
    # Only nodes up to the last parent, (last-1)//2, need sifting when
    # building the heap; the old start index (2*last-1)//2 == last-1
    # wasted time sifting leaves (a no-op).
    for i in range((last - 1) // 2, -1, -1):
        sift(ls, i, last)
    # Stop at 1: swapping ls[0] with itself and sifting a 0-size heap
    # did nothing.
    for i in range(last, 0, -1):
        ls[0], ls[i] = ls[i], ls[0]
        sift(ls, 0, i - 1)
heap_sort(ls)
# print(ls)
|
#coding:utf8
from base.recommender import Recommender
import numpy as np
class MostPop(Recommender):
    """Recommend the most popular items (most listeners) to every user."""
    def __init__(self, conf, trainingSet=None, testSet=None, fold='[1]'):
        super(MostPop, self).__init__(conf, trainingSet, testSet, fold)

    def buildModel(self):
        """Rank items by listener count, most popular first."""
        # (Removed a dead `self.recommendation = []` that was immediately
        # overwritten.)  Sort items by how many users listened to them.
        ranked = sorted(self.data.listened[self.recType].iteritems(),
                        key=lambda d: len(d[1]), reverse=True)
        self.recommendation = [item[0] for item in ranked]

    def predict(self, u):
        'invoked to rank all the items for the user'
        # Popularity ranking is identical for every user.
        return self.recommendation
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 19 11:32:41 2019
@author: anasahouzi
"""
import torch
import torch.nn as nn
from torchvision import transforms
from torchvision.datasets import MNIST
from torch.utils.data import DataLoader
from torch.utils.data.sampler import SubsetRandomSampler
# With module F, we can apply the activation functions
import torch.nn.functional as F
from torch import optim
import numpy as np
# Data preprocessing: We need to transform the raw dataset into tensors and
# normalize them in a fixed range. The torchvision package provides a utility
# called transforms which can be used to combine different transformations
# together.
# Convert images to tensors, then normalize to roughly [-1, 1].
# BUG FIX: MNIST images have a single channel; the previous 3-value
# mean/std tuples fail to broadcast against a 1x28x28 tensor in current
# torchvision (older versions silently used only the first value, 0.5,
# so this change preserves the effective behavior).
_tasks = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize((0.5,), (0.5,))
])
# The first transformation converts the raw data into tensor variables and
# the second transformation performs normalization using the below operation:
# x_normalized = x-mean / std. The first vector is the mean vector for each
# dimension (R, G, B), and the second one is the standard deviation.
## Load MNIST Dataset and apply transformations
mnist = MNIST("data", download=True, train=True, transform=_tasks)
print("hola1")
# Another excellent utility of PyTorch is DataLoader iterators which provide
# the ability to batch, shuffle and load the data in parallel using
# multiprocessing workers.
## create training and validation split
split = int(0.8 * len(mnist))
index_list = list(range(len(mnist)))
train_idx, valid_idx = index_list[:split], index_list[split:]
print("hola2")
## create sampler objects using SubsetRandomSampler
# Samples elements randomly from a given !!!!!!!list!!!!!! of indices,
# without replacement.
tr_sampler = SubsetRandomSampler(train_idx)
val_sampler = SubsetRandomSampler(valid_idx)
print("hola3")
## create iterator objects for train and valid datasets
# Data loader. Combines a dataset and a sampler, and provides single
# or multi-process iterators over the dataset.
trainloader = DataLoader(mnist, batch_size=256, sampler=tr_sampler)
validloader = DataLoader(mnist, batch_size=256, sampler=val_sampler)
print(trainloader)
print("hola4")
# The neural network architectures in PyTorch can be defined in a class which
# inherits the properties from the base class from nn package called Module.
# This inheritance from the nn.Module class allows us to implement, access, and
# call a number of methods easily. We can define all the layers inside the
# constructor of the class, and the forward propagation steps inside
# the forward function.
class Model(nn.Module):
    """Two-layer MLP for MNIST: 784 -> 128 (sigmoid) -> 10 logits."""
    def __init__(self):
        super().__init__()
        self.hidden = nn.Linear(784, 128)
        self.output = nn.Linear(128, 10)

    def forward(self, x):
        """Return raw class scores for a (batch, 784) input tensor."""
        x = self.hidden(x)
        # torch.sigmoid replaces the deprecated F.sigmoid.
        x = torch.sigmoid(x)
        x = self.output(x)
        return x
model = Model()
# Define the loss function and the optimizer using the nn and optim package:
loss_function = nn.CrossEntropyLoss()
# model.parameters() contains the learnable parameters (weights, biases) of
# a torch model, that will be updated using the SGD
optimizer = optim.SGD(model.parameters(), lr=0.01, weight_decay= 1e-6,
momentum = 0.9, nesterov = True)
# We are now ready to train the model. The core steps will remain the same as
# we saw earlier: Forward Propagation, Loss Computation, Backpropagation,
# and updating the parameters.
for epoch in range(1, 11): ## run the model for 10 epochs
train_loss, valid_loss = [], []
## We r setting the mode of training, it's like we r telling our nn, to
## turn the training mode "on".
model.train()
print("hola5")
for data, target in trainloader:
print("hola6")
# When you start your training loop, ideally you should zero out the
# gradients so that you do the parameter update correctly. Else the
# gradient would point in some other directions than the intended
# direction towards the minimum.
optimizer.zero_grad()
## 1. forward propagation
output = model(data)
## 2. loss calculation
loss = loss_function(output, target)
## 3. backward propagation
loss.backward()
## 4. weight optimization
optimizer.step()
train_loss.append(loss.item())
## We r now telling our nn that this is testing part, and that the
## training part is over, we can also use model.train(mode=False).
model.eval()
for data, target in validloader:
output = model(data)
loss = loss_function(output, target)
valid_loss.append(loss.item())
print ("Epoch:", epoch, "Training Loss: ", np.mean(train_loss),
"Valid Loss: ", np.mean(valid_loss))
# Once the model is trained, make the predictions on the validation data.
## dataloader for validation dataset
dataiter = iter(validloader)
# BUG FIX: Python 3 iterators have no .next() method (and modern PyTorch
# DataLoader iterators dropped it too); use the builtin next().
data, labels = next(dataiter)
output = model(data)
# Take the arg-max over the 10 class scores for each sample.
_, preds_tensor = torch.max(output, 1)
preds = np.squeeze(preds_tensor.numpy())
print ("Actual:", labels[:10])
print ("Predicted:", preds[:10])
|
def countA(w):
    """Return how many times the lowercase letter 'a' occurs in *w*."""
    count = 0
    for ch in w:
        if ch == "a":
            count += 1
    # BUG FIX: previously returned the hard-coded value 2 regardless of
    # input; return the actual tally.
    return count

print(countA("Louisiana"))
import torch.nn as nn
class Linear(nn.Module):
    """MLP regressor: two BatchNorm+Dropout hidden layers of width
    *hidden_size*, a *hidden_size2* bottleneck, then a linear output."""

    def __init__(self, inp_size, opt_size, hidden_size=1024, hidden_size2=64):
        super(Linear, self).__init__()
        stages = [
            nn.Linear(inp_size, hidden_size),
            nn.BatchNorm1d(hidden_size),
            nn.Dropout(),
            nn.Linear(hidden_size, hidden_size),
            nn.BatchNorm1d(hidden_size),
            nn.Dropout(),
            nn.Linear(hidden_size, hidden_size2),
            nn.BatchNorm1d(hidden_size2),
            nn.Linear(hidden_size2, opt_size),
        ]
        self.regressor = nn.Sequential(*stages)

    def forward(self, inp):
        """Run the full stack on a (batch, inp_size) tensor."""
        out = self.regressor(inp)
        return out
|
import collections
import inspect
import pygame
from .sprites import GameObject, Sprite, Text
from .inputs import Button, TextInput
# Window geometry and target frame rate.
SCREEN = pygame.Rect(0, 0, 800, 600)
FRAMERATE = 60
# NOTE(review): PORT is not referenced anywhere in this module --
# presumably for networking elsewhere; confirm before removing.
PORT = 10000
# State method names that Game.autosubscribe wires to pygame event types.
AUTO_EVENTS = {
    'keydown': pygame.KEYDOWN,
    'keyup': pygame.KEYUP,
    'mousemotion': pygame.MOUSEMOTION,
    'mousebuttondown': pygame.MOUSEBUTTONDOWN,
    'mousebuttonup': pygame.MOUSEBUTTONUP,
}
class Game(object):
    """Top-level pygame application: owns the window, clock, event
    subscriptions and the active State, and runs the main loop."""

    def __init__(self, state):
        pygame.init()
        self.screen = pygame.display.set_mode(SCREEN.size)
        self.rect = self.screen.get_rect()
        self.clock = pygame.time.Clock()
        self.framerate = FRAMERATE
        self._after = None
        self.elapsed = 0
        # Maps a pygame event type to the list of subscribed callbacks.
        self.notifications = collections.defaultdict(list)
        self.auto_events = AUTO_EVENTS
        # Accept either a State class or an already-built instance.
        if inspect.isclass(state):
            state = state()
        state.game = self
        self._state = None
        self._next_state = state
        self.create_state()
        self.autosubscribe(state)

    @property
    def state(self):
        return self._state

    @state.setter
    def state(self, value):
        # State switches take effect at the top of the next frame.
        self._next_state = value

    def create_state(self):
        """Tear down the current state (if any) and activate _next_state."""
        self.notifications.clear()
        if self._state is not None:
            # BUG FIX: the old loop `for attr in self._state.__dict__:
            # del attr` only deleted the loop variable (a no-op); actually
            # drop the old state's attributes so its sprites can be freed.
            self._state.__dict__.clear()
        self._next_state.game = self
        state_creator = getattr(self._next_state, 'create', None)
        if state_creator is not None:
            state_creator()
        self._state = self._next_state

    def subscribe(self, method, eventtype):
        """Register *method* to be called for events of *eventtype*."""
        self.notifications[eventtype].append(method)

    def unsubscribe(self, method, eventtype):
        """Remove a previously subscribed *method* for *eventtype*."""
        self.notifications[eventtype].remove(method)

    def autosubscribe(self, obj):
        """Subscribe obj.<eventname> handlers for every known auto event."""
        for eventname, eventtype in self.auto_events.items():
            method = getattr(obj, eventname, None)
            if method is not None:
                self.subscribe(method, eventtype)

    def run(self):
        """Main loop: dispatch events, update and draw the active state."""
        running = True
        while running:
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    running = False
                for method in self.notifications[event.type]:
                    method(event)
            # Setting state to None is the signal to quit.
            if self._next_state is None:
                running = False
                break
            if self._state != self._next_state:
                self.create_state()
            self.state.update(self.elapsed)
            self.screen.fill((0, 0, 0))
            self.state.draw(self.screen)
            pygame.display.flip()
            self.elapsed = self.clock.tick(self.framerate)
class State(GameObject):
    """A game state whose Sprite-valued attributes are drawn and updated
    automatically each frame."""

    @property
    def sprites(self):
        """Yield every attribute of this state that is a Sprite."""
        for value in self.__dict__.values():
            if isinstance(value, Sprite):
                yield value

    def draw(self, surface):
        """Draw all sprites onto *surface*."""
        for spr in self.sprites:
            spr.draw(surface)

    def update(self, elapsed):
        """Advance every active sprite by *elapsed* milliseconds."""
        for spr in self.sprites:
            if spr.active:
                spr.update(elapsed)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Scrape seek.co.nz for Python jobs in Auckland: gather ad URLs via Selenium,
# then fetch each ad with requests/BeautifulSoup and write two CSVs linked by
# the ad URL.
from selenium import webdriver
import bs4
import requests
import requests.exceptions
import pandas as pd
import re
# Create output data frames (two relational tables, URL as shared key)
df_job_details = pd.DataFrame(columns=['URL', 'Job_Title', 'Company'])
df_job_requirements = pd.DataFrame(columns=['URL', 'Sentence'])
# Initialise browser, page iteration, and url list
browser = webdriver.Firefox()
urls = []
i = 1
# For each page of search, collect urls to job ad pages
while True:
    browser.get('https://www.seek.co.nz/python-jobs/in-All-Auckland?page=' + str(i) + '&sortmode=ListedDate')
    # Get list of urls
    # NOTE(review): '_2iNL7wI' / 'bHpQ-bp' are generated CSS class names and
    # will rot when the site rebuilds; find_elements_by_class_name is the
    # pre-Selenium-4 API — confirm the installed selenium version.
    urls = urls + [elem.get_attribute('href') for elem in browser.find_elements_by_class_name('_2iNL7wI')]
    i = i + 1
    if len(browser.find_elements_by_class_name('bHpQ-bp')) == 0 : break # If no "NEXT" button on page, don't check next page
# Quit browser and initialise regex for parsing terms from HTML
browser.quit()
# Captures the inner text of simple <i>/<li>/<p>-style tags with no nested markup.
term_search_regex = '<[ilp]{1,2}>([^<]*)</[ilp]{1,2}>'
# For each URL, retrieve HTML from page and parse to get job title, company name, and job requirements
for url in urls:
    # Retrieve soup object for web page
    res = requests.get(url)
    try:
        res.raise_for_status()
    except requests.exceptions.HTTPError:
        continue  # skip ads that no longer resolve
    soup = bs4.BeautifulSoup(res.text, 'lxml')
    # Retrieve job title and company
    job_title = str(soup.find_all('h1')[0].string)
    if len(soup.find_all('span', class_='_3FrNV7v _2QG7TNq E6m4BZb')) != 0:
        company = str(soup.find('span', class_='_3FrNV7v _2QG7TNq E6m4BZb').string)
    else:
        company = ''  # some ads omit the company span
    # Add job title and company to df_job_details with url as key
    new_row = [url, job_title, company]
    # NOTE(review): shape[0] + 1 starts the frame index at 1 (no row 0).
    df_job_details.loc[df_job_details.shape[0] + 1] = new_row
    # Retrieve element for job description
    jd_elem = soup.find('div', class_='_2e4Pi2B')
    jd_elem = str(jd_elem)
    # Remove HTML irrelevant to output
    for regex_term in ['</?strong>', '<a [^>]*>', '</a>', '<div [^>]*>', '</div>', '</?em>', '</?ul>']:
        while re.search(regex_term, jd_elem) is not None:
            remove_string = re.search(regex_term, jd_elem).group(0)
            jd_elem = jd_elem.replace(remove_string, '')
    # Extract terms and add to list
    terms_list = []
    while re.search(term_search_regex, jd_elem) is not None:
        terms_list.append(re.search(term_search_regex, jd_elem).groups(0)[0])
        jd_elem = jd_elem.replace(re.search(term_search_regex, jd_elem).group(0), '')
    # Add terms to df_job_requirements dataframe with url as key
    for term in terms_list:
        new_row = [url, term]
        df_job_requirements.loc[df_job_requirements.shape[0] + 1] = new_row
df_job_details.to_csv('Job Details.csv')
df_job_requirements.to_csv('Job Requirements.csv')
|
import os
import string  # kept from the original file; unused after the Python 3 fixes below

# Sum payments for HCPCS "code 99" (unlisted) outpatient procedures in the
# 2010 BSA Outpatient PUF and print the matching rows plus the total.
# Python 3 migration: string.split() and print statements replaced.

#open csv file
os.chdir('UnZip 12.Mar.2016 23-16-37')
with open('2010_BSA_Outpatient_PUF.csv') as fh:  # 'with' guarantees the file is closed
    o = fh.read()
#split string into rows, then rows into comma-separated fields
o = o.split('\n')
y = []
for x in o:
    y.append(x.split(','))
# Drop the header row and the trailing empty row left by the final newline.
# ``del`` is used instead of remove(): remove(y[-1]) deletes the *first*
# element equal to the last one, which could silently drop the wrong row.
del y[0]
del y[-1]
#extract code 99 procedures
a = []
for x in y:
    # assumes column 3 holds the procedure code with '99' at offsets 3:5 —
    # TODO confirm against the PUF record layout
    if x[3][3:5] == '99':
        a.append(x)
#list variable a now contains all code 99 procedures in the data
#calculate payments to code 99 unlisted procedures
q = 0
for x in a:
    # assumes column 4 is a payment amount and column 5 a service count —
    # TODO confirm against the PUF record layout
    z = int(x[4]) * int(x[5])
    q = q + z
#variable q now contains the total amount paid to code 99 unlisted procedures
#present results
for x in a:
    print(x)
print('the sum total of code 99 unlisted procedures is ' + str(q))
|
import re   # kept from the original file (not used here)
import sys  # kept from the original file (not used after the py3 migration)
from documentRead import DocumentRead

# Strip stopwords from every pre-segmented document and write the result to a
# parallel output directory.
# Python 3 migration: the reload(sys)/sys.setdefaultencoding('utf8') hack was
# removed — it only existed (and only worked) on Python 2.
'''
To filter the words in stopwords list
'''
# Read the stopword list in text mode so membership tests compare str to str.
# (The original opened it in 'rb', which under Python 3 yields bytes and makes
# every ``word not in stopwords`` test trivially True.)
with open("F:\\Git Repository\\stopwords.txt", 'r', encoding='utf-8') as sw:
    stopwords = sw.read().splitlines()
directory = 'F:\\Git Repository\\user_seg_content'
documentReader = DocumentRead(directory)
documentReader.load_document()
documents = documentReader.get_documents()
documents_name = documentReader.get_documents_name()
temp = 0
for index in range(0, len(documents_name), 1):
    temp = temp + 1
    # Renamed from ``str`` — the original shadowed the builtin.
    content = documents[index]
    word_list = content.replace('\n', '').split(' ')
    lines = ""
    for word in word_list:
        if word not in stopwords:
            lines += word + " "
    file_name = "F:\\Git Repository\\user_reseg_content\\" + documents_name[index]
    with open(file_name, "w") as f:
        f.write(lines)
    print('file %d done' % (temp))
print('done')
|
# Application bootstrap: create the Flask app, load settings from BaseConfig,
# and initialise the SQLAlchemy and Bcrypt extensions against it.
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from config import BaseConfig
from flask_bcrypt import Bcrypt

app = Flask(__name__)
app.config.from_object(BaseConfig)
# Shared extension instances — presumably imported by the rest of the
# project, so these module-level names must not be renamed.
db = SQLAlchemy(app)
bcrypt = Bcrypt(app)
|
import pygame

if __name__ == '__main__':
    # One-shot drawing demo: paint a white window, blit an image roughly
    # centered, then idle until the window is closed.
    pygame.init()
    window = pygame.display.set_mode([640, 480])
    window.fill([255, 255, 255])

    # Center the 95x76 image within the 640x480 window.
    face = pygame.image.load("red_face.jpg")
    window.blit(face, [320 - 95 // 2, 240 - 76 // 2])
    pygame.display.flip()

    # Event pump: keep the window responsive until QUIT arrives.
    alive = True
    while alive:
        for evt in pygame.event.get():
            if evt.type == pygame.QUIT:
                alive = False
    pygame.quit()
|
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 7 17:19:11 2019
@author: HP
"""
class Node:
    """A binary-search-tree node: a key plus parent/child links."""

    def __init__(self, data):
        self.key = data
        # All links start empty; Tree.Insert wires them up.
        self.parent = None
        self.left_child = None
        self.right_child = None
class Tree:
    """Unbalanced binary search tree that also tracks its min and max nodes.

    Bug fix applied throughout: the original methods recursed through the
    module-level global ``list`` (the instance created by the driver code)
    instead of ``self``, so the class only worked for that one instance.
    """

    def __init__(self):
        self.root = None
        self.min = None  # node holding the smallest key inserted so far
        self.max = None  # node holding the largest key inserted so far

    def Insert(self, node, x):
        """Insert key ``x`` into the subtree rooted at ``node``.

        Duplicates go to the right. Callers pass ``self.root`` as ``node``.
        """
        if self.root is None:
            new_node = Node(x)
            self.root = new_node
            # Bug fix: the original set a stray ``self.parent = None`` on the
            # tree object itself; the root's parent is already None.
            self.max = new_node
            self.min = new_node
        else:
            if x >= node.key:
                if node.right_child is None:
                    new_node = Node(x)
                    node.right_child = new_node
                    new_node.parent = node
                else:
                    self.Insert(node.right_child, x)
            else:
                if node.left_child is None:
                    new_node = Node(x)
                    node.left_child = new_node
                    new_node.parent = node
                else:
                    self.Insert(node.left_child, x)
        self.Update()

    def Delete(self, x):
        """Delete one node whose key equals ``x``.

        NOTE(review): as in the original, deleting the root while it has
        fewer than two children dereferences ``nd.parent`` (None) — confirm
        whether root deletion must be supported before relying on this.
        ``self.min``/``self.max`` are not re-derived after deletion (original
        behavior kept).
        """
        nd = self.Search(self.root, x)
        if nd.left_child is None and nd.right_child is None:
            # Leaf: unlink it from its parent.
            if nd.parent.left_child is nd:
                nd.parent.left_child = None
            else:
                nd.parent.right_child = None
        elif nd.left_child is None:
            # Only a right child: splice it into nd's place.
            if nd.parent.left_child is nd:
                nd.parent.left_child = nd.right_child
            else:
                nd.parent.right_child = nd.right_child
            nd.right_child.parent = nd.parent
        elif nd.right_child is None:
            # Only a left child: splice it into nd's place.
            if nd.parent.left_child is nd:
                nd.parent.left_child = nd.left_child
            else:
                nd.parent.right_child = nd.left_child
            nd.left_child.parent = nd.parent
        else:
            # Two children: copy the in-order successor's key into nd, then
            # splice the successor out of the tree.
            node = nd.right_child
            while node.left_child is not None:
                node = node.left_child
            if node is not nd.right_child:
                nd.key = node.key
                node.parent.left_child = node.right_child
                if node.right_child is not None:
                    # Bug fix: the spliced child must point at the successor's
                    # old parent (the original pointed it at nd.parent, and
                    # crashed when the successor had no right child).
                    node.right_child.parent = node.parent
            else:
                nd.key = node.key
                nd.right_child = node.right_child
                if node.right_child is not None:
                    node.right_child.parent = nd

    def Update(self):
        """Advance the cached min/max pointers by one link if possible.

        Correct in the Insert flow because each Insert adds exactly one node
        and then calls Update once at every recursion level.
        """
        if self.min.left_child is not None:
            self.min = self.min.left_child
        if self.max.right_child is not None:
            self.max = self.max.right_child

    def Search(self, node, x):
        """Return the node with key ``x`` in the subtree at ``node``, else None."""
        if node is None:
            return None
        if x == node.key:
            return node
        if x > node.key:
            return self.Search(node.right_child, x)
        return self.Search(node.left_child, x)

    def InorderPrint(self, node):
        """Print the subtree's keys in ascending order, one per line."""
        if node is None:
            return
        self.InorderPrint(node.left_child)
        print(node.key)
        self.InorderPrint(node.right_child)
# Driver: build a sample tree rooted at 10 and print its keys in order.
# NOTE(review): the instance deliberately shadows the builtin ``list``; the
# Tree methods as originally written recurse via this exact global name, so
# do not rename it here without also changing the class to use ``self``.
list=Tree()
arr=[1,3,14,12,13,2,11,8,9]
list.Insert(list.root,10)
for i in arr:
    list.Insert(list.root,i)
# Deletion exercises left disabled by the author:
#list.Delete(14)
#list.Delete(13)
#list.Delete(8)
#list.Delete(9)
#list.Delete(1)
#list.Delete(3)
list.InorderPrint(list.root)
|
import util
import pickle
import numpy as np
import csv
class Neuron:
    """A single unit: holds its bias and, once computed, its activation."""

    def __init__(self, bias):
        self.bias = bias
        # Activation is filled in during the feed-forward pass.
        self.value = None
class SingleLayerNet:
    """Fully connected net with one hidden layer, sigmoid activations, and
    plain per-sample SGD. Weights are keyed by (source, destination) Neuron
    pairs. Python 3 migration: ``xrange`` -> ``range``."""

    def __init__(self, inputSize, layerSize, outputSize, learningRate=3):
        self.inputLayer = [Neuron(np.random.rand()) for i in range(inputSize)]
        self.hiddenLayer = [Neuron(np.random.rand()) for i in range(layerSize)]
        self.outputLayer = [Neuron(np.random.rand()) for i in range(outputSize)]
        self.learningRate = learningRate
        # One random weight per (source neuron, destination neuron) edge.
        weights = {}
        for n1 in self.inputLayer:
            for n2 in self.hiddenLayer:
                weights[(n1, n2)] = np.random.rand()
        for n1 in self.hiddenLayer:
            for n2 in self.outputLayer:
                weights[(n1, n2)] = np.random.rand()
        self.weights = weights

    # Input is a list of numbers
    def readInput(self, input):
        """Load one sample's feature vector into the input layer.

        Raises a bare Exception when the length does not match the layer
        (original behavior kept).
        """
        if len(input) != len(self.inputLayer):
            raise Exception()
        for i in range(len(self.inputLayer)):
            self.inputLayer[i].value = input[i]

    def feedForward(self):
        """Propagate the loaded input through the hidden and output layers."""
        for n in self.hiddenLayer:
            total = n.bias  # renamed from ``sum`` — shadowed the builtin
            for ni in self.inputLayer:
                total += self.weights[(ni, n)] * ni.value
            n.value = util.sigmoid(total)
        for n in self.outputLayer:
            total = n.bias
            for nh in self.hiddenLayer:
                total += self.weights[(nh, n)] * nh.value
            n.value = util.sigmoid(total)

    # expectedOutput is a list of expected output values corresponding to each output neuron
    def backProp(self, expectedOutput):
        """One SGD step for the current sample (quadratic cost).

        NOTE(review): ``z`` is recomputed per (hidden, output) pair as
        ``n1.value * w + bias``, ignoring the other hidden units'
        contributions — kept exactly as the original; confirm against the
        intended derivation before relying on convergence.
        """
        if len(expectedOutput) != len(self.outputLayer):
            raise Exception()
        # (The original built ``result = zip(...)`` here and never used it.)
        hiddenLayerActivationDerviative = []
        for n1 in self.hiddenLayer:
            derivativeOfCostByActivation = 0
            for i2, n2 in enumerate(self.outputLayer):
                z = n1.value * self.weights[(n1, n2)] + n2.bias
                derivativeByZ = util.sigmoid_derivative(z) * (n2.value - expectedOutput[i2])
                gradient = n1.value * derivativeByZ
                derivativeOfCostByActivation += self.weights[(n1, n2)] * derivativeByZ
                n2.bias -= self.learningRate * derivativeByZ
                self.weights[(n1, n2)] -= self.learningRate * gradient
            hiddenLayerActivationDerviative.append(derivativeOfCostByActivation)
        for n in self.inputLayer:
            for i1, n1 in enumerate(self.hiddenLayer):
                z = n.value * self.weights[(n, n1)] + n1.bias
                derivativeByZ = util.sigmoid_derivative(z) * hiddenLayerActivationDerviative[i1]
                gradient = n.value * derivativeByZ
                n1.bias -= self.learningRate * derivativeByZ
                self.weights[(n, n1)] -= self.learningRate * gradient

    def getChosenValue(self):
        """Return the index of the output neuron with the highest activation."""
        maxIndex = 0
        maxValue = self.outputLayer[0].value
        for i, n in enumerate(self.outputLayer):
            if n.value > maxValue:
                maxIndex = i
                maxValue = n.value
        return maxIndex

    def getLoss(self, expected):
        """Return the quadratic cost (half the sum of squared errors)."""
        sumOfSquares = 0
        for i, n in enumerate(self.outputLayer):
            sumOfSquares += (n.value - expected[i]) ** 2
        return sumOfSquares / 2
def main():
    """Train on the Kaggle MNIST CSVs and pickle the resulting net."""
    image_size = 28
    no_of_different_labels = 10
    image_pixels = image_size * image_size
    # Bug fix: the original passed ``None, '#', ','`` positionally, which
    # hands dtype=None to np.loadtxt; pass comments/delimiter by keyword and
    # keep the default float dtype.
    train_data = np.loadtxt('..\\resources\\train.csv',
                            comments='#', delimiter=',')
    test_data = np.loadtxt('..\\resources\\test.csv',
                           comments='#', delimiter=',')
    # Rescale pixels from [0, 255] into [0.01, 1.0] (avoids exact zeros).
    fac = 0.99 / 255
    train_imgs = np.asfarray(train_data[:, 1:]) * fac + 0.01
    test_imgs = np.asfarray(test_data[:, 1:]) * fac + 0.01
    train_labels = np.asfarray(train_data[:, :1])
    test_labels = np.asfarray(test_data[:, :1])
    # One-hot encode the labels, then soften 0/1 to 0.01/0.99 so the sigmoid
    # targets are reachable.
    lr = np.arange(no_of_different_labels)
    train_labels_one_hot = (lr == train_labels).astype(np.float)
    test_labels_one_hot = (lr == test_labels).astype(np.float)
    train_labels_one_hot[train_labels_one_hot == 0] = 0.01
    train_labels_one_hot[train_labels_one_hot == 1] = 0.99
    test_labels_one_hot[test_labels_one_hot == 0] = 0.01
    test_labels_one_hot[test_labels_one_hot == 1] = 0.99
    sln = SingleLayerNet(image_pixels, 20, no_of_different_labels)
    count = 0
    for i in range(len(train_imgs)):
        sln.readInput(train_imgs[i])
        sln.feedForward()
        sln.backProp(train_labels_one_hot[i])
        print(count)  # was Python 2 ``print count``
        count = count + 1
    with open('sln.pkl', 'wb') as output:
        pickle.dump(sln, output, pickle.HIGHEST_PROTOCOL)
def train_net():
    """Stream training rows from CSV doing online SGD, print per-sample and
    per-batch progress, then pickle the trained net."""
    inputSize = 28 ** 2
    sln = SingleLayerNet(inputSize, 20, 10)
    # Python 3: csv files are opened in text mode with newline='' (the
    # original's 'rb' was the Python 2 convention and breaks under py3).
    with open('../resources/train.csv', newline='') as trainingData:
        reader = csv.reader(trainingData)
        count = 0
        numberCorrect = 0
        batchLoss = 0
        batchLosses = []
        # NOTE(review): a header row would make map(int, ...) raise — the
        # original had the same behavior, so it is kept; confirm train.csv
        # is headerless.
        for row in reader:
            values = list(map(int, row))  # renamed from ``input`` (builtin shadow)
            expectedOutput = [0] * 10
            expectedOutput[values[0]] = 1  # first column is the label
            features = values[1:]
            sln.readInput(features)
            sln.feedForward()
            print("Expected value: " + str(values[0]) + " ----------------- Received Value: " + str(sln.getChosenValue()))
            if values[0] == sln.getChosenValue():
                numberCorrect += 1
            batchLoss += sln.getLoss(expectedOutput)
            if (count % 50 == 0):
                batchLosses.append(batchLoss)
                print("Batch loss " + str(int(count / 50)) + ": " + str(batchLoss))
                print("Batch losses " + str(batchLosses))
                print("Learning rate: " + str(sln.learningRate))
                batchLoss = 0
                # Disabled learning-rate schedule kept from the original:
                """
                last_5_batches = batchLosses[-3:]
                last_5_avg = sum(last_5_batches) / 3 if len(last_5_batches) == 3 else 100
                if last_5_avg < 35:
                    sln.learningRate = 1 if sln.learningRate > 1 else sln.learningRate
                if last_5_avg < 30:
                    sln.learningRate = .1 if sln.learningRate > .1 else sln.learningRate
                if last_5_avg < 25:
                    sln.learningRate = 0.01 if sln.learningRate > .01 else sln.learningRate
                if last_5_avg < 23:
                    sln.learningRate = 0.001 if sln.learningRate > .001 else sln.learningRate
                if last_5_avg < 22:
                    sln.learningRate = 0.0001 if sln.learningRate > .0001 else sln.learningRate
                """
            sln.backProp(expectedOutput)
            print(count)
            count += 1
            # Python 3 true division gives a float percentage here.
            print("Percent correct: " + str(100 * numberCorrect / count))
    with open('sln.pkl', 'wb') as output:
        pickle.dump(sln, output, pickle.HIGHEST_PROTOCOL)
def run_tests():
    """Load the pickled net and write Kaggle-style predictions to
    submission.csv (ImageId, Label)."""
    with open('sln.pkl', 'rb') as model_file:  # renamed from ``input`` (builtin shadow)
        sln = pickle.load(model_file)
    # Python 3: csv files use text mode with newline='' (original used the
    # Python 2 'wb'/'rb' conventions, which break under py3).
    with open('../resources/submission.csv', 'w', newline='') as f:
        writer = csv.writer(f)
        writer.writerow(['ImageId', 'Label'])
        with open('../resources/test.csv', newline='') as testfile:
            reader = csv.reader(testfile, delimiter=',')
            count = 0
            for row in reader:
                print("Test: " + str(count))
                if count == 0:
                    count += 1
                    continue  # skip the header row
                pixels = list(map(int, row))
                sln.readInput(pixels)
                sln.feedForward()
                max_num = sln.getChosenValue()
                # (The original also read outputLayer[0].value into an unused
                # local; removed.)
                writer.writerow([str(count), str(max_num)])
                count += 1
# NOTE(review): these two calls run at import time, *before* the __main__
# guard below — importing this module trains and tests immediately, and then
# main()/run_tests() run again when executed as a script. Presumably one of
# the two entry paths should be removed; confirm intent before changing.
train_net()
run_tests()
if __name__ == '__main__':
    main()
    run_tests()
#!/usr/bin/env python
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
#
# # Authors informations
#
# @author: HUC Stéphane
# @email: <devs@stephane-huc.net>
# @url: http://stephane-huc.net
#
# @license : GNU/GPL 3
#
''' Initialize modules '''
import glob
import os
dirname = os.path.dirname(os.path.abspath(__file__))
files = glob.glob(dirname + '/*.py')
liste = []
me = os.path.basename(__file__)
for fich in files:
(path, File) = os.path.split(fich)
if os.path.isfile(fich) and File != me:
(name, ext) = os.path.splitext(File)
if name != '__init__' and not name in liste:
liste.append(name)
__all__ = liste
|
print("szia") |
# Interactive geocoder: prompt for an address, query the Google Geocoding API
# (XML output), dump the raw response, and print the location fields.
# Python 3 migration: urllib.urlencode/urlopen moved to urllib.parse /
# urllib.request, raw_input() became input(), print is a function.
import urllib.parse
import urllib.request
import xml.etree.ElementTree as ET

serviceurl = 'https://maps.googleapis.com/maps/api/geocode/xml?'
while True:
    address = input('Enter location: ')
    if len(address) < 1: break
    # 'key' is intentionally blank — supply a Google API key before use.
    url = serviceurl + urllib.parse.urlencode({'sensor':'false', 'address': address, 'key':''})
    print('Retrieving', url)
    uh = urllib.request.urlopen(url).read()
    print('Retrieved', len(uh), 'characters')
    print(uh)
    tree = ET.fromstring(uh)
    #TODO
    #Return lat,lng,adress
    for element in tree.iter('location'):
        for child in element:
            print(child.text)
    # Disabled in the original (lat/lng/location were never defined):
    # print('lat', lat, 'lng', lng)
    # print(location)
|
#Pledge: I pledge my Honor that I have abided by the Stevens Honor System
#Ben Otto
#I understand that I may access the course textbook and course lecture notes but
#I am not to access any other resource. I also pledge that I worked alone on this exam.
#Math Operations
def addition(x,y):
    """Return the sum of ``x`` and ``y``."""
    total = x + y
    return total
def subtraction(x,y):
    """Return ``x`` minus ``y``."""
    difference = x - y
    return difference
def multiplication(x,y):
    """Return the product of ``x`` and ``y``."""
    product = x * y
    return product
def division(x,y):
    """Return ``x`` / ``y`` (true division); an error string when ``y`` is 0."""
    if y == 0:
        return "Divide by Zero Error"
    return x / y
#String Operations
def count_vowels (word):
    """Return how many vowels (a/e/i/o/u, case-insensitive) ``word`` contains."""
    lowered = word.lower()
    return sum(lowered.count(vowel) for vowel in "aeiou")
def encrypt (word):
    """Position cipher: shift character i forward by i code points."""
    return "".join(chr(ord(ch) + pos) for pos, ch in enumerate(word))
def main():
    """Interactive dispatcher: route to a math or string operation based on
    numeric menu choices, recursing to re-prompt after an invalid choice."""
    category = int(input("Enter 1 for mathematical operation, 2 for string operation"))
    if category == 1:
        operation = int(input("Enter 1 for addition, 2 for subtraction, 3 for multiplication, 4 for division"))
        nums = input("Enter the two numbers you wish to perform the operation on separated by a comma").split(",")
        x = int(nums[0])
        y = int(nums[1])
        if operation == 1:
            print(addition(x, y))
        elif operation == 2:
            print(subtraction(x, y))
        elif operation == 3:
            print(multiplication(x, y))
        elif operation == 4:
            print(division(x, y))
        else:
            print("You entered an invalid operation")
            main()
    elif category == 2:
        operation = int(input("Enter 1 for counting vowels, enter 2 for encryption"))
        word = input("Enter the string to perform the operation on")
        if operation == 1:
            print(count_vowels(word))
        elif operation == 2:
            print(encrypt(word))
        else:
            print("You entered an invalid operation")
            main()
    else:
        print("You entered an invalid operation")
        main()
main() |
import datetime
from builtins import classmethod
# NOTE: this import must stay AFTER ``import datetime`` — the class
# deliberately shadows the module name; ``timedelta`` added for the
# certificate-validity arithmetic.
from datetime import datetime, timedelta
from typing import List, Tuple

from cryptography import x509
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.base import Certificate, CertificateSigningRequest, CertificateBuilder
from cryptography.x509.extensions import ExtensionType
from cryptography.x509.name import NameAttribute
from cryptography.x509.oid import NameOID

from ..Config.Config import Config
class SelfSignedCertificateGenerator:
    """Helpers for building RSA keys, a self-signed CA, CSRs, and CA-signed
    server certificates (SHA-256 signatures throughout)."""

    def __init__(self) -> None:
        pass

    @classmethod
    def generate_Certificate_Authority(cls,
                                       ca_information: List[x509.NameAttribute],
                                       not_valid_after: datetime,
                                       extension: ExtensionType
                                       ) -> Tuple[rsa.RSAPrivateKey, Certificate]:
        """Create a fresh private key plus a self-signed CA certificate.

        ca_information: subject (and, self-signed, issuer) name attributes.
        not_valid_after: expiry timestamp.
        extension: single extension, added as critical.
        """
        privateKey = cls.generate_PrivateKey(
            Config.publicKeyExponent,
            Config.keySize
        )
        publicKey = cls.get_PublicKey_from_PrivateKey(privateKey)
        # Self-signed: subject and issuer carry the same name.
        certificate: Certificate = CertificateBuilder().subject_name(
            x509.Name(ca_information)
        ).issuer_name(
            x509.Name(ca_information)
        ).not_valid_before(
            datetime.utcnow()
        ).not_valid_after(
            not_valid_after
        ).serial_number(
            x509.random_serial_number()
        ).public_key(
            publicKey
        ).add_extension(
            extension,
            True
        ).sign(privateKey, hashes.SHA256())
        return privateKey, certificate

    @classmethod
    def generate_PrivateKey(cls, public_exponent: int, key_size: int) -> rsa.RSAPrivateKey:
        """Generate an RSA private key with the given exponent and key size."""
        return rsa.generate_private_key(
            public_exponent,
            key_size
        )

    @classmethod
    def get_PublicKey_from_PrivateKey(cls, privateKey: rsa.RSAPrivateKey) -> rsa.RSAPublicKey:
        """Return the public half of ``privateKey``."""
        return privateKey.public_key()

    @classmethod
    def generate_CertificationSigningRequest(
            cls,
            privateKey: rsa.RSAPrivateKey,
            csr_information: List[NameAttribute],
            additional_extensions: List[ExtensionType]
    ) -> CertificateSigningRequest:
        """Build and sign a CSR carrying the given name and extensions
        (each extension is added as non-critical)."""
        builder = x509.CertificateSigningRequestBuilder().subject_name(
            x509.Name(
                csr_information
            )
        )
        # Bug fix: add_extension() accepts one extension per call; the
        # original passed the whole list in a single call.
        for ext in additional_extensions:
            builder = builder.add_extension(ext, False)
        return builder.sign(privateKey, hashes.SHA256())

    @classmethod
    def generate_SelfSignedServerCertificate(
            cls,
            csr: CertificateSigningRequest,
            rootCACrt: Certificate,
            rootCAKey: rsa.RSAPrivateKey,
            number_days_cert_valid: int,
            extension: ExtensionType
    ) -> Certificate:
        """Sign ``csr`` with the CA key; valid from now for the given days."""
        subject = csr.subject
        issuer = rootCACrt.subject
        publicKeyOfCrt = csr.public_key()
        signingPrivateKey = rootCAKey
        return CertificateBuilder().subject_name(
            subject
        ).issuer_name(
            issuer
        ).public_key(
            publicKeyOfCrt
        ).serial_number(
            x509.random_serial_number()
        ).not_valid_before(
            # Bug fix: ``from datetime import datetime`` shadows the module,
            # so the original ``datetime.datetime.utcnow()`` raised
            # AttributeError; call the class directly (``timedelta`` comes
            # from the updated import block).
            datetime.utcnow()
        ).not_valid_after(
            datetime.utcnow() + timedelta(days=number_days_cert_valid)
        ).add_extension(
            extension,
            False
        ).sign(signingPrivateKey, hashes.SHA256())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import urllib
from zipfile import ZipFile
import requests
from bs4 import BeautifulSoup
# Scraper configuration — fill in the placeholders before use.
searchpath = 'http://wordpress.org/plugins/search.php?q='  # plugin search endpoint prefix
repodir = 'Your Repo Path Here.'  # local repository root (Windows-style paths below)
host_url = 'Your Host URL Here'  # target WordPress site base URL
# NOTE(review): these two placeholders are never read — the login class below
# hard-codes its own credentials; confirm which pair is intended.
user = 'Your Admin Username Here!'
pwd = 'Your Password Here!'
class PluginSearch:
    """Search wordpress.org for a plugin by keyword, download and unzip the
    best match into the local repo, and commit it via a batch script.
    Python 3 migration: print statements and urllib.urlretrieve updated."""

    def __init__(self, keyword):
        self.keyword = keyword
        self.searchpath = searchpath  # module-level search URL prefix
        self.plugindir = repodir + '\\0\\wp-content\\plugins'

    def getRelevantPlugin(self):
        """Return the (name, url) of the first (most relevant) search hit."""
        plist = self.getPluginList()[0]
        return plist

    def getPluginList(self):
        """Scrape the search results; return a list of (name, page url)."""
        search_url = '%s%s' % (self.searchpath, self.keyword)
        r = requests.get(search_url).content
        pluginlist = BeautifulSoup(r).find_all('div', 'plugin-block')
        outlist = []
        for plugin in pluginlist:
            plugin = plugin.h3.a
            outlist.append((plugin.text, plugin['href']))
        return outlist

    def getPluginDownloadAddr(self, url):
        """Scrape a plugin page; return (zip download url, 'v'-prefixed version)."""
        content = BeautifulSoup(requests.get(url).content)
        self.pdownaddr = content.find('a', itemprop='downloadUrl')['href']
        # NOTE(review): str.strip removes any of these *characters* from both
        # ends, not the literal prefix 'Download Version ' — it happens to
        # work only because version digits are not in the strip set.
        self.ver = content.find('a', itemprop='downloadUrl').text.strip(
            'Download Version ')
        self.ver = 'v' + self.ver
        return self.pdownaddr, self.ver

    def downloadPlugin(self):
        """Download the most relevant plugin zip, extract it into the repo,
        and return (plugin name, extracted dir name, plugin page url)."""
        from urllib.request import urlretrieve  # Python 3 home of urlretrieve
        (p_name, p_url) = self.getRelevantPlugin()
        print(p_name, p_url)
        # Bug fix: getPluginDownloadAddr returns a (url, version) tuple; the
        # original passed the whole tuple to os.path.basename.
        d_url, _ver = self.getPluginDownloadAddr(p_url)
        p_filename = os.path.basename(d_url)
        def report(count, blockSize, totalSize):
            # urlretrieve progress hook: render a percentage in place.
            percent = int(count * blockSize * 100 / totalSize)
            sys.stdout.write("\r%d%%" % percent + ' complete')
            sys.stdout.flush()
        sys.stdout.write('\rFetching ' + p_name + '...\n')
        urlretrieve(d_url, p_filename, reporthook=report)
        sys.stdout.write("\rDownload complete, saved as %s" %
                         p_filename + '\n\n')
        sys.stdout.flush()
        print(p_filename)
        pzip = ZipFile(p_filename)
        pzip.extractall(self.plugindir)
        # The first archive entry is the plugin's top directory ('name/').
        pdirname = pzip.namelist()[0][:-1]
        pzip.close()
        return p_name, pdirname, p_url

    def commitPlugin(self, action='Add'):
        """Download the plugin, then run the repo batch script with a commit
        message describing the action."""
        pname, pdirname, p_url = self.downloadPlugin()
        commit_msg = '%s a new plugin %s %s.\rReference: %s.' % (
            action, pname, self.ver, p_url)
        bat_path = '%s\\0\\update_plugin.bat "%s"' % (repodir, commit_msg)
        os.system(bat_path)

    def AddPlugin(self):
        self.commitPlugin()

    def UpdatePlugin(self):
        self.commitPlugin('Update')
class CheckWordpressPluginUpdate:
    # Logs in to wp-admin and lists the plugins flagged as having updates.
    def __init__(self):
        self.host_url = host_url
        self.login_url = self.host_url + '/wp-login.php'
        self.check_url = self.host_url +\
            '/wp-admin/plugins.php?plugin_status=upgrade'
        self.r = requests.Session()
        # NOTE(review): hard-coded credentials override the module-level
        # ``user``/``pwd`` placeholders defined at the top of the file —
        # presumably they should read those instead; also avoid committing
        # real credentials.
        self.user = 'my'
        self.password = 'lp5261314'
        # Build the headers; at minimum these two fields, derived from a
        # captured login request.
        self.headers = {
            'User-Agent':
            "Mozilla/5.0 (X11; Linux i686; rv:22.0) \
Gecko/20100101 Firefox/22.0 Iceweasel/22.0",
            'Referer': self.login_url}
        # Build the POST form data, also derived from a captured request.
        # ('登录' is the site's localized "Log in" submit value — it is sent
        # to the server, so it must not be translated.)
        self.postData = {'log': self.user, 'pwd': self.password,
                         'redirect_to': self.check_url, 'testcookie': '1',
                         'wp-submit': '登录'}
    def Check(self):
        # GET first to collect cookies, then POST the login form; the
        # redirect lands on the pending-updates page.
        res = self.r.get(self.login_url)
        res = self.r.post(
            self.login_url, data=self.postData, headers=self.headers)
        content = BeautifulSoup(res.content).find_all('tbody', id='the-list')
        namelist = []
        for one in content:
            namelist.append(one.find('strong').text)
        return namelist
if __name__ == '__main__':
    # Usage: script update            -> refresh every plugin the site flags
    #        script add "name|name2"  -> search, download, and commit plugins
    if sys.argv[1] == 'update':
        update = True
        p = CheckWordpressPluginUpdate()
        # Check() already returns a list of plugin names.
        plugin_names = p.Check()
    else:
        update = False
        # Bug fixes: (1) the original discarded the result of replace()
        # (strings are immutable), so spaces were never turned into '+';
        # (2) it then called replace/split unconditionally, which raised
        # AttributeError on the list returned by Check() in update mode.
        plugin_names = sys.argv[2].replace(' ', '+').split('|')
    for name in plugin_names:
        p = PluginSearch(name)
        if update:
            p.UpdatePlugin()
        else:
            p.AddPlugin()
|
# Admin registration: expose the Angel model in the Django admin site with
# the default ModelAdmin options.
from django.contrib import admin
from mywing.angel.models import Angel

admin.site.register(Angel)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.