content
stringlengths
5
1.05M
# https://www.hackerrank.com/challenges/three-month-preparation-kit-contacts/problem
#!/bin/python3

import math
import os
import random
import re
import sys
from collections import Counter

#
# Complete the 'contacts' function below.
#
# The function is expected to return an INTEGER_ARRAY.
# The function accepts 2D_STRING_ARRAY queries as parameter.
#

def contacts(queries, ans=None):
    """Answer 'add name' / 'find partial' queries over a contact list.

    Args:
        queries: list of [op, arg] pairs where op is 'add' or 'find'.
        ans: optional list that 'find' results are appended to; a fresh
            list is created when omitted.

    Returns:
        One count per 'find' query: how many added names start with the
        queried partial string.
    """
    # BUG FIX: the original used a mutable default (ans=[]), so results
    # accumulated across separate calls to contacts().
    if ans is None:
        ans = []
    prefix_counts = Counter()
    for op, arg in queries:
        if op == 'add':
            # Register every prefix of the added name once; a 'find' is
            # then a single O(1) dictionary lookup.
            for end in range(1, len(arg) + 1):
                prefix_counts[arg[:end]] += 1
        else:
            ans.append(prefix_counts[arg])
    return ans

if __name__ == '__main__':
    fptr = open(os.environ['OUTPUT_PATH'], 'w')

    queries_rows = int(input().strip())
    queries = []
    for _ in range(queries_rows):
        queries.append(input().rstrip().split())

    result = contacts(queries)

    fptr.write('\n'.join(map(str, result)))
    fptr.write('\n')

    fptr.close()
#coding=UTF-8
'''
Created on 2011-7-2

@author: Administrator

Automates publishing a second-hand-house listing on soufun.com through a
scripted mechanize browser session (Python 2; the site is GB18030-encoded).
Flow: goLogin() -> goSalePage() -> goPublishPage(), driven by Publish().
'''
try:
    import cookielib
    import mechanize
    import simplejson as sj
    import random
    import time
    import urllib
    import os
    from pyquery.pyquery import PyQuery
    import re
    import mimetypes
except Exception,e:
    # NOTE(review): raising a string is Python-2-only and loses the traceback;
    # the "puterror1|..." prefix is apparently parsed by a caller.
    raise "puterror1|%s"%e


class SouFangbrowser():
    # Scripted soufun.com agent session: logs in, opens the sale-input form,
    # uploads pictures and submits the listing form.

    def __init__(self,pdb):
        # pdb: dict of account/listing parameters (see the sample dict in __main__).
        self.br = mechanize.Browser()
        self.cj = cookielib.MozillaCookieJar()#LWPCookieJar()
        self.br.set_cookiejar(self.cj)
        self.br.set_handle_equiv(True)
        #self.br.set_handle_gzip(True)
        self.br.set_handle_redirect(True)
        self.br.set_handle_referer(True)
        self.br.set_handle_robots(False)
        # The site serves GB18030; force mechanize's HTML/forms/links parsers
        # to decode with the same codec.
        self.br._factory.encoding = "GB18030"
        self.br._factory._forms_factory.encoding = "GB18030"
        self.br._factory._links_factory._encoding = "GB18030"
        self.br.addheaders = [
            ('User-agent','Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13 (.NET CLR 3.5.21022)'),
        ]
        self.xqs=[]             # candidate estate matches (unused here)
        self.deaufltimages=[]   # site-provided default estate images
        self.customimages=[]    # form fields describing pictures we uploaded
        self.subdict={}         # extra form fields resolved from the site
        self.countid=0          # running index across all uploaded images
        self.pdb=pdb
        self.uname=self.pdb["username"]#"changna19880422"
        self.upwd=self.pdb["passwd"]#"19880422"
        self.area=self.pdb["citycode"]
        self.picRoot =""#"/home/wwwroot/jjr360v1.1/site.jjr.com/upfile/"
        self.house_drawing=[]   # floor-plan pictures
        self.house_thumb=[]     # interior pictures
        self.house_xqpic=[]     # estate/surroundings pictures
        #self.ucityname=urllib.quote(self.cityname.decode("UTF-8").encode("GB18030"))

    def getUqGlCode(self):
        # Fabricate the tracking cookies the site normally sets client-side
        # (global_cookie / unique_cookie / ...) so the later POST looks like a
        # normal browser session.
        a=[]
        # 8+ hex chars, zero-padded on the left.
        b="%X"%random.randint(000000000,999999999)
        while len(b)<8:
            b = "0" + b
        a.append(b)
        # middle part: epoch seconds plus a random 0-999 suffix
        a.append("%s%s"%(int(time.time()),random.randint(000,999)))
        c="%X"%random.randint(000000000,999999999)
        while len(c)<8:
            c = "0" + c
        a.append(c)
        gc="-".join(a)  # e.g. XXXXXXXX-<epoch><nnn>-XXXXXXXX
        global_cookie=cookielib.Cookie(version=0, name='global_cookie', value=gc,
            port=None, port_specified=False, domain='.soufun.com',
            domain_specified=False, domain_initial_dot=True, path='/',
            path_specified=True, secure=False, expires=None, discard=True,
            comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False)
        self.br._ua_handlers['_cookies'].cookiejar.set_cookie(global_cookie)
        uc="U_%s"%gc
        unique_cookie=cookielib.Cookie(version=0, name='unique_cookie', value=uc,
            port=None, port_specified=False, domain='.soufun.com',
            domain_specified=False, domain_initial_dot=True, path='/',
            path_specified=True, secure=False, expires=None, discard=True,
            comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False)
        self.br._ua_handlers['_cookies'].cookiejar.set_cookie(unique_cookie)
        jiatxShopWindow=cookielib.Cookie(version=0, name='jiatxShopWindow', value="1",
            port=None, port_specified=False, domain='.soufun.com',
            domain_specified=False, domain_initial_dot=True, path='/',
            path_specified=True, secure=False, expires=None, discard=True,
            comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False)
        self.br._ua_handlers['_cookies'].cookiejar.set_cookie(jiatxShopWindow)
        mmovenum_cookie=cookielib.Cookie(version=0, name='mmovenum_cookie', value="1",
            port=None, port_specified=False, domain='.soufun.com',
            domain_specified=False, domain_initial_dot=True, path='/',
            path_specified=True, secure=False, expires=None, discard=True,
            comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False)
        self.br._ua_handlers['_cookies'].cookiejar.set_cookie(mmovenum_cookie)
        # (commented-out agent_city / citys cookies removed for clarity)
        # NOTE(review): agent_agentemail is built but its set_cookie() call is
        # commented out below, so this cookie is never actually installed.
        agent_agentemail=cookielib.Cookie(version=0, name='agent_agentemail', value=self.uname,
            port=None, port_specified=False, domain='soufun.com',
            domain_specified=False, domain_initial_dot=False, path='/',
            path_specified=True, secure=False, expires=None, discard=True,
            comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False)
        # self.br._ua_handlers['_cookies'].cookiejar.set_cookie(agent_agentemail)
        # self.br.open("http://agents.soufun.com/magent/DealSeparateLogin.aspx")

    def goLogin(self):
        # Log in through the JSON login endpoint; on success the response's
        # url1 points at the agent main page. Returns an error string on
        # failure, None on success.
        url='''http://esf.%s.soufun.com/newsecond/include/DefaultUserLoginNew.aspx?method=login&name=%s&pwd=%s'''%(self.area,self.uname,self.upwd)
        page=self.br.open(url).read()
        sts=sj.loads(page)
        print sts["url2"]
        print sts["url1"]
        if "http://agents.soufun.com/magent/main.aspx" !=sts["url1"]:
            return "loginerror|登陆有误"
        self.getUqGlCode()

    def makeUploadUrl(self,FName, fiName, cutype, city, isNorth, bakurl):
        # Rebuild the image-upload action URL that the page's UploadPic()
        # JavaScript would compute; 'Y' selects the northern upload host.
        sid=random.randint(0000000,999999999)
        action=""
        if isNorth == "Y":
            action = 'http://img1nu.soufun.com/upload/agents/houseinfo2?channel=agent.houseinfo&city=%s&kind=houseinfo&sid=%s&backurl=%s&type=%s&drawtext='%(city,sid,bakurl ,cutype)
        else:
            action = 'http://img1u.soufun.com/upload/agents/houseinfo2?channel=agent.houseinfo&city=%s&kind=houseinfo&sid=%s&backurl=%s&type=%s&drawtext='%(city,sid,bakurl,cutype )
        # NOTE(review): urllib.quote("city") quotes the literal string "city",
        # not the city variable -- looks like a bug, preserved as-is.
        citys=cookielib.Cookie(version=0, name='citys', value=urllib.quote("city"),
            port=None, port_specified=False, domain='.soufun.com',
            domain_specified=False, domain_initial_dot=True, path='/',
            path_specified=True, secure=False, expires=None, discard=True,
            comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False)
        self.br._ua_handlers['_cookies'].cookiejar.set_cookie(citys)
        agent_city=cookielib.Cookie(version=0, name='agent_city', value=urllib.quote("city"),
            port=None, port_specified=False, domain='.soufun.com',
            domain_specified=False, domain_initial_dot=True, path='/',
            path_specified=True, secure=False, expires=None, discard=True,
            comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False)
        self.br._ua_handlers['_cookies'].cookiejar.set_cookie(agent_city)
        return action

    def goSalePage(self):
        # Warm up the session by visiting the sale-input pages (sets the
        # server-side state the publish POST later relies on).
        url="http://agents.soufun.com/magent/main.aspx?p=/magent/house/sale/saleinput.aspx"
        self.br.open(url)
        url="http://agents.soufun.com/magent/Manager.aspx?p=/magent/house/sale/saleinput.aspx"
        self.br.open(url)
        # (commented-out cookie-jar debug dump removed)

    def doUpLoadPics(self):
        # Upload the three picture categories through the hidden upload form
        # (form nr=0) and collect the txtImage_*/inpUrlsExtend_* form fields
        # the publish POST must carry. Image field name pattern:
        # *_3_* = floor plan, *_1_* = interior, *_2_* = estate picture.
        cpics=[]
        # The upload response calls UploadImageForOne('<img-url>','<w,h>','',...)
        pixr='''UploadImageForOne\('(.*)','(.*)','','''
        if len(self.house_drawing)>0:
            for img in self.house_drawing:
                self.countid=self.countid+1
                self.br.select_form(nr=0)
                self.br.form.add_file(open(self.picRoot+img,"rb"), mimetypes.guess_type(self.picRoot+img)[0], os.path.basename(img), name='Hfile', id='Hfile')
                self.br.form.set_all_readonly(False)
                self.br.form.fixup()
                #self.br.set_debug_http(True)
                self.br.form.action=self.Hfileaction
                self.br.form.enctype="multipart/form-data"
                self.br.submit()
                resp=self.br.response().read()
                if re.search(pixr,resp):
                    img=re.search(pixr,resp).group(1)
                    wh=re.search(pixr,resp).group(2)
                    #print resp.decode("GB18030")
                    cpics.append(("txtImageDes_3_%s"%self.countid,"户型图".encode("GB18030")))
                    cpics.append(("txtImage_3_%s"%self.countid,img))
                    cpics.append(("inpUrlsExtend_3_%s"%self.countid,wh))
                    cpics.append(("inputIsProj_3_%s"%self.countid,"undefined"))
                    cpics.append(("inputOrderIndex_3_%s"%self.countid,self.countid))
                self.br.back(1)
        if len(self.house_thumb)>0:
            # NOTE(review): iterates self.house_drawing although the guard
            # checks house_thumb -- looks like a copy-paste bug; confirm intent.
            for img in self.house_drawing:
                self.countid=self.countid+1
                self.br.select_form(nr=0)
                self.br.form.add_file(open(self.picRoot+img,"rb"), mimetypes.guess_type(self.picRoot+img)[0], os.path.basename(img), name='Hfile', id='Hfile')
                self.br.form.set_all_readonly(False)
                self.br.form.fixup()
                self.br.set_debug_http(True)
                self.br.form.action=self.Sfileonchange
                self.br.form.enctype="multipart/form-data"
                self.br.submit()
                resp=self.br.response().read()
                if re.search(pixr,resp):
                    img=re.search(pixr,resp).group(1)
                    wh=re.search(pixr,resp).group(2)
                    #print resp.decode("GB18030")
                    cpics.append(("txtImageDes_1_%s"%self.countid,"室内图".encode("GB18030")))
                    cpics.append(("txtImage_1_%s"%self.countid,img))
                    cpics.append(("inpUrlsExtend_1_%s"%self.countid,wh))
                    cpics.append(("inputIsProj_1_%s"%self.countid,"undefined"))
                    cpics.append(("inputOrderIndex_1_%s"%self.countid,self.countid))
                self.br.back(1)
        if len(self.house_xqpic)>0:
            # NOTE(review): same copy-paste issue -- iterates house_drawing.
            for img in self.house_drawing:
                self.countid=self.countid+1
                self.br.select_form(nr=0)
                self.br.form.add_file(open(self.picRoot+img,"rb"), mimetypes.guess_type(self.picRoot+img)[0], os.path.basename(img), name='Hfile', id='Hfile')
                self.br.form.set_all_readonly(False)
                self.br.form.fixup()
                self.br.set_debug_http(True)
                self.br.form.action=self.Xfileonchange
                self.br.form.enctype="multipart/form-data"
                self.br.submit()
                resp=self.br.response().read()
                if re.search(pixr,resp):
                    img=re.search(pixr,resp).group(1)
                    wh=re.search(pixr,resp).group(2)
                    #print resp.decode("GB18030")
                    cpics.append(("txtImageDes_2_%s"%self.countid,"小区相关图".encode("GB18030")))
                    cpics.append(("txtImage_2_%s"%self.countid,img))
                    cpics.append(("inpUrlsExtend_2_%s"%self.countid,wh))
                    cpics.append(("inputIsProj_2_%s"%self.countid,"undefined"))
                    cpics.append(("inputOrderIndex_2_%s"%self.countid,self.countid))
                self.br.back(1)
        self.customimages.extend(cpics)

    # (a fully commented-out setDefaultValues() helper -- estate lookup and
    # default estate images -- was removed here for clarity; goPublishPage
    # still carries the disabled call site.)

    def initCitys(self,page):
        # Parse the per-city District/Area JavaScript file referenced by the
        # sale-input page. Currently only prints the district map; appears to
        # be exploratory/debug code.
        cityreg='''<script type="text/javascript" src="(/magent/js/city/.*City.js.*)"></script>'''
        d1r='''var Districts = \[(.*)\];'''
        d2r='''name:'(.*)',index:(\d+)'''
        d3r='''},{'''
        a1r='''Area\[(\d+)\]=\[(.*)\];'''
        cl=""
        darr={}
        aarr=[]
        if re.search(cityreg,page):
            cl=re.search(cityreg,page).group(1)
        if cl=="":
            raise Exception("rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr")
        resp=self.br.open(cl).read()
        resp=resp.decode("GB18030")
        if re.search(d1r,resp):
            alld=re.search(d1r,resp).group(1)
            alld=alld.split("},{")
            for ld in alld :
                if re.search(d2r,ld):
                    dk=re.search(d2r,ld).group(1)
                    dv=re.search(d2r,ld).group(2)
                    darr[dk]=dv
        print darr
        if re.search(a1r,resp):
            re.search(a1r,resp).groups()

    def getinput_Hid(self,str):
        # Decode the site's obfuscated input_Hid value "<lenA>_<mixed>_<lenB>":
        # the mixed section interleaves two hidden field names character by
        # character; returns them as the (aaa, bbb) pair.
        strArray = str.split('_');
        lengthA = int(strArray[0]);
        lengthB = int(strArray[2]);
        strArray2 = strArray[1];
        aaa = "";
        bbb = "";
        index = 0;
        indexA = 0;
        indexB = 0;
        while index < (lengthA + lengthB):
            if indexA < lengthA:
                indexA+=1
                aaa += strArray2[index:index+1]
                index+=1
            if indexB < lengthB:
                indexB+=1
                bbb += strArray2[index:index+1]
                index+=1
        return( aaa,bbb)

    def goPublishPage(self):
        # Open the sale-input form, upload pictures, assemble the full POST
        # payload (visible fields + anti-bot hidden fields) and submit the
        # listing. Returns "success|<url>" or an error string.
        url='''http://agents.soufun.com/magent/house/sale/saleinput.aspx'''
        resp=self.br.open(url).read()
        pbpage=resp.decode("GB18030").encode("UTF-8")
        #print pbpage
        # Daily publish quota reached?
        if '您今日的发布条数已满' in resp.decode("GB18030").encode("UTF-8"):
            return "maxhouse|您的该账号今日可发房源数已满"
        #self.initCitys(resp)
        # Derive the three upload endpoints from the onchange handlers of the
        # hidden file inputs: UploadPic('<form>','<field>',<type>,'<city>','<isNorth>','<backurl>')
        self.br.select_form(nr=0)
        Hfileonchange=self.br.form.find_control('Hfile').attrs["onchange"]
        Hfileonchange=Hfileonchange.replace("return UploadPic(","").replace(");","").replace("'","")
        Hfileonchange=Hfileonchange.split(",")
        self.Hfileaction=self.makeUploadUrl(Hfileonchange[0],Hfileonchange[1],Hfileonchange[2],Hfileonchange[3],Hfileonchange[4],Hfileonchange[5],)
        #print self.Hfileaction
        Sfileonchange=self.br.form.find_control('Sfile').attrs["onchange"]
        Sfileonchange=Sfileonchange.replace("return UploadPic(","").replace(");","").replace("'","")
        Sfileonchange=Sfileonchange.split(",")
        self.Sfileonchange=self.makeUploadUrl(Sfileonchange[0],Sfileonchange[1],Sfileonchange[2],Sfileonchange[3],Sfileonchange[4],Sfileonchange[5],)
        #print self.Sfileonchange
        Xfileonchange=self.br.form.find_control('Xfile').attrs["onchange"]
        Xfileonchange=Xfileonchange.replace("return UploadPic(","").replace(");","").replace("'","")
        Xfileonchange=Xfileonchange.split(",")
        self.Xfileonchange=self.makeUploadUrl(Xfileonchange[0],Xfileonchange[1],Xfileonchange[2],Xfileonchange[3],Xfileonchange[4],Xfileonchange[5],)
        #print self.Xfileonchange
        self.doUpLoadPics()
        #self.setDefaultValues()
        #self.br.back(self.countid)
        self.br.select_form( nr=0)
        # (a large block of commented-out new_control/debug experiments and two
        # huge commented-out sample POST payloads were removed here for clarity)
        for d in self.subdict.items():
            print d[0].decode("GB18030"),d[1].decode("GB18030")
        print "*"*60
        print "="*60
        # Read defaults the page renders into <div> placeholders.
        input_y_str_COMPANYNAME=PyQuery(pbpage)("div#input_y_str_COMPANYNAME").attr("value")
        input_y_str_PRICETYPE=PyQuery(pbpage)("div#input_y_str_PRICETYPE").attr("value")
        # Hard-coded listing payload; all text is GB18030-encoded because the
        # site expects that charset. TODO(review): most values are test data
        # rather than the fields from self.pdb.
        d=[
            ("input_y_str_PROJNAME","淀山假期花园".decode("UTF-8").encode("GB18030")),  # estate name
            ("input_y_str_ADDRESS","时的发生地方".decode("UTF-8").encode("GB18030")),
            ("input_y_str_DISTRICT0","淀山湖镇".decode("UTF-8").encode("GB18030")),  # district
            ("input_y_str_COMAREA0","淀山湖".decode("UTF-8").encode("GB18030")),  # business area
            ("str_infocode",""),
            ("str_innerid",""),
            ("input_str_PropertySubType",""),
            ("input_str_PropertySubType","普通住宅".decode("UTF-8").encode("GB18030")),
            ("input_y_str_PAYINFO0","个人产权".decode("UTF-8").encode("GB18030")),
            ("input_y_str_PAYINFO","个人产权".decode("UTF-8").encode("GB18030")),  # for rentals this holds the payment method
            ("input_y_num_PRICE","100"),
            ("input_ROOM","2"),
            ("input_HALL","1"),
            ("input_TOILET","1"),
            ("input_KITCHEN","1"),
            ("input_BALCONY","1"),
            ("input_str_HouseStructure","平层".decode("UTF-8").encode("GB18030")),
            ("input_str_BuildingType","板楼".decode("UTF-8").encode("GB18030")),
            ("6b5e2e9binput9d54","80"),
            ("input_y_num_LIVEAREA","75"),
            ("input_n_str_CREATETIME","2008"),
            ("input_FLOOR","1"),
            ("input_ALLFLOOR","12"),
            ("input_n_str_FORWARD","南".decode("UTF-8").encode("GB18030")),
            ("input_n_str_FITMENT","精装修".decode("UTF-8").encode("GB18030")),
            ("input_n_str_BASESERVICE","煤气/天然气".decode("UTF-8").encode("GB18030")),
            ("input_n_str_BASESERVICE","暖气".decode("UTF-8").encode("GB18030")),
            ("input_n_str_BASESERVICE","电梯".decode("UTF-8").encode("GB18030")),
            ("input_n_str_BASESERVICE","车位/车库".decode("UTF-8").encode("GB18030")),
            ("input_n_str_BASESERVICE","储藏室/地下室".decode("UTF-8").encode("GB18030")),
            ("input_n_str_BASESERVICE","花园/小院".decode("UTF-8").encode("GB18030")),
            ("input_n_str_BASESERVICE","露台".decode("UTF-8").encode("GB18030")),
            ("input_n_str_BASESERVICE","阁楼".decode("UTF-8").encode("GB18030")),
            ("input_n_str_LOOKHOUSE","随时看房".decode("UTF-8").encode("GB18030")),
            ("5fed3002input40d5","1111111111".encode("GB18030")),  # listing title
            ("input_n_str_CONTENT","22222222222222222".encode("GB18030")),  # listing description
            ("input_n_str_TRAFFICINFO",""),  # transport info
            ("input_n_str_SUBWAYINFO",""),  # surroundings info
            ("Hfile",""),
            ("Sfile",""),
            ("Xfile",""),
            ("inputdrawtext",""),
            ("hdHouseDicCity","0"),
            ("inputT","2011-07-04+04:26:35"),
            ("input_y_str_DISTRICT","淀山湖镇".decode("UTF-8").encode("GB18030")),
            ("input_y_str_COMAREA","淀山湖".decode("UTF-8").encode("GB18030")),
            ("input_y_str_PURPOSE","住宅".decode("UTF-8").encode("GB18030")),
            ("imageCount",self.countid),
            ("coverPhoto",""),
            ("newcode",""),
            ("tempprojimgs",""),
            ("newprojimgs",""),
            ("UseProjImage",""),
            ("input_y_str_MANAGERNAME","changna19880422"),
            ("input_y_str_BUSINESSTYPE","CS"),  # CZ for rentals
            ("input_y_str_COMPANYNAME",input_y_str_COMPANYNAME),
            ("input_y_str_PRICETYPE",input_y_str_PRICETYPE),
            ("hiddenProjname",""),
            ("hdUseMode",""),
            ("guidCode",""),
            ("input_DelegateIDAndAgentID","0"),
            ("input_draftsID","0"),
        ]
        d.extend(self.customimages)
        postkeys=[ c[0] for c in d ]
        # The page hides extra required fields in fake <inpt> elements.
        ppp=PyQuery(pbpage)("inpt")
        for ipt in ppp:
            if (PyQuery(ipt).attr("name")!="" ) and (PyQuery(ipt).attr("value")!=""):
                print PyQuery(ipt).attr("name"), PyQuery(ipt).attr("value")
                #d.append((PyQuery(ipt).attr("name"), PyQuery(ipt).attr("value")))
        if re.search('''<inpt name="(.*)" value="(.*)"></inpt>''', pbpage):
            hdname=re.search('''<inpt name="(.*)" value="(.*)"></inpt>''', pbpage).group(1)
            hdvalue=re.search('''<inpt name="(.*)" value="(.*)"></inpt>''', pbpage).group(2)
            d.append((hdname,hdvalue))
        print "+"*60
        # Forward every remaining form control as-is; decode the obfuscated
        # input_Hid pair via getinput_Hid().
        for ctrl in self.br.form.controls:
            if ctrl.name==None:
                continue
            if ctrl.name=="input_Hid":
                hid=self.getinput_Hid(ctrl.value)
                d.append(hid)
            if not ctrl.name in postkeys:
                print ctrl.name,ctrl.value
                d.append((ctrl.name,ctrl.value==None and "" or ctrl.value))
        for dd in d:
            print dd
        data=urllib.urlencode(d)
        print data
        self.br.addheaders = [("Referer", "http://agents.soufun.com/magent/house/sale/saleinput.aspx")]
        resp=self.br.open("http://agents.soufun.com/MAgent/house/InputSave.aspx?flag=2",data)
        respage=resp.read().decode("GB18030")
        print respage
        # Success is signalled by a JS redirect; errors come back as alert().
        if "window.location.replace" in respage :
            if re.search('''window\.location\.replace\('(.*)'\);''',respage):
                sts=re.search('''window\.location\.replace\('(.*)'\);''',respage).group(1)
                return "success|%s"%sts
            else:
                return "puterror3|%s"%"无正确相应结果"
        else:
            if re.search('''alert\('(.*)'\)''',respage):
                sts=re.search('''alert\('(.*)'\)''',respage).group(1)
                return "puterror4|%s"%sts
            else:
                return respage


def makePostData(dict):
    # Join a dict into "&k=v&k=v" form (unused here).
    # NOTE(review): the parameter shadows the builtin `dict`.
    params=""
    for item in dict.items():
        params+="&%s=%s"%(item[0],item[1])
    return params;


def Publish(d):
    # Entry point: run the full login/upload/publish flow for parameter
    # dict d; any exception is flattened into its repr() string.
    try:
        br=SouFangbrowser(d)
        br.goLogin()
        br.goSalePage()
        sts=br.goPublishPage()
        return sts
    except Exception,e:
        return "%s"%repr(e)


if __name__=="__main__":
    # Sample listing used for manual testing.
    p={
        "username":"changna19880422",#"changna19880422",
        "passwd":"19880422",#201106
        "webid":"8",
        "citycode":"ks",
        "broker_id":"1111",
        "house_title":"昆山花园",
        "city":"2",
        "cityarea_id":"1411",
        "borough_section":"5910",
        "house_type":"3",
        "house_toward":"1",
        "house_fitment":"2",
        "house_kind":"2",
        "house_deposit":"1",
        "belong":"1",
        "house_price":"500",
        "house_totalarea":"120",
        "house_room":"3",
        "house_hall":"2",
        "house_toilet":"1",
        "house_topfloor":"6",
        "house_floor":"2",
        "house_age":"10",
        "house_desc":'''昆山花园昆山花园昆山花园昆山花园''',
        "borough_id":"10",
        "borough_name":"昆山花园",
        "house_drawing":"d:/111.jpg|d:/111.jpg",
        "house_thumb":"",
        "house_xqpic":"",
        #========================
        "mobile":"111",
        "contact":"苏大生",
    }
    print Publish(p)
# coding: UTF-8
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

from .menu_interface import MenuInterface
from scenes.elements import (
    GameDiv,
    GameTextElement,
    ButtonGroup,
)
from consts.i18n import (
    RESTART,
    EXIT,
    DRAW_MESSAGE,
    BLACK_WINS_MESSAGE,
    WHITE_WINS_MESSAGE,
    QUIT,
)
import scenes.chess


class EndMenuInterface(MenuInterface):
    """End-of-game overlay: shows the result banner and the
    restart / exit / quit buttons on top of the finished board."""

    def interface(self):
        """Build and return the end-menu scene graph (a GameDiv tree).

        Calls the base MenuInterface.interface() first so the fonts/colors
        referenced below (title_font, menu_font, main_menu_color, ...) are
        set up -- presumably by the base class; confirm in menu_interface.
        Positions are lambdas so they re-evaluate on window resize.
        """
        MenuInterface.interface(self)
        return GameDiv(name="main_div", children=[
            # Full-screen translucent black backdrop dimming the board.
            ButtonGroup(
                color=(0, 0, 0, 128),
                radius=0,
                define_rect=lambda: [0, 0, self.game.width, self.game.height],
            ),
            # Result banner near the top; exactly one of the three texts is
            # shown, selected by the chess state condition.
            GameDiv(
                x=lambda: self.game.center_x(),
                y=lambda: self.game.relative_y(0.15),
                children=[
                    GameTextElement(
                        font=self.title_font,
                        text=DRAW_MESSAGE,
                        antialias=True,
                        color=(30, 144, 255),
                        style="outline",
                        other_color=(255, 255, 255),
                        condition=(lambda: self.chess.state ==
                                   scenes.chess.GAME_DRAW)
                    ),
                    GameTextElement(
                        font=self.title_font,
                        text=BLACK_WINS_MESSAGE,
                        antialias=True,
                        color=(50, 50, 50),
                        style="outline",
                        other_color=(255, 255, 255),
                        condition=(lambda: self.chess.state ==
                                   scenes.chess.BLACK_WINS)
                    ),
                    GameTextElement(
                        font=self.title_font,
                        text=WHITE_WINS_MESSAGE,
                        antialias=True,
                        color=(255, 255, 255),
                        style="outline",
                        other_color=(50, 50, 50),
                        condition=(lambda: self.chess.state ==
                                   scenes.chess.WHITE_WINS)
                    ),
                ]
            ),
            # Central menu: restart above exit, stacked vertically.
            ButtonGroup(
                color=self.main_menu_color,
                padding=10,
                children=[
                    GameTextElement(
                        name="restart",
                        font=self.menu_font,
                        text=RESTART,
                        antialias=True,
                        color=self.button_color,
                        x=lambda: self.game.center_x(),
                        y=lambda: self.game.relative_y(0.45),
                        click=self.restart_click,
                        motion=self.motion,
                    ),
                    GameTextElement(
                        name="exit",
                        font=self.menu_font,
                        text=EXIT,
                        antialias=True,
                        color=self.button_color,
                        x=lambda: self.game.center_x(),
                        # One line below "restart": half a font height plus a
                        # small width-relative gap.
                        y=lambda: (self.game.relative_y(0.45) +
                                   self.menu_font.size // 2 +
                                   self.game.relative_x(0.04)),
                        click=self.exit_click,
                        motion=self.motion,
                    ),
                ]
            ),
            # Small "quit" button pinned to the bottom-right corner.
            ButtonGroup(
                color=self.button_background,
                padding=5,
                radius=0.3,
                children=[
                    GameTextElement(
                        name="quit",
                        font=self.menu_font,
                        text=QUIT,
                        antialias=True,
                        color=self.button_color,
                        x=lambda: self.game.relative_x(0.91),
                        y=lambda: self.game.relative_y(0.92),
                        click=self.quit_click,
                        motion=self.motion,
                    ),
                ]
            ),
        ])
#Aula 2 - Variaveis, tipos, entradas, saidas e operadores matematicos # Formulario Basico Python3 '''Fassa um formulário perguntando Nome, CPF, Idade, Altura, Endereço, Contato''' print ('Preencha o formulário a seguir\n\n\n') nome = input('Qual seu nome?\nR: ') cpf = input('Qual o seu CPF?\nR: ') idade = input('Qual a sua idade?\nR: ') altura = input('Qual a sua altura?\nR: ') endereco = input('Qual o seu endereço?\nR: ') contato = input('Qual o seu número?\nR: ') print("Printagem:") print('Nome:', nome) print('CPF:', cpf) print('Idade:', idade, 'anos') print('Altura:', altura, 'metros') print('Endereço:', endereco) print('Número:', contato)
#from colorama import init #init() from colorama import Fore print(Fore.BLUE + "Something in Blue") print(Fore.RED + "Something in Red")
from .networks import get_network
# Generated by Django 2.0.5 on 2018-09-21 17:37 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('contract_scanner', '0008_contract_auction'), ] operations = [ migrations.AddField( model_name='contract', name='region_id', field=models.BigIntegerField(db_index=True, default=0), preserve_default=False, ), ]
#!/usr/bin/python2.4 # Copyright 2008 The Native Client Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Program to parse hammer output and sort tests by how long they took to run. The output should be from the CommandTest in SConstruct. """ import getopt import re import sys class Analyzer(object): """Basic run time analysis (sorting, so far).""" def __init__(self): self._data = [] # list of (time, test-name) tuples def AddData(self, execution_time, test_name, mode): self._data.append((execution_time, test_name, mode)) def Sort(self): self._data.sort(None, lambda x: x[0], True) def Top(self, n): return self._data[:n] def TrimTestName(name): s = '/scons-out/' ix = name.find(s) if ix < 0: return name[ix + len(s):] return name def Usage(): print >>sys.stderr, 'Usage: test_timing [-n top-n-to-print]' def main(argv): top_n = 10 try: optlist, argv = getopt.getopt(argv[1:], 'n:') except getopt.error, e: print >>sys.stderr, str(e) Usage() return 1 for opt, val in optlist: if opt == '-n': try: top_n = int(val) except ValueError: print >>sys.stderr, 'test_timing: -n arg should be an integer' Usage() return 1 mode = 'Unknown' mode_nfa = re.compile(r'^running.*scons-out/((opt|dbg)-linux)') nfa = re.compile(r'^Test (.*) took ([.0-9]*) secs') analyzer = Analyzer() for line in sys.stdin: mobj = mode_nfa.match(line) if mobj is not None: mode = mobj.group(1) continue mobj = nfa.match(line) if mobj is not None: analyzer.AddData(float(mobj.group(2)), mobj.group(1), mode) analyzer.Sort() print '%-12s %-9s %s' % ('Time', 'Mode', 'Test Name') print '%-12s %-9s %s' % (12*'-', 9*'-', '---------') for time, name, mode in analyzer.Top(top_n): print '%12.8f %-9s %s' % (time, mode, TrimTestName(name)) return 0 if __name__ == '__main__': sys.exit(main(sys.argv))
from utilities.math import Vector2, _pyi_Vector2_type, _pyi_Vector2_item_type
from constants import MOUSE_MODE_CONFINED, MOUSE_MODE_CAPTURED
from pygame import mouse as _mouse, event, display
from pygame.cursors import Cursor
from typing import List, Union
from types import ModuleType
import sys

# Whether the "captured" mouse mode is active. pygame has no native captured
# mode, so it is emulated in Mouse.flip() by re-centering the cursor.
_captured = False


class _posVector2(Vector2):
    """Vector2 view over the live mouse position.

    Component reads query pygame directly; component writes warp the cursor.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    @property
    def x(self):
        return _mouse.get_pos()[0]

    @x.setter
    def x(self, value):
        # Keep the current y and only move horizontally
        _mouse.set_pos(value, self.y)

    @property
    def y(self):
        return _mouse.get_pos()[1]

    @y.setter
    def y(self, value):
        _mouse.set_pos(self.x, value)

    def copy(self):
        # Return a detached, plain Vector2 snapshot of the current position
        return Vector2(self)


class _relVector2(Vector2):
    """Vector2 view over relative mouse movement since the last query.

    NOTE(review): each component read calls _mouse.get_rel(), which resets
    pygame's relative-motion accumulator -- reading .x then .y reports two
    different movement deltas. Confirm whether that is intended; component
    writes are deliberately ignored.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    @property
    def x(self):
        return _mouse.get_rel()[0]

    @x.setter
    def x(self, value):
        pass

    @property
    def y(self):
        return _mouse.get_rel()[1]

    @y.setter
    def y(self, value):
        pass

    def copy(self):
        return Vector2(self)


# Shared live-view instances returned by the Mouse properties below
position = _posVector2()
relative = _relVector2()


class Mouse(ModuleType):
    """Module-replacement class exposing the mouse as live properties.

    An instance of this class is installed into sys.modules at the bottom of
    the file, so importing this module yields an object whose `position`,
    `cursor`, `visible`, `mode`, ... attributes read/write pygame state.
    """

    @property
    def position(self) -> Vector2:
        # Refresh the shared view in place before handing it out
        position[:] = _mouse.get_pos()
        return position

    @position.setter
    def position(self, value: _pyi_Vector2_type):
        position.x, position.y = value

    @property
    def cursor(self) -> Union[Cursor]:
        return _mouse.get_cursor()

    @cursor.setter
    def cursor(self, value: Union[Cursor]):
        _mouse.set_cursor(value)

    @property
    def visible(self) -> bool:
        return _mouse.get_visible()

    @visible.setter
    def visible(self, value: bool):
        _mouse.set_visible(value)

    @property
    def mode(self) -> int:
        # Bit flags: CONFINED mirrors pygame's input grab; CAPTURED is the
        # module-level emulated flag
        return (MOUSE_MODE_CONFINED if event.get_grab() else 0) | (MOUSE_MODE_CAPTURED if _captured else 0)

    @mode.setter
    def mode(self, value: int):
        global _captured
        event.set_grab(bool(value & MOUSE_MODE_CONFINED))
        _captured = bool(value & MOUSE_MODE_CAPTURED)

    @property
    def relative(self) -> Vector2:
        relative[:] = _mouse.get_rel()
        return relative

    @property
    def focused(self):
        return bool(_mouse.get_focused())

    @property
    def left_pressed(self) -> bool:
        return _mouse.get_pressed()[0]

    @property
    def middle_pressed(self) -> bool:
        return _mouse.get_pressed()[1]

    @property
    def right_pressed(self) -> bool:
        return _mouse.get_pressed()[2]

    def __getitem__(self, item: Union[slice, int]) -> Union[List[Union[float, int]], float, int]:
        # Indexing the module indexes the current mouse position
        return self.position[item]

    def __setitem__(self, key: Union[slice, int], value: _pyi_Vector2_item_type):
        self.position.__setitem__(key, value)

    def flip(self):
        """Per-frame hook: while captured and focused, re-center the cursor
        so relative motion keeps accumulating without hitting window edges."""
        if self.focused and _captured:
            size = display.get_window_size()
            self.position = size[0] / 2, size[1] / 2


# Replace this module object in sys.modules with a Mouse instance so that
# module-level attribute access goes through the properties above.
sys.modules[__name__] = Mouse(__name__)
MAX_FILE_SIZE = 150000000 # 150Mb S3_UPLOADS_BUCKET = "pollingstations-uploads-dev"
import re from pylatex import NoEscape, Package from pylatex.base_classes import Environment, ContainerCommand class LatexSection(Environment): """This class is generic and allow the creation of any section like .. code-block:: latex \begin{name} Some content \end{name} The name is provided to the constructor. No additional package will be added to the list of packages. """ def __init__(self, name, **kwargs): self._latex_name = name super().__init__(**kwargs) class HRef(ContainerCommand): """A class that represents an hyperlink to a web address.""" _repr_attributes_mapping = { 'marker': 'options', 'text': 'arguments', } packages = [Package('hyperref')] def __init__(self, url, text=None): """ Args ---- url: str The url to use. text: The text that will be shown as a link to the url. Use the url if not set """ self.url = url if text is None: text = url super().__init__(arguments=NoEscape(url), data=text) def add_text(doc, t): """ Generic function to add text to a pylatex document. Split the provided text to escape latex commands and then add to the container. """ current_pos = 0 for m in re.finditer(r'latex::(.+?)::', t): doc.append(t[current_pos: m.start()]) doc.append(NoEscape(' ' + m.group(1) + ' ')) current_pos = m.end() doc.append(t[current_pos:]) return doc
from enum import Enum from when_ml_pipeline_meets_hydra.api.deployment import bar, foo class DeploymentSubCommand(Enum): FOO = "foo" BAR = "bar" def _print_help(): subs = [sub.value for sub in DeploymentSubCommand] print(f"Please add 'c/deployment_sub={subs}' to your command!") def process_deployment_command(cfg): try: sub = cfg.deployment_sub.name if sub == DeploymentSubCommand.FOO.value: foo(cluster_info=cfg.cluster) elif sub == DeploymentSubCommand.BAR.value: bar(cluster_info=cfg.cluster) else: _print_help() except KeyError: _print_help()
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc import calculator_service_pb2 as calculator__service__pb2 class CalculatorStub(object): """Missing associated documentation comment in .proto file""" def __init__(self, channel): """Constructor. Args: channel: A grpc.Channel. """ self.Compute = channel.unary_unary( '/calculator.proto.Calculator/Compute', request_serializer=calculator__service__pb2.ComputeRequest.SerializeToString, response_deserializer=calculator__service__pb2.ComputeResponse.FromString, ) class CalculatorServicer(object): """Missing associated documentation comment in .proto file""" def Compute(self, request, context): """Missing associated documentation comment in .proto file""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def add_CalculatorServicer_to_server(servicer, server): rpc_method_handlers = { 'Compute': grpc.unary_unary_rpc_method_handler( servicer.Compute, request_deserializer=calculator__service__pb2.ComputeRequest.FromString, response_serializer=calculator__service__pb2.ComputeResponse.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'calculator.proto.Calculator', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) # This class is part of an EXPERIMENTAL API. class Calculator(object): """Missing associated documentation comment in .proto file""" @staticmethod def Compute(request, target, options=(), channel_credentials=None, call_credentials=None, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_unary(request, target, '/calculator.proto.Calculator/Compute', calculator__service__pb2.ComputeRequest.SerializeToString, calculator__service__pb2.ComputeResponse.FromString, options, channel_credentials, call_credentials, compression, wait_for_ready, timeout, metadata)
import FWCore.ParameterSet.Config as cms # AlCaReco for muon based alignment using beam-halo muons in the CSC overlap regions OutALCARECOMuAlBeamHalo_noDrop = cms.PSet( SelectEvents = cms.untracked.PSet( SelectEvents = cms.vstring('pathALCARECOMuAlBeamHalo') ), outputCommands = cms.untracked.vstring( 'keep *_ALCARECOMuAlBeamHalo_*_*', 'keep *_muonCSCDigis_*_*', 'keep *_csc2DRecHits_*_*', 'keep *_cscSegments_*_*', 'keep L1AcceptBunchCrossings_*_*_*', 'keep L1GlobalTriggerReadoutRecord_gtDigis_*_*', 'keep *_TriggerResults_*_*', 'keep DcsStatuss_scalersRawToDigi_*_*') ) import copy OutALCARECOMuAlBeamHalo = copy.deepcopy(OutALCARECOMuAlBeamHalo_noDrop) OutALCARECOMuAlBeamHalo.outputCommands.insert(0, "drop *")
import os def get_full_path(filename): return os.path.join(os.path.dirname(os.path.realpath(__file__)), filename) if filename else ''
# Import libraries
from tbwrapper import *
import json
import random
import time

## Python Wrapper Tests - Out of ThinksBoards.io Package Scope

#############################################################################
#---------------------------------------------------------------------------#
# CREATE A DEVICE INSTANCE with a DEVICE ID                                 #
# hint: used when you already know what device you want your data from      #
#---------------------------------------------------------------------------#
#deviceOne = startDeviceInstance('9496e830-2d0e-11e8-801f-efa7ab79a01b')
#----------------------------------------------------------------------------
#print deviceOne
#print deviceOne.getDataKeys()
#print deviceOne.getDataValue('Active Energy')
#---------------------------------------------------------------------------#
# Get values for a specific interval                                        #
#---------------------------------------------------------------------------#
#key = 'Active Energy'
#fromDateTime = '2018-06-01,11:26'
#toDateTime = '2018-06-18,12:30'
#agg = 'AVG'
#print deviceOne.getDataValuesInterval(key,fromDateTime,toDateTime,agg)
#---------------------------------------------------------------------------#
#############################################################################

#############################################################################
#---------------------------------------------------------------------------#
# CREATE ALL DEVICES INSTANCES                                              #
# hint: used when you dont know what device you want your data from         #
#---------------------------------------------------------------------------#
# Fetch wrappers for every device known to the ThingsBoard instance
deviceList = startAllDevices()
# for i in deviceList:
#     print i.id, i.token,i.entityType, i.name, i.dataKeys, i.attributes
#print searchDataValueForKey(deviceList,'Prima Power','Active Energy')
#print searchDataValueForKey(deviceList,'Prima Power','Active Power')
#---------------------------------------------------------------------------#
# Get values for a specific interval                                        #
#---------------------------------------------------------------------------#
#print searchDataIntervalForKey(deviceList,'Prima Power','Active Energy','2018-06-01,11:26',\
#      '2018-06-18,12:30','AVG')
#---------------------------------------------------------------------------#
#############################################################################

#############################################################################
#---------------------------------------------------------------------------#
# Post values to ThinksBoard Device 0 - example                             #
#---------------------------------------------------------------------------#
#print searchDataValueForKey(deviceList, 'activePower')
#time.sleep(2222)
# deviceName = 'TESTE'
# deviceIndex = getDeviceIndex(deviceList, deviceName)
# print deviceIndex

# Telemetry publishing loop: posts random activePower/activeEnergy samples
# to the first device every 5 seconds. NOTE: runs forever (Ctrl-C to stop).
while True:
    activePower = random.randint(5,10)
    activeEnergy = random.randint(10,20)
    # Values are serialized as strings inside a JSON payload
    data = json.dumps({'activePower':str(activePower), 'activeEnergy':str(activeEnergy)})
    response = deviceList[0].postDataValues(data)
    print deviceList[0]
    #print searchDataValueForKey(deviceList,deviceName, 'activeEnergy')
    time.sleep(5)
#---------------------------------------------------------------------------#
#############################################################################
GENERAL_ACKNOWLEDGEMENTS = { "positive": ["Sounds cool! ", "Great! ", "Wonderful! ", "Cool!", "Nice!"], "neutral": ["Okay. ", "Oh. ", "Huh. ", "Well. ", "Gotcha. ", "Aha. "], "negative": ["Hmm... ", "I see.", "That's okay.", "Okay."], }
#! /usr/bin/env python
"""Author: Scott Staniewicz
Functions to assist input and output of SAR data
Email: scott.stanie@utexas.edu
"""
import collections
import glob
import math
import os
import pprint
import re
import sys
import numpy as np
import matplotlib.pyplot as plt
from insar.log import get_log

logger = get_log()

# Explicit little/big-endian dtypes: SAR products are raw binary dumps,
# so byte order must be pinned rather than left to the host machine.
FLOAT_32_LE = np.dtype('<f4')
INT_16_LE = np.dtype('<i2')
INT_16_BE = np.dtype('>i2')

SENTINEL_EXTS = ['.geo', '.cc', '.int', '.amp', '.unw']
UAVSAR_EXTS = ['.int', '.mlc', '.slc', '.amp', '.cor', '.grd']

# Notes: .grd, .mlc can be either real or complex for UAVSAR,
# .amp files are real only for UAVSAR, complex for sentinel processing
# However, we label them as real here since we can tell .amp files
# are from sentinel if there exists .rsc files in the same dir
COMPLEX_EXTS = ['.int', '.slc', '.geo', '.cc', '.unw', '.mlc', '.grd']
REAL_EXTS = ['.amp', '.cor', '.mlc', '.grd']  # NOTE: .cor might only be real for UAVSAR
# Note about UAVSAR Multi-look products:
# Will end with `_ML5X5.grd`, e.g., for 5x5 downsampled

ELEVATION_EXTS = ['.dem', '.hgt']

# These file types are not simple complex matrices: see load_stacked for detail
STACKED_FILES = ['.cc', '.unw']
# For .grd/.mlc, real-vs-complex depends on the polarization in the filename
UAVSAR_POL_DEPENDENT = ['.grd', '.mlc']

REAL_POLS = ('HHHH', 'HVHV', 'VVVV')
COMPLEX_POLS = ('HHHV', 'HHVV', 'HVVV')
POLARIZATIONS = REAL_POLS + COMPLEX_POLS


def get_file_ext(filename):
    """Extracts the file extension, including the '.' (e.g.: .slc)

    Examples:
        >>> print(get_file_ext('radarimage.slc'))
        .slc
        >>> print(get_file_ext('unwrapped.lowpass.unw'))
        .unw
    """
    return os.path.splitext(filename)[1]


def find_files(directory, search_term):
    """Searches for files in `directory` using globbing on search_term

    Path to file is also included.

    Examples:
        >>> open("afakefile.txt", "w").close()
        >>> find_files(".", "*.txt")
        ['./afakefile.txt']
        >>> os.remove("afakefile.txt")
    """
    return glob.glob(os.path.join(directory, search_term))


def load_file(filename, rsc_file=None, ann_info=None, verbose=False):
    """Examines file type for real/complex and runs appropriate load

    Args:
        filename (str): path to the file to open
        rsc_file (str): path to a dem.rsc file (if Sentinel)
        ann_info (dict): data parsed from annotation file (UAVSAR)
        verbose (bool): print extra logging info while loading files

    Returns:
        ndarray: a 2D array of the data from a file

    Raises:
        ValueError: if sentinel files loaded without a .rsc file in same path
            to give the file width
    """

    def _find_rsc_file(filename, verbose=False):
        # Helper: locate a .rsc metadata file next to the data file
        basepath = os.path.split(filename)[0]
        # Should be just elevation.dem.rsc (for .geo folder) or dem.rsc (for igrams)
        possible_rscs = find_files(basepath, '*.rsc')
        if verbose:
            logger.info("Possible rsc files:")
            logger.info(possible_rscs)
        if len(possible_rscs) < 1:
            raise ValueError("{} needs a .rsc file with it for width info.".format(filename))
        return possible_rscs[0]

    ext = get_file_ext(filename)
    # Elevation and rsc files can be immediately loaded without extra data
    if ext in ELEVATION_EXTS:
        return load_elevation(filename)
    elif ext == '.rsc':
        return load_dem_rsc(filename)

    # Sentinel files should have .rsc file: check for dem.rsc, or elevation.rsc
    rsc_data = None
    if rsc_file:
        rsc_data = load_dem_rsc(rsc_file)
    if ext in SENTINEL_EXTS:
        rsc_file = rsc_file if rsc_file else _find_rsc_file(filename)
        rsc_data = load_dem_rsc(rsc_file)
        if verbose:
            logger.info("Loaded rsc_data from %s", rsc_file)
            logger.info(pprint.pformat(rsc_data))

    # UAVSAR files have an annotation file for metadata
    if not ann_info and not rsc_data and ext in UAVSAR_EXTS:
        ann_info = parse_ann_file(filename, verbose=verbose)

    if ext in STACKED_FILES:
        return load_stacked(filename, rsc_data)
    # having rsc_data implies that this is not a UAVSAR file, so is complex
    elif rsc_data or is_complex(filename):
        return load_complex(filename, ann_info=ann_info, rsc_data=rsc_data)
    else:
        return load_real(filename, ann_info=ann_info, rsc_data=rsc_data)


# Make a shorter alias for load_file
load = load_file


def load_elevation(filename):
    """Loads a digital elevation map from either .hgt file or .dem

    .hgt is the NASA SRTM files given. Documentation on format here:
    https://dds.cr.usgs.gov/srtm/version2_1/Documentation/SRTM_Topo.pdf
    Key point: Big-endian 2 byte (16-bit) integers

    .dem is format used by Zebker geo-coded and ROI-PAC SAR software
    Only difference is data is stored little-endian (like other SAR data)

    Note on both formats: gaps in coverage are given by INT_MIN -32768,
    so either manually set data(data == np.min(data)) = 0,
    data = np.clip(data, 0, None), or when plotting, plt.imshow(data, vmin=0)
    """
    ext = get_file_ext(filename)
    data_type = INT_16_LE if ext == '.dem' else INT_16_BE
    data = np.fromfile(filename, dtype=data_type)
    # Make sure we're working with little endian
    if data_type == INT_16_BE:
        data = data.astype(INT_16_LE)

    # Reshape to correct size.
    # Either get info from .dem.rsc
    if ext == '.dem':
        info = load_dem_rsc(filename)
        dem_img = data.reshape((info['FILE_LENGTH'], info['WIDTH']))
    # Or check if we are using STRM1 (3601x3601) or SRTM3 (1201x1201)
    else:
        if (data.shape[0] / 3601) == 3601:
            # STRM1- 1 arc second data, 30 meter data
            dem_img = data.reshape((3601, 3601))
        elif (data.shape[0] / 1201) == 1201:
            # STRM3- 3 arc second data, 90 meter data
            dem_img = data.reshape((1201, 1201))
        else:
            raise ValueError("Invalid .hgt data size: must be square size 1201 or 3601")

    # TODO: makeDEM.m did this... do we always want this??
    # Clamps the -32768 "no coverage" sentinel (and any negatives) to 0
    dem_img = np.clip(dem_img, 0, None)

    return dem_img


def load_dem_rsc(filename):
    """Loads and parses the .dem.rsc file

    Args:
        filename (str) path to either the .dem or .dem.rsc file.
            Function will add .rsc to path if passed .dem file

    Returns:
        dict: dem.rsc file parsed out, keys are all caps

    example file:
    WIDTH         10801
    FILE_LENGTH   7201
    X_FIRST       -157.0
    Y_FIRST       21.0
    X_STEP        0.000277777777
    Y_STEP        -0.000277777777
    X_UNIT        degrees
    Y_UNIT        degrees
    Z_OFFSET      0
    Z_SCALE       1
    PROJECTION    LL
    """
    # Use OrderedDict so that upsample_dem_rsc creates with same ordering as old
    output_data = collections.OrderedDict()
    # Second part in tuple is used to cast string to correct type
    field_tups = (('WIDTH', int), ('FILE_LENGTH', int), ('X_STEP', float), ('Y_STEP', float),
                  ('X_FIRST', float), ('Y_FIRST', float), ('X_UNIT', str), ('Y_UNIT', str),
                  ('Z_OFFSET', int), ('Z_SCALE', int), ('PROJECTION', str))

    rsc_filename = '{}.rsc'.format(filename) if not filename.endswith('.rsc') else filename
    with open(rsc_filename, 'r') as f:
        for line in f.readlines():
            for field, num_type in field_tups:
                if line.startswith(field):
                    output_data[field] = num_type(line.split()[1])

    return output_data


def format_dem_rsc(rsc_dict):
    """Creates the .dem.rsc file string from key/value pairs of an OrderedDict

    Output of function can be written to a file as follows
        with open('my.dem.rsc', 'w') as f:
            f.write(outstring)

    Args:
        rsc_dict (OrderedDict): data about dem in ordered key/value format
            See `load_dem_rsc` output for example

    Returns:
        outstring (str) formatting string to be written to .dem.rsc
    """
    outstring = ""
    for field, value in rsc_dict.items():
        # Files seemed to be left justified with 13 spaces? Not sure why 13
        if field.lower() in ('x_step', 'y_step'):
            # give step floats proper sig figs to not output scientific notation
            outstring += "{field:<14s}{val:0.12f}\n".format(field=field.upper(), val=value)
        else:
            outstring += "{field:<14s}{val}\n".format(field=field.upper(), val=value)

    return outstring


def _get_file_rows_cols(ann_info=None, rsc_data=None):
    """Wrapper function to find file width for different SV types"""
    if (not rsc_data and not ann_info) or (rsc_data and ann_info):
        raise ValueError("needs either ann_info or rsc_data (but not both) to find number of cols")
    elif rsc_data:
        return rsc_data['FILE_LENGTH'], rsc_data['WIDTH']
    elif ann_info:
        return ann_info['rows'], ann_info['cols']


def _assert_valid_size(data, cols):
    """Make sure the width of the image is valid for the data size

    Note that only width is considered- The number of rows is ignored
    """
    error_str = "Invalid number of cols (%s) for file size %s." % (cols, len(data))
    # math.modf returns (fractional remainder, integer remainder)
    assert math.modf(float(len(data)) / cols)[0] == 0, error_str


def load_real(filename, ann_info=None, rsc_data=None):
    """Reads in real 4-byte per pixel files

    Valid filetypes: See sario.REAL_EXTS

    Args:
        filename (str): path to the file to open
        rsc_data (dict): output from load_dem_rsc, gives width of file
        ann_info (dict): data parsed from UAVSAR annotation file

    Returns:
        ndarray: float32 values for the real 2D matrix
    """
    data = np.fromfile(filename, FLOAT_32_LE)
    rows, cols = _get_file_rows_cols(ann_info=ann_info, rsc_data=rsc_data)
    _assert_valid_size(data, cols)
    return data.reshape([-1, cols])


def load_complex(filename, ann_info=None, rsc_data=None):
    """Combines real and imaginary values from a filename to make complex image

    Valid filetypes: See sario.COMPLEX_EXTS

    Args:
        filename (str): path to the file to open
        rsc_data (dict): output from load_dem_rsc, gives width of file
        ann_info (dict): data parsed from UAVSAR annotation file

    Returns:
        ndarray: imaginary numbers of the combined floats (dtype('complex64'))
    """
    data = np.fromfile(filename, FLOAT_32_LE)
    rows, cols = _get_file_rows_cols(ann_info=ann_info, rsc_data=rsc_data)
    _assert_valid_size(data, cols)

    real_data, imag_data = parse_complex_data(data, cols)
    return combine_real_imag(real_data, imag_data)


def load_stacked(filename, rsc_data, return_amp=False):
    """Helper function to load .unw and .cor files

    Format is two stacked matrices:
        [[first], [second]] where the first "cols" number of floats
        are the first matrix, next "cols" are second, etc.
    For .unw height files, the first is amplitude, second is phase (unwrapped)
    For .cc correlation files, first is amp, second is correlation (0 to 1)

    Args:
        filename (str): path to the file to open
        rsc_data (dict): output from load_dem_rsc, gives width of file
        return_amp (bool): flag to request the amplitude data to be returned

    Returns:
        ndarray: dtype=float32, the second matrix (height, correlation, ...) parsed
        if return_amp == True, returns a tuple (ndarray, ndarray)

    Example illustrating how strips of data alternate:
    reading unw (unwrapped phase) data

    data = np.fromfile('20141128_20150503.unw', '<f4')

    # The first section of data is amplitude data
    # The amplitude has a different, larger range of values
    amp = data[:cols]
    print(np.max(amp), np.min(amp))
    # Output: (27140.396, 118.341095)

    # The next part of the data is a line of phases:
    phase = data[cols:2*cols])
    print(np.max(phase), np.min(phase))
    # Output: (8.011558, -2.6779003)
    """
    data = np.fromfile(filename, FLOAT_32_LE)
    rows, cols = _get_file_rows_cols(rsc_data=rsc_data)
    _assert_valid_size(data, cols)

    # Rows alternate amplitude/data: view as (rows, 2*cols) and split halves
    first = data.reshape((rows, 2 * cols))[:, :cols]
    second = data.reshape((rows, 2 * cols))[:, cols:]
    if return_amp:
        return first, second
    else:
        return second


def is_complex(filename):
    """Helper to determine if file data is real or complex

    Uses https://uavsar.jpl.nasa.gov/science/documents/polsar-format.html for UAVSAR
    Note: differences between 3 polarizations for .mlc files: half real, half complex
    """
    ext = get_file_ext(filename)
    if ext not in COMPLEX_EXTS and ext not in REAL_EXTS:
        raise ValueError('Invalid filetype for load_file: %s\n '
                         'Allowed types: %s' % (ext, ' '.join(COMPLEX_EXTS + REAL_EXTS)))

    if ext in UAVSAR_POL_DEPENDENT:
        # Check if filename has one of the complex polarizations
        return any(pol in filename for pol in COMPLEX_POLS)
    else:
        return ext in COMPLEX_EXTS


def parse_complex_data(complex_data, cols):
    """Splits a 1-D array of real/imag bytes to 2 square arrays"""
    # double check if I ever need rows
    # Interleaved layout: even indices are real parts, odd are imaginary
    real_data = complex_data[::2].reshape([-1, cols])
    imag_data = complex_data[1::2].reshape([-1, cols])
    return real_data, imag_data


def combine_real_imag(real_data, imag_data):
    """Combines two float data arrays into one complex64 array"""
    return real_data + 1j * imag_data


def save(filename, array):
    """Save the numpy array in one of known formats

    Args:
        filename (str) Output path to save file in
        array (ndarray) matrix to save

    Returns:
        None

    Raises:
        NotImplementedError: if file extension of filename not a known ext
    """

    def _is_little_endian():
        """All UAVSAR data products save in little endian byte order"""
        return sys.byteorder == 'little'

    ext = get_file_ext(filename)

    if ext == '.png':  # TODO: or ext == '.jpg':
        # from PIL import Image
        # im = Image.fromarray(array)
        # im.save(filename)
        plt.imsave(filename, array, cmap='gray', vmin=0, vmax=1, format=ext.strip('.'))

    elif (ext in COMPLEX_EXTS + REAL_EXTS + ELEVATION_EXTS) and (ext not in STACKED_FILES):
        # If machine order is big endian, need to byteswap (TODO: test on big-endian)
        # TODO: Do we need to do this at all??
        if not _is_little_endian():
            array.byteswap(inplace=True)

        array.tofile(filename)
    elif ext in STACKED_FILES:  # TODO
        raise NotImplementedError("{} saving not yet implemented (TODO).".format(ext))
    else:
        raise NotImplementedError("{} saving not implemented.".format(ext))


# TODO: possibly separate into a "parser" file
def make_ann_filename(filename):
    """Take the name of a data file and return corresponding .ann name

    Examples:
        >>> print(make_ann_filename('brazos.cor'))
        brazos.ann
        >>> print(make_ann_filename('brazos.1.int'))
        brazos.ann
        >>> print(make_ann_filename('brazos_090HHHV_CX_01.mlc'))
        brazos_090_CX_01.ann
        >>> print(make_ann_filename('brazos_090HHVV_CX_01.mlc'))
        brazos_090_CX_01.ann
        >>> print(make_ann_filename('brazos_090HHVV_CX_01.grd'))
        brazos_090_CX_01.ann
        >>> print(make_ann_filename('brazos_090HHVV_CX_01_ML5X5.grd'))
        brazos_090_CX_01_ML5X5.ann
    """
    # The .mlc and .grd files have polarization added to filename, .ann files don't
    shortname = filename
    for p in POLARIZATIONS:
        shortname = shortname.replace(p, '')

    # If this is a block we split up and names .1.int, remove that since
    # all have the same .ann file
    # TODO: figure out where to get this list from
    ext = get_file_ext(filename)
    shortname = re.sub('\.\d' + ext, ext, shortname)

    return shortname.replace(ext, '.ann')


def parse_ann_file(filename, ext=None, verbose=False):
    """Returns the requested data from the UAVSAR annotation in ann_filename

    Args:
        ann_data (dict): key-values of requested data from .ann file
        ext (str): extension of desired data file, if filename is the .ann file
            instead of a data filepath
        verbose (bool): print extra logging into about file loading

    Returns:
        dict: the annotation file parsed into a dict. If no annotation file
            can be found, None is returned
    """

    def _parse_line(line):
        wordlist = line.split()
        # Pick the entry after the equal sign when splitting the line
        return wordlist[wordlist.index('=') + 1]

    def _parse_int(line):
        return int(_parse_line(line))

    def _parse_float(line):
        return float(_parse_line(line))

    if get_file_ext(filename) == '.ann' and not ext:
        raise ValueError('parse_ann_file needs ext argument if the data filename not provided.')

    ext = ext or get_file_ext(filename)  # Use what's passed by default
    ann_filename = make_ann_filename(filename)

    if verbose:
        logger.info("Trying to load ann_data from %s", ann_filename)
    if not os.path.exists(ann_filename):
        if verbose:
            logger.info("No file found: returning None")
        return None

    # Taken from a .ann file: (need to check if this is always true?)
    # SLC Data Units = linear amplitude
    # MLC Data Units = linear power
    # GRD Data Units = linear power
    ann_data = {}
    # Prefix of the annotation keys holding row/col counts for each file type
    line_keywords = {
        '.slc': 'slc_mag',
        '.mlc': 'mlc_pwr',
        '.int': 'slt',
        '.cor': 'slt',
        '.amp': 'slt',
        '.grd': 'grd_pwr'
    }
    row_starts = {k: v + '.set_rows' for k, v in line_keywords.items()}
    col_starts = {k: v + '.set_cols' for k, v in line_keywords.items()}
    # NOTE(review): if ext is not in line_keywords, row_key/col_key are None
    # and startswith() below raises TypeError -- confirm callers always pass
    # a UAVSAR extension.
    row_key = row_starts.get(ext)
    col_key = col_starts.get(ext)

    with open(ann_filename, 'r') as f:
        for line in f.readlines():
            # TODO: disambiguate which ones to use, and when
            if line.startswith(row_key):
                ann_data['rows'] = _parse_int(line)
            elif line.startswith(col_key):
                ann_data['cols'] = _parse_int(line)
            # Example: get the name of the mlc for HHHH polarization
            elif line.startswith('mlcHHHH'):
                ann_data['mlcHHHH'] = _parse_line(line)
            # TODO: Add more parsing! whatever is useful from .ann file

    if verbose:
        logger.info(pprint.pformat(ann_data))

    return ann_data
# ------------------------------------------------------------------------------------------------------
# Copyright (c) Leo Hanisch. All rights reserved.
# Licensed under the BSD 3-Clause License. See LICENSE.txt in the project root for license information.
# ------------------------------------------------------------------------------------------------------

from copy import deepcopy
import logging

from .firefly import Firefly
from .visualizer import Visualizer

LOGGER = logging.getLogger(__name__)


class FireflyProblem():
    """Firefly-algorithm optimization of a 2D function, with visualization."""

    def __init__(self, **kwargs):
        """Initializes a new instance of the `FireflyProblem` class.

        Keyword arguments:  \r
        `firefly_number`   -- Number of fireflies used for solving  \r
        `function`         -- The 2D evaluation function. Its input is a 2D numpy.array  \r
        `upper_boundary`   -- Upper boundary of the function (default 4)  \r
        `lower_boundary`   -- Lower boundary of the function (default 0)  \r
        `alpha`            -- Randomization parameter (default 0.25)  \r
        `beta`             -- Attractiveness at distance=0 (default 1)  \r
        `gamma`            -- Characterizes the variation of the attractiveness. (default 0.97)  \r
        `iteration_number` -- Number of iterations to execute (default 100)  \r
        `interval`         -- Interval between two animation frames in ms (default 500)  \r
        `continuous`       -- Indicates whether the algorithm should run continuously (default False)
        """
        # NOTE(review): docstring says iteration_number defaults to 100, but
        # the code uses 10 -- confirm which is intended.
        self.__iteration_number = kwargs.get('iteration_number', 10)

        # Create fireflies
        self.__fireflies = [
            Firefly(**kwargs)
            for _ in range(kwargs['firefly_number'])
        ]

        # Initialize visualizer for plotting
        self.__visualizer = Visualizer(**kwargs)
        self.__visualizer.add_data(positions=[firefly.position for firefly in self.__fireflies])

    def solve(self) -> Firefly:
        """Solve the problem."""
        best = None
        for _ in range(self.__iteration_number):
            for i in self.__fireflies:
                for j in self.__fireflies:
                    # Attraction step: move i towards any brighter firefly j
                    # (lower value = better)
                    if j.value < i.value:
                        i.move_towards(j.position)

            current_best = min(self.__fireflies, key=lambda firefly: firefly.value)
            if not best or current_best.value < best.value:
                # Snapshot the best firefly; the population keeps mutating
                best = deepcopy(current_best)

            LOGGER.info('Current best value: %s, Overall best value: %s',
                        current_best.value, best.value)

            # randomly walk the best firefly
            current_best.random_walk(0.1)

            # Add data for visualization
            self.__visualizer.add_data(positions=[firefly.position for firefly in self.__fireflies])

        return best

    def replay(self):
        """Play the visualization"""
        self.__visualizer.replay()
from test_plus.test import TestCase

from ..models import Profile, Task, Team
from .factories import ProfileFactory, TaskFactory, TeamFactory
from to_do_list.users.tests.factories import UserFactory
from ..helper_functions import calculate_reputation_gain, give_reputation_reward

from mock import patch


class TestProfile(TestCase):
    """Tests for reputation calculation and reward on completed tasks.

    user1 always creates the task; user2 completes it and earns reputation.
    """

    def setUp(self):
        # Reset factory sequences so usernames are deterministic per test.
        UserFactory.reset_sequence()
        team = TeamFactory()
        self.user1 = UserFactory()
        # Profile for user1 is created for completeness; only user2's profile
        # is inspected by the tests below.
        profile = ProfileFactory(user=self.user1, team=team)
        self.user2 = UserFactory()
        self.profile2 = ProfileFactory(user=self.user2, team=team)

    def test_trivial_task(self):
        """Solving a trivial task is worth 1 reputation point."""
        task = TaskFactory(
            creator=self.user1,
            visibility=Task.VISIBILITIES.public,
            completed_by=self.user2,
            difficulty=Task.DIFFICULTIES.trivial,
        )
        self.assertEqual(calculate_reputation_gain(task), 1)

    def test_easy_task(self):
        """Solving an easy task is worth 5 reputation points."""
        task = TaskFactory(
            creator=self.user1,
            visibility=Task.VISIBILITIES.public,
            completed_by=self.user2,
            difficulty=Task.DIFFICULTIES.easy,
        )
        self.assertEqual(calculate_reputation_gain(task), 5)

    def test_OK_task(self):
        """Solving an OK task is worth 10 reputation points."""
        task = TaskFactory(
            creator=self.user1,
            visibility=Task.VISIBILITIES.public,
            completed_by=self.user2,
            difficulty=Task.DIFFICULTIES.OK,
        )
        self.assertEqual(calculate_reputation_gain(task), 10)

    def test_hard_task(self):
        """Solving a hard task is worth 25 reputation points."""
        task = TaskFactory(
            creator=self.user1,
            visibility=Task.VISIBILITIES.public,
            completed_by=self.user2,
            difficulty=Task.DIFFICULTIES.hard,
        )
        self.assertEqual(calculate_reputation_gain(task), 25)

    def test_heroic_task(self):
        """Solving a heroic task is worth 100 reputation points."""
        task = TaskFactory(
            creator=self.user1,
            visibility=Task.VISIBILITIES.public,
            completed_by=self.user2,
            difficulty=Task.DIFFICULTIES.heroic,
        )
        self.assertEqual(calculate_reputation_gain(task), 100)

    def test_nightmare_task(self):
        """Solving a nightmare task is worth 500 reputation points."""
        task = TaskFactory(
            creator=self.user1,
            visibility=Task.VISIBILITIES.public,
            completed_by=self.user2,
            difficulty=Task.DIFFICULTIES.nightmare,
        )
        self.assertEqual(calculate_reputation_gain(task), 500)

    # calculate_reputation_gain is patched so the test only exercises the
    # reward bookkeeping, not the gain formula tested above.
    @patch('to_do_list.tasks.helper_functions.calculate_reputation_gain', lambda x: 42)
    def test_give_reputation(self):
        """Should increase the reputation of the user who completed the task
        by the gain calculated (here patched to always return 42).
        """
        task = TaskFactory(
            creator=self.user1,
            visibility=Task.VISIBILITIES.public,
            completed_by=self.user2,
        )
        give_reputation_reward(task)
        self.profile2.refresh_from_db()
        # assumes ProfileFactory starts reputation at 1 — TODO confirm in factories
        self.assertEqual(self.profile2.reputation, 1 + 42)
import sys, atexit from time import sleep from mowaysim import * print 'Executing ' + __name__ + ' test...' if __name__ == '__main__': atexit.register(exit_mow) channel = 8 moway.usbinit_moway() ret = moway.init_moway(channel) if ret == 0: print 'Moway RFUSB Connected' else: print 'Moway RFUSB not connected. Exit' exit(-1) moway.set_rotation(210) while True: moway.command_moway(CMD_LINE_FOLLOW_L,0) obstacle = moway.get_obs_center_left() + moway.get_obs_center_right() + moway.get_obs_side_left() + moway.get_obs_side_right() if obstacle > 0: moway.command_moway(CMD_ROTATELEFT,0) moway.command_moway(CMD_BRAKELEDON,0) moway.wait_mot_end(0) moway.command_moway(CMD_BRAKELEDOFF,0)
import requests test_url = "https://playground.learnqa.ru/api/homework_cookie" class TestCookie: def test_cookie(self): print("Expected cookie is {'HomeWork': 'hw_value'}") actual_response = requests.get(test_url) actual_cookie = dict(actual_response.cookies) assert actual_response.status_code == 200, 'Wrong response code' assert "HomeWork" in actual_cookie, f"Expected cookie key 'HomeWork' is missing in the response" assert actual_cookie["HomeWork"] == "hw_value", f"Expected cookie value: 'hw_value'," \ f" actual cookie value: {actual_cookie['HomeWork']}"
from . import openacademy_course from . import openacademy_session from . import partner
from django.core.management.base import BaseCommand from iRiffSync.iRiffClient import iRiffClient class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument('page', nargs='+', type=int) def handle(self, *args, **options): for page in options['page']: client = iRiffClient() print(client.getPage(page))
# Copyright (c) Open-MMLab. All rights reserved.
import torch
from torch.nn.parallel._functions import _get_stream


def scatter(input, devices, streams=None):
    """Scatters tensor across multiple GPUs.

    Lists are split into contiguous chunks of size ceil(len/devices) and each
    element is scattered recursively onto the device owning its chunk.
    Tensors are made contiguous and copied to devices[0], asynchronously when
    a copy stream is provided.
    """
    if streams is None:
        streams = [None] * len(devices)

    if isinstance(input, list):
        # ceil division: chunk i // chunk_size selects the target device/stream
        chunk_size = (len(input) - 1) // len(devices) + 1
        outputs = [
            scatter(input[i], [devices[i // chunk_size]],
                    [streams[i // chunk_size]]) for i in range(len(input))
        ]
        return outputs
    elif isinstance(input, torch.Tensor):
        output = input.contiguous()
        # TODO: copy to a pinned buffer first (if copying from CPU)
        # Empty tensors skip the side stream: there is nothing to overlap.
        stream = streams[0] if output.numel() > 0 else None
        with torch.cuda.device(devices[0]), torch.cuda.stream(stream):
            output = output.cuda(devices[0], non_blocking=True)
        return output
    else:
        raise Exception(f'Unknown type {type(input)}.')


def synchronize_stream(output, devices, streams):
    """Make the main stream of each device wait on its copy stream.

    Mirrors the recursive structure of ``scatter`` and marks each tensor with
    ``record_stream`` so its memory is not reused while the copy is in flight.
    """
    if isinstance(output, list):
        chunk_size = len(output) // len(devices)
        for i in range(len(devices)):
            for j in range(chunk_size):
                synchronize_stream(output[i * chunk_size + j],
                                   [devices[i]], [streams[i]])
    elif isinstance(output, torch.Tensor):
        if output.numel() != 0:
            with torch.cuda.device(devices[0]):
                main_stream = torch.cuda.current_stream()
                main_stream.wait_stream(streams[0])
                output.record_stream(main_stream)
    else:
        raise Exception(f'Unknown type {type(output)}.')


def get_input_device(input):
    """Return the CUDA device index of the input, or -1 for CPU.

    For lists, returns the device of the first CUDA element found
    (assumes mixed-device lists do not occur — TODO confirm with callers).
    """
    if isinstance(input, list):
        for item in input:
            input_device = get_input_device(item)
            if input_device != -1:
                return input_device
        return -1
    elif isinstance(input, torch.Tensor):
        return input.get_device() if input.is_cuda else -1
    else:
        raise Exception(f'Unknown type {type(input)}.')


class Scatter:

    @staticmethod
    def forward(target_gpus, input):
        """Scatter ``input`` onto ``target_gpus``, overlapping CPU->GPU copies.

        CPU inputs are copied on background streams (one per GPU) and then
        synchronized back into each device's main stream.
        """
        input_device = get_input_device(input)
        streams = None
        if input_device == -1:
            # Perform CPU to GPU copies in a background stream
            streams = [_get_stream(device) for device in target_gpus]

        outputs = scatter(input, target_gpus, streams)
        # Synchronize with the copy stream
        if streams is not None:
            synchronize_stream(outputs, target_gpus, streams)

        return tuple(outputs)
# -*- coding: utf-8 -*- from cleo import Application from .. import __version__ application = Application('Orator', __version__, complete=True) # Migrations from .migrations import ( InstallCommand, MigrateCommand, MigrateMakeCommand, RollbackCommand, StatusCommand, ResetCommand, RefreshCommand ) application.add(InstallCommand()) application.add(MigrateCommand()) application.add(MigrateMakeCommand()) application.add(RollbackCommand()) application.add(StatusCommand()) application.add(ResetCommand()) application.add(RefreshCommand()) # Seeds from .seeds import SeedersMakeCommand, SeedCommand application.add(SeedersMakeCommand()) application.add(SeedCommand()) # Models from .models import ModelMakeCommand application.add(ModelMakeCommand())
supported_languages = { # .c,.h: C "c": "C", "h": "C", # .cc .cpp .cxx .c++ .h .hh : CPP "cc": "CPP", "cpp": "CPP", "cxx": "CPP", "c++": "CPP", "h": "CPP", "hh": "CPP", # .py .pyw, .pyc, .pyo, .pyd : PYTHON "py": "PYTHON", "pyw": "PYTHON", "pyc": "PYTHON", "pyo": "PYTHON", "pyd": "PYTHON", # .clj .edn : CLOJURE "clj": "CLOJURE", "edn": "CLOJURE", # .js : JAVASCRIPT "js": "JAVASCRIPT", # .java .class .jar :JAVA "java": "JAVA", "class": "JAVA", "jar": "JAVA", # .rb .rbw:RUBY "rb": "RUBY", "rbw": "RUBY", # .hs .hls:HASKELL "hs": "HASKELL", "hls": "HASKELL", # .pl .pm .t .pod:PERL "pl": "PERL", "pm": "PERL", "t": "PERL", "pod": "PERL", # php, .phtml, .php4, .php3, .php5, .phps "php": "PHP", "phtml": "PHP", "php4": "PHP", "php3": "PHP", "php5": "PHP", "phps": "PHP", # .cs : CSHARP "cs": "CSHARP", # .go : GO "go": "GO", # .r : R "r": "R", # .rb : RUBY "rb": "RUBY", }
import pathlib import os.path import tempfile import app.git as git basedir = os.path.abspath(os.path.dirname(__file__)) class TestGitRepoFromPath: def test_none(self): assert git.repo_from_path(None) is None def test_empty(self): assert git.repo_from_path('') is None def test_bad_directory(self): assert git.repo_from_path('/something/that/does/not/exist') is None def test_not_git_directory(self): assert git.repo_from_path(tempfile.gettempdir()) is None def test_git_root(self): assert git.repo_from_path(pathlib.Path(basedir).parent) def test_git_subdirectory(self): assert git.repo_from_path(basedir)
# -*- coding: utf-8 -*- from setuptools import find_packages from setuptools import setup test_reqs = ["pytest", "pytest-docker-fixtures", "pytest-aiohttp>=0.3.0"] setup( name="guillotina_gcloudstorage", version=open("VERSION").read().strip(), description="guillotina gcloud storage support", long_description=(open("README.rst").read() + "\n" + open("CHANGELOG.rst").read()), long_description_content_type="text/x-rst", classifiers=[ "Programming Language :: Python :: 3.7", "Topic :: Software Development :: Libraries :: Python Modules", ], author="Ramon Navarro Bosch", author_email="ramon@plone.org", keywords="guillotina async cloud storage", url="https://pypi.python.org/pypi/guillotina_gcloudstorage", license="GPL version 3", setup_requires=["pytest-runner",], zip_safe=True, include_package_data=True, packages=find_packages(exclude=["ez_setup"]), package_data={"": ["*.txt", "*.rst"], "guillotina_gcloudstorage": ["py.typed"]}, install_requires=[ "setuptools", "guillotina>=5.3.48", "protobuf", "oauth2client", "google-cloud-storage", "gcloud", "ujson", "backoff", ], extras_require={"test": test_reqs}, tests_require=test_reqs, entry_points={"guillotina": ["include = guillotina_gcloudstorage",]}, )
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from novaclient import api_versions
from novaclient import base


class QuotaClassSet(base.Resource):
    """A quota class set resource; ``self.id`` is the quota class name."""

    def update(self, *args, **kwargs):
        # Delegates to the manager's (microversion-selected) update method.
        return self.manager.update(self.id, *args, **kwargs)


class QuotaClassSetManager(base.Manager):
    """Manager for /os-quota-class-sets.

    ``update`` is defined three times below; ``api_versions.wraps`` selects
    the implementation matching the negotiated API microversion at call time.
    """

    resource_class = QuotaClassSet

    def get(self, class_name):
        """Fetch the quota class set identified by ``class_name``."""
        return self._get("/os-quota-class-sets/%s" % (class_name),
                         "quota_class_set")

    def _update_body(self, **kwargs):
        # Wrap the raw resource kwargs in the API's expected envelope.
        return {'quota_class_set': kwargs}

    # NOTE(mriedem): Before 2.50 the resources you could update was just a
    # kwargs dict and not validated on the client-side, only on the API server
    # side.
    @api_versions.wraps("2.0", "2.49")
    def update(self, class_name, **kwargs):
        """Update quota class resources (2.0-2.49): free-form kwargs."""
        body = self._update_body(**kwargs)

        # Drop None-valued entries so only explicitly-set quotas are sent.
        # list() snapshots the keys since the dict is mutated while iterating.
        for key in list(body['quota_class_set']):
            if body['quota_class_set'][key] is None:
                body['quota_class_set'].pop(key)

        return self._update('/os-quota-class-sets/%s' % (class_name),
                            body,
                            'quota_class_set')

    # NOTE(mriedem): 2.50 does strict validation of the resources you can
    # specify since the network-related resources are blocked in 2.50.
    @api_versions.wraps("2.50", "2.56")
    def update(self, class_name, instances=None, cores=None, ram=None,
               metadata_items=None, injected_files=None,
               injected_file_content_bytes=None, injected_file_path_bytes=None,
               key_pairs=None, server_groups=None, server_group_members=None):
        """Update quota class resources (2.50-2.56): explicit kwargs only."""
        resources = {}
        if instances is not None:
            resources['instances'] = instances
        if cores is not None:
            resources['cores'] = cores
        if ram is not None:
            resources['ram'] = ram
        if metadata_items is not None:
            resources['metadata_items'] = metadata_items
        if injected_files is not None:
            resources['injected_files'] = injected_files
        if injected_file_content_bytes is not None:
            resources['injected_file_content_bytes'] = (
                injected_file_content_bytes)
        if injected_file_path_bytes is not None:
            resources['injected_file_path_bytes'] = injected_file_path_bytes
        if key_pairs is not None:
            resources['key_pairs'] = key_pairs
        if server_groups is not None:
            resources['server_groups'] = server_groups
        if server_group_members is not None:
            resources['server_group_members'] = server_group_members

        body = {'quota_class_set': resources}
        return self._update('/os-quota-class-sets/%s' % class_name, body,
                            'quota_class_set')

    # NOTE(mriedem): 2.57 deprecates the usage of injected_files,
    # injected_file_content_bytes and injected_file_path_bytes so those
    # kwargs are removed.
    @api_versions.wraps("2.57")
    def update(self, class_name, instances=None, cores=None, ram=None,
               metadata_items=None, key_pairs=None, server_groups=None,
               server_group_members=None):
        """Update quota class resources (2.57+): file-injection kwargs removed."""
        resources = {}
        if instances is not None:
            resources['instances'] = instances
        if cores is not None:
            resources['cores'] = cores
        if ram is not None:
            resources['ram'] = ram
        if metadata_items is not None:
            resources['metadata_items'] = metadata_items
        if key_pairs is not None:
            resources['key_pairs'] = key_pairs
        if server_groups is not None:
            resources['server_groups'] = server_groups
        if server_group_members is not None:
            resources['server_group_members'] = server_group_members

        body = {'quota_class_set': resources}
        return self._update('/os-quota-class-sets/%s' % class_name, body,
                            'quota_class_set')
# Flask view/helper module for managing box packaging configurations.
#
# NOTE(review): every SQL statement in this module is built with Python
# %-string interpolation, including values taken straight from ``form_data``.
# That is an SQL-injection risk and should move to parameterized queries
# (cur.execute(sql, params)) — flagged here, not changed.
from .db import get_connection, get_data_dict
from flask import render_template, url_for, make_response
import requests
import traceback
import math
import logging

logger = logging.getLogger()


def do_packaging():
    """Render the packaging page: one row per package with per-class quantities."""
    context = {'packaging_page': True}
    conn = get_connection()
    cur = conn.cursor()
    query = """
        SELECT *
        FROM package_classes
    """
    package_classes = get_data_dict(cur, query)
    query = """
        SELECT *
        FROM boxes
    """
    boxes = get_data_dict(cur, query)
    # One SELECT column and one LEFT JOIN per package class, aliased by the
    # class name, so each package row carries a quantity for every class.
    select_template = 'COALESCE("%(class)s".quantity, 0) %(class)s_quantity'
    join_template = """
        LEFT JOIN packaging "%(class)s"
        ON "%(class)s".package_class_id = %(id)s
        AND "%(class)s".package_id = up.package_id
    """
    selects = ",\n".join([select_template % package_class for package_class in package_classes])
    joins = "\n".join([join_template % package_class for package_class in package_classes])
    query = """
        WITH uniq_packaging as (
            SELECT package_id, box_id
            FROM packaging
            GROUP BY package_id, box_id
        )
        SELECT up.package_id, b.type, %s
        FROM uniq_packaging up
        JOIN boxes b
        ON b.id = up.box_id
        %s
    """ % (selects, joins)
    # NOTE(review): ``keys`` is computed but never used below — dead code?
    keys = [("%(class)s_quantity" % package_class, package_class['id']) for package_class in package_classes]
    packaging = get_data_dict(cur, query)
    # Reshape each flat row into a 'quantities' list for the template.
    for row in packaging:
        row['quantities'] = []
        for package_class in package_classes:
            info = dict(package_class)
            info['qty'] = row['%(class)s_quantity' % package_class]
            row['quantities'].append(info)
    context['packaging'] = packaging
    context['package_classes'] = package_classes
    context['boxes_list'] = boxes
    return render_template('packaging.html', **context)


def do_add_packaging(form_data):
    """Insert a new packaging record (one row per package class) from form data.

    Returns a 200 'success' response, or a 400 response carrying the traceback.
    """
    response = None
    conn = get_connection()
    cur = conn.cursor()
    rows = []
    try:
        # Allocate the next package_id (1 when the table is empty).
        query = "SELECT COALESCE(MAX(package_id)+1, 1) FROM packaging"
        cur.execute(query)
        package_id = cur.fetchone()[0]
        # Form fields named 'package-class-<id>' carry the per-class quantities.
        for key in form_data:
            if key.startswith('package-class-'):
                package_class_id = key.split('package-class-')[1]
                row = {'box_id': form_data['box_id'],
                       'package_class_id': package_class_id,
                       'quantity': form_data[key],
                       'package_id': package_id
                       }
                rows.append(row)
        query = """
            INSERT INTO packaging
            (package_id, box_id, package_class_id, quantity)
            VALUES
            %s
        """ % ",\n".join(["(%(package_id)s, %(box_id)s, %(package_class_id)s, %(quantity)s)" % row for row in rows])
        cur.execute(query)
        # Derived package_configs are rebuilt whenever packaging changes.
        update_package_configs(cur)
        conn.commit()
        response = make_response('success', 200)
    except Exception as e:
        logger.exception(e)
        tb = traceback.format_exc()
        response = make_response(tb, 400)
    return response


def do_edit_packaging(form_data):
    """Update (or insert missing) per-class rows of an existing package.

    Returns a 200 'success' response, or a 400 response carrying the traceback.
    """
    response = None
    conn = get_connection()
    cur = conn.cursor()
    try:
        for key in form_data:
            if key.startswith('package-class-'):
                package_class_id = key.split('package-class-')[1]
                row = {'box_id': form_data['box_id'],
                       'package_class_id': package_class_id,
                       'quantity': form_data[key],
                       'package_id': form_data['package_id']
                       }
                # Upsert by hand: UPDATE when the (package, class) row exists,
                # otherwise INSERT.
                query = """
                    SELECT COUNT(1)
                    FROM packaging
                    WHERE package_id = %(package_id)s
                    AND package_class_id = %(package_class_id)s
                """ % row
                cur.execute(query)
                exists = cur.fetchone()[0] == 1
                if exists:
                    query = """
                        UPDATE packaging
                        SET quantity = %(quantity)s,
                        box_id = %(box_id)s
                        WHERE package_id = %(package_id)s
                        AND package_class_id = %(package_class_id)s
                    """ % row
                else:
                    query = """
                        INSERT INTO packaging
                        (package_id, box_id, package_class_id, quantity)
                        VALUES
                        (%(package_id)s, %(box_id)s, %(package_class_id)s, %(quantity)s)
                    """ % row
                cur.execute(query)
        update_package_configs(cur)
        conn.commit()
        response = make_response('success', 200)
    except Exception as e:
        logger.exception(e)
        tb = traceback.format_exc()
        response = make_response(tb, 400)
    return response


def do_remove_packaging(package_id):
    """Delete all rows of a package and rebuild the derived configs.

    Returns a 200 'success' response, or a 400 response carrying the traceback.
    """
    response = None
    conn = get_connection()
    cur = conn.cursor()
    try:
        query = """
            DELETE FROM packaging
            WHERE package_id = %s
        """ % package_id
        cur.execute(query)
        update_package_configs(cur)
        conn.commit()
        response = make_response('success', 200)
    except Exception as e:
        logger.exception(e)
        tb = traceback.format_exc()
        response = make_response(tb, 400)
    return response


def update_package_configs(cur):
    """Rebuild the package_configs table from packaging/boxes/shipping_costs.

    For every package, enumerates sub-combinations of its class quantities
    (via ``add_combinations``) up to MAX_WEIGHT, keeps the minimum-box-volume
    combination per distinct quantity vector, and inserts each with its
    box-inclusive CA/US shipping cost. Caller is responsible for commit.
    """
    cur.execute("DELETE FROM package_configs")
    rows = get_data_dict(cur, "SELECT * FROM shipping_costs")
    # shipping_costs[box_id][weight_str] -> cost row.
    # NOTE(review): weight_kg is used directly as the dict key here but looked
    # up later via the "%.1f" formatted string — assumes the column stores the
    # same one-decimal text/value; TODO confirm schema.
    shipping_costs = {}
    for row in rows:
        box_id = row['box_id']
        weight_str = row['weight_kg']
        if box_id not in shipping_costs:
            shipping_costs[box_id] = {}
        shipping_costs[box_id][weight_str] = row
    rows = get_data_dict(cur, "SELECT * FROM package_classes")
    package_class_list = [row for row in rows]
    package_ids = ["%s" % row['id'] for row in rows]
    package_classes = {str(row["id"]): row for row in rows}
    rows = get_data_dict(cur, "SELECT p.*, b.cost box_cost, b.weight_kg, (b.length_in*b.width_in*b.height_in) volume FROM packaging p JOIN boxes b ON b.id = p.box_id")
    # packaging[package_id] -> box info + per-class quantities + total weight
    # (box weight plus quantity-weighted class weights).
    packaging = {}
    for row in rows:
        package_id = row['package_id']
        if package_id not in packaging:
            packaging[package_id] = {'box_id': row['box_id'],
                                     'weight_kg': row['weight_kg'],
                                     'box_volume': row['volume'],
                                     'box_cost': row['box_cost']}
        package_class_id = str(row['package_class_id'])
        packaging[package_id][package_class_id] = row['quantity']
        packaging[package_id]['weight_kg'] += package_classes[package_class_id]['weight_kg']*row['quantity']
    keys = ['box_id']
    keys.extend(package_ids)
    MAX_WEIGHT = 2.0  # heaviest shippable combination, in kg
    combinations = {}
    for package_id in packaging:
        package = packaging[package_id]
        combinations.update(add_combinations(MAX_WEIGHT, package_class_list, package, combinations, keys))
    # Keep, per quantity vector, the combination using the smallest box.
    min_volume_combinations = {}
    for c in combinations:
        combination = combinations[c]
        # Round weight up to the next 0.1 kg band, floor 0.1.
        weight = max(0.1, round(math.ceil(round(combination['weight_kg'],2)/0.1)*0.1, 1))
        if weight <= MAX_WEIGHT:
            weight_str = "%.1f" % weight
            shipping_cost = shipping_costs[combination['box_id']][weight_str]
            # Keyed by quantities only (package_ids), so different boxes for
            # the same contents compete on volume.
            key = get_hashable(combination, package_ids)
            if (key not in min_volume_combinations
                    or combination['box_volume'] < min_volume_combinations[key]['box_volume']):
                min_volume_combinations[key] = dict(combination)
                min_volume_combinations[key]['cost_ca'] = shipping_cost['cost_ca'] + combination['box_cost']
                min_volume_combinations[key]['cost_us'] = shipping_cost['cost_us'] + combination['box_cost']
    for key in min_volume_combinations:
        row = min_volume_combinations[key]
        query = "SELECT COALESCE(MAX(package_id)+1, 1) FROM package_configs"
        cur.execute(query)
        package_id = cur.fetchone()[0]
        # Double-%% so box_id/costs are filled now and (package_id, class, qty)
        # are filled per class in the second pass below.
        template = "(%%s, %(box_id)s, %%s, %%s, %(cost_ca)s, %(cost_us)s)" % row
        values = ",\n".join([template % (package_id, x, row[x]) for x in package_ids])
        query = """
            INSERT INTO package_configs
            (package_id, box_id, package_class_id, quantity, cost_ca, cost_us)
            VALUES
            %s
        """ % values
        cur.execute(query)


def add_combinations(max_weight, package_class_list, current_combination, combinations, keys):
    """Recursively collect all sub-combinations reachable by decrementing class counts.

    A combination is recorded when its weight is within ``max_weight``;
    recursion memoizes on the hashable key to avoid revisiting states.
    NOTE(review): the ``> 1`` guard means a class count never drops below 1,
    so zero-of-a-class variants are not generated — presumably intentional,
    TODO confirm.
    """
    key = get_hashable(current_combination, keys)
    if key in combinations:
        return combinations
    if current_combination['weight_kg'] <= max_weight:
        combinations[key] = current_combination
    for package_class in package_class_list:
        weight_kg = package_class['weight_kg']
        package_class_id = str(package_class['id'])
        if (current_combination[package_class_id] > 1):
            new_combination = dict(current_combination)
            new_combination[package_class_id] -= 1
            new_combination['weight_kg'] -= weight_kg
            combinations.update(add_combinations(max_weight, package_class_list, new_combination, combinations, keys))
    return combinations


def get_hashable(data, keys):
    """Project ``data`` onto ``keys`` as a comma-joined string usable as a dict key."""
    return ",".join([str(data[key]) for key in keys])
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------

from msrest.serialization import Model


class Package(Model):
    """Package.

    :param _links:
    :type _links: :class:`ReferenceLinks <packaging.v4_1.models.ReferenceLinks>`
    :param id:
    :type id: str
    :param is_cached:
    :type is_cached: bool
    :param name: The display name of the package
    :type name: str
    :param normalized_name: The normalized name representing the identity of this package for this protocol type
    :type normalized_name: str
    :param protocol_type:
    :type protocol_type: str
    :param star_count:
    :type star_count: int
    :param url:
    :type url: str
    :param versions:
    :type versions: list of :class:`MinimalPackageVersion <packaging.v4_1.models.MinimalPackageVersion>`
    """

    # msrest (de)serialization map: attribute name -> wire key and type.
    _attribute_map = {
        '_links': {'key': '_links', 'type': 'ReferenceLinks'},
        'id': {'key': 'id', 'type': 'str'},
        'is_cached': {'key': 'isCached', 'type': 'bool'},
        'name': {'key': 'name', 'type': 'str'},
        'normalized_name': {'key': 'normalizedName', 'type': 'str'},
        'protocol_type': {'key': 'protocolType', 'type': 'str'},
        'star_count': {'key': 'starCount', 'type': 'int'},
        'url': {'key': 'url', 'type': 'str'},
        'versions': {'key': 'versions', 'type': '[MinimalPackageVersion]'}
    }

    def __init__(self, _links=None, id=None, is_cached=None, name=None, normalized_name=None, protocol_type=None, star_count=None, url=None, versions=None):
        super(Package, self).__init__()
        # Plain attribute assignments mirroring _attribute_map, as generated.
        self._links = _links
        self.id = id
        self.is_cached = is_cached
        self.name = name
        self.normalized_name = normalized_name
        self.protocol_type = protocol_type
        self.star_count = star_count
        self.url = url
        self.versions = versions
# -*- coding: utf-8 -*-
"""
    proxy.py
    ~~~~~~~~
    ⚡⚡⚡ Fast, Lightweight, Pluggable, TLS interception capable proxy server focused on
    Network monitoring, controls & Application development, testing, debugging.

    :copyright: (c) 2013-present by Abhinav Singh and contributors.
    :license: BSD, see LICENSE for more details.
"""
import queue
import threading
import logging

from typing import Dict, Any, List

from ...common.types import DictQueueType

from .queue import EventQueue
from .names import eventNames

logger = logging.getLogger(__name__)


class EventDispatcher:
    """Core EventDispatcher.

    Provides:
    1. A dispatcher module which consumes core events and dispatches
       them to EventQueueBasePlugin
    2. A publish utility for publishing core events into
       global events queue.

    Direct consuming from global events queue outside of dispatcher
    module is not-recommended.  Python native multiprocessing queue
    doesn't provide a fanout functionality which core dispatcher module
    implements so that several plugins can consume same published
    event at a time.

    When --enable-events is used, a multiprocessing.Queue is created and
    attached to global argparse.  This queue can then be used for
    dispatching an Event dict object into the queue.

    When --enable-events is used, dispatcher module is automatically
    started. Dispatcher module also ensures that queue is not flooded and
    doesn't utilize too much memory in case there are no event plugins
    enabled.
    """

    def __init__(
            self,
            shutdown: threading.Event,
            event_queue: EventQueue) -> None:
        # Set by the owner to request the run() loop to stop.
        self.shutdown: threading.Event = shutdown
        # Global events queue this dispatcher consumes from.
        self.event_queue: EventQueue = event_queue
        # sub_id -> per-subscriber channel used for fanout.
        self.subscribers: Dict[str, DictQueueType] = {}

    def handle_event(self, ev: Dict[str, Any]) -> None:
        """Register/unregister subscribers, or fan an event out to all of them."""
        if ev['event_name'] == eventNames.SUBSCRIBE:
            self.subscribers[ev['event_payload']['sub_id']] = \
                ev['event_payload']['channel']
        elif ev['event_name'] == eventNames.UNSUBSCRIBE:
            del self.subscribers[ev['event_payload']['sub_id']]
        else:
            # logger.info(ev)
            # A broken pipe means the subscriber went away without
            # unsubscribing; collect and drop such subscribers after the loop
            # (the dict must not be mutated while iterating it).
            unsub_ids: List[str] = []
            for sub_id in self.subscribers:
                try:
                    self.subscribers[sub_id].put(ev)
                except BrokenPipeError:
                    unsub_ids.append(sub_id)
            for sub_id in unsub_ids:
                del self.subscribers[sub_id]

    def run_once(self) -> None:
        """Block up to 1s for one event; raises queue.Empty on timeout so the
        caller can re-check the shutdown flag."""
        ev: Dict[str, Any] = self.event_queue.queue.get(timeout=1)
        self.handle_event(ev)

    def run(self) -> None:
        """Consume events until shutdown is set.

        queue.Empty only loops back to re-check the flag; broken/closed queue
        or Ctrl-C exit quietly; anything else is logged.
        """
        try:
            while not self.shutdown.is_set():
                try:
                    self.run_once()
                except queue.Empty:
                    pass
        except BrokenPipeError:
            pass
        except EOFError:
            pass
        except KeyboardInterrupt:
            pass
        except Exception as e:
            logger.exception('Event dispatcher exception', exc_info=e)
"""Various forms used within the application.""" from wtforms import ( Form, StringField, PasswordField, validators, SelectMultipleField, BooleanField ) class LoginForm(Form): """Login form validator.""" username = StringField('username', [validators.Length(min=6, max=35)]) password = PasswordField('password', [validators.DataRequired()]) class RegisterForm(Form): """Register form.""" username = StringField('username', [validators.Length(min=6, max=35)]) email = StringField('email', [validators.Length(min=6, max=35)]) first_name = StringField('first_name', [validators.Length(min=1, max=35)]) last_name = StringField('last_name', [validators.Length(min=1, max=35)]) password = PasswordField('password', [ validators.DataRequired(), validators.EqualTo('password_confirm', message='Passwords must match') ]) password_confirm = PasswordField('password_confirm') class ChangePasswordForm(Form): """Change password form.""" password = PasswordField('password', [ validators.DataRequired(), validators.EqualTo('password_confirm', message='Passwords must match') ]) password_confirm = PasswordField('password_confirm') user_id = StringField('user_id', [validators.Length(min=1, max=35)]) class AccountSettingsForm(Form): """Account settings form.""" email = StringField('email', [validators.Length(min=6, max=35)]) first_name = StringField('first_name', [validators.Length(min=1, max=35)]) last_name = StringField('last_name', [validators.Length(min=1, max=35)]) user_id = StringField('user_id', [validators.Length(min=1, max=35)]) class MonitorForm(Form): """Monitor form validator.""" term = StringField('term', [validators.Length(min=1, max=35)]) type = StringField('type', [validators.Length(min=1, max=35)]) category = StringField('category', [validators.Length(min=1, max=35)]) tags = StringField('tags') class AdminForm(Form): """Admin form validator.""" email = StringField('email', [validators.Length(min=6, max=35)]) password = PasswordField('password', [validators.DataRequired()])
import requests
from requests.exceptions import HTTPError
import json

# Use the configured log level when a local `config` module exists; fall back
# to the most verbose level otherwise.
try:
    from config import log_level
except ImportError:
    # was a bare `except:` — only a missing config module should be tolerated
    log_level = 6
finally:
    from .__debug import Console
    console = Console(log_level)


class Req:
    """HTTP layer for the Mist API.

    Wraps a persistent requests.Session (keeps authentication cookies and
    headers between calls) and exposes one helper per HTTP verb.  Each helper
    returns the dict produced by :meth:`_response`, or ``None`` when the
    request could not be performed.
    """

    def __init__(self):
        self.host = ""                     # API host, set after login
        self.session = requests.session()  # shared session for all calls
        self.privileges = ""               # privilege list, set after login

    def _url(self, uri):
        """Generate the url with the host (in the object) and the uri
        Params: uri
        Return: url"""
        return "https://" + self.host + uri

    def _check_authorization(self, method, org_id="", site_id=""):
        """Return True when the current user may run `method` on the org/site.

        NOTE: the real validation below is intentionally bypassed (see TODO);
        this method currently always returns True.
        """
        return True
        # TODO: current validation may not working in some conditions... Bypassing it
        if method in ["POST", "PUT", "DELETE"]:
            if org_id != "":
                for privilige in self.privileges:
                    if "org_id" in privilige and privilige['org_id'] == org_id:
                        if privilige["role"] in ["write", "admin"]:
                            return True
                console.error("authorization error")
                return False
            elif site_id != "":
                for privilige in self.privileges:
                    if "site_id" in privilige and privilige['site_id'] == site_id:
                        if privilige["role"] in ["write", "admin"]:
                            return True
                console.error("authorization error")
                return False
        else:
            return True

    def _response(self, resp, uri="", multi_pages_result=None):
        """Normalize a requests.Response into a plain dict.

        Params: resp, uri, multi_pages_result (pre-merged body for paginated GETs)
        Return: {"result", "status_code", "error", "uri"}
        """
        if resp.status_code == 200:
            # identity test for the sentinel (was `== None`)
            if multi_pages_result is None:
                result = resp.json()
            else:
                result = multi_pages_result
            error = ""
            console.debug("Response Status Code: %s" % resp.status_code)
        else:
            result = ""
            error = resp.json()
            console.debug("Response Status Code: %s" % resp.status_code)
            console.debug("Response: %s" % error)
        return {"result": result, "status_code": resp.status_code, "error": error, "uri": uri}

    def mist_get(self, uri, org_id="", site_id="", query=None, page=None, limit=None):
        """GET HTTP Request
        Params: uri, HTTP query
        Return: HTTP response"""
        if not self._check_authorization("GET", org_id=org_id, site_id=site_id):
            console.error("you're not authenticated yet...")
            return None
        resp = None
        try:
            url = self._url(uri)
            # `query` defaults to None (not a shared mutable {}); an empty or
            # missing query contributes nothing to the query string.
            html_query = "?"
            if query:
                for query_param in query:
                    html_query += "%s=%s&" % (query_param, query[query_param])
            if limit:
                html_query += "limit=%s&" % limit
            if page:
                html_query += "page=%s" % page
            url += html_query
            console.debug("Request > GET %s" % url)
            resp = self.session.get(url)
            resp.raise_for_status()
        except HTTPError as http_err:
            console.error(f'HTTP error occurred: {http_err}')
            if resp is not None:  # resp is unbound if _url/session.get raised
                console.error(f'HTTP error description: {resp.json()}')
        except Exception as err:
            console.error(f'Other error occurred: {err}')
        else:
            if "X-Page-Limit" in resp.headers:
                content = resp.json()
                x_page_limit = int(resp.headers["X-Page-Limit"])
                x_page_page = int(resp.headers["X-Page-Page"])
                x_page_total = int(resp.headers["X-Page-Total"])
                if x_page_limit * x_page_page < x_page_total:
                    # Use the server-reported current page to fetch the next
                    # one; the original `page + 1` crashed with a TypeError on
                    # the first call, where `page` is still None.
                    next_page = x_page_page + 1
                    content += self.mist_get(uri, org_id, site_id, query, next_page, limit)["result"]
                return self._response(resp, uri, content)
            else:
                return self._response(resp, uri)

    def mist_post(self, uri, org_id="", site_id="", body=None):
        """POST HTTP Request
        Params: uri, HTTP body
        Return: HTTP response"""
        # if self._check_authorization("POST", org_id=org_id, site_id=site_id):
        if body is None:
            # default is None instead of a shared mutable {}; an omitted body
            # still posts an empty JSON object, as before
            body = {}
        resp = None
        try:
            url = self._url(uri)
            headers = {'Content-Type': "application/json"}
            console.debug("Request > POST %s" % url)
            console.debug("Request body: \r\n%s" % body)
            if isinstance(body, str):
                # pre-serialized payload: send verbatim
                resp = self.session.post(url, data=body, headers=headers)
            else:
                # dict or anything else json-serializable
                resp = self.session.post(url, json=body, headers=headers)
            resp.raise_for_status()
        except HTTPError as http_err:
            console.error(f'HTTP error occurred: {http_err}')
            if resp is not None:
                console.error(f'HTTP error description: {resp.json()}')
        except Exception as err:
            console.error(f'Other error occurred: {err}')
        else:
            return self._response(resp, uri)
        # else:
        #    console.error("you're not authenticated yet...")

    def mist_put(self, uri, org_id="", site_id="", body=None):
        """PUT HTTP Request
        Params: uri, HTTP body
        Return: HTTP response"""
        if not self._check_authorization("PUT", org_id=org_id, site_id=site_id):
            console.error("you're not authenticated yet...")
            return None
        if body is None:
            body = {}
        resp = None
        try:
            url = self._url(uri)
            console.debug("Request > PUT %s" % url)
            console.debug("Request body: \r\n%s" % body)
            if isinstance(body, str):
                resp = self.session.put(url, data=body)
            else:
                resp = self.session.put(url, json=body)
            resp.raise_for_status()
        except HTTPError as http_err:
            console.error(f'HTTP error occurred: {http_err}')
            if resp is not None:
                console.error(f'HTTP error description: {resp.json()}')
        except Exception as err:
            console.error(f'Other error occurred: {err}')
        else:
            return self._response(resp, uri)

    def mist_delete(self, uri, org_id="", site_id=""):
        """DELETE HTTP Request
        Params: uri
        Return: HTTP response"""
        if not self._check_authorization("DELETE", org_id=org_id, site_id=site_id):
            console.error("you're not authenticated yet...")
            return None
        resp = None
        try:
            url = self._url(uri)
            console.debug("Request > DELETE %s" % url)
            resp = self.session.delete(url)
            resp.raise_for_status()
        except HTTPError as http_err:
            console.error(f'HTTP error occurred: {http_err}')
        except Exception as err:
            console.error(f'Other error occurred: {err}')
        else:
            return self._response(resp, uri)

    def mist_post_file(self, uri, org_id="", site_id="", files=None):
        """POST HTTP Request
        Params: uri, HTTP body
        Return: HTTP response"""
        if not self._check_authorization("POST", org_id=org_id, site_id=site_id):
            console.error("you're not authenticated yet...")
            return None
        resp = None
        try:
            url = self._url(uri)
            console.debug("Request > POST %s" % url)
            resp = self.session.post(url, files=files)
            resp.raise_for_status()
        except HTTPError as http_err:
            console.error(f'HTTP error occurred: {http_err}')
            if resp is not None:
                console.error(f'HTTP error description: {resp.json()}')
            # NOTE: on HTTP errors this helper historically returns the raw
            # Response object rather than the _response() dict — kept as-is.
            return resp
        except Exception as err:
            console.error(f'Other error occurred: {err}')
        else:
            return self._response(resp, uri)
# Generated by Django 2.1.2 on 2018-12-06 08:45 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('trade', '0021_auto_20181130_1205'), ] operations = [ migrations.AddField( model_name='goodsitems', name='original_price', field=models.DecimalField(decimal_places=2, default=0.0, max_digits=15, verbose_name='原价'), ), migrations.AddField( model_name='order', name='discount', field=models.DecimalField(decimal_places=2, default=1, max_digits=15, verbose_name='折扣'), ), migrations.AddField( model_name='order', name='receive_time', field=models.DateTimeField(blank=True, null=True, verbose_name='收货时间'), ), migrations.AddField( model_name='order', name='send_time', field=models.DateTimeField(blank=True, null=True, verbose_name='发货时间'), ), migrations.AlterField( model_name='order', name='status', field=models.CharField(choices=[('paying', '待付款'), ('has paid', '待发货'), ('receiving', '待收货'), ('done', '交易成功'), ('close', '交易关闭')], db_index=True, default='paying', max_length=30, verbose_name='订单状态'), ), ]
# -*- coding: utf-8 -*-
# Generated by Django 1.10a1 on 2016-06-23 01:35
from __future__ import unicode_literals

from django.db import migrations, models
import django.utils.timezone


class Migration(migrations.Migration):
    """Adds submission timestamps and fixes the excuse response default."""

    dependencies = [
        ('dashboard', '0027_auto_20160622_1737'),
    ]

    operations = [
        migrations.AddField(
            model_name='excuse',
            name='date_submitted',
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
        migrations.AddField(
            model_name='servicesubmission',
            name='date_applied',
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
        migrations.AlterField(
            model_name='excuse',
            name='response_message',
            # Bug fix: the default was a bytes literal (b'...'); a TextField
            # default must be text on Python 3, not bytes.
            field=models.TextField(default='Please fill out if excuse was not approved'),
        ),
    ]
""" Unit Tests For NBeats Models """ import pytest from kerasbeats import utilities, NBeatsModel class TestNBeatsConfiguration(): def test_generic_prediction_output(self, numeric_data, generic_model): """Confirms that model works w/ univariate time series + prediction has the correct shape""" windows, labels = utilities.prep_time_series(numeric_data) generic_model.fit(windows, labels, epochs = 1) assert generic_model.predict(windows).shape == (windows.shape[0], 1) eval_ = generic_model.evaluate(windows, labels) assert type(eval_) == list assert len(eval_) == 3 def test_interpretable_prediction_output(self, numeric_data, interpretable_model): """Confirms that model works w/ univariate timeries for interpretable model""" windows, labels = utilities.prep_time_series(numeric_data, lookback = 7, horizon = 2) interpretable_model.fit(windows, labels, epochs = 1) assert interpretable_model.predict(windows).shape == (windows.shape[0], 2) eval_ = interpretable_model.evaluate(windows, labels) assert type(eval_) == list assert len(eval_) == 3
#string s="I am a string" print(type(s)) #will say str #boolean yes = True #boolean True print(type(yes)) no = False #Boolean false print(type(no)) #List -- ordered and changeable alpha_list =["a","b","c"] #list initialization print(type(alpha_list)) #will say tuple if this is a tuple why can you cnage print(type(alpha_list[0])) #will say string review alpha_list.append("d") #will add "d" to the list end print(alpha_list) #will print list #Tuple -- ordered and unchangeable alpha_tuple = ("a","b","c") #tuple initialization print(type(alpha_tuple)) #will say tuple try: alpha_tuple[2]= "d" #won't work and will raise Type Error except TypeError: print("We can't add elements to tuples!") #print this message print(alpha_tuple)
# Demonstrate str.isdecimal(): True only when every character is a decimal
# digit — a decimal point or a letter makes it False.
for sample, terminator in (('345', ' '), ('10.75', ' '), ('AB123', '\n')):
    print(sample.isdecimal(), end=terminator)
# True False False
"""Defines all the materials and their material properties. Base class Material allows for users to define a material, while some specific commonly used materials are predefined to help speed up the design process. Users should check that the right material properties are assumed for their parts.""" from . import Q_, ureg class Material(): """Base class for defining material properties Args: name (str): Name of material. Examples Examples should be written in doctest format, and should illustrate how to use the function/class. >>> Attributes: density (Pint.Quantity): Material Density. name (str): name """ def __init__(self, name): self.name = name self.density = 0 * ureg.km / (ureg.m**3) def __repr__(self): return f'{self.name}: (Material))' def describe(self): for d in self.__dict__: print(f'{d:20s}: {str(self.__dict__[d]):20s}') class Aluminium(Material): """Defines a basic aluminium. Args: name (str): Description of parameter `name`. Defaults to 'Al-6061-T6'. Examples Examples should be written in doctest format, and should illustrate how to use the function/class. >>> Attributes: density (Pint.Quantity): Description of parameter `density`. tensile_modulus (Pint.Quantity): Description of parameter `tensile_modulus`. tensile_strength (Pint.Quantity): Description of parameter `tensile_strength`. max_temp (Pint.Quantity): Description of parameter `max_temp`. 
""" def __init__(self, name='Al-6061-T6'): super().__init__(name=name) self.density = 2.7 * ureg.g / (ureg.cm**3) self.tensile_modulus = 69 * ureg.GPa self.tensile_strength = 270 * ureg.MPa self.max_temp = 420 * ureg.degK class PLA(Material): def __init__(self, name='PLA'): super().__init__(name=name) self.density = 1.05 * ureg.g / (ureg.cm**3) class Phenolic(Material): def __init__(self, name='Phenolic'): super().__init__(name=name) self.density = 0.95 * ureg.g / (ureg.cm**3) class Acrylic(Material): def __init__(self, name='Acrylic'): super().__init__(name=name) self.density = 1.19 * ureg.g / (ureg.cm**3) class Plywood(Material): def __init__(self, name='Plywood'): super().__init__(name=name) self.density = 0.63 * ureg.g / (ureg.cm**3) class Polycarbonate(Material): def __init__(self, name='Polycarbonate'): super().__init__(name=name) self.density = 1.2 * ureg.g / (ureg.cm**3)
#!/home/vigliens/python/bin/python # sqsub -r 1h -f xeon -q mpi --mpp 8GB -o 23_graphlab -n 8 /home/vigliens/python/bin/python /home/vigliens/Documents/2_CODE/4_SCRIPTS/MPI/23_graphlab_mini_example.py # for ((i=0; i<=47;i++)); do echo `sqsub -r 4h -f xeon -q mpi --mpp 4GB -o 23_graphlab_0_01 -n 8 /home/vigliens/python/bin/python /home/vigliens/Documents/2_CODE/4_SCRIPTS/MPI/23_graphlab_0_01.py $i`; done # THREADED # for ((i=192; i<=223;i++)); do echo `sqsub -r 7h -f xeon -q threaded --mpp 8GB -o 24_weekday_comparison_$i -n 8 /home/vigliens/python/bin/python /home/vigliens/Documents/2_CODE/4_SCRIPTS/MPI/24_graphlab_0_1_run_weekday_comparison.py $i`; done # 0-95 1e-5 # 96-191 1e-6 # 96-127 50 # 128-159 100 * # 160-191 200 # 192-287 1e-7 # 192-223 50 ** # 224-255 100 # 256-287 200 # 288-383 1e-8 # ################################################## # This scripts should # 1. Run full demographics features experiment in 10% of the weekly data # with a .9/.1 split for train/testing # ################################################## from mpi4py import MPI import os from optparse import OptionParser # import tempfile import graphlab as gl import GraphlabHelpers as GH import itertools import GVM_classes comm = MPI.COMM_WORLD size = comm.Get_size() rank = comm.Get_rank() print "SIZE:{0}, RANK:{1}".format(size, rank) # TEMP_FOLDER = tempfile.mkdtemp(dir='/scratch/vigliens') # ComputeCanada OUT_FOLDER = '/scratch/vigliens/out_weekday/' # ComputeCanada # print 'TEMP_FOLDER: ', TEMP_FOLDER # TEMP_FOLDER = tempfile.mkdtemp(dir='/Users/gabriel/scratch/') # Local # PBS_O_WORKDIR = os.environ["PBS_O_WORKDIR"] try: SQ_JOBID = os.environ["SQ_JOBID"] except: SQ_JOBID = '_STARR' print 'SQ_JOBID:{0}'.format(SQ_JOBID) def init_parameters(): """ Returns a list with all combinations of init parameters in the form [regularization_val, num_factor, side_data_combination] [1e-05, 50, ['usid', 'age']] """ # 6 features generate 32 combinations side_data_combinations = 
GVM_classes.permutations(['age', 'gender', 'country', 'mainstreamness_artist', 'exploratoryness_artist']) # adding the user ID only side_data_combinations = GVM_classes.listprepender(side_data_combinations, field='usid') side_data_combinations.insert(0, ['usid']) num_factors = [50, 100, 200] regularization_vals = [1e-05, 1e-06, 1e-07, 1e-08] parameters = [regularization_vals, num_factors, side_data_combinations] parameter_combination = list(itertools.product(*parameters)) #combinatory of all parameters return parameter_combination def observed_data_loader(path='/scratch/vigliens/9_GL_MODELS/0_1/full'): """ """ observed_data = gl.SFrame(path) full_train, full_test = GH.datasetsplit(observed_data, split=0.9) return full_train, full_test def model_training(observation_data, num_factors, regularization_vals, side_data_factorization, user_data, solver): """ Creates a factorization recommender Returns a model """ model = gl.factorization_recommender.create( observation_data=observation_data, user_id='usid', item_id='mbid', target='rating', user_data=user_data, side_data_factorization=side_data_factorization, max_iterations=50, num_factors=num_factors, regularization=regularization_vals, solver=solver, sgd_trial_sample_proportion=0.25) return model def storing_results(out_folder, file_no, regularization_vals, num_factors, user_side_data_par, train, test, time): """ """ with open(out_folder + str(file_no), 'w') as out_file: out_file.write('\t'.join([str(train), str(test), str(regularization_vals), str(num_factors), str(int(time)), str('.'.join(user_side_data_par)) ])) out_file.write('\n') if __name__ == "__main__": usage = "usage: %prog [options] factor" opts = OptionParser(usage=usage) # # opts.add_option('-f', '--hdf5', dest='h5') options, args = opts.parse_args() i = int(args[0]) # 1. 
INITIALIZATION PARAMETERS par_combination = init_parameters() file_no = i * size + rank regularization_vals = par_combination[file_no][0] num_factors = par_combination[file_no][1] user_side_data_par = par_combination[file_no][2] print ",".join([str(file_no), str(regularization_vals), str(num_factors), str(user_side_data_par)]) # 2. LOADING DATA. TRAINING AND TESTING SETS SHOULDNT BE FIXED???? # observation data full_train = gl.SFrame('/scratch/vigliens/9_GL_MODELS/0_1/weekday_train') # weekday_train full_test = gl.SFrame('/scratch/vigliens/9_GL_MODELS/0_1/weekday_test') # weekday_test # userside data usd = gl.SFrame('/scratch/vigliens/9_GL_MODELS/METADATA/metadata_and_three_features_preprocessed/') #CC # usd = gl.SFrame('/Users/gabriel/Documents/5_DATA/METADATA/metadata_and_three_features_preprocessed') #LOCAL usdf = usd.select_columns(user_side_data_par) for i in range(5): # number of created models model = model_training(observation_data=full_train, num_factors=num_factors, regularization_vals=regularization_vals, side_data_factorization=True, user_data=usdf, solver='adagrad') # compare model with test test fact, reg, train, test = GH.model_comparison_pandas(model, full_test) # gl.set_runtime_config('GRAPHLAB_DEFAULT_NUM_PYLAMBDA_WORKERS', 2) storing_results(OUT_FOLDER, '-'.join([format(file_no, '03'), str(i)]), regularization_vals, num_factors, user_side_data_par, train, test, model.training_time)
# -*- coding: utf-8 -*-

"""Main module."""

import pandas as pd


class _Network:
    """In-memory gas network: parameter tables plus result tables."""

    # TODO: add H2/CH4 composition
    LEVELS = {"HP": 5.5e5, "MP": 2.0e5, "BP+": 1.1e5, "BP": 1.025e5}  # Pa
    LHV = 38.1e3  # kJ/kg
    V_MAX = 2.0  # m/s
    T_GRND = 10 + 273.15  # K

    def __init__(self):
        # Parameter tables
        self.bus = pd.DataFrame(columns=["name", "level", "zone", "type"])
        self.pipe = pd.DataFrame(
            columns=["name", "from_bus", "to_bus", "length_m", "diameter_m", "material", "in_service"]
        )
        self.load = pd.DataFrame(columns=["name", "bus", "p_kW", "min_p_Pa", "scaling"])
        self.feeder = pd.DataFrame(columns=["name", "bus", "p_lim_kW", "p_Pa"])
        self.station = pd.DataFrame(columns=["name", "bus_high", "bus_low", "p_lim_kW", "p_Pa"])

        # Result tables, filled by the simulation
        self.res_bus = pd.DataFrame(columns=["name", "p_Pa", "p_bar"])
        self.res_pipe = pd.DataFrame(columns=["name", "m_dot_kg/s", "v_m/s", "p_kW", "loading_%"])
        self.res_feeder = pd.DataFrame(columns=["name", "m_dot_kg/s", "p_kW", "loading_%"])
        self.res_station = pd.DataFrame(columns=["name", "m_dot_kg/s", "p_kW", "loading_%"])

        self.keys = {"bus", "pipe", "load", "feeder", "station",
                     "res_bus", "res_pipe", "res_feeder", "res_station"}

    def __repr__(self):
        r = "This pandangas network includes the following parameter tables:"
        par = []
        res = []
        for tb in self.keys:
            if len(getattr(self, tb)) > 0:
                if "res_" in tb:
                    res.append(tb)
                else:
                    par.append(tb)
        for tb in par:
            length = len(getattr(self, tb))
            r += "\n - %s (%s %s)" % (tb, length, "elements" if length > 1 else "element")
        if res:
            r += "\n and the following results tables:"
            for tb in res:
                length = len(getattr(self, tb))
                r += "\n - %s (%s %s)" % (tb, length, "elements" if length > 1 else "element")
        return r


def _try_existing_bus(net, bus):
    """
    Check if a bus exist on a given network, raise ValueError if not

    :param net: the given network
    :param bus: the bus to check existence
    :return:
    """
    # Plain check instead of try/assert/except AssertionError: `assert` is
    # stripped under `python -O`, which would disable this validation.
    if bus not in net.bus.name.unique():
        msg = "The bus {} does not exist !".format(bus)
        raise ValueError(msg)


def _check_level(net, bus_a, bus_b, same=True):
    """
    Check the pressure level of two buses on a given network, raise ValueError
    depending on parameter when creating a pipe or a station

    :param net: the given network
    :param bus_a: the first bus
    :param bus_b: the second bus
    :param same: if True, check that the buses have the same pressure level;
                 if False, check that they have different ones (default: True)
    :return:
    """
    # Bug fix: the original used `.all()` on the selected Series, which
    # collapses any non-empty level value to the boolean True — so every pair
    # of buses compared as "same level" and stations could never be created.
    # Fetch the actual level of each bus instead.
    lev_a = net.bus.loc[net.bus.name == bus_a, "level"].iloc[0]
    lev_b = net.bus.loc[net.bus.name == bus_b, "level"].iloc[0]

    if same and lev_a != lev_b:
        msg = "The buses {} and {} have a different pressure level !".format(bus_a, bus_b)
        raise ValueError(msg)
    if not same and lev_a == lev_b:
        msg = "The buses {} and {} have the same pressure level !".format(bus_a, bus_b)
        raise ValueError(msg)


def _change_bus_type(net, bus, bus_type):
    """Turn a NODE bus into `bus_type`, refusing to re-assign a typed bus."""
    idx = net.bus.index[net.bus["name"] == bus].tolist()[0]
    old_type = net.bus.at[idx, "type"]
    if old_type != "NODE":
        msg = "The buses {} is already a {} !".format(bus, old_type)
        raise ValueError(msg)
    net.bus.at[idx, "type"] = bus_type


def create_empty_network():
    """
    Create an empty network

    :return: a Network object that will later contain all the buses, pipes, etc.
    """
    return _Network()


def create_bus(net, level, name, zone=None):
    """
    Create a bus on a given network

    :param net: the given network
    :param level: nominal pressure level of the bus (a key of net.LEVELS)
    :param name: name of the bus
    :param zone: zone of the bus (default: None)
    :return: name of the bus
    :raises ValueError: if `level` is not a known pressure level
    """
    if level not in net.LEVELS:
        msg = "The pressure level of the bus {} is not in {}".format(name, net.LEVELS)
        raise ValueError(msg)
    idx = len(net.bus.index)
    net.bus.loc[idx] = [name, level, zone, "NODE"]
    return name


# TODO: add pipe material into pipe creation and simulation
def create_pipe(net, from_bus, to_bus, length_m, diameter_m, name, material="steel", in_service=True):
    """
    Create a pipe between two existing buses on a given network

    :param net: the given network
    :param from_bus: the name of the already existing bus where the pipe starts
    :param to_bus: the name of the already existing bus where the pipe ends
    :param length_m: length of the pipe (in [m])
    :param diameter_m: inner diameter of the pipe (in [m])
    :param name: name of the pipe
    :param material: material of the pipe
    :param in_service: if False, the simulation will not take this pipe into
                       account (default: True)
    :return: name of the pipe
    """
    _try_existing_bus(net, from_bus)
    _try_existing_bus(net, to_bus)
    _check_level(net, from_bus, to_bus)  # a pipe joins buses of equal level

    idx = len(net.pipe.index)
    net.pipe.loc[idx] = [name, from_bus, to_bus, length_m, diameter_m, material, in_service]
    return name


def create_load(net, bus, p_kW, name, min_p_Pa=1.018e5, scaling=1.0):
    """
    Create a load attached to an existing bus in a given network

    :param net: the given network
    :param bus: the existing bus
    :param p_kW: power consumed by the load (in [kW])
    :param name: name of the load
    :param min_p_Pa: minimum acceptable pressure
    :param scaling: scaling factor for the load (default: 1.0)
    :return: name of the load
    """
    _try_existing_bus(net, bus)
    idx = len(net.load.index)
    net.load.loc[idx] = [name, bus, p_kW, min_p_Pa, scaling]
    _change_bus_type(net, bus, "SINK")
    return name


def create_feeder(net, bus, p_lim_kW, p_Pa, name):
    """
    Create a feeder attached to an existing bus in a given network

    :param net: the given network
    :param bus: the existing bus
    :param p_lim_kW: maximum power flowing through the feeder
    :param p_Pa: operating pressure level at the output of the feeder
    :param name: name of the feeder
    :return: name of the feeder
    """
    _try_existing_bus(net, bus)
    idx = len(net.feeder.index)
    net.feeder.loc[idx] = [name, bus, p_lim_kW, p_Pa]
    _change_bus_type(net, bus, "SRCE")
    return name


def create_station(net, bus_high, bus_low, p_lim_kW, p_Pa, name):
    """
    Create a pressure station between two existing buses on different pressure
    level in a given network

    :param net: the given network
    :param bus_high: the existing bus with higher nominal pressure
    :param bus_low: the existing bus with lower nominal pressure
    :param p_lim_kW: maximum power flowing through the feeder
    :param p_Pa: operating pressure level at the output of the feeder
    :param name: name of the station
    :return: name of the station
    """
    _try_existing_bus(net, bus_high)
    _try_existing_bus(net, bus_low)
    _check_level(net, bus_high, bus_low, same=False)  # must bridge two levels

    idx = len(net.station.index)
    net.station.loc[idx] = [name, bus_high, bus_low, p_lim_kW, p_Pa]
    _change_bus_type(net, bus_high, "SINK")
    _change_bus_type(net, bus_low, "SRCE")
    return name
import numpy as np
from matplotlib import pyplot as plt
from matplotlib import animation


def one():
    """Animate a travelling sine wave (left) and a random scatter (right),
    saving the result to example.mp4."""
    fig = plt.figure()
    ax1 = fig.add_subplot(121, aspect='equal', autoscale_on=False,
                          xlim=(-0, 2), ylim=(-1.5, 1.5))
    ax2 = fig.add_subplot(122, aspect='equal', autoscale_on=False,
                          xlim=(-1.1, 1.1), ylim=(-1.1, 1.1))

    line, = ax1.plot([], [], lw=2)
    time_text = ax1.text(0.02, 0.95, '', transform=ax1.transAxes)
    scatter = ax2.scatter([], [], s=15)

    def init():
        # initialization function: plot the background of each frame
        line.set_data([], [])
        # NOTE(review): newer Matplotlib requires an Nx2 offsets array; an
        # empty list here may raise — confirm the targeted version.
        scatter.set_offsets([])
        time_text.set_text(' ')
        return line, scatter, time_text

    def animate(i):
        x = np.linspace(0, 2, 1000)
        y = np.sin(2 * np.pi * (x - 0.01 * i))
        # NOTE(review): xs is 1-D (100,) but set_offsets expects Nx2 pairs —
        # presumably intended as 50 (x, y) points; verify.
        xs = 2*np.random.rand(100) - 1
        line.set_data(x, y)
        scatter.set_offsets(xs)
        time_text.set_text('%i seconds' % i)
        return line, scatter, time_text

    # note that the frames defines the number of times animate functions is
    # being called
    anim = animation.FuncAnimation(fig, animate, init_func=init,
                                   frames=500, interval=20, blit=True)
    anim.save('example.mp4', fps=30)


def two():
    """Animate a yellow circle moving along a circular path."""
    fig = plt.figure()
    fig.set_dpi(100)
    fig.set_size_inches(7, 6.5)

    ax = plt.axes(xlim=(0, 10), ylim=(0, 10))
    # created off-screen at y=-5; init() moves it into view
    patch = plt.Circle((5, -5), 0.75, fc='y')

    def init():
        patch.center = (5, 5)
        ax.add_patch(patch)
        return patch,

    def animate(i):
        x, y = patch.center
        # circular trajectory of radius 3 around (5, 5)
        x = 5 + 3 * np.sin(np.radians(i))
        y = 5 + 3 * np.cos(np.radians(i))
        patch.center = (x, y)
        return patch,

    anim = animation.FuncAnimation(fig, animate, init_func=init,
                                   frames=360, interval=20, blit=True)

    plt.show()
import unittest
from mock import patch


class TestPyPiXMLRPC(unittest.TestCase):
    """Tests for cheeseprism.rpc.PyPi — Python 2 stack (`mock`, `xmlrpclib`)."""

    @patch('xmlrpclib.ServerProxy')
    def test_search(self, sp):
        from cheeseprism.rpc import PyPi
        out = PyPi.search('five.intid')
        # The proxy must have been constructed; the mocked call chain
        # yields a truthy MagicMock as the result.
        assert sp.called
        assert out

    @patch('xmlrpclib.ServerProxy')
    def test_details(self, sp):
        from cheeseprism.rpc import PyPi
        out = PyPi.package_details('wicked', '1.0')
        # The first positional argument to the ServerProxy constructor
        # should be the configured index URL.
        ((index,)), _ = sp.call_args
        assert index == PyPi.index
        assert sp.called
        assert out
__copyright__ = """ Copyright (C) 2012 Andreas Kloeckner Copyright (C) 2016, 2017 Matt Wala """ __license__ = """ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import sys import numpy as np import loopy as lp import pyopencl as cl import pyopencl.clmath # noqa import pyopencl.clrandom # noqa import pytest import logging logger = logging.getLogger(__name__) try: import faulthandler except ImportError: pass else: faulthandler.enable() from pyopencl.tools import pytest_generate_tests_for_pyopencl \ as pytest_generate_tests __all__ = [ "pytest_generate_tests", "cl" # 'cl.create_some_context' ] # More things to test. 
# - scan(a) + scan(b)
# - test for badly tagged inames

from loopy.version import LOOPY_USE_LANGUAGE_VERSION_2018_2  # noqa


@pytest.mark.parametrize("n", [1, 2, 3, 16])
@pytest.mark.parametrize("stride", [1, 2])
def test_sequential_scan(ctx_factory, n, stride):
    # Plain sequential prefix sum of j**2 with a configurable stride.
    ctx = ctx_factory()
    queue = cl.CommandQueue(ctx)

    knl = lp.make_kernel(
        "[n] -> {[i,j]: 0<=i<n and 0<=j<=%d*i}" % stride,
        """
        a[i] = sum(j, j**2)
        """
        )

    knl = lp.fix_parameters(knl, n=n)
    knl = lp.realize_reduction(knl, force_scan=True)

    evt, (a,) = knl(queue)

    assert (a.get() == np.cumsum(np.arange(stride*n)**2)[::stride]).all()


@pytest.mark.parametrize("sweep_lbound, scan_lbound", [
    (4, 0),
    (3, 1),
    (2, 2),
    (1, 3),
    (0, 4),
    (5, -1),
    ])
def test_scan_with_different_lower_bound_from_sweep(
        ctx_factory, sweep_lbound, scan_lbound):
    ctx = ctx_factory()
    queue = cl.CommandQueue(ctx)

    knl = lp.make_kernel(
        "[n, sweep_lbound, scan_lbound] -> "
        "{[i,j]: sweep_lbound<=i<n+sweep_lbound "
        "and scan_lbound<=j<=2*(i-sweep_lbound)+scan_lbound}",
        """
        out[i-sweep_lbound] = sum(j, j**2)
        """
        )

    n = 10

    knl = lp.fix_parameters(knl, sweep_lbound=sweep_lbound,
            scan_lbound=scan_lbound)
    knl = lp.realize_reduction(knl, force_scan=True)
    evt, (out,) = knl(queue, n=n)

    assert (out.get()
            == np.cumsum(np.arange(scan_lbound, 2*n+scan_lbound)**2)[::2]).all()


def test_automatic_scan_detection():
    knl = lp.make_kernel(
        [
            "[n] -> {[i]: 0<=i<n}",
            "{[j]: 0<=j<=2*i}"
        ],
        """
        a[i] = sum(j, j**2)
        """
        )

    cgr = lp.generate_code_v2(knl)
    assert "scan" not in cgr.device_code()


def test_selective_scan_realization():
    pass


def test_force_outer_iname_for_scan():
    knl = lp.make_kernel(
        "[n] -> {[i,j,k]: 0<=k<n and 0<=i<=k and 0<=j<=i}",
        "out[i] = product(j, a[j]) {inames=i:k}")

    knl = lp.add_dtypes(knl, dict(a=np.float32))

    # TODO: Maybe this deserves to work?
    with pytest.raises(lp.diagnostic.ReductionIsNotTriangularError):
        lp.realize_reduction(knl, force_scan=True)

    knl = lp.realize_reduction(knl, force_scan=True,
            force_outer_iname_for_scan="i")


def test_dependent_domain_scan(ctx_factory):
    ctx = ctx_factory()
    queue = cl.CommandQueue(ctx)

    knl = lp.make_kernel(
        [
            "[n] -> {[i]: 0<=i<n}",
            "{[j]: 0<=j<=2*i}"
        ],
        """
        a[i] = sum(j, j**2) {id=scan}
        """
        )
    knl = lp.realize_reduction(knl, force_scan=True)
    evt, (a,) = knl(queue, n=100)

    assert (a.get() == np.cumsum(np.arange(200)**2)[::2]).all()


@pytest.mark.parametrize("i_tag, j_tag", [
    ("for", "for")
    ])
def test_nested_scan(ctx_factory, i_tag, j_tag):
    ctx = ctx_factory()
    queue = cl.CommandQueue(ctx)

    knl = lp.make_kernel(
        [
            "[n] -> {[i]: 0 <= i < n}",
            "[i] -> {[j]: 0 <= j <= i}",
            "[i] -> {[k]: 0 <= k <= i}"
        ],
        """
        <>tmp[i] = sum(k, 1)
        out[i] = sum(j, tmp[j])
        """)

    knl = lp.fix_parameters(knl, n=10)
    knl = lp.tag_inames(knl, dict(i=i_tag, j=j_tag))

    knl = lp.realize_reduction(knl, force_scan=True)

    print(knl)

    evt, (out,) = knl(queue)

    print(out)


def test_scan_not_triangular():
    knl = lp.make_kernel(
        "{[i,j]: 0<=i<100 and 1<=j<=2*i}",
        """
        a[i] = sum(j, j**2)
        """
        )

    with pytest.raises(lp.diagnostic.ReductionIsNotTriangularError):
        knl = lp.realize_reduction(knl, force_scan=True)


@pytest.mark.parametrize("n", [1, 2, 3, 16, 17])
def test_local_parallel_scan(ctx_factory, n):
    ctx = ctx_factory()
    queue = cl.CommandQueue(ctx)

    knl = lp.make_kernel(
        "[n] -> {[i,j]: 0<=i<n and 0<=j<=i}",
        """
        out[i] = sum(j, a[j]**2)
        """,
        "..."
        )

    knl = lp.fix_parameters(knl, n=n)
    knl = lp.tag_inames(knl, dict(i="l.0"))
    knl = lp.realize_reduction(knl, force_scan=True)
    knl = lp.realize_reduction(knl)

    knl = lp.add_dtypes(knl, dict(a=int))

    print(knl)

    evt, (a,) = knl(queue, a=np.arange(n))
    assert (a == np.cumsum(np.arange(n)**2)).all()


def test_local_parallel_scan_with_nonzero_lower_bounds(ctx_factory):
    ctx = ctx_factory()
    queue = cl.CommandQueue(ctx)

    knl = lp.make_kernel(
        "[n] -> {[i,j]: 1<=i<n+1 and 0<=j<=i-1}",
        """
        out[i-1] = sum(j, a[j]**2)
        """,
        "..."
        )

    knl = lp.fix_parameters(knl, n=16)
    knl = lp.tag_inames(knl, dict(i="l.0"))
    knl = lp.realize_reduction(knl, force_scan=True)
    knl = lp.realize_reduction(knl)
    knl = lp.add_dtypes(knl, dict(a=int))
    evt, (out,) = knl(queue, a=np.arange(1, 17))

    assert (out == np.cumsum(np.arange(1, 17)**2)).all()


def test_scan_extra_constraints_on_domain():
    knl = lp.make_kernel(
        "{[i,j,k]: 0<=i<n and 0<=j<=i and i=k}",
        "out[i] = sum(j, a[j])")

    with pytest.raises(lp.diagnostic.ReductionIsNotTriangularError):
        knl = lp.realize_reduction(
                knl, force_scan=True, force_outer_iname_for_scan="i")


@pytest.mark.parametrize("sweep_iname_tag", ["for", "l.1"])
def test_scan_with_outer_parallel_iname(ctx_factory, sweep_iname_tag):
    ctx = ctx_factory()
    queue = cl.CommandQueue(ctx)

    knl = lp.make_kernel(
        [
            "{[k]: 0<=k<=1}",
            "[n] -> {[i,j]: 0<=i<n and 0<=j<=i}"
        ],
        "out[k,i] = k + sum(j, j**2)"
        )

    knl = lp.tag_inames(knl, dict(k="l.0", i=sweep_iname_tag))
    n = 10
    knl = lp.fix_parameters(knl, n=n)
    knl = lp.realize_reduction(knl, force_scan=True)

    evt, (out,) = knl(queue)

    inner = np.cumsum(np.arange(n)**2)

    assert (out.get() == np.array([inner, 1 + inner])).all()


@pytest.mark.parametrize("dtype", [
    np.int32, np.int64, np.float32, np.float64])
def test_scan_data_types(ctx_factory, dtype):
    ctx = ctx_factory()
    queue = cl.CommandQueue(ctx)

    knl = lp.make_kernel(
        "{[i,j]: 0<=i<n and 0<=j<=i }",
        "res[i] = reduce(sum, j, a[j])",
        assumptions="n>=1")

    a = np.random.randn(20).astype(dtype)
    knl = lp.add_dtypes(knl, dict(a=dtype))
    knl = lp.realize_reduction(knl, force_scan=True)
    evt, (res,) = knl(queue, a=a)

    assert np.allclose(res, np.cumsum(a))


@pytest.mark.parametrize(("op_name", "np_op"), [
    ("sum", np.sum),
    ("product", np.prod),
    ("min", np.min),
    ("max", np.max),
    ])
def test_scan_library(ctx_factory, op_name, np_op):
    ctx = ctx_factory()
    queue = cl.CommandQueue(ctx)

    knl = lp.make_kernel(
        "{[i,j]: 0<=i<n and 0<=j<=i }",
        "res[i] = reduce(%s, j, a[j])" % op_name,
        assumptions="n>=1")

    a = np.random.randn(20)
    # Bug fix: `np.float` was a deprecated alias of the builtin `float` and
    # has been removed from NumPy (>=1.24); use the concrete dtype instead.
    knl = lp.add_dtypes(knl, dict(a=np.float64))
    knl = lp.realize_reduction(knl, force_scan=True)
    evt, (res,) = knl(queue, a=a)

    assert np.allclose(res, np.array(
            [np_op(a[:i+1]) for i in range(len(a))]))


def test_scan_unsupported_tags():
    pass


@pytest.mark.parametrize("i_tag", ["for", "l.0"])
def test_argmax(ctx_factory, i_tag):
    logging.basicConfig(level=logging.INFO)

    dtype = np.dtype(np.float32)
    ctx = ctx_factory()
    queue = cl.CommandQueue(ctx)

    n = 128

    knl = lp.make_kernel(
        "{[i,j]: 0<=i<%d and 0<=j<=i}" % n,
        """
        max_vals[i], max_indices[i] = argmax(j, abs(a[j]), j)
        """)

    knl = lp.tag_inames(knl, dict(i=i_tag))
    knl = lp.add_and_infer_dtypes(knl, {"a": np.float32})
    knl = lp.realize_reduction(knl, force_scan=True)

    a = np.random.randn(n).astype(dtype)
    evt, (max_indices, max_vals) = knl(queue, a=a, out_host=True)

    assert (max_vals == [np.max(np.abs(a)[0:i+1]) for i in range(n)]).all()
    assert (max_indices == [np.argmax(np.abs(a[0:i+1])) for i in range(n)]).all()


def check_segmented_scan_output(arr, segment_boundaries_indices, out):
    # Rebuild the expected per-segment cumulative sums and compare them with
    # the kernel output, segment by segment.
    class SegmentGrouper:

        def __init__(self):
            self.seg_idx = 0
            self.idx = 0

        def __call__(self, key):
            if self.idx in segment_boundaries_indices:
                self.seg_idx += 1
            self.idx += 1
            return self.seg_idx

    from itertools import groupby

    expected = [np.cumsum(list(group))
            for _, group in groupby(arr, SegmentGrouper())]
    actual = [np.array(list(group))
            for _, group in groupby(out, SegmentGrouper())]

    assert len(expected) == len(actual) == len(segment_boundaries_indices)
    assert [(e == a).all() for e, a in zip(expected, actual)]


@pytest.mark.parametrize("n, segment_boundaries_indices", [
    (1, (0,)),
    (2, (0,)),
    (2, (0, 1)),
    (3, (0,)),
    (3, (0, 1)),
    (3, (0, 2)),
    (3, (0, 1, 2)),
    (16, (0, 4, 8, 12))])
@pytest.mark.parametrize("iname_tag", ("for", "l.0"))
def test_segmented_scan(ctx_factory, n, segment_boundaries_indices, iname_tag):
    ctx = ctx_factory()
    queue = cl.CommandQueue(ctx)

    arr = np.ones(n, dtype=np.float32)
    segment_boundaries = np.zeros(n, dtype=np.int32)
    segment_boundaries[(segment_boundaries_indices,)] = 1

    knl = lp.make_kernel(
        "{[i,j]: 0<=i<n and 0<=j<=i}",
        "out[i], <>_ = reduce(segmented(sum), j, arr[j], segflag[j])",
        [
            lp.GlobalArg("arr", np.float32, shape=("n",)),
            lp.GlobalArg("segflag", np.int32, shape=("n",)),
            "..."
        ])

    knl = lp.fix_parameters(knl, n=n)
    knl = lp.tag_inames(knl, dict(i=iname_tag))
    knl = lp.realize_reduction(knl, force_scan=True)

    (evt, (out,)) = knl(queue, arr=arr, segflag=segment_boundaries)

    check_segmented_scan_output(arr, segment_boundaries_indices, out)


if __name__ == "__main__":
    if len(sys.argv) > 1:
        exec(sys.argv[1])
    else:
        from pytest import main
        main([__file__])

# vim: foldmethod=marker
import config
from bot import Bot
import bot_common

# When True, every incoming PRIVMSG is forwarded to the receipt printer.
IS_PRINTER_ENABLED = False


def increment_bigbrain(self, message):
    """Bump the big-brain counter, announce the new total in chat, persist state.

    NOTE(review): ``self`` is the Bot instance — these free functions are
    registered in ``custom_commands`` and presumably invoked by Bot with
    itself as the first argument; confirm against the Bot class.
    """
    self.state['bigbrain_counter'] += 1
    text = f'Big brain moments: {self.state["bigbrain_counter"]}'
    self.send_privmsg(message.channel, text)
    self.write_state()


def increment_smallbrain(self, message):
    """Bump the small-brain counter, announce the new total in chat, persist state."""
    self.state['smallbrain_counter'] += 1
    text = f'Small brain moments: {self.state["smallbrain_counter"]}'
    self.send_privmsg(message.channel, text)
    self.write_state()


def on_message(self, message):
    """Event hook: mirror chat messages to the printer when enabled."""
    if IS_PRINTER_ENABLED and message.irc_command == 'PRIVMSG':
        bot_common.send_message_to_printer(message)


def main():
    """Wire up the command table and start the bot for the clumsycomputer channel."""
    custom_commands = {
        'date': bot_common.reply_with_date,
        'bigbrain': increment_bigbrain,
        'smallbrain': increment_smallbrain,
        'cmds': bot_common.list_commands,
        'addcmd': bot_common.add_template_command,
        'editcmd': bot_common.edit_template_command,
        'delcmd': bot_common.delete_template_command,
        'addquote': bot_common.add_quote,
        'quote': bot_common.reply_with_quote,
        'weather': bot_common.get_weather,
    }
    bot = Bot(
        custom_commands=custom_commands,
        oauth_token=config.OAUTH_TOKEN,
        username='squishymcbotty',
        command_prefix='!',
        channels=['clumsycomputer'],
        caps=[':twitch.tv/tags'],
        state_filename='data/state_clumsycomputer.json',
        event_handlers={
            'on_message': on_message,
        },
        # Initial/default shape of the persisted state file.
        state_schema={
            'template_commands': {},
            'bigbrain_counter': 0,
            'smallbrain_counter': 0,
            'quotes': [],
        },
        # Commands restricted to moderators.
        modonly_commands=[
            'addcmd',
            'editcmd',
            'delcmd',
            'addquote',
            'noot',
        ],
    )
    bot.init()


if __name__ == '__main__':
    main()
# Copyright 2021, Yahoo # Licensed under the terms of the Apache 2.0 license. See the LICENSE file in the project root for terms from .BaseExecutor import BaseExecutor class SelfTargetExecutor(BaseExecutor): __target_type__ = "self" def execute(self) -> None: raise NotImplementedError()
from ctypes import *
import numpy as np

# Shared rasterizer library, loaded relative to the current working directory.
Painter = cdll.LoadLibrary('./Rasterizer/libPainter.so')


def ProjectPaint(points, normals, point_colors, color, depth, world2cam, intrinsic):
    """Project point colors into an image using the native rasterizer.

    All array arguments are numpy arrays passed to C by raw pointer; the
    trailing arguments are the point count and the depth/color image sizes.
    """
    # Fix: the last three dimension arguments were previously wrapped in
    # c_void_p (a pointer type) instead of c_int, inconsistent with the
    # other wrappers below and with the preceding c_int dims here.
    Painter.ProjectPaint(c_void_p(points.ctypes.data),
                         c_void_p(normals.ctypes.data),
                         c_void_p(point_colors.ctypes.data),
                         c_void_p(color.ctypes.data),
                         c_void_p(depth.ctypes.data),
                         c_void_p(world2cam.ctypes.data),
                         c_void_p(intrinsic.ctypes.data),
                         c_int(points.shape[0]),
                         c_int(depth.shape[1]),
                         c_int(depth.shape[0]),
                         c_int(color.shape[1]),
                         c_int(color.shape[0]))


def PaintToTexturemap(texturemap, point_colors, coords):
    """Splat per-point colors into a texture map at the given UV coords."""
    Painter.PaintToTexturemap(c_void_p(texturemap.ctypes.data),
                              c_void_p(point_colors.ctypes.data),
                              c_void_p(coords.ctypes.data),
                              c_int(point_colors.shape[0]),
                              c_int(texturemap.shape[1]),
                              c_int(texturemap.shape[0]))


def PaintToViewNorm(points_cam, normals_cam, mask, depth, coords, textureToImage):
    """Build a texture→image mapping using camera-space points and normals."""
    Painter.PaintToViewNorm(c_void_p(textureToImage.ctypes.data),
                            c_void_p(points_cam.ctypes.data),
                            c_void_p(normals_cam.ctypes.data),
                            c_void_p(mask.ctypes.data),
                            c_void_p(depth.ctypes.data),
                            c_void_p(coords.ctypes.data),
                            c_int(points_cam.shape[0]),
                            c_int(mask.shape[0]),
                            c_int(mask.shape[1]),
                            c_int(textureToImage.shape[1]))


def PaintToView(points_cam, mask, depth, coords, textureToImage):
    """Build a texture→image mapping using camera-space points only."""
    Painter.PaintToView(c_void_p(textureToImage.ctypes.data),
                        c_void_p(points_cam.ctypes.data),
                        c_void_p(mask.ctypes.data),
                        c_void_p(depth.ctypes.data),
                        c_void_p(coords.ctypes.data),
                        c_int(points_cam.shape[0]),
                        c_int(mask.shape[0]),
                        c_int(mask.shape[1]),
                        c_int(textureToImage.shape[1]))
#!~/anaconda3/bin/python3
# ******************************************************
# Author: Pengshuai Yang
# Last modified: 2021-08-04 15:10
# Email: yps18@mails.tsinghua.edu.cn
# Filename: utils.py
# Description:
#   auxillary functions
# ******************************************************
import os


def check_directory(directory):
    """Create *directory* (including parents) if it does not already exist."""
    if not os.path.exists(directory):
        os.makedirs(directory)


def get_bad_slide_list(txt_path):
    """Return the list of bad-slide names, one per line of *txt_path*.

    Fix: the file handle was previously opened without being closed;
    a context manager guarantees closure even on error.
    """
    with open(txt_path, 'r') as f:
        return [line.strip() for line in f]


def remove_bad_slide(slide_list, bad_list):
    """Return the entries of *slide_list* whose basename (text before the
    first '.') is not listed in *bad_list*."""
    return [item for item in slide_list if item.split('.')[0] not in bad_list]
import pytest

import nsrt_mk3_dev


@pytest.fixture(scope="session")
def nsrt(request):
    """Session-wide NSRT-mk3 sound-level-meter handle, opened on the serial
    port given by the ``--vcomm`` command-line option."""
    return nsrt_mk3_dev.NsrtMk3Dev(port=request.config.getoption('--vcomm'))


def pytest_addoption(parser):
    """Register ``--vcomm``: the (virtual) COM port the device is attached to."""
    parser.addoption("--vcomm", action="store", default='COM20')
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import json
import logging

from django.utils.translation import ugettext_lazy as _
from rest_framework import response, viewsets
from rest_framework.exceptions import ValidationError
from rest_framework.renderers import BrowsableAPIRenderer

from backend.accounts import bcs_perm
from backend.bcs_web.audit_log import client
from backend.utils.basic import getitems
from backend.utils.errcodes import ErrorCode
from backend.utils.error_codes import error_codes
from backend.utils.renderers import BKAPIRenderer

from .. import constants as app_constants
from ..base_views import InstanceAPI
from ..common_views.utils import delete_pods, get_project_namespaces
from ..serializers import ReschedulePodsSLZ

logger = logging.getLogger(__name__)


class RollbackPreviousVersion(InstanceAPI, viewsets.ViewSet):
    """Roll a template-instantiated application back to its previous config."""

    renderer_classes = (BKAPIRenderer, BrowsableAPIRenderer)

    def get_config(self, config):
        """Parse a JSON config string; on failure log and return {}."""
        try:
            return json.loads(config)
        except Exception as err:
            logger.error("解析实例配置异常,配置: %s,错误详情: %s", config, err)
            return {}

    def from_template(self, instance_id):
        """Raise unless the instance was created from a template set."""
        if not self._from_template(instance_id):
            raise error_codes.CheckFailed(_("非模板集实例化的应用不允许进行回滚操作"))

    def get_last_config(self, instance_detail):
        """Return the previous ("old_conf") config; raise if none exists,
        i.e. no update/rolling-update was ever performed."""
        last_config = self.get_config(instance_detail.last_config)
        last_config = last_config.get('old_conf') or {}
        if not last_config:
            raise error_codes.CheckFailed(_("请确认已经执行过更新或滚动升级"))
        return last_config

    def get_current_config(self, instance_detail):
        """Return the instance's current config; raise if empty."""
        current_config = self.get_config(instance_detail.config)
        if not current_config:
            raise error_codes.CheckFailed(_("获取实例配置为空"))
        return current_config

    def get(self, request, project_id, instance_id):
        """Return current and previous configs (JSON + YAML) for preview."""
        # Only template-set instances can be rolled back
        self.from_template(instance_id)
        instance_detail = self.get_instance_info(instance_id).first()
        # Permission check
        self.can_use_instance(request, project_id, instance_detail.namespace)
        # Fetch both configs for the instance
        current_config = self.get_current_config(instance_detail)
        last_config = self.get_last_config(instance_detail)
        data = {
            'current_config': current_config,
            # Fix: previously rendered last_config here too, so the "current"
            # YAML shown in the diff preview was actually the old config.
            'current_config_yaml': self.json2yaml(current_config),
            'last_config': last_config,
            'last_config_yaml': self.json2yaml(last_config),
        }
        return response.Response(data)

    def update_resource(self, request, project_id, cluster_id, namespace, config, instance_detail):
        """Push *config* to the cluster and persist the swap of current/last
        config on the instance record."""
        resp = self.update_deployment(
            request,
            project_id,
            cluster_id,
            namespace,
            config,
            kind=request.project.kind,
            category=instance_detail.category,
            app_name=instance_detail.name,
        )
        is_bcs_success = True if resp.data.get('code') == ErrorCode.NoError else False
        # Record operation outcome
        instance_detail.oper_type = app_constants.ROLLING_UPDATE_INSTANCE
        instance_detail.is_bcs_success = is_bcs_success
        if not is_bcs_success:
            # Save once on failure; on success save only at the end to
            # reduce the number of save() calls.
            instance_detail.save()
            raise error_codes.APIError(_("回滚上一版本失败,{}").format(resp.data.get('message')))
        # Swap configs: the old current config becomes the new "old_conf"
        instance_last_config = json.loads(instance_detail.last_config)
        instance_last_config['old_conf'] = json.loads(instance_detail.config)
        instance_detail.last_config = json.dumps(instance_last_config)
        instance_detail.config = json.dumps(config)
        instance_detail.save()

    def update(self, request, project_id, instance_id):
        """回滚上一版本,只有模板集实例化的才会
        1. 判断当前实例允许回滚
        2. 对应实例的配置
        3. 下发更新操作
        """
        # Only template-set instances can be rolled back
        self.from_template(instance_id)
        instance_detail = self.get_instance_info(instance_id).first()
        # Permission check
        self.can_use_instance(request, project_id, instance_detail.namespace)
        # Fetch the instance's configs
        current_config = self.get_current_config(instance_detail)
        last_config = self.get_last_config(instance_detail)
        # Cluster id may live in annotations or (older configs) labels
        cluster_id = getitems(current_config, ['metadata', 'annotations', 'io.tencent.bcs.cluster'], '')
        if not cluster_id:
            cluster_id = getitems(current_config, ['metadata', 'labels', 'io.tencent.bcs.cluster'], '')
        namespace = getitems(current_config, ['metadata', 'namespace'], '')
        desc = _("集群:{}, 命名空间:{}, 应用:[{}] 回滚上一版本").format(cluster_id, namespace, instance_detail.name)
        # Push the previous config, wrapped in an audit-log context
        with client.ContextActivityLogClient(
            project_id=project_id,
            user=request.user.username,
            resource_type="instance",
            resource=instance_detail.name,
            resource_id=instance_id,
            description=desc,
        ).log_modify():
            self.update_resource(request, project_id, cluster_id, namespace, last_config, instance_detail)
        return response.Response()


class ReschedulePodsViewSet(InstanceAPI, viewsets.ViewSet):
    """Batch-delete pods so deployments etc. get rebuilt (rescheduled)."""

    renderer_classes = (BKAPIRenderer, BrowsableAPIRenderer)

    def _can_use_namespaces(self, request, project_id, data, ns_name_id_map):
        """Check namespace permission for every (cluster, namespace) in *data*."""
        for info in data:
            # Resolve namespace id from (cluster_id, name), then check perms
            ns_id = ns_name_id_map.get((info["cluster_id"], info["namespace"]))
            if not ns_id:
                raise ValidationError(_("集群:{}下没有查询到namespace:{}").format(info["cluster_id"], info["namespace"]))
            ns_perm = bcs_perm.Namespace(request, project_id, ns_id)
            ns_perm.can_use(raise_exception=True)

    def _get_pod_names(self, request, project_id, resource_list):
        """Return {(cluster, ns, app, kind): [pod names]} for each resource."""
        pod_names = {}
        for info in resource_list:
            cluster_id = info["cluster_id"]
            namespace = info["namespace"]
            resource_kind = info["resource_kind"]
            name = info["name"]
            is_bcs_success, data = self.get_pod_or_taskgroup(
                request,
                project_id,
                cluster_id,
                field=["resourceName"],  # only the pod name is needed here
                app_name=name,
                ns_name=namespace,
                category=resource_kind,
                kind=request.project.kind,
            )
            if not is_bcs_success:
                raise error_codes.APIError(_("查询资源POD出现异常"))
            # data structure: [{"resourceName": "test1"}, {"resourceName": "test2"}]
            key = (cluster_id, namespace, name, resource_kind)
            # Fix: the inner loop variable previously shadowed the outer
            # ``info``, clobbering it for the rest of the iteration.
            for pod_info in data:
                if key in pod_names:
                    pod_names[key].append(pod_info["resourceName"])
                else:
                    pod_names[key] = [pod_info["resourceName"]]
        return pod_names

    def reschedule_pods(self, request, project_id):
        """批量重新调度pod,实现deployment等应用的重建
        NOTE: 这里需要注意,因为前端触发,用户需要在前端展示调用成功后,确定任务都已经下发了
        因此,这里采用同步操作
        """
        # Validate request payload
        data_slz = ReschedulePodsSLZ(data=request.data)
        data_slz.is_valid(raise_exception=True)
        data = data_slz.validated_data["resource_list"]
        access_token = request.user.token.access_token
        # All resources belong to the same project, so fetch the project's
        # namespaces once and check permissions against that map
        ns_data = get_project_namespaces(access_token, project_id)
        ns_name_id_map = {(info["cluster_id"], info["name"]): info["id"] for info in ns_data}
        self._can_use_namespaces(request, project_id, data, ns_name_id_map)
        # Look up pods under each application
        pod_names = self._get_pod_names(request, project_id, data)
        # Delete the pods (synchronously — the frontend waits for completion)
        delete_pods(access_token, project_id, pod_names)
        return response.Response()
import datetime

import blessed

from word_search import WordSearch


class Game:
    """Interactive terminal word-search game rendered with ``blessed``."""

    def __init__(self, filename="words_dictionary.json"):
        # Puzzle grid; presumably a 100x100 character matrix — confirm
        # against WordSearch.generate_json.
        self.word_search = WordSearch.generate_json(filename, width=100, height=100)
        self.terminal = blessed.Terminal()
        self.keys = {
            # code - (method, args)
            259: (self._move, 0, -1),   # Up
            258: (self._move, 0, 1),    # Down
            260: (self._move, -1, 0),   # Left
            261: (self._move, 1, 0),    # Right
            343: (self._select, )       # Enter
        }
        self.positions = {
            # "part": [[x, y], [x, y]]  # x, y to x, y
            "grid": [],
            "words": [],
            "info": [],
            "timer": []
        }
        # The mapper that been resized. This helps with calculating if the
        # user is at the end and off setting
        self.current_mapper = {"mapper": [[]], "top": "", "side_spacing": 0}
        # TODO: Maybe allow custom colors to coordinates. So, don't have a
        # lot of lists for each color {[x, y]: string from terminal.color}
        self.found_coords = []
        self.select = []          # anchor coordinate while selecting, else []
        self.cursor = [0, 0]      # absolute grid coordinate of the cursor
        self.off_set = [0, 0]     # top-left grid coordinate shown on screen
        # TODO: Might want it to start when the user starts playing
        self.started = datetime.datetime.now()
        self.info = ""

    def _draw_line(self, a_x, a_y, b_x, b_y):
        """
        Draws a line between two points (a -> b)
        :param a_x: x coordinate of a
        :param a_y: y coordinate of a
        :param b_x: x coordinate of b
        :param b_y: y coordinate of b
        :return: list of coordinates
        """
        # This is a bit confusing and hard to read. Mainly because all
        # variables used are single letter
        # Variables used -
        #   m - Slope
        #   x, y - Coordinates
        #   b - Y-intercept or X-intercept
        #   a_, b_ - Coordinates of A point / B point (a -> b)
        #
        # Equations used -
        #   y = mx + b - to find y (used in x for loop)
        #   x = (y - b) / m - to find x (used in y for loop)
        #   m = (b_y - a_y) / (b_x - a_x) - finding the slope
        #   b = -(m * x) + y - finding the y-intercept
        try:
            # Getting the slope
            m = (b_y - a_y) / (b_x - a_x)
        except ZeroDivisionError:
            m = None
        if m is None:
            # Slope is undefined, grabbing the x-intercept
            b = a_x
        else:
            # Getting the y-intercept
            b = -(m * b_x) + b_y
        coords = []
        # Flipping the coordinates if a is greater than b
        # Doing this for the for loops. Range won't work if first value is
        # greater than the second value
        if a_y > b_y:
            a_y, b_y = b_y, a_y
        if a_x > b_x:
            a_x, b_x = b_x, a_x
        # Finding y values for x
        if m is not None:
            for x in range(a_x, b_x + 1):
                y = round(m * x + b)
                coords.append([x, y])
        # Finding x values for y
        if m != 0:
            for y in range(a_y, b_y + 1):
                if m is None:
                    # its a vertical line. x = x-intercept
                    x = b
                else:
                    x = round((y - b) / m)
                coords.append([x, y])
        return coords

    def _resize_mapper(self):
        """Cut the full grid down to what fits on screen and rebuild the
        column-number header; result is stored in ``self.current_mapper``."""
        # Grabbing the number of spaces for the column and row numbers
        side_spacing = len(str(self.word_search.height - 1)) + 2
        top_spacing = len(str(self.word_search.width - 1)) + 1
        # max coordinates the grid can expand to
        max_coord = [
            # dividing terminal.width by 2 since each letter is spaced out
            # taking up 2 spaces.
            self.off_set[0] + (round(self.terminal.width / 2)) - side_spacing + 1,
            self.off_set[1] + self.terminal.height - top_spacing - 6
        ]
        # Grabbing a copy of the mapper resized or basically cutting some
        # letters out to fit on the screen depending on off_set
        mapper = []
        for row in self.word_search.mapper[self.off_set[1]:max_coord[1]]:
            row = row[self.off_set[0]:max_coord[0]]
            mapper.append(row)
        # Creating the column numbers
        top = [[]]
        for x in range(len(mapper[0])):
            x += self.off_set[0]
            x = str(x)
            while len(x) > len(top):
                top.append([' ' for _ in range(len(top[0]))])
            for k, v in enumerate(x):
                top[k].append(v)
        # Adding space if its not the same as top spacing
        while len(top) < top_spacing - 1:
            top.append([' ' for _ in range(len(top[0]))])
        # Turning the top bar into a string
        top = top[::-1]  # Reversing the top bar
        top_string = ""
        for line in top:
            top_string += " " * side_spacing
            top_string += f"{' '.join(line)}\n"
        top_string += "\n"
        self.positions['grid'] = [
            [0, 0],
            [
                len(top[0]) * 2 + side_spacing,
                len(mapper) + top_spacing
            ]
        ]
        # Setting the current_mapper
        self.current_mapper = {
            "mapper": mapper,
            "top": top_string,
            "side_spacing": side_spacing
        }

    def _get_coord(self, relation, offset):
        """
        Grabs the starting coordinate depending on relation and offset.
        Made this since the code is used more than once. Also to make it more
        readable with a method rather than multiple statements.
        :param relation: str - name of coordinate to go by in self.positions
        :param offset: list or tuple - x, y to offset the relation
        :return: list - x, y coordinates
        """
        relation = self.positions[relation][1]
        # returning the starting coordinate
        return [
            relation[0] + offset[0],
            relation[1] + offset[1]
        ]

    def _grab_grid(self):
        """Render the visible grid (header, row numbers, letters) with the
        cursor reversed and found/selected letters highlighted."""
        string = self.current_mapper["top"]
        if self.select:
            select_coords = self._draw_line(*self.cursor, *self.select)
        else:
            select_coords = []
        for i, row in enumerate(self.current_mapper["mapper"]):
            y = i + self.off_set[1]
            string += self.terminal.ljust(str(y), self.current_mapper["side_spacing"])
            for x, val in enumerate(row):
                x += self.off_set[0]
                if [x, y] == self.cursor:
                    # Highlighting the cursor
                    string += self.terminal.reverse(val)
                elif [x, y] in self.found_coords or [x, y] in select_coords:
                    # Its a word that was found or user is selecting between
                    # two coordinates
                    string += self.terminal.white_on_green(val)
                else:
                    string += val
                string += " "
            string += "\n"
        return self.terminal.move_xy(0, 0) + string

    def _grab_words(self):
        # Not implemented yet: will render the word list panel.
        pass

    def _grab_info(self):
        """Render the info line (truncated with '...' if too wide)."""
        coord = self._get_coord("timer", (0, 0))
        max_x = self.terminal.width - coord[0]
        info = self.info.format(cursor=self.cursor, select=self.select)
        if len(info) >= max_x:
            info = info[0: max_x - 4]
            info += "..."
        self.positions['info'] = [
            (*coord,),
            (coord[0] + len(info), coord[1])
        ]
        return self.terminal.move_xy(*coord) + info + self.terminal.clear_eol

    def _update_timer(self):
        """Render the elapsed-time display below the grid."""
        coord = self._get_coord("grid", (0, 1))
        coord[0] = 0
        time_since = datetime.datetime.now() - self.started
        time_since = str(time_since).split('.')[0] + " |"
        self.positions['timer'] = [
            (*coord,),
            (coord[0] + len(time_since), coord[1])
        ]
        return self.terminal.move_xy(*coord) + str(time_since)

    def _print(self, grid=False):
        """Print the timer + info line, optionally redrawing the whole grid."""
        string = ""
        if grid:
            string += self.terminal.clear() + self._grab_grid()
        string += self._update_timer()
        string += self._grab_info()
        print(string)

    def _move(self, t_x, t_y):
        """Move the cursor by (t_x, t_y), scrolling the view when the cursor
        would leave the visible portion but is still inside the full grid."""
        def check(number, min_num, max_num, adding):
            number += adding
            return min_num <= number < max_num
        # Setting t_x, t_y with args tuple. To allow to check it in the for
        # loop below
        args = (t_x, t_y)
        mapper = self.current_mapper['mapper']
        # Max coord of resized mapper
        max_coord = [len(mapper[0]), len(mapper)]
        # Max coord of mapper
        extended_mapper = [self.word_search.width, self.word_search.height]
        # This is a bit unreadable. Mostly did it this way to only need to
        # type it once rather than two if statements. This basically checks
        # for both x, y in cursor. Also checks it with the above list's x, y
        for k, v in enumerate(self.cursor):
            # Checking if the coordinate isn't outside the resized mapper grid
            if check(v, self.off_set[k], max_coord[k] + self.off_set[k], args[k]):
                # Its still inside, move the cursor
                self.cursor[k] += args[k]
            # Checking if the coordinate isn't outside the grid
            elif check(v, 0, extended_mapper[k], args[k]):
                # Its still inside, need to off_set then resize
                self.off_set[k] += args[k]
                self._resize_mapper()
                # Moving the cursor
                self.cursor[k] += args[k]

    def _select(self):
        """Toggle selection: drop the anchor at the cursor, or clear it."""
        if self.select:
            # TODO: Check if word exist
            self.select = []
        else:
            self.select = list(self.cursor)

    def start(self):
        """Main loop: draw, read keys, dispatch handlers; 'q' quits and a
        terminal resize triggers a full re-layout."""
        while True:
            current_size = (self.terminal.width, self.terminal.height)
            with self.terminal.fullscreen(), self.terminal.cbreak(), self.terminal.hidden_cursor():
                self._resize_mapper()
                self._print(grid=True)
                val = ''
                while val.lower() != 'q':
                    val = self.terminal.inkey(timeout=1)
                    if val.code in self.keys:
                        run, args = self.keys[val.code][0], self.keys[val.code][1:]
                        run(*args)
                        # self.found_coords = self._draw_line(5, 5, *self.cursor)
                        self._print(grid=True)
                    self._print(grid=False)
                    if current_size != (self.terminal.width, self.terminal.height):
                        break
                if val == 'q':
                    break
                self.cursor = list(self.off_set)


game = Game()
game.start()
# game._draw_line(10, 10, 50, 0)
# print(f"{game.terminal.clear()}{game._grab_grid()}")
# print(f"{game.positions} -- {game.terminal.width} / {game.terminal.height}")
import numpy as np
import csv
import random as rd
import os

HERE = os.path.dirname(os.path.abspath(__file__))  # useful to locate data files with respect to this fil
data_dir = os.path.join(os.path.dirname(os.path.dirname(HERE)), "data")
print(f"DATA_PATH: {data_dir}")

np.random.seed(2019)


def load_data() -> tuple:
    """
    Generate data for the fishes problem.

    Return:
        Tuple of Numpy arrays: ``(train_X, train_y), (valid_X, valid_y)``.
    """
    # Species name -> numeric class label
    num_fish = {
        'Bream': 0,
        'Roach': 1,
        'Whitefish': 2,
        'Parkki': 3,
        'Perch': 4,
        'Pike': 5,
        'Smelt': 6,
    }

    # Fix: use a context manager so the CSV handle is always closed.
    with open(os.path.join(data_dir, 'Fish.csv'), newline='') as csvfile:
        spamreader = csv.reader(csvfile, delimiter=',', quotechar='|')
        data = []
        head = True
        for row in spamreader:
            if head:
                # Skip the header row
                head = False
                continue
            # Column 0 is the species label; columns 1-6 are measurements
            data.append([float(num_fish[row[0]])] + [float(v) for v in row[1:7]])

    # Fix: shuffling previously used the (unseeded) ``random`` module even
    # though only numpy's RNG is seeded above, so runs were not
    # reproducible. Shuffle with numpy instead.
    data = np.array(data)
    np.random.shuffle(data)

    # 80/20 train/validation split
    prop = 0.8
    sample_size = data.shape[0]
    sep_index = int(prop * sample_size)
    # NOTE(review): X takes columns 0:2, which includes the class label in
    # column 0 — looks suspicious but is preserved as-is; confirm intent.
    train_X = data[:sep_index + 1, 0:2]
    train_y = data[:sep_index + 1, 2:]

    valid_X = data[sep_index + 1:, 0:2]
    valid_y = data[sep_index + 1:, 2:]

    print(f"train_X shape: {np.shape(train_X)}")
    print(f"train_y shape: {np.shape(train_y)}")
    print(f"valid_X shape: {np.shape(valid_X)}")
    print(f"valid_y shape: {np.shape(valid_y)}")
    return (train_X, train_y), (valid_X, valid_y)


if __name__ == "__main__":
    load_data()
# Fix: removed accidental, unused auto-imports (`from tkinter import N`,
# `from tkinter.messagebox import NO`, `from typing import List`) — none
# were referenced, and the tkinter ones break headless environments.


class Song:
    """A single track, linked to its artist and album."""

    def __init__(self, name, artist, album) -> None:
        self.name = name
        self.artist = artist
        self.album = album

    def set_artist(self, artist):
        self.artist = artist

    def set_album(self, album):
        self.album = album

    def __str__(self) -> str:
        return self.name


class Artist:
    """A performer with a (possibly empty) list of albums."""

    def __init__(self, name, albums) -> None:
        self.name = name
        # Fix: compare to None with ``is`` (identity), not ``==``
        self.albums = [] if albums is None else albums

    def set_name(self, name):
        self.name = name

    def add_album(self, album):
        self.albums.append(album)

    def __str__(self) -> str:
        return self.name


class Album:
    """A record: a named collection of songs by one artist."""

    def __init__(self, name, songs, artist) -> None:
        self.name = name
        self.songs = [] if songs is None else songs
        self.artist = artist

    def add_song(self, song):
        self.songs.append(song)

    def __str__(self) -> str:
        return self.name


# Demo: the Song keeps a reference to the Artist object, so renaming the
# artist is visible through the song.
tdg = Artist("Three Days Grace", albums=None)
onex = Album("One-X", songs=None, artist=tdg)
ntl = Song("Never Too Late", tdg, onex)
print(ntl.artist)
tdg.set_name("A")
print(ntl.artist)
from string import ascii_letters, whitespace, digits

from .Token import Token, TokenTypes


class Lexer:
    """Turns a source string into a stream of Tokens."""

    def __init__(self, code):
        self.chars = iter(code)
        self.advance()

    def getTokens(self):
        """Tokenize the whole input and return an iterator over the tokens."""
        tokens = []
        while self.c != "\0":
            tokens.append(self.getNextToken())
        return iter(tokens)

    def getNextToken(self):
        """Return the next token, or an EOF token when input is exhausted.

        Raises:
            ValueError: on a character that starts no known token. (Fix:
            previously such a character was never consumed, so this method
            looped forever.)
        """
        while self.c != "\0":
            if self.c in whitespace:
                self.skipWhitespace()
                continue  # re-check EOF / dispatch on the new character
            if self.c in ascii_letters:
                return self.collectIdentifier()
            if self.c in digits:
                return self.collectNumber()
            if self.c == "\"":
                return self.collectString()
            if self.c == "$":
                return self.advanceWith(Token(TokenTypes.DOLLAR, self.c))
            if self.c == "(":
                return self.advanceWith(Token(TokenTypes.LPAREN, self.c))
            if self.c == ")":
                return self.advanceWith(Token(TokenTypes.RPAREN, self.c))
            if self.c == "*":
                return self.advanceWith(Token(TokenTypes.ASTERISK, self.c))
            if self.c == ";":
                return self.advanceWith(Token(TokenTypes.SEMI, self.c))
            raise ValueError(f"Unexpected character: {self.c!r}")
        return Token(TokenTypes.EOF, "\0")

    def skipWhitespace(self):
        while self.c in whitespace:
            self.advance()

    def collectIdentifier(self):
        """Consume a run of letters and return an IDENTIFIER token."""
        s = ""
        while self.c in ascii_letters:
            s += self.c
            self.advance()
        return Token(TokenTypes.IDENTIFIER, s)

    def collectNumber(self):
        """Consume a run of digits and return a NUMBER token."""
        s = ""
        while self.c in digits:
            s += self.c
            self.advance()
        return Token(TokenTypes.NUMBER, s)

    def collectString(self):
        """Consume a double-quoted string and return a STRING token
        (quotes excluded; no escape handling)."""
        s = ""
        self.advance()  # start quote
        while self.c != "\"":
            s += self.c
            self.advance()
        self.advance()  # end quote
        return Token(TokenTypes.STRING, s)

    def advance(self):
        """Step to the next character; "\\0" marks end of input."""
        try:
            self.c = next(self.chars)
        except StopIteration:
            self.c = "\0"

    def advanceWith(self, x):
        """Consume the current character and return *x* (for 1-char tokens)."""
        self.advance()
        return x
""" Tests for catalyst.pipeline.Pipeline """ import inspect from unittest import TestCase from mock import patch from catalyst.pipeline import Factor, Filter, Pipeline from catalyst.pipeline.data import USEquityPricing from catalyst.pipeline.graph import display_graph from catalyst.utils.numpy_utils import float64_dtype class SomeFactor(Factor): dtype = float64_dtype window_length = 5 inputs = [USEquityPricing.close, USEquityPricing.high] class SomeOtherFactor(Factor): dtype = float64_dtype window_length = 5 inputs = [USEquityPricing.close, USEquityPricing.high] class SomeFilter(Filter): window_length = 5 inputs = [USEquityPricing.close, USEquityPricing.high] class SomeOtherFilter(Filter): window_length = 5 inputs = [USEquityPricing.close, USEquityPricing.high] class PipelineTestCase(TestCase): def test_construction(self): p0 = Pipeline() self.assertEqual(p0.columns, {}) self.assertIs(p0.screen, None) columns = {'f': SomeFactor()} p1 = Pipeline(columns=columns) self.assertEqual(p1.columns, columns) screen = SomeFilter() p2 = Pipeline(screen=screen) self.assertEqual(p2.columns, {}) self.assertEqual(p2.screen, screen) p3 = Pipeline(columns=columns, screen=screen) self.assertEqual(p3.columns, columns) self.assertEqual(p3.screen, screen) def test_construction_bad_input_types(self): with self.assertRaises(TypeError): Pipeline(1) Pipeline({}) with self.assertRaises(TypeError): Pipeline({}, 1) with self.assertRaises(TypeError): Pipeline({}, SomeFactor()) with self.assertRaises(TypeError): Pipeline({'open': USEquityPricing.open}) Pipeline({}, SomeFactor() > 5) def test_add(self): p = Pipeline() f = SomeFactor() p.add(f, 'f') self.assertEqual(p.columns, {'f': f}) p.add(f > 5, 'g') self.assertEqual(p.columns, {'f': f, 'g': f > 5}) with self.assertRaises(TypeError): p.add(f, 1) with self.assertRaises(TypeError): p.add(USEquityPricing.open, 'open') def test_overwrite(self): p = Pipeline() f = SomeFactor() other_f = SomeOtherFactor() p.add(f, 'f') self.assertEqual(p.columns, 
{'f': f}) with self.assertRaises(KeyError) as e: p.add(other_f, 'f') [message] = e.exception.args self.assertEqual(message, "Column 'f' already exists.") p.add(other_f, 'f', overwrite=True) self.assertEqual(p.columns, {'f': other_f}) def test_remove(self): f = SomeFactor() p = Pipeline(columns={'f': f}) with self.assertRaises(KeyError) as e: p.remove('not_a_real_name') self.assertEqual(f, p.remove('f')) with self.assertRaises(KeyError) as e: p.remove('f') self.assertEqual(e.exception.args, ('f',)) def test_set_screen(self): f, g = SomeFilter(), SomeOtherFilter() p = Pipeline() self.assertEqual(p.screen, None) p.set_screen(f) self.assertEqual(p.screen, f) with self.assertRaises(ValueError): p.set_screen(f) p.set_screen(g, overwrite=True) self.assertEqual(p.screen, g) with self.assertRaises(TypeError) as e: p.set_screen(f, g) message = e.exception.args[0] self.assertIn( "expected a value of type bool or int for argument 'overwrite'", message, ) def test_show_graph(self): f = SomeFactor() p = Pipeline(columns={'f': SomeFactor()}) # The real display_graph call shells out to GraphViz, which isn't a # requirement, so patch it out for testing. def mock_display_graph(g, format='svg', include_asset_exists=False): return (g, format, include_asset_exists) self.assertEqual( inspect.getargspec(display_graph), inspect.getargspec(mock_display_graph), msg="Mock signature doesn't match signature for display_graph." ) patch_display_graph = patch( 'catalyst.pipeline.graph.display_graph', mock_display_graph, ) with patch_display_graph: graph, format, include_asset_exists = p.show_graph() self.assertIs(graph.outputs['f'], f) # '' is a sentinel used for screen if it's not supplied. 
self.assertEqual(sorted(graph.outputs.keys()), ['', 'f']) self.assertEqual(format, 'svg') self.assertEqual(include_asset_exists, False) with patch_display_graph: graph, format, include_asset_exists = p.show_graph(format='png') self.assertIs(graph.outputs['f'], f) # '' is a sentinel used for screen if it's not supplied. self.assertEqual(sorted(graph.outputs.keys()), ['', 'f']) self.assertEqual(format, 'png') self.assertEqual(include_asset_exists, False) with patch_display_graph: graph, format, include_asset_exists = p.show_graph(format='jpeg') self.assertIs(graph.outputs['f'], f) # '' is a sentinel used for screen if it's not supplied. self.assertEqual(sorted(graph.outputs.keys()), ['', 'f']) self.assertEqual(format, 'jpeg') self.assertEqual(include_asset_exists, False) expected = ( r".*\.show_graph\(\) expected a value in " r"\('svg', 'png', 'jpeg'\) for argument 'format', " r"but got 'fizzbuzz' instead." ) with self.assertRaisesRegexp(ValueError, expected): p.show_graph(format='fizzbuzz')
from typing import Literal
import functools
from abc import abstractmethod

from restools.timeintegration import TimeIntegrationChannelFlowV1, TimeIntegrationChannelFlowV2
from restools.data_access_strategies import free_data_after_access_strategy, hold_data_in_memory_after_access_strategy


class TimeIntegration3DBuilder:
    """
    Class TimeIntegrationBuilder is a base builder class for TimeIntegration objects (see Builder pattern for
    details). Since TimeIntegration is a base class itself and cannot thus be created, one should pass a concrete
    TimeIntegration class to the constructor. One should derive its own class from this base one which will specify
    transform and DataAccessStrategy for real-valued series and DataAccessStrategy for solution fields.
    """

    def __init__(self, ti_class):
        self._ti_class = ti_class
        self._other_data_access_strategy = None
        self._solution_access_strategy = None

    def get_timeintegration(self, ti_path):
        """Instantiate the configured TimeIntegration class for *ti_path*,
        attaching the strategies chosen by the concrete builder."""
        ti_obj = self._ti_class(ti_path)
        ti_obj.other_data_access_strategy = self._other_data_access_strategy
        ti_obj.solution_access_strategy = self._solution_access_strategy
        return ti_obj

    @abstractmethod
    def create_other_data_access_strategy(self) -> None:
        raise NotImplementedError('Must be implemented')

    @abstractmethod
    def create_solution_access_strategy(self) -> None:
        raise NotImplementedError('Must be implemented')


class NoBackupAccess3DBuilder(TimeIntegration3DBuilder):
    """
    Class NoBackupAccessBuilder implements TimeIntegrationBuilder with such DataAccessStrategy for solution fields
    and other data that they never stored in TimeIntegration.
    """

    def __init__(self, ti_class):
        TimeIntegration3DBuilder.__init__(self, ti_class)

    def create_other_data_access_strategy(self) -> None:
        self._other_data_access_strategy = free_data_after_access_strategy

    def create_solution_access_strategy(self) -> None:
        self._solution_access_strategy = free_data_after_access_strategy


class CacheAllAccess3DBuilder(TimeIntegration3DBuilder):
    """
    Class CacheAllAccessBuilder implements TimeIntegrationBuilder with such DataAccessStrategy for solution fields
    and other data that they immediately cached in TimeIntegration once they are accessed.
    """

    def __init__(self, ti_class):
        TimeIntegration3DBuilder.__init__(self, ti_class)

    def create_other_data_access_strategy(self) -> None:
        self._other_data_access_strategy = hold_data_in_memory_after_access_strategy

    def create_solution_access_strategy(self) -> None:
        self._solution_access_strategy = hold_data_in_memory_after_access_strategy


class TimeIntegration3DBuildDirector:
    """
    Class TimeIntegrationBuildDirector is a director in Builder pattern and is used for builder construction.
    A common use is that one create a builder, then create a director passing the builder to the constructor of the
    director and then call director's method construct(). After that, the builder can be used to produce
    TimeIntegration instances -- as many as one wants.
    """

    def __init__(self, builder):
        self.__builder = builder

    def construct(self):
        self.__builder.create_other_data_access_strategy()
        self.__builder.create_solution_access_strategy()


def get_ti_builder(cf_version: Literal['cfv1', 'cfv2'] = 'cfv1', cache=False,
                   upload_data_extension=None) -> TimeIntegration3DBuilder:
    """
    Returns TimeIntegrationBuilder associated with a particular version of channelflow (cf_version), selected
    xy-averaged quantities, uploaded to vector_series, and able to either store or immediately free all the uploaded
    data (nobackup)

    :param cf_version: version of channelflow (can be either 'cfv1' or 'cfv2')
    :param cache: whether uploaded data should be cached after the use (nobackup=False) or not (nobackup=True)
    :param upload_data_extension: function with decorator ensure_data_id_supported loading additional data by Data ID
    :return: TimeIntegrationBuilder constructed by TimeIntegrationBuildDirector
    """
    if cf_version == 'cfv1':
        ti_base_class = TimeIntegrationChannelFlowV1
    elif cf_version == 'cfv2':
        ti_base_class = TimeIntegrationChannelFlowV2
    else:
        # Fix: this previously raised NotImplemented (a constant, not an
        # exception class), which itself raises a confusing TypeError.
        raise NotImplementedError('The case cf_version={} must be implemented!'.format(cf_version))
    ti_class = ti_base_class
    if upload_data_extension is not None:
        # Derive a uniquely-named subclass so the patched upload_data does
        # not leak into the shared base class.
        ti_class = type('{}_{}'.format(ti_base_class.__name__, id(upload_data_extension)), (ti_base_class,), {})

        def _overridden_upload_data(obj, data_id):
            # Try the extension first; fall back to the base implementation
            # for Data IDs it does not handle (returns None).
            extra_data = upload_data_extension(obj, data_id)
            if extra_data is None:
                return super(type(obj), obj).upload_data(data_id)
            else:
                return extra_data
        ti_class.upload_data = _overridden_upload_data
    if cache:
        builder = CacheAllAccess3DBuilder(ti_class)
    else:
        builder = NoBackupAccess3DBuilder(ti_class)
    director = TimeIntegration3DBuildDirector(builder)
    director.construct()
    return builder


def ensure_data_id_supported(func_=None, *, ids=()):
    """
    Decorator ensure_data_id_supported must be used when one wants to create a function ``upload_data_extension``
    to pass it to function get_ti_builder. Argument ids specifies a list of Data IDs introduced by function
    ``upload_data_extension`` on top of what ``upload_data`` of the corresponding class, derived from
    TimeIntegration, provides.
    """
    def decorator_(func_):
        @functools.wraps(func_)
        def wrapper_(ti_obj, data_id, *args, **kwargs):
            if data_id in ids:
                return func_(ti_obj, data_id, *args, **kwargs)
            else:
                return None
        return wrapper_
    if func_ is None:
        return decorator_
    else:
        return decorator_(func_)
import logging
import os

import pandas as pd
import plotly.graph_objs as go
import requests
import talib
from plotly.offline import plot

from rsi_divergence_finder import *
from timeframe import TimeFrame

# NOTE: `pd` and `logging` were previously only available via the wildcard
# re-export from rsi_divergence_finder; import them explicitly so this module
# does not break if that module ever defines __all__.

real_path = os.path.dirname(os.path.realpath(__file__))
os.chdir(real_path)  # write output HTML next to this script


def plot_rsi_divergence(candles_df, divergences, pair, file_name):
    """Render price + RSI with divergence segments to an offline plotly HTML file.

    :param candles_df: DataFrame with columns 'T' (time), 'C' (close), 'rsi'
    :param divergences: iterable of dicts with start/end time, rsi and price keys
    :param pair: trading pair name used in the chart title
    :param file_name: output file name (without the .html extension)
    """
    plot_file_name = os.path.join(os.getcwd(), '{}.html'.format(file_name))
    all_traces = list()
    # Price on the top subplot, RSI on the bottom one (x2/y2 axes).
    all_traces.append(go.Scatter(
        x=candles_df['T'].tolist(),
        y=candles_df['C'].values.tolist(),
        mode='lines',
        name='Price'
    ))
    all_traces.append(go.Scatter(
        x=candles_df['T'].tolist(),
        y=candles_df['rsi'].values.tolist(),
        mode='lines',
        name='RSI',
        xaxis='x2',
        yaxis='y2'
    ))
    for divergence in divergences:
        dtm_list = [divergence['start_dtm'], divergence['end_dtm']]
        rsi_list = [divergence['rsi_start'], divergence['rsi_end']]
        price_list = [divergence['price_start'], divergence['price_end']]
        # Blue for bullish divergences, red for bearish.
        color = 'rgb(0,0,255)' if 'bullish' in divergence['type'] else 'rgb(255,0,0)'
        all_traces.append(go.Scatter(
            x=dtm_list,
            y=rsi_list,
            mode='lines',
            xaxis='x2',
            yaxis='y2',
            line=dict(
                color=color,
                width=2)
        ))
        all_traces.append(go.Scatter(
            x=dtm_list,
            y=price_list,
            mode='lines',
            line=dict(
                color=color,
                width=2)
        ))
    layout = go.Layout(
        title='{} - RSI divergences'.format(pair),
        yaxis=dict(
            domain=[0.52, 1]
        ),
        yaxis2=dict(
            domain=[0, 0.5],
            anchor='x2'
        )
    )
    fig = dict(data=all_traces, layout=layout)
    plot(fig, filename=plot_file_name)


if __name__ == '__main__':
    pair = "BTCUSDT"
    time_frame = TimeFrame.ONE_DAY
    # Fetch candles from the public Binance klines endpoint.
    candles = requests.get(
        'https://api.binance.com/api/v1/klines?symbol={}&interval={}'.format(pair, time_frame.value[1]))
    candles_df = pd.DataFrame(candles.json(),
                              columns=[TIME_COLUMN, 'O', 'H', 'L', BASE_COLUMN, 'V', 'CT', 'QV', 'N', 'TB', 'TQ', 'I'])
    candles_df[TIME_COLUMN] = pd.to_datetime(candles_df[TIME_COLUMN], unit='ms')
    candles_df[BASE_COLUMN] = pd.to_numeric(candles_df[BASE_COLUMN])
    # Scale prices up before RSI; RSI is scale-invariant but this avoids
    # precision issues with very small quotes — TODO confirm with talib docs.
    candles_df[RSI_COLUMN] = talib.RSI(candles_df[BASE_COLUMN] * 100000, timeperiod=14)
    candles_df.dropna(inplace=True)  # drop the RSI warm-up rows
    div_df = get_all_rsi_divergences(candles_df, time_frame)
    if len(div_df) > 0:
        plot_rsi_divergence(candles_df, div_df, pair, "{0}_{1}".format(pair, time_frame.value[1]))
    else:
        logging.info('No divergence found')
from django.shortcuts import render

# Create your views here.
from django.http import HttpResponse
import datetime
import sys


def index(request):
    '''Portfolio: overview of all initiatives.'''
    from service.ReportService import ReportService
    import Config
    report = ReportService(Config.config)
    # Removed dead local `startReporting = datetime.datetime.now()` — it was
    # never used.
    text = report.portfolioOverview(Config.app.portfolioData)
    return HttpResponse(text)


def initiative(request, initiative_key):
    '''Detail report for a single initiative identified by initiative_key.'''
    from service.ReportService import ReportService
    from service.PortfolioService import PortfolioService
    import Config
    portfolio = PortfolioService(Config.config)
    initiative_data = portfolio.get(initiative_key)
    report = ReportService(Config.config)
    text = report.reportDetails(initiative_data)
    return HttpResponse(text)


def static(request, filename):
    '''Serve a static file through the jinja2 environment.

    Note: not using Django static-file facilities because the same file must
    also be reachable from the jinja2 template when the application is run
    from the command line.
    '''
    from service.ReportService import ReportService
    import Config
    report = ReportService(Config.config)
    template = report.env.get_template('static/%s' % filename)
    # Content type is hard-coded to CSS; presumably only stylesheets are
    # served this way — TODO confirm.
    return HttpResponse(template.render(), "text/css")
import cv2
import numpy as np
import torch
import torch.nn as nn
from torch.nn import functional as F


class GradCAM():
    """Grad-CAM: class-activation maps from a named layer's activations and gradients.

    Hooks are registered on `target_layer` at construction time; calling the
    instance runs a forward+backward pass and returns the upsampled CAM.
    """

    def __init__(self, model, target_layer, use_cuda):
        self.model = model.eval()  # inference mode; hooks still fire
        self.target_layer = target_layer
        self.use_cuda = use_cuda
        self.feature_map = 0
        self.grad = 0
        if self.use_cuda:
            self.model = self.model.cuda()
        # Attach hooks to the layer whose qualified name matches target_layer.
        for module in self.model.named_modules():
            if module[0] == target_layer:
                module[1].register_forward_hook(self.save_feature_map)
                module[1].register_backward_hook(self.save_grad)

    def save_feature_map(self, module, input, output):
        # Forward hook: stash the layer's activation.
        self.feature_map = output.detach()

    def save_grad(self, module, grad_in, grad_out):
        # Backward hook: stash the gradient flowing out of the layer.
        self.grad = grad_out[0].detach()

    def __call__(self, x, index=None):
        """Compute the CAM for input batch x (expects batch size 1).

        :param x: input tensor; assumed shape (1, C, H, W) — TODO confirm
        :param index: target class index; defaults to the top-scoring class
        :return: (cam, index) where cam is a (H, W) non-negative float array
        """
        x = x.clone()
        if self.use_cuda:
            x = x.cuda()
        output = self.model(x)
        # BUG FIX (idiom): compare to None with `is`, not `==`.
        if index is None:
            index = np.argmax(output.cpu().data.numpy())
        # One-hot selection of the target logit so backward() produces
        # d(logit_index)/d(activations).
        one_hot = np.zeros((1, output.size()[-1]), dtype = np.float32)
        one_hot[0][index] = 1
        one_hot = torch.from_numpy(one_hot)
        one_hot.requires_grad_()
        if self.use_cuda:
            one_hot = torch.sum(one_hot.cuda() * output)
        else:
            one_hot = torch.sum(one_hot * output)
        self.model.zero_grad()
        one_hot.backward()
        self.feature_map = self.feature_map.cpu().numpy()[0]
        # Global-average-pool the gradients to get per-channel weights.
        self.weights = np.mean(self.grad.cpu().numpy(), axis = (2, 3))[0, :]
        cam = np.sum(self.feature_map * self.weights[:, None, None], axis=0)
        cam = np.maximum(cam, 0)  # ReLU: keep positive influence only
        cam = cv2.resize(cam, (x.size()[-1], x.size()[-2]))
        return cam, index


def show_cam_on_image(img, mask):
    """Overlay a [0,1] CAM mask on a [0,1] float image; returns uint8 RGB."""
    heatmap = cv2.applyColorMap(np.uint8(255*mask), cv2.COLORMAP_JET)
    heatmap = np.float32(heatmap) / 255
    cam = heatmap + np.float32(img)
    cam = cam / np.max(cam)
    return np.uint8(255 * cam)
# The Intersection Component
# Tung M. Phan
# California Institute of Technology
# August 6, 2018
#
import os
from PIL import Image

dir_path = os.path.dirname(os.path.realpath(__file__))
#intersection_fig = dir_path + "/components/imglib/intersection_states/intersection_lights.png"
intersection_fig = os.path.dirname(dir_path) + '/components/imglib/intersection_states/intersection_lights.png'
intersection = Image.open(intersection_fig)


def get_background():
    """Return a fresh copy of the intersection background image."""
    return Image.open(intersection_fig)


def reflect(coords, limit):
    """Mirror every coordinate about `limit`, i.e. map x -> limit - x."""
    return [limit - c for c in coords]


max_x, max_y = intersection.size

# Wall polygons (x/y vertex lists) for each approach direction; the east and
# south polygons are the point-reflections of west and north respectively.
traffic_light_walls = {
    'west': {'x': [345, 365, 365, 345], 'y': [415, 415, 210, 210]},
    'east': {'x': reflect([345, 365, 365, 345], max_x), 'y': reflect([415, 415, 210, 210], max_y)},
    'north': {'x': [395, 530, 530, 395], 'y': [600, 600, 580, 580]},
    'south': {'x': reflect([395, 530, 530, 395], max_x), 'y': reflect([600, 600, 580, 580], max_y)},
}

crossing_walls = {
    'west': {'x': [344, 366, 366, 344], 'y': [553, 553, 209, 209]},
    'east': {'x': reflect([344, 366, 366, 344], max_x), 'y': reflect([553, 553, 209, 209], max_y)},
    'north': {'x': [395, 670, 670, 395], 'y': [600, 600, 580, 580]},
    'south': {'x': reflect([395, 670, 670, 395], max_x), 'y': reflect([600, 600, 580, 580], max_y)},
}

##################################
#                                #
#         VISUALIZATION          #
#                                #
##################################
visualize = False
if visualize:
    import matplotlib.pyplot as plt
    side = 'south'  # which crossing polygon to draw
    plt.imshow(intersection)
    xs = crossing_walls[side]['x']
    xs.append(xs[0])  # close the polygon
    ys = crossing_walls[side]['y']
    ys.append(ys[0])
    plt.plot(xs, ys, 'r')
    plt.show()
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Environment smoke test: if the scientific stack imports cleanly, report it.
print("You should not see any errors when this is run")
from airflow import DAG
from airflow.operators.python_operator import PythonOperator
from datetime import datetime, timedelta
import os
import logging
import pendulum
import json
import yaml
import tableauserverclient as TSC

default_args = {
    'owner': 'airflow',
    'depends_on_past': False,
    'start_date': datetime(2018, 9, 1, tzinfo=pendulum.timezone('America/Los_Angeles')),
    'email': [],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=2)
}

dag = DAG('metadata_to_file', default_args=default_args, catchup=False, schedule_interval='*/5 * * * *')


def get_metadata():
    """Query the Tableau metadata API for flows and dump the result to flows.json.

    Credentials and the optional project filter come from tableau-config.yml.
    """
    with open('tableau-config.yml', 'r') as stream:
        tableau_info = yaml.safe_load(stream)

    # Build the optional GraphQL project filter from configured project names.
    project_ls_raw = []
    if tableau_info['projects-to-refresh-from']:
        project_ls_raw.extend(tableau_info['projects-to-refresh-from'])
    projects_ls = [f'"{proj}"' for proj in project_ls_raw]
    if len(projects_ls) == 0:
        project_filter = ''
    else:
        projects = str.join(', ', projects_ls)
        project_filter = '''(filter: {projectNameWithin: [''' + projects + ''']})'''

    flows_query = '''{
        flows''' + project_filter + ''' {
            id
            luid
            name
            downstreamFlows {
                luid
                name
            }
        }
    }'''

    tableau_auth = TSC.TableauAuth(tableau_info['tableau-username'], tableau_info['tableau-password'],
                                   site_id=tableau_info['tableau-site'])
    server = TSC.Server(tableau_info['tableau-base-url'], use_server_version=True)
    with server.auth.sign_in(tableau_auth):
        data = server.metadata.query(flows_query)['data']
        # BUG FIX: the file handle was previously opened and never closed;
        # use a context manager so it is flushed/closed even on error.
        with open("flows.json", "w") as out_file:
            json.dump(data, out_file, indent=4)


# One-time bootstrap: if the companion DAG was shipped as a .txt (to keep the
# scheduler from picking it up prematurely), rename it to .py.
# BUG FIX: the previous os.walk triple loop could match the file inside a
# subdirectory yet still rename it at the top-level path; check the exact
# top-level path instead.
path = os.path.dirname(os.path.realpath(__file__)) + '/'
name = 'orchestrate_prep_flows'
if os.path.exists(path + name + '.txt'):
    os.rename(path + name + '.txt', path + name + '.py')
    logging.info(name + '.txt renamed to ' + name + '.py')

t = PythonOperator(task_id="metadata_to_json", python_callable=get_metadata, dag=dag)
import re
from configparser import ConfigParser

from selenium.webdriver.common.by import By


class PageObject:
    """Loads element locators from an INI file and translates them into
    Selenium ``By`` locator tuples.

    INI entries look like ``input_username= (id,user_name)``; the prefix
    before the first comma selects the locator strategy.
    """

    __slots__ = [
        "config",
        "current_section"
    ]

    # Mapping from the shorthand used in the INI file to Selenium By values.
    PoToBy = {
        "id": By.ID,
        "xpath": By.XPATH,
        "p_link_text": By.PARTIAL_LINK_TEXT,
        "link_text": By.LINK_TEXT,
        "tag_name": By.TAG_NAME,
        "css": By.CSS_SELECTOR,
        "class_name": By.CLASS_NAME,
        "name": By.NAME,
        "js": "js",
        "jsc": "java_script"
    }

    def __init__(self, filename):
        self.config = ConfigParser()
        self.config.read(filename)
        self.current_section = ""

    def set_current_section(self, section: str):
        self.current_section = section

    def get_element(self, name: str):
        """Resolve a named locator from the current INI section.

        Example::

            [login]
            # element-type_element-name
            input_username= (id,user_name)

            set_current_section("login")
            get_element("input_username")  -> (By.ID, "user_name")

        :param name: option name inside the current section
        :return: (By-strategy, locator-value) tuple
        :raises ValueError: if no section was selected first
        :raises SyntaxError: if the entry is malformed or uses an unknown strategy
        """
        # A section must be selected before any lookup.
        if self.current_section == "":
            raise ValueError("请设置当前的section")
        raw_value = self.config.get(self.current_section, name)
        # Parse "(method,value)" pairs out of the raw entry.
        parsed = re.findall(r"\((\w{2,10}),(.+)\)", raw_value)
        if len(parsed) == 0:
            raise SyntaxError("{}:语法错误".format(raw_value))
        try:
            locator_method, locator_value = parsed[0]
            # Translate the shorthand into a legal Selenium By strategy.
            return PageObject.PoToBy[locator_method], locator_value
        except KeyError:
            raise SyntaxError("元素定位语法错误,必须是以下值:{}".format(",".join([x for x in PageObject.PoToBy])))
        except IndexError:
            raise SyntaxError("{}:语法错误".format(parsed[0]))
#coding:utf-8 # # id: bugs.core_5823 # title: No permission for SELECT access to blob field in stored procedure # decription: # Confirmed bug on 3.0.4.33034 # Checked on: 3.0.4.33053, 4.0.0.1249: OK # # tracker_id: CORE-5823 # min_versions: ['3.0.5'] # versions: 3.0.5 # qmid: None import pytest from firebird.qa import db_factory, isql_act, Action # version: 3.0.5 # resources: None substitutions_1 = [('BLOB_FIELD_ID.*', '')] init_script_1 = """""" db_1 = db_factory(sql_dialect=3, init=init_script_1) test_script_1 = """ create or alter user tmp$c5823 password '123'; commit; set term ^; execute block as begin execute statement 'drop role blob_viewer'; when any do begin end end ^ set term ;^ commit; create role blob_viewer; create or alter procedure test_proc (id integer) as begin end; commit; recreate table test ( id integer, blb blob ); commit; insert into test (id, blb) values (1, 'blob1'); commit; set term ^; create or alter procedure test_proc (id integer) returns (blb blob) as begin for select blb from test where id = :id into blb do suspend; end ^ set term ;^ commit; grant select on test to procedure test_proc; grant execute on procedure test_proc to blob_viewer; grant blob_viewer to tmp$c5823; commit; connect '$(DSN)' user 'tmp$c5823' password '123' role 'blob_viewer'; set list on; set blob on; select mon$user, mon$role from mon$attachments where mon$attachment_id = current_connection; select blb as blob_field_id from test_proc(1); commit; -- cleanup: connect '$(DSN)' user 'SYSDBA' password 'masterkey'; drop user tmp$c5823; commit; """ act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1) expected_stdout_1 = """ MON$USER TMP$C5823 MON$ROLE BLOB_VIEWER BLOB_FIELD_ID 80:0 blob1 """ @pytest.mark.version('>=3.0.5') def test_1(act_1: Action): act_1.expected_stdout = expected_stdout_1 act_1.execute() assert act_1.clean_expected_stdout == act_1.clean_stdout
#coding=utf-8
# Python 2 spider: collects all thread links from a Baidu Tieba forum and
# stores them in MongoDB (and optionally queues URLs in Redis).
import time
from bs4 import BeautifulSoup
from pymongo import MongoClient
import requests
import urlparse
import json
import redis
import config
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

job_redis = redis.Redis(host=config.Redis_ip, port=config.Redis_port, db=config.Redis_db)


def search_all_page_url(tieba_name):
    # Determine the total number of threads by reading the "last page" link's
    # pn= query parameter from the forum's first page.
    start_url = 'http://tieba.baidu.com/f'
    payload = {'ie': 'utf-8', 'kw': str(tieba_name)}
    r = requests.get(start_url, params=payload)
    all_pages = ''
    if r.status_code == 200:
        time.sleep(2)  # be polite to the server
        bs = BeautifulSoup(r.text, 'html5lib')
        get_all_pages = bs.select('.pagination-default .last')[0].get('href')
        result = urlparse.urlparse(get_all_pages)
        parmas = urlparse.parse_qs(result.query, True)
        all_pages = int(parmas["pn"][0])
        print "共计页数为:", all_pages
    else:
        print "页面信息获取失败"
    return all_pages


# Fetch every thread link of the forum, 50 threads per page.
def get_which_all_linkUrl(all_pages, tieba_name):
    start_url = 'http://tieba.baidu.com/f'
    host_url = 'http://tieba.baidu.com'
    for page in range(0, all_pages, 50):
        payload = {'ie': 'utf-8', 'kw': str(tieba_name), 'pn': page}
        res = requests.get(start_url, params=payload, timeout=20)
        print "当前页码为:", page, payload
        if res.status_code == 200:
            bs = BeautifulSoup(res.text, 'html5lib')
            get_which_all_links = bs.select("#thread_list .j_thread_list")
            for i in range(0, len(get_which_all_links), 1):
                # data-field holds thread metadata as JSON; enrich it with the
                # absolute thread URL and title before persisting.
                dataSet = get_which_all_links[i]['data-field']
                tie_href = get_which_all_links[i].select('.threadlist_title .j_th_tit')[0]['href']
                title = get_which_all_links[i].select('.threadlist_title .j_th_tit')[0].get('title')
                msgData = json.JSONDecoder().decode(dataSet)
                msgData['tie_href'] = host_url + tie_href
                msgData['title'] = str(title)
                print "awdada:", msgData
                # Persist a copy of the record in MongoDB.
                single_data_save_mysql(msgData)
                # And optionally queue the URL in Redis for downstream crawlers.
                # job_redis.sadd('urls', msgData['tie_href'])
        else:
            print "请求出错"


def single_data_save_mysql(dataObj):
    # NOTE(review): despite the name, this saves to MongoDB, not MySQL.
    # Establish the MongoDB connection (opened per record).
    client = MongoClient('127.0.0.1', 27017)
    # Select the target database ("admin").
    db = client.admin
    db.TieBaList2.insert(dataObj)


def mainSpider(tieba_name):
    all_pages = search_all_page_url(tieba_name)
    print all_pages
    get_which_all_linkUrl(all_pages, tieba_name)


# Run this first to populate the thread list.
mainSpider("智能家居")
# mainSpider("人工智能")
import flwr as fl
import sys

# Launch a Flower federated-learning server for ten FedAvg rounds.
# Training does not start until all three clients have connected.
if __name__ == "__main__":
    fed_avg = fl.server.strategy.FedAvg(
        fraction_fit=1,
        fraction_eval=1,
        min_fit_clients=3,
        min_eval_clients=3,
        min_available_clients=3,
    )
    fl.server.start_server("0.0.0.0:8080", config={"num_rounds": 10}, strategy=fed_avg)
# import numpy as np # from regression_model.config.core import config # from regression_model.processing.features import ExtractLetterTransformer def test_ExtractLetterTransformer(train_features): # TODO... # PITA as saple_input_)data has NaNs in it so needs to be transformed before # being passed to ExtractLetterTransformer assert True #np.all(ref == transformed[feature])
import bcrypt
from urllib.parse import urlencode

from django.contrib.auth import get_user_model
from django.utils import timezone
from rest_framework.authtoken.models import Token

from magic_links.models import MagicLinkCredential
from magic_links.settings import api_settings

User = get_user_model()


def authenticate_user(user):
    """Return the DRF auth token for *user*, creating it if necessary."""
    token, created = Token.objects.get_or_create(user=user)
    return token


def check_credential_expiry(credential):
    """Return True if *credential* is still valid; deactivate and return False otherwise."""
    seconds = (timezone.now() - credential.created_at).total_seconds()
    credential_expiry_time = api_settings.MAGIC_LINKS_EXPIRE_TIME
    if seconds <= credential_expiry_time:
        return True
    # Expired: mark inactive so get_or_create won't hand it out again.
    credential.is_active = False
    credential.save()
    return False


def get_url_for_source(request_source):
    """Look up the configured base URL for a request source (None if unknown)."""
    return api_settings.MAGIC_LINKS_URLS.get(request_source)


def append_query_params(url, query_params):
    """Append *query_params* to *url*, respecting an existing query string."""
    separator = "?" if "?" not in url else "&"
    url = "{}{}{}".format(url, separator, urlencode(query_params))
    return url


def get_redirect_url(request_source, query_params):
    """Build the full redirect URL for a source plus query parameters."""
    base_url = get_url_for_source(request_source)
    url = append_query_params(base_url, query_params)
    return url


def get_magic_link(user, request_source, go_next=None):
    """Create (or reuse) a magic-link credential for *user* and return the login URL.

    :param user: user the link authenticates
    :param request_source: key into MAGIC_LINKS_URLS selecting the base URL
    :param go_next: optional post-login redirect target, passed as "next"
    """
    # check for existing key; reuse it only while it is still unexpired
    credential, created = MagicLinkCredential.objects.get_or_create(
        user=user,
        is_active=True
    )
    if not created:
        if not check_credential_expiry(credential):
            credential = MagicLinkCredential.objects.create(user=user)
    token = get_hashed_key(str(credential.key))
    payload = {
        "email": credential.user.email,
        "token": token,
        "source": request_source,
    }
    if go_next:
        payload["next"] = go_next
    # TODO: Error if request_source not specified
    # CONSISTENCY/ROBUSTNESS FIX: delegate to get_redirect_url (which uses
    # append_query_params) instead of the previous inline "{}?{}" join, so a
    # base URL that already contains a query string gets "&" not a second "?".
    return get_redirect_url(request_source, payload)


def get_user_for_email(email):
    """Fetch the user for *email*; optionally auto-create one per settings."""
    try:
        user = User.objects.get(email=email)
    except User.DoesNotExist:
        if api_settings.MAGIC_LINKS_CREATE_USER is True:
            user = User.objects.create_user(email=email)
            # Initially set an unusable password; the magic link is the only
            # way in until the user sets one.
            user.set_unusable_password()
            user.save()
        else:
            user = None
    return user


def get_hashed_key(plain_text_key):
    """Return a bcrypt hash of *plain_text_key* as a UTF-8 string."""
    return bcrypt.hashpw(plain_text_key.encode("utf-8"), bcrypt.gensalt()).decode(
        "utf-8"
    )


def check_hashed_key(plain_text_key, hashed_key):
    """Constant-time check of a plain key against its bcrypt hash."""
    return bcrypt.checkpw(plain_text_key.encode("utf-8"), hashed_key.encode("utf8"))


def inject_template_context(context):
    """Run all configured context processors over *context* and return it."""
    for processor in api_settings.MAGIC_LINKS_CONTEXT_PROCESSORS:
        context.update(processor())
    return context
import grimagents.settings as settings


def test_training_wrapper_path():
    """The resolved training_wrapper.py path must be correctly named and exist."""
    wrapper_path = settings.get_training_wrapper_path()
    assert wrapper_path.parts[-1] == ('training_wrapper.py')
    assert wrapper_path.exists()
# -*- coding: utf-8 -*- """ -------------------------------------- @File : __init__.py.py @Author : maixiaochai @Email : maixiaochai@outlook.com @Created on : 2020/5/22 15:47 -------------------------------------- """ from .api import * from .model import * from .scheduler import *
# config.py # Enable Flask's debugging feature. Should be False in production DEBUG = True
# Proxy-list spiders: each Spider subclass declares the pages to crawl
# (start_urls) and, where the shared table-parsing logic applies, a
# parse_args tuple — presumably (row CSS selector, cell tag, ip cell index,
# port cell index) as consumed by the Spider base class; TODO confirm.
import logging
import re
import time

import requests
from pyquery import PyQuery

from pikapi.spiders.spider import Spider

logger = logging.getLogger(__name__)


class SpiderTxt(Spider):
    # Plain-text sources: pages are raw "ip:port" lists, parsed by regex.
    name = 'txt'
    start_urls = [
        "http://www.proxylists.net/http_highanon.txt",
        "http://ab57.ru/downloads/proxylist.txt",
        "http://ab57.ru/downloads/proxyold.txt",
        "http://pubproxy.com/api/proxy?limit=20&format=txt&type=http",
        "http://comp0.ru/downloads/proxylist.txt",
        'https://www.rmccurdy.com/scripts/proxy/good.txt',
        'http://www.atomintersoft.com/anonymous_proxy_list',
        'https://raw.githubusercontent.com/a2u/free-proxy-list/master/free-proxy-list.txt',
        'http://www.atomintersoft.com/high_anonymity_elite_proxy_list',
        'https://raw.githubusercontent.com/clarketm/proxy-list/master/proxy-list.txt'
    ]

    def parse(self, html):
        # Extract every "ip:port" occurrence from the raw text.
        lst = re.findall(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{2,5}', html)
        for it in lst:
            ip, port = it.split(':')
            if ip and port:
                self._proxies.append((ip, port))


class SpiderData5u(Spider):
    name = 'www.data5u.com'
    start_urls = ['http://www.data5u.com']
    parse_args = ('ul.l2', 'span', 0, 1)


class SpiderIpaddress(Spider):
    name = 'www.ipaddress.com'
    start_urls = ['https://www.ipaddress.com/proxy-list/']
    parse_args = ('table > tbody > tr', 'td', 0, -1)


class SpiderKuaidaili(Spider):
    name = 'www.kuaidaili.com'
    # start_urls = ['https://www.kuaidaili.com/free/inha/1']
    start_urls = ['https://www.kuaidaili.com/free/inha/%s' % i for i in range(1, 6)] + \
                 ['https://www.kuaidaili.com/proxylist/%s' % i for i in range(1, 11)]


class SpiderMrhinkydink(Spider):
    name = 'www.mrhinkydink.com'
    start_urls = ['http://www.mrhinkydink.com/proxies.htm',
                  'http://www.mrhinkydink.com/proxies2.htm',
                  'http://www.mrhinkydink.com/proxies3.htm'
                  ]
    parse_args = ('table > tr', 'td', 0, 1)
    # parse_args = ('tr.text', 'td', 0, 1)


class SpiderXici(Spider):
    name = 'www.xicidaili.com'
    start_urls = ['http://www.xicidaili.com/{}/{}'.format(c, i)
                  for c in ['nn', 'wn', 'wt'] for i in range(1, 4)]
    parse_args = ('#ip_list tr', 'td', 1, 2)


class SpiderCnProxy(Spider):
    name = 'cn-proxy.com'
    start_urls = ['https://cn-proxy.com/']
    parse_args = ('div.table-container > table:nth-child(1) > tbody:nth-child(3) > tr', 'td', 0, 1)


class Spider89ip(Spider):
    name = 'www.89ip.cn'
    # start_urls = ['http://www.89ip.cn/']
    start_urls = ['http://www.89ip.cn/index_%s.html' % i for i in range(1, 17)]


class SpiderIphai(Spider):
    name = 'www.iphai.com'
    start_urls = ['http://www.iphai.com/',
                  'http://www.iphai.com/free/ng',
                  'http://www.iphai.com/free/np',
                  'http://www.iphai.com/free/wg']
    parse_args = ('table > tr', 'td', 0, 1)


class SpiderFeilong(Spider):
    name = 'www.feilongip.com'
    start_urls = ['http://www.feilongip.com/']
    parse_args = ('.FreeIptbody > tr', 'td', 1, -1)


class Spider31f(Spider):
    name = '31f.cn'
    start_urls = ['https://31f.cn/http-proxy/', 'https://31f.cn/https-proxy/']
    parse_args = ('table.table > tr', 'td', 1, 2)


class SpiderIp3366(Spider):
    name = 'www.ip3366.net'
    start_urls = ['http://www.ip3366.net/free/?stype={0}&page={1}'.format(i, j)
                  for i in range(1, 7) for j in range(1, 7)]


class SpiderProxyListen(Spider):
    name = 'www.proxy-listen.de'
    start_urls = ['https://www.proxy-listen.de/Proxy/Proxyliste.html']
    parse_args = ('table.proxyList > tr', 'td', 0, 1)

    def crawl(self):
        # This site requires a POST with a per-page hidden CSRF-like token
        # ("fefefsfesf4tzrhtzuh"), so override the generic crawl: GET the page,
        # scrape the token, then POST the filter form and parse the response.
        exc = None
        self._session = requests.session()
        try:
            for url in self.start_urls:
                logger.debug('{} requests {}'.format(self.name, url))
                resp = self._session.get(url, headers=self._headers, timeout=self._req_timeout,
                                         verify=False)
                key_pattern = re.compile('''name="fefefsfesf4tzrhtzuh" value="([^"]+)"''')
                keysearch = re.findall(key_pattern, resp.text)
                data = {"filter_port": "", "filter_http_gateway": "", "filter_http_anon": "",
                        "filter_response_time_http": "", "fefefsfesf4tzrhtzuh": keysearch[0],
                        "filter_country": "", "filter_timeouts1": "", "liststyle": "info",
                        "proxies": "300", "type": "httphttps", "submit": "Show"}
                resp = self._session.post(url, headers=self._headers, data=data,
                                          timeout=self._req_timeout, verify=False)
                resp.encoding = self._encoding
                if resp.status_code == 200:
                    self.parse(resp.text)
                    logger.debug('{} crawl proxies: {}'.format(url, len(self._proxies)))
                else:
                    logger.error("response code:{} from {}".format(resp.status_code, url))
                time.sleep(self._sleep)
        except Exception as e:
            exc = e
        finally:
            self._session.close()
        return self, exc


# class SpiderMimvp(Spider):
#     # port is rendered as an image and would need OCR
#     name = 'proxy.mimvp.com'
#     start_urls = ['https://proxy.mimvp.com/free.php?proxy=in_hp',
#                   'https://proxy.mimvp.com/free.php?proxy=in_tp']
#     crack_url = None
#
#     def img2code(self, imgurl):
#         ir = requests.get(imgurl, headers=self._headers, timeout=10)
#         if ir.status_code == 200:
#             post_data = {"image": base64.b64encode(ir.content)}
#             res = requests.post(self.crack_url, data=post_data)
#             return res.text
#
#     def parse(self, html):
#         doc = PyQuery(html)
#         trs = doc('.free-table > tbody > td.tbl-proxy-ip,.tbl-proxy-port')
#         for t in trs.items():
#             ip = t('td.tbl-proxy-ip').text()
#             img = t('td.tbl-proxy-port > img').attr('src')
#             # port is an image, needs recognition
#             port = self.img2code(img)
#             if re.match(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$', ip):
#                 self._proxies.append((ip, port))


# class SpiderIpjing(Spider):
#     # port is rendered as an image
#     name = 'www.ipjing.com'
#     start_urls = ['https://www.ipjing.com//?page={}'.format(i) for i in range(1, 7)]


class SpiderProxylistplus(Spider):
    name = 'proxylistplus.com'
    start_urls = ["https://list.proxylistplus.com/Fresh-HTTP-Proxy-List-{0}".format(i)
                  for i in range(1, 7)]
    parse_args = ('tr.cells', 'td', 1, 2)


class SpiderJiangxianli(Spider):
    name = 'ip.jiangxianli.com'
    start_urls = ['http://ip.jiangxianli.com/?page={}'.format(i) for i in range(1, 4)]
    parse_args = ('table > tbody > tr', 'td', 1, 2)


class SpiderKxdaili(Spider):
    name = 'kxdaili.com'
    start_urls = ['http://www.kxdaili.com/dailiip/%s/%s.html#ip' % (i, j)
                  for i in range(1, 3) for j in range(1, 11)]


class SpiderCrossincode(Spider):
    name = 'lab.crossincode.com'
    start_urls = ['https://lab.crossincode.com/proxy/']
    parse_args = ('table > tr', 'td', 0, 1)


class SpiderXsdaili(Spider):
    name = 'www.xsdaili.com'
    start_urls = ['http://www.xsdaili.com/']
    parse_args = ('.cont', 'br', 0, 1)

    def setUp(self):
        # The front page only links to daily posts; collect the first few
        # post URLs and crawl those instead.
        super().setUp()
        html = self.reqs('http://www.xsdaili.com/')
        doc = PyQuery(html)
        tabs = doc('div.table')
        urls = []
        for i, t in enumerate(tabs.items()):
            a = t('div:nth-child(1) > a')
            # print(i, a.attr('href'), a.text())
            urls.append('http://www.xsdaili.com%s' % a.attr('href'))
            if i > 2:
                break
        if len(urls) > 0:
            SpiderXsdaili.start_urls = urls

    def parse(self, html):
        # Posts are free-form text; fall back to the ip:port regex.
        lst = re.findall(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{2,5}', html)
        for it in lst:
            ip, port = it.split(':')
            if ip and port:
                self._proxies.append((ip, port))


class SpiderZdaye(SpiderXsdaili):
    # rate-limits by IP (anti-scraping)
    name = 'ip.zdaye.com'
    start_urls = ['http://ip.zdaye.com/dayProxy.html']
    parse_args = ('.cont', 'br', 0, 1)

    def setUp(self):
        # Same daily-post scheme as SpiderXsdaili but with a different layout;
        # deliberately calls Spider.setUp (not SpiderXsdaili.setUp).
        Spider.setUp(self)
        html = self.reqs('http://ip.zdaye.com/dayProxy.html')
        doc = PyQuery(html)
        tabs = doc('div.thread_item')
        urls = []
        for i, t in enumerate(tabs.items()):
            a = t('div:nth-child(1) > h3 > a')
            urls.append('http://ip.zdaye.com%s' % a.attr('href'))
            if i > 2:
                break
        if len(urls) > 0:
            SpiderZdaye.start_urls = urls
        self._headers['Referer'] = 'http://ip.zdaye.com/dayProxy.html'


class SpiderSuperfastip(Spider):
    name = 'www.superfastip.com'
    start_urls = ['http://www.superfastip.com/welcome/freeip/{}'.format(i) for i in range(1, 5)]


class SpiderYqie(Spider):
    name = 'ip.yqie.com'
    start_urls = ['http://ip.yqie.com/proxygaoni/',
                  'http://ip.yqie.com/proxypuni/',
                  'http://ip.yqie.com/proxyhttps/',
                  'http://ip.yqie.com/proxyhttp/'
                  ]
    parse_args = ('#GridViewOrder > tr', 'td', 1, 2)


class SpiderXiladaili(Spider):
    name = 'www.xiladaili.com'
    start_urls = ['http://www.xiladaili.com/'] + \
                 ['http://www.xiladaili.com/gaoni/{}/'.format(i) for i in range(1, 5)] + \
                 ['http://www.xiladaili.com/http/{}/'.format(i) for i in range(1, 5)] + \
                 ['http://www.xiladaili.com/https/{}/'.format(i) for i in range(1, 5)]
    parse_args = ('.fl-table > tbody > tr', 'td', 0, -1)
from marvinbot.cache import cache
from telegram.ext.filters import BaseFilter
import re


class RegexpFilter(BaseFilter):
    """Telegram message filter that passes when the message text matches a regexp."""

    def __init__(self, pattern, mode='match', **options):
        """Takes a pattern, and returns a match object if it complies. Support caching.

        :param pattern: regular expression source string
        :param mode: 'match' (anchored at start) or 'search' (anywhere)
        :param options: extra flags forwarded to re.compile
        :raises ValueError: if mode is not 'match' or 'search'
        """
        if mode not in ['match', 'search']:
            raise ValueError('Mode should be either match or search')
        self.mode = mode
        self.pattern = re.compile(pattern, **options)
        self.plain_pattern = pattern

    def filter(self, message):
        # BUG FIX: mode was validated and stored but never used — filter()
        # always called .match, silently ignoring mode='search'. Dispatch on
        # the configured mode instead.
        if message.text is None:
            return False
        matcher = getattr(self.pattern, self.mode)
        return bool(matcher(message.text))

    # @cache.cache_on_arguments()
    # def __call__(self, expression):
    #     func = getattr(self.pattern, self.mode)
    #     return func(expression)
    #
    # def cache_hash(self):
    #     return self.plain_pattern


class MultiRegexpFilter(RegexpFilter):
    """RegexpFilter over several alternative patterns combined with '|'."""

    def __init__(self, patterns, **kwargs):
        """Takes a group of patterns and evaluates them.

        If patterns is a dict, add a named capture group for each pattern.
        """
        super(MultiRegexpFilter, self).__init__(self.build_pattern(patterns), **kwargs)

    @staticmethod
    def build_pattern(patterns):
        """Combine a dict or list of patterns into a single alternation pattern.

        :raises TypeError: for unsupported input types (previously returned
            None, which produced a confusing error inside re.compile)
        """
        if isinstance(patterns, dict):
            return "({})".format('|'.join([r'(?P<{name}>{pattern})'.format(name=name, pattern=pattern)
                                           for name, pattern in patterns.items()]))
        elif isinstance(patterns, list):
            return "({})".format('|'.join(patterns))
        raise TypeError('patterns must be a dict or a list')
# 12. Integer to Roman
#
# 20200721
# huao
# Greedy conversion: repeatedly subtract the largest value whose symbol
# still fits, including the subtractive forms (CM, CD, XC, XL, IX, IV).


class Solution:
    # Value/symbol pairs in descending order; subtractive pairs interleaved.
    _ROMAN = [
        (1000, "M"), (900, "CM"), (500, "D"), (400, "CD"),
        (100, "C"), (90, "XC"), (50, "L"), (40, "XL"),
        (10, "X"), (9, "IX"), (5, "V"), (4, "IV"), (1, "I"),
    ]

    def intToRoman(self, num: int) -> str:
        """Convert num (1..3999) to its Roman-numeral representation."""
        pieces = []
        for value, symbol in self._ROMAN:
            count, num = divmod(num, value)
            pieces.append(symbol * count)
        return "".join(pieces)


sol = Solution()
print(sol.intToRoman(1994))
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
import os
import sys
import re
import json
import argparse
import html2text

SCRIPT_PATH=os.path.dirname(os.path.realpath(__file__))

from EnChDict import EnChDict

stardict_db = "%s/stardict.sqlite.db" % SCRIPT_PATH

# Field order of the Anki note type these cards are imported into.
ANKI_ENGLISH_NOTE_FIELDS = ["word", "phonetic", "definition", "translation", "exchange", "detail", "bnc", "frq", "tts"]


class EnglishToAnki:
    """Collects English words from word lists or free text, looks them up in a
    stardict dictionary and emits Anki import cards."""

    def __init__(self, args=None):
        if not os.path.exists(stardict_db):
            print("%s doesn't exist, please decompress/copy the dictionary."%stardict_db)
            sys.exit(1)
        self.en_ch_dict = EnChDict(stardict_db)
        if args and args.enable_google_tts:
            self.en_ch_dict.EnableGoogleTTS()
        self.all_words = {}      # every candidate word (word -> lookup result)
        self.import_words = {}   # subset successfully looked up
        self.args = args

    def processWordList(self, words):
        """Register words from an explicit list, also splitting on commas and
        extracting alphabetic sub-words."""
        assert isinstance(words, list)
        for word in words:
            word = word.strip()
            self.all_words[word] = None
            for p in word.split(","):
                p = p.strip()
                self.all_words[p] = None
                for w in re.findall(r'[a-zA-Z]+', p):
                    if len(w) > 1:
                        self.all_words[w] = None

    def processTextList(self, textList):
        """Register words found in free-form text blocks.

        A line wrapped in double quotes is taken verbatim as one phrase;
        otherwise all alphabetic words longer than one letter are collected.
        """
        for text in textList:
            lines = text.split("\n")
            for line in lines:
                line = line.strip()
                # BUG FIX: blank lines previously raised IndexError on line[0].
                if not line:
                    continue
                #print("process line: %s"%line)
                if line[0] == '"' and line[-1] == '"' and len(line) > 2:
                    w = line[1:-1]
                    w = w.lower()
                    self.all_words[w] = None
                    continue
                for w in re.findall(r'[a-zA-Z]+', line):
                    if len(w) > 1:
                        w = w.lower()
                        self.all_words[w] = None

    def lookup_startdict(self):
        """Look up all registered words; successful hits go to import_words."""
        words = list(self.all_words.keys())
        #print("Looking up: %s"% ",".join(words))
        result, failed_list = self.en_ch_dict.lookup_stardict_sql(words)
        for k in result:
            self.all_words[k] = result[k]
            self.import_words[k] = result[k]

    def genAnkiCards(self, tts_dir=None):
        """Return Anki card lines for the looked-up words."""
        if not tts_dir:
            tts_dir = self.getAnkiTTSDir()
        anki_cards = self.en_ch_dict.result_to_anki(self.import_words, ANKI_ENGLISH_NOTE_FIELDS, tts_dir)
        return anki_cards

    def writeAnkiImportFile(self):
        """Write the generated cards to the output file given on the command line."""
        if not self.args:
            # BUG FIX: was `assert 0 and "..."`, which evaluates to assert 0
            # and never shows the message; use the assert-message form.
            assert 0, "Output file is not specified"
            return
        anki_cards = self.en_ch_dict.result_to_anki(self.import_words, ANKI_ENGLISH_NOTE_FIELDS, self.getAnkiTTSDir())
        # Use a context manager so the file is closed even if a write fails.
        with open(self.args.out, "w") as fp:
            for ac in anki_cards:
                fp.write(ac.encode("utf-8"))
                fp.write("\n")

    def getAnkiDB(self):
        """Read the Anki collection path from the user's private config."""
        with open(os.path.expanduser("~/.configs.secure/evernote_to_anki.config.json"), "r") as fp:
            js = json.load(fp)
        self.anki_db = js["anki_db"]
        return self.anki_db

    def getAnkiTTSDir(self):
        """Derive Anki's collection.media directory from the collection path."""
        with open(os.path.expanduser("~/.configs.secure/evernote_to_anki.config.json"), "r") as fp:
            js = json.load(fp)
        anki_db = js["anki_db"]
        dirs = anki_db.split("/")
        dirs[-1] = "collection.media"
        self.anki_tts_dir = "/".join(dirs)
        return self.anki_tts_dir
"""This module contains the GeneFlow AgaveWrapper class.""" import itertools import os import time import urllib.parse try: from agavepy.agave import Agave from agavepy.asynchronous import AgaveAsyncResponse except ImportError: pass from geneflow.log import Log class AgaveWrapper: """ Agave wrapper class. Wraps the Agavepy python module and adds retry and token refresh functionality via a decorator class. """ class AgaveRetry: """ Agave Retry Decorator class. Adds retry and token refresh functionality to all decorated Agave calls. """ def __init__(self, func_key, silent_404=False): """ Initialize decorator class Args: self: instance of decorator class. func_key: descriptor for function to be decorated. This is used to look up the retry # and retry delay times. silent_404: whether to suppress warning messages for 404 errors """ self._func_key = func_key self._silent_404 = silent_404 def __call__(self, func): """ """ def wrapped_func(that, *args, **kwargs): """ Wrap function for executing an AgavePy command. Args: that: AgaveWrapper class instance. *args: Any arguments to be sent to the AgavePy command. **kwargs: Any keyword-value arguments to be sent to the AgavePy command. Returns: result of the AgavePy command call. 
""" num_tries = 0 num_token_tries = 0 retry = that._config.get( self._func_key+'_retry', that._config['retry'] ) retry_delay = that._config.get( self._func_key+'_retry_delay', that._config['retry_delay'] ) while ( num_tries < retry and num_token_tries < that._config['token_retry'] ): try: try: result = func(that, *args, **kwargs) return result except Exception as err: # check for expired token error if str(err).startswith('401'): num_token_tries += 1 Log.a().warning( 'agave token error [%s]', str(err) ) time.sleep(that._config['token_retry_delay']) # token could not be refreshed, most likely # because token was refreshed in a different # thread/process # create new token if that._config['connection_type']\ == 'impersonate': # re-init object without losing object # binding that._agave.__init__( api_server=that._config['server'], username=that._config['username'], password=that._config['password'], token_username=that._config[ 'token_username' ], client_name=that._config['client'], api_key=that._config['key'], api_secret=that._config['secret'], verify=False ) elif that._config['connection_type']\ == 'agave-cli': # get updated credentials from # ~/.agave/current agave_clients = Agave._read_clients() # don't verify ssl agave_clients[0]['verify'] = False # re-init object without losing object # binding that._agave.__init__(**agave_clients[0]) else: # shouldn't reach this condition, but raise # exception just in case raise Exception( 'invalid agave connection type: {}'\ .format( that._config['connection_type'] ) ) if '404' in str(err): if not self._silent_404: Log.a().warning('agave file/dir/object not found [%s]', str(err)) # don't retry if 404 error return False # not a token error, re-raise raise err except Exception as err: num_tries += 1 Log.a().warning('agave call failed [%s]', str(err)) time.sleep(retry_delay) if num_token_tries == that._config['token_retry']: # failed after reaching token refresh attempt limit Log.an().error( 'agave token refresh max tries (%s) 
exceeded', that._config['token_retry'] ) return False if num_tries == retry: # failed due to other exceptions Log.an().error( 'agave call max tries (%s) exceeded', retry ) return False return result return wrapped_func def __init__(self, config, agave=None, token_username=None): self._config = config self._agave = agave if token_username: self._config['token_username'] = token_username def connect(self): agave_connection_type = self._config.get( 'connection_type', 'impersonate' ) if agave_connection_type == 'impersonate': self._agave = Agave( api_server=self._config['server'], username=self._config['username'], password=self._config['password'], token_username=self._config['token_username'], client_name=self._config['client'], api_key=self._config['key'], api_secret=self._config['secret'], verify=False ) elif agave_connection_type == 'agave-cli': # get credentials from ~/.agave/current agave_clients = Agave._read_clients() agave_clients[0]['verify'] = False # don't verify ssl self._agave = Agave(**agave_clients[0]) # when using agave-cli, token_username must be the same as the # stored creds in user's home directory, this can be different # from job username self._config['token_username'] \ = agave_clients[0]['username'] else: Log.an().error( 'invalid agave connection type: %s', agave_connection_type ) return False return True @AgaveRetry('files_list', silent_404=True) def files_exist(self, system_id, file_path): """ Wrap AgavePy file listing command to check if file exists. Args: self: class instance. system_id: Identifier for Agave storage system. file_path: Path for file listing. Returns: True if file/dir exists. False if file/dir does not exist. """ if self._agave.files.list( systemId=system_id, filePath=file_path ): return True return False @AgaveRetry('files_list') def files_list(self, system_id, file_path, depth=1): """ Wrap AgavePy file listing command. Args: self: class instance. system_id: Identifier for Agave storage system. 
file_path: Path for file listing. Returns: List of file names. """ files = [ { 'path': file_path, 'name': f.name, 'type': f.type } for f in self._agave.files.list( systemId=system_id, filePath=file_path, limit=1000000 ) if f.name[:1] != '.' # skip files that start with . ] # list all subdirectories if not at max depth # depth of -1 means unlimited depth files_subdirs = {} if depth > 1 or depth == -1: for f in files: if f['type'] == 'dir': files_subdirs[f['name']] = self.files_list( system_id, file_path+'/'+f['name'], depth-1 if depth > 1 else depth ) # append all items in files_subdirs to files return files + list(itertools.chain(*files_subdirs.values())) def _recursive_download(self, system_id, file_path, target_path, depth): """ Recursively download files from an Agave location. Args: self: class instance. system_id: Agave system to download from. file_path: Agave path of file or folder to download. target_path: local directory path for download target. depth: recursion depth: -1=all, 0=nothing, 1=file or directory and all files under it. Returns: On success: True with no exceptions. On failure: Throws exception. 
""" if depth == 0: # done downloading to specified depth return True files = self._agave.files.list( systemId=system_id, filePath=file_path, limit=1000000 ) for file in files: if file['name'] == '.': # create directory at target location os.makedirs(target_path) continue if file['type'] == 'file': # download file agave_uri = None local_uri = None if len(files) > 1: agave_uri = 'agave://{}{}/{}'.format( system_id, file_path, file['name'] ) local_uri = os.path.join(target_path, file['name']) else: # toplevel target is a file agave_uri = 'agave://{}{}'.format(system_id, file_path) local_uri = target_path try: self._agave.download_uri( agave_uri, local_uri ) except Exception as err: Log.an().error( 'download FAILED: %s -> %s [%s]', agave_uri, local_uri, str(err) ) return False Log.some().debug( 'download FINISHED: %s -> %s', agave_uri, local_uri ) else: # recursively download the folder self._recursive_download( system_id, '{}/{}'.format(file_path, file['name']), os.path.join(target_path, file['name']), depth-1 if depth != -1 else -1 ) return True @AgaveRetry('files_download') def files_download(self, system_id, file_path, target_path, depth=-1): """ Wraps AgavePy files download and adds recursion. Args: self: class instance. system_id: Agave system to download from. file_path: Agave path of file or folder to download. target_path: local directory path for download target. depth: recursion depth: -1=all, 0=nothing, 1=file or directory and all files under it. Returns: The result of the recursive download function. """ return self._recursive_download( system_id, file_path, target_path, depth ) @AgaveRetry('files_delete') def files_delete(self, system_id, file_path): """ Wrap AgavePy file delete command. Args: self: class instance. system_id: Identifier for Agave storage system. file_path: Path for file to be deleted. Returns: On success: True with no exceptions. On failure: Throws exception. 
""" self._agave.files.delete( systemId=system_id, filePath=file_path ) return True @AgaveRetry('files_mkdir') def files_mkdir(self, system_id, file_path, dir_name): """ Wrap AgavePy make directory command. Args: self: class instance. system_id: Identifier for Agave storage system. file_path: Path where directory to be created. dir_name: Name of new directory to be created. Returns: On success: True with no exceptions. On failure: Throws exception. """ self._agave.files.manage( systemId=system_id, filePath=file_path, body={ 'action': 'mkdir', 'path': dir_name } ) return True @AgaveRetry('jobs_submit') def jobs_submit(self, body): """ Wrap AgavePy submit job command. Args: self: class instance. body: job template to be submitted. Returns: On success: Job descriptor object. On failure: Throws exception. """ job = self._agave.jobs.submit(body=body) return job @AgaveRetry('files_import') def files_import_from_local( self, system_id, file_path, file_name, file_to_upload ): """ Wrap AgavePy import data file command. Args: self: class instance. system_id: Identifier for Agave storage system. file_path: Path where file is to be imported. file_name: Name of the imported file. file_to_upload: File or folder path to upload to Agave. Returns: On success: True with no exceptions. On failure: Throws exception. 
""" if os.path.isdir(file_to_upload): # create target directory, which is "file_name" if not self.files_mkdir(system_id, file_path, file_name): Log.an().error( 'cannot create folder at uri: agave://%s%s/%s', system_id, file_path, file_name ) return False # walk through local directory structure for root, dirs, files in os.walk(file_to_upload, topdown=True): # translate local path to dest path dest_file_path = os.path.join( file_path, file_name, root[len(file_to_upload)+1:] ) # upload each file in this directory level for name in files: # read file in binary mode to transfer response = self._agave.files.importData( systemId=system_id, filePath=dest_file_path, fileName=name, fileToUpload=open( '%s/%s' % (root, name), "rb" ) ) async_response = AgaveAsyncResponse(self._agave, response) status = async_response.result() Log.some().debug( 'import %s: %s/%s -> agave://%s/%s/%s', str(status), root, name, system_id, dest_file_path, name ) if status != 'FINISHED': return False # create new directory for each directory in this level for name in dirs: # create dest directory if not self.files_mkdir( system_id, dest_file_path, name ): Log.an().error( 'cannot create folder at uri: agave://%s%s/%s', system_id, dest_file_path, name ) return False elif os.path.isfile(file_to_upload): # import single file response = self._agave.files.importData( systemId=system_id, filePath=file_path, fileName=file_name, fileToUpload=open(file_to_upload, 'rb') ) async_response = AgaveAsyncResponse(self._agave, response) status = async_response.result() Log.some().debug( 'import %s: %s -> agave://%s/%s/%s', str(status), file_to_upload, system_id, file_path, file_name ) if status != 'FINISHED': return False return True @AgaveRetry('files_import') def files_import_from_agave( self, system_id, file_path, file_name, url_to_ingest ): """ Wrap AgavePy import data file command. Args: self: class instance. system_id: Identifier for Agave storage system. file_path: Path where file is to be imported. 
file_name: Name of the imported file. url_to_ingest: Agave URL to be ingested. Returns: On success: True with no exceptions. On failure: Throws exception. """ response = self._agave.files.importData( systemId=system_id, filePath=file_path, fileName=file_name, urlToIngest=urllib.parse.quote(str(url_to_ingest or ''), safe='/:') ) async_response = AgaveAsyncResponse(self._agave, response) status = async_response.result() Log.some().debug( 'import %s: %s -> agave://%s/%s/%s', str(status), url_to_ingest, system_id, file_path, file_name ) if str(status) == 'FINISHED': return True # not finished, try again raise Exception('agave import failed') @AgaveRetry('jobs_get_status') def jobs_get_status(self, job_id): """ Wrap AgavePy job status command. Args: self: class instance. job_id: job identifer. Returns: On success: Job status. On failure: Throws exception. """ status = self._agave.jobs.getStatus(jobId=job_id)['status'] return status @AgaveRetry('jobs_get_history') def jobs_get_history(self, job_id): """ Wrap agavePy job history command. Args: self: class instance. job_id: job identifer. Returns: On success: Job history. On failure: Throws exception. """ response = self._agave.jobs.getHistory( jobId=job_id ) return response @AgaveRetry('apps_add_update') def apps_add_update(self, body): """ Wrap AgavePy apps add-update command. Args: body: Agave app definition. Returns: On success: Apps add update response. On failure: Throws exception. """ response = self._agave.apps.add( body=body ) return response @AgaveRetry('apps_publish') def apps_publish(self, app_id): """ Wrap agavePy app publish command. Args: self: class instance. app_id: Agave app ID. Returns: On success: Publish result. On failure: Throws exception. """ response = self._agave.apps.manage( appId=app_id, body={ 'action': 'publish' } ) return response
#!/usr/bin/env python

'''Check-only run of AStyle and on ubxlib and report results.'''

import os          # For sep
import subprocess
import u_report
import u_utils
import u_settings

# Prefix to put at the start of all prints
PROMPT = "u_run_astyle_"

# The name of the AStyle configuration file to look for
# in the root of the ubxlib directory
CONFIG_FILE = u_settings.ASTYLE_CONFIG_FILE

# File extensions to include
ASTYLE_FILE_EXTENSIONS = u_settings.ASTYLE_FILE_EXTENSIONS

# Directory names to include; only the directories off the ubxlib
# root need be included, AStyle will recurse below each of these
ASTYLE_DIRS = u_settings.ASTYLE_DIRS

# Directory names to exclude (exclusion is done from
# the end of the file path backwards, so "build" excludes
# "blah\build" as well as "build" but not "build\blah")
EXCLUDE_DIRS = u_settings.ASTYLE_EXCLUDE_DIRS

def run(instance, ubxlib_dir, working_dir, printer, reporter):
    '''Run AStyle in dry-run mode over ubxlib and report changed files.

    Args:
        instance: test instance identifier (used only for the log prompt).
        ubxlib_dir: root of the ubxlib tree to check.
        working_dir: optional working directory (reported only).
        printer: object with a .string() method for log output.
        reporter: u_report-style event sink.

    Returns:
        0 if AStyle ran (formatting differences are a warning, not an
        error), 1 if AStyle could not be found or failed to run.
    '''
    return_value = 1
    got_astyle = False
    call_list = []
    instance_text = u_utils.get_instance_text(instance)
    prompt = PROMPT + instance_text + ": "

    # Print out what we've been told to do
    text = "running AStyle from ubxlib directory \"" + ubxlib_dir +      \
           "\" using configuration file \"" + ubxlib_dir + os.sep +      \
           CONFIG_FILE + "\""
    if working_dir:
        text += ", working directory \"" + working_dir + "\""
    printer.string("{}{}.".format(prompt, text))

    reporter.event(u_report.EVENT_TYPE_CHECK,
                   u_report.EVENT_START,
                   "AStyle")
    got_astyle = u_utils.exe_where("astyle", \
                        "ERROR: can't find AStyle, please make" \
                        " sure that it is installed and on the path.", \
                        printer, prompt)
    if got_astyle:
        # Run AStyle
        printer.string("{}CD to {}...".format(prompt, ubxlib_dir))
        with u_utils.ChangeDir(ubxlib_dir):
            # Assemble the call list
            call_list.append("astyle")
            call_list.append("--options=" + CONFIG_FILE)       # Options file
            call_list.append("--dry-run")                      # Don't make changes
            call_list.append("--formatted")                    # Only list changed files
            call_list.append("--suffix=none")                  # Don't leave .orig files everywhere
            call_list.append("--verbose")                      # Print out stats
            for exclude_dir in EXCLUDE_DIRS:                   # Exclude these directories
                call_list.append("--exclude=" + exclude_dir)
            call_list.append("--ignore-exclude-errors-x")      # Ignore unfound excludes
            call_list.append("--recursive")                    # Recurse through...
            for include_dir in ASTYLE_DIRS:                    # ...these files
                call_list.append(include_dir + os.sep + ASTYLE_FILE_EXTENSIONS)

            # Print what we're gonna do
            tmp = ""
            for item in call_list:
                tmp += " " + item
            printer.string("{}in directory {} calling{}".         \
                           format(prompt, os.getcwd(), tmp))
            try:
                # NOTE(review): shell=True with a list is unusual (on POSIX
                # only the first element would run); the comment says Jenkins
                # hangs without it, so presumably this only runs on Windows —
                # confirm before changing.
                text = subprocess.check_output(call_list,
                                               stderr=subprocess.STDOUT,
                                               shell=True) # Jenkins hangs without this
                formatted = []
                for line in text.splitlines():
                    line = line.decode(encoding="utf-8", errors="ignore")
                    printer.string("{}{}".format(prompt, line))
                    # AStyle doesn't return anything other than 0,
                    # need to look for the word "Formatted" to find
                    # a file it has fiddled with
                    if line.startswith("Formatted"):
                        formatted.append(line)
                if not formatted:
                    reporter.event(u_report.EVENT_TYPE_CHECK,
                                   u_report.EVENT_PASSED)
                else:
                    reporter.event(u_report.EVENT_TYPE_CHECK,
                                   u_report.EVENT_WARNING)
                    for line in formatted:
                        reporter.event_extra_information(line)
                # We don't return any errors about formatting
                return_value = 0
            except subprocess.CalledProcessError as error:
                reporter.event(u_report.EVENT_TYPE_CHECK,
                               u_report.EVENT_FAILED)
                printer.string("{}AStyle returned error {}:".
                               format(prompt, error.returncode))
                for line in error.output.splitlines():
                    line = line.strip()
                    if line:
                        reporter.event_extra_information(line)
                        printer.string("{}{}".format(prompt, line))
    else:
        reporter.event(u_report.EVENT_TYPE_INFRASTRUCTURE,
                       u_report.EVENT_FAILED,
                       "there is a problem with the AStyle installation")

    return return_value
# Generated by Django 3.1.2 on 2020-11-22 13:39 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Pazienti', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('paziente', models.CharField(blank=True, default='DEFAULT VALUE', max_length=200, null=True, verbose_name='paziente')), ('data_nascita_paziente', models.DateField(blank=True, null=True)), ('nome_proprietario', models.CharField(blank=True, max_length=200, null=True, verbose_name='nome proprietario')), ('cognome_proprietario', models.CharField(blank=True, max_length=200, null=True, verbose_name='cognome proprietario')), ], options={ 'ordering': ['paziente'], }, ), ]
# Copyright (c) 2020 fortiss GmbH
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT

import numpy as np

from modules.runtime.scenario.scenario_generation.config_readers.config_readers_interfaces import ConfigReaderControlledAgents

from bark.world.goal_definition import GoalDefinition, GoalDefinitionPolygon, GoalDefinitionStateLimits
from modules.runtime.commons.parameters import ParameterServer


class NoneControlled(ConfigReaderControlledAgents):
  """Config reader that marks no agent in this road corridor as controlled."""

  def create_from_config(self, config_param_object, road_corridor, agent_states, **kwargs):
    """Return one controlled-flag per agent (all False), an empty defaults dict, and the params."""
    controlled_flags = [False for _ in agent_states]
    return controlled_flags, {}, config_param_object


class RandomSingleAgent(ConfigReaderControlledAgents):
  """Config reader that picks exactly one agent at random as the controlled agent."""

  def create_from_config(self, config_param_object, road_corridor, agent_states, **kwargs):
    """Return a controlled-flag list with a single random True entry, an empty defaults dict, and the params."""
    # todo(@bernhard): make seed state global, a.t.m. always same number drawn
    chosen_idx = self.random_state.randint(low=0, high=len(agent_states), size=None)
    controlled_flags = [idx == chosen_idx for idx in range(len(agent_states))]
    return controlled_flags, {}, config_param_object
#!python3
# -*- coding:utf-8 -*-
"""Demo of subprocess.run with correctly ordered exception handling."""

import traceback
import subprocess as subp
from subprocess import SubprocessError, TimeoutExpired

try:
    result = subp.run(args=["ls", "-l"])
    print(result.returncode)
    # shell=False (the default): pass the argv list directly so the URL is
    # actually forwarded to firefox. With shell=True on POSIX only the first
    # list element ("firefox") would have been executed and the URL ignored.
    result = subp.run(args=["firefox", "https://www.google.com"])
    print(result.returncode)
except TimeoutExpired:
    # Must come before SubprocessError: TimeoutExpired is a subclass of it,
    # so with the original ordering this handler was unreachable.
    pass
except SubprocessError:
    # was: se.with_traceback() — that call requires a traceback argument and
    # raised TypeError; print the current exception's traceback instead
    # (this is what the `traceback` import was for).
    traceback.print_exc()
except FileNotFoundError:
    # Raised (as OSError, not SubprocessError) when the executable is absent,
    # e.g. firefox is not installed.
    traceback.print_exc()
finally:
    pass
# © 2020 Nokia
# Licensed under the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
# !/usr/bin/env python3
# coding: utf-8
# Author: Élie de Panafieu  <elie.de_panafieu@nokia-bell-labs.com>

from fitting_text_distance.tools.matrix_operations import *


class Distribution:
    """Probability distribution on a finite sequence of floats.

    Attributes
    ----------
    values: list or tuple of floats
    probabilities: list or tuple of floats summing to '1.'
    moments: dict caching already-computed moments, keyed by order

    Methods
    -------
    get_moment(order: int) -> float
        Returns the moment of order 'order' of self (memoized).
    get_mean() -> float
    get_variance() -> float
    """

    def __init__(self, values, probabilities):
        """Creates a 'Distribution' object from two lists: values and associated probabilities.

        Parameters
        ----------
        values: list or tuple of floats
        probabilities: list or tuple of floats summing to '1.'

        Raises
        ------
        ValueError
            If 'values' and 'probabilities' differ in length.
        """
        if len(values) != len(probabilities):
            # was a bare `raise ValueError` — include a message for debugging
            raise ValueError('values and probabilities must have the same length')
        self.values = values
        self.probabilities = probabilities
        # Memoization cache; previously created but never used, so every
        # get_moment() call recomputed the scalar product.
        self.moments = dict()

    def __len__(self):
        return len(self.values)

    def get_moment(self, order):
        """Returns the moment of order 'order' of self, caching the result.

        Parameters
        ----------
        order: int

        Returns
        -------
        Sum of self.probabilities[i] * self.values[i] ** order
        for i from 0 to len(self).
        """
        if order not in self.moments:
            self.moments[order] = scalar_product(
                self.probabilities,
                coefficient_wise_power_from_vector(self.values, order))
        return self.moments[order]

    def get_mean(self):
        """Return the first moment (expected value)."""
        return self.get_moment(1)

    def get_variance(self):
        """Return E[X^2] - E[X]^2."""
        return self.get_moment(2) - self.get_moment(1)**2
#!/usr/bin/env python3
"""AoC 2016 day 15: find the first button-press time at which the capsule
falls through every rotating disc (each disc must be at position 0 when the
capsule reaches it, one time-step per disc)."""
from copy import deepcopy
import re

DISC_RE = re.compile(
    r'Disc #(\d+) has (\d+) positions; at time=0, it is at position (\d+).')

# (number_of_positions, position_at_t0) for each disc, top to bottom.
discs = []
with open('fifteen.in', 'r') as f:
    for line in f:
        m = DISC_RE.match(line)
        disc, npositions, position = (int(g) for g in m.group(1, 2, 3))
        discs.append((npositions, position))
# Part 2's extra disc: 11 positions, starting at 0.
discs.append((11, 0))


def _capsule_falls_through(press):
    """True when, pressing at time `press`, disc i is at slot 0 at time press+1+i."""
    return all((pos + press + 1 + i) % mod == 0
               for i, (mod, pos) in enumerate(discs))


presstime = 0
while not _capsule_falls_through(presstime):
    presstime += 1
print(presstime)
import garapa
import png


def init_joystick():
    """Reset every Game Boy button input to the released (0) state."""
    for key in ['right', 'left', 'up', 'down', 'a', 'b', 'select', 'start']:
        garapa.set_input(key, 0)


def dump_sprite(base_addr=0x8000):
    """Dump the 16-byte tile at `base_addr` to sprite_<index>.png (grayscale).

    Args:
        base_addr: VRAM address of the tile; tiles are 16 bytes apart
            starting at 0x8000.
    """
    # Tile index within VRAM. Integer division fixes the old float index
    # (`/ 0x0010` yielded e.g. 1.0, which then needed the :.0f workaround);
    # the generated filename is unchanged.
    name_suffix = (base_addr - 0x8000) // 0x0010
    # Grayscale value per 2-bit color index — the red channel of the original
    # 0xffffff / 0xc0c0c0 / 0x808080 / 0x000000 palette (identical output).
    shades = [0xff, 0xc0, 0x80, 0x00]
    rows = []
    row = []
    for index, addr in enumerate(range(base_addr, base_addr + 0x10)):
        data = garapa.peek(addr)
        # NOTE(review): each byte is decoded as four 2-bit pixels, low pair
        # first, two bytes per 8-pixel row — confirm against the emulator's
        # tile layout (hardware GB tiles interleave two bitplanes per row).
        for shift in (0, 2, 4, 6):
            row.append(shades[(data >> shift) & 0b11])
        if index % 2 == 1:
            # two bytes completed -> one 8-pixel row
            rows.append(row)
            row = []
    print(f'dumping 0x{base_addr:x} {name_suffix}')
    png.from_array(
        rows,
        'L'
    ).save(f"sprite_{name_suffix:.0f}.png")
""" Copyright (C) 2019 Interactive Brokers LLC. All rights reserved. This code is subject to the terms and conditions of the IB API Non-Commercial License or the IB API Commercial License, as applicable. """ """ The Decoder knows how to transform a message's payload into higher level IB message (eg: order info, mkt data, etc). It will call the corresponding method from the EWrapper so that customer's code (eg: class derived from EWrapper) can make further use of the data. """ from tws_futures.ibapi.message import IN from tws_futures.ibapi.wrapper import * # @UnusedWildImport from tws_futures.ibapi.contract import ContractDescription from tws_futures.ibapi.server_versions import * # @UnusedWildImport from tws_futures.ibapi.utils import * # @UnusedWildImport from tws_futures.ibapi.softdollartier import SoftDollarTier from tws_futures.ibapi.ticktype import * # @UnusedWildImport from tws_futures.ibapi.tag_value import TagValue from tws_futures.ibapi.scanner import ScanData from tws_futures.ibapi.errors import BAD_MESSAGE from tws_futures.ibapi.common import * # @UnusedWildImport from tws_futures.ibapi.orderdecoder import OrderDecoder logger = logging.getLogger(__name__) class HandleInfo(Object): def __init__(self, wrap=None, proc=None): self.wrapperMeth = wrap self.wrapperParams = None self.processMeth = proc if wrap is None and proc is None: raise ValueError("both wrap and proc can't be None") def __str__(self): s = "wrap:%s meth:%s prms:%s" % (self.wrapperMeth, self.processMeth, self.wrapperParams) return s class Decoder(Object): def __init__(self, wrapper, serverVersion): self.wrapper = wrapper self.serverVersion = serverVersion self.discoverParams() #self.printParams() def processTickPriceMsg(self, fields): next(fields) decode(int, fields) reqId = decode(int, fields) tickType = decode(int, fields) price = decode(float, fields) size = decode(int, fields) # ver 2 field attrMask = decode(int, fields) # ver 3 field attrib = TickAttrib() attrib.canAutoExecute = attrMask 
== 1 if self.serverVersion >= MIN_SERVER_VER_PAST_LIMIT: attrib.canAutoExecute = attrMask & 1 != 0 attrib.pastLimit = attrMask & 2 != 0 if self.serverVersion >= MIN_SERVER_VER_PRE_OPEN_BID_ASK: attrib.preOpen = attrMask & 4 != 0 self.wrapper.tickPrice(reqId, tickType, price, attrib) # process ver 2 fields sizeTickType = TickTypeEnum.NOT_SET if TickTypeEnum.BID == tickType: sizeTickType = TickTypeEnum.BID_SIZE elif TickTypeEnum.ASK == tickType: sizeTickType = TickTypeEnum.ASK_SIZE elif TickTypeEnum.LAST == tickType: sizeTickType = TickTypeEnum.LAST_SIZE elif TickTypeEnum.DELAYED_BID == tickType: sizeTickType = TickTypeEnum.DELAYED_BID_SIZE elif TickTypeEnum.DELAYED_ASK == tickType: sizeTickType = TickTypeEnum.DELAYED_ASK_SIZE elif TickTypeEnum.DELAYED_LAST == tickType: sizeTickType = TickTypeEnum.DELAYED_LAST_SIZE if sizeTickType != TickTypeEnum.NOT_SET: self.wrapper.tickSize(reqId, sizeTickType, size) def processOrderStatusMsg(self, fields): next(fields) if self.serverVersion < MIN_SERVER_VER_MARKET_CAP_PRICE: decode(int, fields) orderId = decode(int, fields) status = decode(str, fields) if self.serverVersion >= MIN_SERVER_VER_FRACTIONAL_POSITIONS: filled = decode(float, fields) else: filled = decode(int, fields) if self.serverVersion >= MIN_SERVER_VER_FRACTIONAL_POSITIONS: remaining = decode(float, fields) else: remaining = decode(int, fields) avgFillPrice = decode(float, fields) permId = decode(int, fields) # ver 2 field parentId = decode(int, fields) # ver 3 field lastFillPrice = decode(float, fields) # ver 4 field clientId = decode(int, fields) # ver 5 field whyHeld = decode(str, fields) # ver 6 field if self.serverVersion >= MIN_SERVER_VER_MARKET_CAP_PRICE: mktCapPrice = decode(float, fields) else: mktCapPrice = None self.wrapper.orderStatus(orderId, status, filled, remaining, avgFillPrice, permId, parentId, lastFillPrice, clientId, whyHeld, mktCapPrice) def processOpenOrder(self, fields): next(fields) order = Order() contract = Contract() orderState = 
OrderState() if self.serverVersion < MIN_SERVER_VER_ORDER_CONTAINER: version = decode(int, fields) else: version = self.serverVersion OrderDecoder.__init__(self, contract, order, orderState, version, self.serverVersion) # read orderId OrderDecoder.decodeOrderId(self, fields) # read contract fields OrderDecoder.decodeContractFields(self, fields) # read order fields OrderDecoder.decodeAction(self, fields) OrderDecoder.decodeTotalQuantity(self, fields) OrderDecoder.decodeOrderType(self, fields) OrderDecoder.decodeLmtPrice(self, fields) OrderDecoder.decodeAuxPrice(self, fields) OrderDecoder.decodeTIF(self, fields) OrderDecoder.decodeOcaGroup(self, fields) OrderDecoder.decodeAccount(self, fields) OrderDecoder.decodeOpenClose(self, fields) OrderDecoder.decodeOrigin(self, fields) OrderDecoder.decodeOrderRef(self, fields) OrderDecoder.decodeClientId(self, fields) OrderDecoder.decodePermId(self, fields) OrderDecoder.decodeOutsideRth(self, fields) OrderDecoder.decodeHidden(self, fields) OrderDecoder.decodeDiscretionaryAmt(self, fields) OrderDecoder.decodeGoodAfterTime(self, fields) OrderDecoder.skipSharesAllocation(self, fields) OrderDecoder.decodeFAParams(self, fields) OrderDecoder.decodeModelCode(self, fields) OrderDecoder.decodeGoodTillDate(self, fields) OrderDecoder.decodeRule80A(self, fields) OrderDecoder.decodePercentOffset(self, fields) OrderDecoder.decodeSettlingFirm(self, fields) OrderDecoder.decodeShortSaleParams(self, fields) OrderDecoder.decodeAuctionStrategy(self, fields) OrderDecoder.decodeBoxOrderParams(self, fields) OrderDecoder.decodePegToStkOrVolOrderParams(self, fields) OrderDecoder.decodeDisplaySize(self, fields) OrderDecoder.decodeBlockOrder(self, fields) OrderDecoder.decodeSweepToFill(self, fields) OrderDecoder.decodeAllOrNone(self, fields) OrderDecoder.decodeMinQty(self, fields) OrderDecoder.decodeOcaType(self, fields) OrderDecoder.decodeETradeOnly(self, fields) OrderDecoder.decodeFirmQuoteOnly(self, fields) OrderDecoder.decodeNbboPriceCap(self, 
fields) OrderDecoder.decodeParentId(self, fields) OrderDecoder.decodeTriggerMethod(self, fields) OrderDecoder.decodeVolOrderParams(self, fields, True) OrderDecoder.decodeTrailParams(self, fields) OrderDecoder.decodeBasisPoints(self, fields) OrderDecoder.decodeComboLegs(self, fields) OrderDecoder.decodeSmartComboRoutingParams(self, fields) OrderDecoder.decodeScaleOrderParams(self, fields) OrderDecoder.decodeHedgeParams(self, fields) OrderDecoder.decodeOptOutSmartRouting(self, fields) OrderDecoder.decodeClearingParams(self, fields) OrderDecoder.decodeNotHeld(self, fields) OrderDecoder.decodeDeltaNeutral(self, fields) OrderDecoder.decodeAlgoParams(self, fields) OrderDecoder.decodeSolicited(self, fields) OrderDecoder.decodeWhatIfInfoAndCommission(self, fields) OrderDecoder.decodeVolRandomizeFlags(self, fields) OrderDecoder.decodePegToBenchParams(self, fields) OrderDecoder.decodeConditions(self, fields) OrderDecoder.decodeAdjustedOrderParams(self, fields) OrderDecoder.decodeSoftDollarTier(self, fields) OrderDecoder.decodeCashQty(self, fields) OrderDecoder.decodeDontUseAutoPriceForHedge(self, fields) OrderDecoder.decodeIsOmsContainers(self, fields) OrderDecoder.decodeDiscretionaryUpToLimitPrice(self, fields) OrderDecoder.decodeUsePriceMgmtAlgo(self, fields) self.wrapper.openOrder(order.orderId, contract, order, orderState) def processPortfolioValueMsg(self, fields): next(fields) version = decode(int, fields) # read contract fields contract = Contract() contract.conId = decode(int, fields) # ver 6 field contract.symbol = decode(str, fields) contract.secType = decode(str, fields) contract.lastTradeDateOrContractMonth = decode(str, fields) contract.strike = decode(float, fields) contract.right = decode(str, fields) if version >= 7: contract.multiplier = decode(str, fields) contract.primaryExchange = decode(str, fields) contract.currency = decode(str, fields) contract.localSymbol = decode(str, fields) # ver 2 field if version >= 8: contract.tradingClass = decode(str, fields) 
if self.serverVersion >= MIN_SERVER_VER_FRACTIONAL_POSITIONS: position = decode(float, fields) else: position = decode(int, fields) marketPrice = decode(float, fields) marketValue = decode(float, fields) averageCost = decode(float, fields) # ver 3 field unrealizedPNL = decode(float, fields) # ver 3 field realizedPNL = decode(float, fields) # ver 3 field accountName = decode(str, fields) # ver 4 field if version == 6 and self.serverVersion == 39: contract.primaryExchange = decode(str, fields) self.wrapper.updatePortfolio( contract, position, marketPrice, marketValue, averageCost, unrealizedPNL, realizedPNL, accountName) def processContractDataMsg(self, fields): next(fields) version = decode(int, fields) reqId = -1 if version >= 3: reqId = decode(int, fields) contract = ContractDetails() contract.contract.symbol = decode(str, fields) contract.contract.secType = decode(str, fields) self.readLastTradeDate(fields, contract, False) contract.contract.strike = decode(float, fields) contract.contract.right = decode(str, fields) contract.contract.exchange = decode(str, fields) contract.contract.currency = decode(str, fields) contract.contract.localSymbol = decode(str, fields) contract.marketName = decode(str, fields) contract.contract.tradingClass = decode(str, fields) contract.contract.conId = decode(int, fields) contract.minTick = decode(float, fields) if self.serverVersion >= MIN_SERVER_VER_MD_SIZE_MULTIPLIER: contract.mdSizeMultiplier = decode(int, fields) contract.contract.multiplier = decode(str, fields) contract.orderTypes = decode(str, fields) contract.validExchanges = decode(str, fields) contract.priceMagnifier = decode(int, fields) # ver 2 field if version >= 4: contract.underConId = decode(int, fields) if version >= 5: contract.longName = decode(str, fields) contract.contract.primaryExchange = decode(str, fields) if version >= 6: contract.contractMonth = decode(str, fields) contract.industry = decode(str, fields) contract.category = decode(str, fields) 
contract.subcategory = decode(str, fields) contract.timeZoneId = decode(str, fields) contract.tradingHours = decode(str, fields) contract.liquidHours = decode(str, fields) if version >= 8: contract.evRule = decode(str, fields) contract.evMultiplier = decode(int, fields) if version >= 7: secIdListCount = decode(int, fields) if secIdListCount > 0: contract.secIdList = [] for _ in range(secIdListCount): tagValue = TagValue() tagValue.tag = decode(str, fields) tagValue.value = decode(str, fields) contract.secIdList.append(tagValue) if self.serverVersion >= MIN_SERVER_VER_AGG_GROUP: contract.aggGroup = decode(int, fields) if self.serverVersion >= MIN_SERVER_VER_UNDERLYING_INFO: contract.underSymbol = decode(str, fields) contract.underSecType = decode(str, fields) if self.serverVersion >= MIN_SERVER_VER_MARKET_RULES: contract.marketRuleIds = decode(str, fields) if self.serverVersion >= MIN_SERVER_VER_REAL_EXPIRATION_DATE: contract.realExpirationDate = decode(str, fields) self.wrapper.contractDetails(reqId, contract) def processBondContractDataMsg(self, fields): next(fields) version = decode(int, fields) reqId = -1 if version >= 3: reqId = decode(int, fields) contract = ContractDetails() contract.contract.symbol = decode(str, fields) contract.contract.secType = decode(str, fields) contract.cusip = decode(str, fields) contract.coupon = decode(int, fields) self.readLastTradeDate(fields, contract, True) contract.issueDate = decode(str, fields) contract.ratings = decode(str, fields) contract.bondType = decode(str, fields) contract.couponType = decode(str, fields) contract.convertible = decode(bool, fields) contract.callable = decode(bool, fields) contract.putable = decode(bool, fields) contract.descAppend = decode(str, fields) contract.contract.exchange = decode(str, fields) contract.contract.currency = decode(str, fields) contract.marketName = decode(str, fields) contract.contract.tradingClass = decode(str, fields) contract.contract.conId = decode(int, fields) 
contract.minTick = decode(float, fields) if self.serverVersion >= MIN_SERVER_VER_MD_SIZE_MULTIPLIER: contract.mdSizeMultiplier = decode(int, fields) contract.orderTypes = decode(str, fields) contract.validExchanges = decode(str, fields) contract.nextOptionDate = decode(str, fields) # ver 2 field contract.nextOptionType = decode(str, fields) # ver 2 field contract.nextOptionPartial = decode(bool, fields) # ver 2 field contract.notes = decode(str, fields) # ver 2 field if version >= 4: contract.longName = decode(str, fields) if version >= 6: contract.evRule = decode(str, fields) contract.evMultiplier = decode(int, fields) if version >= 5: secIdListCount = decode(int, fields) if secIdListCount > 0: contract.secIdList = [] for _ in range(secIdListCount): tagValue = TagValue() tagValue.tag = decode(str, fields) tagValue.value = decode(str, fields) contract.secIdList.append(tagValue) if self.serverVersion >= MIN_SERVER_VER_AGG_GROUP: contract.aggGroup = decode(int, fields) if self.serverVersion >= MIN_SERVER_VER_MARKET_RULES: contract.marketRuleIds = decode(str, fields) self.wrapper.bondContractDetails(reqId, contract) def processScannerDataMsg(self, fields): next(fields) decode(int, fields) reqId = decode(int, fields) numberOfElements = decode(int, fields) for _ in range(numberOfElements): data = ScanData() data.contract = ContractDetails() data.rank = decode(int, fields) data.contract.contract.conId = decode(int, fields) # ver 3 field data.contract.contract.symbol = decode(str, fields) data.contract.contract.secType = decode(str, fields) data.contract.contract.lastTradeDateOrContractMonth = decode(str, fields) data.contract.contract.strike = decode(float, fields) data.contract.contract.right = decode(str, fields) data.contract.contract.exchange = decode(str, fields) data.contract.contract.currency = decode(str, fields) data.contract.contract.localSymbol = decode(str, fields) data.contract.marketName = decode(str, fields) data.contract.contract.tradingClass = 
decode(str, fields) data.distance = decode(str, fields) data.benchmark = decode(str, fields) data.projection = decode(str, fields) data.legsStr = decode(str, fields) self.wrapper.scannerData(reqId, data.rank, data.contract, data.distance, data.benchmark, data.projection, data.legsStr) self.wrapper.scannerDataEnd(reqId) def processExecutionDataMsg(self, fields): next(fields) version = self.serverVersion if(self.serverVersion < MIN_SERVER_VER_LAST_LIQUIDITY): version = decode(int, fields) reqId = -1 if version >= 7: reqId = decode(int, fields) orderId = decode(int, fields) # decode contract fields contract = Contract() contract.conId = decode(int, fields) # ver 5 field contract.symbol = decode(str, fields) contract.secType = decode(str, fields) contract.lastTradeDateOrContractMonth = decode(str, fields) contract.strike = decode(float, fields) contract.right = decode(str, fields) if version >= 9: contract.multiplier = decode(str, fields) contract.exchange = decode(str, fields) contract.currency = decode(str, fields) contract.localSymbol = decode(str, fields) if version >= 10: contract.tradingClass = decode(str, fields) # decode execution fields execution = Execution() execution.orderId = orderId execution.execId = decode(str, fields) execution.time = decode(str, fields) execution.acctNumber = decode(str, fields) execution.exchange = decode(str, fields) execution.side = decode(str, fields) if self.serverVersion >= MIN_SERVER_VER_FRACTIONAL_POSITIONS: execution.shares = decode(float, fields) else: execution.shares = decode(int, fields) execution.price = decode(float, fields) execution.permId = decode(int, fields) # ver 2 field execution.clientId = decode(int, fields) # ver 3 field execution.liquidation = decode(int, fields) # ver 4 field if version >= 6: execution.cumQty = decode(float, fields) execution.avgPrice = decode(float, fields) if version >= 8: execution.orderRef = decode(str, fields) if version >= 9: execution.evRule = decode(str, fields) 
execution.evMultiplier = decode(float, fields) if self.serverVersion >= MIN_SERVER_VER_MODELS_SUPPORT: execution.modelCode = decode(str, fields) if self.serverVersion >= MIN_SERVER_VER_LAST_LIQUIDITY: execution.lastLiquidity = decode(int, fields) self.wrapper.execDetails(reqId, contract, execution) def processHistoricalDataMsg(self, fields): next(fields) if self.serverVersion < MIN_SERVER_VER_SYNT_REALTIME_BARS: decode(int, fields) reqId = decode(int, fields) startDateStr = decode(str, fields) # ver 2 field endDateStr = decode(str, fields) # ver 2 field itemCount = decode(int, fields) for _ in range(itemCount): bar = BarData() bar.date = decode(str, fields) bar.open = decode(float, fields) bar.high = decode(float, fields) bar.low = decode(float, fields) bar.close = decode(float, fields) bar.volume = decode(int, fields) bar.average = decode(float, fields) if self.serverVersion < MIN_SERVER_VER_SYNT_REALTIME_BARS: decode(str, fields) bar.barCount = decode(int, fields) # ver 3 field self.wrapper.historicalData(reqId, bar) # send end of dataset marker self.wrapper.historicalDataEnd(reqId, startDateStr, endDateStr) def processHistoricalDataUpdateMsg(self, fields): next(fields) reqId = decode(int, fields) bar = BarData() bar.barCount = decode(int, fields) bar.date = decode(str, fields) bar.open = decode(float, fields) bar.close = decode(float, fields) bar.high = decode(float, fields) bar.low = decode(float, fields) bar.average = decode(float, fields) bar.volume = decode(int, fields) self.wrapper.historicalDataUpdate(reqId, bar) def processRealTimeBarMsg(self, fields): next(fields) decode(int, fields) reqId = decode(int, fields) bar = RealTimeBar() bar.time = decode(int, fields) bar.open = decode(float, fields) bar.high = decode(float, fields) bar.low = decode(float, fields) bar.close = decode(float, fields) bar.volume = decode(int, fields) bar.wap = decode(float, fields) bar.count = decode(int, fields) self.wrapper.realtimeBar(reqId, bar.time, bar.open, bar.high, 
bar.low, bar.close, bar.volume, bar.wap, bar.count) def processTickOptionComputationMsg(self, fields): optPrice = None pvDividend = None gamma = None vega = None theta = None undPrice = None next(fields) version = decode(int, fields) reqId = decode(int, fields) tickTypeInt = decode(int, fields) impliedVol = decode(float, fields) delta = decode(float, fields) if impliedVol < 0: # -1 is the "not computed" indicator impliedVol = None if delta == -2: # -2 is the "not computed" indicator delta = None if version >= 6 or \ tickTypeInt == TickTypeEnum.MODEL_OPTION or \ tickTypeInt == TickTypeEnum.DELAYED_MODEL_OPTION: optPrice = decode(float, fields) pvDividend = decode(float, fields) if optPrice == -1: # -1 is the "not computed" indicator optPrice = None if pvDividend == -1: # -1 is the "not computed" indicator pvDividend = None if version >= 6: gamma = decode(float, fields) vega = decode(float, fields) theta = decode(float, fields) undPrice = decode(float, fields) if gamma == -2: # -2 is the "not yet computed" indicator gamma = None if vega == -2: # -2 is the "not yet computed" indicator vega = None if theta == -2: # -2 is the "not yet computed" indicator theta = None if undPrice == -1: # -1 is the "not computed" indicator undPrice = None self.wrapper.tickOptionComputation(reqId, tickTypeInt, impliedVol, delta, optPrice, pvDividend, gamma, vega, theta, undPrice) def processDeltaNeutralValidationMsg(self, fields): next(fields) decode(int, fields) reqId = decode(int, fields) deltaNeutralContract = DeltaNeutralContract() deltaNeutralContract.conId = decode(int, fields) deltaNeutralContract.delta = decode(float, fields) deltaNeutralContract.price = decode(float, fields) self.wrapper.deltaNeutralValidation(reqId, deltaNeutralContract) def processMarketDataTypeMsg(self, fields): next(fields) decode(int, fields) reqId = decode(int, fields) marketDataType = decode(int, fields) self.wrapper.marketDataType(reqId, marketDataType) def processCommissionReportMsg(self, fields): 
next(fields) decode(int, fields) commissionReport = CommissionReport() commissionReport.execId = decode(str, fields) commissionReport.commission = decode(float, fields) commissionReport.currency = decode(str, fields) commissionReport.realizedPNL = decode(float, fields) commissionReport.yield_ = decode(float, fields) commissionReport.yieldRedemptionDate = decode(int, fields) self.wrapper.commissionReport(commissionReport) def processPositionDataMsg(self, fields): next(fields) version = decode(int, fields) account = decode(str, fields) # decode contract fields contract = Contract() contract.conId = decode(int, fields) contract.symbol = decode(str, fields) contract.secType = decode(str, fields) contract.lastTradeDateOrContractMonth = decode(str, fields) contract.strike = decode(float, fields) contract.right = decode(str, fields) contract.multiplier = decode(str, fields) contract.exchange = decode(str, fields) contract.currency = decode(str, fields) contract.localSymbol = decode(str, fields) if version >= 2: contract.tradingClass = decode(str, fields) if self.serverVersion >= MIN_SERVER_VER_FRACTIONAL_POSITIONS: position = decode(float, fields) else: position = decode(int, fields) avgCost = 0. 
if version >= 3: avgCost = decode(float, fields) self.wrapper.position(account, contract, position, avgCost) def processPositionMultiMsg(self, fields): next(fields) decode(int, fields) reqId = decode(int, fields) account = decode(str, fields) # decode contract fields contract = Contract() contract.conId = decode(int, fields) contract.symbol = decode(str, fields) contract.secType = decode(str, fields) contract.lastTradeDateOrContractMonth = decode(str, fields) contract.strike = decode(float, fields) contract.right = decode(str, fields) contract.multiplier = decode(str, fields) contract.exchange = decode(str, fields) contract.currency = decode(str, fields) contract.localSymbol = decode(str, fields) contract.tradingClass = decode(str, fields) position = decode(float, fields) avgCost = decode(float, fields) modelCode = decode(str, fields) self.wrapper.positionMulti(reqId, account, modelCode, contract, position, avgCost) def processSecurityDefinitionOptionParameterMsg(self, fields): next(fields) reqId = decode(int, fields) exchange = decode(str, fields) underlyingConId = decode(int, fields) tradingClass = decode(str, fields) multiplier = decode(str, fields) expCount = decode(int, fields) expirations = set() for _ in range(expCount): expiration = decode(str, fields) expirations.add(expiration) strikeCount = decode(int, fields) strikes = set() for _ in range(strikeCount): strike = decode(float, fields) strikes.add(strike) self.wrapper.securityDefinitionOptionParameter(reqId, exchange, underlyingConId, tradingClass, multiplier, expirations, strikes) def processSecurityDefinitionOptionParameterEndMsg(self, fields): next(fields) reqId = decode(int, fields) self.wrapper.securityDefinitionOptionParameterEnd(reqId) def processSoftDollarTiersMsg(self, fields): next(fields) reqId = decode(int, fields) nTiers = decode(int, fields) tiers = [] for _ in range(nTiers): tier = SoftDollarTier() tier.name = decode(str, fields) tier.val = decode(str, fields) tier.displayName = decode(str, 
fields) tiers.append(tier) self.wrapper.softDollarTiers(reqId, tiers) def processFamilyCodesMsg(self, fields): next(fields) nFamilyCodes = decode(int, fields) familyCodes = [] for _ in range(nFamilyCodes): famCode = FamilyCode() famCode.accountID = decode(str, fields) famCode.familyCodeStr = decode(str, fields) familyCodes.append(famCode) self.wrapper.familyCodes(familyCodes) def processSymbolSamplesMsg(self, fields): next(fields) reqId = decode(int, fields) nContractDescriptions = decode(int, fields) contractDescriptions = [] for _ in range(nContractDescriptions): conDesc = ContractDescription() conDesc.contract.conId = decode(int, fields) conDesc.contract.symbol = decode(str, fields) conDesc.contract.secType = decode(str, fields) conDesc.contract.primaryExchange = decode(str, fields) conDesc.contract.currency = decode(str, fields) nDerivativeSecTypes = decode(int, fields) conDesc.derivativeSecTypes = [] for _ in range(nDerivativeSecTypes): derivSecType = decode(str, fields) conDesc.derivativeSecTypes.append(derivSecType) contractDescriptions.append(conDesc) self.wrapper.symbolSamples(reqId, contractDescriptions) def processSmartComponents(self,fields): next(fields) reqId = decode(int, fields) n = decode(int, fields) smartComponentMap = [] for _ in range(n): smartComponent = SmartComponent() smartComponent.bitNumber = decode(int, fields) smartComponent.exchange = decode(str, fields) smartComponent.exchangeLetter = decode(str, fields) smartComponentMap.append(smartComponent) self.wrapper.smartComponents(reqId, smartComponentMap) def processTickReqParams(self,fields): next(fields) tickerId = decode(int, fields) minTick = decode(float, fields) bboExchange = decode(str, fields) snapshotPermissions = decode(int, fields) self.wrapper.tickReqParams(tickerId, minTick, bboExchange, snapshotPermissions) def processMktDepthExchanges(self,fields): next(fields) depthMktDataDescriptions = [] nDepthMktDataDescriptions = decode(int, fields) if nDepthMktDataDescriptions > 0: for _ 
in range(nDepthMktDataDescriptions): desc = DepthMktDataDescription() desc.exchange = decode(str, fields) desc.secType = decode(str, fields) if self.serverVersion >= MIN_SERVER_VER_SERVICE_DATA_TYPE: desc.listingExch = decode(str, fields) desc.serviceDataType = decode(str, fields) desc.aggGroup = decode(int, fields) else: decode(int,fields) #boolean notSuppIsL2 depthMktDataDescriptions.append(desc) self.wrapper.mktDepthExchanges(depthMktDataDescriptions) def processHeadTimestamp(self,fields): next(fields) reqId = decode(int, fields) headTimestamp = decode(str, fields) self.wrapper.headTimestamp(reqId,headTimestamp) def processTickNews(self,fields): next(fields) tickerId = decode( int, fields) timeStamp = decode(int, fields) providerCode = decode(str, fields) articleId = decode(str, fields) headline = decode(str, fields) extraData = decode(str, fields) self.wrapper.tickNews(tickerId, timeStamp, providerCode, articleId, headline, extraData) def processNewsProviders(self,fields): next(fields) newsProviders = [] nNewsProviders = decode(int, fields) if nNewsProviders > 0: for _ in range(nNewsProviders): provider = NewsProvider() provider.code = decode(str, fields) provider.name = decode(str, fields) newsProviders.append(provider) self.wrapper.newsProviders(newsProviders) def processNewsArticle(self,fields): next(fields) reqId = decode(int, fields) articleType = decode(int, fields) articleText = decode(str, fields) self.wrapper.newsArticle(reqId, articleType, articleText) def processHistoricalNews(self,fields): next(fields) requestId = decode(int, fields) time = decode(str, fields) providerCode = decode(str, fields) articleId = decode(str, fields) headline = decode(str, fields) self.wrapper.historicalNews(requestId, time, providerCode, articleId, headline) def processHistoricalNewsEnd(self,fields): next(fields) reqId = decode(int, fields) hasMore = decode(bool, fields) self.wrapper.historicalNewsEnd(reqId, hasMore) def processHistogramData(self,fields): next(fields) 
reqId = decode(int, fields) numPoints = decode(int, fields) histogram = [] for _ in range(numPoints): dataPoint = HistogramData() dataPoint.price = decode(float,fields) dataPoint.count = decode(int,fields) histogram.append(dataPoint) self.wrapper.histogramData(reqId, histogram) def processRerouteMktDataReq(self, fields): next(fields) reqId = decode(int, fields) conId = decode(int, fields) exchange = decode(str, fields) self.wrapper.rerouteMktDataReq(reqId, conId, exchange) def processRerouteMktDepthReq(self, fields): next(fields) reqId = decode(int, fields) conId = decode(int, fields) exchange = decode(str, fields) self.wrapper.rerouteMktDepthReq(reqId, conId, exchange) def processMarketRuleMsg(self, fields): next(fields) marketRuleId = decode(int, fields) nPriceIncrements = decode(int, fields) priceIncrements = [] if nPriceIncrements > 0: for _ in range(nPriceIncrements): prcInc = PriceIncrement() prcInc.lowEdge = decode(float, fields) prcInc.increment = decode(float, fields) priceIncrements.append(prcInc) self.wrapper.marketRule(marketRuleId, priceIncrements) def processPnLMsg(self, fields): next(fields) reqId = decode(int, fields) dailyPnL = decode(float, fields) unrealizedPnL = None realizedPnL = None if self.serverVersion >= MIN_SERVER_VER_UNREALIZED_PNL: unrealizedPnL = decode(float, fields) if self.serverVersion >= MIN_SERVER_VER_REALIZED_PNL: realizedPnL = decode(float, fields) self.wrapper.pnl(reqId, dailyPnL, unrealizedPnL, realizedPnL) def processPnLSingleMsg(self, fields): next(fields) reqId = decode(int, fields) pos = decode(int, fields) dailyPnL = decode(float, fields) unrealizedPnL = None realizedPnL = None if self.serverVersion >= MIN_SERVER_VER_UNREALIZED_PNL: unrealizedPnL = decode(float, fields) if self.serverVersion >= MIN_SERVER_VER_REALIZED_PNL: realizedPnL = decode(float, fields) value = decode(float, fields) self.wrapper.pnlSingle(reqId, pos, dailyPnL, unrealizedPnL, realizedPnL, value) def processHistoricalTicks(self, fields): next(fields) 
reqId = decode(int, fields) tickCount = decode(int, fields) ticks = [] for _ in range(tickCount): historicalTick = HistoricalTick() historicalTick.time = decode(int, fields) next(fields) # for consistency historicalTick.price = decode(float, fields) historicalTick.size = decode(int, fields) ticks.append(historicalTick) done = decode(bool, fields) self.wrapper.historicalTicks(reqId, ticks, done) def processHistoricalTicksBidAsk(self, fields): next(fields) reqId = decode(int, fields) tickCount = decode(int, fields) ticks = [] for _ in range(tickCount): historicalTickBidAsk = HistoricalTickBidAsk() historicalTickBidAsk.time = decode(int, fields) mask = decode(int, fields) tickAttribBidAsk = TickAttribBidAsk() tickAttribBidAsk.askPastHigh = mask & 1 != 0 tickAttribBidAsk.bidPastLow = mask & 2 != 0 historicalTickBidAsk.tickAttribBidAsk = tickAttribBidAsk historicalTickBidAsk.priceBid = decode(float, fields) historicalTickBidAsk.priceAsk = decode(float, fields) historicalTickBidAsk.sizeBid = decode(int, fields) historicalTickBidAsk.sizeAsk = decode(int, fields) ticks.append(historicalTickBidAsk) done = decode(bool, fields) self.wrapper.historicalTicksBidAsk(reqId, ticks, done) def processHistoricalTicksLast(self, fields): next(fields) reqId = decode(int, fields) tickCount = decode(int, fields) ticks = [] for _ in range(tickCount): historicalTickLast = HistoricalTickLast() historicalTickLast.time = decode(int, fields) mask = decode(int, fields) tickAttribLast = TickAttribLast() tickAttribLast.pastLimit = mask & 1 != 0 tickAttribLast.unreported = mask & 2 != 0 historicalTickLast.tickAttribLast = tickAttribLast historicalTickLast.price = decode(float, fields) historicalTickLast.size = decode(int, fields) historicalTickLast.exchange = decode(str, fields) historicalTickLast.specialConditions = decode(str, fields) ticks.append(historicalTickLast) done = decode(bool, fields) self.wrapper.historicalTicksLast(reqId, ticks, done) def processTickByTickMsg(self, fields): 
next(fields) reqId = decode(int, fields) tickType = decode(int, fields) time = decode(int, fields) if tickType == 0: # None pass elif tickType == 1 or tickType == 2: # Last or AllLast price = decode(float, fields) size = decode(int, fields) mask = decode(int, fields) tickAttribLast = TickAttribLast() tickAttribLast.pastLimit = mask & 1 != 0 tickAttribLast.unreported = mask & 2 != 0 exchange = decode(str, fields) specialConditions = decode(str, fields) self.wrapper.tickByTickAllLast(reqId, tickType, time, price, size, tickAttribLast, exchange, specialConditions) elif tickType == 3: # BidAsk bidPrice = decode(float, fields) askPrice = decode(float, fields) bidSize = decode(int, fields) askSize = decode(int, fields) mask = decode(int, fields) tickAttribBidAsk = TickAttribBidAsk() tickAttribBidAsk.bidPastLow = mask & 1 != 0 tickAttribBidAsk.askPastHigh = mask & 2 != 0 self.wrapper.tickByTickBidAsk(reqId, time, bidPrice, askPrice, bidSize, askSize, tickAttribBidAsk) elif tickType == 4: # MidPoint midPoint = decode(float, fields) self.wrapper.tickByTickMidPoint(reqId, time, midPoint) def processOrderBoundMsg(self, fields): next(fields) reqId = decode(int, fields) apiClientId = decode(int, fields) apiOrderId = decode(int, fields) self.wrapper.orderBound(reqId, apiClientId, apiOrderId) def processMarketDepthL2Msg(self, fields): next(fields) decode(int, fields) reqId = decode(int, fields) position = decode(int, fields) marketMaker = decode(str, fields) operation = decode(int, fields) side = decode(int, fields) price = decode(float, fields) size = decode(int, fields) isSmartDepth = False if self.serverVersion >= MIN_SERVER_VER_SMART_DEPTH: isSmartDepth = decode(bool, fields) self.wrapper.updateMktDepthL2(reqId, position, marketMaker, operation, side, price, size, isSmartDepth) def processCompletedOrderMsg(self, fields): next(fields) order = Order() contract = Contract() orderState = OrderState() OrderDecoder.__init__(self, contract, order, orderState, UNSET_INTEGER, 
self.serverVersion) # read contract fields OrderDecoder.decodeContractFields(self, fields) # read order fields OrderDecoder.decodeAction(self, fields) OrderDecoder.decodeTotalQuantity(self, fields) OrderDecoder.decodeOrderType(self, fields) OrderDecoder.decodeLmtPrice(self, fields) OrderDecoder.decodeAuxPrice(self, fields) OrderDecoder.decodeTIF(self, fields) OrderDecoder.decodeOcaGroup(self, fields) OrderDecoder.decodeAccount(self, fields) OrderDecoder.decodeOpenClose(self, fields) OrderDecoder.decodeOrigin(self, fields) OrderDecoder.decodeOrderRef(self, fields) OrderDecoder.decodePermId(self, fields) OrderDecoder.decodeOutsideRth(self, fields) OrderDecoder.decodeHidden(self, fields) OrderDecoder.decodeDiscretionaryAmt(self, fields) OrderDecoder.decodeGoodAfterTime(self, fields) OrderDecoder.decodeFAParams(self, fields) OrderDecoder.decodeModelCode(self, fields) OrderDecoder.decodeGoodTillDate(self, fields) OrderDecoder.decodeRule80A(self, fields) OrderDecoder.decodePercentOffset(self, fields) OrderDecoder.decodeSettlingFirm(self, fields) OrderDecoder.decodeShortSaleParams(self, fields) OrderDecoder.decodeBoxOrderParams(self, fields) OrderDecoder.decodePegToStkOrVolOrderParams(self, fields) OrderDecoder.decodeDisplaySize(self, fields) OrderDecoder.decodeSweepToFill(self, fields) OrderDecoder.decodeAllOrNone(self, fields) OrderDecoder.decodeMinQty(self, fields) OrderDecoder.decodeOcaType(self, fields) OrderDecoder.decodeTriggerMethod(self, fields) OrderDecoder.decodeVolOrderParams(self, fields, False) OrderDecoder.decodeTrailParams(self, fields) OrderDecoder.decodeComboLegs(self, fields) OrderDecoder.decodeSmartComboRoutingParams(self, fields) OrderDecoder.decodeScaleOrderParams(self, fields) OrderDecoder.decodeHedgeParams(self, fields) OrderDecoder.decodeClearingParams(self, fields) OrderDecoder.decodeNotHeld(self, fields) OrderDecoder.decodeDeltaNeutral(self, fields) OrderDecoder.decodeAlgoParams(self, fields) OrderDecoder.decodeSolicited(self, fields) 
OrderDecoder.decodeOrderStatus(self, fields) OrderDecoder.decodeVolRandomizeFlags(self, fields) OrderDecoder.decodePegToBenchParams(self, fields) OrderDecoder.decodeConditions(self, fields) OrderDecoder.decodeStopPriceAndLmtPriceOffset(self, fields) OrderDecoder.decodeCashQty(self, fields) OrderDecoder.decodeDontUseAutoPriceForHedge(self, fields) OrderDecoder.decodeIsOmsContainers(self, fields) OrderDecoder.decodeAutoCancelDate(self, fields) OrderDecoder.decodeFilledQuantity(self, fields) OrderDecoder.decodeRefFuturesConId(self, fields) OrderDecoder.decodeAutoCancelParent(self, fields) OrderDecoder.decodeShareholder(self, fields) OrderDecoder.decodeImbalanceOnly(self, fields) OrderDecoder.decodeRouteMarketableToBbo(self, fields) OrderDecoder.decodeParentPermId(self, fields) OrderDecoder.decodeCompletedTime(self, fields) OrderDecoder.decodeCompletedStatus(self, fields) self.wrapper.completedOrder(contract, order, orderState) def processCompletedOrdersEndMsg(self, fields): next(fields) self.wrapper.completedOrdersEnd() ###################################################################### def readLastTradeDate(self, fields, contract: ContractDetails, isBond: bool): lastTradeDateOrContractMonth = decode(str, fields) if lastTradeDateOrContractMonth is not None: splitted = lastTradeDateOrContractMonth.split() if len(splitted) > 0: if isBond: contract.maturity = splitted[0] else: contract.contract.lastTradeDateOrContractMonth = splitted[0] if len(splitted) > 1: contract.lastTradeTime = splitted[1] if isBond and len(splitted) > 2: contract.timeZoneId = splitted[2] ###################################################################### def discoverParams(self): meth2handleInfo = {} for handleInfo in self.msgId2handleInfo.values(): meth2handleInfo[handleInfo.wrapperMeth] = handleInfo methods = inspect.getmembers(EWrapper, inspect.isfunction) for (_, meth) in methods: #logger.debug("meth %s", name) sig = inspect.signature(meth) handleInfo = meth2handleInfo.get(meth, None) if 
handleInfo is not None: handleInfo.wrapperParams = sig.parameters #for (pname, param) in sig.parameters.items(): # logger.debug("\tparam %s %s %s", pname, param.name, param.annotation) def printParams(self): for (_, handleInfo) in self.msgId2handleInfo.items(): if handleInfo.wrapperMeth is not None: logger.debug("meth %s", handleInfo.wrapperMeth.__name__) if handleInfo.wrapperParams is not None: for (pname, param) in handleInfo.wrapperParams.items(): logger.debug("\tparam %s %s %s", pname, param.name, param.annotation) def interpretWithSignature(self, fields, handleInfo): if handleInfo.wrapperParams is None: logger.debug("%s: no param info in %s", fields, handleInfo) return nIgnoreFields = 2 #bypass msgId and versionId faster this way if len(fields) - nIgnoreFields != len(handleInfo.wrapperParams) - 1: logger.error("diff len fields and params %d %d for fields: %s and handleInfo: %s", len(fields), len(handleInfo.wrapperParams), fields, handleInfo) return fieldIdx = nIgnoreFields args = [] for (pname, param) in handleInfo.wrapperParams.items(): if pname != "self": logger.debug("field %s ", fields[fieldIdx]) try: arg = fields[fieldIdx].decode('UTF-8') except UnicodeDecodeError: arg = fields[fieldIdx].decode('latin-1') logger.debug("arg %s type %s", arg, param.annotation) if param.annotation is int: arg = int(arg) elif param.annotation is float: arg = float(arg) args.append(arg) fieldIdx += 1 method = getattr(self.wrapper, handleInfo.wrapperMeth.__name__) logger.debug("calling %s with %s %s", method, self.wrapper, args) method(*args) def interpret(self, fields): if len(fields) == 0: logger.debug("no fields") return sMsgId = fields[0] nMsgId = int(sMsgId) handleInfo = self.msgId2handleInfo.get(nMsgId, None) if handleInfo is None: logger.debug("%s: no handleInfo", fields) return try: if handleInfo.wrapperMeth is not None: logger.debug("In interpret(), handleInfo: %s", handleInfo) self.interpretWithSignature(fields, handleInfo) elif handleInfo.processMeth is not None: 
handleInfo.processMeth(self, iter(fields)) except BadMessage: theBadMsg = ",".join(fields) self.wrapper.error(NO_VALID_ID, BAD_MESSAGE.code(), BAD_MESSAGE.msg() + theBadMsg) raise msgId2handleInfo = { IN.TICK_PRICE: HandleInfo(proc=processTickPriceMsg), IN.TICK_SIZE: HandleInfo(wrap=EWrapper.tickSize), IN.ORDER_STATUS: HandleInfo(proc=processOrderStatusMsg), IN.ERR_MSG: HandleInfo(wrap=EWrapper.error), IN.OPEN_ORDER: HandleInfo(proc=processOpenOrder), IN.ACCT_VALUE: HandleInfo(wrap=EWrapper.updateAccountValue), IN.PORTFOLIO_VALUE: HandleInfo(proc=processPortfolioValueMsg), IN.ACCT_UPDATE_TIME: HandleInfo(wrap=EWrapper.updateAccountTime), IN.NEXT_VALID_ID: HandleInfo(wrap=EWrapper.nextValidId, ), IN.CONTRACT_DATA: HandleInfo(proc=processContractDataMsg), IN.EXECUTION_DATA: HandleInfo(proc=processExecutionDataMsg), IN.MARKET_DEPTH: HandleInfo(wrap=EWrapper.updateMktDepth), IN.MARKET_DEPTH_L2: HandleInfo(proc=processMarketDepthL2Msg), IN.NEWS_BULLETINS: HandleInfo(wrap=EWrapper.updateNewsBulletin), IN.MANAGED_ACCTS: HandleInfo(wrap=EWrapper.managedAccounts), IN.RECEIVE_FA: HandleInfo(wrap=EWrapper.receiveFA), IN.HISTORICAL_DATA: HandleInfo(proc=processHistoricalDataMsg), IN.HISTORICAL_DATA_UPDATE: HandleInfo(proc=processHistoricalDataUpdateMsg), IN.BOND_CONTRACT_DATA: HandleInfo(proc=processBondContractDataMsg), IN.SCANNER_PARAMETERS: HandleInfo(wrap=EWrapper.scannerParameters), IN.SCANNER_DATA: HandleInfo(proc=processScannerDataMsg), IN.TICK_OPTION_COMPUTATION: HandleInfo(proc=processTickOptionComputationMsg), IN.TICK_GENERIC: HandleInfo(wrap=EWrapper.tickGeneric), IN.TICK_STRING: HandleInfo(wrap=EWrapper.tickString), IN.TICK_EFP: HandleInfo(wrap=EWrapper.tickEFP), IN.CURRENT_TIME: HandleInfo(wrap=EWrapper.currentTime), IN.REAL_TIME_BARS: HandleInfo(proc=processRealTimeBarMsg), IN.FUNDAMENTAL_DATA: HandleInfo(wrap=EWrapper.fundamentalData), IN.CONTRACT_DATA_END: HandleInfo(wrap=EWrapper.contractDetailsEnd), IN.OPEN_ORDER_END: HandleInfo(wrap=EWrapper.openOrderEnd), 
IN.ACCT_DOWNLOAD_END: HandleInfo(wrap=EWrapper.accountDownloadEnd), IN.EXECUTION_DATA_END: HandleInfo(wrap=EWrapper.execDetailsEnd), IN.DELTA_NEUTRAL_VALIDATION: HandleInfo(proc=processDeltaNeutralValidationMsg), IN.TICK_SNAPSHOT_END: HandleInfo(wrap=EWrapper.tickSnapshotEnd), IN.MARKET_DATA_TYPE: HandleInfo(wrap=EWrapper.marketDataType), IN.COMMISSION_REPORT: HandleInfo(proc=processCommissionReportMsg), IN.POSITION_DATA: HandleInfo(proc=processPositionDataMsg), IN.POSITION_END: HandleInfo(wrap=EWrapper.positionEnd), IN.ACCOUNT_SUMMARY: HandleInfo(wrap=EWrapper.accountSummary), IN.ACCOUNT_SUMMARY_END: HandleInfo(wrap=EWrapper.accountSummaryEnd), IN.VERIFY_MESSAGE_API: HandleInfo(wrap=EWrapper.verifyMessageAPI), IN.VERIFY_COMPLETED: HandleInfo(wrap=EWrapper.verifyCompleted), IN.DISPLAY_GROUP_LIST: HandleInfo(wrap=EWrapper.displayGroupList), IN.DISPLAY_GROUP_UPDATED: HandleInfo(wrap=EWrapper.displayGroupUpdated), IN.VERIFY_AND_AUTH_MESSAGE_API: HandleInfo(wrap=EWrapper.verifyAndAuthMessageAPI), IN.VERIFY_AND_AUTH_COMPLETED: HandleInfo(wrap=EWrapper.verifyAndAuthCompleted), IN.POSITION_MULTI: HandleInfo(proc=processPositionMultiMsg), IN.POSITION_MULTI_END: HandleInfo(wrap=EWrapper.positionMultiEnd), IN.ACCOUNT_UPDATE_MULTI: HandleInfo(wrap=EWrapper.accountUpdateMulti), IN.ACCOUNT_UPDATE_MULTI_END: HandleInfo(wrap=EWrapper.accountUpdateMultiEnd), IN.SECURITY_DEFINITION_OPTION_PARAMETER: HandleInfo(proc=processSecurityDefinitionOptionParameterMsg), IN.SECURITY_DEFINITION_OPTION_PARAMETER_END: HandleInfo(proc=processSecurityDefinitionOptionParameterEndMsg), IN.SOFT_DOLLAR_TIERS: HandleInfo(proc=processSoftDollarTiersMsg), IN.FAMILY_CODES: HandleInfo(proc=processFamilyCodesMsg), IN.SYMBOL_SAMPLES: HandleInfo(proc=processSymbolSamplesMsg), IN.SMART_COMPONENTS: HandleInfo(proc=processSmartComponents), IN.TICK_REQ_PARAMS: HandleInfo(proc=processTickReqParams), IN.MKT_DEPTH_EXCHANGES: HandleInfo(proc=processMktDepthExchanges), IN.HEAD_TIMESTAMP: 
HandleInfo(proc=processHeadTimestamp), IN.TICK_NEWS: HandleInfo(proc=processTickNews), IN.NEWS_PROVIDERS: HandleInfo(proc=processNewsProviders), IN.NEWS_ARTICLE: HandleInfo(proc=processNewsArticle), IN.HISTORICAL_NEWS: HandleInfo(proc=processHistoricalNews), IN.HISTORICAL_NEWS_END: HandleInfo(proc=processHistoricalNewsEnd), IN.HISTOGRAM_DATA: HandleInfo(proc=processHistogramData), IN.REROUTE_MKT_DATA_REQ: HandleInfo(proc=processRerouteMktDataReq), IN.REROUTE_MKT_DEPTH_REQ: HandleInfo(proc=processRerouteMktDepthReq), IN.MARKET_RULE: HandleInfo(proc=processMarketRuleMsg), IN.PNL: HandleInfo(proc=processPnLMsg), IN.PNL_SINGLE: HandleInfo(proc=processPnLSingleMsg), IN.HISTORICAL_TICKS: HandleInfo(proc=processHistoricalTicks), IN.HISTORICAL_TICKS_BID_ASK: HandleInfo(proc=processHistoricalTicksBidAsk), IN.HISTORICAL_TICKS_LAST: HandleInfo(proc=processHistoricalTicksLast), IN.TICK_BY_TICK: HandleInfo(proc=processTickByTickMsg), IN.ORDER_BOUND: HandleInfo(proc=processOrderBoundMsg), IN.COMPLETED_ORDER: HandleInfo(proc=processCompletedOrderMsg), IN.COMPLETED_ORDERS_END: HandleInfo(proc=processCompletedOrdersEndMsg) }
#!/usr/bin/env python
"""Register ECS task definitions and roll services/tasks onto a new image tag.

Usage: <script> DEFINITIONS_JSON ENV TAG

DEFINITIONS_JSON describes the app (image name, per-env clusters, services
and one-off tasks); ENV selects the environment; TAG is the image tag to
deploy.  For every service the script registers a fresh task definition and
points the service at it; for every task it registers and runs it once.
"""
import argparse
import json
import subprocess


def read_json(f):
    """Parse the JSON file at path *f* and return the decoded object."""
    # Context manager guarantees the handle is closed even if parsing fails
    # (the original left the file open on a decode error).
    with open(f) as fh:
        return json.load(fh)


def update_task_definitions(container_name, task_definition, family, image_name, tag, env):
    """Rewrite *task_definition* in place for one deployment.

    Every container definition gets *container_name*, the image
    ``image_name:tag`` and an extra ``ENV`` environment variable set to
    *env*; the definition's family is set to *family*.  Returns the same
    (mutated) dict for call-chaining convenience.
    """
    for d in task_definition['containerDefinitions']:
        d['name'] = container_name
        d['image'] = image_name + ':' + tag
        d['environment'].append({"name": "ENV", "value": env})
    task_definition['family'] = family
    return task_definition


def register_task_definition(task_definition):
    """Register *task_definition* with ECS and return the parsed CLI response."""
    # check_output replaces the original hand-rolled readline() loop, which
    # compared bytes to '' under Python 3 and so never terminated; it also
    # raises CalledProcessError instead of silently parsing partial output.
    output = subprocess.check_output(
        ["aws", "ecs", "register-task-definition",
         "--cli-input-json", json.dumps(task_definition)])
    return json.loads(output.decode('utf-8'))


def task_arn(task_definition):
    """Return the family name from a register-task-definition response.

    E.g. ``arn:aws:ecs:…:task-definition/my-family:7`` -> ``'my-family'``.
    """
    arn = task_definition['taskDefinition']['taskDefinitionArn']
    return arn.split('/')[-1].split(':')[0]


def update_service(service, cluster, task_arn):
    """Point *service* in *cluster* at the task definition *task_arn*."""
    subprocess.call(['aws', 'ecs', 'update-service',
                     '--cluster', cluster,
                     '--service', service,
                     '--task-definition', task_arn])


def run_task(cluster, task_arn):
    """Run the task definition *task_arn* once on *cluster*."""
    subprocess.call(['aws', 'ecs', 'run-task',
                     '--cluster', cluster,
                     '--task-definition', task_arn])


def main():
    """Parse arguments and deploy all services/tasks of the chosen env."""
    parser = argparse.ArgumentParser()
    parser.add_argument('definitions', metavar='d', type=str)
    parser.add_argument('env', metavar='e', type=str)
    parser.add_argument('tag', metavar='t', type=str)
    args = vars(parser.parse_args())

    app = read_json(args['definitions'])
    env = args['env']
    env_config = app['envs'][env]

    # .items() works on both Python 2 and 3 (the original .iteritems()
    # is Python-2-only).
    for service, service_def in env_config['services'].items():
        task_def = read_json(service_def['template'])
        update_task_definitions(service_def['containerName'], task_def,
                                service_def['family'], app['image'],
                                args['tag'], env)
        # register task
        arn = task_arn(register_task_definition(task_def))
        # update service
        update_service(service_def['id'], env_config['cluster'], arn)

    if 'tasks' in env_config:
        for task, task_item in env_config['tasks'].items():
            task_def = read_json(task_item['template'])
            update_task_definitions(task_item['containerName'], task_def,
                                    task_item['family'], app['image'],
                                    args['tag'], env)
            # register task
            arn = task_arn(register_task_definition(task_def))
            # run task
            run_task(env_config['cluster'], arn)


# Guarded entry point: importing this module no longer parses sys.argv
# and fires AWS calls as a side effect.
if __name__ == '__main__':
    main()
"""LeetCode 28 — Implement strStr().

Return the index of the first occurrence of ``needle`` in ``haystack``, or
-1 if ``needle`` is not part of ``haystack``.

Ex_1: haystack = "hello", needle = "ll"  ->  2
Ex_2: haystack = "aaaaa", needle = "bba" ->  -1

Clarification: when ``needle`` is the empty string we return 0, consistent
with C's strstr() and Java's indexOf().
"""
class Solution(object):
    def strStr(self, haystack, needle):
        """
        :type haystack: str
        :type needle: str
        :rtype: int
        """
        width = len(needle)
        if width == 0:
            # An empty pattern matches at the very beginning.
            return 0
        # Slide a window of `width` characters across the haystack and
        # compare whole slices; when the needle is longer than the
        # haystack the range below is empty and we fall through to -1.
        for start in range(len(haystack) - width + 1):
            if haystack[start:start + width] == needle:
                return start
        return -1
"""
My thinking:
Consider the special case and scan each character of haystack.
"""
from worker.worker import Worker from worker.http_worker import HttpClient
"""Test the ORM's `Customer` model.""" import pytest from urban_meal_delivery import db class TestSpecialMethods: """Test special methods in `Customer`.""" def test_create_customer(self, customer): """Test instantiation of a new `Customer` object.""" assert customer is not None def test_text_representation(self, customer): """`Customer` has a non-literal text representation.""" result = repr(customer) assert result == f'<Customer(#{customer.id})>' @pytest.mark.db @pytest.mark.no_cover class TestConstraints: """Test the database constraints defined in `Customer`.""" def test_insert_into_database(self, db_session, customer): """Insert an instance into the (empty) database.""" assert db_session.query(db.Customer).count() == 0 db_session.add(customer) db_session.commit() assert db_session.query(db.Customer).count() == 1
""" Incrementally tweak specified axes. Build new faces! """ from argparse import ArgumentParser import pprint import numpy as np from pylearn2.gui.patch_viewer import PatchViewer import theano from adversarial import sampler, util # Parse arguments parser = ArgumentParser(description=('Experiment with tweaking each ' 'axis incrementally.')) parser.add_argument('-s', '--conditional-sampler', default='random', choices=sampler.conditional_samplers.values(), type=lambda k: sampler.conditional_samplers[k]) # parser.add_argument('--conditional-noise-range', default=1., # type=float) parser.add_argument('model_path') parser.add_argument('embedding_file') parser.add_argument('-a', '--axes', help='Comma-separated list of axes to modify') args = parser.parse_args() embeddings = np.load(args.embedding_file)['arr_0'] if args.axes is None: args.axes = range(embeddings.shape[1]) else: args.axes = [int(x) for x in args.axes.strip().split(',')] condition_dim = embeddings.shape[1] m, n = len(args.axes), 10 shift = 7.5 # Prepare generator generator = util.load_generator_from_file(args.model_path) noise_batch = generator.noise_space.make_theano_batch() conditional_batch = generator.condition_space.make_theano_batch() topo_sample_f = theano.function([noise_batch, conditional_batch], generator.dropout_fprop((noise_batch, conditional_batch))[0]) # Sample some noise data -- this needs to be shared between orig and mod # sample pairs noise_data = generator.get_noise((n, generator.noise_dim)).eval() # Begin modifying axes base_conditional_data = args.conditional_sampler(generator, n, 1, embedding_file=args.embedding_file) print 'Mean for each axis:' pprint.pprint(zip(args.axes, base_conditional_data[:, args.axes].mean(axis=1))) base_conditional_data[:, args.axes] -= 0.5 * shift mod_conditional_data = base_conditional_data.copy() # Build up a flat array of modified conditional data mod_conditional_steps = [] for axis in args.axes: mod_conditional_data[:, axis] += shift 
mod_conditional_steps.extend(mod_conditional_data.copy()) mod_conditional_steps = np.array(mod_conditional_steps) samples_orig = topo_sample_f(noise_data, base_conditional_data).swapaxes(0, 3) samples_mod = topo_sample_f(np.tile(noise_data, (m, 1)), mod_conditional_steps).swapaxes(0, 3) pv = PatchViewer(grid_shape=(m + 1, n), patch_shape=(32,32), is_color=True) for sample_orig in samples_orig: pv.add_patch(sample_orig, activation=1) for sample_mod in samples_mod: pv.add_patch(sample_mod) pv.show()
""" elevate.views ~~~~~~~~~~~~~ :copyright: (c) 2017-present by Justin Mayer. :copyright: (c) 2014-2016 by Matt Robenolt. :license: BSD, see LICENSE for more details. """ from urllib.parse import urlparse, urlunparse from django.contrib.auth.decorators import login_required from django.http import HttpResponseRedirect, QueryDict from django.shortcuts import resolve_url from django.template.response import TemplateResponse from django.views.decorators.debug import sensitive_post_parameters from django.views.decorators.cache import never_cache from django.views.decorators.csrf import csrf_protect from django.views.generic import View from django.utils.decorators import method_decorator from django.utils.http import is_safe_url from django.utils.module_loading import import_string from elevate.settings import (REDIRECT_FIELD_NAME, REDIRECT_URL, REDIRECT_TO_FIELD_NAME, URL) from elevate.forms import ElevateForm from elevate.utils import grant_elevated_privileges class ElevateView(View): """ The default view for the Elevate mode page. The role of this page is to prompt the user for their password again, and if successful, redirect them back to ``next``. 
""" form_class = ElevateForm template_name = 'elevate/elevate.html' extra_context = None def handle_elevate(self, request, redirect_to, context): return request.method == 'POST' and context['form'].is_valid() def grant_elevated_privileges(self, request, redirect_to): grant_elevated_privileges(request) # Restore the redirect destination from the GET request redirect_to = request.session.pop(REDIRECT_TO_FIELD_NAME, redirect_to) # Double check we're not redirecting to other sites if not is_safe_url(redirect_to, allowed_hosts=[request.get_host()], require_https=request.is_secure()): redirect_to = resolve_url(REDIRECT_URL) return HttpResponseRedirect(redirect_to) @method_decorator(sensitive_post_parameters()) @method_decorator(never_cache) @method_decorator(csrf_protect) @method_decorator(login_required) def dispatch(self, request): redirect_to = request.GET.get(REDIRECT_FIELD_NAME, REDIRECT_URL) # Make sure we're not redirecting to other sites if not is_safe_url(redirect_to, allowed_hosts=[request.get_host()], require_https=request.is_secure()): redirect_to = resolve_url(REDIRECT_URL) if request.is_elevated(): return HttpResponseRedirect(redirect_to) if request.method == 'GET': request.session[REDIRECT_TO_FIELD_NAME] = redirect_to context = { 'form': self.form_class(request, request.user, request.POST or None), 'request': request, REDIRECT_FIELD_NAME: redirect_to, } if self.handle_elevate(request, redirect_to, context): return self.grant_elevated_privileges(request, redirect_to) if self.extra_context is not None: context.update(self.extra_context) return TemplateResponse(request, self.template_name, context) def elevate(request, **kwargs): return ElevateView(**kwargs).dispatch(request) def redirect_to_elevate(next_url, elevate_url=None): """ Redirects the user to the login page, passing the given 'next' page """ if elevate_url is None: elevate_url = URL try: # django 1.10 and greater can't resolve the string 'elevate.views.elevate' to a URL # 
https://docs.djangoproject.com/en/1.10/releases/1.10/#removed-features-1-10 elevate_url = import_string(elevate_url) except ImportError: pass # wasn't a dotted path elevate_url_parts = list(urlparse(resolve_url(elevate_url))) querystring = QueryDict(elevate_url_parts[4], mutable=True) querystring[REDIRECT_FIELD_NAME] = next_url elevate_url_parts[4] = querystring.urlencode(safe='/') return HttpResponseRedirect(urlunparse(elevate_url_parts))
from PIL import ImageFont
from typing import Type
import tkinter as tk

from ..util import font

# determine if we can use 'arialbd' as a default font:
DEFAULT_FONTS = [
    'arialbd.ttf',
    'DroidSansMono.ttf',
]
DEFAULT_FONTS.extend(font.find_system_fonts())
for fnt in DEFAULT_FONTS:
    default_font = fnt
    try:
        ImageFont.truetype(default_font, 14)
        break
    except OSError:
        continue


class Gauge:
    """State model for a gauge/meter widget.

    Tracks the current and total amounts in Tk variables (so a redraw can be
    triggered whenever the value changes) plus the fonts, texts, and geometry
    used to render the meter. ``draw_meter`` is left for a subclass to
    implement.
    """

    # Oversampling factor applied to the font sizes below; presumably the
    # rendered image is downscaled by the same factor -- confirm in the
    # drawing subclass.
    drawresolution = 3

    def __init__(self,
                 amountused: float = 0,
                 amounttotal: float = 100,
                 showvalue: bool = True,
                 valuefont: ImageFont.FreeTypeFont = ImageFont.truetype(default_font, 40 * drawresolution),
                 unitsfont: ImageFont.FreeTypeFont = ImageFont.truetype(default_font, 15 * drawresolution),
                 labelfont: ImageFont.FreeTypeFont = ImageFont.truetype(default_font, 20 * drawresolution),
                 unitstext: str = '',
                 labeltext: str = '',
                 metersize: int = 200,
                 wedgesize: int = 0,
                 meterthickness: int = 10,
                 stripethickness: int = 0):
        # Tk variables so changes can be observed; writes trigger draw_meter.
        self.amountusedvariable = tk.IntVar(value = int(amountused))
        self.amounttotalvariable = tk.IntVar(value = int(amounttotal))
        self.amountusedvariable.trace_add('write', self.draw_meter)
        self.showvalue = showvalue
        self.metersize = metersize
        self.meterthickness = meterthickness
        self.stripethickness = stripethickness
        self.unitsfont = unitsfont
        self.labelfont = labelfont
        self.valuefont = valuefont
        self.unitstext = unitstext
        self.labeltext = labeltext
        self.wedgesize = wedgesize
        # FIX: previously unset until step() hit a boundary, so the first
        # mid-range step() raised AttributeError. Start counting upward.
        self.towardsmaximum = False

    def draw_meter(self, *args):
        """Render the gauge; must be provided by a drawing subclass."""
        # NotImplementedError (a subclass of Exception) instead of a bare
        # Exception, so intent is explicit and existing handlers still work.
        raise NotImplementedError("Not yet implemented.")

    @property
    def amountused(self):
        """Current value shown on the gauge."""
        return self.amountusedvariable.get()

    @amountused.setter
    def amountused(self, value):
        self.amountusedvariable.set(value)

    @property
    def amounttotal(self):
        """Maximum value of the gauge."""
        return self.amounttotalvariable.get()

    @amounttotal.setter
    def amounttotal(self, value):
        self.amounttotalvariable.set(value)

    def step(self, delta=1):
        """Move `amountused` by `delta`, bouncing between 0 and `amounttotal`.

        The direction flips at each boundary; `towardsmaximum` records the
        current direction between calls.
        """
        if self.amountused >= self.amounttotal:
            # Hit the top: reverse and step back down.
            self.towardsmaximum = True
            self.amountused = self.amounttotal - delta
        elif self.amountused <= 0:
            # Hit the bottom: reverse and step back up.
            self.towardsmaximum = False
            self.amountused = self.amountused + delta
        elif self.towardsmaximum:
            self.amountused = self.amountused - delta
        else:
            self.amountused = self.amountused + delta
import json
import time

import aiohttp
from sanic.exceptions import ServerError, Unauthorized

from spider.ehall.auth_server import auth_server, auth_server_load_cookies


async def auth_ehall(session: aiohttp.ClientSession, cookies: dict):
    """Log in to the ehall platform using previously obtained auth-server cookies.

    Returns True on success; raises ServerError on a CAS ticket error and
    Unauthorized when the credentials/cookies are no longer valid.
    """
    # Load the auth-server cookies into this session
    auth_server_load_cookies(session, cookies)
    async with session.get('http://ehall.sdut.edu.cn/login?service=http://ehall.sdut.edu.cn/new/ehall.html') as resp:
        # The final URL after redirects tells us whether login succeeded.
        url = str(resp.url)
        if url == 'http://ehall.sdut.edu.cn/new/ehall.html':
            return True
        else:
            # A leftover CAS ticket in the URL indicates a server-side
            # authentication hiccup rather than bad credentials.
            if '?ticket=' in url:
                raise ServerError('认证错误,请重试')
            # NOTE(review): debug print left in -- consider proper logging.
            print(url)
            raise Unauthorized('登录失败,可能是\n1. 登录凭证过期\n2. 您主动退出了登录\n3. 您修改了账号密码')


async def user_info(cookies: dict):
    """Fetch the user's basic profile (name, id, department) from ehall.

    Raises Unauthorized when the login session has expired.
    """
    async with aiohttp.ClientSession() as session:
        # Log in to ehall first
        await auth_ehall(session, cookies)
        async with session.get(f'http://ehall.sdut.edu.cn/jsonp/userDesktopInfo.json?amp_jsonp_callback=f&_={int(time.time() * 1000)}') as resp:
            text = await resp.text()
            # Response is JSONP wrapped as f(...); strip "f(" and ")".
            data = json.loads(text[2:-1])
            if not data['hasLogin']:
                raise Unauthorized('登录凭证已失效。')
            user_info_data = {
                'name': data.get('userName'),
                'userid': data.get('userId'),
                'department': data.get('userDepartment')
            }
            return user_info_data
from aiogram.types import (InlineKeyboardButton, InlineKeyboardMarkup,
                           KeyboardButton, ReplyKeyboardMarkup)


def craft_startup_keyboard() -> ReplyKeyboardMarkup:
    """
    Method for crafting startup keyboard with function
    /today, /next, /week
    """
    markup = ReplyKeyboardMarkup(resize_keyboard=True)
    markup.row(
        KeyboardButton('Сегодня'),
        KeyboardButton('Завтра'),
        KeyboardButton('Неделя'),
    )
    return markup


def craft_paging_keyboard() -> InlineKeyboardMarkup:
    """
    Method for crafting paging keyboard in paging menu
    """
    markup = InlineKeyboardMarkup(row_width=2)
    markup.add(
        InlineKeyboardButton('◀️', callback_data='prev_week'),
        InlineKeyboardButton('▶️', callback_data='next_week'),
    )
    return markup


def craft_exams_keyboard() -> ReplyKeyboardMarkup:
    """
    Method for crafting exam keyboard with function
    /exams
    """
    markup = ReplyKeyboardMarkup(resize_keyboard=True)
    markup.row(KeyboardButton("Экзамены"))
    return markup
# Library Imports
from typing import List, Union, Mapping, NamedTuple

from typing_extensions import TypeAlias

# `Literal` moved into `typing` in Python 3.8; fall back for older versions.
try:
    from typing import Literal
except ImportError:
    from typing_extensions import Literal

# User-Defined Structs
# > Passbolt types
# All Passbolt identifiers are opaque strings (UUIDs, presumably -- the API
# responses would confirm).
PassboltFolderIdType: TypeAlias = str
PassboltResourceIdType: TypeAlias = str
PassboltResourceTypeIdType: TypeAlias = str
PassboltUserIdType: TypeAlias = str
PassboltRoleIdType: TypeAlias = str
PassboltOpenPgpKeyIdType: TypeAlias = str
PassboltGroupIdType: TypeAlias = str
PassboltSecretIdType: TypeAlias = str
PassboltPermissionIdType: TypeAlias = str

# refers to the response from passbolt which is a string representation of datetime
PassboltDateTimeType: TypeAlias = str
PassboltFavoriteDetailsType: TypeAlias = dict


class PassboltSecretTuple(NamedTuple):
    # Encrypted secret payload plus its owning user/resource linkage.
    id: PassboltSecretIdType
    user_id: PassboltUserIdType
    resource_id: PassboltResourceIdType
    data: str
    created: PassboltDateTimeType
    modified: PassboltDateTimeType


class PassboltPermissionTuple(NamedTuple):
    # ACO = access-controlled object holder, ARO = the object accessed.
    id: PassboltPermissionIdType
    aco: Literal["User", "Group"]
    aco_foreign_key: Union[PassboltUserIdType, PassboltGroupIdType]
    aro: Literal["Resource", "Folder"]
    aro_foreign_key: Union[PassboltResourceIdType, PassboltFolderIdType]
    type: int


class PassboltOpenPgpKeyTuple(NamedTuple):
    id: PassboltOpenPgpKeyIdType
    armored_key: str
    created: PassboltDateTimeType
    key_created: PassboltDateTimeType
    bits: int
    deleted: bool
    modified: PassboltDateTimeType
    key_id: str
    fingerprint: str
    type: Literal["RSA", "ELG", "DSA", "ECDH", "ECDSA", "EDDSA"]
    expires: PassboltDateTimeType


class PassboltUserTuple(NamedTuple):
    id: PassboltUserIdType
    created: PassboltDateTimeType
    active: bool
    deleted: bool
    modified: PassboltDateTimeType
    username: str
    role_id: PassboltRoleIdType
    profile: dict
    role: dict
    gpgkey: PassboltOpenPgpKeyTuple
    last_logged_in: PassboltDateTimeType


class PassboltResourceTuple(NamedTuple):
    id: PassboltResourceIdType
    created: PassboltDateTimeType
    created_by: PassboltUserIdType
    deleted: bool
    description: str
    modified: PassboltDateTimeType
    modified_by: PassboltUserIdType
    name: str
    uri: str
    username: str
    resource_type_id: PassboltResourceIdType
    folder_parent_id: PassboltFolderIdType
    creator: Union[None, PassboltUserTuple] = None
    favorite: Union[None, PassboltFavoriteDetailsType] = None
    modifier: Union[None, PassboltUserTuple] = None
    # NOTE(review): `Union[PassboltPermissionTuple]` collapses to just
    # PassboltPermissionTuple, yet the default is None -- the annotation
    # presumably should be Union[None, PassboltPermissionTuple].
    permission: Union[PassboltPermissionTuple] = None


class PassboltFolderTuple(NamedTuple):
    id: PassboltFolderIdType
    name: str
    created: PassboltDateTimeType
    modified: PassboltDateTimeType
    created_by: PassboltUserIdType
    modified_by: PassboltUserIdType
    folder_parent_id: PassboltFolderIdType
    personal: bool
    # NOTE(review): mutable default `[]` is evaluated once and shared by all
    # instances that omit `permissions`; a tuple default would be safer.
    permissions: List[PassboltPermissionTuple] = []


class PassboltGroupTuple(NamedTuple):
    id: PassboltGroupIdType
    created: PassboltDateTimeType
    created_by: PassboltUserIdType
    deleted: bool
    modified: PassboltDateTimeType
    modified_by: PassboltUserIdType
    name: str


# Union of every tuple type a generic constructor may produce.
AllPassboltTupleTypes = Union[
    PassboltSecretTuple, PassboltPermissionTuple, PassboltResourceTuple,
    PassboltFolderTuple, PassboltGroupTuple, PassboltUserTuple,
    PassboltOpenPgpKeyTuple
]


def constructor(_namedtuple: AllPassboltTupleTypes, renamed_fields: Union[None, dict] = None,
                filter_fields: bool = True, subconstructors: Union[None, dict] = None):
    # Factory: builds a closure that maps raw API dict(s) onto `_namedtuple`.
    def namedtuple_constructor(data: Union[Mapping, List[Mapping]]) -> List[AllPassboltTupleTypes]:
        """Returns a namedtuple constructor function that can --
        1. Ingest dictionaries or list of dictionaries directly
        2. Renames field names from dict -> namedtuple
        3. Filters out dictionary keys that do not exist in namedtuple
        4. Can apply further constructors to subfields"""
        # 1. ingest datatypes
        is_singleton = False
        if isinstance(data, dict):
            # if single, data is a singleton list
            data = [data]
            is_singleton = True
        elif isinstance(data, list):
            # if list, assert that all elements are dicts
            assert all(map(lambda datum: type(datum) == dict, data)), "All records must be dicts"
        else:
            raise ValueError(f"Data ingested by {_namedtuple} cannot be {type(data)}")
        # TODO: should the listcomps be made lazy?
        # 2. rename fields
        if renamed_fields:
            # make sure that all final fieldnames are present in the namedtuple
            assert not set(renamed_fields.values()).difference(_namedtuple._fields)
            data = [
                {
                    (renamed_fields[k] if k in renamed_fields.keys() else k): v
                    for k, v in datum.items()
                }
                for datum in data
            ]
        # 3. Filter extra fields not present in namedtuple definition
        if filter_fields:
            # NOTE(review): `_ = data[0]` looks like a leftover debugging
            # peek; it has no effect (but would raise IndexError on an
            # empty list).
            _ = data[0]
            data = [
                {k: v for k, v in datum.items() if k in _namedtuple._fields}
                for datum in data
            ]
        # 4. [Composition] Apply constructors like this to individual fields
        if subconstructors:
            data = [
                {
                    k: (subconstructors[k](v) if k in subconstructors.keys() else v)
                    for k, v in datum.items()
                    if k in _namedtuple._fields
                }
                for datum in data
            ]
        # handle singleton lists
        if is_singleton:
            return _namedtuple(**data[0])
        return [_namedtuple(**datum) for datum in data]

    return namedtuple_constructor
from keras import backend as K
import os

# Parameters
# NOTE(review): hard-coded cluster path to the CANDLE common library --
# confirm it exists on the deployment system.
candle_lib = '/data/BIDS-HPC/public/candle/Candle/common'

def initialize_parameters():
    """Set up the CANDLE benchmark and return the hyperparameter dict."""
    print('Initializing parameters...')

    # Obtain the path of the directory of this script
    file_path = os.path.dirname(os.path.realpath(__file__))

    # Import the CANDLE library
    import sys
    sys.path.append(candle_lib)
    import candle_keras as candle

    # Instantiate the candle.Benchmark class
    mymodel_common = candle.Benchmark(file_path,os.getenv("DEFAULT_PARAMS_FILE"),'keras',prog='myprog',desc='My model')

    # Get a dictionary of the model hyperparamters
    gParameters = candle.initialize_parameters(mymodel_common)

    # Return the dictionary of the hyperparameters
    return(gParameters)

def run(gParameters):
    """Train or run inference for a U-Net-style segmentation model.

    Driven entirely by `gParameters` (CANDLE hyperparameter dict);
    `gParameters['predict']` selects between the training and inference
    branches. Returns the Keras History object from training, or None
    after prediction.
    """
    print('Running model...')

    #### Begin model input ##########################################################################################

    def get_model(model_json_fname,modelwtsfname):
        """Load a model for prediction: JSON architecture if present, else a fresh U-Net; then load weights."""
        # This is only for prediction
        if os.path.isfile(model_json_fname):
            # Model reconstruction from JSON file
            with open(model_json_fname, 'r') as f:
                model = model_from_json(f.read())
        else:
            model = get_unet()
        #model.summary()
        # Load weights into the new model
        model.load_weights(modelwtsfname)
        return model

    def focal_loss(gamma=2., alpha=.25):
        """Return a focal-loss function with the given focusing (gamma) and balance (alpha) parameters."""
        def focal_loss_fixed(y_true, y_pred):
            pt_1 = tf.where(tf.equal(y_true, 1), y_pred, tf.ones_like(y_pred))
            pt_0 = tf.where(tf.equal(y_true, 0), y_pred, tf.zeros_like(y_pred))
            return -K.sum(alpha * K.pow(1. - pt_1, gamma) * K.log(pt_1))-K.sum((1-alpha) * K.pow( pt_0, gamma) * K.log(1. - pt_0))
        return focal_loss_fixed

    def jaccard_coef(y_true, y_pred):
        """Smoothed Jaccard (IoU) coefficient, averaged over the reduced axes."""
        smooth = 1.0
        # NOTE(review): axis=[-0, -1, 2] mixes signs (-0 == 0); presumably
        # [0, 1, 2] was intended -- confirm against the tensor layout.
        intersection = K.sum(y_true * y_pred, axis=[-0, -1, 2])
        sum_ = K.sum(y_true + y_pred, axis=[-0, -1, 2])
        jac = (intersection + smooth) / (sum_ - intersection + smooth)
        return K.mean(jac)

    def jaccard_coef_int(y_true, y_pred):
        """Jaccard coefficient computed on predictions rounded to {0, 1}."""
        smooth = 1.0
        y_pred_pos = K.round(K.clip(y_pred, 0, 1))
        intersection = K.sum(y_true * y_pred_pos, axis=[-0, -1, 2])
        sum_ = K.sum(y_true + y_pred_pos, axis=[-0, -1, 2])
        jac = (intersection + smooth) / (sum_ - intersection + smooth)
        return K.mean(jac)

    def jaccard_coef_loss(y_true, y_pred):
        """Combined loss: negative log-Jaccard plus binary cross-entropy."""
        return -K.log(jaccard_coef(y_true, y_pred)) + binary_crossentropy(y_pred, y_true)

    def dice_coef_batch(y_true, y_pred):
        """Smoothed Dice coefficient over the reduced axes, then averaged."""
        smooth = 1.0
        intersection = K.sum(y_true * y_pred, axis=[-0, -1, 2])
        sum_ = K.sum(y_true + y_pred, axis=[-0, -1, 2])
        # NOTE(review): the usual Dice denominator is |A|+|B| (= sum_);
        # here it is sum_ + intersection -- confirm this is intentional.
        dice = ((2.0*intersection) + smooth) / (sum_ + intersection + smooth)
        return K.mean(dice)

    def dice_coef(y_true, y_pred):
        """Smoothed Dice coefficient over the flattened tensors."""
        smooth = 1.0
        y_true_f = K.flatten(y_true)
        y_pred_f = K.flatten(y_pred)
        intersection = K.sum(y_true_f * y_pred_f)
        dice_smooth = ((2. * intersection) + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)
        return (dice_smooth)

    def dice_coef_loss(y_true, y_pred):
        """Negative Dice coefficient, usable as a loss."""
        return -dice_coef(y_true, y_pred)

    def dice_coef_batch_loss(y_true, y_pred):
        """Negative batch Dice coefficient, usable as a loss."""
        return -dice_coef_batch(y_true, y_pred)

    #Define the neural network
    def get_unet():
        """Build a 4-level U-Net (32..512 filters, dropout 0.25, sigmoid head) for 1-channel input."""
        droprate = 0.25
        filt_size = 32

        inputs = Input((None, None, 1))
        conv1 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(inputs)
        conv1 = Dropout(droprate)(conv1)
        conv1 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(conv1)
        pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
        filt_size = filt_size*2

        conv2 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(pool1)
        conv2 = Dropout(droprate)(conv2)
        conv2 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(conv2)
        pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
        filt_size = filt_size*2

        conv3 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(pool2)
        conv3 = Dropout(droprate)(conv3)
        conv3 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(conv3)
        pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
        filt_size = filt_size*2

        conv4 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(pool3)
        conv4 = Dropout(droprate)(conv4)
        conv4 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(conv4)
        pool4 = MaxPooling2D(pool_size=(2, 2))(conv4)
        filt_size = filt_size*2

        conv5 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(pool4)
        conv5 = Dropout(droprate)(conv5)
        conv5 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(conv5)

        # NOTE(review): `/` yields a float under Python 3; Conv2D filter
        # counts should be ints (`//`). Presumably Python-2 heritage --
        # confirm the target interpreter.
        filt_size = filt_size/2

        up6 = concatenate([Conv2DTranspose(filt_size, (2, 2), strides=(2, 2), padding='same')(conv5), conv4], axis=3)
        conv6 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(up6)
        conv6 = Dropout(droprate)(conv6)
        conv6 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(conv6)
        filt_size = filt_size/2

        up7 = concatenate([Conv2DTranspose(filt_size, (2, 2), strides=(2, 2), padding='same')(conv6), conv3], axis=3)
        conv7 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(up7)
        conv7 = Dropout(droprate)(conv7)
        conv7 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(conv7)
        filt_size = filt_size/2

        up8 = concatenate([Conv2DTranspose(filt_size, (2, 2), strides=(2, 2), padding='same')(conv7), conv2], axis=3)
        conv8 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(up8)
        conv8 = Dropout(droprate)(conv8)
        conv8 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(conv8)
        filt_size = filt_size/2

        up9 = concatenate([Conv2DTranspose(filt_size, (2, 2), strides=(2, 2), padding='same')(conv8), conv1], axis=3)
        conv9 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(up9)
        conv9 = Dropout(droprate)(conv9)
        conv9 = Conv2D(filt_size, (3, 3), activation='relu', padding='same')(conv9)

        conv10 = Conv2D(1, (1, 1), activation='sigmoid')(conv9)

        model = Model(inputs=[inputs], outputs=[conv10])

        #model.compile(optimizer=Adam(lr=1e-5), loss=dice_coef_loss, metrics=[dice_coef])
        #model.compile(optimizer=Nadam(lr=1e-3), loss=dice_coef_loss, metrics=[dice_coef])
        #model.compile(optimizer=Adadelta(), loss=dice_coef_loss, metrics=[dice_coef])

        return model

    def save_model_to_json(model,model_json_fname):
        """Serialize the model architecture (not weights) to a JSON file."""
        #model = unet.UResNet152(input_shape=(None, None, 3), classes=1,encoder_weights="imagenet11k")
        #model = get_unet()
        #model.summary()
        # serialize model to JSON
        model_json = model.to_json()
        with open(model_json_fname, "w") as json_file:
            json_file.write(model_json)

    def preprocess_data(do_prediction,inputnpyfname,targetnpyfname,expandChannel,backbone):
        """Load .npy images (and masks when training), rescale 16-bit to 8-bit,
        add the channel axis, and apply the backbone's preprocessing.
        Returns (images, masks) when training, images only for prediction."""
        # Preprocess the data (beyond what I already did before)
        print('-'*30)
        print('Loading and preprocessing data...')
        print('-'*30)

        # Load, normalize, and cast the data
        imgs_input = ( np.load(inputnpyfname).astype('float32') / (2**16-1) * (2**8-1) ).astype('uint8')
        print('Input images information:')
        print(imgs_input.shape)
        print(imgs_input.dtype)
        hist,bins = np.histogram(imgs_input)
        print(hist)
        print(bins)
        if not do_prediction:
            imgs_mask_train = np.load(targetnpyfname).astype('uint8')
            print('Input masks information:')
            print(imgs_mask_train.shape)
            print(imgs_mask_train.dtype)
            hist,bins = np.histogram(imgs_mask_train)
            print(hist)
            print(bins)

        # Make the grayscale images RGB since that's what the model expects apparently
        if expandChannel:
            imgs_input = np.stack((imgs_input,)*3, -1)
        else:
            imgs_input = np.expand_dims(imgs_input, 3)
        print('New shape of input images:')
        print(imgs_input.shape)
        if not do_prediction:
            imgs_mask_train = np.expand_dims(imgs_mask_train, 3)
            print('New shape of masks:')
            print(imgs_mask_train.shape)

        # Preprocess as per https://github.com/qubvel/segmentation_models
        preprocessing_fn = get_preprocessing(backbone)
        imgs_input = preprocessing_fn(imgs_input)

        # Return appropriate variables
        if not do_prediction:
            return(imgs_input,imgs_mask_train)
        else:
            return(imgs_input)

    # Import relevant modules and functions
    # (done inside run() because segmentation_models_repo comes from
    # gParameters and must be on sys.path before the imports)
    import sys
    sys.path.append(gParameters['segmentation_models_repo'])
    import numpy as np
    from keras.models import Model
    from keras.layers import Input, concatenate, Conv2D, MaxPooling2D, Conv2DTranspose, Dropout
    from keras.optimizers import Adam
    from keras.callbacks import ModelCheckpoint,ReduceLROnPlateau,EarlyStopping,CSVLogger
    from keras.layers.normalization import BatchNormalization
    from keras.backend import binary_crossentropy
    import keras
    import random
    import tensorflow as tf
    from keras.models import model_from_json
    from segmentation_models import Unet
    from segmentation_models.backbones import get_preprocessing

    K.set_image_data_format('channels_last')  # TF dimension ordering in this code

    # Basically constants
    expandChannel = True
    modelwtsfname = 'model_weights.h5'
    model_json_fname = 'model.json'
    csvfname = 'model.csv'

    do_prediction = gParameters['predict']

    if not do_prediction: # Train...
        print('Training...')

        # Parameters
        inputnpyfname = gParameters['images']
        labels = gParameters['labels']
        initialize = gParameters['initialize']
        backbone = gParameters['backbone']
        encoder = gParameters['encoder']
        lr = float(gParameters['lr'])
        batch_size = gParameters['batch_size']
        obj_return = gParameters['obj_return']
        epochs = gParameters['epochs']

        # Preprocess the data
        imgs_train,imgs_mask_train = preprocess_data(do_prediction,inputnpyfname,labels,expandChannel,backbone)

        # Load, save, and compile the model
        model = Unet(backbone_name=backbone, encoder_weights=encoder)
        save_model_to_json(model,model_json_fname)
        model.compile(optimizer=Adam(lr=lr), loss='binary_crossentropy', metrics=['binary_crossentropy','mean_squared_error',dice_coef, dice_coef_batch, focal_loss()])

        # Load previous weights for restarting, if desired and possible
        if os.path.isfile(initialize):
            print('-'*30)
            print('Loading previous weights ...')
            model.load_weights(initialize)

        # Set up the training callback functions
        model_checkpoint = ModelCheckpoint(modelwtsfname, monitor=obj_return, save_best_only=True)
        reduce_lr = ReduceLROnPlateau(monitor=obj_return, factor=0.1,patience=100, min_lr=0.001,verbose=1)
        model_es = EarlyStopping(monitor=obj_return, min_delta=0.00000001, patience=100, verbose=1, mode='auto')
        csv_logger = CSVLogger(csvfname, append=True)

        # Train the model
        history_callback = model.fit(imgs_train, imgs_mask_train, batch_size=batch_size, epochs=epochs, verbose=2, shuffle=True, validation_split=0.10, callbacks=[model_checkpoint, reduce_lr, model_es, csv_logger])
        print("Minimum validation loss:")
        print(min(history_callback.history[obj_return]))

    else: # ...or predict
        print('Inferring...')

        # Parameters
        inputnpyfname = gParameters['images']
        initialize = gParameters['initialize']
        backbone = gParameters['backbone']
        # lr = float(gParameters['lr']) # this isn't needed but we're keeping it for the U-Net, where it is "needed"

        # Preprocess the data
        imgs_infer = preprocess_data(do_prediction,inputnpyfname,'',expandChannel,backbone)

        # Load the model
        #model = get_model(model_json_fname,initialize)
        model = get_model(os.path.dirname(initialize)+'/'+model_json_fname,initialize)

        # Run inference
        imgs_test_predict = model.predict(imgs_infer, batch_size=1, verbose=1)

        # Save the predicted masks
        np.save('mask_predictions.npy', np.squeeze(np.round(imgs_test_predict).astype('uint8')))

        history_callback = None

    #### End model input ############################################################################################

    return(history_callback)

def main():
    """CANDLE entry point: initialize hyperparameters and run the model."""
    print('Running main program...')
    gParameters = initialize_parameters()
    run(gParameters)

if __name__ == '__main__':
    main()
    try:
        K.clear_session()
    except AttributeError:
        pass
import typing def main() -> typing.NoReturn: s = input() t = input() n = len(s) print(sum(s[i] != t[i] for i in range(n))) main()
#!/usr/bin/python # -*- coding: utf-8 -*- from flask import Flask, escape from flask_jsonp import jsonpify import data import json application = Flask(__name__) @application.route("/") def hello(): return "<h1 style='color:blue'>Hello There!</h1>\n" @application.route("/api/api2") def api2(): return "<h1 style='color:blue'>Enter API 2222</h1>\n" @application.route("/api/") def api(): return "<h1 style='color:blue'>Enter API</h1>\n" @application.route("/api/getnannies/<nannysystem>", methods=['GET']) def getnannies(nannysystem): nannysystem = escape(nannysystem) db = data.storage() with db: ret = [n for n in db._CONN.execute('select %s from nannies where nannysystem_no = ?' % ",".join(db._NANNIES_COL[1:]), (nannysystem, ))] # print request.args['a'], type(request.args['a']) # print request.args['b'], type(request.args['b']) return jsonpify(ret) if __name__ == "__main__": application.run(host='0.0.0.0', debug=True) # db = data.storage() # with db: # for n in db._CONN.execute('select * from nannies where nannysystem_no = ?', ('ge',)): # print n # print 'end'
import os, json


def check_extension(draft):
    """Return the extension of *draft*, including the dot (e.g. '.jpg').

    Scans from the end of the string and returns everything from the last
    '.' onward. Returns '' when *draft* contains no dot (the original
    raised NameError in that case).
    """
    seen = ""
    for ch in draft[::-1]:
        seen += ch
        if ch == ".":
            # `seen` currently holds the reversed extension.
            return seen[::-1]
    return ""


def check_draft_file(draft):
    """Return the placeholder names enclosed in '{...}' within *draft*.

    e.g. '{ID}_{name}.jpg' -> ['ID', 'name']. Initialising the buffer up
    front fixes the NameError the original raised whenever *draft* did not
    begin with '{'.
    """
    keys = []
    buffer = ""
    for ch in draft:
        if ch == "{":
            buffer = ""
        elif ch == "}":
            keys.append(buffer)
        else:
            buffer += ch
    return keys


def check_file_name(path, filename, draft):
    """Check whether *filename* matches the pattern described by *draft*.

    The draft's extension must appear in *filename*; the underscore-split
    stem is matched positionally against the draft placeholders, and any
    segment bound to the 'ID' placeholder must parse as an integer.
    *path* is accepted for interface compatibility but unused.
    Returns True on a match, False otherwise.
    """
    extension = check_extension(draft)
    # An empty extension would previously slip through ('' is a substring
    # of everything) and then mangle the slice below (filename[:-0] == '').
    if not extension or extension not in filename:
        return False
    list_filename = filename[:-len(extension)].split("_")
    list_draft = check_draft_file(draft)
    for key, value in zip(list_draft, list_filename):
        if key == "ID":
            try:
                int(value)
            except ValueError:
                return False
    return True