commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
11889613df601a54a169bb51511e6eb54dec3988
|
tree.py
|
tree.py
|
import math
def succ_fcn(x):
"""
takes an element of the Calkin Wilf tree and returns the next element
following a breadth first traversal
"""
return 1 / (math.floor(x) + 1 - (x % 1))
def get_nth(n):
"""
takes a natural number n and returns the nth element of the Calkin Wilf tree
following a breadth first traversal
"""
|
import math
def succ_fcn(x):
"""
takes an element of the Calkin Wilf tree and returns the next element
following a breadth first traversal
"""
return 1 / (math.floor(x) + 1 - (x % 1))
def get_nth(n):
"""
takes a natural number n and returns the nth element of the Calkin Wilf tree
following a breadth first traversal
"""
node = 1/1
for i in range(1, n+1):
node = succ_fcn(node)
return node
|
Add basic breadth first return
|
Add basic breadth first return
|
Python
|
mit
|
richardmillson/Calkin_Wilf_tree
|
import math
def succ_fcn(x):
"""
takes an element of the Calkin Wilf tree and returns the next element
following a breadth first traversal
"""
return 1 / (math.floor(x) + 1 - (x % 1))
def get_nth(n):
"""
takes a natural number n and returns the nth element of the Calkin Wilf tree
following a breadth first traversal
"""
Add basic breadth first return
|
import math
def succ_fcn(x):
"""
takes an element of the Calkin Wilf tree and returns the next element
following a breadth first traversal
"""
return 1 / (math.floor(x) + 1 - (x % 1))
def get_nth(n):
"""
takes a natural number n and returns the nth element of the Calkin Wilf tree
following a breadth first traversal
"""
node = 1/1
for i in range(1, n+1):
node = succ_fcn(node)
return node
|
<commit_before>import math
def succ_fcn(x):
"""
takes an element of the Calkin Wilf tree and returns the next element
following a breadth first traversal
"""
return 1 / (math.floor(x) + 1 - (x % 1))
def get_nth(n):
"""
takes a natural number n and returns the nth element of the Calkin Wilf tree
following a breadth first traversal
"""
<commit_msg>Add basic breadth first return<commit_after>
|
import math
def succ_fcn(x):
"""
takes an element of the Calkin Wilf tree and returns the next element
following a breadth first traversal
"""
return 1 / (math.floor(x) + 1 - (x % 1))
def get_nth(n):
"""
takes a natural number n and returns the nth element of the Calkin Wilf tree
following a breadth first traversal
"""
node = 1/1
for i in range(1, n+1):
node = succ_fcn(node)
return node
|
import math
def succ_fcn(x):
"""
takes an element of the Calkin Wilf tree and returns the next element
following a breadth first traversal
"""
return 1 / (math.floor(x) + 1 - (x % 1))
def get_nth(n):
"""
takes a natural number n and returns the nth element of the Calkin Wilf tree
following a breadth first traversal
"""
Add basic breadth first returnimport math
def succ_fcn(x):
"""
takes an element of the Calkin Wilf tree and returns the next element
following a breadth first traversal
"""
return 1 / (math.floor(x) + 1 - (x % 1))
def get_nth(n):
"""
takes a natural number n and returns the nth element of the Calkin Wilf tree
following a breadth first traversal
"""
node = 1/1
for i in range(1, n+1):
node = succ_fcn(node)
return node
|
<commit_before>import math
def succ_fcn(x):
"""
takes an element of the Calkin Wilf tree and returns the next element
following a breadth first traversal
"""
return 1 / (math.floor(x) + 1 - (x % 1))
def get_nth(n):
"""
takes a natural number n and returns the nth element of the Calkin Wilf tree
following a breadth first traversal
"""
<commit_msg>Add basic breadth first return<commit_after>import math
def succ_fcn(x):
"""
takes an element of the Calkin Wilf tree and returns the next element
following a breadth first traversal
"""
return 1 / (math.floor(x) + 1 - (x % 1))
def get_nth(n):
"""
takes a natural number n and returns the nth element of the Calkin Wilf tree
following a breadth first traversal
"""
node = 1/1
for i in range(1, n+1):
node = succ_fcn(node)
return node
|
feb630b75f2a28bb098a4a192a4bbb528e2251fa
|
addons/email/res_partner.py
|
addons/email/res_partner.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv
from osv import fields
class res_partner(osv.osv):
""" Inherits partner and adds CRM information in the partner form """
_inherit = 'res.partner'
_columns = {
'emails': fields.one2many('email.message', 'partner_id',\
'Emails', readonly=True, domain=[('history','=',True)]),
}
res_partner()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv
from osv import fields
class res_partner(osv.osv):
""" Inherits partner and adds CRM information in the partner form """
_inherit = 'res.partner'
_columns = {
'emails': fields.one2many('email.message', 'partner_id', 'Emails', readonly=True),
}
res_partner()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Remove history domain in partner in eamil module.
|
Remove history domain in partner in eamil module.
bzr revid: ysa@tinyerp.com-20110204091248-wnzm7ft6cx3v34p1
|
Python
|
agpl-3.0
|
gsmartway/odoo,odootr/odoo,gdgellatly/OCB1,frouty/odoo_oph,waytai/odoo,NeovaHealth/odoo,jpshort/odoo,zchking/odoo,Elico-Corp/odoo_OCB,Maspear/odoo,takis/odoo,Gitlab11/odoo,0k/OpenUpgrade,gsmartway/odoo,grap/OpenUpgrade,cloud9UG/odoo,jfpla/odoo,syci/OCB,joariasl/odoo,vrenaville/ngo-addons-backport,mmbtba/odoo,osvalr/odoo,gorjuce/odoo,Endika/OpenUpgrade,zchking/odoo,matrixise/odoo,Gitlab11/odoo,frouty/odoo_oph,x111ong/odoo,rowemoore/odoo,oasiswork/odoo,simongoffin/website_version,fgesora/odoo,javierTerry/odoo,0k/OpenUpgrade,hmen89/odoo,KontorConsulting/odoo,JonathanStein/odoo,dalegregory/odoo,hbrunn/OpenUpgrade,massot/odoo,shivam1111/odoo,ovnicraft/odoo,thanhacun/odoo,nhomar/odoo,guerrerocarlos/odoo,glovebx/odoo,VielSoft/odoo,Nowheresly/odoo,naousse/odoo,ThinkOpen-Solutions/odoo,NL66278/OCB,ChanduERP/odoo,VitalPet/odoo,joshuajan/odoo,addition-it-solutions/project-all,collex100/odoo,sadleader/odoo,JGarcia-Panach/odoo,sinbazhou/odoo,ingadhoc/odoo,matrixise/odoo,Endika/OpenUpgrade,sysadminmatmoz/OCB,dfang/odoo,bealdav/OpenUpgrade,abdellatifkarroum/odoo,SerpentCS/odoo,hassoon3/odoo,QianBIG/odoo,factorlibre/OCB,shaufi10/odoo,CopeX/odoo,JCA-Developpement/Odoo,nagyistoce/odoo-dev-odoo,slevenhagen/odoo-npg,Ernesto99/odoo,realsaiko/odoo,hanicker/odoo,sergio-incaser/odoo,factorlibre/OCB,KontorConsulting/odoo,inspyration/odoo,shivam1111/odoo,javierTerry/odoo,rgeleta/odoo,mvaled/OpenUpgrade,sebalix/OpenUpgrade,apanju/odoo,OpenUpgrade/OpenUpgrade,jfpla/odoo,papouso/odoo,ShineFan/odoo,podemos-info/odoo,provaleks/o8,xzYue/odoo,ujjwalwahi/odoo,virgree/odoo,odooindia/odoo,hip-odoo/odoo,lsinfo/odoo,dgzurita/odoo,matrixise/odoo,brijeshkesariya/odoo,erkrishna9/odoo,JCA-Developpement/Odoo,eino-makitalo/odoo,rubencabrera/odoo,gvb/odoo,prospwro/odoo,frouty/odoogoeen,bkirui/odoo,dllsf/odootest,kittiu/odoo,incaser/odoo-odoo,joariasl/odoo,ccomb/OpenUpgrade,cloud9UG/odoo,mustafat/odoo-1,factorlibre/OCB,OpenUpgrade/OpenUpgrade,ramadhane/odoo,osvalr/odoo,damdam-s/OpenUpgrade,draugiskisprendimai/o
doo,Grirrane/odoo,provaleks/o8,fdvarela/odoo8,deKupini/erp,hanicker/odoo,highco-groupe/odoo,mkieszek/odoo,collex100/odoo,hip-odoo/odoo,windedge/odoo,factorlibre/OCB,spadae22/odoo,spadae22/odoo,dsfsdgsbngfggb/odoo,aviciimaxwell/odoo,Ichag/odoo,osvalr/odoo,OpusVL/odoo,hip-odoo/odoo,luiseduardohdbackup/odoo,makinacorpus/odoo,odoo-turkiye/odoo,joshuajan/odoo,cloud9UG/odoo,Kilhog/odoo,fdvarela/odoo8,slevenhagen/odoo,nhomar/odoo,jpshort/odoo,dariemp/odoo,shaufi/odoo,pedrobaeza/OpenUpgrade,sv-dev1/odoo,grap/OCB,agrista/odoo-saas,demon-ru/iml-crm,idncom/odoo,elmerdpadilla/iv,cpyou/odoo,camptocamp/ngo-addons-backport,cedk/odoo,PongPi/isl-odoo,bkirui/odoo,steedos/odoo,OSSESAC/odoopubarquiluz,hoatle/odoo,jusdng/odoo,apocalypsebg/odoo,addition-it-solutions/project-all,jusdng/odoo,andreparames/odoo,srsman/odoo,gavin-feng/odoo,blaggacao/OpenUpgrade,joshuajan/odoo,markeTIC/OCB,dllsf/odootest,sinbazhou/odoo,zchking/odoo,provaleks/o8,hbrunn/OpenUpgrade,odootr/odoo,stephen144/odoo,gavin-feng/odoo,leoliujie/odoo,CubicERP/odoo,elmerdpadilla/iv,ecosoft-odoo/odoo,n0m4dz/odoo,Maspear/odoo,waytai/odoo,elmerdpadilla/iv,gavin-feng/odoo,shingonoide/odoo,Endika/odoo,cloud9UG/odoo,osvalr/odoo,frouty/odoogoeen,eino-makitalo/odoo,makinacorpus/odoo,tarzan0820/odoo,AuyaJackie/odoo,avoinsystems/odoo,ovnicraft/odoo,FlorianLudwig/odoo,fuselock/odoo,mszewczy/odoo,rschnapka/odoo,bobisme/odoo,collex100/odoo,jpshort/odoo,Eric-Zhong/odoo,zchking/odoo,BT-rmartin/odoo,wangjun/odoo,makinacorpus/odoo,BT-rmartin/odoo,kittiu/odoo,takis/odoo,syci/OCB,ygol/odoo,simongoffin/website_version,VitalPet/odoo,SerpentCS/odoo,wangjun/odoo,savoirfairelinux/odoo,tinkerthaler/odoo,charbeljc/OCB,lgscofield/odoo,Ichag/odoo,ihsanudin/odoo,datenbetrieb/odoo,tarzan0820/odoo,hoatle/odoo,pedrobaeza/OpenUpgrade,tangyiyong/odoo,florentx/OpenUpgrade,ThinkOpen-Solutions/odoo,dgzurita/odoo,provaleks/o8,feroda/odoo,diagramsoftware/odoo,nagyistoce/odoo-dev-odoo,alexteodor/odoo,naousse/odoo,Endika/odoo,minhtuancn/odoo,Elico-Corp/odoo_OCB,ha
nicker/odoo,dkubiak789/odoo,odoo-turkiye/odoo,rahuldhote/odoo,bplancher/odoo,Elico-Corp/odoo_OCB,cloud9UG/odoo,RafaelTorrealba/odoo,savoirfairelinux/OpenUpgrade,laslabs/odoo,ovnicraft/odoo,Noviat/odoo,srsman/odoo,lgscofield/odoo,guewen/OpenUpgrade,leorochael/odoo,Bachaco-ve/odoo,Noviat/odoo,mlaitinen/odoo,sysadminmatmoz/OCB,fgesora/odoo,SerpentCS/odoo,alqfahad/odoo,GauravSahu/odoo,takis/odoo,gvb/odoo,savoirfairelinux/OpenUpgrade,lightcn/odoo,bakhtout/odoo-educ,colinnewell/odoo,Nick-OpusVL/odoo,VielSoft/odoo,BT-ojossen/odoo,nhomar/odoo-mirror,BT-fgarbely/odoo,csrocha/OpenUpgrade,grap/OCB,andreparames/odoo,CopeX/odoo,joshuajan/odoo,ecosoft-odoo/odoo,naousse/odoo,NL66278/OCB,shaufi10/odoo,florian-dacosta/OpenUpgrade,shaufi/odoo,synconics/odoo,csrocha/OpenUpgrade,luiseduardohdbackup/odoo,oliverhr/odoo,srimai/odoo,xujb/odoo,abenzbiria/clients_odoo,MarcosCommunity/odoo,camptocamp/ngo-addons-backport,odootr/odoo,lsinfo/odoo,fdvarela/odoo8,draugiskisprendimai/odoo,alexcuellar/odoo,avoinsystems/odoo,sv-dev1/odoo,christophlsa/odoo,ojengwa/odoo,luiseduardohdbackup/odoo,cedk/odoo,grap/OpenUpgrade,VielSoft/odoo,luistorresm/odoo,RafaelTorrealba/odoo,cpyou/odoo,janocat/odoo,ehirt/odoo,lombritz/odoo,VitalPet/odoo,aviciimaxwell/odoo,ehirt/odoo,OpenUpgrade/OpenUpgrade,agrista/odoo-saas,bealdav/OpenUpgrade,stephen144/odoo,Daniel-CA/odoo,sinbazhou/odoo,lightcn/odoo,jiangzhixiao/odoo,feroda/odoo,apanju/odoo,synconics/odoo,naousse/odoo,Danisan/odoo-1,leorochael/odoo,hanicker/odoo,Kilhog/odoo,nitinitprof/odoo,tinkerthaler/odoo,bealdav/OpenUpgrade,rahuldhote/odoo,SerpentCS/odoo,luiseduardohdbackup/odoo,numerigraphe/odoo,tvibliani/odoo,Bachaco-ve/odoo,feroda/odoo,eino-makitalo/odoo,AuyaJackie/odoo,florentx/OpenUpgrade,bwrsandman/OpenUpgrade,hanicker/odoo,fossoult/odoo,lombritz/odoo,cloud9UG/odoo,SAM-IT-SA/odoo,massot/odoo,slevenhagen/odoo,mkieszek/odoo,nagyistoce/odoo-dev-odoo,KontorConsulting/odoo,ApuliaSoftware/odoo,CatsAndDogsbvba/odoo,inspyration/odoo,ubic135/odoo-design,VitalPet/odoo,f
routy/odoo_oph,fossoult/odoo,apanju/odoo,mustafat/odoo-1,florentx/OpenUpgrade,highco-groupe/odoo,waytai/odoo,oasiswork/odoo,abenzbiria/clients_odoo,ihsanudin/odoo,BT-fgarbely/odoo,arthru/OpenUpgrade,jusdng/odoo,sv-dev1/odoo,tinkerthaler/odoo,abdellatifkarroum/odoo,numerigraphe/odoo,credativUK/OCB,synconics/odoo,rschnapka/odoo,abstract-open-solutions/OCB,kirca/OpenUpgrade,jusdng/odoo,mmbtba/odoo,blaggacao/OpenUpgrade,hassoon3/odoo,apocalypsebg/odoo,deKupini/erp,thanhacun/odoo,demon-ru/iml-crm,slevenhagen/odoo,rdeheele/odoo,rschnapka/odoo,gavin-feng/odoo,sve-odoo/odoo,mvaled/OpenUpgrade,glovebx/odoo,kybriainfotech/iSocioCRM,Noviat/odoo,acshan/odoo,chiragjogi/odoo,janocat/odoo,juanalfonsopr/odoo,gavin-feng/odoo,numerigraphe/odoo,nuuuboo/odoo,goliveirab/odoo,ecosoft-odoo/odoo,nuncjo/odoo,chiragjogi/odoo,charbeljc/OCB,ihsanudin/odoo,OpenUpgrade-dev/OpenUpgrade,stonegithubs/odoo,OpusVL/odoo,lombritz/odoo,jolevq/odoopub,tinkhaven-organization/odoo,bguillot/OpenUpgrade,idncom/odoo,CatsAndDogsbvba/odoo,leorochael/odoo,dsfsdgsbngfggb/odoo,fossoult/odoo,Maspear/odoo,Adel-Magebinary/odoo,BT-fgarbely/odoo,TRESCLOUD/odoopub,nagyistoce/odoo-dev-odoo,patmcb/odoo,ChanduERP/odoo,dsfsdgsbngfggb/odoo,havt/odoo,erkrishna9/odoo,mlaitinen/odoo,GauravSahu/odoo,collex100/odoo,ihsanudin/odoo,joshuajan/odoo,ShineFan/odoo,gdgellatly/OCB1,havt/odoo,nagyistoce/odoo-dev-odoo,doomsterinc/odoo,storm-computers/odoo,bwrsandman/OpenUpgrade,Adel-Magebinary/odoo,patmcb/odoo,markeTIC/OCB,rubencabrera/odoo,codekaki/odoo,hoatle/odoo,realsaiko/odoo,BT-astauder/odoo,dariemp/odoo,pplatek/odoo,rahuldhote/odoo,doomsterinc/odoo,alqfahad/odoo,alexcuellar/odoo,fuselock/odoo,christophlsa/odoo,avoinsystems/odoo,luistorresm/odoo,windedge/odoo,Eric-Zhong/odoo,shaufi/odoo,laslabs/odoo,dariemp/odoo,hassoon3/odoo,charbeljc/OCB,Gitlab11/odoo,lombritz/odoo,jfpla/odoo,diagramsoftware/odoo,bobisme/odoo,odooindia/odoo,camptocamp/ngo-addons-backport,kybriainfotech/iSocioCRM,MarcosCommunity/odoo,stephen144/odoo,Noviat/odoo,Cope
X/odoo,shivam1111/odoo,brijeshkesariya/odoo,takis/odoo,xujb/odoo,Endika/odoo,vnsofthe/odoo,ApuliaSoftware/odoo,acshan/odoo,dfang/odoo,lightcn/odoo,nexiles/odoo,prospwro/odoo,VitalPet/odoo,nexiles/odoo,laslabs/odoo,frouty/odoogoeen,Gitlab11/odoo,oliverhr/odoo,stonegithubs/odoo,rschnapka/odoo,optima-ict/odoo,nexiles/odoo,pplatek/odoo,gdgellatly/OCB1,blaggacao/OpenUpgrade,srsman/odoo,provaleks/o8,florian-dacosta/OpenUpgrade,rgeleta/odoo,codekaki/odoo,slevenhagen/odoo,chiragjogi/odoo,dalegregory/odoo,gorjuce/odoo,shivam1111/odoo,RafaelTorrealba/odoo,hmen89/odoo,doomsterinc/odoo,VitalPet/odoo,mmbtba/odoo,hoatle/odoo,leorochael/odoo,vrenaville/ngo-addons-backport,0k/OpenUpgrade,leoliujie/odoo,oihane/odoo,draugiskisprendimai/odoo,apanju/odoo,gvb/odoo,feroda/odoo,draugiskisprendimai/odoo,pedrobaeza/odoo,dezynetechnologies/odoo,aviciimaxwell/odoo,ujjwalwahi/odoo,factorlibre/OCB,salaria/odoo,jiangzhixiao/odoo,rdeheele/odoo,steedos/odoo,abstract-open-solutions/OCB,chiragjogi/odoo,rubencabrera/odoo,mkieszek/odoo,savoirfairelinux/OpenUpgrade,Danisan/odoo-1,incaser/odoo-odoo,tangyiyong/odoo,x111ong/odoo,odoo-turkiye/odoo,demon-ru/iml-crm,apanju/GMIO_Odoo,bakhtout/odoo-educ,Endika/odoo,havt/odoo,kybriainfotech/iSocioCRM,andreparames/odoo,fuhongliang/odoo,Ernesto99/odoo,Adel-Magebinary/odoo,synconics/odoo,ClearCorp-dev/odoo,sergio-incaser/odoo,BT-astauder/odoo,jiangzhixiao/odoo,RafaelTorrealba/odoo,lgscofield/odoo,grap/OpenUpgrade,Nowheresly/odoo,vrenaville/ngo-addons-backport,pplatek/odoo,Eric-Zhong/odoo,ThinkOpen-Solutions/odoo,alhashash/odoo,ClearCorp-dev/odoo,addition-it-solutions/project-all,AuyaJackie/odoo,simongoffin/website_version,Drooids/odoo,cdrooom/odoo,dfang/odoo,vnsofthe/odoo,joariasl/odoo,sve-odoo/odoo,joariasl/odoo,alexcuellar/odoo,datenbetrieb/odoo,wangjun/odoo,jesramirez/odoo,jfpla/odoo,ygol/odoo,jaxkodex/odoo,kirca/OpenUpgrade,odoo-turkiye/odoo,VitalPet/odoo,fuhongliang/odoo,Antiun/odoo,alqfahad/odoo,makinacorpus/odoo,rgeleta/odoo,x111ong/odoo,numerigraphe/odoo,N
eovaHealth/odoo,syci/OCB,gsmartway/odoo,BT-fgarbely/odoo,florentx/OpenUpgrade,minhtuancn/odoo,shaufi10/odoo,NeovaHealth/odoo,ApuliaSoftware/odoo,diagramsoftware/odoo,BT-astauder/odoo,ApuliaSoftware/odoo,mvaled/OpenUpgrade,ujjwalwahi/odoo,BT-ojossen/odoo,kittiu/odoo,jiangzhixiao/odoo,laslabs/odoo,joariasl/odoo,slevenhagen/odoo-npg,xzYue/odoo,ojengwa/odoo,omprakasha/odoo,kirca/OpenUpgrade,agrista/odoo-saas,savoirfairelinux/OpenUpgrade,syci/OCB,lgscofield/odoo,hoatle/odoo,fossoult/odoo,abdellatifkarroum/odoo,dkubiak789/odoo,highco-groupe/odoo,Noviat/odoo,hbrunn/OpenUpgrade,slevenhagen/odoo,bealdav/OpenUpgrade,pplatek/odoo,BT-fgarbely/odoo,lsinfo/odoo,ubic135/odoo-design,ovnicraft/odoo,Ichag/odoo,AuyaJackie/odoo,SAM-IT-SA/odoo,andreparames/odoo,ramadhane/odoo,JCA-Developpement/Odoo,odoousers2014/odoo,Endika/OpenUpgrade,blaggacao/OpenUpgrade,abenzbiria/clients_odoo,stephen144/odoo,makinacorpus/odoo,fevxie/odoo,credativUK/OCB,mmbtba/odoo,OpenPymeMx/OCB,alexteodor/odoo,n0m4dz/odoo,Kilhog/odoo,luistorresm/odoo,lombritz/odoo,slevenhagen/odoo-npg,apanju/odoo,bealdav/OpenUpgrade,shaufi10/odoo,damdam-s/OpenUpgrade,mlaitinen/odoo,jesramirez/odoo,bplancher/odoo,Ernesto99/odoo,AuyaJackie/odoo,stonegithubs/odoo,Endika/odoo,glovebx/odoo,OpenUpgrade/OpenUpgrade,slevenhagen/odoo-npg,joariasl/odoo,ChanduERP/odoo,SerpentCS/odoo,jaxkodex/odoo,Kilhog/odoo,storm-computers/odoo,rdeheele/odoo,juanalfonsopr/odoo,stonegithubs/odoo,jeasoft/odoo,syci/OCB,Ichag/odoo,hopeall/odoo,ccomb/OpenUpgrade,CatsAndDogsbvba/odoo,ramitalat/odoo,CubicERP/odoo,microcom/odoo,jiangzhixiao/odoo,tinkhaven-organization/odoo,spadae22/odoo,CubicERP/odoo,BT-fgarbely/odoo,ecosoft-odoo/odoo,dezynetechnologies/odoo,hassoon3/odoo,sv-dev1/odoo,klunwebale/odoo,OpenUpgrade/OpenUpgrade,arthru/OpenUpgrade,jpshort/odoo,nuuuboo/odoo,Drooids/odoo,fuselock/odoo,nuncjo/odoo,JGarcia-Panach/odoo,omprakasha/odoo,pedrobaeza/odoo,Eric-Zhong/odoo,jfpla/odoo,idncom/odoo,sve-odoo/odoo,sergio-incaser/odoo,BT-rmartin/odoo,minhtuancn/odoo,jusd
ng/odoo,numerigraphe/odoo,draugiskisprendimai/odoo,hifly/OpenUpgrade,shingonoide/odoo,pedrobaeza/odoo,TRESCLOUD/odoopub,diagramsoftware/odoo,ygol/odoo,Endika/OpenUpgrade,damdam-s/OpenUpgrade,CubicERP/odoo,salaria/odoo,oliverhr/odoo,n0m4dz/odoo,abstract-open-solutions/OCB,MarcosCommunity/odoo,tvtsoft/odoo8,x111ong/odoo,acshan/odoo,Nick-OpusVL/odoo,oihane/odoo,blaggacao/OpenUpgrade,Ernesto99/odoo,OSSESAC/odoopubarquiluz,dsfsdgsbngfggb/odoo,lightcn/odoo,minhtuancn/odoo,JCA-Developpement/Odoo,poljeff/odoo,slevenhagen/odoo-npg,leoliujie/odoo,OpenPymeMx/OCB,kifcaliph/odoo,ramitalat/odoo,sysadminmatmoz/OCB,stonegithubs/odoo,Danisan/odoo-1,mlaitinen/odoo,rowemoore/odoo,sergio-incaser/odoo,Maspear/odoo,bakhtout/odoo-educ,nagyistoce/odoo-dev-odoo,RafaelTorrealba/odoo,Daniel-CA/odoo,patmcb/odoo,podemos-info/odoo,optima-ict/odoo,Nick-OpusVL/odoo,sve-odoo/odoo,sebalix/OpenUpgrade,Nowheresly/odoo,ThinkOpen-Solutions/odoo,markeTIC/OCB,mkieszek/odoo,dllsf/odootest,srimai/odoo,ovnicraft/odoo,colinnewell/odoo,ubic135/odoo-design,0k/odoo,Endika/odoo,AuyaJackie/odoo,CopeX/odoo,fgesora/odoo,Maspear/odoo,FlorianLudwig/odoo,stephen144/odoo,OSSESAC/odoopubarquiluz,mvaled/OpenUpgrade,ramitalat/odoo,datenbetrieb/odoo,poljeff/odoo,mustafat/odoo-1,alqfahad/odoo,kifcaliph/odoo,SerpentCS/odoo,hifly/OpenUpgrade,hopeall/odoo,fevxie/odoo,bkirui/odoo,Adel-Magebinary/odoo,gdgellatly/OCB1,hoatle/odoo,alhashash/odoo,jesramirez/odoo,codekaki/odoo,Grirrane/odoo,hassoon3/odoo,ingadhoc/odoo,nexiles/odoo,odoousers2014/odoo,dgzurita/odoo,Eric-Zhong/odoo,Eric-Zhong/odoo,BT-ojossen/odoo,jaxkodex/odoo,havt/odoo,nexiles/odoo,eino-makitalo/odoo,storm-computers/odoo,microcom/odoo,avoinsystems/odoo,charbeljc/OCB,Adel-Magebinary/odoo,CatsAndDogsbvba/odoo,dezynetechnologies/odoo,rschnapka/odoo,JGarcia-Panach/odoo,tinkhaven-organization/odoo,naousse/odoo,steedos/odoo,PongPi/isl-odoo,cedk/odoo,PongPi/isl-odoo,gsmartway/odoo,sebalix/OpenUpgrade,brijeshkesariya/odoo,abstract-open-solutions/OCB,JonathanStein/odoo,ujjwalwa
hi/odoo,sergio-incaser/odoo,apanju/GMIO_Odoo,GauravSahu/odoo,ygol/odoo,idncom/odoo,stonegithubs/odoo,factorlibre/OCB,odootr/odoo,addition-it-solutions/project-all,mustafat/odoo-1,shaufi10/odoo,christophlsa/odoo,nitinitprof/odoo,srimai/odoo,salaria/odoo,n0m4dz/odoo,Codefans-fan/odoo,grap/OpenUpgrade,charbeljc/OCB,rgeleta/odoo,srsman/odoo,erkrishna9/odoo,dfang/odoo,jpshort/odoo,sergio-incaser/odoo,dkubiak789/odoo,fjbatresv/odoo,tvibliani/odoo,pplatek/odoo,abdellatifkarroum/odoo,VielSoft/odoo,alexcuellar/odoo,highco-groupe/odoo,nhomar/odoo,JonathanStein/odoo,PongPi/isl-odoo,nuuuboo/odoo,tangyiyong/odoo,sysadminmatmoz/OCB,fuselock/odoo,ojengwa/odoo,Drooids/odoo,synconics/odoo,Kilhog/odoo,Drooids/odoo,frouty/odoogoeen,colinnewell/odoo,oasiswork/odoo,omprakasha/odoo,savoirfairelinux/odoo,NL66278/OCB,addition-it-solutions/project-all,gdgellatly/OCB1,x111ong/odoo,jolevq/odoopub,fuhongliang/odoo,bobisme/odoo,nhomar/odoo-mirror,FlorianLudwig/odoo,QianBIG/odoo,mszewczy/odoo,rubencabrera/odoo,Grirrane/odoo,kybriainfotech/iSocioCRM,fevxie/odoo,srimai/odoo,odooindia/odoo,aviciimaxwell/odoo,CopeX/odoo,dkubiak789/odoo,lgscofield/odoo,Antiun/odoo,stonegithubs/odoo,n0m4dz/odoo,ApuliaSoftware/odoo,Codefans-fan/odoo,cpyou/odoo,SAM-IT-SA/odoo,prospwro/odoo,shaufi10/odoo,odoousers2014/odoo,leorochael/odoo,acshan/odoo,alexteodor/odoo,cdrooom/odoo,mszewczy/odoo,papouso/odoo,hopeall/odoo,Nowheresly/odoo,arthru/OpenUpgrade,vrenaville/ngo-addons-backport,simongoffin/website_version,camptocamp/ngo-addons-backport,Daniel-CA/odoo,nitinitprof/odoo,0k/OpenUpgrade,microcom/odoo,Grirrane/odoo,grap/OpenUpgrade,alexcuellar/odoo,mkieszek/odoo,omprakasha/odoo,MarcosCommunity/odoo,oihane/odoo,doomsterinc/odoo,Antiun/odoo,deKupini/erp,ojengwa/odoo,nhomar/odoo,acshan/odoo,Nowheresly/odoo,guerrerocarlos/odoo,highco-groupe/odoo,wangjun/odoo,osvalr/odoo,demon-ru/iml-crm,abstract-open-solutions/OCB,tvtsoft/odoo8,bguillot/OpenUpgrade,mvaled/OpenUpgrade,hip-odoo/odoo,damdam-s/OpenUpgrade,fevxie/odoo,pplatek/odoo
,dezynetechnologies/odoo,OpenPymeMx/OCB,codekaki/odoo,goliveirab/odoo,klunwebale/odoo,guerrerocarlos/odoo,klunwebale/odoo,0k/odoo,papouso/odoo,oihane/odoo,camptocamp/ngo-addons-backport,QianBIG/odoo,GauravSahu/odoo,luistorresm/odoo,rowemoore/odoo,OpenUpgrade-dev/OpenUpgrade,kirca/OpenUpgrade,kirca/OpenUpgrade,bobisme/odoo,savoirfairelinux/OpenUpgrade,javierTerry/odoo,ojengwa/odoo,OpenPymeMx/OCB,CatsAndDogsbvba/odoo,tinkerthaler/odoo,tvibliani/odoo,glovebx/odoo,glovebx/odoo,klunwebale/odoo,CatsAndDogsbvba/odoo,VitalPet/odoo,brijeshkesariya/odoo,grap/OpenUpgrade,diagramsoftware/odoo,nitinitprof/odoo,csrocha/OpenUpgrade,bguillot/OpenUpgrade,gdgellatly/OCB1,fossoult/odoo,christophlsa/odoo,TRESCLOUD/odoopub,cedk/odoo,ccomb/OpenUpgrade,storm-computers/odoo,srimai/odoo,wangjun/odoo,tinkhaven-organization/odoo,jusdng/odoo,AuyaJackie/odoo,vnsofthe/odoo,oliverhr/odoo,takis/odoo,credativUK/OCB,PongPi/isl-odoo,abdellatifkarroum/odoo,oihane/odoo,xzYue/odoo,hbrunn/OpenUpgrade,Bachaco-ve/odoo,frouty/odoo_oph,luiseduardohdbackup/odoo,cpyou/odoo,fuhongliang/odoo,abdellatifkarroum/odoo,dalegregory/odoo,doomsterinc/odoo,sadleader/odoo,juanalfonsopr/odoo,credativUK/OCB,leoliujie/odoo,QianBIG/odoo,apanju/GMIO_Odoo,savoirfairelinux/odoo,pedrobaeza/odoo,oliverhr/odoo,dsfsdgsbngfggb/odoo,Grirrane/odoo,poljeff/odoo,kittiu/odoo,jfpla/odoo,funkring/fdoo,KontorConsulting/odoo,dgzurita/odoo,MarcosCommunity/odoo,arthru/OpenUpgrade,hopeall/odoo,fuselock/odoo,srsman/odoo,csrocha/OpenUpgrade,ingadhoc/odoo,gsmartway/odoo,guerrerocarlos/odoo,vrenaville/ngo-addons-backport,Elico-Corp/odoo_OCB,NeovaHealth/odoo,virgree/odoo,KontorConsulting/odoo,xujb/odoo,fuhongliang/odoo,avoinsystems/odoo,nuuuboo/odoo,dgzurita/odoo,fevxie/odoo,tinkhaven-organization/odoo,ThinkOpen-Solutions/odoo,steedos/odoo,jaxkodex/odoo,hanicker/odoo,OpenUpgrade/OpenUpgrade,pplatek/odoo,bealdav/OpenUpgrade,christophlsa/odoo,codekaki/odoo,omprakasha/odoo,alhashash/odoo,nuncjo/odoo,OpusVL/odoo,xzYue/odoo,pedrobaeza/OpenUpgrade,hmen89/o
doo,hmen89/odoo,massot/odoo,Gitlab11/odoo,damdam-s/OpenUpgrade,bguillot/OpenUpgrade,brijeshkesariya/odoo,NL66278/OCB,nhomar/odoo-mirror,dfang/odoo,codekaki/odoo,Codefans-fan/odoo,chiragjogi/odoo,salaria/odoo,cdrooom/odoo,florian-dacosta/OpenUpgrade,feroda/odoo,jaxkodex/odoo,juanalfonsopr/odoo,sysadminmatmoz/OCB,ingadhoc/odoo,tarzan0820/odoo,hubsaysnuaa/odoo,kybriainfotech/iSocioCRM,CopeX/odoo,jeasoft/odoo,havt/odoo,kittiu/odoo,JonathanStein/odoo,hubsaysnuaa/odoo,GauravSahu/odoo,frouty/odoogoeen,xujb/odoo,joariasl/odoo,tangyiyong/odoo,ihsanudin/odoo,mszewczy/odoo,ujjwalwahi/odoo,vnsofthe/odoo,ThinkOpen-Solutions/odoo,rowemoore/odoo,OSSESAC/odoopubarquiluz,guewen/OpenUpgrade,feroda/odoo,lsinfo/odoo,0k/odoo,ecosoft-odoo/odoo,Adel-Magebinary/odoo,Maspear/odoo,tangyiyong/odoo,sebalix/OpenUpgrade,avoinsystems/odoo,sv-dev1/odoo,papouso/odoo,thanhacun/odoo,MarcosCommunity/odoo,datenbetrieb/odoo,shivam1111/odoo,papouso/odoo,fgesora/odoo,patmcb/odoo,janocat/odoo,mustafat/odoo-1,virgree/odoo,ingadhoc/odoo,optima-ict/odoo,xzYue/odoo,dalegregory/odoo,KontorConsulting/odoo,inspyration/odoo,slevenhagen/odoo,Nick-OpusVL/odoo,charbeljc/OCB,numerigraphe/odoo,kybriainfotech/iSocioCRM,waytai/odoo,abstract-open-solutions/OCB,elmerdpadilla/iv,tinkhaven-organization/odoo,juanalfonsopr/odoo,0k/OpenUpgrade,hubsaysnuaa/odoo,Ernesto99/odoo,funkring/fdoo,colinnewell/odoo,apocalypsebg/odoo,ShineFan/odoo,fgesora/odoo,datenbetrieb/odoo,ChanduERP/odoo,gavin-feng/odoo,nuncjo/odoo,Elico-Corp/odoo_OCB,Ichag/odoo,hifly/OpenUpgrade,bakhtout/odoo-educ,apanju/GMIO_Odoo,hopeall/odoo,tarzan0820/odoo,jesramirez/odoo,rschnapka/odoo,ccomb/OpenUpgrade,fuselock/odoo,jiachenning/odoo,spadae22/odoo,JGarcia-Panach/odoo,ShineFan/odoo,fevxie/odoo,rahuldhote/odoo,Nick-OpusVL/odoo,Endika/OpenUpgrade,ChanduERP/odoo,klunwebale/odoo,guerrerocarlos/odoo,deKupini/erp,NeovaHealth/odoo,guewen/OpenUpgrade,jeasoft/odoo,fjbatresv/odoo,bplancher/odoo,havt/odoo,RafaelTorrealba/odoo,grap/OCB,jiachenning/odoo,provaleks/o8,mmbtba/od
oo,waytai/odoo,juanalfonsopr/odoo,SAM-IT-SA/odoo,windedge/odoo,ccomb/OpenUpgrade,ShineFan/odoo,bguillot/OpenUpgrade,OSSESAC/odoopubarquiluz,lombritz/odoo,alqfahad/odoo,bobisme/odoo,nuncjo/odoo,synconics/odoo,shingonoide/odoo,Elico-Corp/odoo_OCB,fgesora/odoo,oasiswork/odoo,oliverhr/odoo,guerrerocarlos/odoo,jaxkodex/odoo,odoousers2014/odoo,luistorresm/odoo,fgesora/odoo,guewen/OpenUpgrade,apanju/GMIO_Odoo,podemos-info/odoo,osvalr/odoo,ojengwa/odoo,fuhongliang/odoo,steedos/odoo,alhashash/odoo,jpshort/odoo,goliveirab/odoo,pedrobaeza/OpenUpgrade,agrista/odoo-saas,diagramsoftware/odoo,xujb/odoo,juanalfonsopr/odoo,shivam1111/odoo,eino-makitalo/odoo,hbrunn/OpenUpgrade,poljeff/odoo,sebalix/OpenUpgrade,tinkerthaler/odoo,ShineFan/odoo,jiachenning/odoo,ujjwalwahi/odoo,dezynetechnologies/odoo,dfang/odoo,jusdng/odoo,optima-ict/odoo,nexiles/odoo,Bachaco-ve/odoo,tvtsoft/odoo8,TRESCLOUD/odoopub,mvaled/OpenUpgrade,Bachaco-ve/odoo,odooindia/odoo,blaggacao/OpenUpgrade,janocat/odoo,dgzurita/odoo,bwrsandman/OpenUpgrade,shingonoide/odoo,0k/odoo,ygol/odoo,spadae22/odoo,bwrsandman/OpenUpgrade,Ichag/odoo,rowemoore/odoo,janocat/odoo,minhtuancn/odoo,dllsf/odootest,fjbatresv/odoo,funkring/fdoo,sinbazhou/odoo,glovebx/odoo,mmbtba/odoo,draugiskisprendimai/odoo,apanju/GMIO_Odoo,draugiskisprendimai/odoo,papouso/odoo,brijeshkesariya/odoo,ApuliaSoftware/odoo,alhashash/odoo,vrenaville/ngo-addons-backport,rschnapka/odoo,hubsaysnuaa/odoo,funkring/fdoo,damdam-s/OpenUpgrade,Bachaco-ve/odoo,MarcosCommunity/odoo,odoo-turkiye/odoo,bakhtout/odoo-educ,odooindia/odoo,fdvarela/odoo8,odoo-turkiye/odoo,Endika/OpenUpgrade,xujb/odoo,guewen/OpenUpgrade,sadleader/odoo,fuhongliang/odoo,sve-odoo/odoo,poljeff/odoo,jeasoft/odoo,matrixise/odoo,janocat/odoo,Codefans-fan/odoo,havt/odoo,apocalypsebg/odoo,erkrishna9/odoo,fjbatresv/odoo,pedrobaeza/odoo,BT-rmartin/odoo,OpenPymeMx/OCB,ujjwalwahi/odoo,laslabs/odoo,apocalypsebg/odoo,PongPi/isl-odoo,idncom/odoo,kirca/OpenUpgrade,janocat/odoo,OpenPymeMx/OCB,ovnicraft/odoo,OpenUpgrade-d
ev/OpenUpgrade,xzYue/odoo,chiragjogi/odoo,shaufi/odoo,addition-it-solutions/project-all,sinbazhou/odoo,florian-dacosta/OpenUpgrade,frouty/odoogoeen,oasiswork/odoo,wangjun/odoo,leoliujie/odoo,fjbatresv/odoo,florian-dacosta/OpenUpgrade,gdgellatly/OCB1,OpenUpgrade-dev/OpenUpgrade,tvtsoft/odoo8,fevxie/odoo,rowemoore/odoo,grap/OCB,collex100/odoo,cysnake4713/odoo,vrenaville/ngo-addons-backport,camptocamp/ngo-addons-backport,lightcn/odoo,Kilhog/odoo,elmerdpadilla/iv,credativUK/OCB,mustafat/odoo-1,andreparames/odoo,nuncjo/odoo,MarcosCommunity/odoo,kifcaliph/odoo,optima-ict/odoo,oihane/odoo,bakhtout/odoo-educ,cysnake4713/odoo,patmcb/odoo,savoirfairelinux/odoo,thanhacun/odoo,codekaki/odoo,podemos-info/odoo,Codefans-fan/odoo,OSSESAC/odoopubarquiluz,jiachenning/odoo,guerrerocarlos/odoo,incaser/odoo-odoo,ojengwa/odoo,NL66278/OCB,frouty/odoogoeen,mszewczy/odoo,TRESCLOUD/odoopub,lgscofield/odoo,Ichag/odoo,luiseduardohdbackup/odoo,BT-fgarbely/odoo,numerigraphe/odoo,CatsAndDogsbvba/odoo,agrista/odoo-saas,JGarcia-Panach/odoo,bguillot/OpenUpgrade,gorjuce/odoo,CubicERP/odoo,mkieszek/odoo,gvb/odoo,jiangzhixiao/odoo,dllsf/odootest,ShineFan/odoo,apocalypsebg/odoo,mustafat/odoo-1,sebalix/OpenUpgrade,luistorresm/odoo,hopeall/odoo,nitinitprof/odoo,Nick-OpusVL/odoo,bwrsandman/OpenUpgrade,gvb/odoo,grap/OCB,grap/OpenUpgrade,salaria/odoo,rgeleta/odoo,ramadhane/odoo,JonathanStein/odoo,JonathanStein/odoo,Antiun/odoo,nagyistoce/odoo-dev-odoo,makinacorpus/odoo,ApuliaSoftware/odoo,patmcb/odoo,ramadhane/odoo,fjbatresv/odoo,mszewczy/odoo,nexiles/odoo,BT-rmartin/odoo,massot/odoo,lombritz/odoo,VielSoft/odoo,abdellatifkarroum/odoo,nhomar/odoo,hifly/OpenUpgrade,lsinfo/odoo,ehirt/odoo,NeovaHealth/odoo,VielSoft/odoo,cysnake4713/odoo,NeovaHealth/odoo,tangyiyong/odoo,dariemp/odoo,ecosoft-odoo/odoo,bguillot/OpenUpgrade,srimai/odoo,kifcaliph/odoo,ramitalat/odoo,javierTerry/odoo,cpyou/odoo,OpusVL/odoo,waytai/odoo,OpenPymeMx/OCB,ehirt/odoo,windedge/odoo,mmbtba/odoo,FlorianLudwig/odoo,markeTIC/OCB,jiangzhixiao/odoo
,javierTerry/odoo,charbeljc/OCB,alhashash/odoo,OpenUpgrade-dev/OpenUpgrade,arthru/OpenUpgrade,oliverhr/odoo,Noviat/odoo,ovnicraft/odoo,nhomar/odoo,Nowheresly/odoo,ingadhoc/odoo,sysadminmatmoz/OCB,goliveirab/odoo,rgeleta/odoo,Drooids/odoo,odoousers2014/odoo,pedrobaeza/OpenUpgrade,jeasoft/odoo,storm-computers/odoo,PongPi/isl-odoo,bplancher/odoo,andreparames/odoo,tvibliani/odoo,ramitalat/odoo,storm-computers/odoo,naousse/odoo,hanicker/odoo,dgzurita/odoo,codekaki/odoo,shingonoide/odoo,Danisan/odoo-1,BT-astauder/odoo,microcom/odoo,idncom/odoo,tarzan0820/odoo,odoousers2014/odoo,tarzan0820/odoo,jesramirez/odoo,pedrobaeza/odoo,BT-ojossen/odoo,Eric-Zhong/odoo,kittiu/odoo,oasiswork/odoo,prospwro/odoo,JGarcia-Panach/odoo,Antiun/odoo,andreparames/odoo,Endika/odoo,arthru/OpenUpgrade,prospwro/odoo,matrixise/odoo,kybriainfotech/iSocioCRM,cysnake4713/odoo,alexcuellar/odoo,Danisan/odoo-1,alqfahad/odoo,rahuldhote/odoo,slevenhagen/odoo-npg,lgscofield/odoo,funkring/fdoo,minhtuancn/odoo,Nick-OpusVL/odoo,SAM-IT-SA/odoo,Ernesto99/odoo,ChanduERP/odoo,virgree/odoo,podemos-info/odoo,BT-ojossen/odoo,bwrsandman/OpenUpgrade,rowemoore/odoo,dezynetechnologies/odoo,BT-ojossen/odoo,colinnewell/odoo,pedrobaeza/OpenUpgrade,gdgellatly/OCB1,nitinitprof/odoo,bkirui/odoo,SAM-IT-SA/odoo,Gitlab11/odoo,Bachaco-ve/odoo,zchking/odoo,bplancher/odoo,joshuajan/odoo,tvtsoft/odoo8,goliveirab/odoo,patmcb/odoo,realsaiko/odoo,spadae22/odoo,ehirt/odoo,ecosoft-odoo/odoo,lsinfo/odoo,dariemp/odoo,ihsanudin/odoo,demon-ru/iml-crm,goliveirab/odoo,rubencabrera/odoo,shaufi/odoo,ThinkOpen-Solutions/odoo,glovebx/odoo,QianBIG/odoo,odoo-turkiye/odoo,abenzbiria/clients_odoo,savoirfairelinux/odoo,feroda/odoo,dkubiak789/odoo,n0m4dz/odoo,mszewczy/odoo,datenbetrieb/odoo,cedk/odoo,ClearCorp-dev/odoo,bplancher/odoo,nitinitprof/odoo,incaser/odoo-odoo,omprakasha/odoo,dkubiak789/odoo,ehirt/odoo,gvb/odoo,rahuldhote/odoo,tvtsoft/odoo8,credativUK/OCB,hopeall/odoo,Gitlab11/odoo,windedge/odoo,Grirrane/odoo,ramitalat/odoo,waytai/odoo,fuselock/od
oo,sadleader/odoo,thanhacun/odoo,odootr/odoo,markeTIC/OCB,lsinfo/odoo,bobisme/odoo,srsman/odoo,OpenUpgrade-dev/OpenUpgrade,Antiun/odoo,rdeheele/odoo,gsmartway/odoo,savoirfairelinux/OpenUpgrade,gsmartway/odoo,guewen/OpenUpgrade,Danisan/odoo-1,florentx/OpenUpgrade,datenbetrieb/odoo,fossoult/odoo,deKupini/erp,tarzan0820/odoo,slevenhagen/odoo-npg,thanhacun/odoo,jfpla/odoo,savoirfairelinux/odoo,tinkerthaler/odoo,makinacorpus/odoo,diagramsoftware/odoo,prospwro/odoo,hifly/OpenUpgrade,Noviat/odoo,oasiswork/odoo,apanju/GMIO_Odoo,leoliujie/odoo,apanju/odoo,hassoon3/odoo,slevenhagen/odoo,jiachenning/odoo,xzYue/odoo,SerpentCS/odoo,rahuldhote/odoo,incaser/odoo-odoo,salaria/odoo,realsaiko/odoo,avoinsystems/odoo,FlorianLudwig/odoo,gorjuce/odoo,ygol/odoo,florentx/OpenUpgrade,cedk/odoo,poljeff/odoo,rgeleta/odoo,Ernesto99/odoo,rdeheele/odoo,srsman/odoo,shaufi10/odoo,alexcuellar/odoo,FlorianLudwig/odoo,jpshort/odoo,oihane/odoo,odootr/odoo,optima-ict/odoo,nuuuboo/odoo,klunwebale/odoo,gorjuce/odoo,OpenUpgrade/OpenUpgrade,jeasoft/odoo,vnsofthe/odoo,CopeX/odoo,erkrishna9/odoo,markeTIC/OCB,gavin-feng/odoo,shaufi/odoo,sysadminmatmoz/OCB,rubencabrera/odoo,dalegregory/odoo,csrocha/OpenUpgrade,takis/odoo,hmen89/odoo,hip-odoo/odoo,cysnake4713/odoo,Danisan/odoo-1,mlaitinen/odoo,funkring/fdoo,gorjuce/odoo,windedge/odoo,dezynetechnologies/odoo,hubsaysnuaa/odoo,omprakasha/odoo,Daniel-CA/odoo,alqfahad/odoo,sadleader/odoo,Daniel-CA/odoo,ramadhane/odoo,shingonoide/odoo,odootr/odoo,funkring/fdoo,dalegregory/odoo,virgree/odoo,sv-dev1/odoo,alexteodor/odoo,massot/odoo,tinkerthaler/odoo,kittiu/odoo,SAM-IT-SA/odoo,aviciimaxwell/odoo,gorjuce/odoo,stephen144/odoo,luistorresm/odoo,fdvarela/odoo8,ygol/odoo,bwrsandman/OpenUpgrade,Drooids/odoo,acshan/odoo,apanju/odoo,hbrunn/OpenUpgrade,klunwebale/odoo,inspyration/odoo,abstract-open-solutions/OCB,acshan/odoo,sv-dev1/odoo,FlorianLudwig/odoo,florian-dacosta/OpenUpgrade,ccomb/OpenUpgrade,colinnewell/odoo,cedk/odoo,hubsaysnuaa/odoo,papouso/odoo,goliveirab/odoo,frouty/
odoo_oph,nuuuboo/odoo,mlaitinen/odoo,lightcn/odoo,microcom/odoo,jolevq/odoopub,sinbazhou/odoo,hifly/OpenUpgrade,x111ong/odoo,ClearCorp-dev/odoo,dalegregory/odoo,mlaitinen/odoo,Kilhog/odoo,realsaiko/odoo,steedos/odoo,hoatle/odoo,christophlsa/odoo,nuncjo/odoo,javierTerry/odoo,leorochael/odoo,bakhtout/odoo-educ,leoliujie/odoo,bkirui/odoo,incaser/odoo-odoo,spadae22/odoo,x111ong/odoo,dsfsdgsbngfggb/odoo,aviciimaxwell/odoo,jeasoft/odoo,laslabs/odoo,ehirt/odoo,Daniel-CA/odoo,naousse/odoo,VielSoft/odoo,JCA-Developpement/Odoo,brijeshkesariya/odoo,csrocha/OpenUpgrade,Endika/OpenUpgrade,sebalix/OpenUpgrade,zchking/odoo,grap/OCB,Maspear/odoo,jolevq/odoopub,Codefans-fan/odoo,synconics/odoo,wangjun/odoo,provaleks/o8,0k/odoo,ccomb/OpenUpgrade,tvibliani/odoo,takis/odoo,collex100/odoo,guewen/OpenUpgrade,tvibliani/odoo,jeasoft/odoo,cdrooom/odoo,QianBIG/odoo,syci/OCB,mvaled/OpenUpgrade,frouty/odoo_oph,0k/OpenUpgrade,jolevq/odoopub,steedos/odoo,factorlibre/OCB,hifly/OpenUpgrade,poljeff/odoo,dariemp/odoo,markeTIC/OCB,CubicERP/odoo,BT-ojossen/odoo,fossoult/odoo,Antiun/odoo,lightcn/odoo,zchking/odoo,chiragjogi/odoo,simongoffin/website_version,podemos-info/odoo,CubicERP/odoo,salaria/odoo,kirca/OpenUpgrade,eino-makitalo/odoo,blaggacao/OpenUpgrade,ChanduERP/odoo,credativUK/OCB,nhomar/odoo-mirror,tinkhaven-organization/odoo,christophlsa/odoo,nhomar/odoo-mirror,minhtuancn/odoo,RafaelTorrealba/odoo,gvb/odoo,ingadhoc/odoo,JonathanStein/odoo,idncom/odoo,microcom/odoo,Daniel-CA/odoo,shaufi/odoo,dariemp/odoo,doomsterinc/odoo,nuuuboo/odoo,grap/OCB,cloud9UG/odoo,shivam1111/odoo,windedge/odoo,Codefans-fan/odoo,GauravSahu/odoo,apocalypsebg/odoo,hubsaysnuaa/odoo,eino-makitalo/odoo,pedrobaeza/OpenUpgrade,abenzbiria/clients_odoo,camptocamp/ngo-addons-backport,tvibliani/odoo,vrenaville/ngo-addons-backport,virgree/odoo,rschnapka/odoo,frouty/odoogoeen,KontorConsulting/odoo,Nowheresly/odoo,BT-rmartin/odoo,tangyiyong/odoo,osvalr/odoo,dsfsdgsbngfggb/odoo,podemos-info/odoo,luiseduardohdbackup/odoo,jiachenning/od
oo,alexteodor/odoo,Adel-Magebinary/odoo,BT-rmartin/odoo,camptocamp/ngo-addons-backport,GauravSahu/odoo,ramadhane/odoo,jaxkodex/odoo,rubencabrera/odoo,hip-odoo/odoo,ihsanudin/odoo,doomsterinc/odoo,dkubiak789/odoo,bkirui/odoo,virgree/odoo,ClearCorp-dev/odoo,shingonoide/odoo,OpenPymeMx/OCB,BT-astauder/odoo,damdam-s/OpenUpgrade,credativUK/OCB,bkirui/odoo,JGarcia-Panach/odoo,ramadhane/odoo,vnsofthe/odoo,vnsofthe/odoo,xujb/odoo,Drooids/odoo,collex100/odoo,ubic135/odoo-design,colinnewell/odoo,fjbatresv/odoo,thanhacun/odoo,grap/OCB,javierTerry/odoo,srimai/odoo,n0m4dz/odoo,csrocha/OpenUpgrade,ubic135/odoo-design,sinbazhou/odoo,bobisme/odoo,leorochael/odoo,incaser/odoo-odoo,aviciimaxwell/odoo,prospwro/odoo,kifcaliph/odoo
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv
from osv import fields
class res_partner(osv.osv):
""" Inherits partner and adds CRM information in the partner form """
_inherit = 'res.partner'
_columns = {
'emails': fields.one2many('email.message', 'partner_id',\
'Emails', readonly=True, domain=[('history','=',True)]),
}
res_partner()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
Remove history domain in partner in eamil module.
bzr revid: ysa@tinyerp.com-20110204091248-wnzm7ft6cx3v34p1
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv
from osv import fields
class res_partner(osv.osv):
""" Inherits partner and adds CRM information in the partner form """
_inherit = 'res.partner'
_columns = {
'emails': fields.one2many('email.message', 'partner_id', 'Emails', readonly=True),
}
res_partner()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv
from osv import fields
class res_partner(osv.osv):
""" Inherits partner and adds CRM information in the partner form """
_inherit = 'res.partner'
_columns = {
'emails': fields.one2many('email.message', 'partner_id',\
'Emails', readonly=True, domain=[('history','=',True)]),
}
res_partner()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_msg>Remove history domain in partner in eamil module.
bzr revid: ysa@tinyerp.com-20110204091248-wnzm7ft6cx3v34p1<commit_after>
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv
from osv import fields
class res_partner(osv.osv):
""" Inherits partner and adds CRM information in the partner form """
_inherit = 'res.partner'
_columns = {
'emails': fields.one2many('email.message', 'partner_id', 'Emails', readonly=True),
}
res_partner()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv
from osv import fields
class res_partner(osv.osv):
""" Inherits partner and adds CRM information in the partner form """
_inherit = 'res.partner'
_columns = {
'emails': fields.one2many('email.message', 'partner_id',\
'Emails', readonly=True, domain=[('history','=',True)]),
}
res_partner()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
Remove history domain in partner in eamil module.
bzr revid: ysa@tinyerp.com-20110204091248-wnzm7ft6cx3v34p1# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv
from osv import fields
class res_partner(osv.osv):
""" Inherits partner and adds CRM information in the partner form """
_inherit = 'res.partner'
_columns = {
'emails': fields.one2many('email.message', 'partner_id', 'Emails', readonly=True),
}
res_partner()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv
from osv import fields
class res_partner(osv.osv):
""" Inherits partner and adds CRM information in the partner form """
_inherit = 'res.partner'
_columns = {
'emails': fields.one2many('email.message', 'partner_id',\
'Emails', readonly=True, domain=[('history','=',True)]),
}
res_partner()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_msg>Remove history domain in partner in eamil module.
bzr revid: ysa@tinyerp.com-20110204091248-wnzm7ft6cx3v34p1<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv
from osv import fields
class res_partner(osv.osv):
""" Inherits partner and adds CRM information in the partner form """
_inherit = 'res.partner'
_columns = {
'emails': fields.one2many('email.message', 'partner_id', 'Emails', readonly=True),
}
res_partner()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
a80297b8e52ccc2560bcd0b8a204dad7eab4c925
|
website_payment_v10/__init__.py
|
website_payment_v10/__init__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
Use global LICENSE/COPYRIGHT files, remove boilerplate text
|
[LEGAL] Use global LICENSE/COPYRIGHT files, remove boilerplate text
- Preserved explicit 3rd-party copyright notices
- Explicit boilerplate should not be necessary - copyright law applies
automatically in all countries thanks to Berne Convention + WTO rules,
and a reference to the applicable license is clear enough.
|
Python
|
agpl-3.0
|
nicolas-petit/website,JayVora-SerpentCS/website,RoelAdriaans-B-informed/website,Tecnativa/website,RoelAdriaans-B-informed/website,khaeusler/website,khaeusler/website,Tecnativa/website,nicolas-petit/website,nicolas-petit/website,JayVora-SerpentCS/website,RoelAdriaans-B-informed/website,RoelAdriaans-B-informed/website,khaeusler/website,Tecnativa/website,JayVora-SerpentCS/website
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
[LEGAL] Use global LICENSE/COPYRIGHT files, remove boilerplate text
- Preserved explicit 3rd-party copyright notices
- Explicit boilerplate should not be necessary - copyright law applies
automatically in all countries thanks to Berne Convention + WTO rules,
and a reference to the applicable license is clear enough.
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
<commit_msg>[LEGAL] Use global LICENSE/COPYRIGHT files, remove boilerplate text
- Preserved explicit 3rd-party copyright notices
- Explicit boilerplate should not be necessary - copyright law applies
automatically in all countries thanks to Berne Convention + WTO rules,
and a reference to the applicable license is clear enough.<commit_after>
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
[LEGAL] Use global LICENSE/COPYRIGHT files, remove boilerplate text
- Preserved explicit 3rd-party copyright notices
- Explicit boilerplate should not be necessary - copyright law applies
automatically in all countries thanks to Berne Convention + WTO rules,
and a reference to the applicable license is clear enough.# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
<commit_msg>[LEGAL] Use global LICENSE/COPYRIGHT files, remove boilerplate text
- Preserved explicit 3rd-party copyright notices
- Explicit boilerplate should not be necessary - copyright law applies
automatically in all countries thanks to Berne Convention + WTO rules,
and a reference to the applicable license is clear enough.<commit_after># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
70a9da61f49b3f9a59cf0717daf23acb638851c1
|
powerline/lib/shell.py
|
powerline/lib/shell.py
|
# vim:fileencoding=utf-8:noet
from subprocess import Popen, PIPE
def run_cmd(pl, cmd, stdin=None):
try:
p = Popen(cmd, stdout=PIPE, stdin=PIPE)
except OSError as e:
pl.exception('Could not execute command ({0}): {1}'.format(e, cmd))
return None
else:
stdout, err = p.communicate(stdin)
return stdout.strip()
def asrun(pl, ascript):
'''Run the given AppleScript and return the standard output and error.'''
return run_cmd(pl, ['osascript', '-'], ascript)
|
# vim:fileencoding=utf-8:noet
from subprocess import Popen, PIPE
def run_cmd(pl, cmd, stdin=None):
try:
p = Popen(cmd, stdout=PIPE, stdin=PIPE)
except OSError as e:
pl.exception('Could not execute command ({0}): {1}', e, cmd)
return None
else:
stdout, err = p.communicate(stdin)
return stdout.strip()
def asrun(pl, ascript):
'''Run the given AppleScript and return the standard output and error.'''
return run_cmd(pl, ['osascript', '-'], ascript)
|
Format in PowerlineLogger._log, not in pl.exception arguments
|
Format in PowerlineLogger._log, not in pl.exception arguments
|
Python
|
mit
|
blindFS/powerline,dragon788/powerline,EricSB/powerline,DoctorJellyface/powerline,cyrixhero/powerline,russellb/powerline,lukw00/powerline,bartvm/powerline,junix/powerline,kenrachynski/powerline,junix/powerline,dragon788/powerline,IvanAli/powerline,darac/powerline,blindFS/powerline,prvnkumar/powerline,wfscheper/powerline,lukw00/powerline,xxxhycl2010/powerline,bezhermoso/powerline,DoctorJellyface/powerline,russellb/powerline,lukw00/powerline,magus424/powerline,junix/powerline,EricSB/powerline,xfumihiro/powerline,areteix/powerline,russellb/powerline,darac/powerline,EricSB/powerline,Liangjianghao/powerline,xxxhycl2010/powerline,IvanAli/powerline,bartvm/powerline,s0undt3ch/powerline,keelerm84/powerline,bezhermoso/powerline,seanfisk/powerline,firebitsbr/powerline,DoctorJellyface/powerline,seanfisk/powerline,firebitsbr/powerline,magus424/powerline,bezhermoso/powerline,QuLogic/powerline,xfumihiro/powerline,kenrachynski/powerline,darac/powerline,cyrixhero/powerline,S0lll0s/powerline,Liangjianghao/powerline,areteix/powerline,Luffin/powerline,magus424/powerline,cyrixhero/powerline,Liangjianghao/powerline,S0lll0s/powerline,kenrachynski/powerline,wfscheper/powerline,QuLogic/powerline,S0lll0s/powerline,IvanAli/powerline,dragon788/powerline,bartvm/powerline,seanfisk/powerline,Luffin/powerline,QuLogic/powerline,Luffin/powerline,areteix/powerline,s0undt3ch/powerline,firebitsbr/powerline,prvnkumar/powerline,keelerm84/powerline,prvnkumar/powerline,xfumihiro/powerline,blindFS/powerline,wfscheper/powerline,s0undt3ch/powerline,xxxhycl2010/powerline
|
# vim:fileencoding=utf-8:noet
from subprocess import Popen, PIPE
def run_cmd(pl, cmd, stdin=None):
try:
p = Popen(cmd, stdout=PIPE, stdin=PIPE)
except OSError as e:
pl.exception('Could not execute command ({0}): {1}'.format(e, cmd))
return None
else:
stdout, err = p.communicate(stdin)
return stdout.strip()
def asrun(pl, ascript):
'''Run the given AppleScript and return the standard output and error.'''
return run_cmd(pl, ['osascript', '-'], ascript)
Format in PowerlineLogger._log, not in pl.exception arguments
|
# vim:fileencoding=utf-8:noet
from subprocess import Popen, PIPE
def run_cmd(pl, cmd, stdin=None):
try:
p = Popen(cmd, stdout=PIPE, stdin=PIPE)
except OSError as e:
pl.exception('Could not execute command ({0}): {1}', e, cmd)
return None
else:
stdout, err = p.communicate(stdin)
return stdout.strip()
def asrun(pl, ascript):
'''Run the given AppleScript and return the standard output and error.'''
return run_cmd(pl, ['osascript', '-'], ascript)
|
<commit_before># vim:fileencoding=utf-8:noet
from subprocess import Popen, PIPE
def run_cmd(pl, cmd, stdin=None):
try:
p = Popen(cmd, stdout=PIPE, stdin=PIPE)
except OSError as e:
pl.exception('Could not execute command ({0}): {1}'.format(e, cmd))
return None
else:
stdout, err = p.communicate(stdin)
return stdout.strip()
def asrun(pl, ascript):
'''Run the given AppleScript and return the standard output and error.'''
return run_cmd(pl, ['osascript', '-'], ascript)
<commit_msg>Format in PowerlineLogger._log, not in pl.exception arguments<commit_after>
|
# vim:fileencoding=utf-8:noet
from subprocess import Popen, PIPE
def run_cmd(pl, cmd, stdin=None):
try:
p = Popen(cmd, stdout=PIPE, stdin=PIPE)
except OSError as e:
pl.exception('Could not execute command ({0}): {1}', e, cmd)
return None
else:
stdout, err = p.communicate(stdin)
return stdout.strip()
def asrun(pl, ascript):
'''Run the given AppleScript and return the standard output and error.'''
return run_cmd(pl, ['osascript', '-'], ascript)
|
# vim:fileencoding=utf-8:noet
from subprocess import Popen, PIPE
def run_cmd(pl, cmd, stdin=None):
try:
p = Popen(cmd, stdout=PIPE, stdin=PIPE)
except OSError as e:
pl.exception('Could not execute command ({0}): {1}'.format(e, cmd))
return None
else:
stdout, err = p.communicate(stdin)
return stdout.strip()
def asrun(pl, ascript):
'''Run the given AppleScript and return the standard output and error.'''
return run_cmd(pl, ['osascript', '-'], ascript)
Format in PowerlineLogger._log, not in pl.exception arguments# vim:fileencoding=utf-8:noet
from subprocess import Popen, PIPE
def run_cmd(pl, cmd, stdin=None):
try:
p = Popen(cmd, stdout=PIPE, stdin=PIPE)
except OSError as e:
pl.exception('Could not execute command ({0}): {1}', e, cmd)
return None
else:
stdout, err = p.communicate(stdin)
return stdout.strip()
def asrun(pl, ascript):
'''Run the given AppleScript and return the standard output and error.'''
return run_cmd(pl, ['osascript', '-'], ascript)
|
<commit_before># vim:fileencoding=utf-8:noet
from subprocess import Popen, PIPE
def run_cmd(pl, cmd, stdin=None):
try:
p = Popen(cmd, stdout=PIPE, stdin=PIPE)
except OSError as e:
pl.exception('Could not execute command ({0}): {1}'.format(e, cmd))
return None
else:
stdout, err = p.communicate(stdin)
return stdout.strip()
def asrun(pl, ascript):
'''Run the given AppleScript and return the standard output and error.'''
return run_cmd(pl, ['osascript', '-'], ascript)
<commit_msg>Format in PowerlineLogger._log, not in pl.exception arguments<commit_after># vim:fileencoding=utf-8:noet
from subprocess import Popen, PIPE
def run_cmd(pl, cmd, stdin=None):
try:
p = Popen(cmd, stdout=PIPE, stdin=PIPE)
except OSError as e:
pl.exception('Could not execute command ({0}): {1}', e, cmd)
return None
else:
stdout, err = p.communicate(stdin)
return stdout.strip()
def asrun(pl, ascript):
'''Run the given AppleScript and return the standard output and error.'''
return run_cmd(pl, ['osascript', '-'], ascript)
|
e7805528be294374b128dd6e40e3f8990b03cdac
|
main.py
|
main.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Run the Better Bomb Defusal Manual
:Copyright: 2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from importlib import import_module
from bombdefusalmanual.ui.console import ConsoleUI
from bombdefusalmanual.ui.models import Answer
ANSWERS = [
Answer('thebutton', 'The Button'),
Answer('complicatedwires', 'Complicated Wires'),
Answer('morsecode', 'Morse Code'),
Answer('passwords', 'Passwords'),
Answer('whosonfirst', 'Who\'s on First'),
]
def ask_for_subject(ui):
return ui.ask_for_choice('Which subject?', ANSWERS)
def import_subject_module(name):
return import_module('bombdefusalmanual.subjects.{}'.format(name))
if __name__ == '__main__':
ui = ConsoleUI()
subject_name = ask_for_subject(ui)
module = import_subject_module(subject_name)
module.execute(ui)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Run the Better Bomb Defusal Manual
:Copyright: 2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from argparse import ArgumentParser
from importlib import import_module
from bombdefusalmanual.ui.console import ConsoleUI
from bombdefusalmanual.ui.models import Answer
ANSWERS = [
Answer('thebutton', 'The Button'),
Answer('complicatedwires', 'Complicated Wires'),
Answer('morsecode', 'Morse Code'),
Answer('passwords', 'Passwords'),
Answer('whosonfirst', 'Who\'s on First'),
]
def parse_args():
parser = ArgumentParser()
parser.add_argument(
'--gui',
action='store_true',
default=False,
dest='use_gui',
help='use graphical user interface')
return parser.parse_args()
def get_ui(use_gui):
if use_gui:
from bombdefusalmanual.ui.tk import TkGUI
return TkGUI()
else:
return ConsoleUI()
def ask_for_subject(ui):
return ui.ask_for_choice('Which subject?', ANSWERS)
def import_subject_module(name):
return import_module('bombdefusalmanual.subjects.{}'.format(name))
if __name__ == '__main__':
args = parse_args()
ui = get_ui(args.use_gui)
subject_name = ask_for_subject(ui)
module = import_subject_module(subject_name)
module.execute(ui)
|
Allow to enable graphical UI via command line option.
|
Allow to enable graphical UI via command line option.
|
Python
|
mit
|
homeworkprod/better-bomb-defusal-manual,homeworkprod/better-bomb-defusal-manual
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Run the Better Bomb Defusal Manual
:Copyright: 2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from importlib import import_module
from bombdefusalmanual.ui.console import ConsoleUI
from bombdefusalmanual.ui.models import Answer
ANSWERS = [
Answer('thebutton', 'The Button'),
Answer('complicatedwires', 'Complicated Wires'),
Answer('morsecode', 'Morse Code'),
Answer('passwords', 'Passwords'),
Answer('whosonfirst', 'Who\'s on First'),
]
def ask_for_subject(ui):
return ui.ask_for_choice('Which subject?', ANSWERS)
def import_subject_module(name):
return import_module('bombdefusalmanual.subjects.{}'.format(name))
if __name__ == '__main__':
ui = ConsoleUI()
subject_name = ask_for_subject(ui)
module = import_subject_module(subject_name)
module.execute(ui)
Allow to enable graphical UI via command line option.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Run the Better Bomb Defusal Manual
:Copyright: 2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from argparse import ArgumentParser
from importlib import import_module
from bombdefusalmanual.ui.console import ConsoleUI
from bombdefusalmanual.ui.models import Answer
ANSWERS = [
Answer('thebutton', 'The Button'),
Answer('complicatedwires', 'Complicated Wires'),
Answer('morsecode', 'Morse Code'),
Answer('passwords', 'Passwords'),
Answer('whosonfirst', 'Who\'s on First'),
]
def parse_args():
parser = ArgumentParser()
parser.add_argument(
'--gui',
action='store_true',
default=False,
dest='use_gui',
help='use graphical user interface')
return parser.parse_args()
def get_ui(use_gui):
if use_gui:
from bombdefusalmanual.ui.tk import TkGUI
return TkGUI()
else:
return ConsoleUI()
def ask_for_subject(ui):
return ui.ask_for_choice('Which subject?', ANSWERS)
def import_subject_module(name):
return import_module('bombdefusalmanual.subjects.{}'.format(name))
if __name__ == '__main__':
args = parse_args()
ui = get_ui(args.use_gui)
subject_name = ask_for_subject(ui)
module = import_subject_module(subject_name)
module.execute(ui)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Run the Better Bomb Defusal Manual
:Copyright: 2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from importlib import import_module
from bombdefusalmanual.ui.console import ConsoleUI
from bombdefusalmanual.ui.models import Answer
ANSWERS = [
Answer('thebutton', 'The Button'),
Answer('complicatedwires', 'Complicated Wires'),
Answer('morsecode', 'Morse Code'),
Answer('passwords', 'Passwords'),
Answer('whosonfirst', 'Who\'s on First'),
]
def ask_for_subject(ui):
return ui.ask_for_choice('Which subject?', ANSWERS)
def import_subject_module(name):
return import_module('bombdefusalmanual.subjects.{}'.format(name))
if __name__ == '__main__':
ui = ConsoleUI()
subject_name = ask_for_subject(ui)
module = import_subject_module(subject_name)
module.execute(ui)
<commit_msg>Allow to enable graphical UI via command line option.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Run the Better Bomb Defusal Manual
:Copyright: 2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from argparse import ArgumentParser
from importlib import import_module
from bombdefusalmanual.ui.console import ConsoleUI
from bombdefusalmanual.ui.models import Answer
ANSWERS = [
Answer('thebutton', 'The Button'),
Answer('complicatedwires', 'Complicated Wires'),
Answer('morsecode', 'Morse Code'),
Answer('passwords', 'Passwords'),
Answer('whosonfirst', 'Who\'s on First'),
]
def parse_args():
parser = ArgumentParser()
parser.add_argument(
'--gui',
action='store_true',
default=False,
dest='use_gui',
help='use graphical user interface')
return parser.parse_args()
def get_ui(use_gui):
if use_gui:
from bombdefusalmanual.ui.tk import TkGUI
return TkGUI()
else:
return ConsoleUI()
def ask_for_subject(ui):
return ui.ask_for_choice('Which subject?', ANSWERS)
def import_subject_module(name):
return import_module('bombdefusalmanual.subjects.{}'.format(name))
if __name__ == '__main__':
args = parse_args()
ui = get_ui(args.use_gui)
subject_name = ask_for_subject(ui)
module = import_subject_module(subject_name)
module.execute(ui)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Run the Better Bomb Defusal Manual
:Copyright: 2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from importlib import import_module
from bombdefusalmanual.ui.console import ConsoleUI
from bombdefusalmanual.ui.models import Answer
ANSWERS = [
Answer('thebutton', 'The Button'),
Answer('complicatedwires', 'Complicated Wires'),
Answer('morsecode', 'Morse Code'),
Answer('passwords', 'Passwords'),
Answer('whosonfirst', 'Who\'s on First'),
]
def ask_for_subject(ui):
return ui.ask_for_choice('Which subject?', ANSWERS)
def import_subject_module(name):
return import_module('bombdefusalmanual.subjects.{}'.format(name))
if __name__ == '__main__':
ui = ConsoleUI()
subject_name = ask_for_subject(ui)
module = import_subject_module(subject_name)
module.execute(ui)
Allow to enable graphical UI via command line option.#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Run the Better Bomb Defusal Manual
:Copyright: 2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from argparse import ArgumentParser
from importlib import import_module
from bombdefusalmanual.ui.console import ConsoleUI
from bombdefusalmanual.ui.models import Answer
ANSWERS = [
Answer('thebutton', 'The Button'),
Answer('complicatedwires', 'Complicated Wires'),
Answer('morsecode', 'Morse Code'),
Answer('passwords', 'Passwords'),
Answer('whosonfirst', 'Who\'s on First'),
]
def parse_args():
parser = ArgumentParser()
parser.add_argument(
'--gui',
action='store_true',
default=False,
dest='use_gui',
help='use graphical user interface')
return parser.parse_args()
def get_ui(use_gui):
if use_gui:
from bombdefusalmanual.ui.tk import TkGUI
return TkGUI()
else:
return ConsoleUI()
def ask_for_subject(ui):
return ui.ask_for_choice('Which subject?', ANSWERS)
def import_subject_module(name):
return import_module('bombdefusalmanual.subjects.{}'.format(name))
if __name__ == '__main__':
args = parse_args()
ui = get_ui(args.use_gui)
subject_name = ask_for_subject(ui)
module = import_subject_module(subject_name)
module.execute(ui)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Run the Better Bomb Defusal Manual
:Copyright: 2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from importlib import import_module
from bombdefusalmanual.ui.console import ConsoleUI
from bombdefusalmanual.ui.models import Answer
ANSWERS = [
Answer('thebutton', 'The Button'),
Answer('complicatedwires', 'Complicated Wires'),
Answer('morsecode', 'Morse Code'),
Answer('passwords', 'Passwords'),
Answer('whosonfirst', 'Who\'s on First'),
]
def ask_for_subject(ui):
return ui.ask_for_choice('Which subject?', ANSWERS)
def import_subject_module(name):
return import_module('bombdefusalmanual.subjects.{}'.format(name))
if __name__ == '__main__':
ui = ConsoleUI()
subject_name = ask_for_subject(ui)
module = import_subject_module(subject_name)
module.execute(ui)
<commit_msg>Allow to enable graphical UI via command line option.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Run the Better Bomb Defusal Manual
:Copyright: 2015 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from argparse import ArgumentParser
from importlib import import_module
from bombdefusalmanual.ui.console import ConsoleUI
from bombdefusalmanual.ui.models import Answer
ANSWERS = [
Answer('thebutton', 'The Button'),
Answer('complicatedwires', 'Complicated Wires'),
Answer('morsecode', 'Morse Code'),
Answer('passwords', 'Passwords'),
Answer('whosonfirst', 'Who\'s on First'),
]
def parse_args():
parser = ArgumentParser()
parser.add_argument(
'--gui',
action='store_true',
default=False,
dest='use_gui',
help='use graphical user interface')
return parser.parse_args()
def get_ui(use_gui):
if use_gui:
from bombdefusalmanual.ui.tk import TkGUI
return TkGUI()
else:
return ConsoleUI()
def ask_for_subject(ui):
return ui.ask_for_choice('Which subject?', ANSWERS)
def import_subject_module(name):
return import_module('bombdefusalmanual.subjects.{}'.format(name))
if __name__ == '__main__':
args = parse_args()
ui = get_ui(args.use_gui)
subject_name = ask_for_subject(ui)
module = import_subject_module(subject_name)
module.execute(ui)
|
8541737b5a3a50188162349727a0d0230613e630
|
test/features/test_create_pages.py
|
test/features/test_create_pages.py
|
from hamcrest import *
from nose.tools import nottest
from test.features import BrowserTest
class test_create_pages(BrowserTest):
def test_about_page(self):
self.browser.visit("http://0.0.0.0:8000/high-volume-services/by-transactions-per-year/descending")
assert_that(self.browser.find_by_css('h1').text, is_('High-volume services'))
def test_home_page(self):
self.browser.visit("http://0.0.0.0:8000/home")
headlines = self.browser.find_by_css('.headline')
departments = headlines[0].text
services = headlines[1].text
transactions = headlines[2].text
assert_that(departments, contains_string('16'))
assert_that(services, contains_string('654'))
assert_that(transactions, contains_string('1.31bn'))
@nottest
def test_all_services(self):
self.browser.visit("http://0.0.0.0:8000/all-services")
assert_that(self.browser.find_by_css('h1').text, is_("All Services"))
assert_that(self.browser.find_by_css('#navigation .current').text, is_("All services"))
|
from hamcrest import *
from nose.tools import nottest
from test.features import BrowserTest
class test_create_pages(BrowserTest):
def test_about_page(self):
self.browser.visit("http://0.0.0.0:8000/high-volume-services/"
"by-transactions-per-year/descending")
assert_that(self.browser.find_by_css('h1').text,
is_('High-volume services'))
def test_home_page(self):
self.browser.visit("http://0.0.0.0:8000/home")
headlines = self.browser.find_by_css('.headline')
departments = headlines[0].text
services = headlines[1].text
transactions = headlines[2].text
assert_that(departments, contains_string('16'))
assert_that(services, contains_string('658'))
assert_that(transactions, contains_string('1.31bn'))
@nottest
def test_all_services(self):
self.browser.visit("http://0.0.0.0:8000/all-services")
assert_that(self.browser.find_by_css('h1').text, is_("All Services"))
assert_that(self.browser.find_by_css('#navigation .current').text,
is_("All services"))
|
Change services number in test
|
Change services number in test
|
Python
|
mit
|
alphagov/transactions-explorer,gds-attic/transactions-explorer,alphagov/transactions-explorer,alphagov/transactions-explorer,gds-attic/transactions-explorer,gds-attic/transactions-explorer,alphagov/transactions-explorer,alphagov/transactions-explorer,gds-attic/transactions-explorer,gds-attic/transactions-explorer
|
from hamcrest import *
from nose.tools import nottest
from test.features import BrowserTest
class test_create_pages(BrowserTest):
def test_about_page(self):
self.browser.visit("http://0.0.0.0:8000/high-volume-services/by-transactions-per-year/descending")
assert_that(self.browser.find_by_css('h1').text, is_('High-volume services'))
def test_home_page(self):
self.browser.visit("http://0.0.0.0:8000/home")
headlines = self.browser.find_by_css('.headline')
departments = headlines[0].text
services = headlines[1].text
transactions = headlines[2].text
assert_that(departments, contains_string('16'))
assert_that(services, contains_string('654'))
assert_that(transactions, contains_string('1.31bn'))
@nottest
def test_all_services(self):
self.browser.visit("http://0.0.0.0:8000/all-services")
assert_that(self.browser.find_by_css('h1').text, is_("All Services"))
assert_that(self.browser.find_by_css('#navigation .current').text, is_("All services"))
Change services number in test
|
from hamcrest import *
from nose.tools import nottest
from test.features import BrowserTest
class test_create_pages(BrowserTest):
def test_about_page(self):
self.browser.visit("http://0.0.0.0:8000/high-volume-services/"
"by-transactions-per-year/descending")
assert_that(self.browser.find_by_css('h1').text,
is_('High-volume services'))
def test_home_page(self):
self.browser.visit("http://0.0.0.0:8000/home")
headlines = self.browser.find_by_css('.headline')
departments = headlines[0].text
services = headlines[1].text
transactions = headlines[2].text
assert_that(departments, contains_string('16'))
assert_that(services, contains_string('658'))
assert_that(transactions, contains_string('1.31bn'))
@nottest
def test_all_services(self):
self.browser.visit("http://0.0.0.0:8000/all-services")
assert_that(self.browser.find_by_css('h1').text, is_("All Services"))
assert_that(self.browser.find_by_css('#navigation .current').text,
is_("All services"))
|
<commit_before>from hamcrest import *
from nose.tools import nottest
from test.features import BrowserTest
class test_create_pages(BrowserTest):
def test_about_page(self):
self.browser.visit("http://0.0.0.0:8000/high-volume-services/by-transactions-per-year/descending")
assert_that(self.browser.find_by_css('h1').text, is_('High-volume services'))
def test_home_page(self):
self.browser.visit("http://0.0.0.0:8000/home")
headlines = self.browser.find_by_css('.headline')
departments = headlines[0].text
services = headlines[1].text
transactions = headlines[2].text
assert_that(departments, contains_string('16'))
assert_that(services, contains_string('654'))
assert_that(transactions, contains_string('1.31bn'))
@nottest
def test_all_services(self):
self.browser.visit("http://0.0.0.0:8000/all-services")
assert_that(self.browser.find_by_css('h1').text, is_("All Services"))
assert_that(self.browser.find_by_css('#navigation .current').text, is_("All services"))
<commit_msg>Change services number in test<commit_after>
|
from hamcrest import *
from nose.tools import nottest
from test.features import BrowserTest
class test_create_pages(BrowserTest):
def test_about_page(self):
self.browser.visit("http://0.0.0.0:8000/high-volume-services/"
"by-transactions-per-year/descending")
assert_that(self.browser.find_by_css('h1').text,
is_('High-volume services'))
def test_home_page(self):
self.browser.visit("http://0.0.0.0:8000/home")
headlines = self.browser.find_by_css('.headline')
departments = headlines[0].text
services = headlines[1].text
transactions = headlines[2].text
assert_that(departments, contains_string('16'))
assert_that(services, contains_string('658'))
assert_that(transactions, contains_string('1.31bn'))
@nottest
def test_all_services(self):
self.browser.visit("http://0.0.0.0:8000/all-services")
assert_that(self.browser.find_by_css('h1').text, is_("All Services"))
assert_that(self.browser.find_by_css('#navigation .current').text,
is_("All services"))
|
from hamcrest import *
from nose.tools import nottest
from test.features import BrowserTest
class test_create_pages(BrowserTest):
def test_about_page(self):
self.browser.visit("http://0.0.0.0:8000/high-volume-services/by-transactions-per-year/descending")
assert_that(self.browser.find_by_css('h1').text, is_('High-volume services'))
def test_home_page(self):
self.browser.visit("http://0.0.0.0:8000/home")
headlines = self.browser.find_by_css('.headline')
departments = headlines[0].text
services = headlines[1].text
transactions = headlines[2].text
assert_that(departments, contains_string('16'))
assert_that(services, contains_string('654'))
assert_that(transactions, contains_string('1.31bn'))
@nottest
def test_all_services(self):
self.browser.visit("http://0.0.0.0:8000/all-services")
assert_that(self.browser.find_by_css('h1').text, is_("All Services"))
assert_that(self.browser.find_by_css('#navigation .current').text, is_("All services"))
Change services number in testfrom hamcrest import *
from nose.tools import nottest
from test.features import BrowserTest
class test_create_pages(BrowserTest):
def test_about_page(self):
self.browser.visit("http://0.0.0.0:8000/high-volume-services/"
"by-transactions-per-year/descending")
assert_that(self.browser.find_by_css('h1').text,
is_('High-volume services'))
def test_home_page(self):
self.browser.visit("http://0.0.0.0:8000/home")
headlines = self.browser.find_by_css('.headline')
departments = headlines[0].text
services = headlines[1].text
transactions = headlines[2].text
assert_that(departments, contains_string('16'))
assert_that(services, contains_string('658'))
assert_that(transactions, contains_string('1.31bn'))
@nottest
def test_all_services(self):
self.browser.visit("http://0.0.0.0:8000/all-services")
assert_that(self.browser.find_by_css('h1').text, is_("All Services"))
assert_that(self.browser.find_by_css('#navigation .current').text,
is_("All services"))
|
<commit_before>from hamcrest import *
from nose.tools import nottest
from test.features import BrowserTest
class test_create_pages(BrowserTest):
def test_about_page(self):
self.browser.visit("http://0.0.0.0:8000/high-volume-services/by-transactions-per-year/descending")
assert_that(self.browser.find_by_css('h1').text, is_('High-volume services'))
def test_home_page(self):
self.browser.visit("http://0.0.0.0:8000/home")
headlines = self.browser.find_by_css('.headline')
departments = headlines[0].text
services = headlines[1].text
transactions = headlines[2].text
assert_that(departments, contains_string('16'))
assert_that(services, contains_string('654'))
assert_that(transactions, contains_string('1.31bn'))
@nottest
def test_all_services(self):
self.browser.visit("http://0.0.0.0:8000/all-services")
assert_that(self.browser.find_by_css('h1').text, is_("All Services"))
assert_that(self.browser.find_by_css('#navigation .current').text, is_("All services"))
<commit_msg>Change services number in test<commit_after>from hamcrest import *
from nose.tools import nottest
from test.features import BrowserTest
class test_create_pages(BrowserTest):
def test_about_page(self):
self.browser.visit("http://0.0.0.0:8000/high-volume-services/"
"by-transactions-per-year/descending")
assert_that(self.browser.find_by_css('h1').text,
is_('High-volume services'))
def test_home_page(self):
self.browser.visit("http://0.0.0.0:8000/home")
headlines = self.browser.find_by_css('.headline')
departments = headlines[0].text
services = headlines[1].text
transactions = headlines[2].text
assert_that(departments, contains_string('16'))
assert_that(services, contains_string('658'))
assert_that(transactions, contains_string('1.31bn'))
@nottest
def test_all_services(self):
self.browser.visit("http://0.0.0.0:8000/all-services")
assert_that(self.browser.find_by_css('h1').text, is_("All Services"))
assert_that(self.browser.find_by_css('#navigation .current').text,
is_("All services"))
|
9696b687a31a249fc228e58773ff55eacf8beaaa
|
src/vrun/compat.py
|
src/vrun/compat.py
|
# flake8: noqa
import sys
PY2 = sys.version_info[0] == 2
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import SafeConfigParser as ConfigParser
|
# flake8: noqa
import sys
PY2 = sys.version_info[0] == 2
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import SafeConfigParser as ConfigParser
if not hasattr(ConfigParser, 'read_dict'):
def read_dict(self, dictionary, source='<dict>'):
for (section, options) in dictionary.items():
if (
section
not in {
self.default_section if hasattr(self, 'default_section')
else 'DEFAULT'
}
):
self.add_section(section)
for (option, value) in options.items():
self.set(section, option, value)
ConfigParser.read_dict = read_dict
|
Add a read_dict function if it does not exist
|
Add a read_dict function if it does not exist
This is used for testing purposes, but when we drop Py2 support we can
more easily remove it here than worrying about removing a monkeypatch in
the testing code.
|
Python
|
isc
|
bertjwregeer/vrun
|
# flake8: noqa
import sys
PY2 = sys.version_info[0] == 2
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import SafeConfigParser as ConfigParser
Add a read_dict function if it does not exist
This is used for testing purposes, but when we drop Py2 support we can
more easily remove it here than worrying about removing a monkeypatch in
the testing code.
|
# flake8: noqa
import sys
PY2 = sys.version_info[0] == 2
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import SafeConfigParser as ConfigParser
if not hasattr(ConfigParser, 'read_dict'):
def read_dict(self, dictionary, source='<dict>'):
for (section, options) in dictionary.items():
if (
section
not in {
self.default_section if hasattr(self, 'default_section')
else 'DEFAULT'
}
):
self.add_section(section)
for (option, value) in options.items():
self.set(section, option, value)
ConfigParser.read_dict = read_dict
|
<commit_before># flake8: noqa
import sys
PY2 = sys.version_info[0] == 2
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import SafeConfigParser as ConfigParser
<commit_msg>Add a read_dict function if it does not exist
This is used for testing purposes, but when we drop Py2 support we can
more easily remove it here than worrying about removing a monkeypatch in
the testing code.<commit_after>
|
# flake8: noqa
import sys
PY2 = sys.version_info[0] == 2
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import SafeConfigParser as ConfigParser
if not hasattr(ConfigParser, 'read_dict'):
def read_dict(self, dictionary, source='<dict>'):
for (section, options) in dictionary.items():
if (
section
not in {
self.default_section if hasattr(self, 'default_section')
else 'DEFAULT'
}
):
self.add_section(section)
for (option, value) in options.items():
self.set(section, option, value)
ConfigParser.read_dict = read_dict
|
# flake8: noqa
import sys
PY2 = sys.version_info[0] == 2
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import SafeConfigParser as ConfigParser
Add a read_dict function if it does not exist
This is used for testing purposes, but when we drop Py2 support we can
more easily remove it here than worrying about removing a monkeypatch in
the testing code.# flake8: noqa
import sys
PY2 = sys.version_info[0] == 2
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import SafeConfigParser as ConfigParser
if not hasattr(ConfigParser, 'read_dict'):
def read_dict(self, dictionary, source='<dict>'):
for (section, options) in dictionary.items():
if (
section
not in {
self.default_section if hasattr(self, 'default_section')
else 'DEFAULT'
}
):
self.add_section(section)
for (option, value) in options.items():
self.set(section, option, value)
ConfigParser.read_dict = read_dict
|
<commit_before># flake8: noqa
import sys
PY2 = sys.version_info[0] == 2
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import SafeConfigParser as ConfigParser
<commit_msg>Add a read_dict function if it does not exist
This is used for testing purposes, but when we drop Py2 support we can
more easily remove it here than worrying about removing a monkeypatch in
the testing code.<commit_after># flake8: noqa
import sys
PY2 = sys.version_info[0] == 2
try:
from configparser import ConfigParser
except ImportError:
from ConfigParser import SafeConfigParser as ConfigParser
if not hasattr(ConfigParser, 'read_dict'):
def read_dict(self, dictionary, source='<dict>'):
for (section, options) in dictionary.items():
if (
section
not in {
self.default_section if hasattr(self, 'default_section')
else 'DEFAULT'
}
):
self.add_section(section)
for (option, value) in options.items():
self.set(section, option, value)
ConfigParser.read_dict = read_dict
|
a1a6312a34bebec7045169727192380a3e76cf39
|
tests/app/main/test_application.py
|
tests/app/main/test_application.py
|
import mock
from nose.tools import assert_equal, assert_true
from ..helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_analytics_code_should_be_in_javascript(self):
res = self.client.get('/suppliers/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
'GOVUK.analytics.trackPageview'
in res.get_data(as_text=True))
|
import mock
from nose.tools import assert_equal, assert_true
from ..helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_analytics_code_should_be_in_javascript(self):
res = self.client.get('/suppliers/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
'analytics.trackPageview'
in res.get_data(as_text=True))
|
Allow test for analytics to deal with compiled JS
|
Allow test for analytics to deal with compiled JS
|
Python
|
mit
|
alphagov/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend
|
import mock
from nose.tools import assert_equal, assert_true
from ..helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_analytics_code_should_be_in_javascript(self):
res = self.client.get('/suppliers/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
'GOVUK.analytics.trackPageview'
in res.get_data(as_text=True))
Allow test for analytics to deal with compiled JS
|
import mock
from nose.tools import assert_equal, assert_true
from ..helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_analytics_code_should_be_in_javascript(self):
res = self.client.get('/suppliers/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
'analytics.trackPageview'
in res.get_data(as_text=True))
|
<commit_before>import mock
from nose.tools import assert_equal, assert_true
from ..helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_analytics_code_should_be_in_javascript(self):
res = self.client.get('/suppliers/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
'GOVUK.analytics.trackPageview'
in res.get_data(as_text=True))
<commit_msg>Allow test for analytics to deal with compiled JS<commit_after>
|
import mock
from nose.tools import assert_equal, assert_true
from ..helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_analytics_code_should_be_in_javascript(self):
res = self.client.get('/suppliers/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
'analytics.trackPageview'
in res.get_data(as_text=True))
|
import mock
from nose.tools import assert_equal, assert_true
from ..helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_analytics_code_should_be_in_javascript(self):
res = self.client.get('/suppliers/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
'GOVUK.analytics.trackPageview'
in res.get_data(as_text=True))
Allow test for analytics to deal with compiled JSimport mock
from nose.tools import assert_equal, assert_true
from ..helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_analytics_code_should_be_in_javascript(self):
res = self.client.get('/suppliers/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
'analytics.trackPageview'
in res.get_data(as_text=True))
|
<commit_before>import mock
from nose.tools import assert_equal, assert_true
from ..helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_analytics_code_should_be_in_javascript(self):
res = self.client.get('/suppliers/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
'GOVUK.analytics.trackPageview'
in res.get_data(as_text=True))
<commit_msg>Allow test for analytics to deal with compiled JS<commit_after>import mock
from nose.tools import assert_equal, assert_true
from ..helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_analytics_code_should_be_in_javascript(self):
res = self.client.get('/suppliers/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
'analytics.trackPageview'
in res.get_data(as_text=True))
|
5da3928442a884c7b5905ca2b17362ca99fffc2c
|
marten/__init__.py
|
marten/__init__.py
|
"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.0'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
config = None
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
|
"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.1'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
else:
from .configurations import Configuration as _Configuration
config = _Configuration({})
|
Return empty Configuration instance when .marten/ directory is missing
|
Return empty Configuration instance when .marten/ directory is missing
|
Python
|
mit
|
nick-allen/marten
|
"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.0'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
config = None
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
Return empty Configuration instance when .marten/ directory is missing
|
"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.1'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
else:
from .configurations import Configuration as _Configuration
config = _Configuration({})
|
<commit_before>"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.0'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
config = None
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
<commit_msg>Return empty Configuration instance when .marten/ directory is missing<commit_after>
|
"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.1'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
else:
from .configurations import Configuration as _Configuration
config = _Configuration({})
|
"""Stupid simple Python configuration management"""
from __future__ import absolute_import

import os as _os

__version__ = '0.5.0'

# Attempt to auto-load a configuration from <cwd>/.marten/ based on the
# MARTEN_ENV environment variable (falls back to 'default' when unset).
# `config` stays None when no .marten/ directory exists.
config = None
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_env = _os.environ.setdefault('MARTEN_ENV', 'default')
if _os.path.isdir(_marten_dir):
    from .configurations import parse_directory as _parse_directory
    config = _parse_directory(_marten_dir, _env)
Return empty Configuration instance when .marten/ directory is missing"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.1'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
else:
from .configurations import Configuration as _Configuration
config = _Configuration({})
|
<commit_before>"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.0'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
config = None
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
<commit_msg>Return empty Configuration instance when .marten/ directory is missing<commit_after>"""Stupid simple Python configuration management"""
from __future__ import absolute_import
import os as _os
__version__ = '0.5.1'
# Attempt to auto-load a default configuration from files in <cwd>/.marten/ based on the MARTEN_ENV env variable
# MARTEN_ENV defaults to 'default'
_marten_dir = _os.path.join(_os.getcwd(), '.marten')
_os.environ.setdefault('MARTEN_ENV', 'default')
if _os.path.isdir(_marten_dir):
from .configurations import parse_directory as _parse_directory
config = _parse_directory(_marten_dir, _os.environ['MARTEN_ENV'])
else:
from .configurations import Configuration as _Configuration
config = _Configuration({})
|
c39d3801beece54d7403b1a1c7956839aa20df79
|
plot.py
|
plot.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import matplotlib.pyplot as plt
import pandas as pd
parser = argparse.ArgumentParser(description='Plot data from output of the n-body simulation.')
parser.add_argument('--output', type=str, default='output_int.dat',
help='The output file (default %(default)s)')
args = parser.parse_args()
if __name__ == '__main__':
# Read data as CSV
data = pd.read_csv(args.output, delim_whitespace=True)
# Plot
plt.plot(data.t, data.Ep, label='$Ep$')
plt.plot(data.t, data.Ec, label='$Ec$')
plt.plot(data.t, data.E, label='$E_\mathrm{tot}$')
plt.grid('on')
plt.legend()
plt.show()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Plot the energy columns written by the n-body simulation."""

import argparse

import matplotlib.pyplot as plt
import pandas as pd

parser = argparse.ArgumentParser(description='Plot data from output of the n-body simulation.')
parser.add_argument('--output', type=str, default='output_int.dat',
                    help='The output file (default %(default)s)')
args = parser.parse_args()

if __name__ == '__main__':
    # Read whitespace-delimited data; sep=r'\s+' is the documented
    # equivalent of the deprecated delim_whitespace=True flag.
    data = pd.read_csv(args.output, sep=r'\s+')

    # Energies can change sign and span decades, so plot magnitudes
    # on a logarithmic axis. Raw strings keep the LaTeX backslashes
    # from triggering invalid-escape warnings.
    plt.plot(data.t, data.Ep.abs(), label=r'$Ep$')
    plt.plot(data.t, data.Ec.abs(), label=r'$Ec$')
    plt.plot(data.t, data.E.abs(), label=r'$E_\mathrm{tot}$')
    plt.yscale('log')
    # grid() takes a boolean visibility flag; the string 'on' relied on
    # truthiness and is rejected by newer matplotlib releases.
    plt.grid(True)
    plt.legend()
    plt.show()
|
Use absolute value for E and logscale
|
Use absolute value for E and logscale
|
Python
|
mit
|
cphyc/n-body,cphyc/n-body
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Plot energy columns from an n-body simulation output file."""

import argparse

import matplotlib.pyplot as plt
import pandas as pd

parser = argparse.ArgumentParser(description='Plot data from output of the n-body simulation.')
parser.add_argument('--output', type=str, default='output_int.dat',
                    help='The output file (default %(default)s)')
args = parser.parse_args()

if __name__ == '__main__':
    # Whitespace-delimited table produced by the simulation.
    data = pd.read_csv(args.output, delim_whitespace=True)

    # Plot each energy series against time with its LaTeX label.
    for series, label in ((data.Ep, '$Ep$'),
                          (data.Ec, '$Ec$'),
                          (data.E, '$E_\mathrm{tot}$')):
        plt.plot(data.t, series, label=label)
    plt.grid('on')
    plt.legend()
    plt.show()
Use absolute value for E and logscale
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import matplotlib.pyplot as plt
import pandas as pd
parser = argparse.ArgumentParser(description='Plot data from output of the n-body simulation.')
parser.add_argument('--output', type=str, default='output_int.dat',
help='The output file (default %(default)s)')
args = parser.parse_args()
if __name__ == '__main__':
# Read data as CSV
data = pd.read_csv(args.output, delim_whitespace=True)
# Plot
plt.plot(data.t, data.Ep.abs(), label='$Ep$')
plt.plot(data.t, data.Ec.abs(), label='$Ec$')
plt.plot(data.t, data.E.abs(), label='$E_\mathrm{tot}$')
plt.yscale('log')
plt.grid('on')
plt.legend()
plt.show()
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import matplotlib.pyplot as plt
import pandas as pd
parser = argparse.ArgumentParser(description='Plot data from output of the n-body simulation.')
parser.add_argument('--output', type=str, default='output_int.dat',
help='The output file (default %(default)s)')
args = parser.parse_args()
if __name__ == '__main__':
# Read data as CSV
data = pd.read_csv(args.output, delim_whitespace=True)
# Plot
plt.plot(data.t, data.Ep, label='$Ep$')
plt.plot(data.t, data.Ec, label='$Ec$')
plt.plot(data.t, data.E, label='$E_\mathrm{tot}$')
plt.grid('on')
plt.legend()
plt.show()
<commit_msg>Use absolute value for E and logscale<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import matplotlib.pyplot as plt
import pandas as pd
parser = argparse.ArgumentParser(description='Plot data from output of the n-body simulation.')
parser.add_argument('--output', type=str, default='output_int.dat',
help='The output file (default %(default)s)')
args = parser.parse_args()
if __name__ == '__main__':
# Read data as CSV
data = pd.read_csv(args.output, delim_whitespace=True)
# Plot
plt.plot(data.t, data.Ep.abs(), label='$Ep$')
plt.plot(data.t, data.Ec.abs(), label='$Ec$')
plt.plot(data.t, data.E.abs(), label='$E_\mathrm{tot}$')
plt.yscale('log')
plt.grid('on')
plt.legend()
plt.show()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import matplotlib.pyplot as plt
import pandas as pd
parser = argparse.ArgumentParser(description='Plot data from output of the n-body simulation.')
parser.add_argument('--output', type=str, default='output_int.dat',
help='The output file (default %(default)s)')
args = parser.parse_args()
if __name__ == '__main__':
# Read data as CSV
data = pd.read_csv(args.output, delim_whitespace=True)
# Plot
plt.plot(data.t, data.Ep, label='$Ep$')
plt.plot(data.t, data.Ec, label='$Ec$')
plt.plot(data.t, data.E, label='$E_\mathrm{tot}$')
plt.grid('on')
plt.legend()
plt.show()
Use absolute value for E and logscale#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import matplotlib.pyplot as plt
import pandas as pd
parser = argparse.ArgumentParser(description='Plot data from output of the n-body simulation.')
parser.add_argument('--output', type=str, default='output_int.dat',
help='The output file (default %(default)s)')
args = parser.parse_args()
if __name__ == '__main__':
# Read data as CSV
data = pd.read_csv(args.output, delim_whitespace=True)
# Plot
plt.plot(data.t, data.Ep.abs(), label='$Ep$')
plt.plot(data.t, data.Ec.abs(), label='$Ec$')
plt.plot(data.t, data.E.abs(), label='$E_\mathrm{tot}$')
plt.yscale('log')
plt.grid('on')
plt.legend()
plt.show()
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import matplotlib.pyplot as plt
import pandas as pd
parser = argparse.ArgumentParser(description='Plot data from output of the n-body simulation.')
parser.add_argument('--output', type=str, default='output_int.dat',
help='The output file (default %(default)s)')
args = parser.parse_args()
if __name__ == '__main__':
# Read data as CSV
data = pd.read_csv(args.output, delim_whitespace=True)
# Plot
plt.plot(data.t, data.Ep, label='$Ep$')
plt.plot(data.t, data.Ec, label='$Ec$')
plt.plot(data.t, data.E, label='$E_\mathrm{tot}$')
plt.grid('on')
plt.legend()
plt.show()
<commit_msg>Use absolute value for E and logscale<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import matplotlib.pyplot as plt
import pandas as pd
parser = argparse.ArgumentParser(description='Plot data from output of the n-body simulation.')
parser.add_argument('--output', type=str, default='output_int.dat',
help='The output file (default %(default)s)')
args = parser.parse_args()
if __name__ == '__main__':
# Read data as CSV
data = pd.read_csv(args.output, delim_whitespace=True)
# Plot
plt.plot(data.t, data.Ep.abs(), label='$Ep$')
plt.plot(data.t, data.Ec.abs(), label='$Ec$')
plt.plot(data.t, data.E.abs(), label='$E_\mathrm{tot}$')
plt.yscale('log')
plt.grid('on')
plt.legend()
plt.show()
|
2d5c1064b951f3628e8a4f4a6fadc9c45d490094
|
tools/bots/pub_integration_test.py
|
tools/bots/pub_integration_test.py
|
#!/usr/bin/env python
# Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import subprocess
import sys
import shutil
import tempfile
PUBSPEC = """name: pub_integration_test
dependencies:
shelf:
test:
"""
def Main():
out_dir = 'xcodebuild' if sys.platform == 'darwin' else 'out'
pub = os.path.abspath('%s/ReleaseX64/dart-sdk/bin/pub' % out_dir)
working_dir = tempfile.mkdtemp()
try:
pub_cache_dir = working_dir + '/pub_cache'
env = { 'PUB_CACHE': pub_cache_dir }
with open(working_dir + '/pubspec.yaml', 'w') as pubspec_yaml:
pubspec_yaml.write(PUBSPEC)
exit_code = subprocess.call([pub, 'get'], cwd=working_dir, env=env)
if exit_code is not 0:
return exit_code
exit_code = subprocess.call([pub, 'upgrade'], cwd=working_dir, env=env)
if exit_code is not 0:
return exit_code
finally:
shutil.rmtree(working_dir);
if __name__ == '__main__':
sys.exit(Main())
|
#!/usr/bin/env python
# Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import subprocess
import sys
import shutil
import tempfile
PUBSPEC = """name: pub_integration_test
dependencies:
shelf:
test:
"""
def Main():
out_dir = 'xcodebuild' if sys.platform == 'darwin' else 'out'
extension = '' if not sys.platform == 'win32' else '.bat'
pub = os.path.abspath(
'%s/ReleaseX64/dart-sdk/bin/pub%s' % (out_dir, extension))
working_dir = tempfile.mkdtemp()
try:
pub_cache_dir = working_dir + '/pub_cache'
env = os.environ.copy()
env['PUB_CACHE'] = pub_cache_dir
with open(working_dir + '/pubspec.yaml', 'w') as pubspec_yaml:
pubspec_yaml.write(PUBSPEC)
exit_code = subprocess.call([pub, 'get'], cwd=working_dir, env=env)
if exit_code is not 0:
return exit_code
exit_code = subprocess.call([pub, 'upgrade'], cwd=working_dir, env=env)
if exit_code is not 0:
return exit_code
finally:
shutil.rmtree(working_dir);
if __name__ == '__main__':
sys.exit(Main())
|
Add .bat to pub command on Windows
|
[infra] Add .bat to pub command on Windows
#32656
Change-Id: I3a34bf2c81676eea0ab112a8aad701962590a6c3
Reviewed-on: https://dart-review.googlesource.com/55165
Commit-Queue: Alexander Thomas <29642742b6693024c89de8232f2e2542cf7eedf7@google.com>
Reviewed-by: William Hesse <a821cddceae7dc400f272e3cb1a72f400f9fed6d@google.com>
|
Python
|
bsd-3-clause
|
dart-lang/sdk,dartino/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk
|
#!/usr/bin/env python
# Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.

import os
import subprocess
import sys
import shutil
import tempfile

# Minimal pubspec used to exercise 'pub get' / 'pub upgrade' end to end.
PUBSPEC = """name: pub_integration_test
dependencies:
  shelf:
  test:
"""


def Main():
    """Run 'pub get' then 'pub upgrade' in a throwaway package.

    Returns the failing command's non-zero exit code, or None
    (process exit status 0) on success. The temporary package
    directory is always removed.
    """
    out_dir = 'xcodebuild' if sys.platform == 'darwin' else 'out'
    pub = os.path.abspath('%s/ReleaseX64/dart-sdk/bin/pub' % out_dir)
    working_dir = tempfile.mkdtemp()
    try:
        pub_cache_dir = working_dir + '/pub_cache'
        env = {'PUB_CACHE': pub_cache_dir}
        with open(working_dir + '/pubspec.yaml', 'w') as pubspec_yaml:
            pubspec_yaml.write(PUBSPEC)
        exit_code = subprocess.call([pub, 'get'], cwd=working_dir, env=env)
        # Bug fix: 'is not 0' tested object identity, which only works by
        # accident via CPython's small-int cache; compare by value.
        if exit_code != 0:
            return exit_code
        exit_code = subprocess.call([pub, 'upgrade'], cwd=working_dir, env=env)
        if exit_code != 0:
            return exit_code
    finally:
        shutil.rmtree(working_dir)


if __name__ == '__main__':
    sys.exit(Main())
[infra] Add .bat to pub command on Windows
#32656
Change-Id: I3a34bf2c81676eea0ab112a8aad701962590a6c3
Reviewed-on: https://dart-review.googlesource.com/55165
Commit-Queue: Alexander Thomas <29642742b6693024c89de8232f2e2542cf7eedf7@google.com>
Reviewed-by: William Hesse <a821cddceae7dc400f272e3cb1a72f400f9fed6d@google.com>
|
#!/usr/bin/env python
# Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.

import os
import subprocess
import sys
import shutil
import tempfile

# Minimal pubspec used to exercise 'pub get' / 'pub upgrade' end to end.
PUBSPEC = """name: pub_integration_test
dependencies:
  shelf:
  test:
"""


def Main():
    """Run 'pub get' then 'pub upgrade' in a throwaway package.

    Returns the failing command's non-zero exit code, or None
    (process exit status 0) on success. The temporary package
    directory is always removed.
    """
    out_dir = 'xcodebuild' if sys.platform == 'darwin' else 'out'
    # Windows ships pub as a batch file.
    extension = '' if not sys.platform == 'win32' else '.bat'
    pub = os.path.abspath(
        '%s/ReleaseX64/dart-sdk/bin/pub%s' % (out_dir, extension))
    working_dir = tempfile.mkdtemp()
    try:
        pub_cache_dir = working_dir + '/pub_cache'
        # Inherit the caller's environment (PATH etc.) but point PUB_CACHE
        # at a private directory inside the temporary package.
        env = os.environ.copy()
        env['PUB_CACHE'] = pub_cache_dir
        with open(working_dir + '/pubspec.yaml', 'w') as pubspec_yaml:
            pubspec_yaml.write(PUBSPEC)
        exit_code = subprocess.call([pub, 'get'], cwd=working_dir, env=env)
        # Bug fix: 'is not 0' tested object identity, which only works by
        # accident via CPython's small-int cache; compare by value.
        if exit_code != 0:
            return exit_code
        exit_code = subprocess.call([pub, 'upgrade'], cwd=working_dir, env=env)
        if exit_code != 0:
            return exit_code
    finally:
        shutil.rmtree(working_dir)


if __name__ == '__main__':
    sys.exit(Main())
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import subprocess
import sys
import shutil
import tempfile
PUBSPEC = """name: pub_integration_test
dependencies:
shelf:
test:
"""
def Main():
out_dir = 'xcodebuild' if sys.platform == 'darwin' else 'out'
pub = os.path.abspath('%s/ReleaseX64/dart-sdk/bin/pub' % out_dir)
working_dir = tempfile.mkdtemp()
try:
pub_cache_dir = working_dir + '/pub_cache'
env = { 'PUB_CACHE': pub_cache_dir }
with open(working_dir + '/pubspec.yaml', 'w') as pubspec_yaml:
pubspec_yaml.write(PUBSPEC)
exit_code = subprocess.call([pub, 'get'], cwd=working_dir, env=env)
if exit_code is not 0:
return exit_code
exit_code = subprocess.call([pub, 'upgrade'], cwd=working_dir, env=env)
if exit_code is not 0:
return exit_code
finally:
shutil.rmtree(working_dir);
if __name__ == '__main__':
sys.exit(Main())
<commit_msg>[infra] Add .bat to pub command on Windows
#32656
Change-Id: I3a34bf2c81676eea0ab112a8aad701962590a6c3
Reviewed-on: https://dart-review.googlesource.com/55165
Commit-Queue: Alexander Thomas <29642742b6693024c89de8232f2e2542cf7eedf7@google.com>
Reviewed-by: William Hesse <a821cddceae7dc400f272e3cb1a72f400f9fed6d@google.com><commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import subprocess
import sys
import shutil
import tempfile
PUBSPEC = """name: pub_integration_test
dependencies:
shelf:
test:
"""
def Main():
out_dir = 'xcodebuild' if sys.platform == 'darwin' else 'out'
extension = '' if not sys.platform == 'win32' else '.bat'
pub = os.path.abspath(
'%s/ReleaseX64/dart-sdk/bin/pub%s' % (out_dir, extension))
working_dir = tempfile.mkdtemp()
try:
pub_cache_dir = working_dir + '/pub_cache'
env = os.environ.copy()
env['PUB_CACHE'] = pub_cache_dir
with open(working_dir + '/pubspec.yaml', 'w') as pubspec_yaml:
pubspec_yaml.write(PUBSPEC)
exit_code = subprocess.call([pub, 'get'], cwd=working_dir, env=env)
if exit_code is not 0:
return exit_code
exit_code = subprocess.call([pub, 'upgrade'], cwd=working_dir, env=env)
if exit_code is not 0:
return exit_code
finally:
shutil.rmtree(working_dir);
if __name__ == '__main__':
sys.exit(Main())
|
#!/usr/bin/env python
# Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import subprocess
import sys
import shutil
import tempfile
PUBSPEC = """name: pub_integration_test
dependencies:
shelf:
test:
"""
def Main():
out_dir = 'xcodebuild' if sys.platform == 'darwin' else 'out'
pub = os.path.abspath('%s/ReleaseX64/dart-sdk/bin/pub' % out_dir)
working_dir = tempfile.mkdtemp()
try:
pub_cache_dir = working_dir + '/pub_cache'
env = { 'PUB_CACHE': pub_cache_dir }
with open(working_dir + '/pubspec.yaml', 'w') as pubspec_yaml:
pubspec_yaml.write(PUBSPEC)
exit_code = subprocess.call([pub, 'get'], cwd=working_dir, env=env)
if exit_code is not 0:
return exit_code
exit_code = subprocess.call([pub, 'upgrade'], cwd=working_dir, env=env)
if exit_code is not 0:
return exit_code
finally:
shutil.rmtree(working_dir);
if __name__ == '__main__':
sys.exit(Main())
[infra] Add .bat to pub command on Windows
#32656
Change-Id: I3a34bf2c81676eea0ab112a8aad701962590a6c3
Reviewed-on: https://dart-review.googlesource.com/55165
Commit-Queue: Alexander Thomas <29642742b6693024c89de8232f2e2542cf7eedf7@google.com>
Reviewed-by: William Hesse <a821cddceae7dc400f272e3cb1a72f400f9fed6d@google.com>#!/usr/bin/env python
# Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import subprocess
import sys
import shutil
import tempfile
PUBSPEC = """name: pub_integration_test
dependencies:
shelf:
test:
"""
def Main():
out_dir = 'xcodebuild' if sys.platform == 'darwin' else 'out'
extension = '' if not sys.platform == 'win32' else '.bat'
pub = os.path.abspath(
'%s/ReleaseX64/dart-sdk/bin/pub%s' % (out_dir, extension))
working_dir = tempfile.mkdtemp()
try:
pub_cache_dir = working_dir + '/pub_cache'
env = os.environ.copy()
env['PUB_CACHE'] = pub_cache_dir
with open(working_dir + '/pubspec.yaml', 'w') as pubspec_yaml:
pubspec_yaml.write(PUBSPEC)
exit_code = subprocess.call([pub, 'get'], cwd=working_dir, env=env)
if exit_code is not 0:
return exit_code
exit_code = subprocess.call([pub, 'upgrade'], cwd=working_dir, env=env)
if exit_code is not 0:
return exit_code
finally:
shutil.rmtree(working_dir);
if __name__ == '__main__':
sys.exit(Main())
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import subprocess
import sys
import shutil
import tempfile
PUBSPEC = """name: pub_integration_test
dependencies:
shelf:
test:
"""
def Main():
out_dir = 'xcodebuild' if sys.platform == 'darwin' else 'out'
pub = os.path.abspath('%s/ReleaseX64/dart-sdk/bin/pub' % out_dir)
working_dir = tempfile.mkdtemp()
try:
pub_cache_dir = working_dir + '/pub_cache'
env = { 'PUB_CACHE': pub_cache_dir }
with open(working_dir + '/pubspec.yaml', 'w') as pubspec_yaml:
pubspec_yaml.write(PUBSPEC)
exit_code = subprocess.call([pub, 'get'], cwd=working_dir, env=env)
if exit_code is not 0:
return exit_code
exit_code = subprocess.call([pub, 'upgrade'], cwd=working_dir, env=env)
if exit_code is not 0:
return exit_code
finally:
shutil.rmtree(working_dir);
if __name__ == '__main__':
sys.exit(Main())
<commit_msg>[infra] Add .bat to pub command on Windows
#32656
Change-Id: I3a34bf2c81676eea0ab112a8aad701962590a6c3
Reviewed-on: https://dart-review.googlesource.com/55165
Commit-Queue: Alexander Thomas <29642742b6693024c89de8232f2e2542cf7eedf7@google.com>
Reviewed-by: William Hesse <a821cddceae7dc400f272e3cb1a72f400f9fed6d@google.com><commit_after>#!/usr/bin/env python
# Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import os
import subprocess
import sys
import shutil
import tempfile
PUBSPEC = """name: pub_integration_test
dependencies:
shelf:
test:
"""
def Main():
out_dir = 'xcodebuild' if sys.platform == 'darwin' else 'out'
extension = '' if not sys.platform == 'win32' else '.bat'
pub = os.path.abspath(
'%s/ReleaseX64/dart-sdk/bin/pub%s' % (out_dir, extension))
working_dir = tempfile.mkdtemp()
try:
pub_cache_dir = working_dir + '/pub_cache'
env = os.environ.copy()
env['PUB_CACHE'] = pub_cache_dir
with open(working_dir + '/pubspec.yaml', 'w') as pubspec_yaml:
pubspec_yaml.write(PUBSPEC)
exit_code = subprocess.call([pub, 'get'], cwd=working_dir, env=env)
if exit_code is not 0:
return exit_code
exit_code = subprocess.call([pub, 'upgrade'], cwd=working_dir, env=env)
if exit_code is not 0:
return exit_code
finally:
shutil.rmtree(working_dir);
if __name__ == '__main__':
sys.exit(Main())
|
5a7c6c38ee28a584b117d3509a11cd8c868712f3
|
importscan/compat.py
|
importscan/compat.py
|
import sys
PY3 = sys.version_info[0] == 3
if PY3:
def is_nonstr_iter(v):
if isinstance(v, str): # pragma: no cover
return False
return hasattr(v, '__iter__')
else: # pragma: no cover
def is_nonstr_iter(v):
return hasattr(v, '__iter__')
if PY3:
INT_TYPES = (int,)
else: # pragma: no cover
INT_TYPES = (int, long)
|
import sys
PY3 = sys.version_info[0] == 3
if PY3:
def is_nonstr_iter(v):
if isinstance(v, str): # pragma: no cover
return False
return hasattr(v, '__iter__')
else: # pragma: no cover
def is_nonstr_iter(v):
return hasattr(v, '__iter__')
if PY3:
INT_TYPES = (int,)
else: # pragma: no cover
INT_TYPES = (int, long) # noqa
|
Exclude Python 2 specific code from pep8
|
Exclude Python 2 specific code from pep8
|
Python
|
bsd-3-clause
|
faassen/importscan
|
import sys

# True when this interpreter is Python 3.
PY3 = sys.version_info[0] == 3

if PY3:
    def is_nonstr_iter(v):
        """Return True if ``v`` is iterable but is not a string."""
        # On Python 3, str defines __iter__, so exclude it explicitly.
        if isinstance(v, str):  # pragma: no cover
            return False
        return hasattr(v, '__iter__')

    # Integer types suitable for isinstance() checks.
    INT_TYPES = (int,)
else:  # pragma: no cover
    def is_nonstr_iter(v):
        """Return True if ``v`` is iterable but is not a string."""
        # Python 2 str lacks __iter__, so the attribute check suffices.
        return hasattr(v, '__iter__')

    INT_TYPES = (int, long)
Exclude Python 2 specific code from pep8
|
import sys
PY3 = sys.version_info[0] == 3
if PY3:
def is_nonstr_iter(v):
if isinstance(v, str): # pragma: no cover
return False
return hasattr(v, '__iter__')
else: # pragma: no cover
def is_nonstr_iter(v):
return hasattr(v, '__iter__')
if PY3:
INT_TYPES = (int,)
else: # pragma: no cover
INT_TYPES = (int, long) # noqa
|
<commit_before>import sys
PY3 = sys.version_info[0] == 3
if PY3:
def is_nonstr_iter(v):
if isinstance(v, str): # pragma: no cover
return False
return hasattr(v, '__iter__')
else: # pragma: no cover
def is_nonstr_iter(v):
return hasattr(v, '__iter__')
if PY3:
INT_TYPES = (int,)
else: # pragma: no cover
INT_TYPES = (int, long)
<commit_msg>Exclude Python 2 specific code from pep8<commit_after>
|
import sys

# True when this interpreter is Python 3.
PY3 = sys.version_info[0] == 3

if PY3:
    def is_nonstr_iter(v):
        """Return True if ``v`` is iterable but is not a string."""
        # On Python 3, str defines __iter__, so exclude it explicitly.
        if isinstance(v, str):  # pragma: no cover
            return False
        return hasattr(v, '__iter__')

    # Integer types suitable for isinstance() checks.
    INT_TYPES = (int,)
else:  # pragma: no cover
    def is_nonstr_iter(v):
        """Return True if ``v`` is iterable but is not a string."""
        # Python 2 str lacks __iter__, so the attribute check suffices.
        return hasattr(v, '__iter__')

    INT_TYPES = (int, long)  # noqa
|
import sys
PY3 = sys.version_info[0] == 3
if PY3:
def is_nonstr_iter(v):
if isinstance(v, str): # pragma: no cover
return False
return hasattr(v, '__iter__')
else: # pragma: no cover
def is_nonstr_iter(v):
return hasattr(v, '__iter__')
if PY3:
INT_TYPES = (int,)
else: # pragma: no cover
INT_TYPES = (int, long)
Exclude Python 2 specific code from pep8import sys
PY3 = sys.version_info[0] == 3
if PY3:
def is_nonstr_iter(v):
if isinstance(v, str): # pragma: no cover
return False
return hasattr(v, '__iter__')
else: # pragma: no cover
def is_nonstr_iter(v):
return hasattr(v, '__iter__')
if PY3:
INT_TYPES = (int,)
else: # pragma: no cover
INT_TYPES = (int, long) # noqa
|
<commit_before>import sys
PY3 = sys.version_info[0] == 3
if PY3:
def is_nonstr_iter(v):
if isinstance(v, str): # pragma: no cover
return False
return hasattr(v, '__iter__')
else: # pragma: no cover
def is_nonstr_iter(v):
return hasattr(v, '__iter__')
if PY3:
INT_TYPES = (int,)
else: # pragma: no cover
INT_TYPES = (int, long)
<commit_msg>Exclude Python 2 specific code from pep8<commit_after>import sys
PY3 = sys.version_info[0] == 3
if PY3:
def is_nonstr_iter(v):
if isinstance(v, str): # pragma: no cover
return False
return hasattr(v, '__iter__')
else: # pragma: no cover
def is_nonstr_iter(v):
return hasattr(v, '__iter__')
if PY3:
INT_TYPES = (int,)
else: # pragma: no cover
INT_TYPES = (int, long) # noqa
|
7325eacc1066970a98be30b56fdf4cd31ecc2f57
|
db_file_storage/views.py
|
db_file_storage/views.py
|
# django
from wsgiref.util import FileWrapper
from django.http import HttpResponse, HttpResponseBadRequest
from django.utils.translation import ugettext as _
# project
from db_file_storage.storage import DatabaseFileStorage
storage = DatabaseFileStorage()
def get_file(request, add_attachment_headers):
name = request.GET.get('name')
try:
_file = storage.open(name)
except Exception:
return HttpResponseBadRequest(_('Invalid request'))
response = HttpResponse(
FileWrapper(_file),
content_type=_file.mimetype
)
if add_attachment_headers:
response['Content-Disposition'] = \
'attachment; filename=%(name)s' % {'name': _file.filename}
return response
|
# django
from wsgiref.util import FileWrapper
from django.http import HttpResponse, HttpResponseBadRequest
from django.utils.translation import ugettext as _
# project
from db_file_storage.storage import DatabaseFileStorage
storage = DatabaseFileStorage()
def get_file(request, add_attachment_headers):
name = request.GET.get('name')
try:
_file = storage.open(name)
except Exception:
return HttpResponseBadRequest(_('Invalid request'))
response = HttpResponse(
FileWrapper(_file),
content_type=_file.mimetype
)
response['Content-Length'] = _file.tell()
if add_attachment_headers:
response['Content-Disposition'] = \
'attachment; filename=%(name)s' % {'name': _file.filename}
return response
|
Set Content-Length header in get_file view
|
Set Content-Length header in get_file view
|
Python
|
mit
|
victor-o-silva/db_file_storage,victor-o-silva/db_file_storage
|
# django
from wsgiref.util import FileWrapper
from django.http import HttpResponse, HttpResponseBadRequest
from django.utils.translation import ugettext as _
# project
from db_file_storage.storage import DatabaseFileStorage

storage = DatabaseFileStorage()


def get_file(request, add_attachment_headers):
    """Stream a database-stored file identified by the ?name= parameter.

    Any exception raised while opening the file is reported as a
    400 Bad Request rather than leaking the underlying error.
    When ``add_attachment_headers`` is truthy, a Content-Disposition
    header forces a download under the stored filename.
    """
    name = request.GET.get('name')
    try:
        stored_file = storage.open(name)
    except Exception:
        return HttpResponseBadRequest(_('Invalid request'))
    response = HttpResponse(FileWrapper(stored_file),
                            content_type=stored_file.mimetype)
    if add_attachment_headers:
        # NOTE(review): filename is inserted unescaped — confirm stored
        # filenames cannot contain header-breaking characters.
        response['Content-Disposition'] = \
            'attachment; filename=%(name)s' % {'name': stored_file.filename}
    return response
Set Content-Length header in get_file view
|
# django
from wsgiref.util import FileWrapper
from django.http import HttpResponse, HttpResponseBadRequest
from django.utils.translation import ugettext as _
# project
from db_file_storage.storage import DatabaseFileStorage
storage = DatabaseFileStorage()
def get_file(request, add_attachment_headers):
name = request.GET.get('name')
try:
_file = storage.open(name)
except Exception:
return HttpResponseBadRequest(_('Invalid request'))
response = HttpResponse(
FileWrapper(_file),
content_type=_file.mimetype
)
response['Content-Length'] = _file.tell()
if add_attachment_headers:
response['Content-Disposition'] = \
'attachment; filename=%(name)s' % {'name': _file.filename}
return response
|
<commit_before># django
from wsgiref.util import FileWrapper
from django.http import HttpResponse, HttpResponseBadRequest
from django.utils.translation import ugettext as _
# project
from db_file_storage.storage import DatabaseFileStorage
storage = DatabaseFileStorage()
def get_file(request, add_attachment_headers):
name = request.GET.get('name')
try:
_file = storage.open(name)
except Exception:
return HttpResponseBadRequest(_('Invalid request'))
response = HttpResponse(
FileWrapper(_file),
content_type=_file.mimetype
)
if add_attachment_headers:
response['Content-Disposition'] = \
'attachment; filename=%(name)s' % {'name': _file.filename}
return response
<commit_msg>Set Content-Length header in get_file view<commit_after>
|
# django
from wsgiref.util import FileWrapper
from django.http import HttpResponse, HttpResponseBadRequest
from django.utils.translation import ugettext as _
# project
from db_file_storage.storage import DatabaseFileStorage

storage = DatabaseFileStorage()


def get_file(request, add_attachment_headers):
    """Stream a database-stored file identified by the ?name= parameter.

    Any exception raised while opening the file is reported as a
    400 Bad Request rather than leaking the underlying error.
    When ``add_attachment_headers`` is truthy, a Content-Disposition
    header forces a download under the stored filename.
    """
    name = request.GET.get('name')
    try:
        stored_file = storage.open(name)
    except Exception:
        return HttpResponseBadRequest(_('Invalid request'))
    response = HttpResponse(FileWrapper(stored_file),
                            content_type=stored_file.mimetype)
    # NOTE(review): this assumes storage.open() leaves the file position
    # at EOF so tell() equals the content length — confirm in storage.
    response['Content-Length'] = stored_file.tell()
    if add_attachment_headers:
        # NOTE(review): filename is inserted unescaped — confirm stored
        # filenames cannot contain header-breaking characters.
        response['Content-Disposition'] = \
            'attachment; filename=%(name)s' % {'name': stored_file.filename}
    return response
|
# django
from wsgiref.util import FileWrapper
from django.http import HttpResponse, HttpResponseBadRequest
from django.utils.translation import ugettext as _
# project
from db_file_storage.storage import DatabaseFileStorage
storage = DatabaseFileStorage()
def get_file(request, add_attachment_headers):
name = request.GET.get('name')
try:
_file = storage.open(name)
except Exception:
return HttpResponseBadRequest(_('Invalid request'))
response = HttpResponse(
FileWrapper(_file),
content_type=_file.mimetype
)
if add_attachment_headers:
response['Content-Disposition'] = \
'attachment; filename=%(name)s' % {'name': _file.filename}
return response
Set Content-Length header in get_file view# django
from wsgiref.util import FileWrapper
from django.http import HttpResponse, HttpResponseBadRequest
from django.utils.translation import ugettext as _
# project
from db_file_storage.storage import DatabaseFileStorage
storage = DatabaseFileStorage()
def get_file(request, add_attachment_headers):
name = request.GET.get('name')
try:
_file = storage.open(name)
except Exception:
return HttpResponseBadRequest(_('Invalid request'))
response = HttpResponse(
FileWrapper(_file),
content_type=_file.mimetype
)
response['Content-Length'] = _file.tell()
if add_attachment_headers:
response['Content-Disposition'] = \
'attachment; filename=%(name)s' % {'name': _file.filename}
return response
|
<commit_before># django
from wsgiref.util import FileWrapper
from django.http import HttpResponse, HttpResponseBadRequest
from django.utils.translation import ugettext as _
# project
from db_file_storage.storage import DatabaseFileStorage
storage = DatabaseFileStorage()
def get_file(request, add_attachment_headers):
name = request.GET.get('name')
try:
_file = storage.open(name)
except Exception:
return HttpResponseBadRequest(_('Invalid request'))
response = HttpResponse(
FileWrapper(_file),
content_type=_file.mimetype
)
if add_attachment_headers:
response['Content-Disposition'] = \
'attachment; filename=%(name)s' % {'name': _file.filename}
return response
<commit_msg>Set Content-Length header in get_file view<commit_after># django
from wsgiref.util import FileWrapper
from django.http import HttpResponse, HttpResponseBadRequest
from django.utils.translation import ugettext as _
# project
from db_file_storage.storage import DatabaseFileStorage
storage = DatabaseFileStorage()
def get_file(request, add_attachment_headers):
    """Stream a database-stored file named by the ``name`` GET parameter.

    Responds with HTTP 400 when the file cannot be opened.  When
    ``add_attachment_headers`` is true, a Content-Disposition header is
    set so browsers download the file instead of rendering it inline.
    """
    name = request.GET.get('name')
    try:
        stored = storage.open(name)
    except Exception:
        # Any storage failure (missing/unknown name, bad value, ...) is
        # treated as a client error.
        return HttpResponseBadRequest(_('Invalid request'))
    response = HttpResponse(FileWrapper(stored), content_type=stored.mimetype)
    # NOTE(review): assumes storage.open() leaves the file positioned at its
    # end, so tell() equals the file size -- confirm against DatabaseFileStorage.
    response['Content-Length'] = stored.tell()
    if add_attachment_headers:
        disposition = 'attachment; filename=%(name)s' % {'name': stored.filename}
        response['Content-Disposition'] = disposition
    return response
|
9413b4b24c318df4bf68069038081d08fa9ad2e8
|
vumi/transports/infobip/__init__.py
|
vumi/transports/infobip/__init__.py
|
"""Infobip transport."""
from vumi.transports.infobip.infobip import InfobipTransport
__all__ = ['InfobipTransport']
|
"""Infobip transport."""
from vumi.transports.infobip.infobip import InfobipTransport, InfobipError
__all__ = ['InfobipTransport', 'InfobipError']
|
Add InfobipError to things exported by Infobip package.
|
Add InfobipError to things exported by Infobip package.
|
Python
|
bsd-3-clause
|
TouK/vumi,harrissoerja/vumi,vishwaprakashmishra/xmatrix,TouK/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,harrissoerja/vumi,vishwaprakashmishra/xmatrix
|
"""Infobip transport."""
from vumi.transports.infobip.infobip import InfobipTransport
__all__ = ['InfobipTransport']
Add InfobipError to things exported by Infobip package.
|
"""Infobip transport."""
from vumi.transports.infobip.infobip import InfobipTransport, InfobipError
__all__ = ['InfobipTransport', 'InfobipError']
|
<commit_before>"""Infobip transport."""
from vumi.transports.infobip.infobip import InfobipTransport
__all__ = ['InfobipTransport']
<commit_msg>Add InfobipError to things exported by Infobip package.<commit_after>
|
"""Infobip transport."""
from vumi.transports.infobip.infobip import InfobipTransport, InfobipError
__all__ = ['InfobipTransport', 'InfobipError']
|
"""Infobip transport."""
from vumi.transports.infobip.infobip import InfobipTransport
__all__ = ['InfobipTransport']
Add InfobipError to things exported by Infobip package."""Infobip transport."""
from vumi.transports.infobip.infobip import InfobipTransport, InfobipError
__all__ = ['InfobipTransport', 'InfobipError']
|
<commit_before>"""Infobip transport."""
from vumi.transports.infobip.infobip import InfobipTransport
__all__ = ['InfobipTransport']
<commit_msg>Add InfobipError to things exported by Infobip package.<commit_after>"""Infobip transport."""
from vumi.transports.infobip.infobip import InfobipTransport, InfobipError
__all__ = ['InfobipTransport', 'InfobipError']
|
a1e331935c91a5ce542020bdfe7b05c29599dac7
|
aiohttp/__init__.py
|
aiohttp/__init__.py
|
# This relies on each of the submodules having an __all__ variable.
__version__ = '0.12.1dev'
from .protocol import * # noqa
from .connector import * # noqa
from .client import * # noqa
from .errors import * # noqa
from .helpers import * # noqa
from .parsers import * # noqa
from .streams import * # noqa
__all__ = (client.__all__ +
errors.__all__ +
helpers.__all__ +
parsers.__all__ +
protocol.__all__ +
connector.__all__ +
streams.__all__ +
['__version__'])
|
# This relies on each of the submodules having an __all__ variable.
__version__ = '0.12.1a'
from .protocol import * # noqa
from .connector import * # noqa
from .client import * # noqa
from .errors import * # noqa
from .helpers import * # noqa
from .parsers import * # noqa
from .streams import * # noqa
__all__ = (client.__all__ +
errors.__all__ +
helpers.__all__ +
parsers.__all__ +
protocol.__all__ +
connector.__all__ +
streams.__all__ +
['__version__'])
|
Change version schema from 0.12.1dev to 0.12.1a for sake of following pep 440
|
Change version schema from 0.12.1dev to 0.12.1a for sake of following pep 440
|
Python
|
apache-2.0
|
rutsky/aiohttp,hellysmile/aiohttp,jettify/aiohttp,Srogozins/aiohttp,vedun/aiohttp,moden-py/aiohttp,z2v/aiohttp,alexsdutton/aiohttp,rutsky/aiohttp,rutsky/aiohttp,juliatem/aiohttp,mind1master/aiohttp,noplay/aiohttp,KeepSafe/aiohttp,sterwill/aiohttp,AlexLisovoy/aiohttp,alex-eri/aiohttp-1,AraHaanOrg/aiohttp,arthurdarcet/aiohttp,alunduil/aiohttp,iksteen/aiohttp,alexsdutton/aiohttp,andyaguiar/aiohttp,jashandeep-sohi/aiohttp,saghul/aiohttp,vasylbo/aiohttp,jettify/aiohttp,singulared/aiohttp,flying-sheep/aiohttp,jojurajan/aiohttp,KeepSafe/aiohttp,vaskalas/aiohttp,mind1master/aiohttp,juliatem/aiohttp,decentfox/aiohttp,mind1master/aiohttp,elastic-coders/aiohttp,vaskalas/aiohttp,elastic-coders/aiohttp,KeepSafe/aiohttp,jashandeep-sohi/aiohttp,Eyepea/aiohttp,vaskalas/aiohttp,decentfox/aiohttp,esaezgil/aiohttp,iksteen/aiohttp,pfreixes/aiohttp,pfreixes/aiohttp,singulared/aiohttp,playpauseandstop/aiohttp,z2v/aiohttp,AlexLisovoy/aiohttp,esaezgil/aiohttp,panda73111/aiohttp,hellysmile/aiohttp,elastic-coders/aiohttp,panda73111/aiohttp,Insoleet/aiohttp,z2v/aiohttp,AraHaanOrg/aiohttp,noplay/aiohttp,moden-py/aiohttp,pathcl/aiohttp,singulared/aiohttp,jettify/aiohttp,alex-eri/aiohttp-1,danielnelson/aiohttp,esaezgil/aiohttp,panda73111/aiohttp,alex-eri/aiohttp-1,jojurajan/aiohttp,jashandeep-sohi/aiohttp,noodle-learns-programming/aiohttp,morgan-del/aiohttp,decentfox/aiohttp,avanov/aiohttp,saghul/aiohttp,arthurdarcet/aiohttp,moden-py/aiohttp,arthurdarcet/aiohttp
|
# This relies on each of the submodules having an __all__ variable.
__version__ = '0.12.1dev'
from .protocol import * # noqa
from .connector import * # noqa
from .client import * # noqa
from .errors import * # noqa
from .helpers import * # noqa
from .parsers import * # noqa
from .streams import * # noqa
__all__ = (client.__all__ +
errors.__all__ +
helpers.__all__ +
parsers.__all__ +
protocol.__all__ +
connector.__all__ +
streams.__all__ +
['__version__'])
Change version schema from 0.12.1dev to 0.12.1a for sake of following pep 440
|
# This relies on each of the submodules having an __all__ variable.
__version__ = '0.12.1a'
from .protocol import * # noqa
from .connector import * # noqa
from .client import * # noqa
from .errors import * # noqa
from .helpers import * # noqa
from .parsers import * # noqa
from .streams import * # noqa
__all__ = (client.__all__ +
errors.__all__ +
helpers.__all__ +
parsers.__all__ +
protocol.__all__ +
connector.__all__ +
streams.__all__ +
['__version__'])
|
<commit_before># This relies on each of the submodules having an __all__ variable.
__version__ = '0.12.1dev'
from .protocol import * # noqa
from .connector import * # noqa
from .client import * # noqa
from .errors import * # noqa
from .helpers import * # noqa
from .parsers import * # noqa
from .streams import * # noqa
__all__ = (client.__all__ +
errors.__all__ +
helpers.__all__ +
parsers.__all__ +
protocol.__all__ +
connector.__all__ +
streams.__all__ +
['__version__'])
<commit_msg>Change version schema from 0.12.1dev to 0.12.1a for sake of following pep 440<commit_after>
|
# Package namespace: re-export the public API of every submodule.
# This relies on each of the submodules having an __all__ variable.
__version__ = '0.12.1a'  # PEP 440 pre-release version string
from .protocol import *  # noqa
from .connector import *  # noqa
from .client import *  # noqa
from .errors import *  # noqa
from .helpers import *  # noqa
from .parsers import *  # noqa
from .streams import *  # noqa
# The package's public API is the union of the submodules' __all__
# lists, plus the version string itself.
__all__ = (client.__all__ +
           errors.__all__ +
           helpers.__all__ +
           parsers.__all__ +
           protocol.__all__ +
           connector.__all__ +
           streams.__all__ +
           ['__version__'])
|
# This relies on each of the submodules having an __all__ variable.
__version__ = '0.12.1dev'
from .protocol import * # noqa
from .connector import * # noqa
from .client import * # noqa
from .errors import * # noqa
from .helpers import * # noqa
from .parsers import * # noqa
from .streams import * # noqa
__all__ = (client.__all__ +
errors.__all__ +
helpers.__all__ +
parsers.__all__ +
protocol.__all__ +
connector.__all__ +
streams.__all__ +
['__version__'])
Change version schema from 0.12.1dev to 0.12.1a for sake of following pep 440# This relies on each of the submodules having an __all__ variable.
__version__ = '0.12.1a'
from .protocol import * # noqa
from .connector import * # noqa
from .client import * # noqa
from .errors import * # noqa
from .helpers import * # noqa
from .parsers import * # noqa
from .streams import * # noqa
__all__ = (client.__all__ +
errors.__all__ +
helpers.__all__ +
parsers.__all__ +
protocol.__all__ +
connector.__all__ +
streams.__all__ +
['__version__'])
|
<commit_before># This relies on each of the submodules having an __all__ variable.
__version__ = '0.12.1dev'
from .protocol import * # noqa
from .connector import * # noqa
from .client import * # noqa
from .errors import * # noqa
from .helpers import * # noqa
from .parsers import * # noqa
from .streams import * # noqa
__all__ = (client.__all__ +
errors.__all__ +
helpers.__all__ +
parsers.__all__ +
protocol.__all__ +
connector.__all__ +
streams.__all__ +
['__version__'])
<commit_msg>Change version schema from 0.12.1dev to 0.12.1a for sake of following pep 440<commit_after># This relies on each of the submodules having an __all__ variable.
__version__ = '0.12.1a'
from .protocol import * # noqa
from .connector import * # noqa
from .client import * # noqa
from .errors import * # noqa
from .helpers import * # noqa
from .parsers import * # noqa
from .streams import * # noqa
__all__ = (client.__all__ +
errors.__all__ +
helpers.__all__ +
parsers.__all__ +
protocol.__all__ +
connector.__all__ +
streams.__all__ +
['__version__'])
|
c13d3584a8cb95b619af7fa876cb87d5b987a5dd
|
akari/log_config.py
|
akari/log_config.py
|
import logging.config
_logging_config = dict(
version=1,
disable_existing_loggers=False,
formatters={
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(message)s'
},
},
handlers={
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
'null': {
'class': 'logging.NullHandler',
}
},
loggers={
'': {
'handlers': ['console'],
'level': logging.DEBUG,
},
'influxdb': {
'level': logging.INFO,
},
'phue': {
'level': logging.INFO,
},
},
)
logging.config.dictConfig(_logging_config)
|
import logging.config
_logging_config = dict(
version=1,
disable_existing_loggers=False,
formatters={
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(message)s'
},
},
handlers={
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
'null': {
'class': 'logging.NullHandler',
}
},
loggers={
'': {
'handlers': ['console'],
'level': logging.INFO,
},
'influxdb': {
'level': logging.INFO,
},
'phue': {
'level': logging.INFO,
},
},
)
logging.config.dictConfig(_logging_config)
|
Set root log level to INFO
|
Set root log level to INFO
|
Python
|
mit
|
kennydo/akari
|
import logging.config
_logging_config = dict(
version=1,
disable_existing_loggers=False,
formatters={
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(message)s'
},
},
handlers={
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
'null': {
'class': 'logging.NullHandler',
}
},
loggers={
'': {
'handlers': ['console'],
'level': logging.DEBUG,
},
'influxdb': {
'level': logging.INFO,
},
'phue': {
'level': logging.INFO,
},
},
)
logging.config.dictConfig(_logging_config)
Set root log level to INFO
|
import logging.config
_logging_config = dict(
version=1,
disable_existing_loggers=False,
formatters={
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(message)s'
},
},
handlers={
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
'null': {
'class': 'logging.NullHandler',
}
},
loggers={
'': {
'handlers': ['console'],
'level': logging.INFO,
},
'influxdb': {
'level': logging.INFO,
},
'phue': {
'level': logging.INFO,
},
},
)
logging.config.dictConfig(_logging_config)
|
<commit_before>import logging.config
_logging_config = dict(
version=1,
disable_existing_loggers=False,
formatters={
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(message)s'
},
},
handlers={
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
'null': {
'class': 'logging.NullHandler',
}
},
loggers={
'': {
'handlers': ['console'],
'level': logging.DEBUG,
},
'influxdb': {
'level': logging.INFO,
},
'phue': {
'level': logging.INFO,
},
},
)
logging.config.dictConfig(_logging_config)
<commit_msg>Set root log level to INFO<commit_after>
|
import logging.config
# Central logging setup: one verbose console handler on the root logger,
# with the chatty third-party libraries (influxdb, phue) pinned to INFO.
_logging_config = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'verbose': {
            'format': '%(asctime)s [%(levelname)s] %(message)s'
        },
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'formatter': 'verbose',
        },
        'null': {
            'class': 'logging.NullHandler',
        }
    },
    'loggers': {
        '': {
            'handlers': ['console'],
            'level': logging.INFO,
        },
        'influxdb': {
            'level': logging.INFO,
        },
        'phue': {
            'level': logging.INFO,
        },
    },
}
logging.config.dictConfig(_logging_config)
|
import logging.config
_logging_config = dict(
version=1,
disable_existing_loggers=False,
formatters={
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(message)s'
},
},
handlers={
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
'null': {
'class': 'logging.NullHandler',
}
},
loggers={
'': {
'handlers': ['console'],
'level': logging.DEBUG,
},
'influxdb': {
'level': logging.INFO,
},
'phue': {
'level': logging.INFO,
},
},
)
logging.config.dictConfig(_logging_config)
Set root log level to INFOimport logging.config
_logging_config = dict(
version=1,
disable_existing_loggers=False,
formatters={
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(message)s'
},
},
handlers={
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
'null': {
'class': 'logging.NullHandler',
}
},
loggers={
'': {
'handlers': ['console'],
'level': logging.INFO,
},
'influxdb': {
'level': logging.INFO,
},
'phue': {
'level': logging.INFO,
},
},
)
logging.config.dictConfig(_logging_config)
|
<commit_before>import logging.config
_logging_config = dict(
version=1,
disable_existing_loggers=False,
formatters={
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(message)s'
},
},
handlers={
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
'null': {
'class': 'logging.NullHandler',
}
},
loggers={
'': {
'handlers': ['console'],
'level': logging.DEBUG,
},
'influxdb': {
'level': logging.INFO,
},
'phue': {
'level': logging.INFO,
},
},
)
logging.config.dictConfig(_logging_config)
<commit_msg>Set root log level to INFO<commit_after>import logging.config
_logging_config = dict(
version=1,
disable_existing_loggers=False,
formatters={
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(message)s'
},
},
handlers={
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
'null': {
'class': 'logging.NullHandler',
}
},
loggers={
'': {
'handlers': ['console'],
'level': logging.INFO,
},
'influxdb': {
'level': logging.INFO,
},
'phue': {
'level': logging.INFO,
},
},
)
logging.config.dictConfig(_logging_config)
|
4ead2d0b2bc987bcc75a5f94c31553a8024aa8a8
|
src/vault.py
|
src/vault.py
|
from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
print("Warning: you should be authenticated with a root token to effectively create a new token here")
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
|
from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def policies_update():
_warning_root_token()
cmd = "VAULT_ADDR=%s vault policy write %s .vault/%s.hcl" % (vault_addr(), vault_policy(), vault_policy())
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
_warning_root_token()
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
def _warning_root_token():
print("Warning: you should probably be authenticated with a root token for this operation")
|
Add task to update policies
|
Add task to update policies
|
Python
|
mit
|
elifesciences/builder,elifesciences/builder
|
from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
print("Warning: you should be authenticated with a root token to effectively create a new token here")
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
Add task to update policies
|
from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def policies_update():
_warning_root_token()
cmd = "VAULT_ADDR=%s vault policy write %s .vault/%s.hcl" % (vault_addr(), vault_policy(), vault_policy())
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
_warning_root_token()
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
def _warning_root_token():
print("Warning: you should probably be authenticated with a root token for this operation")
|
<commit_before>from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
print("Warning: you should be authenticated with a root token to effectively create a new token here")
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
<commit_msg>Add task to update policies<commit_after>
|
# Fabric tasks for managing HashiCorp Vault authentication and tokens.
from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
    """Return the Vault server address from the project defaults."""
    defaults, _ = project.raw_project_map()
    return defaults['aws']['vault']['address']
def vault_policy():
    """Name of the Vault policy used for builder users."""
    return 'builder-user'
@task
def login():
    """Run `vault login` interactively against the configured server."""
    cmd = "VAULT_ADDR=%s vault login" % vault_addr()
    local(cmd)
@task
def logout():
    """Forget the local Vault session by deleting the cached token file."""
    cmd = "rm -f ~/.vault-token"
    local(cmd)
@task
def policies_update():
    """Upload the builder-user policy definition from .vault/<policy>.hcl."""
    _warning_root_token()
    cmd = "VAULT_ADDR=%s vault policy write %s .vault/%s.hcl" % (vault_addr(), vault_policy(), vault_policy())
    local(cmd)
@task
def token_lookup(token):
    """Run `vault token lookup` for the given token."""
    cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
    local(cmd)
@task
def token_create():
    """Create a new builder-user token; prompts for a display name."""
    _warning_root_token()
    token = utils.get_input('token display name: ')
    if not token or not token.strip():
        print("a token display name is required")
        sys.exit(1)
    cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
    local(cmd)
@task
def token_revoke(token):
    """Revoke the given Vault token."""
    cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
    local(cmd)
def _warning_root_token():
    """Remind the operator that a root token is usually required here."""
    print("Warning: you should probably be authenticated with a root token for this operation")
|
from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
print("Warning: you should be authenticated with a root token to effectively create a new token here")
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
Add task to update policiesfrom fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def policies_update():
_warning_root_token()
cmd = "VAULT_ADDR=%s vault policy write %s .vault/%s.hcl" % (vault_addr(), vault_policy(), vault_policy())
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
_warning_root_token()
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
def _warning_root_token():
print("Warning: you should probably be authenticated with a root token for this operation")
|
<commit_before>from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
print("Warning: you should be authenticated with a root token to effectively create a new token here")
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
<commit_msg>Add task to update policies<commit_after>from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def policies_update():
_warning_root_token()
cmd = "VAULT_ADDR=%s vault policy write %s .vault/%s.hcl" % (vault_addr(), vault_policy(), vault_policy())
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
_warning_root_token()
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
def _warning_root_token():
print("Warning: you should probably be authenticated with a root token for this operation")
|
5f6a404f8a4357c3de72b7ac506168e1bec810a8
|
quicksort/quicksort.py
|
quicksort/quicksort.py
|
from random import randint
def sort(arr, start, length):
if length <= 1:
return arr
pivot = choose_pivot(arr, length)
i = j = start + 1
while j < length:
if arr[j] < pivot:
swap(arr, j, i)
i += 1
j += 1
swap(arr, start, i-1)
return (arr, length, pivot)
def swap(arr, x, y):
temp = arr[x]
arr[x] = arr[y]
arr[y] = temp
def choose_pivot(arr, length):
return arr[0]
if __name__ == '__main__':
unsorted = [randint(0, 100) for n in range(100)]
print sort(unsorted, 0, len(unsorted)-1)
|
from random import randint
def sort(arr, start, length):
if length <= 1:
return arr
pivot = choose_pivot(arr, length)
i = j = start + 1
while j < length:
if arr[j] < pivot:
swap(arr, j, i)
i += 1
j += 1
swap(arr, start, i-1)
first_part = sort(arr[start:i], start, i)
second_part = sort(arr[i:length], start, length - i - 1)
return first_part + second_part
def swap(arr, x, y):
temp = arr[x]
arr[x] = arr[y]
arr[y] = temp
def choose_pivot(arr, length):
return arr[0]
if __name__ == '__main__':
unsorted = [randint(0, 100) for n in range(100)]
print sort(unsorted, 0, len(unsorted)-1)
|
Sort list by recursing through both parts
|
Sort list by recursing through both parts
The array is split into two parts: everything up to and including
the pivot, and everything after the pivot. Sort() is called on
each part and the resulting arrays are combined and returned. This
sorts the array.
|
Python
|
mit
|
timpel/stanford-algs,timpel/stanford-algs
|
from random import randint
def sort(arr, start, length):
if length <= 1:
return arr
pivot = choose_pivot(arr, length)
i = j = start + 1
while j < length:
if arr[j] < pivot:
swap(arr, j, i)
i += 1
j += 1
swap(arr, start, i-1)
return (arr, length, pivot)
def swap(arr, x, y):
temp = arr[x]
arr[x] = arr[y]
arr[y] = temp
def choose_pivot(arr, length):
return arr[0]
if __name__ == '__main__':
unsorted = [randint(0, 100) for n in range(100)]
print sort(unsorted, 0, len(unsorted)-1)
Sort list by recursing through both parts
The array is split into two parts: everything up to and including
the pivot, and everything after the pivot. Sort() is called on
each part and the resulting arrays are combined and returned. This
sorts the array.
|
from random import randint
def sort(arr, start, length):
if length <= 1:
return arr
pivot = choose_pivot(arr, length)
i = j = start + 1
while j < length:
if arr[j] < pivot:
swap(arr, j, i)
i += 1
j += 1
swap(arr, start, i-1)
first_part = sort(arr[start:i], start, i)
second_part = sort(arr[i:length], start, length - i - 1)
return first_part + second_part
def swap(arr, x, y):
temp = arr[x]
arr[x] = arr[y]
arr[y] = temp
def choose_pivot(arr, length):
return arr[0]
if __name__ == '__main__':
unsorted = [randint(0, 100) for n in range(100)]
print sort(unsorted, 0, len(unsorted)-1)
|
<commit_before>from random import randint
def sort(arr, start, length):
if length <= 1:
return arr
pivot = choose_pivot(arr, length)
i = j = start + 1
while j < length:
if arr[j] < pivot:
swap(arr, j, i)
i += 1
j += 1
swap(arr, start, i-1)
return (arr, length, pivot)
def swap(arr, x, y):
temp = arr[x]
arr[x] = arr[y]
arr[y] = temp
def choose_pivot(arr, length):
return arr[0]
if __name__ == '__main__':
unsorted = [randint(0, 100) for n in range(100)]
print sort(unsorted, 0, len(unsorted)-1)
<commit_msg>Sort list by recursing through both parts
The array is split into two parts: everything up to and including
the pivot, and everything after the pivot. Sort() is called on
each part and the resulting arrays are combined and returned. This
sorts the array.<commit_after>
|
from random import randint
def sort(arr, start, length):
    """Quicksort ``arr[start:length]`` (end-exclusive) in place and return ``arr``.

    Fixes the original implementation, whose recursive calls passed the
    outer ``start`` into re-based slices (so the sub-ranges were wrong),
    used an incorrect second-half length, and returned a concatenation of
    partially-sorted slices.  The end-exclusive bound matches the original
    partition loop (``while j < length``), so the existing caller
    ``sort(xs, 0, len(xs) - 1)`` keeps its behaviour.
    """
    # Ranges of zero or one element are already sorted.
    if length - start <= 1:
        return arr
    # Lomuto-style partition with the first element of the range as pivot.
    pivot = arr[start]
    i = start + 1  # next slot in the "less than pivot" region
    for j in range(start + 1, length):
        if arr[j] < pivot:
            arr[i], arr[j] = arr[j], arr[i]
            i += 1
    # Move the pivot between the two partitions; it lands at index i - 1.
    arr[start], arr[i - 1] = arr[i - 1], arr[start]
    # Recurse on each side, excluding the pivot's final position.
    sort(arr, start, i - 1)
    sort(arr, i, length)
    return arr
def swap(arr, x, y):
temp = arr[x]
arr[x] = arr[y]
arr[y] = temp
def choose_pivot(arr, length):
return arr[0]
if __name__ == '__main__':
unsorted = [randint(0, 100) for n in range(100)]
print sort(unsorted, 0, len(unsorted)-1)
|
from random import randint
def sort(arr, start, length):
    """Partition arr around a pivot and report the result.

    NOTE(review): despite the name this performs only a single partition
    pass and returns the tuple ``(arr, length, pivot)`` -- it does not
    recurse, so the array is not fully sorted on return.  Also note the
    inconsistent return type: the short-range base case returns ``arr``
    alone rather than a tuple.
    """
    if length <= 1:
        # Base case: nothing to partition (returns arr, not a tuple).
        return arr
    pivot = choose_pivot(arr, length)
    # i marks the boundary of the elements found to be smaller than pivot.
    i = j = start + 1
    while j < length:
        if arr[j] < pivot:
            swap(arr, j, i)
            i += 1
        j += 1
    # Place the pivot between the two partitions.
    swap(arr, start, i-1)
    return (arr, length, pivot)
def swap(arr, x, y):
    """Exchange arr[x] and arr[y] in place."""
    temp = arr[x]
    arr[x] = arr[y]
    arr[y] = temp
def choose_pivot(arr, length):
    """Return the pivot; currently always the first element of arr."""
    return arr[0]
if __name__ == '__main__':
    # Demo run on 100 random ints (Python 2 ``print`` statement).
    unsorted = [randint(0, 100) for n in range(100)]
    print sort(unsorted, 0, len(unsorted)-1)
Sort list by recursing through both parts
The array is split into two parts: everything up to and including
the pivot, and everything after the pivot. Sort() is called on
each part and the resulting arrays are combined and returned. This
sorts the array.from random import randint
def sort(arr, start, length):
if length <= 1:
return arr
pivot = choose_pivot(arr, length)
i = j = start + 1
while j < length:
if arr[j] < pivot:
swap(arr, j, i)
i += 1
j += 1
swap(arr, start, i-1)
first_part = sort(arr[start:i], start, i)
second_part = sort(arr[i:length], start, length - i - 1)
return first_part + second_part
def swap(arr, x, y):
temp = arr[x]
arr[x] = arr[y]
arr[y] = temp
def choose_pivot(arr, length):
return arr[0]
if __name__ == '__main__':
unsorted = [randint(0, 100) for n in range(100)]
print sort(unsorted, 0, len(unsorted)-1)
|
<commit_before>from random import randint
def sort(arr, start, length):
if length <= 1:
return arr
pivot = choose_pivot(arr, length)
i = j = start + 1
while j < length:
if arr[j] < pivot:
swap(arr, j, i)
i += 1
j += 1
swap(arr, start, i-1)
return (arr, length, pivot)
def swap(arr, x, y):
temp = arr[x]
arr[x] = arr[y]
arr[y] = temp
def choose_pivot(arr, length):
return arr[0]
if __name__ == '__main__':
unsorted = [randint(0, 100) for n in range(100)]
print sort(unsorted, 0, len(unsorted)-1)
<commit_msg>Sort list by recursing through both parts
The array is split into two parts: everything up to and including
the pivot, and everything after the pivot. Sort() is called on
each part and the resulting arrays are combined and returned. This
sorts the array.<commit_after>from random import randint
def sort(arr, start, length):
if length <= 1:
return arr
pivot = choose_pivot(arr, length)
i = j = start + 1
while j < length:
if arr[j] < pivot:
swap(arr, j, i)
i += 1
j += 1
swap(arr, start, i-1)
first_part = sort(arr[start:i], start, i)
second_part = sort(arr[i:length], start, length - i - 1)
return first_part + second_part
def swap(arr, x, y):
temp = arr[x]
arr[x] = arr[y]
arr[y] = temp
def choose_pivot(arr, length):
return arr[0]
if __name__ == '__main__':
unsorted = [randint(0, 100) for n in range(100)]
print sort(unsorted, 0, len(unsorted)-1)
|
ed8d7e8a4f474573b629c418898cb88c35366a58
|
scripts/looptmp.py
|
scripts/looptmp.py
|
# Input: ${SGE_TASK_ID}
# Output: generated models in *.B* files, calculated energies in *.E* files
#
from modeller import *
from modeller.automodel import *
import sys
# to get different starting models for each task
taskid = int(sys.argv[1])
env = environ(rand_seed=-1000-taskid)
class myloop(loopmodel):
def select_loop_atoms(self):
res = (
RESIDUE_RANGE
)
s = selection(res)
if len(s.only_no_topology()) > 0:
raise ModellerError, "some selected residues have no topology"
return s
m = myloop(env, inimodel='USER_PDB',
sequence='USER_NAME')
m.loop.md_level = refine.slow
m.loop.starting_model = m.loop.ending_model = taskid
m.make()
|
# Input: ${SGE_TASK_ID}
# Output: generated models in *.B* files, calculated energies in *.E* files
#
from modeller import *
from modeller.automodel import *
import sys
# to get different starting models for each task
taskid = int(sys.argv[1])
env = environ(rand_seed=-1000-taskid)
class MyLoop(loopmodel):
def select_loop_atoms(self):
res = (
RESIDUE_RANGE
)
s = selection(res)
if len(s.only_no_topology()) > 0:
raise ModellerError, "some selected residues have no topology"
return s
m = MyLoop(env, inimodel='USER_PDB',
sequence='USER_NAME')
m.loop.md_level = refine.slow
m.loop.starting_model = m.loop.ending_model = taskid
m.make()
|
Use Python PEP 8 naming for derived class.
|
Use Python PEP 8 naming for derived class.
|
Python
|
lgpl-2.1
|
salilab/modloop,salilab/modloop
|
# Input: ${SGE_TASK_ID}
# Output: generated models in *.B* files, calculated energies in *.E* files
#
# Template script for Modeller loop refinement.  The ALL-CAPS tokens
# (RESIDUE_RANGE, USER_PDB, USER_NAME) are presumably placeholders
# substituted before execution -- TODO confirm against the generator.
# Written for Python 2 (old-style ``raise`` statement below).
from modeller import *
from modeller.automodel import *
import sys
# to get different starting models for each task
taskid = int(sys.argv[1])
env = environ(rand_seed=-1000-taskid)
class myloop(loopmodel):
    def select_loop_atoms(self):
        """Return the selection of loop atoms to refine.

        Raises ModellerError when any selected residue lacks topology.
        """
        res = (
               RESIDUE_RANGE
              )
        s = selection(res)
        if len(s.only_no_topology()) > 0:
            raise ModellerError, "some selected residues have no topology"
        return s
m = myloop(env, inimodel='USER_PDB',
           sequence='USER_NAME')
# Slow (thorough) MD-based refinement of the loop conformation.
m.loop.md_level = refine.slow
# One model per task: model index equals the task id.
m.loop.starting_model = m.loop.ending_model = taskid
m.make()
Use Python PEP 8 naming for derived class.
|
# Input: ${SGE_TASK_ID}
# Output: generated models in *.B* files, calculated energies in *.E* files
#
# Template script for Modeller loop refinement.  The ALL-CAPS tokens
# (RESIDUE_RANGE, USER_PDB, USER_NAME) are presumably placeholders
# substituted before execution -- TODO confirm against the generator.
# Written for Python 2 (old-style ``raise`` statement below).
from modeller import *
from modeller.automodel import *
import sys
# to get different starting models for each task
taskid = int(sys.argv[1])
env = environ(rand_seed=-1000-taskid)
class MyLoop(loopmodel):
    def select_loop_atoms(self):
        """Return the selection of loop atoms to refine.

        Raises ModellerError when any selected residue lacks topology.
        """
        res = (
               RESIDUE_RANGE
              )
        s = selection(res)
        if len(s.only_no_topology()) > 0:
            raise ModellerError, "some selected residues have no topology"
        return s
m = MyLoop(env, inimodel='USER_PDB',
           sequence='USER_NAME')
# Slow (thorough) MD-based refinement of the loop conformation.
m.loop.md_level = refine.slow
# One model per task: model index equals the task id.
m.loop.starting_model = m.loop.ending_model = taskid
m.make()
|
<commit_before># Input: ${SGE_TASK_ID}
# Output: generated models in *.B* files, calculated energies in *.E* files
#
from modeller import *
from modeller.automodel import *
import sys
# to get different starting models for each task
taskid = int(sys.argv[1])
env = environ(rand_seed=-1000-taskid)
class myloop(loopmodel):
def select_loop_atoms(self):
res = (
RESIDUE_RANGE
)
s = selection(res)
if len(s.only_no_topology()) > 0:
raise ModellerError, "some selected residues have no topology"
return s
m = myloop(env, inimodel='USER_PDB',
sequence='USER_NAME')
m.loop.md_level = refine.slow
m.loop.starting_model = m.loop.ending_model = taskid
m.make()
<commit_msg>Use Python PEP 8 naming for derived class.<commit_after>
|
# Input: ${SGE_TASK_ID}
# Output: generated models in *.B* files, calculated energies in *.E* files
#
from modeller import *
from modeller.automodel import *
import sys
# to get different starting models for each task
taskid = int(sys.argv[1])
env = environ(rand_seed=-1000-taskid)
class MyLoop(loopmodel):
def select_loop_atoms(self):
res = (
RESIDUE_RANGE
)
s = selection(res)
if len(s.only_no_topology()) > 0:
raise ModellerError, "some selected residues have no topology"
return s
m = MyLoop(env, inimodel='USER_PDB',
sequence='USER_NAME')
m.loop.md_level = refine.slow
m.loop.starting_model = m.loop.ending_model = taskid
m.make()
|
# Input: ${SGE_TASK_ID}
# Output: generated models in *.B* files, calculated energies in *.E* files
#
from modeller import *
from modeller.automodel import *
import sys
# to get different starting models for each task
taskid = int(sys.argv[1])
env = environ(rand_seed=-1000-taskid)
class myloop(loopmodel):
def select_loop_atoms(self):
res = (
RESIDUE_RANGE
)
s = selection(res)
if len(s.only_no_topology()) > 0:
raise ModellerError, "some selected residues have no topology"
return s
m = myloop(env, inimodel='USER_PDB',
sequence='USER_NAME')
m.loop.md_level = refine.slow
m.loop.starting_model = m.loop.ending_model = taskid
m.make()
Use Python PEP 8 naming for derived class.# Input: ${SGE_TASK_ID}
# Output: generated models in *.B* files, calculated energies in *.E* files
#
from modeller import *
from modeller.automodel import *
import sys
# to get different starting models for each task
taskid = int(sys.argv[1])
env = environ(rand_seed=-1000-taskid)
class MyLoop(loopmodel):
def select_loop_atoms(self):
res = (
RESIDUE_RANGE
)
s = selection(res)
if len(s.only_no_topology()) > 0:
raise ModellerError, "some selected residues have no topology"
return s
m = MyLoop(env, inimodel='USER_PDB',
sequence='USER_NAME')
m.loop.md_level = refine.slow
m.loop.starting_model = m.loop.ending_model = taskid
m.make()
|
<commit_before># Input: ${SGE_TASK_ID}
# Output: generated models in *.B* files, calculated energies in *.E* files
#
from modeller import *
from modeller.automodel import *
import sys
# to get different starting models for each task
taskid = int(sys.argv[1])
env = environ(rand_seed=-1000-taskid)
class myloop(loopmodel):
def select_loop_atoms(self):
res = (
RESIDUE_RANGE
)
s = selection(res)
if len(s.only_no_topology()) > 0:
raise ModellerError, "some selected residues have no topology"
return s
m = myloop(env, inimodel='USER_PDB',
sequence='USER_NAME')
m.loop.md_level = refine.slow
m.loop.starting_model = m.loop.ending_model = taskid
m.make()
<commit_msg>Use Python PEP 8 naming for derived class.<commit_after># Input: ${SGE_TASK_ID}
# Output: generated models in *.B* files, calculated energies in *.E* files
#
from modeller import *
from modeller.automodel import *
import sys
# to get different starting models for each task
taskid = int(sys.argv[1])
env = environ(rand_seed=-1000-taskid)
class MyLoop(loopmodel):
def select_loop_atoms(self):
res = (
RESIDUE_RANGE
)
s = selection(res)
if len(s.only_no_topology()) > 0:
raise ModellerError, "some selected residues have no topology"
return s
m = MyLoop(env, inimodel='USER_PDB',
sequence='USER_NAME')
m.loop.md_level = refine.slow
m.loop.starting_model = m.loop.ending_model = taskid
m.make()
|
ee070606be405b86bfcc6e6796bbe322a78511ed
|
ui/assetmanager.py
|
ui/assetmanager.py
|
"""Loads and manages art assets"""
import pyglet
_ASSET_PATHS = ["res"]
_ASSET_FILE_NAMES = [
"black_key_down.png",
"black_key_up.png",
"white_key_down.png",
"white_key_up.png",
"staff_line.png",
]
class Assets(object):
_loadedAssets = None
@staticmethod
def loadAssets():
Assets._loadedAssets = dict()
Assets._updateResourcePath()
for f in _ASSET_FILE_NAMES:
Assets.loadAsset(f)
@staticmethod
def loadAsset(filename):
Assets._loadedAssets[filename] = pyglet.resource.image(filename)
@staticmethod
def _updateResourcePath():
for p in _ASSET_PATHS:
pyglet.resource.path.append(p)
pyglet.resource.reindex()
@staticmethod
def get(filename):
if Assets._loadedAssets is None:
raise RuntimeError("You must initialize the asset manager before "
"retrieving assets")
return Assets._loadedAssets[filename]
|
"""Loads and manages art assets"""
import pyglet
import os

_ASSET_PATHS = ["res"]
_ASSET_FILE_NAMES = [
    "black_key_down.png",
    "black_key_up.png",
    "white_key_down.png",
    "white_key_up.png",
    "staff_line.png",
]


class Assets(object):
    # Cache mapping filename -> loaded pyglet image; None until
    # loadAssets() has been called.
    _loadedAssets = None

    @staticmethod
    def loadAssets():
        """Register the resource paths and load every known asset."""
        Assets._loadedAssets = {}
        Assets._updateResourcePath()
        for asset_name in _ASSET_FILE_NAMES:
            Assets.loadAsset(asset_name)

    @staticmethod
    def loadAsset(filename):
        """Load one image asset and cache it under its filename."""
        Assets._loadedAssets[filename] = pyglet.resource.image(filename)

    @staticmethod
    def _updateResourcePath():
        # Anchor each configured path at the current working directory
        # before asking pyglet to rebuild its resource index.
        for relative_path in _ASSET_PATHS:
            absolute_path = os.path.join(os.getcwd(), relative_path)
            pyglet.resource.path.append(absolute_path)
        pyglet.resource.reindex()

    @staticmethod
    def get(filename):
        """Return the previously loaded image for *filename*.

        Raises RuntimeError if loadAssets() has not been called yet.
        """
        if Assets._loadedAssets is None:
            raise RuntimeError("You must initialize the asset manager before "
                               "retrieving assets")
        return Assets._loadedAssets[filename]
|
Use absolute resource path in Pyglet
|
Use absolute resource path in Pyglet
It appears that a recent change in Pyglet causes relative paths to fail here.
|
Python
|
bsd-2-clause
|
aschmied/keyzer
|
"""Loads and manages art assets"""
import os
import pyglet

_ASSET_PATHS = ["res"]
_ASSET_FILE_NAMES = [
    "black_key_down.png",
    "black_key_up.png",
    "white_key_down.png",
    "white_key_up.png",
    "staff_line.png",
]


class Assets(object):
    # Cache mapping filename -> loaded pyglet image; None until
    # loadAssets() has been called.
    _loadedAssets = None

    @staticmethod
    def loadAssets():
        """Register the resource paths and load every known asset."""
        Assets._loadedAssets = dict()
        Assets._updateResourcePath()
        for f in _ASSET_FILE_NAMES:
            Assets.loadAsset(f)

    @staticmethod
    def loadAsset(filename):
        """Load one image asset and cache it under its filename."""
        Assets._loadedAssets[filename] = pyglet.resource.image(filename)

    @staticmethod
    def _updateResourcePath():
        # BUGFIX: recent pyglet releases do not resolve relative resource
        # paths reliably, so anchor each entry at the current working
        # directory before reindexing.
        for p in _ASSET_PATHS:
            pyglet.resource.path.append(os.path.join(os.getcwd(), p))
        pyglet.resource.reindex()

    @staticmethod
    def get(filename):
        """Return the previously loaded image for *filename*.

        Raises RuntimeError if loadAssets() has not been called yet.
        """
        if Assets._loadedAssets is None:
            raise RuntimeError("You must initialize the asset manager before "
                               "retrieving assets")
        return Assets._loadedAssets[filename]
Use absolute resource path in Pyglet
It appears that a recent change in Pyglet causes relative paths to fail here.
|
"""Loads and manages art assets"""
import pyglet
import os
_ASSET_PATHS = ["res"]
_ASSET_FILE_NAMES = [
"black_key_down.png",
"black_key_up.png",
"white_key_down.png",
"white_key_up.png",
"staff_line.png",
]
class Assets(object):
_loadedAssets = None
@staticmethod
def loadAssets():
Assets._loadedAssets = dict()
Assets._updateResourcePath()
for f in _ASSET_FILE_NAMES:
Assets.loadAsset(f)
@staticmethod
def loadAsset(filename):
Assets._loadedAssets[filename] = pyglet.resource.image(filename)
@staticmethod
def _updateResourcePath():
for p in _ASSET_PATHS:
pyglet.resource.path.append(os.path.join(os.getcwd(), p))
pyglet.resource.reindex()
@staticmethod
def get(filename):
if Assets._loadedAssets is None:
raise RuntimeError("You must initialize the asset manager before "
"retrieving assets")
return Assets._loadedAssets[filename]
|
<commit_before>"""Loads and manages art assets"""
import pyglet
_ASSET_PATHS = ["res"]
_ASSET_FILE_NAMES = [
"black_key_down.png",
"black_key_up.png",
"white_key_down.png",
"white_key_up.png",
"staff_line.png",
]
class Assets(object):
_loadedAssets = None
@staticmethod
def loadAssets():
Assets._loadedAssets = dict()
Assets._updateResourcePath()
for f in _ASSET_FILE_NAMES:
Assets.loadAsset(f)
@staticmethod
def loadAsset(filename):
Assets._loadedAssets[filename] = pyglet.resource.image(filename)
@staticmethod
def _updateResourcePath():
for p in _ASSET_PATHS:
pyglet.resource.path.append(p)
pyglet.resource.reindex()
@staticmethod
def get(filename):
if Assets._loadedAssets is None:
raise RuntimeError("You must initialize the asset manager before "
"retrieving assets")
return Assets._loadedAssets[filename]
<commit_msg>Use absolute resource path in Pyglet
It appears that a recent change in Pyglet causes relative paths to fail here.<commit_after>
|
"""Loads and manages art assets"""
import pyglet
import os
_ASSET_PATHS = ["res"]
_ASSET_FILE_NAMES = [
"black_key_down.png",
"black_key_up.png",
"white_key_down.png",
"white_key_up.png",
"staff_line.png",
]
class Assets(object):
_loadedAssets = None
@staticmethod
def loadAssets():
Assets._loadedAssets = dict()
Assets._updateResourcePath()
for f in _ASSET_FILE_NAMES:
Assets.loadAsset(f)
@staticmethod
def loadAsset(filename):
Assets._loadedAssets[filename] = pyglet.resource.image(filename)
@staticmethod
def _updateResourcePath():
for p in _ASSET_PATHS:
pyglet.resource.path.append(os.path.join(os.getcwd(), p))
pyglet.resource.reindex()
@staticmethod
def get(filename):
if Assets._loadedAssets is None:
raise RuntimeError("You must initialize the asset manager before "
"retrieving assets")
return Assets._loadedAssets[filename]
|
"""Loads and manages art assets"""
import pyglet
_ASSET_PATHS = ["res"]
_ASSET_FILE_NAMES = [
"black_key_down.png",
"black_key_up.png",
"white_key_down.png",
"white_key_up.png",
"staff_line.png",
]
class Assets(object):
_loadedAssets = None
@staticmethod
def loadAssets():
Assets._loadedAssets = dict()
Assets._updateResourcePath()
for f in _ASSET_FILE_NAMES:
Assets.loadAsset(f)
@staticmethod
def loadAsset(filename):
Assets._loadedAssets[filename] = pyglet.resource.image(filename)
@staticmethod
def _updateResourcePath():
for p in _ASSET_PATHS:
pyglet.resource.path.append(p)
pyglet.resource.reindex()
@staticmethod
def get(filename):
if Assets._loadedAssets is None:
raise RuntimeError("You must initialize the asset manager before "
"retrieving assets")
return Assets._loadedAssets[filename]
Use absolute resource path in Pyglet
It appears that a recent change in Pyglet causes relative paths to fail here."""Loads and manages art assets"""
import pyglet
import os
_ASSET_PATHS = ["res"]
_ASSET_FILE_NAMES = [
"black_key_down.png",
"black_key_up.png",
"white_key_down.png",
"white_key_up.png",
"staff_line.png",
]
class Assets(object):
_loadedAssets = None
@staticmethod
def loadAssets():
Assets._loadedAssets = dict()
Assets._updateResourcePath()
for f in _ASSET_FILE_NAMES:
Assets.loadAsset(f)
@staticmethod
def loadAsset(filename):
Assets._loadedAssets[filename] = pyglet.resource.image(filename)
@staticmethod
def _updateResourcePath():
for p in _ASSET_PATHS:
pyglet.resource.path.append(os.path.join(os.getcwd(), p))
pyglet.resource.reindex()
@staticmethod
def get(filename):
if Assets._loadedAssets is None:
raise RuntimeError("You must initialize the asset manager before "
"retrieving assets")
return Assets._loadedAssets[filename]
|
<commit_before>"""Loads and manages art assets"""
import pyglet
_ASSET_PATHS = ["res"]
_ASSET_FILE_NAMES = [
"black_key_down.png",
"black_key_up.png",
"white_key_down.png",
"white_key_up.png",
"staff_line.png",
]
class Assets(object):
_loadedAssets = None
@staticmethod
def loadAssets():
Assets._loadedAssets = dict()
Assets._updateResourcePath()
for f in _ASSET_FILE_NAMES:
Assets.loadAsset(f)
@staticmethod
def loadAsset(filename):
Assets._loadedAssets[filename] = pyglet.resource.image(filename)
@staticmethod
def _updateResourcePath():
for p in _ASSET_PATHS:
pyglet.resource.path.append(p)
pyglet.resource.reindex()
@staticmethod
def get(filename):
if Assets._loadedAssets is None:
raise RuntimeError("You must initialize the asset manager before "
"retrieving assets")
return Assets._loadedAssets[filename]
<commit_msg>Use absolute resource path in Pyglet
It appears that a recent change in Pyglet causes relative paths to fail here.<commit_after>"""Loads and manages art assets"""
import pyglet
import os
_ASSET_PATHS = ["res"]
_ASSET_FILE_NAMES = [
"black_key_down.png",
"black_key_up.png",
"white_key_down.png",
"white_key_up.png",
"staff_line.png",
]
class Assets(object):
_loadedAssets = None
@staticmethod
def loadAssets():
Assets._loadedAssets = dict()
Assets._updateResourcePath()
for f in _ASSET_FILE_NAMES:
Assets.loadAsset(f)
@staticmethod
def loadAsset(filename):
Assets._loadedAssets[filename] = pyglet.resource.image(filename)
@staticmethod
def _updateResourcePath():
for p in _ASSET_PATHS:
pyglet.resource.path.append(os.path.join(os.getcwd(), p))
pyglet.resource.reindex()
@staticmethod
def get(filename):
if Assets._loadedAssets is None:
raise RuntimeError("You must initialize the asset manager before "
"retrieving assets")
return Assets._loadedAssets[filename]
|
6e1cef7cc97640dc745a8b8975cd5d31d0ffa485
|
selectable/base.py
|
selectable/base.py
|
from django.core.urlresolvers import reverse
from django.utils.encoding import smart_unicode
class LookupBase(object):
def _name(cls):
app_name = cls.__module__.split('.')[-2].lower()
class_name = cls.__name__.lower()
name = u'%s-%s' % (app_name, class_name)
return name
name = classmethod(_name)
def _url(cls):
return reverse('selectable-lookup', args=[cls.name()])
url = classmethod(_url)
def get_query(self, request):
return []
def get_item_name(self, item):
return smart_unicode(item)
def get_item_id(self, item):
return smart_unicode(item)
def get_item_value(self, item):
return smart_unicode(item)
def format_item(self, item):
return {
'id': self.get_item_id(item),
'value': self.get_item_value(item),
'name': self.get_item_name(item)
}
|
from django.core.urlresolvers import reverse
from django.utils.encoding import smart_unicode


class LookupBase(object):
    """Base class for selectable lookups; subclasses supply the query."""

    def _name(cls):
        # Registry name is "<app>-<class>", both lowercased; the app name
        # is the next-to-last component of the module path.
        app_name = cls.__module__.split('.')[-2].lower()
        class_name = cls.__name__.lower()
        return u'-'.join((app_name, class_name))
    name = classmethod(_name)

    def _url(cls):
        # Reverse the shared lookup view, parameterized by this lookup's name.
        return reverse('selectable-lookup', args=[cls.name()])
    url = classmethod(_url)

    def get_query(self, request):
        """Return the items matching *request*; subclasses override."""
        return []

    def get_item_label(self, item):
        """Return the label text for *item*."""
        return smart_unicode(item)

    def get_item_id(self, item):
        """Return the identifier for *item*."""
        return smart_unicode(item)

    def get_item_value(self, item):
        """Return the value for *item*."""
        return smart_unicode(item)

    def format_item(self, item):
        """Serialize *item* into the id/value/label dict."""
        return dict(id=self.get_item_id(item),
                    value=self.get_item_value(item),
                    label=self.get_item_label(item))
|
Change to return 'label' not 'name'.
|
Change to return 'label' not 'name'.
|
Python
|
bsd-2-clause
|
makinacorpus/django-selectable,mlavin/django-selectable,affan2/django-selectable,affan2/django-selectable,mlavin/django-selectable,makinacorpus/django-selectable,mlavin/django-selectable,affan2/django-selectable
|
from django.core.urlresolvers import reverse
from django.utils.encoding import smart_unicode
class LookupBase(object):
    """Base class for selectable lookups; subclasses supply the query."""
    def _name(cls):
        # Registry name is "<app>-<class>", both lowercased; the app name
        # is the next-to-last component of the module path.
        app_name = cls.__module__.split('.')[-2].lower()
        class_name = cls.__name__.lower()
        name = u'%s-%s' % (app_name, class_name)
        return name
    name = classmethod(_name)
    def _url(cls):
        # Reverse the shared lookup view, parameterized by this lookup's name.
        return reverse('selectable-lookup', args=[cls.name()])
    url = classmethod(_url)
    def get_query(self, request):
        """Return the items matching *request*; subclasses override."""
        return []
    def get_item_name(self, item):
        """Return the display name for *item*."""
        return smart_unicode(item)
    def get_item_id(self, item):
        """Return the identifier for *item*."""
        return smart_unicode(item)
    def get_item_value(self, item):
        """Return the value for *item*."""
        return smart_unicode(item)
    def format_item(self, item):
        """Serialize *item* into the id/value/name dict."""
        return {
            'id': self.get_item_id(item),
            'value': self.get_item_value(item),
            'name': self.get_item_name(item)
        }
Change to return 'label' not 'name'.
|
from django.core.urlresolvers import reverse
from django.utils.encoding import smart_unicode
class LookupBase(object):
def _name(cls):
app_name = cls.__module__.split('.')[-2].lower()
class_name = cls.__name__.lower()
name = u'%s-%s' % (app_name, class_name)
return name
name = classmethod(_name)
def _url(cls):
return reverse('selectable-lookup', args=[cls.name()])
url = classmethod(_url)
def get_query(self, request):
return []
def get_item_label(self, item):
return smart_unicode(item)
def get_item_id(self, item):
return smart_unicode(item)
def get_item_value(self, item):
return smart_unicode(item)
def format_item(self, item):
return {
'id': self.get_item_id(item),
'value': self.get_item_value(item),
'label': self.get_item_label(item)
}
|
<commit_before>from django.core.urlresolvers import reverse
from django.utils.encoding import smart_unicode
class LookupBase(object):
def _name(cls):
app_name = cls.__module__.split('.')[-2].lower()
class_name = cls.__name__.lower()
name = u'%s-%s' % (app_name, class_name)
return name
name = classmethod(_name)
def _url(cls):
return reverse('selectable-lookup', args=[cls.name()])
url = classmethod(_url)
def get_query(self, request):
return []
def get_item_name(self, item):
return smart_unicode(item)
def get_item_id(self, item):
return smart_unicode(item)
def get_item_value(self, item):
return smart_unicode(item)
def format_item(self, item):
return {
'id': self.get_item_id(item),
'value': self.get_item_value(item),
'name': self.get_item_name(item)
}
<commit_msg>Change to return 'label' not 'name'.<commit_after>
|
from django.core.urlresolvers import reverse
from django.utils.encoding import smart_unicode
class LookupBase(object):
def _name(cls):
app_name = cls.__module__.split('.')[-2].lower()
class_name = cls.__name__.lower()
name = u'%s-%s' % (app_name, class_name)
return name
name = classmethod(_name)
def _url(cls):
return reverse('selectable-lookup', args=[cls.name()])
url = classmethod(_url)
def get_query(self, request):
return []
def get_item_label(self, item):
return smart_unicode(item)
def get_item_id(self, item):
return smart_unicode(item)
def get_item_value(self, item):
return smart_unicode(item)
def format_item(self, item):
return {
'id': self.get_item_id(item),
'value': self.get_item_value(item),
'label': self.get_item_label(item)
}
|
from django.core.urlresolvers import reverse
from django.utils.encoding import smart_unicode
class LookupBase(object):
def _name(cls):
app_name = cls.__module__.split('.')[-2].lower()
class_name = cls.__name__.lower()
name = u'%s-%s' % (app_name, class_name)
return name
name = classmethod(_name)
def _url(cls):
return reverse('selectable-lookup', args=[cls.name()])
url = classmethod(_url)
def get_query(self, request):
return []
def get_item_name(self, item):
return smart_unicode(item)
def get_item_id(self, item):
return smart_unicode(item)
def get_item_value(self, item):
return smart_unicode(item)
def format_item(self, item):
return {
'id': self.get_item_id(item),
'value': self.get_item_value(item),
'name': self.get_item_name(item)
}
Change to return 'label' not 'name'.from django.core.urlresolvers import reverse
from django.utils.encoding import smart_unicode
class LookupBase(object):
def _name(cls):
app_name = cls.__module__.split('.')[-2].lower()
class_name = cls.__name__.lower()
name = u'%s-%s' % (app_name, class_name)
return name
name = classmethod(_name)
def _url(cls):
return reverse('selectable-lookup', args=[cls.name()])
url = classmethod(_url)
def get_query(self, request):
return []
def get_item_label(self, item):
return smart_unicode(item)
def get_item_id(self, item):
return smart_unicode(item)
def get_item_value(self, item):
return smart_unicode(item)
def format_item(self, item):
return {
'id': self.get_item_id(item),
'value': self.get_item_value(item),
'label': self.get_item_label(item)
}
|
<commit_before>from django.core.urlresolvers import reverse
from django.utils.encoding import smart_unicode
class LookupBase(object):
def _name(cls):
app_name = cls.__module__.split('.')[-2].lower()
class_name = cls.__name__.lower()
name = u'%s-%s' % (app_name, class_name)
return name
name = classmethod(_name)
def _url(cls):
return reverse('selectable-lookup', args=[cls.name()])
url = classmethod(_url)
def get_query(self, request):
return []
def get_item_name(self, item):
return smart_unicode(item)
def get_item_id(self, item):
return smart_unicode(item)
def get_item_value(self, item):
return smart_unicode(item)
def format_item(self, item):
return {
'id': self.get_item_id(item),
'value': self.get_item_value(item),
'name': self.get_item_name(item)
}
<commit_msg>Change to return 'label' not 'name'.<commit_after>from django.core.urlresolvers import reverse
from django.utils.encoding import smart_unicode
class LookupBase(object):
def _name(cls):
app_name = cls.__module__.split('.')[-2].lower()
class_name = cls.__name__.lower()
name = u'%s-%s' % (app_name, class_name)
return name
name = classmethod(_name)
def _url(cls):
return reverse('selectable-lookup', args=[cls.name()])
url = classmethod(_url)
def get_query(self, request):
return []
def get_item_label(self, item):
return smart_unicode(item)
def get_item_id(self, item):
return smart_unicode(item)
def get_item_value(self, item):
return smart_unicode(item)
def format_item(self, item):
return {
'id': self.get_item_id(item),
'value': self.get_item_value(item),
'label': self.get_item_label(item)
}
|
dd9f11f36668717ee349b357b2f32a7a52e38863
|
pagerduty_events_api/pagerduty_incident.py
|
pagerduty_events_api/pagerduty_incident.py
|
from pagerduty_events_api.pagerduty_rest_client import PagerdutyRestClient
class PagerdutyIncident:
def __init__(self, service_key, incident_key):
self.__service_key = service_key
self.__incident_key = incident_key
def get_service_key(self):
return self.__service_key
def get_incident_key(self):
return self.__incident_key
def acknowledge(self):
payload = {'service_key': self.__service_key,
'event_type': 'acknowledge',
'incident_key': self.__incident_key}
PagerdutyRestClient().post(payload)
def resolve(self):
payload = {'service_key': self.__service_key,
'event_type': 'resolve',
'incident_key': self.__incident_key}
PagerdutyRestClient().post(payload)
|
from pagerduty_events_api.pagerduty_rest_client import PagerdutyRestClient
class PagerdutyIncident:
    """One PagerDuty incident, addressed by service key + incident key."""
    def __init__(self, service_key, incident_key):
        # Both keys are kept private; exposed via the getters below.
        self.__service_key = service_key
        self.__incident_key = incident_key
    def get_service_key(self):
        """Return the service key this incident belongs to."""
        return self.__service_key
    def get_incident_key(self):
        """Return the key identifying this incident."""
        return self.__incident_key
    def acknowledge(self):
        """Send an 'acknowledge' event for this incident."""
        self.__send_request_with_event_type('acknowledge')
    def resolve(self):
        """Send a 'resolve' event for this incident."""
        self.__send_request_with_event_type('resolve')
    def __send_request_with_event_type(self, event_type):
        # Both state transitions share the same payload shape; only the
        # event_type field differs.
        payload = {'service_key': self.__service_key,
                   'event_type': event_type,
                   'incident_key': self.__incident_key}
        PagerdutyRestClient().post(payload)
|
Remove code duplication from PD Incident class.
|
Remove code duplication from PD Incident class.
|
Python
|
mit
|
BlasiusVonSzerencsi/pagerduty-events-api
|
from pagerduty_events_api.pagerduty_rest_client import PagerdutyRestClient


class PagerdutyIncident:
    """One PagerDuty incident, addressed by service key + incident key."""

    def __init__(self, service_key, incident_key):
        # Both keys are kept private; exposed via the getters below.
        self.__service_key = service_key
        self.__incident_key = incident_key

    def get_service_key(self):
        """Return the service key this incident belongs to."""
        return self.__service_key

    def get_incident_key(self):
        """Return the key identifying this incident."""
        return self.__incident_key

    def acknowledge(self):
        """Send an 'acknowledge' event for this incident."""
        self.__send_request_with_event_type('acknowledge')

    def resolve(self):
        """Send a 'resolve' event for this incident."""
        self.__send_request_with_event_type('resolve')

    def __send_request_with_event_type(self, event_type):
        # Deduplicated from acknowledge()/resolve(): both transitions share
        # the same payload shape, only the event_type field differs.
        payload = {'service_key': self.__service_key,
                   'event_type': event_type,
                   'incident_key': self.__incident_key}
        PagerdutyRestClient().post(payload)
Remove code duplication from PD Incident class.
|
from pagerduty_events_api.pagerduty_rest_client import PagerdutyRestClient
class PagerdutyIncident:
def __init__(self, service_key, incident_key):
self.__service_key = service_key
self.__incident_key = incident_key
def get_service_key(self):
return self.__service_key
def get_incident_key(self):
return self.__incident_key
def acknowledge(self):
self.__send_request_with_event_type('acknowledge')
def resolve(self):
self.__send_request_with_event_type('resolve')
def __send_request_with_event_type(self, event_type):
payload = {'service_key': self.__service_key,
'event_type': event_type,
'incident_key': self.__incident_key}
PagerdutyRestClient().post(payload)
|
<commit_before>from pagerduty_events_api.pagerduty_rest_client import PagerdutyRestClient
class PagerdutyIncident:
def __init__(self, service_key, incident_key):
self.__service_key = service_key
self.__incident_key = incident_key
def get_service_key(self):
return self.__service_key
def get_incident_key(self):
return self.__incident_key
def acknowledge(self):
payload = {'service_key': self.__service_key,
'event_type': 'acknowledge',
'incident_key': self.__incident_key}
PagerdutyRestClient().post(payload)
def resolve(self):
payload = {'service_key': self.__service_key,
'event_type': 'resolve',
'incident_key': self.__incident_key}
PagerdutyRestClient().post(payload)
<commit_msg>Remove code duplication from PD Incident class.<commit_after>
|
from pagerduty_events_api.pagerduty_rest_client import PagerdutyRestClient
class PagerdutyIncident:
def __init__(self, service_key, incident_key):
self.__service_key = service_key
self.__incident_key = incident_key
def get_service_key(self):
return self.__service_key
def get_incident_key(self):
return self.__incident_key
def acknowledge(self):
self.__send_request_with_event_type('acknowledge')
def resolve(self):
self.__send_request_with_event_type('resolve')
def __send_request_with_event_type(self, event_type):
payload = {'service_key': self.__service_key,
'event_type': event_type,
'incident_key': self.__incident_key}
PagerdutyRestClient().post(payload)
|
from pagerduty_events_api.pagerduty_rest_client import PagerdutyRestClient
class PagerdutyIncident:
def __init__(self, service_key, incident_key):
self.__service_key = service_key
self.__incident_key = incident_key
def get_service_key(self):
return self.__service_key
def get_incident_key(self):
return self.__incident_key
def acknowledge(self):
payload = {'service_key': self.__service_key,
'event_type': 'acknowledge',
'incident_key': self.__incident_key}
PagerdutyRestClient().post(payload)
def resolve(self):
payload = {'service_key': self.__service_key,
'event_type': 'resolve',
'incident_key': self.__incident_key}
PagerdutyRestClient().post(payload)
Remove code duplication from PD Incident class.from pagerduty_events_api.pagerduty_rest_client import PagerdutyRestClient
class PagerdutyIncident:
def __init__(self, service_key, incident_key):
self.__service_key = service_key
self.__incident_key = incident_key
def get_service_key(self):
return self.__service_key
def get_incident_key(self):
return self.__incident_key
def acknowledge(self):
self.__send_request_with_event_type('acknowledge')
def resolve(self):
self.__send_request_with_event_type('resolve')
def __send_request_with_event_type(self, event_type):
payload = {'service_key': self.__service_key,
'event_type': event_type,
'incident_key': self.__incident_key}
PagerdutyRestClient().post(payload)
|
<commit_before>from pagerduty_events_api.pagerduty_rest_client import PagerdutyRestClient
class PagerdutyIncident:
def __init__(self, service_key, incident_key):
self.__service_key = service_key
self.__incident_key = incident_key
def get_service_key(self):
return self.__service_key
def get_incident_key(self):
return self.__incident_key
def acknowledge(self):
payload = {'service_key': self.__service_key,
'event_type': 'acknowledge',
'incident_key': self.__incident_key}
PagerdutyRestClient().post(payload)
def resolve(self):
payload = {'service_key': self.__service_key,
'event_type': 'resolve',
'incident_key': self.__incident_key}
PagerdutyRestClient().post(payload)
<commit_msg>Remove code duplication from PD Incident class.<commit_after>from pagerduty_events_api.pagerduty_rest_client import PagerdutyRestClient
class PagerdutyIncident:
def __init__(self, service_key, incident_key):
self.__service_key = service_key
self.__incident_key = incident_key
def get_service_key(self):
return self.__service_key
def get_incident_key(self):
return self.__incident_key
def acknowledge(self):
self.__send_request_with_event_type('acknowledge')
def resolve(self):
self.__send_request_with_event_type('resolve')
def __send_request_with_event_type(self, event_type):
payload = {'service_key': self.__service_key,
'event_type': event_type,
'incident_key': self.__incident_key}
PagerdutyRestClient().post(payload)
|
1cf7cbf8c1ed35e70e92b39d2475b120ad17ae3d
|
server/settings.py
|
server/settings.py
|
# TODO @Sumukh Better Secret Management System
class TestConfig(object):
DEBUG = True
SECRET_KEY = 'Testing*ok*server*'
RESTFUL_JSON = {'indent': 4}
TESTING_LOGIN = True # Do NOT turn on for prod
class DevConfig(TestConfig):
ENV = 'dev'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
CACHE_TYPE = 'simple'
ASSETS_DEBUG = True
class TestConfig(TestConfig):
ENV = 'test'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
SQLALCHEMY_ECHO = True
CACHE_TYPE = 'simple'
WTF_CSRF_ENABLED = False
class Config:
SECRET_KEY = 'samplekey'
class ProdConfig(Config):
# TODO Move to secret file
ENV = 'prod'
SQLALCHEMY_DATABASE_URI = 'postgresql://user:@localhost:5432/okprod'
CACHE_TYPE = 'simple'
|
# TODO @Sumukh Better Secret Management System
class TestConfig(object):
DEBUG = True
SECRET_KEY = 'Testing*ok*server*'
RESTFUL_JSON = {'indent': 4}
TESTING_LOGIN = True
class DevConfig(TestConfig):
ENV = 'dev'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
CACHE_TYPE = 'simple'
ASSETS_DEBUG = True
class TestConfig(TestConfig):
ENV = 'test'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
SQLALCHEMY_ECHO = True
CACHE_TYPE = 'simple'
WTF_CSRF_ENABLED = False
class Config:
SECRET_KEY = 'samplekey'
class ProdConfig(Config):
# TODO Move to secret file
ENV = 'prod'
SQLALCHEMY_DATABASE_URI = 'postgresql://user:@localhost:5432/okprod'
CACHE_TYPE = 'simple'
|
Remove ominous TESTING_LOGIN config comment
|
Remove ominous TESTING_LOGIN config comment
|
Python
|
apache-2.0
|
Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok
|
# TODO @Sumukh Better Secret Management System
class TestConfig(object):
DEBUG = True
SECRET_KEY = 'Testing*ok*server*'
RESTFUL_JSON = {'indent': 4}
TESTING_LOGIN = True # Do NOT turn on for prod
class DevConfig(TestConfig):
ENV = 'dev'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
CACHE_TYPE = 'simple'
ASSETS_DEBUG = True
class TestConfig(TestConfig):
ENV = 'test'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
SQLALCHEMY_ECHO = True
CACHE_TYPE = 'simple'
WTF_CSRF_ENABLED = False
class Config:
SECRET_KEY = 'samplekey'
class ProdConfig(Config):
# TODO Move to secret file
ENV = 'prod'
SQLALCHEMY_DATABASE_URI = 'postgresql://user:@localhost:5432/okprod'
CACHE_TYPE = 'simple'
Remove ominous TESTING_LOGIN config comment
|
# TODO @Sumukh Better Secret Management System
class TestConfig(object):
DEBUG = True
SECRET_KEY = 'Testing*ok*server*'
RESTFUL_JSON = {'indent': 4}
TESTING_LOGIN = True
class DevConfig(TestConfig):
ENV = 'dev'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
CACHE_TYPE = 'simple'
ASSETS_DEBUG = True
class TestConfig(TestConfig):
ENV = 'test'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
SQLALCHEMY_ECHO = True
CACHE_TYPE = 'simple'
WTF_CSRF_ENABLED = False
class Config:
SECRET_KEY = 'samplekey'
class ProdConfig(Config):
# TODO Move to secret file
ENV = 'prod'
SQLALCHEMY_DATABASE_URI = 'postgresql://user:@localhost:5432/okprod'
CACHE_TYPE = 'simple'
|
<commit_before># TODO @Sumukh Better Secret Management System
class TestConfig(object):
DEBUG = True
SECRET_KEY = 'Testing*ok*server*'
RESTFUL_JSON = {'indent': 4}
TESTING_LOGIN = True # Do NOT turn on for prod
class DevConfig(TestConfig):
ENV = 'dev'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
CACHE_TYPE = 'simple'
ASSETS_DEBUG = True
class TestConfig(TestConfig):
ENV = 'test'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
SQLALCHEMY_ECHO = True
CACHE_TYPE = 'simple'
WTF_CSRF_ENABLED = False
class Config:
SECRET_KEY = 'samplekey'
class ProdConfig(Config):
# TODO Move to secret file
ENV = 'prod'
SQLALCHEMY_DATABASE_URI = 'postgresql://user:@localhost:5432/okprod'
CACHE_TYPE = 'simple'
<commit_msg>Remove ominous TESTING_LOGIN config comment<commit_after>
|
# TODO @Sumukh Better Secret Management System
class TestConfig(object):
DEBUG = True
SECRET_KEY = 'Testing*ok*server*'
RESTFUL_JSON = {'indent': 4}
TESTING_LOGIN = True
class DevConfig(TestConfig):
ENV = 'dev'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
CACHE_TYPE = 'simple'
ASSETS_DEBUG = True
class TestConfig(TestConfig):
ENV = 'test'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
SQLALCHEMY_ECHO = True
CACHE_TYPE = 'simple'
WTF_CSRF_ENABLED = False
class Config:
SECRET_KEY = 'samplekey'
class ProdConfig(Config):
# TODO Move to secret file
ENV = 'prod'
SQLALCHEMY_DATABASE_URI = 'postgresql://user:@localhost:5432/okprod'
CACHE_TYPE = 'simple'
|
# TODO @Sumukh Better Secret Management System
class TestConfig(object):
DEBUG = True
SECRET_KEY = 'Testing*ok*server*'
RESTFUL_JSON = {'indent': 4}
TESTING_LOGIN = True # Do NOT turn on for prod
class DevConfig(TestConfig):
ENV = 'dev'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
CACHE_TYPE = 'simple'
ASSETS_DEBUG = True
class TestConfig(TestConfig):
ENV = 'test'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
SQLALCHEMY_ECHO = True
CACHE_TYPE = 'simple'
WTF_CSRF_ENABLED = False
class Config:
SECRET_KEY = 'samplekey'
class ProdConfig(Config):
# TODO Move to secret file
ENV = 'prod'
SQLALCHEMY_DATABASE_URI = 'postgresql://user:@localhost:5432/okprod'
CACHE_TYPE = 'simple'
Remove ominous TESTING_LOGIN config comment# TODO @Sumukh Better Secret Management System
class TestConfig(object):
DEBUG = True
SECRET_KEY = 'Testing*ok*server*'
RESTFUL_JSON = {'indent': 4}
TESTING_LOGIN = True
class DevConfig(TestConfig):
ENV = 'dev'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
CACHE_TYPE = 'simple'
ASSETS_DEBUG = True
class TestConfig(TestConfig):
ENV = 'test'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
SQLALCHEMY_ECHO = True
CACHE_TYPE = 'simple'
WTF_CSRF_ENABLED = False
class Config:
SECRET_KEY = 'samplekey'
class ProdConfig(Config):
# TODO Move to secret file
ENV = 'prod'
SQLALCHEMY_DATABASE_URI = 'postgresql://user:@localhost:5432/okprod'
CACHE_TYPE = 'simple'
|
<commit_before># TODO @Sumukh Better Secret Management System
class TestConfig(object):
DEBUG = True
SECRET_KEY = 'Testing*ok*server*'
RESTFUL_JSON = {'indent': 4}
TESTING_LOGIN = True # Do NOT turn on for prod
class DevConfig(TestConfig):
ENV = 'dev'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
CACHE_TYPE = 'simple'
ASSETS_DEBUG = True
class TestConfig(TestConfig):
ENV = 'test'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
SQLALCHEMY_ECHO = True
CACHE_TYPE = 'simple'
WTF_CSRF_ENABLED = False
class Config:
SECRET_KEY = 'samplekey'
class ProdConfig(Config):
# TODO Move to secret file
ENV = 'prod'
SQLALCHEMY_DATABASE_URI = 'postgresql://user:@localhost:5432/okprod'
CACHE_TYPE = 'simple'
<commit_msg>Remove ominous TESTING_LOGIN config comment<commit_after># TODO @Sumukh Better Secret Management System
class TestConfig(object):
DEBUG = True
SECRET_KEY = 'Testing*ok*server*'
RESTFUL_JSON = {'indent': 4}
TESTING_LOGIN = True
class DevConfig(TestConfig):
ENV = 'dev'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
CACHE_TYPE = 'simple'
ASSETS_DEBUG = True
class TestConfig(TestConfig):
ENV = 'test'
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:@localhost:5432/okdev'
SQLALCHEMY_ECHO = True
CACHE_TYPE = 'simple'
WTF_CSRF_ENABLED = False
class Config:
SECRET_KEY = 'samplekey'
class ProdConfig(Config):
# TODO Move to secret file
ENV = 'prod'
SQLALCHEMY_DATABASE_URI = 'postgresql://user:@localhost:5432/okprod'
CACHE_TYPE = 'simple'
|
b64b4f804137bc5fae5bdac28e4763ed01b71164
|
imagr_site/urls.py
|
imagr_site/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^', include('imagr.urls', namespace='imagr')),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('registration.backends.default.urls'))
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^imagr/', include('imagr.urls', namespace='imagr')),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('registration.backends.default.urls'))
)
|
Change back the url so site root is at /imagr again
|
Change back the url so site root is at /imagr again
|
Python
|
mit
|
markableidinger/django_imagr
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^', include('imagr.urls', namespace='imagr')),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('registration.backends.default.urls'))
)
Change back the url so site root is at /imagr again
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^imagr/', include('imagr.urls', namespace='imagr')),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('registration.backends.default.urls'))
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^', include('imagr.urls', namespace='imagr')),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('registration.backends.default.urls'))
)
<commit_msg>Change back the url so site root is at /imagr again<commit_after>
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^imagr/', include('imagr.urls', namespace='imagr')),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('registration.backends.default.urls'))
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^', include('imagr.urls', namespace='imagr')),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('registration.backends.default.urls'))
)
Change back the url so site root is at /imagr againfrom django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^imagr/', include('imagr.urls', namespace='imagr')),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('registration.backends.default.urls'))
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^', include('imagr.urls', namespace='imagr')),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('registration.backends.default.urls'))
)
<commit_msg>Change back the url so site root is at /imagr again<commit_after>from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^imagr/', include('imagr.urls', namespace='imagr')),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('registration.backends.default.urls'))
)
|
67fadc0ed846a95f6d603827313b555e98985959
|
skimage/viewer/qt.py
|
skimage/viewer/qt.py
|
has_qt = True
try:
from matplotlib.backends.qt_compat import QtGui, QtCore, QtWidgets
except ImportError:
try:
from matplotlib.backends.qt4_compat import QtGui, QtCore
QtWidgets = QtGui
except ImportError:
# Mock objects
class QtGui(object):
QMainWindow = object
QDialog = object
QWidget = object
class QtCore_cls(object):
class Qt(object):
TopDockWidgetArea = None
BottomDockWidgetArea = None
LeftDockWidgetArea = None
RightDockWidgetArea = None
def Signal(self, *args, **kwargs):
pass
QWidget = object
QtCore = QtWidgets = QtCore_cls()
has_qt = False
Qt = QtCore.Qt
Signal = QtCore.Signal
|
has_qt = True
try:
from matplotlib.backends.qt_compat import QtGui, QtCore, QtWidgets
except ImportError:
try:
from matplotlib.backends.qt4_compat import QtGui, QtCore
QtWidgets = QtGui
except ImportError:
# Mock objects
class QtGui_cls(object):
QMainWindow = object
QDialog = object
QWidget = object
class QtCore_cls(object):
class Qt(object):
TopDockWidgetArea = None
BottomDockWidgetArea = None
LeftDockWidgetArea = None
RightDockWidgetArea = None
def Signal(self, *args, **kwargs):
pass
QtGui = QtWidgets = QtGui_cls()
QtCore = QtCore_cls()
has_qt = False
Qt = QtCore.Qt
Signal = QtCore.Signal
|
Fix mock Qt objects again
|
Fix mock Qt objects again
|
Python
|
bsd-3-clause
|
jwiggins/scikit-image,Hiyorimi/scikit-image,paalge/scikit-image,michaelaye/scikit-image,ajaybhat/scikit-image,ofgulban/scikit-image,oew1v07/scikit-image,newville/scikit-image,juliusbierk/scikit-image,chriscrosscutler/scikit-image,vighneshbirodkar/scikit-image,newville/scikit-image,keflavich/scikit-image,bennlich/scikit-image,bsipocz/scikit-image,ClinicalGraphics/scikit-image,rjeli/scikit-image,WarrenWeckesser/scikits-image,pratapvardhan/scikit-image,rjeli/scikit-image,blink1073/scikit-image,paalge/scikit-image,Hiyorimi/scikit-image,oew1v07/scikit-image,bennlich/scikit-image,michaelaye/scikit-image,rjeli/scikit-image,GaZ3ll3/scikit-image,keflavich/scikit-image,Britefury/scikit-image,pratapvardhan/scikit-image,Midafi/scikit-image,youprofit/scikit-image,ofgulban/scikit-image,Midafi/scikit-image,vighneshbirodkar/scikit-image,robintw/scikit-image,warmspringwinds/scikit-image,emon10005/scikit-image,michaelpacer/scikit-image,chriscrosscutler/scikit-image,emon10005/scikit-image,ClinicalGraphics/scikit-image,warmspringwinds/scikit-image,ofgulban/scikit-image,dpshelio/scikit-image,blink1073/scikit-image,michaelpacer/scikit-image,Britefury/scikit-image,WarrenWeckesser/scikits-image,paalge/scikit-image,bsipocz/scikit-image,robintw/scikit-image,GaZ3ll3/scikit-image,ajaybhat/scikit-image,juliusbierk/scikit-image,vighneshbirodkar/scikit-image,jwiggins/scikit-image,youprofit/scikit-image,dpshelio/scikit-image
|
has_qt = True
try:
from matplotlib.backends.qt_compat import QtGui, QtCore, QtWidgets
except ImportError:
try:
from matplotlib.backends.qt4_compat import QtGui, QtCore
QtWidgets = QtGui
except ImportError:
# Mock objects
class QtGui(object):
QMainWindow = object
QDialog = object
QWidget = object
class QtCore_cls(object):
class Qt(object):
TopDockWidgetArea = None
BottomDockWidgetArea = None
LeftDockWidgetArea = None
RightDockWidgetArea = None
def Signal(self, *args, **kwargs):
pass
QWidget = object
QtCore = QtWidgets = QtCore_cls()
has_qt = False
Qt = QtCore.Qt
Signal = QtCore.Signal
Fix mock Qt objects again
|
has_qt = True
try:
from matplotlib.backends.qt_compat import QtGui, QtCore, QtWidgets
except ImportError:
try:
from matplotlib.backends.qt4_compat import QtGui, QtCore
QtWidgets = QtGui
except ImportError:
# Mock objects
class QtGui_cls(object):
QMainWindow = object
QDialog = object
QWidget = object
class QtCore_cls(object):
class Qt(object):
TopDockWidgetArea = None
BottomDockWidgetArea = None
LeftDockWidgetArea = None
RightDockWidgetArea = None
def Signal(self, *args, **kwargs):
pass
QtGui = QtWidgets = QtGui_cls()
QtCore = QtCore_cls()
has_qt = False
Qt = QtCore.Qt
Signal = QtCore.Signal
|
<commit_before>has_qt = True
try:
from matplotlib.backends.qt_compat import QtGui, QtCore, QtWidgets
except ImportError:
try:
from matplotlib.backends.qt4_compat import QtGui, QtCore
QtWidgets = QtGui
except ImportError:
# Mock objects
class QtGui(object):
QMainWindow = object
QDialog = object
QWidget = object
class QtCore_cls(object):
class Qt(object):
TopDockWidgetArea = None
BottomDockWidgetArea = None
LeftDockWidgetArea = None
RightDockWidgetArea = None
def Signal(self, *args, **kwargs):
pass
QWidget = object
QtCore = QtWidgets = QtCore_cls()
has_qt = False
Qt = QtCore.Qt
Signal = QtCore.Signal
<commit_msg>Fix mock Qt objects again<commit_after>
|
has_qt = True
try:
from matplotlib.backends.qt_compat import QtGui, QtCore, QtWidgets
except ImportError:
try:
from matplotlib.backends.qt4_compat import QtGui, QtCore
QtWidgets = QtGui
except ImportError:
# Mock objects
class QtGui_cls(object):
QMainWindow = object
QDialog = object
QWidget = object
class QtCore_cls(object):
class Qt(object):
TopDockWidgetArea = None
BottomDockWidgetArea = None
LeftDockWidgetArea = None
RightDockWidgetArea = None
def Signal(self, *args, **kwargs):
pass
QtGui = QtWidgets = QtGui_cls()
QtCore = QtCore_cls()
has_qt = False
Qt = QtCore.Qt
Signal = QtCore.Signal
|
has_qt = True
try:
from matplotlib.backends.qt_compat import QtGui, QtCore, QtWidgets
except ImportError:
try:
from matplotlib.backends.qt4_compat import QtGui, QtCore
QtWidgets = QtGui
except ImportError:
# Mock objects
class QtGui(object):
QMainWindow = object
QDialog = object
QWidget = object
class QtCore_cls(object):
class Qt(object):
TopDockWidgetArea = None
BottomDockWidgetArea = None
LeftDockWidgetArea = None
RightDockWidgetArea = None
def Signal(self, *args, **kwargs):
pass
QWidget = object
QtCore = QtWidgets = QtCore_cls()
has_qt = False
Qt = QtCore.Qt
Signal = QtCore.Signal
Fix mock Qt objects againhas_qt = True
try:
from matplotlib.backends.qt_compat import QtGui, QtCore, QtWidgets
except ImportError:
try:
from matplotlib.backends.qt4_compat import QtGui, QtCore
QtWidgets = QtGui
except ImportError:
# Mock objects
class QtGui_cls(object):
QMainWindow = object
QDialog = object
QWidget = object
class QtCore_cls(object):
class Qt(object):
TopDockWidgetArea = None
BottomDockWidgetArea = None
LeftDockWidgetArea = None
RightDockWidgetArea = None
def Signal(self, *args, **kwargs):
pass
QtGui = QtWidgets = QtGui_cls()
QtCore = QtCore_cls()
has_qt = False
Qt = QtCore.Qt
Signal = QtCore.Signal
|
<commit_before>has_qt = True
try:
from matplotlib.backends.qt_compat import QtGui, QtCore, QtWidgets
except ImportError:
try:
from matplotlib.backends.qt4_compat import QtGui, QtCore
QtWidgets = QtGui
except ImportError:
# Mock objects
class QtGui(object):
QMainWindow = object
QDialog = object
QWidget = object
class QtCore_cls(object):
class Qt(object):
TopDockWidgetArea = None
BottomDockWidgetArea = None
LeftDockWidgetArea = None
RightDockWidgetArea = None
def Signal(self, *args, **kwargs):
pass
QWidget = object
QtCore = QtWidgets = QtCore_cls()
has_qt = False
Qt = QtCore.Qt
Signal = QtCore.Signal
<commit_msg>Fix mock Qt objects again<commit_after>has_qt = True
try:
from matplotlib.backends.qt_compat import QtGui, QtCore, QtWidgets
except ImportError:
try:
from matplotlib.backends.qt4_compat import QtGui, QtCore
QtWidgets = QtGui
except ImportError:
# Mock objects
class QtGui_cls(object):
QMainWindow = object
QDialog = object
QWidget = object
class QtCore_cls(object):
class Qt(object):
TopDockWidgetArea = None
BottomDockWidgetArea = None
LeftDockWidgetArea = None
RightDockWidgetArea = None
def Signal(self, *args, **kwargs):
pass
QtGui = QtWidgets = QtGui_cls()
QtCore = QtCore_cls()
has_qt = False
Qt = QtCore.Qt
Signal = QtCore.Signal
|
0f04e6ed48227c6904d75a78be9c893f47f9cb80
|
joku/cogs/_common.py
|
joku/cogs/_common.py
|
from collections import OrderedDict
import threading
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
"""
A common class for all cogs. This makes the class body ordered, and provides a `local` which stores thread-local
data. This makes the cogs semi thread-safe.
"""
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
|
from collections import OrderedDict
import threading
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
|
Remove false docstring from common cog.
|
Remove false docstring from common cog.
|
Python
|
mit
|
MJB47/Jokusoramame,MJB47/Jokusoramame,MJB47/Jokusoramame
|
from collections import OrderedDict
import threading
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
"""
A common class for all cogs. This makes the class body ordered, and provides a `local` which stores thread-local
data. This makes the cogs semi thread-safe.
"""
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
Remove false docstring from common cog.
|
from collections import OrderedDict
import threading
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
|
<commit_before>from collections import OrderedDict
import threading
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
"""
A common class for all cogs. This makes the class body ordered, and provides a `local` which stores thread-local
data. This makes the cogs semi thread-safe.
"""
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
<commit_msg>Remove false docstring from common cog.<commit_after>
|
from collections import OrderedDict
import threading
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
|
from collections import OrderedDict
import threading
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
"""
A common class for all cogs. This makes the class body ordered, and provides a `local` which stores thread-local
data. This makes the cogs semi thread-safe.
"""
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
Remove false docstring from common cog.from collections import OrderedDict
import threading
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
|
<commit_before>from collections import OrderedDict
import threading
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
"""
A common class for all cogs. This makes the class body ordered, and provides a `local` which stores thread-local
data. This makes the cogs semi thread-safe.
"""
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
<commit_msg>Remove false docstring from common cog.<commit_after>from collections import OrderedDict
import threading
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
|
225f4ba42e2184b09b49e7d6a933748d436f7d3e
|
eultheme/__init__.py
|
eultheme/__init__.py
|
__version_info__ = (0, 4, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join(str(i) for i in __version_info__[:-1])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
|
__version_info__ = (0, 5, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join(str(i) for i in __version_info__[:-1])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
|
Update version number for release
|
Update version number for release
|
Python
|
apache-2.0
|
emory-libraries/django-eultheme,emory-libraries/django-eultheme,emory-libraries/django-eultheme
|
__version_info__ = (0, 4, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join(str(i) for i in __version_info__[:-1])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))Update version number for release
|
__version_info__ = (0, 5, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join(str(i) for i in __version_info__[:-1])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
|
<commit_before>__version_info__ = (0, 4, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join(str(i) for i in __version_info__[:-1])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))<commit_msg>Update version number for release<commit_after>
|
__version_info__ = (0, 5, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join(str(i) for i in __version_info__[:-1])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
|
__version_info__ = (0, 4, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join(str(i) for i in __version_info__[:-1])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))Update version number for release__version_info__ = (0, 5, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join(str(i) for i in __version_info__[:-1])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
|
<commit_before>__version_info__ = (0, 4, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join(str(i) for i in __version_info__[:-1])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))<commit_msg>Update version number for release<commit_after>__version_info__ = (0, 5, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join(str(i) for i in __version_info__[:-1])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
|
87ba878afe9fac0e8ebe3b11719982148b8ac89a
|
conanfile.py
|
conanfile.py
|
from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class VeraPPTargetCmakeConan(ConanFile):
name = "verapp-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/verapp-target-cmake"
license = "MIT"
def source(self):
zip_name = "verapp-target-cmake.zip"
download("https://github.com/polysquare/"
"verapp-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/verapp-target-cmake",
src="verapp-target-cmake-" + VERSION,
keep_path=True)
|
from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class VeraPPTargetCmakeConan(ConanFile):
name = "verapp-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/verapp-target-cmake"
license = "MIT"
def source(self):
zip_name = "verapp-target-cmake.zip"
download("https://github.com/polysquare/"
"verapp-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="Find*.cmake",
dst="",
src="verapp-target-cmake-" + VERSION,
keep_path=True)
self.copy(pattern="*.cmake",
dst="cmake/verapp-target-cmake",
src="verapp-target-cmake-" + VERSION,
keep_path=True)
|
Copy find modules to root of module path
|
conan: Copy find modules to root of module path
|
Python
|
mit
|
polysquare/veracpp-cmake,polysquare/verapp-cmake
|
from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class VeraPPTargetCmakeConan(ConanFile):
name = "verapp-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/verapp-target-cmake"
license = "MIT"
def source(self):
zip_name = "verapp-target-cmake.zip"
download("https://github.com/polysquare/"
"verapp-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/verapp-target-cmake",
src="verapp-target-cmake-" + VERSION,
keep_path=True)
conan: Copy find modules to root of module path
|
from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class VeraPPTargetCmakeConan(ConanFile):
name = "verapp-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/verapp-target-cmake"
license = "MIT"
def source(self):
zip_name = "verapp-target-cmake.zip"
download("https://github.com/polysquare/"
"verapp-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="Find*.cmake",
dst="",
src="verapp-target-cmake-" + VERSION,
keep_path=True)
self.copy(pattern="*.cmake",
dst="cmake/verapp-target-cmake",
src="verapp-target-cmake-" + VERSION,
keep_path=True)
|
<commit_before>from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class VeraPPTargetCmakeConan(ConanFile):
name = "verapp-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/verapp-target-cmake"
license = "MIT"
def source(self):
zip_name = "verapp-target-cmake.zip"
download("https://github.com/polysquare/"
"verapp-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/verapp-target-cmake",
src="verapp-target-cmake-" + VERSION,
keep_path=True)
<commit_msg>conan: Copy find modules to root of module path<commit_after>
|
from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class VeraPPTargetCmakeConan(ConanFile):
name = "verapp-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/verapp-target-cmake"
license = "MIT"
def source(self):
zip_name = "verapp-target-cmake.zip"
download("https://github.com/polysquare/"
"verapp-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="Find*.cmake",
dst="",
src="verapp-target-cmake-" + VERSION,
keep_path=True)
self.copy(pattern="*.cmake",
dst="cmake/verapp-target-cmake",
src="verapp-target-cmake-" + VERSION,
keep_path=True)
|
from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class VeraPPTargetCmakeConan(ConanFile):
name = "verapp-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/verapp-target-cmake"
license = "MIT"
def source(self):
zip_name = "verapp-target-cmake.zip"
download("https://github.com/polysquare/"
"verapp-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/verapp-target-cmake",
src="verapp-target-cmake-" + VERSION,
keep_path=True)
conan: Copy find modules to root of module pathfrom conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class VeraPPTargetCmakeConan(ConanFile):
name = "verapp-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/verapp-target-cmake"
license = "MIT"
def source(self):
zip_name = "verapp-target-cmake.zip"
download("https://github.com/polysquare/"
"verapp-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="Find*.cmake",
dst="",
src="verapp-target-cmake-" + VERSION,
keep_path=True)
self.copy(pattern="*.cmake",
dst="cmake/verapp-target-cmake",
src="verapp-target-cmake-" + VERSION,
keep_path=True)
|
<commit_before>from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class VeraPPTargetCmakeConan(ConanFile):
name = "verapp-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/verapp-target-cmake"
license = "MIT"
def source(self):
zip_name = "verapp-target-cmake.zip"
download("https://github.com/polysquare/"
"verapp-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="*.cmake",
dst="cmake/verapp-target-cmake",
src="verapp-target-cmake-" + VERSION,
keep_path=True)
<commit_msg>conan: Copy find modules to root of module path<commit_after>from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.2"
class VeraPPTargetCmakeConan(ConanFile):
name = "verapp-target-cmake"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
"tooling-find-pkg-util/master@smspillaz/tooling-find-pkg-util",
"tooling-cmake-util/master@smspillaz/tooling-cmake-util",
"cmake-unit/master@smspillaz/cmake-unit")
url = "http://github.com/polysquare/verapp-target-cmake"
license = "MIT"
def source(self):
zip_name = "verapp-target-cmake.zip"
download("https://github.com/polysquare/"
"verapp-target-cmake/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="Find*.cmake",
dst="",
src="verapp-target-cmake-" + VERSION,
keep_path=True)
self.copy(pattern="*.cmake",
dst="cmake/verapp-target-cmake",
src="verapp-target-cmake-" + VERSION,
keep_path=True)
|
0d8ce87cda68a0e882cf1108066e2bde6c9cb1fa
|
shopping_app/forms.py
|
shopping_app/forms.py
|
from wtforms import Form, BooleanField, StringField, PasswordField, validators
from wtforms.validators import DataRequired, InputRequired
class LoginForm(Form):
username = StringField('username', validators=[InputRequired(), DataRequired()])
password = PasswordField('password', validators=[InputRequired(), DataRequired()])
class RegistrationForm(Form):
username = StringField('Username', [validators.Length(min=4, max=25)])
email = StringField('Email Address', [validators.Length(min=6, max=35)])
password = PasswordField('New Password', [
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match')
])
confirm = PasswordField('Repeat Password')
|
from wtforms import Form, DecimalField, IntegerField, StringField, PasswordField, validators, ValidationError
from wtforms.validators import DataRequired, InputRequired
from .utils.helpers import check_duplicate_item_name
class LoginForm(Form):
username = StringField('username', validators=[InputRequired(), DataRequired()])
password = PasswordField('password', validators=[InputRequired(), DataRequired()])
class CreateShoppingItemForm(Form):
item_name = StringField('item-name', validators=[InputRequired()])
quantity = IntegerField('quantity', validators=[InputRequired()])
price = DecimalField('price', validators=[InputRequired()])
class RegistrationForm(Form):
username = StringField('Username', [validators.Length(min=4, max=25)])
email = StringField('Email Address', [validators.Length(min=6, max=35)])
password = PasswordField('New Password', [
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match')
])
confirm = PasswordField('Repeat Password')
|
Add create shopping item form
|
Add create shopping item form
|
Python
|
mit
|
gr1d99/shopping-list,gr1d99/shopping-list,gr1d99/shopping-list
|
from wtforms import Form, BooleanField, StringField, PasswordField, validators
from wtforms.validators import DataRequired, InputRequired
class LoginForm(Form):
username = StringField('username', validators=[InputRequired(), DataRequired()])
password = PasswordField('password', validators=[InputRequired(), DataRequired()])
class RegistrationForm(Form):
username = StringField('Username', [validators.Length(min=4, max=25)])
email = StringField('Email Address', [validators.Length(min=6, max=35)])
password = PasswordField('New Password', [
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match')
])
confirm = PasswordField('Repeat Password')
Add create shopping item form
|
from wtforms import Form, DecimalField, IntegerField, StringField, PasswordField, validators, ValidationError
from wtforms.validators import DataRequired, InputRequired
from .utils.helpers import check_duplicate_item_name
class LoginForm(Form):
username = StringField('username', validators=[InputRequired(), DataRequired()])
password = PasswordField('password', validators=[InputRequired(), DataRequired()])
class CreateShoppingItemForm(Form):
item_name = StringField('item-name', validators=[InputRequired()])
quantity = IntegerField('quantity', validators=[InputRequired()])
price = DecimalField('price', validators=[InputRequired()])
class RegistrationForm(Form):
username = StringField('Username', [validators.Length(min=4, max=25)])
email = StringField('Email Address', [validators.Length(min=6, max=35)])
password = PasswordField('New Password', [
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match')
])
confirm = PasswordField('Repeat Password')
|
<commit_before>from wtforms import Form, BooleanField, StringField, PasswordField, validators
from wtforms.validators import DataRequired, InputRequired
class LoginForm(Form):
username = StringField('username', validators=[InputRequired(), DataRequired()])
password = PasswordField('password', validators=[InputRequired(), DataRequired()])
class RegistrationForm(Form):
username = StringField('Username', [validators.Length(min=4, max=25)])
email = StringField('Email Address', [validators.Length(min=6, max=35)])
password = PasswordField('New Password', [
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match')
])
confirm = PasswordField('Repeat Password')
<commit_msg>Add create shopping item form<commit_after>
|
from wtforms import Form, DecimalField, IntegerField, StringField, PasswordField, validators, ValidationError
from wtforms.validators import DataRequired, InputRequired
from .utils.helpers import check_duplicate_item_name
class LoginForm(Form):
username = StringField('username', validators=[InputRequired(), DataRequired()])
password = PasswordField('password', validators=[InputRequired(), DataRequired()])
class CreateShoppingItemForm(Form):
item_name = StringField('item-name', validators=[InputRequired()])
quantity = IntegerField('quantity', validators=[InputRequired()])
price = DecimalField('price', validators=[InputRequired()])
class RegistrationForm(Form):
username = StringField('Username', [validators.Length(min=4, max=25)])
email = StringField('Email Address', [validators.Length(min=6, max=35)])
password = PasswordField('New Password', [
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match')
])
confirm = PasswordField('Repeat Password')
|
from wtforms import Form, BooleanField, StringField, PasswordField, validators
from wtforms.validators import DataRequired, InputRequired
class LoginForm(Form):
username = StringField('username', validators=[InputRequired(), DataRequired()])
password = PasswordField('password', validators=[InputRequired(), DataRequired()])
class RegistrationForm(Form):
username = StringField('Username', [validators.Length(min=4, max=25)])
email = StringField('Email Address', [validators.Length(min=6, max=35)])
password = PasswordField('New Password', [
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match')
])
confirm = PasswordField('Repeat Password')
Add create shopping item formfrom wtforms import Form, DecimalField, IntegerField, StringField, PasswordField, validators, ValidationError
from wtforms.validators import DataRequired, InputRequired
from .utils.helpers import check_duplicate_item_name
class LoginForm(Form):
username = StringField('username', validators=[InputRequired(), DataRequired()])
password = PasswordField('password', validators=[InputRequired(), DataRequired()])
class CreateShoppingItemForm(Form):
item_name = StringField('item-name', validators=[InputRequired()])
quantity = IntegerField('quantity', validators=[InputRequired()])
price = DecimalField('price', validators=[InputRequired()])
class RegistrationForm(Form):
username = StringField('Username', [validators.Length(min=4, max=25)])
email = StringField('Email Address', [validators.Length(min=6, max=35)])
password = PasswordField('New Password', [
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match')
])
confirm = PasswordField('Repeat Password')
|
<commit_before>from wtforms import Form, BooleanField, StringField, PasswordField, validators
from wtforms.validators import DataRequired, InputRequired
class LoginForm(Form):
username = StringField('username', validators=[InputRequired(), DataRequired()])
password = PasswordField('password', validators=[InputRequired(), DataRequired()])
class RegistrationForm(Form):
username = StringField('Username', [validators.Length(min=4, max=25)])
email = StringField('Email Address', [validators.Length(min=6, max=35)])
password = PasswordField('New Password', [
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match')
])
confirm = PasswordField('Repeat Password')
<commit_msg>Add create shopping item form<commit_after>from wtforms import Form, DecimalField, IntegerField, StringField, PasswordField, validators, ValidationError
from wtforms.validators import DataRequired, InputRequired
from .utils.helpers import check_duplicate_item_name
class LoginForm(Form):
username = StringField('username', validators=[InputRequired(), DataRequired()])
password = PasswordField('password', validators=[InputRequired(), DataRequired()])
class CreateShoppingItemForm(Form):
item_name = StringField('item-name', validators=[InputRequired()])
quantity = IntegerField('quantity', validators=[InputRequired()])
price = DecimalField('price', validators=[InputRequired()])
class RegistrationForm(Form):
username = StringField('Username', [validators.Length(min=4, max=25)])
email = StringField('Email Address', [validators.Length(min=6, max=35)])
password = PasswordField('New Password', [
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match')
])
confirm = PasswordField('Repeat Password')
|
5f8069d61eff6241dc036409a5e6862fd8baac27
|
examples/graphviz.py
|
examples/graphviz.py
|
#!/usr/bin/env python
"""
Pandoc filter to process code blocks with class "graphviz" into
graphviz-generated images.
"""
import pygraphviz
import hashlib
import os
import sys
from pandocfilters import toJSONFilter, Str, Para, Image
def sha1(x):
return hashlib.sha1(x).hexdigest()
imagedir = "graphviz-images"
def graphviz(key, value, format, meta):
if key == 'CodeBlock':
[[ident,classes,keyvals], code] = value
caption = "caption"
if "graphviz" in classes:
G = pygraphviz.AGraph(string = code)
G.layout()
filename = sha1(code)
if format == "html":
filetype = "png"
elif format == "latex":
filetype = "pdf"
else:
filetype = "png"
alt = Str(caption)
src = imagedir + '/' + filename + '.' + filetype
if not os.path.isfile(src):
try:
os.mkdir(imagedir)
sys.stderr.write('Created directory ' + imagedir + '\n')
except OSError:
pass
G.draw(src)
sys.stderr.write('Created image ' + src + '\n')
tit = ""
return Para([Image(alt, [src,tit])])
if __name__ == "__main__":
toJSONFilter(graphviz)
|
#!/usr/bin/env python
"""
Pandoc filter to process code blocks with class "graphviz" into
graphviz-generated images.
"""
import pygraphviz
import hashlib
import os
import sys
from pandocfilters import toJSONFilter, Str, Para, Image
def sha1(x):
return hashlib.sha1(x).hexdigest()
imagedir = "graphviz-images"
def graphviz(key, value, format, meta):
if key == 'CodeBlock':
[[ident,classes,keyvals], code] = value
caption = "caption"
if "graphviz" in classes:
G = pygraphviz.AGraph(string = code)
G.layout()
filename = sha1(code)
if format == "html":
filetype = "png"
elif format == "latex":
filetype = "pdf"
else:
filetype = "png"
alt = Str(caption)
src = imagedir + '/' + filename + '.' + filetype
if not os.path.isfile(src):
try:
os.mkdir(imagedir)
sys.stderr.write('Created directory ' + imagedir + '\n')
except OSError:
pass
G.draw(src)
sys.stderr.write('Created image ' + src + '\n')
tit = ""
return Para([Image([alt], [src,tit])])
if __name__ == "__main__":
toJSONFilter(graphviz)
|
Fix creation of json for image (pandoc 1.12.3.3)
|
Fix creation of json for image (pandoc 1.12.3.3)
At least with pandoc 1.12.3.3 otherwise you get an error
pandoc: when expecting a [a], encountered Object instead
|
Python
|
bsd-3-clause
|
AugustH/pandocfilters,alycosta/pandocfilters,silvio/pandocfilters,jgm/pandocfilters,infotroph/pandocfilters,timtylin/scholdoc-filters
|
#!/usr/bin/env python
"""
Pandoc filter to process code blocks with class "graphviz" into
graphviz-generated images.
"""
import pygraphviz
import hashlib
import os
import sys
from pandocfilters import toJSONFilter, Str, Para, Image
def sha1(x):
return hashlib.sha1(x).hexdigest()
imagedir = "graphviz-images"
def graphviz(key, value, format, meta):
if key == 'CodeBlock':
[[ident,classes,keyvals], code] = value
caption = "caption"
if "graphviz" in classes:
G = pygraphviz.AGraph(string = code)
G.layout()
filename = sha1(code)
if format == "html":
filetype = "png"
elif format == "latex":
filetype = "pdf"
else:
filetype = "png"
alt = Str(caption)
src = imagedir + '/' + filename + '.' + filetype
if not os.path.isfile(src):
try:
os.mkdir(imagedir)
sys.stderr.write('Created directory ' + imagedir + '\n')
except OSError:
pass
G.draw(src)
sys.stderr.write('Created image ' + src + '\n')
tit = ""
return Para([Image(alt, [src,tit])])
if __name__ == "__main__":
toJSONFilter(graphviz)
Fix creation of json for image (pandoc 1.12.3.3)
At least with pandoc 1.12.3.3 otherwise you get an error
pandoc: when expecting a [a], encountered Object instead
|
#!/usr/bin/env python
"""
Pandoc filter to process code blocks with class "graphviz" into
graphviz-generated images.
"""
import pygraphviz
import hashlib
import os
import sys
from pandocfilters import toJSONFilter, Str, Para, Image
def sha1(x):
return hashlib.sha1(x).hexdigest()
imagedir = "graphviz-images"
def graphviz(key, value, format, meta):
if key == 'CodeBlock':
[[ident,classes,keyvals], code] = value
caption = "caption"
if "graphviz" in classes:
G = pygraphviz.AGraph(string = code)
G.layout()
filename = sha1(code)
if format == "html":
filetype = "png"
elif format == "latex":
filetype = "pdf"
else:
filetype = "png"
alt = Str(caption)
src = imagedir + '/' + filename + '.' + filetype
if not os.path.isfile(src):
try:
os.mkdir(imagedir)
sys.stderr.write('Created directory ' + imagedir + '\n')
except OSError:
pass
G.draw(src)
sys.stderr.write('Created image ' + src + '\n')
tit = ""
return Para([Image([alt], [src,tit])])
if __name__ == "__main__":
toJSONFilter(graphviz)
|
<commit_before>#!/usr/bin/env python
"""
Pandoc filter to process code blocks with class "graphviz" into
graphviz-generated images.
"""
import pygraphviz
import hashlib
import os
import sys
from pandocfilters import toJSONFilter, Str, Para, Image
def sha1(x):
return hashlib.sha1(x).hexdigest()
imagedir = "graphviz-images"
def graphviz(key, value, format, meta):
if key == 'CodeBlock':
[[ident,classes,keyvals], code] = value
caption = "caption"
if "graphviz" in classes:
G = pygraphviz.AGraph(string = code)
G.layout()
filename = sha1(code)
if format == "html":
filetype = "png"
elif format == "latex":
filetype = "pdf"
else:
filetype = "png"
alt = Str(caption)
src = imagedir + '/' + filename + '.' + filetype
if not os.path.isfile(src):
try:
os.mkdir(imagedir)
sys.stderr.write('Created directory ' + imagedir + '\n')
except OSError:
pass
G.draw(src)
sys.stderr.write('Created image ' + src + '\n')
tit = ""
return Para([Image(alt, [src,tit])])
if __name__ == "__main__":
toJSONFilter(graphviz)
<commit_msg>Fix creation of json for image (pandoc 1.12.3.3)
At least with pandoc 1.12.3.3 otherwise you get an error
pandoc: when expecting a [a], encountered Object instead<commit_after>
|
#!/usr/bin/env python
"""
Pandoc filter to process code blocks with class "graphviz" into
graphviz-generated images.
"""
import pygraphviz
import hashlib
import os
import sys
from pandocfilters import toJSONFilter, Str, Para, Image
def sha1(x):
return hashlib.sha1(x).hexdigest()
imagedir = "graphviz-images"
def graphviz(key, value, format, meta):
if key == 'CodeBlock':
[[ident,classes,keyvals], code] = value
caption = "caption"
if "graphviz" in classes:
G = pygraphviz.AGraph(string = code)
G.layout()
filename = sha1(code)
if format == "html":
filetype = "png"
elif format == "latex":
filetype = "pdf"
else:
filetype = "png"
alt = Str(caption)
src = imagedir + '/' + filename + '.' + filetype
if not os.path.isfile(src):
try:
os.mkdir(imagedir)
sys.stderr.write('Created directory ' + imagedir + '\n')
except OSError:
pass
G.draw(src)
sys.stderr.write('Created image ' + src + '\n')
tit = ""
return Para([Image([alt], [src,tit])])
if __name__ == "__main__":
toJSONFilter(graphviz)
|
#!/usr/bin/env python
"""
Pandoc filter to process code blocks with class "graphviz" into
graphviz-generated images.
"""
import pygraphviz
import hashlib
import os
import sys
from pandocfilters import toJSONFilter, Str, Para, Image
def sha1(x):
return hashlib.sha1(x).hexdigest()
imagedir = "graphviz-images"
def graphviz(key, value, format, meta):
if key == 'CodeBlock':
[[ident,classes,keyvals], code] = value
caption = "caption"
if "graphviz" in classes:
G = pygraphviz.AGraph(string = code)
G.layout()
filename = sha1(code)
if format == "html":
filetype = "png"
elif format == "latex":
filetype = "pdf"
else:
filetype = "png"
alt = Str(caption)
src = imagedir + '/' + filename + '.' + filetype
if not os.path.isfile(src):
try:
os.mkdir(imagedir)
sys.stderr.write('Created directory ' + imagedir + '\n')
except OSError:
pass
G.draw(src)
sys.stderr.write('Created image ' + src + '\n')
tit = ""
return Para([Image(alt, [src,tit])])
if __name__ == "__main__":
toJSONFilter(graphviz)
Fix creation of json for image (pandoc 1.12.3.3)
At least with pandoc 1.12.3.3 otherwise you get an error
pandoc: when expecting a [a], encountered Object instead#!/usr/bin/env python
"""
Pandoc filter to process code blocks with class "graphviz" into
graphviz-generated images.
"""
import pygraphviz
import hashlib
import os
import sys
from pandocfilters import toJSONFilter, Str, Para, Image
def sha1(x):
return hashlib.sha1(x).hexdigest()
imagedir = "graphviz-images"
def graphviz(key, value, format, meta):
if key == 'CodeBlock':
[[ident,classes,keyvals], code] = value
caption = "caption"
if "graphviz" in classes:
G = pygraphviz.AGraph(string = code)
G.layout()
filename = sha1(code)
if format == "html":
filetype = "png"
elif format == "latex":
filetype = "pdf"
else:
filetype = "png"
alt = Str(caption)
src = imagedir + '/' + filename + '.' + filetype
if not os.path.isfile(src):
try:
os.mkdir(imagedir)
sys.stderr.write('Created directory ' + imagedir + '\n')
except OSError:
pass
G.draw(src)
sys.stderr.write('Created image ' + src + '\n')
tit = ""
return Para([Image([alt], [src,tit])])
if __name__ == "__main__":
toJSONFilter(graphviz)
|
<commit_before>#!/usr/bin/env python
"""
Pandoc filter to process code blocks with class "graphviz" into
graphviz-generated images.
"""
import pygraphviz
import hashlib
import os
import sys
from pandocfilters import toJSONFilter, Str, Para, Image
def sha1(x):
return hashlib.sha1(x).hexdigest()
imagedir = "graphviz-images"
def graphviz(key, value, format, meta):
if key == 'CodeBlock':
[[ident,classes,keyvals], code] = value
caption = "caption"
if "graphviz" in classes:
G = pygraphviz.AGraph(string = code)
G.layout()
filename = sha1(code)
if format == "html":
filetype = "png"
elif format == "latex":
filetype = "pdf"
else:
filetype = "png"
alt = Str(caption)
src = imagedir + '/' + filename + '.' + filetype
if not os.path.isfile(src):
try:
os.mkdir(imagedir)
sys.stderr.write('Created directory ' + imagedir + '\n')
except OSError:
pass
G.draw(src)
sys.stderr.write('Created image ' + src + '\n')
tit = ""
return Para([Image(alt, [src,tit])])
if __name__ == "__main__":
toJSONFilter(graphviz)
<commit_msg>Fix creation of json for image (pandoc 1.12.3.3)
At least with pandoc 1.12.3.3 otherwise you get an error
pandoc: when expecting a [a], encountered Object instead<commit_after>#!/usr/bin/env python
"""
Pandoc filter to process code blocks with class "graphviz" into
graphviz-generated images.
"""
import pygraphviz
import hashlib
import os
import sys
from pandocfilters import toJSONFilter, Str, Para, Image
def sha1(x):
return hashlib.sha1(x).hexdigest()
imagedir = "graphviz-images"
def graphviz(key, value, format, meta):
if key == 'CodeBlock':
[[ident,classes,keyvals], code] = value
caption = "caption"
if "graphviz" in classes:
G = pygraphviz.AGraph(string = code)
G.layout()
filename = sha1(code)
if format == "html":
filetype = "png"
elif format == "latex":
filetype = "pdf"
else:
filetype = "png"
alt = Str(caption)
src = imagedir + '/' + filename + '.' + filetype
if not os.path.isfile(src):
try:
os.mkdir(imagedir)
sys.stderr.write('Created directory ' + imagedir + '\n')
except OSError:
pass
G.draw(src)
sys.stderr.write('Created image ' + src + '\n')
tit = ""
return Para([Image([alt], [src,tit])])
if __name__ == "__main__":
toJSONFilter(graphviz)
|
83c92b4e5d28ae94c57c69aa19448f6763c48bc9
|
f8a_jobs/defaults.py
|
f8a_jobs/defaults.py
|
#!/usr/bin/env python3
import os
from datetime import timedelta
_BAYESIAN_JOBS_DIR = os.path.dirname(os.path.realpath(__file__))
DEFAULT_SERVICE_PORT = 34000
SWAGGER_YAML_PATH = os.path.join(_BAYESIAN_JOBS_DIR, 'swagger.yaml')
DEFAULT_JOB_DIR = os.path.join(_BAYESIAN_JOBS_DIR, 'default_jobs')
TOKEN_VALID_TIME = timedelta(days=14)
AUTH_ORGANIZATION = 'fabric8-analytics'
DISABLE_AUTHENTICATION = bool(os.environ.get('DISABLE_AUTHENTICATION', False))
GITHUB_CONSUMER_KEY = os.environ.get('GITHUB_CONSUMENR_KEY', '96d6ad4971dfec52cd7c')
GITHUB_CONSUMER_SECRET = os.environ.get('GITHUB_CONSUMER_SECRET', '97a65e9066a9e4468a9a024a25073ea6e10e8ab6')
GITHUB_ACCESS_TOKEN = os.environ.get('GITHUB_ACCESS_TOKEN', '2ba44d20f2da859184b8ab11460952d49cbde32a')
APP_SECRET_KEY = os.environ.get('APP_SECRET_KEY', 'euYu3Ma6AhV7ieshOen4neigluL9aith')
|
#!/usr/bin/env python3
import os
from datetime import timedelta
_BAYESIAN_JOBS_DIR = os.path.dirname(os.path.realpath(__file__))
DEFAULT_SERVICE_PORT = 34000
SWAGGER_YAML_PATH = os.path.join(_BAYESIAN_JOBS_DIR, 'swagger.yaml')
DEFAULT_JOB_DIR = os.path.join(_BAYESIAN_JOBS_DIR, 'default_jobs')
TOKEN_VALID_TIME = timedelta(days=14)
AUTH_ORGANIZATION = 'fabric8-analytics'
DISABLE_AUTHENTICATION = bool(os.environ.get('DISABLE_AUTHENTICATION', False))
GITHUB_CONSUMER_KEY = os.environ.get('GITHUB_CONSUMER_KEY', '96d6ad4971dfec52cd7c')
GITHUB_CONSUMER_SECRET = os.environ.get('GITHUB_CONSUMER_SECRET', '97a65e9066a9e4468a9a024a25073ea6e10e8ab6')
GITHUB_ACCESS_TOKEN = os.environ.get('GITHUB_ACCESS_TOKEN', '2ba44d20f2da859184b8ab11460952d49cbde32a')
APP_SECRET_KEY = os.environ.get('APP_SECRET_KEY', 'euYu3Ma6AhV7ieshOen4neigluL9aith')
|
Fix typo in name of variable
|
Fix typo in name of variable
|
Python
|
apache-2.0
|
fabric8-analytics/fabric8-analytics-jobs,fabric8-analytics/fabric8-analytics-jobs
|
#!/usr/bin/env python3
import os
from datetime import timedelta
_BAYESIAN_JOBS_DIR = os.path.dirname(os.path.realpath(__file__))
DEFAULT_SERVICE_PORT = 34000
SWAGGER_YAML_PATH = os.path.join(_BAYESIAN_JOBS_DIR, 'swagger.yaml')
DEFAULT_JOB_DIR = os.path.join(_BAYESIAN_JOBS_DIR, 'default_jobs')
TOKEN_VALID_TIME = timedelta(days=14)
AUTH_ORGANIZATION = 'fabric8-analytics'
DISABLE_AUTHENTICATION = bool(os.environ.get('DISABLE_AUTHENTICATION', False))
GITHUB_CONSUMER_KEY = os.environ.get('GITHUB_CONSUMENR_KEY', '96d6ad4971dfec52cd7c')
GITHUB_CONSUMER_SECRET = os.environ.get('GITHUB_CONSUMER_SECRET', '97a65e9066a9e4468a9a024a25073ea6e10e8ab6')
GITHUB_ACCESS_TOKEN = os.environ.get('GITHUB_ACCESS_TOKEN', '2ba44d20f2da859184b8ab11460952d49cbde32a')
APP_SECRET_KEY = os.environ.get('APP_SECRET_KEY', 'euYu3Ma6AhV7ieshOen4neigluL9aith')
Fix typo in name of variable
|
#!/usr/bin/env python3
import os
from datetime import timedelta
_BAYESIAN_JOBS_DIR = os.path.dirname(os.path.realpath(__file__))
DEFAULT_SERVICE_PORT = 34000
SWAGGER_YAML_PATH = os.path.join(_BAYESIAN_JOBS_DIR, 'swagger.yaml')
DEFAULT_JOB_DIR = os.path.join(_BAYESIAN_JOBS_DIR, 'default_jobs')
TOKEN_VALID_TIME = timedelta(days=14)
AUTH_ORGANIZATION = 'fabric8-analytics'
DISABLE_AUTHENTICATION = bool(os.environ.get('DISABLE_AUTHENTICATION', False))
GITHUB_CONSUMER_KEY = os.environ.get('GITHUB_CONSUMER_KEY', '96d6ad4971dfec52cd7c')
GITHUB_CONSUMER_SECRET = os.environ.get('GITHUB_CONSUMER_SECRET', '97a65e9066a9e4468a9a024a25073ea6e10e8ab6')
GITHUB_ACCESS_TOKEN = os.environ.get('GITHUB_ACCESS_TOKEN', '2ba44d20f2da859184b8ab11460952d49cbde32a')
APP_SECRET_KEY = os.environ.get('APP_SECRET_KEY', 'euYu3Ma6AhV7ieshOen4neigluL9aith')
|
<commit_before>#!/usr/bin/env python3
import os
from datetime import timedelta
_BAYESIAN_JOBS_DIR = os.path.dirname(os.path.realpath(__file__))
DEFAULT_SERVICE_PORT = 34000
SWAGGER_YAML_PATH = os.path.join(_BAYESIAN_JOBS_DIR, 'swagger.yaml')
DEFAULT_JOB_DIR = os.path.join(_BAYESIAN_JOBS_DIR, 'default_jobs')
TOKEN_VALID_TIME = timedelta(days=14)
AUTH_ORGANIZATION = 'fabric8-analytics'
DISABLE_AUTHENTICATION = bool(os.environ.get('DISABLE_AUTHENTICATION', False))
GITHUB_CONSUMER_KEY = os.environ.get('GITHUB_CONSUMENR_KEY', '96d6ad4971dfec52cd7c')
GITHUB_CONSUMER_SECRET = os.environ.get('GITHUB_CONSUMER_SECRET', '97a65e9066a9e4468a9a024a25073ea6e10e8ab6')
GITHUB_ACCESS_TOKEN = os.environ.get('GITHUB_ACCESS_TOKEN', '2ba44d20f2da859184b8ab11460952d49cbde32a')
APP_SECRET_KEY = os.environ.get('APP_SECRET_KEY', 'euYu3Ma6AhV7ieshOen4neigluL9aith')
<commit_msg>Fix typo in name of variable<commit_after>
|
#!/usr/bin/env python3
import os
from datetime import timedelta
_BAYESIAN_JOBS_DIR = os.path.dirname(os.path.realpath(__file__))
DEFAULT_SERVICE_PORT = 34000
SWAGGER_YAML_PATH = os.path.join(_BAYESIAN_JOBS_DIR, 'swagger.yaml')
DEFAULT_JOB_DIR = os.path.join(_BAYESIAN_JOBS_DIR, 'default_jobs')
TOKEN_VALID_TIME = timedelta(days=14)
AUTH_ORGANIZATION = 'fabric8-analytics'
DISABLE_AUTHENTICATION = bool(os.environ.get('DISABLE_AUTHENTICATION', False))
GITHUB_CONSUMER_KEY = os.environ.get('GITHUB_CONSUMER_KEY', '96d6ad4971dfec52cd7c')
GITHUB_CONSUMER_SECRET = os.environ.get('GITHUB_CONSUMER_SECRET', '97a65e9066a9e4468a9a024a25073ea6e10e8ab6')
GITHUB_ACCESS_TOKEN = os.environ.get('GITHUB_ACCESS_TOKEN', '2ba44d20f2da859184b8ab11460952d49cbde32a')
APP_SECRET_KEY = os.environ.get('APP_SECRET_KEY', 'euYu3Ma6AhV7ieshOen4neigluL9aith')
|
#!/usr/bin/env python3
import os
from datetime import timedelta
_BAYESIAN_JOBS_DIR = os.path.dirname(os.path.realpath(__file__))
DEFAULT_SERVICE_PORT = 34000
SWAGGER_YAML_PATH = os.path.join(_BAYESIAN_JOBS_DIR, 'swagger.yaml')
DEFAULT_JOB_DIR = os.path.join(_BAYESIAN_JOBS_DIR, 'default_jobs')
TOKEN_VALID_TIME = timedelta(days=14)
AUTH_ORGANIZATION = 'fabric8-analytics'
DISABLE_AUTHENTICATION = bool(os.environ.get('DISABLE_AUTHENTICATION', False))
GITHUB_CONSUMER_KEY = os.environ.get('GITHUB_CONSUMENR_KEY', '96d6ad4971dfec52cd7c')
GITHUB_CONSUMER_SECRET = os.environ.get('GITHUB_CONSUMER_SECRET', '97a65e9066a9e4468a9a024a25073ea6e10e8ab6')
GITHUB_ACCESS_TOKEN = os.environ.get('GITHUB_ACCESS_TOKEN', '2ba44d20f2da859184b8ab11460952d49cbde32a')
APP_SECRET_KEY = os.environ.get('APP_SECRET_KEY', 'euYu3Ma6AhV7ieshOen4neigluL9aith')
Fix typo in name of variable#!/usr/bin/env python3
import os
from datetime import timedelta
_BAYESIAN_JOBS_DIR = os.path.dirname(os.path.realpath(__file__))
DEFAULT_SERVICE_PORT = 34000
SWAGGER_YAML_PATH = os.path.join(_BAYESIAN_JOBS_DIR, 'swagger.yaml')
DEFAULT_JOB_DIR = os.path.join(_BAYESIAN_JOBS_DIR, 'default_jobs')
TOKEN_VALID_TIME = timedelta(days=14)
AUTH_ORGANIZATION = 'fabric8-analytics'
DISABLE_AUTHENTICATION = bool(os.environ.get('DISABLE_AUTHENTICATION', False))
GITHUB_CONSUMER_KEY = os.environ.get('GITHUB_CONSUMER_KEY', '96d6ad4971dfec52cd7c')
GITHUB_CONSUMER_SECRET = os.environ.get('GITHUB_CONSUMER_SECRET', '97a65e9066a9e4468a9a024a25073ea6e10e8ab6')
GITHUB_ACCESS_TOKEN = os.environ.get('GITHUB_ACCESS_TOKEN', '2ba44d20f2da859184b8ab11460952d49cbde32a')
APP_SECRET_KEY = os.environ.get('APP_SECRET_KEY', 'euYu3Ma6AhV7ieshOen4neigluL9aith')
|
<commit_before>#!/usr/bin/env python3
import os
from datetime import timedelta
_BAYESIAN_JOBS_DIR = os.path.dirname(os.path.realpath(__file__))
DEFAULT_SERVICE_PORT = 34000
SWAGGER_YAML_PATH = os.path.join(_BAYESIAN_JOBS_DIR, 'swagger.yaml')
DEFAULT_JOB_DIR = os.path.join(_BAYESIAN_JOBS_DIR, 'default_jobs')
TOKEN_VALID_TIME = timedelta(days=14)
AUTH_ORGANIZATION = 'fabric8-analytics'
DISABLE_AUTHENTICATION = bool(os.environ.get('DISABLE_AUTHENTICATION', False))
GITHUB_CONSUMER_KEY = os.environ.get('GITHUB_CONSUMENR_KEY', '96d6ad4971dfec52cd7c')
GITHUB_CONSUMER_SECRET = os.environ.get('GITHUB_CONSUMER_SECRET', '97a65e9066a9e4468a9a024a25073ea6e10e8ab6')
GITHUB_ACCESS_TOKEN = os.environ.get('GITHUB_ACCESS_TOKEN', '2ba44d20f2da859184b8ab11460952d49cbde32a')
APP_SECRET_KEY = os.environ.get('APP_SECRET_KEY', 'euYu3Ma6AhV7ieshOen4neigluL9aith')
<commit_msg>Fix typo in name of variable<commit_after>#!/usr/bin/env python3
import os
from datetime import timedelta
_BAYESIAN_JOBS_DIR = os.path.dirname(os.path.realpath(__file__))
DEFAULT_SERVICE_PORT = 34000
SWAGGER_YAML_PATH = os.path.join(_BAYESIAN_JOBS_DIR, 'swagger.yaml')
DEFAULT_JOB_DIR = os.path.join(_BAYESIAN_JOBS_DIR, 'default_jobs')
TOKEN_VALID_TIME = timedelta(days=14)
AUTH_ORGANIZATION = 'fabric8-analytics'
DISABLE_AUTHENTICATION = bool(os.environ.get('DISABLE_AUTHENTICATION', False))
GITHUB_CONSUMER_KEY = os.environ.get('GITHUB_CONSUMER_KEY', '96d6ad4971dfec52cd7c')
GITHUB_CONSUMER_SECRET = os.environ.get('GITHUB_CONSUMER_SECRET', '97a65e9066a9e4468a9a024a25073ea6e10e8ab6')
GITHUB_ACCESS_TOKEN = os.environ.get('GITHUB_ACCESS_TOKEN', '2ba44d20f2da859184b8ab11460952d49cbde32a')
APP_SECRET_KEY = os.environ.get('APP_SECRET_KEY', 'euYu3Ma6AhV7ieshOen4neigluL9aith')
|
582460dcfeb85e2132705ba789eb88d1c67ae022
|
counting_sort.py
|
counting_sort.py
|
import random
import time
def counting_sort(array):
k = max(array)
counts = [0]*(k+1)
for x in array:
counts[x] += 1
total = 0
for i in range(0,k+1):
c = counts[i]
counts[i] = total
total = total + c
output = [0]*len(array)
for x in array:
output[counts[x]] = x
counts[x] = counts[x] + 1
return output
if __name__ == "__main__":
assert counting_sort([5,3,2,1]) == [1,2,3,5]
x = []
for i in range(0, 1000):
x.append(random.randint(0, 20))
assert counting_sort(x) == sorted(x)
for i in range(0, 10000000):
x.append(random.randint(0, 4000))
start = time.time()
counting_sort(x)
end = time.time()
print "counting sort took: ", end-start
start = time.time()
sorted(x)
end = time.time()
print "timsort took: ", end-start
|
import random
import time
def counting_sort(array):
k = max(array)
counts = [0]*(k+1)
for x in array:
counts[x] += 1
output = []
for x in xrange(k+1):
output += [x]*counts[x]
return output
if __name__ == "__main__":
assert counting_sort([5,3,2,1]) == [1,2,3,5]
x = []
for i in range(0, 1000):
x.append(random.randint(0, 20))
assert counting_sort(x) == sorted(x)
for i in range(0, 10000000):
x.append(random.randint(0, 4000))
start = time.time()
counting_sort(x)
end = time.time()
print "counting sort took: ", end-start
start = time.time()
sorted(x)
end = time.time()
print "timsort took: ", end-start
|
Use a much simpler (and faster) output building step.
|
Use a much simpler (and faster) output building step.
This implementation is much easeir to read, and is a lot clearer
about what's going on. It turns out that it's about 3 times faster
in python too!
|
Python
|
mit
|
samphippen/linear_sort
|
import random
import time
def counting_sort(array):
k = max(array)
counts = [0]*(k+1)
for x in array:
counts[x] += 1
total = 0
for i in range(0,k+1):
c = counts[i]
counts[i] = total
total = total + c
output = [0]*len(array)
for x in array:
output[counts[x]] = x
counts[x] = counts[x] + 1
return output
if __name__ == "__main__":
assert counting_sort([5,3,2,1]) == [1,2,3,5]
x = []
for i in range(0, 1000):
x.append(random.randint(0, 20))
assert counting_sort(x) == sorted(x)
for i in range(0, 10000000):
x.append(random.randint(0, 4000))
start = time.time()
counting_sort(x)
end = time.time()
print "counting sort took: ", end-start
start = time.time()
sorted(x)
end = time.time()
print "timsort took: ", end-start
Use a much simpler (and faster) output building step.
This implementation is much easeir to read, and is a lot clearer
about what's going on. It turns out that it's about 3 times faster
in python too!
|
import random
import time
def counting_sort(array):
k = max(array)
counts = [0]*(k+1)
for x in array:
counts[x] += 1
output = []
for x in xrange(k+1):
output += [x]*counts[x]
return output
if __name__ == "__main__":
assert counting_sort([5,3,2,1]) == [1,2,3,5]
x = []
for i in range(0, 1000):
x.append(random.randint(0, 20))
assert counting_sort(x) == sorted(x)
for i in range(0, 10000000):
x.append(random.randint(0, 4000))
start = time.time()
counting_sort(x)
end = time.time()
print "counting sort took: ", end-start
start = time.time()
sorted(x)
end = time.time()
print "timsort took: ", end-start
|
<commit_before>import random
import time
def counting_sort(array):
k = max(array)
counts = [0]*(k+1)
for x in array:
counts[x] += 1
total = 0
for i in range(0,k+1):
c = counts[i]
counts[i] = total
total = total + c
output = [0]*len(array)
for x in array:
output[counts[x]] = x
counts[x] = counts[x] + 1
return output
if __name__ == "__main__":
assert counting_sort([5,3,2,1]) == [1,2,3,5]
x = []
for i in range(0, 1000):
x.append(random.randint(0, 20))
assert counting_sort(x) == sorted(x)
for i in range(0, 10000000):
x.append(random.randint(0, 4000))
start = time.time()
counting_sort(x)
end = time.time()
print "counting sort took: ", end-start
start = time.time()
sorted(x)
end = time.time()
print "timsort took: ", end-start
<commit_msg>Use a much simpler (and faster) output building step.
This implementation is much easeir to read, and is a lot clearer
about what's going on. It turns out that it's about 3 times faster
in python too!<commit_after>
|
import random
import time
def counting_sort(array):
k = max(array)
counts = [0]*(k+1)
for x in array:
counts[x] += 1
output = []
for x in xrange(k+1):
output += [x]*counts[x]
return output
if __name__ == "__main__":
assert counting_sort([5,3,2,1]) == [1,2,3,5]
x = []
for i in range(0, 1000):
x.append(random.randint(0, 20))
assert counting_sort(x) == sorted(x)
for i in range(0, 10000000):
x.append(random.randint(0, 4000))
start = time.time()
counting_sort(x)
end = time.time()
print "counting sort took: ", end-start
start = time.time()
sorted(x)
end = time.time()
print "timsort took: ", end-start
|
import random
import time
def counting_sort(array):
k = max(array)
counts = [0]*(k+1)
for x in array:
counts[x] += 1
total = 0
for i in range(0,k+1):
c = counts[i]
counts[i] = total
total = total + c
output = [0]*len(array)
for x in array:
output[counts[x]] = x
counts[x] = counts[x] + 1
return output
if __name__ == "__main__":
assert counting_sort([5,3,2,1]) == [1,2,3,5]
x = []
for i in range(0, 1000):
x.append(random.randint(0, 20))
assert counting_sort(x) == sorted(x)
for i in range(0, 10000000):
x.append(random.randint(0, 4000))
start = time.time()
counting_sort(x)
end = time.time()
print "counting sort took: ", end-start
start = time.time()
sorted(x)
end = time.time()
print "timsort took: ", end-start
Use a much simpler (and faster) output building step.
This implementation is much easeir to read, and is a lot clearer
about what's going on. It turns out that it's about 3 times faster
in python too!import random
import time
def counting_sort(array):
k = max(array)
counts = [0]*(k+1)
for x in array:
counts[x] += 1
output = []
for x in xrange(k+1):
output += [x]*counts[x]
return output
if __name__ == "__main__":
assert counting_sort([5,3,2,1]) == [1,2,3,5]
x = []
for i in range(0, 1000):
x.append(random.randint(0, 20))
assert counting_sort(x) == sorted(x)
for i in range(0, 10000000):
x.append(random.randint(0, 4000))
start = time.time()
counting_sort(x)
end = time.time()
print "counting sort took: ", end-start
start = time.time()
sorted(x)
end = time.time()
print "timsort took: ", end-start
|
<commit_before>import random
import time
def counting_sort(array):
k = max(array)
counts = [0]*(k+1)
for x in array:
counts[x] += 1
total = 0
for i in range(0,k+1):
c = counts[i]
counts[i] = total
total = total + c
output = [0]*len(array)
for x in array:
output[counts[x]] = x
counts[x] = counts[x] + 1
return output
if __name__ == "__main__":
assert counting_sort([5,3,2,1]) == [1,2,3,5]
x = []
for i in range(0, 1000):
x.append(random.randint(0, 20))
assert counting_sort(x) == sorted(x)
for i in range(0, 10000000):
x.append(random.randint(0, 4000))
start = time.time()
counting_sort(x)
end = time.time()
print "counting sort took: ", end-start
start = time.time()
sorted(x)
end = time.time()
print "timsort took: ", end-start
<commit_msg>Use a much simpler (and faster) output building step.
This implementation is much easeir to read, and is a lot clearer
about what's going on. It turns out that it's about 3 times faster
in python too!<commit_after>import random
import time
def counting_sort(array):
k = max(array)
counts = [0]*(k+1)
for x in array:
counts[x] += 1
output = []
for x in xrange(k+1):
output += [x]*counts[x]
return output
if __name__ == "__main__":
assert counting_sort([5,3,2,1]) == [1,2,3,5]
x = []
for i in range(0, 1000):
x.append(random.randint(0, 20))
assert counting_sort(x) == sorted(x)
for i in range(0, 10000000):
x.append(random.randint(0, 4000))
start = time.time()
counting_sort(x)
end = time.time()
print "counting sort took: ", end-start
start = time.time()
sorted(x)
end = time.time()
print "timsort took: ", end-start
|
71671f30589464c4d714110a6f00ca6ab327c5c6
|
blogs/middleware.py
|
blogs/middleware.py
|
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from annoying.functions import get_object_or_None
class BlogMiddleware:
def process_request(self, request):
request.blog_user = None
host = request.META.get('HTTP_HOST', '')
host_s = host.replace('www.', '').split('.')
if host != 'snipt.net' and host != 'snipt.localhost':
if len(host_s) > 2:
if host_s[1] == 'snipt':
blog_user = ''.join(host_s[:-2])
if '-' in blog_user:
request.blog_user = get_object_or_None(User, username__iexact=blog_user)
if request.blog_user is None:
request.blog_user = get_object_or_404(User, username__iexact=blog_user.replace('-', '_'))
else:
request.blog_user = get_object_or_404(User, username__iexact=blog_user)
if request.blog_user is None:
pro_users = User.objects.filter(userprofile__is_pro=True)
for pro_user in pro_users:
if host == pro_user.profile.blog_domain:
request.blog_user = pro_user
|
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from annoying.functions import get_object_or_None
class BlogMiddleware:
def process_request(self, request):
request.blog_user = None
host = request.META.get('HTTP_HOST', '')
host_s = host.replace('www.', '').split('.')
if host != 'snipt.net' and host != 'snipt.localhost':
if len(host_s) > 2:
if host_s[1] == 'snipt':
blog_user = ''.join(host_s[:-2])
if '-' in blog_user:
request.blog_user = get_object_or_None(User, username__iexact=blog_user)
if request.blog_user is None:
request.blog_user = get_object_or_404(User, username__iexact=blog_user.replace('-', '_'))
else:
request.blog_user = get_object_or_404(User, username__iexact=blog_user)
if request.blog_user is None:
pro_users = User.objects.filter(userprofile__is_pro=True)
for pro_user in pro_users:
if host in pro_user.profile.blog_domain.split(' '):
request.blog_user = pro_user
|
Allow Pro users to specify multiple domains to serve their blog. Specifically for www/non-www setups.
|
Allow Pro users to specify multiple domains to serve their blog. Specifically for www/non-www setups.
|
Python
|
mit
|
nicksergeant/snipt,nicksergeant/snipt,nicksergeant/snipt
|
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from annoying.functions import get_object_or_None
class BlogMiddleware:
def process_request(self, request):
request.blog_user = None
host = request.META.get('HTTP_HOST', '')
host_s = host.replace('www.', '').split('.')
if host != 'snipt.net' and host != 'snipt.localhost':
if len(host_s) > 2:
if host_s[1] == 'snipt':
blog_user = ''.join(host_s[:-2])
if '-' in blog_user:
request.blog_user = get_object_or_None(User, username__iexact=blog_user)
if request.blog_user is None:
request.blog_user = get_object_or_404(User, username__iexact=blog_user.replace('-', '_'))
else:
request.blog_user = get_object_or_404(User, username__iexact=blog_user)
if request.blog_user is None:
pro_users = User.objects.filter(userprofile__is_pro=True)
for pro_user in pro_users:
if host == pro_user.profile.blog_domain:
request.blog_user = pro_user
Allow Pro users to specify multiple domains to serve their blog. Specifically for www/non-www setups.
|
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from annoying.functions import get_object_or_None
class BlogMiddleware:
def process_request(self, request):
request.blog_user = None
host = request.META.get('HTTP_HOST', '')
host_s = host.replace('www.', '').split('.')
if host != 'snipt.net' and host != 'snipt.localhost':
if len(host_s) > 2:
if host_s[1] == 'snipt':
blog_user = ''.join(host_s[:-2])
if '-' in blog_user:
request.blog_user = get_object_or_None(User, username__iexact=blog_user)
if request.blog_user is None:
request.blog_user = get_object_or_404(User, username__iexact=blog_user.replace('-', '_'))
else:
request.blog_user = get_object_or_404(User, username__iexact=blog_user)
if request.blog_user is None:
pro_users = User.objects.filter(userprofile__is_pro=True)
for pro_user in pro_users:
if host in pro_user.profile.blog_domain.split(' '):
request.blog_user = pro_user
|
<commit_before>from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from annoying.functions import get_object_or_None
class BlogMiddleware:
def process_request(self, request):
request.blog_user = None
host = request.META.get('HTTP_HOST', '')
host_s = host.replace('www.', '').split('.')
if host != 'snipt.net' and host != 'snipt.localhost':
if len(host_s) > 2:
if host_s[1] == 'snipt':
blog_user = ''.join(host_s[:-2])
if '-' in blog_user:
request.blog_user = get_object_or_None(User, username__iexact=blog_user)
if request.blog_user is None:
request.blog_user = get_object_or_404(User, username__iexact=blog_user.replace('-', '_'))
else:
request.blog_user = get_object_or_404(User, username__iexact=blog_user)
if request.blog_user is None:
pro_users = User.objects.filter(userprofile__is_pro=True)
for pro_user in pro_users:
if host == pro_user.profile.blog_domain:
request.blog_user = pro_user
<commit_msg>Allow Pro users to specify multiple domains to serve their blog. Specifically for www/non-www setups.<commit_after>
|
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from annoying.functions import get_object_or_None
class BlogMiddleware:
def process_request(self, request):
request.blog_user = None
host = request.META.get('HTTP_HOST', '')
host_s = host.replace('www.', '').split('.')
if host != 'snipt.net' and host != 'snipt.localhost':
if len(host_s) > 2:
if host_s[1] == 'snipt':
blog_user = ''.join(host_s[:-2])
if '-' in blog_user:
request.blog_user = get_object_or_None(User, username__iexact=blog_user)
if request.blog_user is None:
request.blog_user = get_object_or_404(User, username__iexact=blog_user.replace('-', '_'))
else:
request.blog_user = get_object_or_404(User, username__iexact=blog_user)
if request.blog_user is None:
pro_users = User.objects.filter(userprofile__is_pro=True)
for pro_user in pro_users:
if host in pro_user.profile.blog_domain.split(' '):
request.blog_user = pro_user
|
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from annoying.functions import get_object_or_None
class BlogMiddleware:
def process_request(self, request):
request.blog_user = None
host = request.META.get('HTTP_HOST', '')
host_s = host.replace('www.', '').split('.')
if host != 'snipt.net' and host != 'snipt.localhost':
if len(host_s) > 2:
if host_s[1] == 'snipt':
blog_user = ''.join(host_s[:-2])
if '-' in blog_user:
request.blog_user = get_object_or_None(User, username__iexact=blog_user)
if request.blog_user is None:
request.blog_user = get_object_or_404(User, username__iexact=blog_user.replace('-', '_'))
else:
request.blog_user = get_object_or_404(User, username__iexact=blog_user)
if request.blog_user is None:
pro_users = User.objects.filter(userprofile__is_pro=True)
for pro_user in pro_users:
if host == pro_user.profile.blog_domain:
request.blog_user = pro_user
Allow Pro users to specify multiple domains to serve their blog. Specifically for www/non-www setups.from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from annoying.functions import get_object_or_None
class BlogMiddleware:
def process_request(self, request):
request.blog_user = None
host = request.META.get('HTTP_HOST', '')
host_s = host.replace('www.', '').split('.')
if host != 'snipt.net' and host != 'snipt.localhost':
if len(host_s) > 2:
if host_s[1] == 'snipt':
blog_user = ''.join(host_s[:-2])
if '-' in blog_user:
request.blog_user = get_object_or_None(User, username__iexact=blog_user)
if request.blog_user is None:
request.blog_user = get_object_or_404(User, username__iexact=blog_user.replace('-', '_'))
else:
request.blog_user = get_object_or_404(User, username__iexact=blog_user)
if request.blog_user is None:
pro_users = User.objects.filter(userprofile__is_pro=True)
for pro_user in pro_users:
if host in pro_user.profile.blog_domain.split(' '):
request.blog_user = pro_user
|
<commit_before>from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from annoying.functions import get_object_or_None
class BlogMiddleware:
def process_request(self, request):
request.blog_user = None
host = request.META.get('HTTP_HOST', '')
host_s = host.replace('www.', '').split('.')
if host != 'snipt.net' and host != 'snipt.localhost':
if len(host_s) > 2:
if host_s[1] == 'snipt':
blog_user = ''.join(host_s[:-2])
if '-' in blog_user:
request.blog_user = get_object_or_None(User, username__iexact=blog_user)
if request.blog_user is None:
request.blog_user = get_object_or_404(User, username__iexact=blog_user.replace('-', '_'))
else:
request.blog_user = get_object_or_404(User, username__iexact=blog_user)
if request.blog_user is None:
pro_users = User.objects.filter(userprofile__is_pro=True)
for pro_user in pro_users:
if host == pro_user.profile.blog_domain:
request.blog_user = pro_user
<commit_msg>Allow Pro users to specify multiple domains to serve their blog. Specifically for www/non-www setups.<commit_after>from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from annoying.functions import get_object_or_None
class BlogMiddleware:
def process_request(self, request):
request.blog_user = None
host = request.META.get('HTTP_HOST', '')
host_s = host.replace('www.', '').split('.')
if host != 'snipt.net' and host != 'snipt.localhost':
if len(host_s) > 2:
if host_s[1] == 'snipt':
blog_user = ''.join(host_s[:-2])
if '-' in blog_user:
request.blog_user = get_object_or_None(User, username__iexact=blog_user)
if request.blog_user is None:
request.blog_user = get_object_or_404(User, username__iexact=blog_user.replace('-', '_'))
else:
request.blog_user = get_object_or_404(User, username__iexact=blog_user)
if request.blog_user is None:
pro_users = User.objects.filter(userprofile__is_pro=True)
for pro_user in pro_users:
if host in pro_user.profile.blog_domain.split(' '):
request.blog_user = pro_user
|
99e443f51e5cab27b4a511d1ff9db8e5fc571a62
|
hostmonitor/management/commands/addhost.py
|
hostmonitor/management/commands/addhost.py
|
from iptools import validate_ip, validate_cidr, IpRange
from django.core.management.base import BaseCommand, CommandError
from hostmonitor.models import Host
class Command(BaseCommand):
args = '<target target ...>'
help = 'Add the specified hosts or CIDR networks (not network/broadcast)'
def add_host(self, ip):
h = Host(ip=ip)
self.stdout.write("Adding host %s\n" % ip)
h.save()
def handle(self, *args, **options):
for target in args:
if validate_ip(target):
self.add_host(target)
elif validate_cidr(target):
hosts = list(IpRange(target))
print hosts
for i in hosts[1:-1]:
self.add_host(i)
else:
self.stderr.write("Invalid host: %s\n" % target)
# try:
# poll = Poll.objects.get(pk=int(poll_id))
# except Poll.DoesNotExist:
# raise CommandError('Poll "%s" does not exist' % poll_id)
#
# poll.opened = False
# poll.save()
#
# self.stdout.write('Successfully closed poll "%s"\n' % poll_id)
|
import socket
from django.core.management.base import BaseCommand, CommandError
from django.db.utils import IntegrityError
from iptools import validate_ip, validate_cidr, IpRange
from hostmonitor.models import Host
def resolve_dns(name):
return set([x[4][0] for x in socket.getaddrinfo(name, 80)])
class Command(BaseCommand):
args = '<target target ...>'
help = 'Add the specified hosts or CIDR networks (not network/broadcast)'
def add_host(self, ip):
h = Host(ip=ip)
self.stdout.write("%s adding\n" % ip)
try:
h.save()
except IntegrityError, e:
self.stderr.write("%s ERROR, already exists, ignoring\n" % ip)
def handle(self, *args, **options):
for target in args:
if validate_ip(target):
self.add_host(target)
elif validate_cidr(target):
hosts = list(IpRange(target))
print hosts
for host in hosts[1:-1]:
self.add_host(host)
else:
hosts = resolve_dns(target)
for host in hosts:
self.add_host(host)
# try:
# poll = Poll.objects.get(pk=int(poll_id))
# except Poll.DoesNotExist:
# raise CommandError('Poll "%s" does not exist' % poll_id)
#
# poll.opened = False
# poll.save()
#
# self.stdout.write('Successfully closed poll "%s"\n' % poll_id)
|
Support adding hosts by DNS
|
Support adding hosts by DNS
|
Python
|
mit
|
kapsiry/vahti
|
from iptools import validate_ip, validate_cidr, IpRange
from django.core.management.base import BaseCommand, CommandError
from hostmonitor.models import Host
class Command(BaseCommand):
args = '<target target ...>'
help = 'Add the specified hosts or CIDR networks (not network/broadcast)'
def add_host(self, ip):
h = Host(ip=ip)
self.stdout.write("Adding host %s\n" % ip)
h.save()
def handle(self, *args, **options):
for target in args:
if validate_ip(target):
self.add_host(target)
elif validate_cidr(target):
hosts = list(IpRange(target))
print hosts
for i in hosts[1:-1]:
self.add_host(i)
else:
self.stderr.write("Invalid host: %s\n" % target)
# try:
# poll = Poll.objects.get(pk=int(poll_id))
# except Poll.DoesNotExist:
# raise CommandError('Poll "%s" does not exist' % poll_id)
#
# poll.opened = False
# poll.save()
#
# self.stdout.write('Successfully closed poll "%s"\n' % poll_id)
Support adding hosts by DNS
|
import socket
from django.core.management.base import BaseCommand, CommandError
from django.db.utils import IntegrityError
from iptools import validate_ip, validate_cidr, IpRange
from hostmonitor.models import Host
def resolve_dns(name):
return set([x[4][0] for x in socket.getaddrinfo(name, 80)])
class Command(BaseCommand):
args = '<target target ...>'
help = 'Add the specified hosts or CIDR networks (not network/broadcast)'
def add_host(self, ip):
h = Host(ip=ip)
self.stdout.write("%s adding\n" % ip)
try:
h.save()
except IntegrityError, e:
self.stderr.write("%s ERROR, already exists, ignoring\n" % ip)
def handle(self, *args, **options):
for target in args:
if validate_ip(target):
self.add_host(target)
elif validate_cidr(target):
hosts = list(IpRange(target))
print hosts
for host in hosts[1:-1]:
self.add_host(host)
else:
hosts = resolve_dns(target)
for host in hosts:
self.add_host(host)
# try:
# poll = Poll.objects.get(pk=int(poll_id))
# except Poll.DoesNotExist:
# raise CommandError('Poll "%s" does not exist' % poll_id)
#
# poll.opened = False
# poll.save()
#
# self.stdout.write('Successfully closed poll "%s"\n' % poll_id)
|
<commit_before>from iptools import validate_ip, validate_cidr, IpRange
from django.core.management.base import BaseCommand, CommandError
from hostmonitor.models import Host
class Command(BaseCommand):
args = '<target target ...>'
help = 'Add the specified hosts or CIDR networks (not network/broadcast)'
def add_host(self, ip):
h = Host(ip=ip)
self.stdout.write("Adding host %s\n" % ip)
h.save()
def handle(self, *args, **options):
for target in args:
if validate_ip(target):
self.add_host(target)
elif validate_cidr(target):
hosts = list(IpRange(target))
print hosts
for i in hosts[1:-1]:
self.add_host(i)
else:
self.stderr.write("Invalid host: %s\n" % target)
# try:
# poll = Poll.objects.get(pk=int(poll_id))
# except Poll.DoesNotExist:
# raise CommandError('Poll "%s" does not exist' % poll_id)
#
# poll.opened = False
# poll.save()
#
# self.stdout.write('Successfully closed poll "%s"\n' % poll_id)
<commit_msg>Support adding hosts by DNS<commit_after>
|
import socket
from django.core.management.base import BaseCommand, CommandError
from django.db.utils import IntegrityError
from iptools import validate_ip, validate_cidr, IpRange
from hostmonitor.models import Host
def resolve_dns(name):
return set([x[4][0] for x in socket.getaddrinfo(name, 80)])
class Command(BaseCommand):
args = '<target target ...>'
help = 'Add the specified hosts or CIDR networks (not network/broadcast)'
def add_host(self, ip):
h = Host(ip=ip)
self.stdout.write("%s adding\n" % ip)
try:
h.save()
except IntegrityError, e:
self.stderr.write("%s ERROR, already exists, ignoring\n" % ip)
def handle(self, *args, **options):
for target in args:
if validate_ip(target):
self.add_host(target)
elif validate_cidr(target):
hosts = list(IpRange(target))
print hosts
for host in hosts[1:-1]:
self.add_host(host)
else:
hosts = resolve_dns(target)
for host in hosts:
self.add_host(host)
# try:
# poll = Poll.objects.get(pk=int(poll_id))
# except Poll.DoesNotExist:
# raise CommandError('Poll "%s" does not exist' % poll_id)
#
# poll.opened = False
# poll.save()
#
# self.stdout.write('Successfully closed poll "%s"\n' % poll_id)
|
from iptools import validate_ip, validate_cidr, IpRange
from django.core.management.base import BaseCommand, CommandError
from hostmonitor.models import Host
class Command(BaseCommand):
args = '<target target ...>'
help = 'Add the specified hosts or CIDR networks (not network/broadcast)'
def add_host(self, ip):
h = Host(ip=ip)
self.stdout.write("Adding host %s\n" % ip)
h.save()
def handle(self, *args, **options):
for target in args:
if validate_ip(target):
self.add_host(target)
elif validate_cidr(target):
hosts = list(IpRange(target))
print hosts
for i in hosts[1:-1]:
self.add_host(i)
else:
self.stderr.write("Invalid host: %s\n" % target)
# try:
# poll = Poll.objects.get(pk=int(poll_id))
# except Poll.DoesNotExist:
# raise CommandError('Poll "%s" does not exist' % poll_id)
#
# poll.opened = False
# poll.save()
#
# self.stdout.write('Successfully closed poll "%s"\n' % poll_id)
Support adding hosts by DNSimport socket
from django.core.management.base import BaseCommand, CommandError
from django.db.utils import IntegrityError
from iptools import validate_ip, validate_cidr, IpRange
from hostmonitor.models import Host
def resolve_dns(name):
return set([x[4][0] for x in socket.getaddrinfo(name, 80)])
class Command(BaseCommand):
    """Management command: add hosts by IP, CIDR block, or DNS name.

    Python 2 / Django custom command (bare ``print`` and old-style
    ``except X, e`` syntax). Each target is tried as a literal IP
    first, then as a CIDR network, and finally resolved via DNS.
    """

    args = '<target target ...>'
    help = 'Add the specified hosts or CIDR networks (not network/broadcast)'

    def add_host(self, ip):
        # Persist one Host row. A duplicate IP trips the unique
        # constraint; that is reported on stderr and otherwise ignored.
        h = Host(ip=ip)
        self.stdout.write("%s adding\n" % ip)
        try:
            h.save()
        except IntegrityError, e:
            self.stderr.write("%s ERROR, already exists, ignoring\n" % ip)

    def handle(self, *args, **options):
        for target in args:
            if validate_ip(target):
                self.add_host(target)
            elif validate_cidr(target):
                # IpRange yields every address in the block; slicing
                # [1:-1] drops the network and broadcast addresses.
                hosts = list(IpRange(target))
                print hosts  # NOTE(review): looks like leftover debug output
                for host in hosts[1:-1]:
                    self.add_host(host)
            else:
                # Not an IP or CIDR: treat the target as a DNS name and
                # add every address it resolves to.
                hosts = resolve_dns(target)
                for host in hosts:
                    self.add_host(host)
# try:
# poll = Poll.objects.get(pk=int(poll_id))
# except Poll.DoesNotExist:
# raise CommandError('Poll "%s" does not exist' % poll_id)
#
# poll.opened = False
# poll.save()
#
# self.stdout.write('Successfully closed poll "%s"\n' % poll_id)
|
<commit_before>from iptools import validate_ip, validate_cidr, IpRange
from django.core.management.base import BaseCommand, CommandError
from hostmonitor.models import Host
class Command(BaseCommand):
args = '<target target ...>'
help = 'Add the specified hosts or CIDR networks (not network/broadcast)'
def add_host(self, ip):
h = Host(ip=ip)
self.stdout.write("Adding host %s\n" % ip)
h.save()
def handle(self, *args, **options):
for target in args:
if validate_ip(target):
self.add_host(target)
elif validate_cidr(target):
hosts = list(IpRange(target))
print hosts
for i in hosts[1:-1]:
self.add_host(i)
else:
self.stderr.write("Invalid host: %s\n" % target)
# try:
# poll = Poll.objects.get(pk=int(poll_id))
# except Poll.DoesNotExist:
# raise CommandError('Poll "%s" does not exist' % poll_id)
#
# poll.opened = False
# poll.save()
#
# self.stdout.write('Successfully closed poll "%s"\n' % poll_id)
<commit_msg>Support adding hosts by DNS<commit_after>import socket
from django.core.management.base import BaseCommand, CommandError
from django.db.utils import IntegrityError
from iptools import validate_ip, validate_cidr, IpRange
from hostmonitor.models import Host
def resolve_dns(name):
return set([x[4][0] for x in socket.getaddrinfo(name, 80)])
class Command(BaseCommand):
args = '<target target ...>'
help = 'Add the specified hosts or CIDR networks (not network/broadcast)'
def add_host(self, ip):
h = Host(ip=ip)
self.stdout.write("%s adding\n" % ip)
try:
h.save()
except IntegrityError, e:
self.stderr.write("%s ERROR, already exists, ignoring\n" % ip)
def handle(self, *args, **options):
for target in args:
if validate_ip(target):
self.add_host(target)
elif validate_cidr(target):
hosts = list(IpRange(target))
print hosts
for host in hosts[1:-1]:
self.add_host(host)
else:
hosts = resolve_dns(target)
for host in hosts:
self.add_host(host)
# try:
# poll = Poll.objects.get(pk=int(poll_id))
# except Poll.DoesNotExist:
# raise CommandError('Poll "%s" does not exist' % poll_id)
#
# poll.opened = False
# poll.save()
#
# self.stdout.write('Successfully closed poll "%s"\n' % poll_id)
|
6f0519eebb8449fdc384bdfdd724405cb06b45eb
|
main.py
|
main.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from website.app import init_app
app = init_app('website.settings', set_backends=True, routes=True)
if __name__ == '__main__':
app.run(host='127.0.0.1', port=5000)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from website.app import init_app
app = init_app('website.settings', set_backends=True, routes=True)
if __name__ == '__main__':
host = os.environ.get('OSF_HOST', None)
port = os.environ.get('OSF_PORT', None)
app.run(host=host, port=port)
|
Make host and port configurable thru envvars
|
Make host and port configurable thru envvars
|
Python
|
apache-2.0
|
alexschiller/osf.io,binoculars/osf.io,sbt9uc/osf.io,adlius/osf.io,MerlinZhang/osf.io,samanehsan/osf.io,monikagrabowska/osf.io,ZobairAlijan/osf.io,dplorimer/osf,samchrisinger/osf.io,zachjanicki/osf.io,samchrisinger/osf.io,leb2dg/osf.io,zkraime/osf.io,petermalcolm/osf.io,chrisseto/osf.io,mfraezz/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,billyhunt/osf.io,jinluyuan/osf.io,GaryKriebel/osf.io,TomBaxter/osf.io,SSJohns/osf.io,lyndsysimon/osf.io,jeffreyliu3230/osf.io,mluke93/osf.io,SSJohns/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,lamdnhan/osf.io,amyshi188/osf.io,RomanZWang/osf.io,mfraezz/osf.io,barbour-em/osf.io,danielneis/osf.io,TomHeatwole/osf.io,baylee-d/osf.io,doublebits/osf.io,caneruguz/osf.io,samchrisinger/osf.io,HalcyonChimera/osf.io,Ghalko/osf.io,himanshuo/osf.io,alexschiller/osf.io,alexschiller/osf.io,jnayak1/osf.io,KAsante95/osf.io,amyshi188/osf.io,adlius/osf.io,aaxelb/osf.io,haoyuchen1992/osf.io,aaxelb/osf.io,brandonPurvis/osf.io,ZobairAlijan/osf.io,himanshuo/osf.io,sbt9uc/osf.io,DanielSBrown/osf.io,njantrania/osf.io,himanshuo/osf.io,cslzchen/osf.io,njantrania/osf.io,jmcarp/osf.io,Nesiehr/osf.io,jnayak1/osf.io,GaryKriebel/osf.io,kushG/osf.io,sloria/osf.io,arpitar/osf.io,baylee-d/osf.io,petermalcolm/osf.io,jmcarp/osf.io,revanthkolli/osf.io,dplorimer/osf,zamattiac/osf.io,saradbowman/osf.io,doublebits/osf.io,asanfilippo7/osf.io,SSJohns/osf.io,Ghalko/osf.io,mfraezz/osf.io,MerlinZhang/osf.io,sbt9uc/osf.io,zamattiac/osf.io,jinluyuan/osf.io,mluke93/osf.io,chennan47/osf.io,adlius/osf.io,kushG/osf.io,petermalcolm/osf.io,ckc6cz/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,HarryRybacki/osf.io,fabianvf/osf.io,felliott/osf.io,amyshi188/osf.io,cwisecarver/osf.io,njantrania/osf.io,arpitar/osf.io,acshi/osf.io,caneruguz/osf.io,TomHeatwole/osf.io,chennan47/osf.io,leb2dg/osf.io,kch8qx/osf.io,crcresearch/osf.io,jolene-esposito/osf.io,chrisseto/osf.io,haoyuchen1992/osf.io,emetsger/osf.io,acshi/osf.io,zachjanicki/osf.io,acshi/osf.io,brandonPurvis/osf.io,adlius/osf.io,ba
rbour-em/osf.io,icereval/osf.io,ticklemepierce/osf.io,ckc6cz/osf.io,DanielSBrown/osf.io,erinspace/osf.io,abought/osf.io,billyhunt/osf.io,jinluyuan/osf.io,RomanZWang/osf.io,Ghalko/osf.io,arpitar/osf.io,revanthkolli/osf.io,ckc6cz/osf.io,icereval/osf.io,HarryRybacki/osf.io,felliott/osf.io,leb2dg/osf.io,TomHeatwole/osf.io,TomBaxter/osf.io,jnayak1/osf.io,kch8qx/osf.io,haoyuchen1992/osf.io,brandonPurvis/osf.io,hmoco/osf.io,brandonPurvis/osf.io,barbour-em/osf.io,ticklemepierce/osf.io,billyhunt/osf.io,mfraezz/osf.io,zamattiac/osf.io,mluo613/osf.io,ticklemepierce/osf.io,lamdnhan/osf.io,samanehsan/osf.io,kushG/osf.io,chrisseto/osf.io,danielneis/osf.io,monikagrabowska/osf.io,fabianvf/osf.io,amyshi188/osf.io,laurenrevere/osf.io,crcresearch/osf.io,jmcarp/osf.io,sbt9uc/osf.io,doublebits/osf.io,erinspace/osf.io,wearpants/osf.io,brianjgeiger/osf.io,ckc6cz/osf.io,jmcarp/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,ZobairAlijan/osf.io,brianjgeiger/osf.io,asanfilippo7/osf.io,Nesiehr/osf.io,cldershem/osf.io,lamdnhan/osf.io,reinaH/osf.io,TomHeatwole/osf.io,GageGaskins/osf.io,cldershem/osf.io,GageGaskins/osf.io,cldershem/osf.io,caneruguz/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,saradbowman/osf.io,laurenrevere/osf.io,acshi/osf.io,bdyetton/prettychart,kwierman/osf.io,abought/osf.io,binoculars/osf.io,barbour-em/osf.io,mluo613/osf.io,jeffreyliu3230/osf.io,laurenrevere/osf.io,HalcyonChimera/osf.io,reinaH/osf.io,Nesiehr/osf.io,emetsger/osf.io,brandonPurvis/osf.io,caseyrygt/osf.io,aaxelb/osf.io,billyhunt/osf.io,Johnetordoff/osf.io,Ghalko/osf.io,kwierman/osf.io,bdyetton/prettychart,samanehsan/osf.io,KAsante95/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,kch8qx/osf.io,jolene-esposito/osf.io,mattclark/osf.io,caneruguz/osf.io,zachjanicki/osf.io,Johnetordoff/osf.io,RomanZWang/osf.io,cosenal/osf.io,caseyrygt/osf.io,pattisdr/osf.io,GageGaskins/osf.io,samanehsan/osf.io,petermalcolm/osf.io,crcresearch/osf.io,caseyrollins/osf.io,lyndsysimon/osf.io,pattisdr/osf.io,fabianvf/osf.io,monikagra
bowska/osf.io,jolene-esposito/osf.io,HalcyonChimera/osf.io,cosenal/osf.io,caseyrollins/osf.io,revanthkolli/osf.io,MerlinZhang/osf.io,njantrania/osf.io,kwierman/osf.io,GageGaskins/osf.io,ZobairAlijan/osf.io,monikagrabowska/osf.io,jnayak1/osf.io,reinaH/osf.io,mluke93/osf.io,emetsger/osf.io,jinluyuan/osf.io,RomanZWang/osf.io,abought/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,rdhyee/osf.io,danielneis/osf.io,baylee-d/osf.io,haoyuchen1992/osf.io,cwisecarver/osf.io,cwisecarver/osf.io,felliott/osf.io,HarryRybacki/osf.io,mluke93/osf.io,caseyrygt/osf.io,cldershem/osf.io,jeffreyliu3230/osf.io,wearpants/osf.io,himanshuo/osf.io,zkraime/osf.io,GageGaskins/osf.io,dplorimer/osf,mattclark/osf.io,alexschiller/osf.io,KAsante95/osf.io,ticklemepierce/osf.io,jolene-esposito/osf.io,Nesiehr/osf.io,lyndsysimon/osf.io,cosenal/osf.io,felliott/osf.io,DanielSBrown/osf.io,DanielSBrown/osf.io,rdhyee/osf.io,bdyetton/prettychart,mluo613/osf.io,zkraime/osf.io,icereval/osf.io,cslzchen/osf.io,arpitar/osf.io,MerlinZhang/osf.io,HarryRybacki/osf.io,RomanZWang/osf.io,mluo613/osf.io,chrisseto/osf.io,cslzchen/osf.io,alexschiller/osf.io,cosenal/osf.io,billyhunt/osf.io,lyndsysimon/osf.io,doublebits/osf.io,danielneis/osf.io,asanfilippo7/osf.io,KAsante95/osf.io,zamattiac/osf.io,wearpants/osf.io,mluo613/osf.io,cwisecarver/osf.io,sloria/osf.io,bdyetton/prettychart,hmoco/osf.io,GaryKriebel/osf.io,jeffreyliu3230/osf.io,lamdnhan/osf.io,zkraime/osf.io,caseyrygt/osf.io,GaryKriebel/osf.io,asanfilippo7/osf.io,kwierman/osf.io,revanthkolli/osf.io,kch8qx/osf.io,kch8qx/osf.io,chennan47/osf.io,abought/osf.io,sloria/osf.io,samchrisinger/osf.io,pattisdr/osf.io,erinspace/osf.io,dplorimer/osf,aaxelb/osf.io,SSJohns/osf.io,zachjanicki/osf.io,Johnetordoff/osf.io,doublebits/osf.io,acshi/osf.io,caseyrollins/osf.io,TomBaxter/osf.io,KAsante95/osf.io,emetsger/osf.io,wearpants/osf.io,hmoco/osf.io,fabianvf/osf.io,binoculars/osf.io,reinaH/osf.io,rdhyee/osf.io,kushG/osf.io
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Development entry point: serve the OSF app on a fixed local address."""
from website.app import init_app

# Build the application with backends and URL routes configured.
app = init_app('website.settings', set_backends=True, routes=True)

if __name__ == '__main__':
    app.run(host='127.0.0.1', port=5000)
Make host and port configurable thru envvars
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Entry point for the OSF app; bind address comes from the environment."""
import os

from website.app import init_app

app = init_app('website.settings', set_backends=True, routes=True)

if __name__ == '__main__':
    # OSF_HOST / OSF_PORT are optional; os.environ.get returns None when
    # a variable is absent, leaving the choice to app.run's own defaults.
    app.run(host=os.environ.get('OSF_HOST'),
            port=os.environ.get('OSF_PORT'))
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from website.app import init_app
app = init_app('website.settings', set_backends=True, routes=True)
if __name__ == '__main__':
app.run(host='127.0.0.1', port=5000)
<commit_msg>Make host and port configurable thru envvars<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from website.app import init_app
app = init_app('website.settings', set_backends=True, routes=True)
if __name__ == '__main__':
host = os.environ.get('OSF_HOST', None)
port = os.environ.get('OSF_PORT', None)
app.run(host=host, port=port)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from website.app import init_app
app = init_app('website.settings', set_backends=True, routes=True)
if __name__ == '__main__':
app.run(host='127.0.0.1', port=5000)
Make host and port configurable thru envvars#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from website.app import init_app
app = init_app('website.settings', set_backends=True, routes=True)
if __name__ == '__main__':
host = os.environ.get('OSF_HOST', None)
port = os.environ.get('OSF_PORT', None)
app.run(host=host, port=port)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from website.app import init_app
app = init_app('website.settings', set_backends=True, routes=True)
if __name__ == '__main__':
app.run(host='127.0.0.1', port=5000)
<commit_msg>Make host and port configurable thru envvars<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from website.app import init_app
app = init_app('website.settings', set_backends=True, routes=True)
if __name__ == '__main__':
host = os.environ.get('OSF_HOST', None)
port = os.environ.get('OSF_PORT', None)
app.run(host=host, port=port)
|
4f0c3e800fbbfab2d576a82b5a1db20d7feb676e
|
linked_accounts/authentication.py
|
linked_accounts/authentication.py
|
from django.http import HttpResponse
from django.template import loader
from django.utils.crypto import salted_hmac, constant_time_compare
from django.contrib.auth.models import User
class HMACAuth(object):
    """HMAC-based request authentication for the API.

    The client sends its user id in the ``X-LA-USER-ID`` header and a
    salted-HMAC signature of that id in ``X-LA-HASH``; the signature is
    recomputed server-side and compared in constant time.
    """

    def __init__(self, realm='API'):
        self.realm = realm

    def process_request(self, request):
        # Pull the (user_id, signature) pair out of the request headers;
        # either value is None when its header is absent.
        user_id = request.META.get('HTTP_X_LA_USER_ID', None)
        signature = request.META.get('HTTP_X_LA_HASH', None)
        return user_id, signature

    def is_authenticated(self, request):
        """Validate the signed headers; attach ``request.user`` on success.

        Returns True only for a valid signature naming an existing,
        active user; False for a missing/invalid signature, an unknown
        user id, or an inactive user.
        """
        user_id, signature = self.process_request(request)
        if user_id and signature:
            check_digest = salted_hmac("linked_accounts.views.login", str(user_id)).hexdigest()
            # constant_time_compare avoids leaking how much of the
            # signature matched via timing differences.
            if not constant_time_compare(signature, check_digest):
                return False
            try:
                user = User.objects.get(id=user_id)
                if user.is_active:
                    request.user = user
                    # BUG FIX: the original fell through and returned
                    # False even for a fully valid, active user, so
                    # authentication could never succeed.
                    return True
            except User.DoesNotExist:
                pass
        return False

    def challenge(self):
        # 401 response rendered from the API challenge template.
        response = HttpResponse()
        response.status_code = 401
        tmpl = loader.render_to_string('linked_accounts/api_challenge.html')
        response.content = tmpl
        return response

    def __repr__(self):
        return u'<HMACAuth: realm=%s>' % self.realm
|
from django.http import HttpResponse
from django.template import loader
from django.utils.crypto import salted_hmac, constant_time_compare
from django.contrib.auth.models import User
class HMACAuth(object):
    """HMAC-based request authentication for the API.

    The client sends its user id in the ``X-LA-USER-ID`` header and a
    salted-HMAC signature of that id in ``X-LA-HASH``; the signature is
    recomputed server-side and compared in constant time.
    """

    def __init__(self, realm='API'):
        self.realm = realm

    def process_request(self, request):
        # Pull the (user_id, signature) pair out of the request headers;
        # either value is None when its header is absent.
        user_id = request.META.get('HTTP_X_LA_USER_ID', None)
        signature = request.META.get('HTTP_X_LA_HASH', None)
        return user_id, signature

    def is_authenticated(self, request):
        """Validate the signed headers; attach ``request.user`` on success.

        Returns True only for a valid signature naming an existing,
        active user; False for a missing/invalid signature, an unknown
        user id, or an inactive user.
        """
        user_id, signature = self.process_request(request)
        if user_id and signature:
            check_digest = salted_hmac("linked_accounts.views.login", str(user_id)).hexdigest()
            # constant_time_compare avoids leaking how much of the
            # signature matched via timing differences.
            if not constant_time_compare(signature, check_digest):
                return False
            try:
                user = User.objects.get(id=user_id)
                if user.is_active:
                    request.user = user
                    return True
            except User.DoesNotExist:
                pass
        return False

    def challenge(self):
        # 401 response rendered from the API challenge template.
        response = HttpResponse()
        response.status_code = 401
        tmpl = loader.render_to_string('linked_accounts/api_challenge.html')
        response.content = tmpl
        return response

    def __repr__(self):
        return u'<HMACAuth: realm=%s>' % self.realm
|
Return is_authenticated true if we got user and it is active
|
Return is_authenticated true if we got user and it is active
|
Python
|
mit
|
zen4ever/django-linked-accounts,zen4ever/django-linked-accounts
|
from django.http import HttpResponse
from django.template import loader
from django.utils.crypto import salted_hmac, constant_time_compare
from django.contrib.auth.models import User
class HMACAuth(object):
def __init__(self, realm='API'):
self.realm = realm
def process_request(self, request):
user_id = request.META.get('HTTP_X_LA_USER_ID', None)
signature = request.META.get('HTTP_X_LA_HASH', None)
return user_id, signature
def is_authenticated(self, request):
user_id, signature = self.process_request(request)
if user_id and signature:
check_digest = salted_hmac("linked_accounts.views.login", str(user_id)).hexdigest()
if not constant_time_compare(signature, check_digest):
return False
try:
user = User.objects.get(id=user_id)
if user.is_active:
request.user = user
except User.DoesNotExist:
pass
return False
def challenge(self):
response = HttpResponse()
response.status_code = 401
tmpl = loader.render_to_string('linked_accounts/api_challenge.html')
response.content = tmpl
return response
def __repr__(self):
return u'<HMACAuth: realm=%s>' % self.realm
Return is_authenticated true if we got user and it is active
|
from django.http import HttpResponse
from django.template import loader
from django.utils.crypto import salted_hmac, constant_time_compare
from django.contrib.auth.models import User
class HMACAuth(object):
def __init__(self, realm='API'):
self.realm = realm
def process_request(self, request):
user_id = request.META.get('HTTP_X_LA_USER_ID', None)
signature = request.META.get('HTTP_X_LA_HASH', None)
return user_id, signature
def is_authenticated(self, request):
user_id, signature = self.process_request(request)
if user_id and signature:
check_digest = salted_hmac("linked_accounts.views.login", str(user_id)).hexdigest()
if not constant_time_compare(signature, check_digest):
return False
try:
user = User.objects.get(id=user_id)
if user.is_active:
request.user = user
return True
except User.DoesNotExist:
pass
return False
def challenge(self):
response = HttpResponse()
response.status_code = 401
tmpl = loader.render_to_string('linked_accounts/api_challenge.html')
response.content = tmpl
return response
def __repr__(self):
return u'<HMACAuth: realm=%s>' % self.realm
|
<commit_before>from django.http import HttpResponse
from django.template import loader
from django.utils.crypto import salted_hmac, constant_time_compare
from django.contrib.auth.models import User
class HMACAuth(object):
def __init__(self, realm='API'):
self.realm = realm
def process_request(self, request):
user_id = request.META.get('HTTP_X_LA_USER_ID', None)
signature = request.META.get('HTTP_X_LA_HASH', None)
return user_id, signature
def is_authenticated(self, request):
user_id, signature = self.process_request(request)
if user_id and signature:
check_digest = salted_hmac("linked_accounts.views.login", str(user_id)).hexdigest()
if not constant_time_compare(signature, check_digest):
return False
try:
user = User.objects.get(id=user_id)
if user.is_active:
request.user = user
except User.DoesNotExist:
pass
return False
def challenge(self):
response = HttpResponse()
response.status_code = 401
tmpl = loader.render_to_string('linked_accounts/api_challenge.html')
response.content = tmpl
return response
def __repr__(self):
return u'<HMACAuth: realm=%s>' % self.realm
<commit_msg>Return is_authenticated true if we got user and it is active<commit_after>
|
from django.http import HttpResponse
from django.template import loader
from django.utils.crypto import salted_hmac, constant_time_compare
from django.contrib.auth.models import User
class HMACAuth(object):
def __init__(self, realm='API'):
self.realm = realm
def process_request(self, request):
user_id = request.META.get('HTTP_X_LA_USER_ID', None)
signature = request.META.get('HTTP_X_LA_HASH', None)
return user_id, signature
def is_authenticated(self, request):
user_id, signature = self.process_request(request)
if user_id and signature:
check_digest = salted_hmac("linked_accounts.views.login", str(user_id)).hexdigest()
if not constant_time_compare(signature, check_digest):
return False
try:
user = User.objects.get(id=user_id)
if user.is_active:
request.user = user
return True
except User.DoesNotExist:
pass
return False
def challenge(self):
response = HttpResponse()
response.status_code = 401
tmpl = loader.render_to_string('linked_accounts/api_challenge.html')
response.content = tmpl
return response
def __repr__(self):
return u'<HMACAuth: realm=%s>' % self.realm
|
from django.http import HttpResponse
from django.template import loader
from django.utils.crypto import salted_hmac, constant_time_compare
from django.contrib.auth.models import User
class HMACAuth(object):
def __init__(self, realm='API'):
self.realm = realm
def process_request(self, request):
user_id = request.META.get('HTTP_X_LA_USER_ID', None)
signature = request.META.get('HTTP_X_LA_HASH', None)
return user_id, signature
def is_authenticated(self, request):
user_id, signature = self.process_request(request)
if user_id and signature:
check_digest = salted_hmac("linked_accounts.views.login", str(user_id)).hexdigest()
if not constant_time_compare(signature, check_digest):
return False
try:
user = User.objects.get(id=user_id)
if user.is_active:
request.user = user
except User.DoesNotExist:
pass
return False
def challenge(self):
response = HttpResponse()
response.status_code = 401
tmpl = loader.render_to_string('linked_accounts/api_challenge.html')
response.content = tmpl
return response
def __repr__(self):
return u'<HMACAuth: realm=%s>' % self.realm
Return is_authenticated true if we got user and it is activefrom django.http import HttpResponse
from django.template import loader
from django.utils.crypto import salted_hmac, constant_time_compare
from django.contrib.auth.models import User
class HMACAuth(object):
def __init__(self, realm='API'):
self.realm = realm
def process_request(self, request):
user_id = request.META.get('HTTP_X_LA_USER_ID', None)
signature = request.META.get('HTTP_X_LA_HASH', None)
return user_id, signature
def is_authenticated(self, request):
user_id, signature = self.process_request(request)
if user_id and signature:
check_digest = salted_hmac("linked_accounts.views.login", str(user_id)).hexdigest()
if not constant_time_compare(signature, check_digest):
return False
try:
user = User.objects.get(id=user_id)
if user.is_active:
request.user = user
return True
except User.DoesNotExist:
pass
return False
def challenge(self):
response = HttpResponse()
response.status_code = 401
tmpl = loader.render_to_string('linked_accounts/api_challenge.html')
response.content = tmpl
return response
def __repr__(self):
return u'<HMACAuth: realm=%s>' % self.realm
|
<commit_before>from django.http import HttpResponse
from django.template import loader
from django.utils.crypto import salted_hmac, constant_time_compare
from django.contrib.auth.models import User
class HMACAuth(object):
def __init__(self, realm='API'):
self.realm = realm
def process_request(self, request):
user_id = request.META.get('HTTP_X_LA_USER_ID', None)
signature = request.META.get('HTTP_X_LA_HASH', None)
return user_id, signature
def is_authenticated(self, request):
user_id, signature = self.process_request(request)
if user_id and signature:
check_digest = salted_hmac("linked_accounts.views.login", str(user_id)).hexdigest()
if not constant_time_compare(signature, check_digest):
return False
try:
user = User.objects.get(id=user_id)
if user.is_active:
request.user = user
except User.DoesNotExist:
pass
return False
def challenge(self):
response = HttpResponse()
response.status_code = 401
tmpl = loader.render_to_string('linked_accounts/api_challenge.html')
response.content = tmpl
return response
def __repr__(self):
return u'<HMACAuth: realm=%s>' % self.realm
<commit_msg>Return is_authenticated true if we got user and it is active<commit_after>from django.http import HttpResponse
from django.template import loader
from django.utils.crypto import salted_hmac, constant_time_compare
from django.contrib.auth.models import User
class HMACAuth(object):
def __init__(self, realm='API'):
self.realm = realm
def process_request(self, request):
user_id = request.META.get('HTTP_X_LA_USER_ID', None)
signature = request.META.get('HTTP_X_LA_HASH', None)
return user_id, signature
def is_authenticated(self, request):
user_id, signature = self.process_request(request)
if user_id and signature:
check_digest = salted_hmac("linked_accounts.views.login", str(user_id)).hexdigest()
if not constant_time_compare(signature, check_digest):
return False
try:
user = User.objects.get(id=user_id)
if user.is_active:
request.user = user
return True
except User.DoesNotExist:
pass
return False
def challenge(self):
response = HttpResponse()
response.status_code = 401
tmpl = loader.render_to_string('linked_accounts/api_challenge.html')
response.content = tmpl
return response
def __repr__(self):
return u'<HMACAuth: realm=%s>' % self.realm
|
ba3210b802c9cc1395edee1ad84cea025cc275cf
|
regconfig/reg_d.py
|
regconfig/reg_d.py
|
#### Regulation D
# Terms whose definitions are pulled into Part 1004.
# Each entry is a (term, definition-text) pair.
INCLUDE_DEFINITIONS_IN_PART_1004 = [
    ('Alternative mortgage transaction', 'Alternative mortgage transaction'),
    ('Creditor', 'Creditor'),
    ('State', 'State'),
    ('State law', 'State law'),
]

# Per-marker nesting levels for section 1004.2, in document order
# (presumably 1 = top-level paragraph, 2 = sub-paragraph — verify
# against the parser that consumes this table).
PARAGRAPH_HIERARCHY_1004 = {
    '1004.2': [
        1,
        1,
        2, 2, 2,
        1,
        1,
        2, 2, 2, 2,
        1,
        1,
    ],
}
|
#### Regulation D
# Terms whose definitions are pulled into Part 1004.
# Each entry is a (term, definition-text) pair.
INCLUDE_DEFINITIONS_IN_PART_1004 = [
    ('Creditor', 'Creditor shall have the same meaning as in 12 CFR 226.2'),
]

# Per-marker nesting levels for section 1004.2, in document order
# (presumably 1 = top-level paragraph, 2 = sub-paragraph — verify
# against the parser that consumes this table).
PARAGRAPH_HIERARCHY_1004 = {
    '1004.2': [
        1,
        1,
        2, 2, 2,
        1,
        1,
        2, 2, 2, 2,
        1,
        1,
    ],
}
|
Fix reg d included definitions
|
Fix reg d included definitions
|
Python
|
cc0-1.0
|
cfpb/regulations-configs,ascott1/regulations-configs,grapesmoker/regulations-configs
|
#### Regulation D
INCLUDE_DEFINITIONS_IN_PART_1004 = [
('Alternative mortgage transaction', 'Alternative mortgage transaction'),
('Creditor', 'Creditor'),
('State', 'State'),
('State law', 'State law'),
]
PARAGRAPH_HIERARCHY_1004 = {
'1004.2': [
1,
1,
2, 2, 2,
1,
1,
2, 2, 2, 2,
1,
1,
],
}
Fix reg d included definitions
|
#### Regulation D
INCLUDE_DEFINITIONS_IN_PART_1004 = [
('Creditor', 'Creditor shall have the same meaning as in 12 CFR 226.2'),
]
PARAGRAPH_HIERARCHY_1004 = {
'1004.2': [
1,
1,
2, 2, 2,
1,
1,
2, 2, 2, 2,
1,
1,
],
}
|
<commit_before>#### Regulation D
INCLUDE_DEFINITIONS_IN_PART_1004 = [
('Alternative mortgage transaction', 'Alternative mortgage transaction'),
('Creditor', 'Creditor'),
('State', 'State'),
('State law', 'State law'),
]
PARAGRAPH_HIERARCHY_1004 = {
'1004.2': [
1,
1,
2, 2, 2,
1,
1,
2, 2, 2, 2,
1,
1,
],
}
<commit_msg>Fix reg d included definitions<commit_after>
|
#### Regulation D
INCLUDE_DEFINITIONS_IN_PART_1004 = [
('Creditor', 'Creditor shall have the same meaning as in 12 CFR 226.2'),
]
PARAGRAPH_HIERARCHY_1004 = {
'1004.2': [
1,
1,
2, 2, 2,
1,
1,
2, 2, 2, 2,
1,
1,
],
}
|
#### Regulation D
INCLUDE_DEFINITIONS_IN_PART_1004 = [
('Alternative mortgage transaction', 'Alternative mortgage transaction'),
('Creditor', 'Creditor'),
('State', 'State'),
('State law', 'State law'),
]
PARAGRAPH_HIERARCHY_1004 = {
'1004.2': [
1,
1,
2, 2, 2,
1,
1,
2, 2, 2, 2,
1,
1,
],
}
Fix reg d included definitions#### Regulation D
INCLUDE_DEFINITIONS_IN_PART_1004 = [
('Creditor', 'Creditor shall have the same meaning as in 12 CFR 226.2'),
]
PARAGRAPH_HIERARCHY_1004 = {
'1004.2': [
1,
1,
2, 2, 2,
1,
1,
2, 2, 2, 2,
1,
1,
],
}
|
<commit_before>#### Regulation D
INCLUDE_DEFINITIONS_IN_PART_1004 = [
('Alternative mortgage transaction', 'Alternative mortgage transaction'),
('Creditor', 'Creditor'),
('State', 'State'),
('State law', 'State law'),
]
PARAGRAPH_HIERARCHY_1004 = {
'1004.2': [
1,
1,
2, 2, 2,
1,
1,
2, 2, 2, 2,
1,
1,
],
}
<commit_msg>Fix reg d included definitions<commit_after>#### Regulation D
INCLUDE_DEFINITIONS_IN_PART_1004 = [
('Creditor', 'Creditor shall have the same meaning as in 12 CFR 226.2'),
]
PARAGRAPH_HIERARCHY_1004 = {
'1004.2': [
1,
1,
2, 2, 2,
1,
1,
2, 2, 2, 2,
1,
1,
],
}
|
e6acee0000b68bf57a59d13bda0ca3b547d11c2b
|
tests/acceptance/test_dcos_command.py
|
tests/acceptance/test_dcos_command.py
|
from shakedown import *
def test_run_command():
    # Run a command against the master addressed by explicit IP.
    exit_status, output = run_command(master_ip(), 'cat /etc/motd')
    assert exit_status


def test_run_command_on_master():
    exit_status, output = run_command_on_master('uname -a')
    assert exit_status
    assert output.startswith('Linux')


def test_run_command_on_leader():
    exit_status, output = run_command_on_leader('uname -a')
    assert exit_status
    assert output.startswith('Linux')


def test_run_command_on_marathon_leader():
    exit_status, output = run_command_on_marathon_leader('uname -a')
    assert exit_status
    assert output.startswith('Linux')


def test_run_command_on_agent():
    # Get all IPs associated with the 'jenkins' task running in the 'marathon' service
    service_ips = get_service_ips('marathon', 'jenkins')
    for host in service_ips:
        exit_status, output = run_command_on_agent(host, 'ps -eaf | grep -i docker | grep -i jenkins')
        assert exit_status
        assert output.startswith('root')


def test_run_dcos_command():
    # `json` is presumably re-exported by `from shakedown import *` — confirm.
    stdout, stderr, return_code = run_dcos_command('package search jenkins --json')
    result_json = json.loads(stdout)
    assert result_json['packages'][0]['name'] == 'jenkins'
|
from shakedown import *
def test_run_command():
    # Run a command against the master addressed by explicit IP.
    exit_status, output = run_command(master_ip(), 'cat /etc/motd')
    assert exit_status


def test_run_command_on_master():
    exit_status, output = run_command_on_master('uname -a')
    assert exit_status
    assert output.startswith('Linux')


def test_run_command_on_leader():
    exit_status, output = run_command_on_leader('uname -a')
    assert exit_status
    assert output.startswith('Linux')


def test_run_command_on_marathon_leader():
    exit_status, output = run_command_on_marathon_leader('uname -a')
    assert exit_status
    assert output.startswith('Linux')


def test_run_command_on_agent():
    """Run 'ps' on all agents looking for jenkins."""
    # Connects to every private and public agent in the cluster.
    service_ips = get_private_agents() + get_public_agents()
    for host in service_ips:
        exit_status, output = run_command_on_agent(host, 'ps -eaf | grep -i docker | grep -i jenkins')
        assert exit_status
        assert output.startswith('root')


def test_run_dcos_command():
    # `json` is presumably re-exported by `from shakedown import *` — confirm.
    stdout, stderr, return_code = run_dcos_command('package search jenkins --json')
    result_json = json.loads(stdout)
    assert result_json['packages'][0]['name'] == 'jenkins'
|
Make test_run_command_on_agent connect to all agents
|
Make test_run_command_on_agent connect to all agents
Increase testing of SSH connection handling by connecting to all
agents in the cluster.
|
Python
|
apache-2.0
|
dcos/shakedown
|
from shakedown import *
def test_run_command():
exit_status, output = run_command(master_ip(), 'cat /etc/motd')
assert exit_status
def test_run_command_on_master():
exit_status, output = run_command_on_master('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_leader():
exit_status, output = run_command_on_leader('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_marathon_leader():
exit_status, output = run_command_on_marathon_leader('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_agent():
# Get all IPs associated with the 'jenkins' task running in the 'marathon' service
service_ips = get_service_ips('marathon', 'jenkins')
for host in service_ips:
exit_status, output = run_command_on_agent(host, 'ps -eaf | grep -i docker | grep -i jenkins')
assert exit_status
assert output.startswith('root')
def test_run_dcos_command():
stdout, stderr, return_code = run_dcos_command('package search jenkins --json')
result_json = json.loads(stdout)
assert result_json['packages'][0]['name'] == 'jenkins'
Make test_run_command_on_agent connect to all agents
Increase testing of SSH connection handling by connecting to all
agents in the cluster.
|
from shakedown import *
def test_run_command():
exit_status, output = run_command(master_ip(), 'cat /etc/motd')
assert exit_status
def test_run_command_on_master():
exit_status, output = run_command_on_master('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_leader():
exit_status, output = run_command_on_leader('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_marathon_leader():
exit_status, output = run_command_on_marathon_leader('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_agent():
"""Run 'ps' on all agents looking for jenkins."""
service_ips = get_private_agents() + get_public_agents()
for host in service_ips:
exit_status, output = run_command_on_agent(host, 'ps -eaf | grep -i docker | grep -i jenkins')
assert exit_status
assert output.startswith('root')
def test_run_dcos_command():
stdout, stderr, return_code = run_dcos_command('package search jenkins --json')
result_json = json.loads(stdout)
assert result_json['packages'][0]['name'] == 'jenkins'
|
<commit_before>from shakedown import *
def test_run_command():
exit_status, output = run_command(master_ip(), 'cat /etc/motd')
assert exit_status
def test_run_command_on_master():
exit_status, output = run_command_on_master('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_leader():
exit_status, output = run_command_on_leader('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_marathon_leader():
exit_status, output = run_command_on_marathon_leader('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_agent():
# Get all IPs associated with the 'jenkins' task running in the 'marathon' service
service_ips = get_service_ips('marathon', 'jenkins')
for host in service_ips:
exit_status, output = run_command_on_agent(host, 'ps -eaf | grep -i docker | grep -i jenkins')
assert exit_status
assert output.startswith('root')
def test_run_dcos_command():
stdout, stderr, return_code = run_dcos_command('package search jenkins --json')
result_json = json.loads(stdout)
assert result_json['packages'][0]['name'] == 'jenkins'
<commit_msg>Make test_run_command_on_agent connect to all agents
Increase testing of SSH connection handling by connecting to all
agents in the cluster.<commit_after>
|
from shakedown import *
def test_run_command():
exit_status, output = run_command(master_ip(), 'cat /etc/motd')
assert exit_status
def test_run_command_on_master():
exit_status, output = run_command_on_master('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_leader():
exit_status, output = run_command_on_leader('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_marathon_leader():
exit_status, output = run_command_on_marathon_leader('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_agent():
"""Run 'ps' on all agents looking for jenkins."""
service_ips = get_private_agents() + get_public_agents()
for host in service_ips:
exit_status, output = run_command_on_agent(host, 'ps -eaf | grep -i docker | grep -i jenkins')
assert exit_status
assert output.startswith('root')
def test_run_dcos_command():
stdout, stderr, return_code = run_dcos_command('package search jenkins --json')
result_json = json.loads(stdout)
assert result_json['packages'][0]['name'] == 'jenkins'
|
from shakedown import *
def test_run_command():
exit_status, output = run_command(master_ip(), 'cat /etc/motd')
assert exit_status
def test_run_command_on_master():
exit_status, output = run_command_on_master('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_leader():
exit_status, output = run_command_on_leader('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_marathon_leader():
exit_status, output = run_command_on_marathon_leader('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_agent():
# Get all IPs associated with the 'jenkins' task running in the 'marathon' service
service_ips = get_service_ips('marathon', 'jenkins')
for host in service_ips:
exit_status, output = run_command_on_agent(host, 'ps -eaf | grep -i docker | grep -i jenkins')
assert exit_status
assert output.startswith('root')
def test_run_dcos_command():
stdout, stderr, return_code = run_dcos_command('package search jenkins --json')
result_json = json.loads(stdout)
assert result_json['packages'][0]['name'] == 'jenkins'
Make test_run_command_on_agent connect to all agents
Increase testing of SSH connection handling by connecting to all
agents in the cluster.from shakedown import *
def test_run_command():
exit_status, output = run_command(master_ip(), 'cat /etc/motd')
assert exit_status
def test_run_command_on_master():
exit_status, output = run_command_on_master('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_leader():
exit_status, output = run_command_on_leader('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_marathon_leader():
exit_status, output = run_command_on_marathon_leader('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_agent():
"""Run 'ps' on all agents looking for jenkins."""
service_ips = get_private_agents() + get_public_agents()
for host in service_ips:
exit_status, output = run_command_on_agent(host, 'ps -eaf | grep -i docker | grep -i jenkins')
assert exit_status
assert output.startswith('root')
def test_run_dcos_command():
stdout, stderr, return_code = run_dcos_command('package search jenkins --json')
result_json = json.loads(stdout)
assert result_json['packages'][0]['name'] == 'jenkins'
|
<commit_before>from shakedown import *
def test_run_command():
exit_status, output = run_command(master_ip(), 'cat /etc/motd')
assert exit_status
def test_run_command_on_master():
exit_status, output = run_command_on_master('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_leader():
exit_status, output = run_command_on_leader('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_marathon_leader():
exit_status, output = run_command_on_marathon_leader('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_agent():
# Get all IPs associated with the 'jenkins' task running in the 'marathon' service
service_ips = get_service_ips('marathon', 'jenkins')
for host in service_ips:
exit_status, output = run_command_on_agent(host, 'ps -eaf | grep -i docker | grep -i jenkins')
assert exit_status
assert output.startswith('root')
def test_run_dcos_command():
stdout, stderr, return_code = run_dcos_command('package search jenkins --json')
result_json = json.loads(stdout)
assert result_json['packages'][0]['name'] == 'jenkins'
<commit_msg>Make test_run_command_on_agent connect to all agents
Increase testing of SSH connection handling by connecting to all
agents in the cluster.<commit_after>from shakedown import *
def test_run_command():
exit_status, output = run_command(master_ip(), 'cat /etc/motd')
assert exit_status
def test_run_command_on_master():
exit_status, output = run_command_on_master('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_leader():
exit_status, output = run_command_on_leader('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_marathon_leader():
exit_status, output = run_command_on_marathon_leader('uname -a')
assert exit_status
assert output.startswith('Linux')
def test_run_command_on_agent():
"""Run 'ps' on all agents looking for jenkins."""
service_ips = get_private_agents() + get_public_agents()
for host in service_ips:
exit_status, output = run_command_on_agent(host, 'ps -eaf | grep -i docker | grep -i jenkins')
assert exit_status
assert output.startswith('root')
def test_run_dcos_command():
stdout, stderr, return_code = run_dcos_command('package search jenkins --json')
result_json = json.loads(stdout)
assert result_json['packages'][0]['name'] == 'jenkins'
|
db4620130cf8444dec7c42bc1f907acdec89dfed
|
maws.py
|
maws.py
|
#!/usr/bin/python3
import argparse
import sys
from mawslib.manager import Manager
configfile="cloudconfig.yaml"
parser = argparse.ArgumentParser(
#add_help=False,
description='AWS Manager',
usage='''maws [<options>] <command> <subcommand> [<args>]
For help:
maws help
maws <command> help
maws <command> <subcommand> help
''')
parser.add_argument('command', help='Command to run',
choices = ['help', 'ec2', 'sdb', 'route53', 'r53', 'rds',
'cloudformation', 'cfn' ])
parser.add_argument('--config',
help='alternate config file to use (default: cloudconfig.yaml)',
action="store")
# parse_args defaults to [1:] for args, but you need to
# exclude the rest of the args too, or validation will fail
args, subargs = parser.parse_known_args()
if hasattr(args, "config"): configfile = args.config
mgr = Manager(configfile)
mgr.showname()
if args.command == "cfn": args.command = "cloudformation"
if args.command == "r53": args.command = "route53"
exec("from cli.%s_cli import processCommand" % args.command)
processCommand(mgr, subargs)
|
#!/usr/bin/python3
import argparse
import sys
from mawslib.manager import Manager
import importlib
configfile="cloudconfig.yaml"
parser = argparse.ArgumentParser(
#add_help=False,
description='AWS Manager',
usage='''maws [<options>] <command> <subcommand> [<args>]
For help:
maws help
maws <command> help
maws <command> <subcommand> help
''')
parser.add_argument('command', help='Command to run',
choices = ['help', 'ec2', 'sdb', 'route53', 'r53', 'rds',
'cloudformation', 'cfn' ])
parser.add_argument('--config',
help='alternate config file to use (default: cloudconfig.yaml)',
action="store")
# parse_args defaults to [1:] for args, but you need to
# exclude the rest of the args too, or validation will fail
args, subargs = parser.parse_known_args()
if hasattr(args, "config"): configfile = args.config
mgr = Manager(configfile)
mgr.showname()
if args.command == "cfn": args.command = "cloudformation"
if args.command == "r53": args.command = "route53"
cli_mod = importlib.import_module("cli.%s_cli" % args.command)
cli_mod.processCommand(mgr, subargs)
|
Use importlib instead of exec (exec was pretty ugly)
|
Use importlib instead of exec (exec was pretty ugly)
|
Python
|
mit
|
uva-its/awstools
|
#!/usr/bin/python3
import argparse
import sys
from mawslib.manager import Manager
configfile="cloudconfig.yaml"
parser = argparse.ArgumentParser(
#add_help=False,
description='AWS Manager',
usage='''maws [<options>] <command> <subcommand> [<args>]
For help:
maws help
maws <command> help
maws <command> <subcommand> help
''')
parser.add_argument('command', help='Command to run',
choices = ['help', 'ec2', 'sdb', 'route53', 'r53', 'rds',
'cloudformation', 'cfn' ])
parser.add_argument('--config',
help='alternate config file to use (default: cloudconfig.yaml)',
action="store")
# parse_args defaults to [1:] for args, but you need to
# exclude the rest of the args too, or validation will fail
args, subargs = parser.parse_known_args()
if hasattr(args, "config"): configfile = args.config
mgr = Manager(configfile)
mgr.showname()
if args.command == "cfn": args.command = "cloudformation"
if args.command == "r53": args.command = "route53"
exec("from cli.%s_cli import processCommand" % args.command)
processCommand(mgr, subargs)
Use importlib instead of exec (exec was pretty ugly)
|
#!/usr/bin/python3
import argparse
import sys
from mawslib.manager import Manager
import importlib
configfile="cloudconfig.yaml"
parser = argparse.ArgumentParser(
#add_help=False,
description='AWS Manager',
usage='''maws [<options>] <command> <subcommand> [<args>]
For help:
maws help
maws <command> help
maws <command> <subcommand> help
''')
parser.add_argument('command', help='Command to run',
choices = ['help', 'ec2', 'sdb', 'route53', 'r53', 'rds',
'cloudformation', 'cfn' ])
parser.add_argument('--config',
help='alternate config file to use (default: cloudconfig.yaml)',
action="store")
# parse_args defaults to [1:] for args, but you need to
# exclude the rest of the args too, or validation will fail
args, subargs = parser.parse_known_args()
if hasattr(args, "config"): configfile = args.config
mgr = Manager(configfile)
mgr.showname()
if args.command == "cfn": args.command = "cloudformation"
if args.command == "r53": args.command = "route53"
cli_mod = importlib.import_module("cli.%s_cli" % args.command)
cli_mod.processCommand(mgr, subargs)
|
<commit_before>#!/usr/bin/python3
import argparse
import sys
from mawslib.manager import Manager
configfile="cloudconfig.yaml"
parser = argparse.ArgumentParser(
#add_help=False,
description='AWS Manager',
usage='''maws [<options>] <command> <subcommand> [<args>]
For help:
maws help
maws <command> help
maws <command> <subcommand> help
''')
parser.add_argument('command', help='Command to run',
choices = ['help', 'ec2', 'sdb', 'route53', 'r53', 'rds',
'cloudformation', 'cfn' ])
parser.add_argument('--config',
help='alternate config file to use (default: cloudconfig.yaml)',
action="store")
# parse_args defaults to [1:] for args, but you need to
# exclude the rest of the args too, or validation will fail
args, subargs = parser.parse_known_args()
if hasattr(args, "config"): configfile = args.config
mgr = Manager(configfile)
mgr.showname()
if args.command == "cfn": args.command = "cloudformation"
if args.command == "r53": args.command = "route53"
exec("from cli.%s_cli import processCommand" % args.command)
processCommand(mgr, subargs)
<commit_msg>Use importlib instead of exec (exec was pretty ugly)<commit_after>
|
#!/usr/bin/python3
import argparse
import sys
from mawslib.manager import Manager
import importlib
configfile="cloudconfig.yaml"
parser = argparse.ArgumentParser(
#add_help=False,
description='AWS Manager',
usage='''maws [<options>] <command> <subcommand> [<args>]
For help:
maws help
maws <command> help
maws <command> <subcommand> help
''')
parser.add_argument('command', help='Command to run',
choices = ['help', 'ec2', 'sdb', 'route53', 'r53', 'rds',
'cloudformation', 'cfn' ])
parser.add_argument('--config',
help='alternate config file to use (default: cloudconfig.yaml)',
action="store")
# parse_args defaults to [1:] for args, but you need to
# exclude the rest of the args too, or validation will fail
args, subargs = parser.parse_known_args()
if hasattr(args, "config"): configfile = args.config
mgr = Manager(configfile)
mgr.showname()
if args.command == "cfn": args.command = "cloudformation"
if args.command == "r53": args.command = "route53"
cli_mod = importlib.import_module("cli.%s_cli" % args.command)
cli_mod.processCommand(mgr, subargs)
|
#!/usr/bin/python3
import argparse
import sys
from mawslib.manager import Manager
configfile="cloudconfig.yaml"
parser = argparse.ArgumentParser(
#add_help=False,
description='AWS Manager',
usage='''maws [<options>] <command> <subcommand> [<args>]
For help:
maws help
maws <command> help
maws <command> <subcommand> help
''')
parser.add_argument('command', help='Command to run',
choices = ['help', 'ec2', 'sdb', 'route53', 'r53', 'rds',
'cloudformation', 'cfn' ])
parser.add_argument('--config',
help='alternate config file to use (default: cloudconfig.yaml)',
action="store")
# parse_args defaults to [1:] for args, but you need to
# exclude the rest of the args too, or validation will fail
args, subargs = parser.parse_known_args()
if hasattr(args, "config"): configfile = args.config
mgr = Manager(configfile)
mgr.showname()
if args.command == "cfn": args.command = "cloudformation"
if args.command == "r53": args.command = "route53"
exec("from cli.%s_cli import processCommand" % args.command)
processCommand(mgr, subargs)
Use importlib instead of exec (exec was pretty ugly)#!/usr/bin/python3
import argparse
import sys
from mawslib.manager import Manager
import importlib
configfile="cloudconfig.yaml"
parser = argparse.ArgumentParser(
#add_help=False,
description='AWS Manager',
usage='''maws [<options>] <command> <subcommand> [<args>]
For help:
maws help
maws <command> help
maws <command> <subcommand> help
''')
parser.add_argument('command', help='Command to run',
choices = ['help', 'ec2', 'sdb', 'route53', 'r53', 'rds',
'cloudformation', 'cfn' ])
parser.add_argument('--config',
help='alternate config file to use (default: cloudconfig.yaml)',
action="store")
# parse_args defaults to [1:] for args, but you need to
# exclude the rest of the args too, or validation will fail
args, subargs = parser.parse_known_args()
if hasattr(args, "config"): configfile = args.config
mgr = Manager(configfile)
mgr.showname()
if args.command == "cfn": args.command = "cloudformation"
if args.command == "r53": args.command = "route53"
cli_mod = importlib.import_module("cli.%s_cli" % args.command)
cli_mod.processCommand(mgr, subargs)
|
<commit_before>#!/usr/bin/python3
import argparse
import sys
from mawslib.manager import Manager
configfile="cloudconfig.yaml"
parser = argparse.ArgumentParser(
#add_help=False,
description='AWS Manager',
usage='''maws [<options>] <command> <subcommand> [<args>]
For help:
maws help
maws <command> help
maws <command> <subcommand> help
''')
parser.add_argument('command', help='Command to run',
choices = ['help', 'ec2', 'sdb', 'route53', 'r53', 'rds',
'cloudformation', 'cfn' ])
parser.add_argument('--config',
help='alternate config file to use (default: cloudconfig.yaml)',
action="store")
# parse_args defaults to [1:] for args, but you need to
# exclude the rest of the args too, or validation will fail
args, subargs = parser.parse_known_args()
if hasattr(args, "config"): configfile = args.config
mgr = Manager(configfile)
mgr.showname()
if args.command == "cfn": args.command = "cloudformation"
if args.command == "r53": args.command = "route53"
exec("from cli.%s_cli import processCommand" % args.command)
processCommand(mgr, subargs)
<commit_msg>Use importlib instead of exec (exec was pretty ugly)<commit_after>#!/usr/bin/python3
import argparse
import sys
from mawslib.manager import Manager
import importlib
configfile="cloudconfig.yaml"
parser = argparse.ArgumentParser(
#add_help=False,
description='AWS Manager',
usage='''maws [<options>] <command> <subcommand> [<args>]
For help:
maws help
maws <command> help
maws <command> <subcommand> help
''')
parser.add_argument('command', help='Command to run',
choices = ['help', 'ec2', 'sdb', 'route53', 'r53', 'rds',
'cloudformation', 'cfn' ])
parser.add_argument('--config',
help='alternate config file to use (default: cloudconfig.yaml)',
action="store")
# parse_args defaults to [1:] for args, but you need to
# exclude the rest of the args too, or validation will fail
args, subargs = parser.parse_known_args()
if hasattr(args, "config"): configfile = args.config
mgr = Manager(configfile)
mgr.showname()
if args.command == "cfn": args.command = "cloudformation"
if args.command == "r53": args.command = "route53"
cli_mod = importlib.import_module("cli.%s_cli" % args.command)
cli_mod.processCommand(mgr, subargs)
|
4851e57059e496b311fb68fc566e47e3d76745fb
|
string/first-str-substr-occr.py
|
string/first-str-substr-occr.py
|
# Implement a function that takes two strings, s and x, as arguments and finds the first occurrence of the string x in s. The function should return an integer indicating the index in s of the first occurrence of x. If there are no occurrences of x in s, return -1
def find_substring(string, substr):
string_len = len(string)
substr_len = len(substr)
i = 0
if substr_len >= 1:
i = 0
while i < string_len:
if string[i] == substr[j]:
if j == substr_len:
return i - substr_len
j += 1
else:
j = 0
i += 1
return -1
|
# Implement a function that takes two strings, s and x, as arguments and finds the first occurrence of the string x in s. The function should return an integer indicating the index in s of the first occurrence of x. If there are no occurrences of x in s, return -1
def find_substring(string, substr):
string_len = len(string)
substr_len = len(substr)
j = 0
if substr_len >= 1:
i = 0
while i < string_len:
if string[i] == substr[j]:
j += 1
if j == substr_len:
return i - substr_len
else:
j = 0
i += 1
return -1
# test cases
# test 1
string = "Hello"
substr = "eo"
print find_substring(string, substr)
|
Debug and add first test case
|
Debug and add first test case
|
Python
|
mit
|
derekmpham/interview-prep,derekmpham/interview-prep
|
# Implement a function that takes two strings, s and x, as arguments and finds the first occurrence of the string x in s. The function should return an integer indicating the index in s of the first occurrence of x. If there are no occurrences of x in s, return -1
def find_substring(string, substr):
string_len = len(string)
substr_len = len(substr)
i = 0
if substr_len >= 1:
i = 0
while i < string_len:
if string[i] == substr[j]:
if j == substr_len:
return i - substr_len
j += 1
else:
j = 0
i += 1
return -1
Debug and add first test case
|
# Implement a function that takes two strings, s and x, as arguments and finds the first occurrence of the string x in s. The function should return an integer indicating the index in s of the first occurrence of x. If there are no occurrences of x in s, return -1
def find_substring(string, substr):
string_len = len(string)
substr_len = len(substr)
j = 0
if substr_len >= 1:
i = 0
while i < string_len:
if string[i] == substr[j]:
j += 1
if j == substr_len:
return i - substr_len
else:
j = 0
i += 1
return -1
# test cases
# test 1
string = "Hello"
substr = "eo"
print find_substring(string, substr)
|
<commit_before># Implement a function that takes two strings, s and x, as arguments and finds the first occurrence of the string x in s. The function should return an integer indicating the index in s of the first occurrence of x. If there are no occurrences of x in s, return -1
def find_substring(string, substr):
string_len = len(string)
substr_len = len(substr)
i = 0
if substr_len >= 1:
i = 0
while i < string_len:
if string[i] == substr[j]:
if j == substr_len:
return i - substr_len
j += 1
else:
j = 0
i += 1
return -1
<commit_msg>Debug and add first test case<commit_after>
|
# Implement a function that takes two strings, s and x, as arguments and finds the first occurrence of the string x in s. The function should return an integer indicating the index in s of the first occurrence of x. If there are no occurrences of x in s, return -1
def find_substring(string, substr):
string_len = len(string)
substr_len = len(substr)
j = 0
if substr_len >= 1:
i = 0
while i < string_len:
if string[i] == substr[j]:
j += 1
if j == substr_len:
return i - substr_len
else:
j = 0
i += 1
return -1
# test cases
# test 1
string = "Hello"
substr = "eo"
print find_substring(string, substr)
|
# Implement a function that takes two strings, s and x, as arguments and finds the first occurrence of the string x in s. The function should return an integer indicating the index in s of the first occurrence of x. If there are no occurrences of x in s, return -1
def find_substring(string, substr):
string_len = len(string)
substr_len = len(substr)
i = 0
if substr_len >= 1:
i = 0
while i < string_len:
if string[i] == substr[j]:
if j == substr_len:
return i - substr_len
j += 1
else:
j = 0
i += 1
return -1
Debug and add first test case# Implement a function that takes two strings, s and x, as arguments and finds the first occurrence of the string x in s. The function should return an integer indicating the index in s of the first occurrence of x. If there are no occurrences of x in s, return -1
def find_substring(string, substr):
string_len = len(string)
substr_len = len(substr)
j = 0
if substr_len >= 1:
i = 0
while i < string_len:
if string[i] == substr[j]:
j += 1
if j == substr_len:
return i - substr_len
else:
j = 0
i += 1
return -1
# test cases
# test 1
string = "Hello"
substr = "eo"
print find_substring(string, substr)
|
<commit_before># Implement a function that takes two strings, s and x, as arguments and finds the first occurrence of the string x in s. The function should return an integer indicating the index in s of the first occurrence of x. If there are no occurrences of x in s, return -1
def find_substring(string, substr):
string_len = len(string)
substr_len = len(substr)
i = 0
if substr_len >= 1:
i = 0
while i < string_len:
if string[i] == substr[j]:
if j == substr_len:
return i - substr_len
j += 1
else:
j = 0
i += 1
return -1
<commit_msg>Debug and add first test case<commit_after># Implement a function that takes two strings, s and x, as arguments and finds the first occurrence of the string x in s. The function should return an integer indicating the index in s of the first occurrence of x. If there are no occurrences of x in s, return -1
def find_substring(string, substr):
string_len = len(string)
substr_len = len(substr)
j = 0
if substr_len >= 1:
i = 0
while i < string_len:
if string[i] == substr[j]:
j += 1
if j == substr_len:
return i - substr_len
else:
j = 0
i += 1
return -1
# test cases
# test 1
string = "Hello"
substr = "eo"
print find_substring(string, substr)
|
beaa9f56cc76dc9ebd531d84e595420a4037a9a9
|
tests/factories/user.py
|
tests/factories/user.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2016 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
import factory
from factory.faker import Faker
from pycroft.model.user import User
from .base import BaseFactory
from .facilities import RoomFactory
from .finance import AccountFactory
class UserFactory(BaseFactory):
class Meta:
model = User
login = Faker('user_name')
name = Faker('name')
registered_at = Faker('date_time')
password = Faker('password')
email = Faker('email')
account = factory.SubFactory(AccountFactory, type="USER_ASSET")
room = factory.SubFactory(RoomFactory)
address = factory.SelfAttribute('room.address')
class UserWithHostFactory(UserFactory):
host = factory.RelatedFactory('tests.factories.host.HostFactory', 'owner')
|
# -*- coding: utf-8 -*-
# Copyright (c) 2016 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
import factory
from factory.faker import Faker
from pycroft.model.user import User, RoomHistoryEntry
from .base import BaseFactory
from .facilities import RoomFactory
from .finance import AccountFactory
class UserFactory(BaseFactory):
class Meta:
model = User
login = Faker('user_name')
name = Faker('name')
registered_at = Faker('date_time')
password = Faker('password')
email = Faker('email')
account = factory.SubFactory(AccountFactory, type="USER_ASSET")
room = factory.SubFactory(RoomFactory)
address = factory.SelfAttribute('room.address')
@factory.post_generation
def room_history_entries(self, create, extracted, **kwargs):
if self.room is not None:
# Set room history entry begin to registration date
rhe = RoomHistoryEntry.q.filter_by(user=self, room=self.room).one()
rhe.begins_at = self.registered_at
class UserWithHostFactory(UserFactory):
host = factory.RelatedFactory('tests.factories.host.HostFactory', 'owner')
|
Add correct room_history_entry in UserFactory
|
Add correct room_history_entry in UserFactory
|
Python
|
apache-2.0
|
agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft
|
# -*- coding: utf-8 -*-
# Copyright (c) 2016 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
import factory
from factory.faker import Faker
from pycroft.model.user import User
from .base import BaseFactory
from .facilities import RoomFactory
from .finance import AccountFactory
class UserFactory(BaseFactory):
class Meta:
model = User
login = Faker('user_name')
name = Faker('name')
registered_at = Faker('date_time')
password = Faker('password')
email = Faker('email')
account = factory.SubFactory(AccountFactory, type="USER_ASSET")
room = factory.SubFactory(RoomFactory)
address = factory.SelfAttribute('room.address')
class UserWithHostFactory(UserFactory):
host = factory.RelatedFactory('tests.factories.host.HostFactory', 'owner')
Add correct room_history_entry in UserFactory
|
# -*- coding: utf-8 -*-
# Copyright (c) 2016 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
import factory
from factory.faker import Faker
from pycroft.model.user import User, RoomHistoryEntry
from .base import BaseFactory
from .facilities import RoomFactory
from .finance import AccountFactory
class UserFactory(BaseFactory):
class Meta:
model = User
login = Faker('user_name')
name = Faker('name')
registered_at = Faker('date_time')
password = Faker('password')
email = Faker('email')
account = factory.SubFactory(AccountFactory, type="USER_ASSET")
room = factory.SubFactory(RoomFactory)
address = factory.SelfAttribute('room.address')
@factory.post_generation
def room_history_entries(self, create, extracted, **kwargs):
if self.room is not None:
# Set room history entry begin to registration date
rhe = RoomHistoryEntry.q.filter_by(user=self, room=self.room).one()
rhe.begins_at = self.registered_at
class UserWithHostFactory(UserFactory):
host = factory.RelatedFactory('tests.factories.host.HostFactory', 'owner')
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2016 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
import factory
from factory.faker import Faker
from pycroft.model.user import User
from .base import BaseFactory
from .facilities import RoomFactory
from .finance import AccountFactory
class UserFactory(BaseFactory):
class Meta:
model = User
login = Faker('user_name')
name = Faker('name')
registered_at = Faker('date_time')
password = Faker('password')
email = Faker('email')
account = factory.SubFactory(AccountFactory, type="USER_ASSET")
room = factory.SubFactory(RoomFactory)
address = factory.SelfAttribute('room.address')
class UserWithHostFactory(UserFactory):
host = factory.RelatedFactory('tests.factories.host.HostFactory', 'owner')
<commit_msg>Add correct room_history_entry in UserFactory<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (c) 2016 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
import factory
from factory.faker import Faker
from pycroft.model.user import User, RoomHistoryEntry
from .base import BaseFactory
from .facilities import RoomFactory
from .finance import AccountFactory
class UserFactory(BaseFactory):
class Meta:
model = User
login = Faker('user_name')
name = Faker('name')
registered_at = Faker('date_time')
password = Faker('password')
email = Faker('email')
account = factory.SubFactory(AccountFactory, type="USER_ASSET")
room = factory.SubFactory(RoomFactory)
address = factory.SelfAttribute('room.address')
@factory.post_generation
def room_history_entries(self, create, extracted, **kwargs):
if self.room is not None:
# Set room history entry begin to registration date
rhe = RoomHistoryEntry.q.filter_by(user=self, room=self.room).one()
rhe.begins_at = self.registered_at
class UserWithHostFactory(UserFactory):
host = factory.RelatedFactory('tests.factories.host.HostFactory', 'owner')
|
# -*- coding: utf-8 -*-
# Copyright (c) 2016 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
import factory
from factory.faker import Faker
from pycroft.model.user import User
from .base import BaseFactory
from .facilities import RoomFactory
from .finance import AccountFactory
class UserFactory(BaseFactory):
class Meta:
model = User
login = Faker('user_name')
name = Faker('name')
registered_at = Faker('date_time')
password = Faker('password')
email = Faker('email')
account = factory.SubFactory(AccountFactory, type="USER_ASSET")
room = factory.SubFactory(RoomFactory)
address = factory.SelfAttribute('room.address')
class UserWithHostFactory(UserFactory):
host = factory.RelatedFactory('tests.factories.host.HostFactory', 'owner')
Add correct room_history_entry in UserFactory# -*- coding: utf-8 -*-
# Copyright (c) 2016 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
import factory
from factory.faker import Faker
from pycroft.model.user import User, RoomHistoryEntry
from .base import BaseFactory
from .facilities import RoomFactory
from .finance import AccountFactory
class UserFactory(BaseFactory):
class Meta:
model = User
login = Faker('user_name')
name = Faker('name')
registered_at = Faker('date_time')
password = Faker('password')
email = Faker('email')
account = factory.SubFactory(AccountFactory, type="USER_ASSET")
room = factory.SubFactory(RoomFactory)
address = factory.SelfAttribute('room.address')
@factory.post_generation
def room_history_entries(self, create, extracted, **kwargs):
if self.room is not None:
# Set room history entry begin to registration date
rhe = RoomHistoryEntry.q.filter_by(user=self, room=self.room).one()
rhe.begins_at = self.registered_at
class UserWithHostFactory(UserFactory):
host = factory.RelatedFactory('tests.factories.host.HostFactory', 'owner')
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2016 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
import factory
from factory.faker import Faker
from pycroft.model.user import User
from .base import BaseFactory
from .facilities import RoomFactory
from .finance import AccountFactory
class UserFactory(BaseFactory):
class Meta:
model = User
login = Faker('user_name')
name = Faker('name')
registered_at = Faker('date_time')
password = Faker('password')
email = Faker('email')
account = factory.SubFactory(AccountFactory, type="USER_ASSET")
room = factory.SubFactory(RoomFactory)
address = factory.SelfAttribute('room.address')
class UserWithHostFactory(UserFactory):
host = factory.RelatedFactory('tests.factories.host.HostFactory', 'owner')
<commit_msg>Add correct room_history_entry in UserFactory<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2016 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
import factory
from factory.faker import Faker
from pycroft.model.user import User, RoomHistoryEntry
from .base import BaseFactory
from .facilities import RoomFactory
from .finance import AccountFactory
class UserFactory(BaseFactory):
class Meta:
model = User
login = Faker('user_name')
name = Faker('name')
registered_at = Faker('date_time')
password = Faker('password')
email = Faker('email')
account = factory.SubFactory(AccountFactory, type="USER_ASSET")
room = factory.SubFactory(RoomFactory)
address = factory.SelfAttribute('room.address')
@factory.post_generation
def room_history_entries(self, create, extracted, **kwargs):
if self.room is not None:
# Set room history entry begin to registration date
rhe = RoomHistoryEntry.q.filter_by(user=self, room=self.room).one()
rhe.begins_at = self.registered_at
class UserWithHostFactory(UserFactory):
host = factory.RelatedFactory('tests.factories.host.HostFactory', 'owner')
|
327d9c762ddbbfb2c90cea347dd5612499ade3d4
|
tests/setup_teardown.py
|
tests/setup_teardown.py
|
import os
from skidl import *
files_at_start = set([])
def setup_function(f):
global files_at_start
files_at_start = set(os.listdir('.'))
# Make this test directory the library search paths for all ECAD tools
for tool_lib_path in lib_search_paths:
tool_lib_path = [os.path.dirname(os.path.abspath(__file__))]
default_circuit.mini_reset()
def teardown_function(f):
files_at_end = set(os.listdir('.'))
for file in files_at_end - files_at_start:
try:
os.remove(file)
except Exception:
pass
def get_filename(fn):
"""
Resolves a filename relative to the "tests" directory.
"""
abs_fn = \
fn if os.path.isabs(fn) else \
os.path.join(
os.path.dirname(os.path.abspath(__file__)),
fn)
return os.path.realpath(abs_fn)
if __name__ == '__main__':
setup_function(None)
with open('test.txt','wb') as f:
f.write('test')
teardown_function(None)
|
import os
from skidl import *
files_at_start = set([])
def setup_function(f):
global files_at_start
files_at_start = set(os.listdir('.'))
default_circuit.mini_reset()
lib_search_paths.clear()
lib_search_paths.update({
KICAD: [".", get_filename(".")],
SKIDL: [".", get_filename("../skidl/libs")]
})
def teardown_function(f):
files_at_end = set(os.listdir('.'))
for file in files_at_end - files_at_start:
try:
os.remove(file)
except Exception:
pass
def get_filename(fn):
"""
Resolves a filename relative to the "tests" directory.
"""
abs_fn = \
fn if os.path.isabs(fn) else \
os.path.join(
os.path.dirname(os.path.abspath(__file__)),
fn)
return os.path.realpath(abs_fn)
if __name__ == '__main__':
setup_function(None)
with open('test.txt','wb') as f:
f.write('test')
teardown_function(None)
|
Set up library search paths for tests.
|
Set up library search paths for tests.
When running tests, exclusively use the libraries that are either
built-in, or are provided with the test suite.
|
Python
|
mit
|
xesscorp/skidl,xesscorp/skidl
|
import os
from skidl import *
files_at_start = set([])
def setup_function(f):
global files_at_start
files_at_start = set(os.listdir('.'))
# Make this test directory the library search paths for all ECAD tools
for tool_lib_path in lib_search_paths:
tool_lib_path = [os.path.dirname(os.path.abspath(__file__))]
default_circuit.mini_reset()
def teardown_function(f):
files_at_end = set(os.listdir('.'))
for file in files_at_end - files_at_start:
try:
os.remove(file)
except Exception:
pass
def get_filename(fn):
"""
Resolves a filename relative to the "tests" directory.
"""
abs_fn = \
fn if os.path.isabs(fn) else \
os.path.join(
os.path.dirname(os.path.abspath(__file__)),
fn)
return os.path.realpath(abs_fn)
if __name__ == '__main__':
setup_function(None)
with open('test.txt','wb') as f:
f.write('test')
teardown_function(None)
Set up library search paths for tests.
When running tests, exclusively use the libraries that are either
built-in, or are provided with the test suite.
|
import os
from skidl import *
files_at_start = set([])
def setup_function(f):
global files_at_start
files_at_start = set(os.listdir('.'))
default_circuit.mini_reset()
lib_search_paths.clear()
lib_search_paths.update({
KICAD: [".", get_filename(".")],
SKIDL: [".", get_filename("../skidl/libs")]
})
def teardown_function(f):
files_at_end = set(os.listdir('.'))
for file in files_at_end - files_at_start:
try:
os.remove(file)
except Exception:
pass
def get_filename(fn):
"""
Resolves a filename relative to the "tests" directory.
"""
abs_fn = \
fn if os.path.isabs(fn) else \
os.path.join(
os.path.dirname(os.path.abspath(__file__)),
fn)
return os.path.realpath(abs_fn)
if __name__ == '__main__':
setup_function(None)
with open('test.txt','wb') as f:
f.write('test')
teardown_function(None)
|
<commit_before>import os
from skidl import *
files_at_start = set([])
def setup_function(f):
global files_at_start
files_at_start = set(os.listdir('.'))
# Make this test directory the library search paths for all ECAD tools
for tool_lib_path in lib_search_paths:
tool_lib_path = [os.path.dirname(os.path.abspath(__file__))]
default_circuit.mini_reset()
def teardown_function(f):
files_at_end = set(os.listdir('.'))
for file in files_at_end - files_at_start:
try:
os.remove(file)
except Exception:
pass
def get_filename(fn):
"""
Resolves a filename relative to the "tests" directory.
"""
abs_fn = \
fn if os.path.isabs(fn) else \
os.path.join(
os.path.dirname(os.path.abspath(__file__)),
fn)
return os.path.realpath(abs_fn)
if __name__ == '__main__':
setup_function(None)
with open('test.txt','wb') as f:
f.write('test')
teardown_function(None)
<commit_msg>Set up library search paths for tests.
When running tests, exclusively use the libraries that are either
built-in, or are provided with the test suite.<commit_after>
|
import os
from skidl import *
files_at_start = set([])
def setup_function(f):
global files_at_start
files_at_start = set(os.listdir('.'))
default_circuit.mini_reset()
lib_search_paths.clear()
lib_search_paths.update({
KICAD: [".", get_filename(".")],
SKIDL: [".", get_filename("../skidl/libs")]
})
def teardown_function(f):
files_at_end = set(os.listdir('.'))
for file in files_at_end - files_at_start:
try:
os.remove(file)
except Exception:
pass
def get_filename(fn):
"""
Resolves a filename relative to the "tests" directory.
"""
abs_fn = \
fn if os.path.isabs(fn) else \
os.path.join(
os.path.dirname(os.path.abspath(__file__)),
fn)
return os.path.realpath(abs_fn)
if __name__ == '__main__':
setup_function(None)
with open('test.txt','wb') as f:
f.write('test')
teardown_function(None)
|
import os
from skidl import *
files_at_start = set([])
def setup_function(f):
global files_at_start
files_at_start = set(os.listdir('.'))
# Make this test directory the library search paths for all ECAD tools
for tool_lib_path in lib_search_paths:
tool_lib_path = [os.path.dirname(os.path.abspath(__file__))]
default_circuit.mini_reset()
def teardown_function(f):
files_at_end = set(os.listdir('.'))
for file in files_at_end - files_at_start:
try:
os.remove(file)
except Exception:
pass
def get_filename(fn):
"""
Resolves a filename relative to the "tests" directory.
"""
abs_fn = \
fn if os.path.isabs(fn) else \
os.path.join(
os.path.dirname(os.path.abspath(__file__)),
fn)
return os.path.realpath(abs_fn)
if __name__ == '__main__':
setup_function(None)
with open('test.txt','wb') as f:
f.write('test')
teardown_function(None)
Set up library search paths for tests.
When running tests, exclusively use the libraries that are either
built-in, or are provided with the test suite.import os
from skidl import *
files_at_start = set([])
def setup_function(f):
global files_at_start
files_at_start = set(os.listdir('.'))
default_circuit.mini_reset()
lib_search_paths.clear()
lib_search_paths.update({
KICAD: [".", get_filename(".")],
SKIDL: [".", get_filename("../skidl/libs")]
})
def teardown_function(f):
files_at_end = set(os.listdir('.'))
for file in files_at_end - files_at_start:
try:
os.remove(file)
except Exception:
pass
def get_filename(fn):
"""
Resolves a filename relative to the "tests" directory.
"""
abs_fn = \
fn if os.path.isabs(fn) else \
os.path.join(
os.path.dirname(os.path.abspath(__file__)),
fn)
return os.path.realpath(abs_fn)
if __name__ == '__main__':
setup_function(None)
with open('test.txt','wb') as f:
f.write('test')
teardown_function(None)
|
<commit_before>import os
from skidl import *
files_at_start = set([])
def setup_function(f):
global files_at_start
files_at_start = set(os.listdir('.'))
# Make this test directory the library search paths for all ECAD tools
for tool_lib_path in lib_search_paths:
tool_lib_path = [os.path.dirname(os.path.abspath(__file__))]
default_circuit.mini_reset()
def teardown_function(f):
files_at_end = set(os.listdir('.'))
for file in files_at_end - files_at_start:
try:
os.remove(file)
except Exception:
pass
def get_filename(fn):
"""
Resolves a filename relative to the "tests" directory.
"""
abs_fn = \
fn if os.path.isabs(fn) else \
os.path.join(
os.path.dirname(os.path.abspath(__file__)),
fn)
return os.path.realpath(abs_fn)
if __name__ == '__main__':
setup_function(None)
with open('test.txt','wb') as f:
f.write('test')
teardown_function(None)
<commit_msg>Set up library search paths for tests.
When running tests, exclusively use the libraries that are either
built-in, or are provided with the test suite.<commit_after>import os
from skidl import *
files_at_start = set([])
def setup_function(f):
global files_at_start
files_at_start = set(os.listdir('.'))
default_circuit.mini_reset()
lib_search_paths.clear()
lib_search_paths.update({
KICAD: [".", get_filename(".")],
SKIDL: [".", get_filename("../skidl/libs")]
})
def teardown_function(f):
files_at_end = set(os.listdir('.'))
for file in files_at_end - files_at_start:
try:
os.remove(file)
except Exception:
pass
def get_filename(fn):
"""
Resolves a filename relative to the "tests" directory.
"""
abs_fn = \
fn if os.path.isabs(fn) else \
os.path.join(
os.path.dirname(os.path.abspath(__file__)),
fn)
return os.path.realpath(abs_fn)
if __name__ == '__main__':
setup_function(None)
with open('test.txt','wb') as f:
f.write('test')
teardown_function(None)
|
6ef2973fe269ec17d607f63b565ac3a82ff86c0a
|
examples/treeping64.py
|
examples/treeping64.py
|
#!/usr/bin/python
"Create a 64-node tree network, and test connectivity using ping."
from mininet.log import setLogLevel
from mininet.net import init, Mininet
from mininet.node import KernelSwitch, UserSwitch, OVSKernelSwitch
from mininet.topolib import TreeNet
def treePing64():
"Run ping test on 64-node tree networks."
results = {}
switches = { 'reference kernel': KernelSwitch,
'reference user': UserSwitch,
'Open vSwitch kernel': OVSKernelSwitch }
for name in switches.keys():
print "*** Testing", name, "datapath"
switch = switches[ name ]
network = TreeNet( depth=2, fanout=8, switch=switch )
result = network.run( network.pingAll )
results[ name ] = result
print
print "*** Tree network ping results:"
for name in switches.keys():
print "%s: %d%% packet loss" % ( name, results[ name ] )
print
if __name__ == '__main__':
setLogLevel( 'info' )
treePing64()
|
#!/usr/bin/python
"Create a 64-node tree network, and test connectivity using ping."
from mininet.log import setLogLevel
from mininet.net import init, Mininet
from mininet.node import KernelSwitch, UserSwitch, OVSKernelSwitch
from mininet.topolib import TreeNet
def treePing64():
"Run ping test on 64-node tree networks."
results = {}
switches = { 'reference kernel': KernelSwitch,
'reference user': UserSwitch,
'Open vSwitch kernel': OVSKernelSwitch }
for name in switches:
print "*** Testing", name, "datapath"
switch = switches[ name ]
network = TreeNet( depth=2, fanout=8, switch=switch )
result = network.run( network.pingAll )
results[ name ] = result
print
print "*** Tree network ping results:"
for name in switches:
print "%s: %d%% packet loss" % ( name, results[ name ] )
print
if __name__ == '__main__':
setLogLevel( 'info' )
treePing64()
|
Use switches rather than switches.keys().
|
Use switches rather than switches.keys().
Minor cosmetic change, really.
|
Python
|
bsd-3-clause
|
mininet/mininet,mininet/mininet,mininet/mininet
|
#!/usr/bin/python
"Create a 64-node tree network, and test connectivity using ping."
from mininet.log import setLogLevel
from mininet.net import init, Mininet
from mininet.node import KernelSwitch, UserSwitch, OVSKernelSwitch
from mininet.topolib import TreeNet
def treePing64():
"Run ping test on 64-node tree networks."
results = {}
switches = { 'reference kernel': KernelSwitch,
'reference user': UserSwitch,
'Open vSwitch kernel': OVSKernelSwitch }
for name in switches.keys():
print "*** Testing", name, "datapath"
switch = switches[ name ]
network = TreeNet( depth=2, fanout=8, switch=switch )
result = network.run( network.pingAll )
results[ name ] = result
print
print "*** Tree network ping results:"
for name in switches.keys():
print "%s: %d%% packet loss" % ( name, results[ name ] )
print
if __name__ == '__main__':
setLogLevel( 'info' )
treePing64()
Use switches rather than switches.keys().
Minor cosmetic change, really.
|
#!/usr/bin/python
"Create a 64-node tree network, and test connectivity using ping."
from mininet.log import setLogLevel
from mininet.net import init, Mininet
from mininet.node import KernelSwitch, UserSwitch, OVSKernelSwitch
from mininet.topolib import TreeNet
def treePing64():
"Run ping test on 64-node tree networks."
results = {}
switches = { 'reference kernel': KernelSwitch,
'reference user': UserSwitch,
'Open vSwitch kernel': OVSKernelSwitch }
for name in switches:
print "*** Testing", name, "datapath"
switch = switches[ name ]
network = TreeNet( depth=2, fanout=8, switch=switch )
result = network.run( network.pingAll )
results[ name ] = result
print
print "*** Tree network ping results:"
for name in switches:
print "%s: %d%% packet loss" % ( name, results[ name ] )
print
if __name__ == '__main__':
setLogLevel( 'info' )
treePing64()
|
<commit_before>#!/usr/bin/python
"Create a 64-node tree network, and test connectivity using ping."
from mininet.log import setLogLevel
from mininet.net import init, Mininet
from mininet.node import KernelSwitch, UserSwitch, OVSKernelSwitch
from mininet.topolib import TreeNet
def treePing64():
"Run ping test on 64-node tree networks."
results = {}
switches = { 'reference kernel': KernelSwitch,
'reference user': UserSwitch,
'Open vSwitch kernel': OVSKernelSwitch }
for name in switches.keys():
print "*** Testing", name, "datapath"
switch = switches[ name ]
network = TreeNet( depth=2, fanout=8, switch=switch )
result = network.run( network.pingAll )
results[ name ] = result
print
print "*** Tree network ping results:"
for name in switches.keys():
print "%s: %d%% packet loss" % ( name, results[ name ] )
print
if __name__ == '__main__':
setLogLevel( 'info' )
treePing64()
<commit_msg>Use switches rather than switches.keys().
Minor cosmetic change, really.<commit_after>
|
#!/usr/bin/python
"Create a 64-node tree network, and test connectivity using ping."
from mininet.log import setLogLevel
from mininet.net import init, Mininet
from mininet.node import KernelSwitch, UserSwitch, OVSKernelSwitch
from mininet.topolib import TreeNet
def treePing64():
"Run ping test on 64-node tree networks."
results = {}
switches = { 'reference kernel': KernelSwitch,
'reference user': UserSwitch,
'Open vSwitch kernel': OVSKernelSwitch }
for name in switches:
print "*** Testing", name, "datapath"
switch = switches[ name ]
network = TreeNet( depth=2, fanout=8, switch=switch )
result = network.run( network.pingAll )
results[ name ] = result
print
print "*** Tree network ping results:"
for name in switches:
print "%s: %d%% packet loss" % ( name, results[ name ] )
print
if __name__ == '__main__':
setLogLevel( 'info' )
treePing64()
|
#!/usr/bin/python
"Create a 64-node tree network, and test connectivity using ping."
from mininet.log import setLogLevel
from mininet.net import init, Mininet
from mininet.node import KernelSwitch, UserSwitch, OVSKernelSwitch
from mininet.topolib import TreeNet
def treePing64():
"Run ping test on 64-node tree networks."
results = {}
switches = { 'reference kernel': KernelSwitch,
'reference user': UserSwitch,
'Open vSwitch kernel': OVSKernelSwitch }
for name in switches.keys():
print "*** Testing", name, "datapath"
switch = switches[ name ]
network = TreeNet( depth=2, fanout=8, switch=switch )
result = network.run( network.pingAll )
results[ name ] = result
print
print "*** Tree network ping results:"
for name in switches.keys():
print "%s: %d%% packet loss" % ( name, results[ name ] )
print
if __name__ == '__main__':
setLogLevel( 'info' )
treePing64()
Use switches rather than switches.keys().
Minor cosmetic change, really.#!/usr/bin/python
"Create a 64-node tree network, and test connectivity using ping."
from mininet.log import setLogLevel
from mininet.net import init, Mininet
from mininet.node import KernelSwitch, UserSwitch, OVSKernelSwitch
from mininet.topolib import TreeNet
def treePing64():
"Run ping test on 64-node tree networks."
results = {}
switches = { 'reference kernel': KernelSwitch,
'reference user': UserSwitch,
'Open vSwitch kernel': OVSKernelSwitch }
for name in switches:
print "*** Testing", name, "datapath"
switch = switches[ name ]
network = TreeNet( depth=2, fanout=8, switch=switch )
result = network.run( network.pingAll )
results[ name ] = result
print
print "*** Tree network ping results:"
for name in switches:
print "%s: %d%% packet loss" % ( name, results[ name ] )
print
if __name__ == '__main__':
setLogLevel( 'info' )
treePing64()
|
<commit_before>#!/usr/bin/python
"Create a 64-node tree network, and test connectivity using ping."
from mininet.log import setLogLevel
from mininet.net import init, Mininet
from mininet.node import KernelSwitch, UserSwitch, OVSKernelSwitch
from mininet.topolib import TreeNet
def treePing64():
"Run ping test on 64-node tree networks."
results = {}
switches = { 'reference kernel': KernelSwitch,
'reference user': UserSwitch,
'Open vSwitch kernel': OVSKernelSwitch }
for name in switches.keys():
print "*** Testing", name, "datapath"
switch = switches[ name ]
network = TreeNet( depth=2, fanout=8, switch=switch )
result = network.run( network.pingAll )
results[ name ] = result
print
print "*** Tree network ping results:"
for name in switches.keys():
print "%s: %d%% packet loss" % ( name, results[ name ] )
print
if __name__ == '__main__':
setLogLevel( 'info' )
treePing64()
<commit_msg>Use switches rather than switches.keys().
Minor cosmetic change, really.<commit_after>#!/usr/bin/python
"Create a 64-node tree network, and test connectivity using ping."
from mininet.log import setLogLevel
from mininet.net import init, Mininet
from mininet.node import KernelSwitch, UserSwitch, OVSKernelSwitch
from mininet.topolib import TreeNet
def treePing64():
"Run ping test on 64-node tree networks."
results = {}
switches = { 'reference kernel': KernelSwitch,
'reference user': UserSwitch,
'Open vSwitch kernel': OVSKernelSwitch }
for name in switches:
print "*** Testing", name, "datapath"
switch = switches[ name ]
network = TreeNet( depth=2, fanout=8, switch=switch )
result = network.run( network.pingAll )
results[ name ] = result
print
print "*** Tree network ping results:"
for name in switches:
print "%s: %d%% packet loss" % ( name, results[ name ] )
print
if __name__ == '__main__':
setLogLevel( 'info' )
treePing64()
|
9188c2bd910d86a4dc2e57c991d9ce21aecc3316
|
malcolm/core/vmetas/choicemeta.py
|
malcolm/core/vmetas/choicemeta.py
|
from malcolm.compat import str_
from malcolm.core.serializable import Serializable, deserialize_object
from malcolm.core.vmeta import VMeta
@Serializable.register_subclass("malcolm:core/ChoiceMeta:1.0")
class ChoiceMeta(VMeta):
"""Meta object containing information for a enum"""
endpoints = ["description", "choices", "tags", "writeable", "label"]
def __init__(self, description="", choices=None, tags=None, writeable=False,
label=""):
super(ChoiceMeta, self).__init__(description, tags, writeable, label)
if choices is None:
choices = []
self.set_choices(choices)
def set_choices(self, choices, notify=True):
"""Set the choices list"""
choices = [deserialize_object(c, str_) for c in choices]
self.set_endpoint_data("choices", choices, notify)
def validate(self, value):
"""
Check if the value is valid returns it
Args:
value: Value to validate
Returns:
Value if it is valid
Raises:
ValueError: If value not valid
"""
if value is None:
if self.choices:
return self.choices[0]
else:
return ""
elif value in self.choices:
return value
elif isinstance(value, int) and value < len(self.choices):
return self.choices[value]
else:
raise ValueError(
"%s is not a valid value in %s" % (value, self.choices))
|
from malcolm.compat import str_
from malcolm.core.serializable import Serializable, deserialize_object
from malcolm.core.vmeta import VMeta
@Serializable.register_subclass("malcolm:core/ChoiceMeta:1.0")
class ChoiceMeta(VMeta):
"""Meta object containing information for a enum"""
endpoints = ["description", "choices", "tags", "writeable", "label"]
def __init__(self, description="", choices=None, tags=None, writeable=False,
label=""):
super(ChoiceMeta, self).__init__(description, tags, writeable, label)
if choices is None:
choices = []
self.set_choices(choices)
def set_choices(self, choices, notify=True):
"""Set the choices list"""
choices = [deserialize_object(c, str_) for c in choices]
self.set_endpoint_data("choices", choices, notify)
def validate(self, value):
"""
Check if the value is valid returns it
Args:
value: Value to validate
Returns:
Value if it is valid
Raises:
ValueError: If value not valid
"""
if value is None:
if self.choices:
return self.choices[0]
else:
return ""
elif value in self.choices:
return value
elif isinstance(value, int) and value < len(self.choices):
return self.choices[value]
else:
# Hack for PANDA as we get STATUS overriding status
return value
raise ValueError(
"%s is not a valid value in %s" % (value, self.choices))
|
Fix PandA firmware issue with a hack
|
Fix PandA firmware issue with a hack
|
Python
|
apache-2.0
|
dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm
|
from malcolm.compat import str_
from malcolm.core.serializable import Serializable, deserialize_object
from malcolm.core.vmeta import VMeta
@Serializable.register_subclass("malcolm:core/ChoiceMeta:1.0")
class ChoiceMeta(VMeta):
"""Meta object containing information for a enum"""
endpoints = ["description", "choices", "tags", "writeable", "label"]
def __init__(self, description="", choices=None, tags=None, writeable=False,
label=""):
super(ChoiceMeta, self).__init__(description, tags, writeable, label)
if choices is None:
choices = []
self.set_choices(choices)
def set_choices(self, choices, notify=True):
"""Set the choices list"""
choices = [deserialize_object(c, str_) for c in choices]
self.set_endpoint_data("choices", choices, notify)
def validate(self, value):
"""
Check if the value is valid returns it
Args:
value: Value to validate
Returns:
Value if it is valid
Raises:
ValueError: If value not valid
"""
if value is None:
if self.choices:
return self.choices[0]
else:
return ""
elif value in self.choices:
return value
elif isinstance(value, int) and value < len(self.choices):
return self.choices[value]
else:
raise ValueError(
"%s is not a valid value in %s" % (value, self.choices))Fix PandA firmware issue with a hack
|
from malcolm.compat import str_
from malcolm.core.serializable import Serializable, deserialize_object
from malcolm.core.vmeta import VMeta
@Serializable.register_subclass("malcolm:core/ChoiceMeta:1.0")
class ChoiceMeta(VMeta):
"""Meta object containing information for a enum"""
endpoints = ["description", "choices", "tags", "writeable", "label"]
def __init__(self, description="", choices=None, tags=None, writeable=False,
label=""):
super(ChoiceMeta, self).__init__(description, tags, writeable, label)
if choices is None:
choices = []
self.set_choices(choices)
def set_choices(self, choices, notify=True):
"""Set the choices list"""
choices = [deserialize_object(c, str_) for c in choices]
self.set_endpoint_data("choices", choices, notify)
def validate(self, value):
"""
Check if the value is valid returns it
Args:
value: Value to validate
Returns:
Value if it is valid
Raises:
ValueError: If value not valid
"""
if value is None:
if self.choices:
return self.choices[0]
else:
return ""
elif value in self.choices:
return value
elif isinstance(value, int) and value < len(self.choices):
return self.choices[value]
else:
# Hack for PANDA as we get STATUS overriding status
return value
raise ValueError(
"%s is not a valid value in %s" % (value, self.choices))
|
<commit_before>from malcolm.compat import str_
from malcolm.core.serializable import Serializable, deserialize_object
from malcolm.core.vmeta import VMeta
@Serializable.register_subclass("malcolm:core/ChoiceMeta:1.0")
class ChoiceMeta(VMeta):
"""Meta object containing information for a enum"""
endpoints = ["description", "choices", "tags", "writeable", "label"]
def __init__(self, description="", choices=None, tags=None, writeable=False,
label=""):
super(ChoiceMeta, self).__init__(description, tags, writeable, label)
if choices is None:
choices = []
self.set_choices(choices)
def set_choices(self, choices, notify=True):
"""Set the choices list"""
choices = [deserialize_object(c, str_) for c in choices]
self.set_endpoint_data("choices", choices, notify)
def validate(self, value):
"""
Check if the value is valid returns it
Args:
value: Value to validate
Returns:
Value if it is valid
Raises:
ValueError: If value not valid
"""
if value is None:
if self.choices:
return self.choices[0]
else:
return ""
elif value in self.choices:
return value
elif isinstance(value, int) and value < len(self.choices):
return self.choices[value]
else:
raise ValueError(
"%s is not a valid value in %s" % (value, self.choices))<commit_msg>Fix PandA firmware issue with a hack<commit_after>
|
from malcolm.compat import str_
from malcolm.core.serializable import Serializable, deserialize_object
from malcolm.core.vmeta import VMeta
@Serializable.register_subclass("malcolm:core/ChoiceMeta:1.0")
class ChoiceMeta(VMeta):
"""Meta object containing information for a enum"""
endpoints = ["description", "choices", "tags", "writeable", "label"]
def __init__(self, description="", choices=None, tags=None, writeable=False,
label=""):
super(ChoiceMeta, self).__init__(description, tags, writeable, label)
if choices is None:
choices = []
self.set_choices(choices)
def set_choices(self, choices, notify=True):
"""Set the choices list"""
choices = [deserialize_object(c, str_) for c in choices]
self.set_endpoint_data("choices", choices, notify)
def validate(self, value):
"""
Check if the value is valid returns it
Args:
value: Value to validate
Returns:
Value if it is valid
Raises:
ValueError: If value not valid
"""
if value is None:
if self.choices:
return self.choices[0]
else:
return ""
elif value in self.choices:
return value
elif isinstance(value, int) and value < len(self.choices):
return self.choices[value]
else:
# Hack for PANDA as we get STATUS overriding status
return value
raise ValueError(
"%s is not a valid value in %s" % (value, self.choices))
|
from malcolm.compat import str_
from malcolm.core.serializable import Serializable, deserialize_object
from malcolm.core.vmeta import VMeta
@Serializable.register_subclass("malcolm:core/ChoiceMeta:1.0")
class ChoiceMeta(VMeta):
"""Meta object containing information for a enum"""
endpoints = ["description", "choices", "tags", "writeable", "label"]
def __init__(self, description="", choices=None, tags=None, writeable=False,
label=""):
super(ChoiceMeta, self).__init__(description, tags, writeable, label)
if choices is None:
choices = []
self.set_choices(choices)
def set_choices(self, choices, notify=True):
"""Set the choices list"""
choices = [deserialize_object(c, str_) for c in choices]
self.set_endpoint_data("choices", choices, notify)
def validate(self, value):
"""
Check if the value is valid returns it
Args:
value: Value to validate
Returns:
Value if it is valid
Raises:
ValueError: If value not valid
"""
if value is None:
if self.choices:
return self.choices[0]
else:
return ""
elif value in self.choices:
return value
elif isinstance(value, int) and value < len(self.choices):
return self.choices[value]
else:
raise ValueError(
"%s is not a valid value in %s" % (value, self.choices))Fix PandA firmware issue with a hackfrom malcolm.compat import str_
from malcolm.core.serializable import Serializable, deserialize_object
from malcolm.core.vmeta import VMeta
@Serializable.register_subclass("malcolm:core/ChoiceMeta:1.0")
class ChoiceMeta(VMeta):
"""Meta object containing information for a enum"""
endpoints = ["description", "choices", "tags", "writeable", "label"]
def __init__(self, description="", choices=None, tags=None, writeable=False,
label=""):
super(ChoiceMeta, self).__init__(description, tags, writeable, label)
if choices is None:
choices = []
self.set_choices(choices)
def set_choices(self, choices, notify=True):
"""Set the choices list"""
choices = [deserialize_object(c, str_) for c in choices]
self.set_endpoint_data("choices", choices, notify)
def validate(self, value):
"""
Check if the value is valid returns it
Args:
value: Value to validate
Returns:
Value if it is valid
Raises:
ValueError: If value not valid
"""
if value is None:
if self.choices:
return self.choices[0]
else:
return ""
elif value in self.choices:
return value
elif isinstance(value, int) and value < len(self.choices):
return self.choices[value]
else:
# Hack for PANDA as we get STATUS overriding status
return value
raise ValueError(
"%s is not a valid value in %s" % (value, self.choices))
|
<commit_before>from malcolm.compat import str_
from malcolm.core.serializable import Serializable, deserialize_object
from malcolm.core.vmeta import VMeta
@Serializable.register_subclass("malcolm:core/ChoiceMeta:1.0")
class ChoiceMeta(VMeta):
"""Meta object containing information for a enum"""
endpoints = ["description", "choices", "tags", "writeable", "label"]
def __init__(self, description="", choices=None, tags=None, writeable=False,
label=""):
super(ChoiceMeta, self).__init__(description, tags, writeable, label)
if choices is None:
choices = []
self.set_choices(choices)
def set_choices(self, choices, notify=True):
"""Set the choices list"""
choices = [deserialize_object(c, str_) for c in choices]
self.set_endpoint_data("choices", choices, notify)
def validate(self, value):
"""
Check if the value is valid returns it
Args:
value: Value to validate
Returns:
Value if it is valid
Raises:
ValueError: If value not valid
"""
if value is None:
if self.choices:
return self.choices[0]
else:
return ""
elif value in self.choices:
return value
elif isinstance(value, int) and value < len(self.choices):
return self.choices[value]
else:
raise ValueError(
"%s is not a valid value in %s" % (value, self.choices))<commit_msg>Fix PandA firmware issue with a hack<commit_after>from malcolm.compat import str_
from malcolm.core.serializable import Serializable, deserialize_object
from malcolm.core.vmeta import VMeta
@Serializable.register_subclass("malcolm:core/ChoiceMeta:1.0")
class ChoiceMeta(VMeta):
"""Meta object containing information for a enum"""
endpoints = ["description", "choices", "tags", "writeable", "label"]
def __init__(self, description="", choices=None, tags=None, writeable=False,
label=""):
super(ChoiceMeta, self).__init__(description, tags, writeable, label)
if choices is None:
choices = []
self.set_choices(choices)
def set_choices(self, choices, notify=True):
"""Set the choices list"""
choices = [deserialize_object(c, str_) for c in choices]
self.set_endpoint_data("choices", choices, notify)
def validate(self, value):
"""
Check if the value is valid returns it
Args:
value: Value to validate
Returns:
Value if it is valid
Raises:
ValueError: If value not valid
"""
if value is None:
if self.choices:
return self.choices[0]
else:
return ""
elif value in self.choices:
return value
elif isinstance(value, int) and value < len(self.choices):
return self.choices[value]
else:
# Hack for PANDA as we get STATUS overriding status
return value
raise ValueError(
"%s is not a valid value in %s" % (value, self.choices))
|
c8a7a53f09f72d9dbe44b1bcb5b85c8ee5ba2c2c
|
services/migrations/0012_unit_data_source.py
|
services/migrations/0012_unit_data_source.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('services', '0011_unit_extensions'),
]
operations = [
migrations.AddField(
model_name='unit',
name='data_source',
field=models.CharField(null=True, max_length=20),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('services', '0011_unit_extensions'),
]
operations = [
migrations.AddField(
model_name='unit',
name='data_source',
field=models.CharField(null=True, max_length=20, default='tprek'),
preserve_default=False
),
]
|
Add default to data_source migration.
|
Add default to data_source migration.
|
Python
|
agpl-3.0
|
City-of-Helsinki/smbackend,City-of-Helsinki/smbackend
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('services', '0011_unit_extensions'),
]
operations = [
migrations.AddField(
model_name='unit',
name='data_source',
field=models.CharField(null=True, max_length=20),
),
]
Add default to data_source migration.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('services', '0011_unit_extensions'),
]
operations = [
migrations.AddField(
model_name='unit',
name='data_source',
field=models.CharField(null=True, max_length=20, default='tprek'),
preserve_default=False
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('services', '0011_unit_extensions'),
]
operations = [
migrations.AddField(
model_name='unit',
name='data_source',
field=models.CharField(null=True, max_length=20),
),
]
<commit_msg>Add default to data_source migration.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('services', '0011_unit_extensions'),
]
operations = [
migrations.AddField(
model_name='unit',
name='data_source',
field=models.CharField(null=True, max_length=20, default='tprek'),
preserve_default=False
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('services', '0011_unit_extensions'),
]
operations = [
migrations.AddField(
model_name='unit',
name='data_source',
field=models.CharField(null=True, max_length=20),
),
]
Add default to data_source migration.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('services', '0011_unit_extensions'),
]
operations = [
migrations.AddField(
model_name='unit',
name='data_source',
field=models.CharField(null=True, max_length=20, default='tprek'),
preserve_default=False
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('services', '0011_unit_extensions'),
]
operations = [
migrations.AddField(
model_name='unit',
name='data_source',
field=models.CharField(null=True, max_length=20),
),
]
<commit_msg>Add default to data_source migration.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('services', '0011_unit_extensions'),
]
operations = [
migrations.AddField(
model_name='unit',
name='data_source',
field=models.CharField(null=True, max_length=20, default='tprek'),
preserve_default=False
),
]
|
c86569d46aac2372107a5e2af66208de8c4a4c1d
|
kaleo/receivers.py
|
kaleo/receivers.py
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import User
from account.models import SignupCodeResult, EmailConfirmation
from account.signals import signup_code_used, email_confirmed
from kaleo.models import JoinInvitation, InvitationStat
@receiver(signup_code_used, sender=SignupCodeResult)
def handle_signup_code_used(sender, **kwargs):
result = kwargs.get("signup_code_result")
try:
invite = result.signup_code.joininvitation
invite.accept(result.user)
except JoinInvitation.DoesNotExist:
pass
@receiver(email_confirmed, sender=EmailConfirmation)
def handle_email_confirmed(sender, **kwargs):
email_address = kwargs.get("email_address")
JoinInvitation.process_independent_joins(
user=email_address.user,
email=email_address.email
)
@receiver(post_save, sender=User)
def create_stat(sender, instance=None, **kwargs):
if instance is None:
return
InvitationStat.objects.get_or_create(user=instance)
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import User
from account.models import SignupCodeResult, EmailConfirmation
from account.signals import signup_code_used, email_confirmed, user_signed_up
from kaleo.models import JoinInvitation, InvitationStat
@receiver(signup_code_used, sender=SignupCodeResult)
def handle_signup_code_used(sender, **kwargs):
result = kwargs.get("signup_code_result")
try:
invite = result.signup_code.joininvitation
invite.accept(result.user)
except JoinInvitation.DoesNotExist:
pass
@receiver(email_confirmed, sender=EmailConfirmation)
def handle_email_confirmed(sender, **kwargs):
email_address = kwargs.get("email_address")
JoinInvitation.process_independent_joins(
user=email_address.user,
email=email_address.email
)
@receiver(user_signed_up)
def handle_user_signup(sender, user, form, **kwargs):
email_qs = user.emailaddress_set.filter(email=user.email, verified=True)
if user.is_active and email_qs.exists():
JoinInvitation.process_independent_joins(
user=user,
email=user.email
)
@receiver(post_save, sender=User)
def create_stat(sender, instance=None, **kwargs):
if instance is None:
return
InvitationStat.objects.get_or_create(user=instance)
|
Handle case where a user skips email confirmation
|
Handle case where a user skips email confirmation
In DUA, if you are invited to a site and you end
up signing up with the same email address, DUA will
skip the confirmation cycle and count it as
confirmed already.
|
Python
|
bsd-3-clause
|
JPWKU/kaleo,abramia/kaleo,rizumu/pinax-invitations,ntucker/kaleo,jacobwegner/pinax-invitations,pinax/pinax-invitations,eldarion/kaleo
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import User
from account.models import SignupCodeResult, EmailConfirmation
from account.signals import signup_code_used, email_confirmed
from kaleo.models import JoinInvitation, InvitationStat
@receiver(signup_code_used, sender=SignupCodeResult)
def handle_signup_code_used(sender, **kwargs):
result = kwargs.get("signup_code_result")
try:
invite = result.signup_code.joininvitation
invite.accept(result.user)
except JoinInvitation.DoesNotExist:
pass
@receiver(email_confirmed, sender=EmailConfirmation)
def handle_email_confirmed(sender, **kwargs):
email_address = kwargs.get("email_address")
JoinInvitation.process_independent_joins(
user=email_address.user,
email=email_address.email
)
@receiver(post_save, sender=User)
def create_stat(sender, instance=None, **kwargs):
if instance is None:
return
InvitationStat.objects.get_or_create(user=instance)
Handle case where a user skips email confirmation
In DUA, if you are invited to a site and you end
up signing up with the same email address, DUA will
skip the confirmation cycle and count it as
confirmed already.
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import User
from account.models import SignupCodeResult, EmailConfirmation
from account.signals import signup_code_used, email_confirmed, user_signed_up
from kaleo.models import JoinInvitation, InvitationStat
@receiver(signup_code_used, sender=SignupCodeResult)
def handle_signup_code_used(sender, **kwargs):
result = kwargs.get("signup_code_result")
try:
invite = result.signup_code.joininvitation
invite.accept(result.user)
except JoinInvitation.DoesNotExist:
pass
@receiver(email_confirmed, sender=EmailConfirmation)
def handle_email_confirmed(sender, **kwargs):
email_address = kwargs.get("email_address")
JoinInvitation.process_independent_joins(
user=email_address.user,
email=email_address.email
)
@receiver(user_signed_up)
def handle_user_signup(sender, user, form, **kwargs):
email_qs = user.emailaddress_set.filter(email=user.email, verified=True)
if user.is_active and email_qs.exists():
JoinInvitation.process_independent_joins(
user=user,
email=user.email
)
@receiver(post_save, sender=User)
def create_stat(sender, instance=None, **kwargs):
if instance is None:
return
InvitationStat.objects.get_or_create(user=instance)
|
<commit_before>from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import User
from account.models import SignupCodeResult, EmailConfirmation
from account.signals import signup_code_used, email_confirmed
from kaleo.models import JoinInvitation, InvitationStat
@receiver(signup_code_used, sender=SignupCodeResult)
def handle_signup_code_used(sender, **kwargs):
result = kwargs.get("signup_code_result")
try:
invite = result.signup_code.joininvitation
invite.accept(result.user)
except JoinInvitation.DoesNotExist:
pass
@receiver(email_confirmed, sender=EmailConfirmation)
def handle_email_confirmed(sender, **kwargs):
email_address = kwargs.get("email_address")
JoinInvitation.process_independent_joins(
user=email_address.user,
email=email_address.email
)
@receiver(post_save, sender=User)
def create_stat(sender, instance=None, **kwargs):
if instance is None:
return
InvitationStat.objects.get_or_create(user=instance)
<commit_msg>Handle case where a user skips email confirmation
In DUA, if you are invited to a site and you end
up signing up with the same email address, DUA will
skip the confirmation cycle and count it as
confirmed already.<commit_after>
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import User
from account.models import SignupCodeResult, EmailConfirmation
from account.signals import signup_code_used, email_confirmed, user_signed_up
from kaleo.models import JoinInvitation, InvitationStat
@receiver(signup_code_used, sender=SignupCodeResult)
def handle_signup_code_used(sender, **kwargs):
result = kwargs.get("signup_code_result")
try:
invite = result.signup_code.joininvitation
invite.accept(result.user)
except JoinInvitation.DoesNotExist:
pass
@receiver(email_confirmed, sender=EmailConfirmation)
def handle_email_confirmed(sender, **kwargs):
email_address = kwargs.get("email_address")
JoinInvitation.process_independent_joins(
user=email_address.user,
email=email_address.email
)
@receiver(user_signed_up)
def handle_user_signup(sender, user, form, **kwargs):
email_qs = user.emailaddress_set.filter(email=user.email, verified=True)
if user.is_active and email_qs.exists():
JoinInvitation.process_independent_joins(
user=user,
email=user.email
)
@receiver(post_save, sender=User)
def create_stat(sender, instance=None, **kwargs):
if instance is None:
return
InvitationStat.objects.get_or_create(user=instance)
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import User
from account.models import SignupCodeResult, EmailConfirmation
from account.signals import signup_code_used, email_confirmed
from kaleo.models import JoinInvitation, InvitationStat
@receiver(signup_code_used, sender=SignupCodeResult)
def handle_signup_code_used(sender, **kwargs):
result = kwargs.get("signup_code_result")
try:
invite = result.signup_code.joininvitation
invite.accept(result.user)
except JoinInvitation.DoesNotExist:
pass
@receiver(email_confirmed, sender=EmailConfirmation)
def handle_email_confirmed(sender, **kwargs):
email_address = kwargs.get("email_address")
JoinInvitation.process_independent_joins(
user=email_address.user,
email=email_address.email
)
@receiver(post_save, sender=User)
def create_stat(sender, instance=None, **kwargs):
if instance is None:
return
InvitationStat.objects.get_or_create(user=instance)
Handle case where a user skips email confirmation
In DUA, if you are invited to a site and you end
up signing up with the same email address, DUA will
skip the confirmation cycle and count it as
confirmed already.from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import User
from account.models import SignupCodeResult, EmailConfirmation
from account.signals import signup_code_used, email_confirmed, user_signed_up
from kaleo.models import JoinInvitation, InvitationStat
@receiver(signup_code_used, sender=SignupCodeResult)
def handle_signup_code_used(sender, **kwargs):
result = kwargs.get("signup_code_result")
try:
invite = result.signup_code.joininvitation
invite.accept(result.user)
except JoinInvitation.DoesNotExist:
pass
@receiver(email_confirmed, sender=EmailConfirmation)
def handle_email_confirmed(sender, **kwargs):
email_address = kwargs.get("email_address")
JoinInvitation.process_independent_joins(
user=email_address.user,
email=email_address.email
)
@receiver(user_signed_up)
def handle_user_signup(sender, user, form, **kwargs):
email_qs = user.emailaddress_set.filter(email=user.email, verified=True)
if user.is_active and email_qs.exists():
JoinInvitation.process_independent_joins(
user=user,
email=user.email
)
@receiver(post_save, sender=User)
def create_stat(sender, instance=None, **kwargs):
if instance is None:
return
InvitationStat.objects.get_or_create(user=instance)
|
<commit_before>from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import User
from account.models import SignupCodeResult, EmailConfirmation
from account.signals import signup_code_used, email_confirmed
from kaleo.models import JoinInvitation, InvitationStat
@receiver(signup_code_used, sender=SignupCodeResult)
def handle_signup_code_used(sender, **kwargs):
result = kwargs.get("signup_code_result")
try:
invite = result.signup_code.joininvitation
invite.accept(result.user)
except JoinInvitation.DoesNotExist:
pass
@receiver(email_confirmed, sender=EmailConfirmation)
def handle_email_confirmed(sender, **kwargs):
email_address = kwargs.get("email_address")
JoinInvitation.process_independent_joins(
user=email_address.user,
email=email_address.email
)
@receiver(post_save, sender=User)
def create_stat(sender, instance=None, **kwargs):
if instance is None:
return
InvitationStat.objects.get_or_create(user=instance)
<commit_msg>Handle case where a user skips email confirmation
In DUA, if you are invited to a site and you end
up signing up with the same email address, DUA will
skip the confirmation cycle and count it as
confirmed already.<commit_after>from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import User
from account.models import SignupCodeResult, EmailConfirmation
from account.signals import signup_code_used, email_confirmed, user_signed_up
from kaleo.models import JoinInvitation, InvitationStat
@receiver(signup_code_used, sender=SignupCodeResult)
def handle_signup_code_used(sender, **kwargs):
result = kwargs.get("signup_code_result")
try:
invite = result.signup_code.joininvitation
invite.accept(result.user)
except JoinInvitation.DoesNotExist:
pass
@receiver(email_confirmed, sender=EmailConfirmation)
def handle_email_confirmed(sender, **kwargs):
email_address = kwargs.get("email_address")
JoinInvitation.process_independent_joins(
user=email_address.user,
email=email_address.email
)
@receiver(user_signed_up)
def handle_user_signup(sender, user, form, **kwargs):
email_qs = user.emailaddress_set.filter(email=user.email, verified=True)
if user.is_active and email_qs.exists():
JoinInvitation.process_independent_joins(
user=user,
email=user.email
)
@receiver(post_save, sender=User)
def create_stat(sender, instance=None, **kwargs):
if instance is None:
return
InvitationStat.objects.get_or_create(user=instance)
|
52c8a9e3224330e8ddec063fce0bb0822d28f3c4
|
src/home/views.py
|
src/home/views.py
|
from django.shortcuts import render
from django.http import HttpResponse
from django.template import loader
from MinecrunchWeb import settings
# Create your views here.
def index(request):
template = loader.get_template('home/home.html')
return HttpResponse(template.render())
# Create your views here.
def modpacks(request):
template = loader.get_template('home/modpacks.html')
return HttpResponse(template.render())
|
from django.shortcuts import render
from django.http import HttpResponse
from django.template import loader
#from MinecrunchWeb import settings
# Create your views here.
def index(request):
template = loader.get_template('home/home.html')
return HttpResponse(template.render())
# Create your views here.
def modpacks(request):
template = loader.get_template('home/modpacks.html')
return HttpResponse(template.render())
|
Comment out import line to prevent error
|
Comment out import line to prevent error
|
Python
|
mit
|
Jonpro03/Minecrunch_Web,Jonpro03/Minecrunch_Web,Jonpro03/Minecrunch_Web
|
from django.shortcuts import render
from django.http import HttpResponse
from django.template import loader
from MinecrunchWeb import settings
# Create your views here.
def index(request):
template = loader.get_template('home/home.html')
return HttpResponse(template.render())
# Create your views here.
def modpacks(request):
template = loader.get_template('home/modpacks.html')
return HttpResponse(template.render())
Comment out import line to prevent error
|
from django.shortcuts import render
from django.http import HttpResponse
from django.template import loader
#from MinecrunchWeb import settings
# Create your views here.
def index(request):
template = loader.get_template('home/home.html')
return HttpResponse(template.render())
# Create your views here.
def modpacks(request):
template = loader.get_template('home/modpacks.html')
return HttpResponse(template.render())
|
<commit_before>from django.shortcuts import render
from django.http import HttpResponse
from django.template import loader
from MinecrunchWeb import settings
# Create your views here.
def index(request):
template = loader.get_template('home/home.html')
return HttpResponse(template.render())
# Create your views here.
def modpacks(request):
template = loader.get_template('home/modpacks.html')
return HttpResponse(template.render())
<commit_msg>Comment out import line to prevent error<commit_after>
|
from django.shortcuts import render
from django.http import HttpResponse
from django.template import loader
#from MinecrunchWeb import settings
# Create your views here.
def index(request):
template = loader.get_template('home/home.html')
return HttpResponse(template.render())
# Create your views here.
def modpacks(request):
template = loader.get_template('home/modpacks.html')
return HttpResponse(template.render())
|
from django.shortcuts import render
from django.http import HttpResponse
from django.template import loader
from MinecrunchWeb import settings
# Create your views here.
def index(request):
template = loader.get_template('home/home.html')
return HttpResponse(template.render())
# Create your views here.
def modpacks(request):
template = loader.get_template('home/modpacks.html')
return HttpResponse(template.render())
Comment out import line to prevent errorfrom django.shortcuts import render
from django.http import HttpResponse
from django.template import loader
#from MinecrunchWeb import settings
# Create your views here.
def index(request):
template = loader.get_template('home/home.html')
return HttpResponse(template.render())
# Create your views here.
def modpacks(request):
template = loader.get_template('home/modpacks.html')
return HttpResponse(template.render())
|
<commit_before>from django.shortcuts import render
from django.http import HttpResponse
from django.template import loader
from MinecrunchWeb import settings
# Create your views here.
def index(request):
template = loader.get_template('home/home.html')
return HttpResponse(template.render())
# Create your views here.
def modpacks(request):
template = loader.get_template('home/modpacks.html')
return HttpResponse(template.render())
<commit_msg>Comment out import line to prevent error<commit_after>from django.shortcuts import render
from django.http import HttpResponse
from django.template import loader
#from MinecrunchWeb import settings
# Create your views here.
def index(request):
template = loader.get_template('home/home.html')
return HttpResponse(template.render())
# Create your views here.
def modpacks(request):
template = loader.get_template('home/modpacks.html')
return HttpResponse(template.render())
|
d12884572175cc74ea9e410909128e590a29d1d8
|
pygments/styles/igor.py
|
pygments/styles/igor.py
|
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic
class IgorStyle(Style):
default_style = ""
styles = {
Comment: 'italic #FF0000',
Keyword: '#0000FF',
Name.Function: '#C34E00',
Name.Class: '#007575',
String: '#009C00'
}
|
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic
class IgorStyle(Style):
"""
Pygments version of the official colors for Igor Pro procedures.
"""
default_style = ""
styles = {
Comment: 'italic #FF0000',
Keyword: '#0000FF',
Name.Function: '#C34E00',
Name.Decorator: '#CC00A3',
Name.Class: '#007575',
String: '#009C00'
}
|
Add class comment and a custom color for the decorator
|
Add class comment and a custom color for the decorator
|
Python
|
bsd-2-clause
|
spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments,spencerlyon2/pygments
|
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic
class IgorStyle(Style):
default_style = ""
styles = {
Comment: 'italic #FF0000',
Keyword: '#0000FF',
Name.Function: '#C34E00',
Name.Class: '#007575',
String: '#009C00'
}
Add class comment and a custom color for the decorator
|
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic
class IgorStyle(Style):
"""
Pygments version of the official colors for Igor Pro procedures.
"""
default_style = ""
styles = {
Comment: 'italic #FF0000',
Keyword: '#0000FF',
Name.Function: '#C34E00',
Name.Decorator: '#CC00A3',
Name.Class: '#007575',
String: '#009C00'
}
|
<commit_before>from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic
class IgorStyle(Style):
default_style = ""
styles = {
Comment: 'italic #FF0000',
Keyword: '#0000FF',
Name.Function: '#C34E00',
Name.Class: '#007575',
String: '#009C00'
}
<commit_msg>Add class comment and a custom color for the decorator<commit_after>
|
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic
class IgorStyle(Style):
"""
Pygments version of the official colors for Igor Pro procedures.
"""
default_style = ""
styles = {
Comment: 'italic #FF0000',
Keyword: '#0000FF',
Name.Function: '#C34E00',
Name.Decorator: '#CC00A3',
Name.Class: '#007575',
String: '#009C00'
}
|
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic
class IgorStyle(Style):
default_style = ""
styles = {
Comment: 'italic #FF0000',
Keyword: '#0000FF',
Name.Function: '#C34E00',
Name.Class: '#007575',
String: '#009C00'
}
Add class comment and a custom color for the decoratorfrom pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic
class IgorStyle(Style):
"""
Pygments version of the official colors for Igor Pro procedures.
"""
default_style = ""
styles = {
Comment: 'italic #FF0000',
Keyword: '#0000FF',
Name.Function: '#C34E00',
Name.Decorator: '#CC00A3',
Name.Class: '#007575',
String: '#009C00'
}
|
<commit_before>from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic
class IgorStyle(Style):
default_style = ""
styles = {
Comment: 'italic #FF0000',
Keyword: '#0000FF',
Name.Function: '#C34E00',
Name.Class: '#007575',
String: '#009C00'
}
<commit_msg>Add class comment and a custom color for the decorator<commit_after>from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic
class IgorStyle(Style):
"""
Pygments version of the official colors for Igor Pro procedures.
"""
default_style = ""
styles = {
Comment: 'italic #FF0000',
Keyword: '#0000FF',
Name.Function: '#C34E00',
Name.Decorator: '#CC00A3',
Name.Class: '#007575',
String: '#009C00'
}
|
27839484173c4d505ddb9f949da3576f180b8266
|
tests/test_short_url.py
|
tests/test_short_url.py
|
# -*- coding: utf-8 -*-
from random import randrange
from pytest import raises
import short_url
def test_custom_alphabet():
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
assert url == 'bbaaaaaaaaaaaaaaaaaaaa'
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
assert key == 12
def test_too_short_alphabet():
with raises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with raises(AttributeError):
short_url.UrlEncoder(alphabet='a')
|
# -*- coding: utf-8 -*-
import os
from random import randrange
from pytest import raises
import short_url
TEST_DATA = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
TEST_DATA = os.path.join(TEST_DATA, 'tests/data')
def generate_test_data(count=10000):
result = {}
for i in range(1000):
value = short_url.encode_url(i)
result[i] = value
while len(result) < count:
random_int = randrange(1000000)
value = short_url.encode_url(random_int)
result[random_int] = value
with open(os.path.join(TEST_DATA, 'key_values.txt'), 'w') as f:
for k, v in result.items():
f.write('%s:%s\n' % (k, v))
# generate_test_data()
def test_custom_alphabet():
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
assert url == 'bbaaaaaaaaaaaaaaaaaaaa'
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
assert key == 12
def test_too_short_alphabet():
with raises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with raises(AttributeError):
short_url.UrlEncoder(alphabet='a')
|
Add function for generating test data
|
Add function for generating test data
|
Python
|
mit
|
Alir3z4/python-short_url
|
# -*- coding: utf-8 -*-
from random import randrange
from pytest import raises
import short_url
def test_custom_alphabet():
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
assert url == 'bbaaaaaaaaaaaaaaaaaaaa'
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
assert key == 12
def test_too_short_alphabet():
with raises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with raises(AttributeError):
short_url.UrlEncoder(alphabet='a')
Add function for generating test data
|
# -*- coding: utf-8 -*-
import os
from random import randrange
from pytest import raises
import short_url
TEST_DATA = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
TEST_DATA = os.path.join(TEST_DATA, 'tests/data')
def generate_test_data(count=10000):
result = {}
for i in range(1000):
value = short_url.encode_url(i)
result[i] = value
while len(result) < count:
random_int = randrange(1000000)
value = short_url.encode_url(random_int)
result[random_int] = value
with open(os.path.join(TEST_DATA, 'key_values.txt'), 'w') as f:
for k, v in result.items():
f.write('%s:%s\n' % (k, v))
# generate_test_data()
def test_custom_alphabet():
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
assert url == 'bbaaaaaaaaaaaaaaaaaaaa'
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
assert key == 12
def test_too_short_alphabet():
with raises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with raises(AttributeError):
short_url.UrlEncoder(alphabet='a')
|
<commit_before># -*- coding: utf-8 -*-
from random import randrange
from pytest import raises
import short_url
def test_custom_alphabet():
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
assert url == 'bbaaaaaaaaaaaaaaaaaaaa'
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
assert key == 12
def test_too_short_alphabet():
with raises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with raises(AttributeError):
short_url.UrlEncoder(alphabet='a')
<commit_msg>Add function for generating test data<commit_after>
|
# -*- coding: utf-8 -*-
import os
from random import randrange
from pytest import raises
import short_url
TEST_DATA = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
TEST_DATA = os.path.join(TEST_DATA, 'tests/data')
def generate_test_data(count=10000):
result = {}
for i in range(1000):
value = short_url.encode_url(i)
result[i] = value
while len(result) < count:
random_int = randrange(1000000)
value = short_url.encode_url(random_int)
result[random_int] = value
with open(os.path.join(TEST_DATA, 'key_values.txt'), 'w') as f:
for k, v in result.items():
f.write('%s:%s\n' % (k, v))
# generate_test_data()
def test_custom_alphabet():
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
assert url == 'bbaaaaaaaaaaaaaaaaaaaa'
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
assert key == 12
def test_too_short_alphabet():
with raises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with raises(AttributeError):
short_url.UrlEncoder(alphabet='a')
|
# -*- coding: utf-8 -*-
from random import randrange
from pytest import raises
import short_url
def test_custom_alphabet():
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
assert url == 'bbaaaaaaaaaaaaaaaaaaaa'
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
assert key == 12
def test_too_short_alphabet():
with raises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with raises(AttributeError):
short_url.UrlEncoder(alphabet='a')
Add function for generating test data# -*- coding: utf-8 -*-
import os
from random import randrange
from pytest import raises
import short_url
TEST_DATA = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
TEST_DATA = os.path.join(TEST_DATA, 'tests/data')
def generate_test_data(count=10000):
result = {}
for i in range(1000):
value = short_url.encode_url(i)
result[i] = value
while len(result) < count:
random_int = randrange(1000000)
value = short_url.encode_url(random_int)
result[random_int] = value
with open(os.path.join(TEST_DATA, 'key_values.txt'), 'w') as f:
for k, v in result.items():
f.write('%s:%s\n' % (k, v))
# generate_test_data()
def test_custom_alphabet():
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
assert url == 'bbaaaaaaaaaaaaaaaaaaaa'
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
assert key == 12
def test_too_short_alphabet():
with raises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with raises(AttributeError):
short_url.UrlEncoder(alphabet='a')
|
<commit_before># -*- coding: utf-8 -*-
from random import randrange
from pytest import raises
import short_url
def test_custom_alphabet():
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
assert url == 'bbaaaaaaaaaaaaaaaaaaaa'
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
assert key == 12
def test_too_short_alphabet():
with raises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with raises(AttributeError):
short_url.UrlEncoder(alphabet='a')
<commit_msg>Add function for generating test data<commit_after># -*- coding: utf-8 -*-
import os
from random import randrange
from pytest import raises
import short_url
TEST_DATA = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
TEST_DATA = os.path.join(TEST_DATA, 'tests/data')
def generate_test_data(count=10000):
result = {}
for i in range(1000):
value = short_url.encode_url(i)
result[i] = value
while len(result) < count:
random_int = randrange(1000000)
value = short_url.encode_url(random_int)
result[random_int] = value
with open(os.path.join(TEST_DATA, 'key_values.txt'), 'w') as f:
for k, v in result.items():
f.write('%s:%s\n' % (k, v))
# generate_test_data()
def test_custom_alphabet():
encoder = short_url.UrlEncoder(alphabet='ab')
url = encoder.encode_url(12)
assert url == 'bbaaaaaaaaaaaaaaaaaaaa'
key = encoder.decode_url('bbaaaaaaaaaaaaaaaaaaaa')
assert key == 12
def test_too_short_alphabet():
with raises(AttributeError):
short_url.UrlEncoder(alphabet='aa')
with raises(AttributeError):
short_url.UrlEncoder(alphabet='a')
|
d6b8b7bf471f6e78147eb0b8ad518c4f8964deb3
|
cle/backends/cgc.py
|
cle/backends/cgc.py
|
from .elf import ELF
from ..loader import Loader
ELF_HEADER = "7f45 4c46 0101 0100 0000 0000 0000 0000".replace(" ","").decode('hex')
class CGC(ELF):
def __init__(self, path, *args, **kwargs):
self.elf_path = Loader._make_tmp_copy(path)
f = open(self.elf_path, 'r+b')
f.write(ELF_HEADER)
f.close()
super(CGC, self).__init__(self.elf_path, *args, **kwargs)
self.binary = self.elf_path
self.os = 'cgc'
self.execstack = True # the stack is always executable in CGC
supported_filetypes = ['cgc']
|
from .elf import ELF
from ..loader import Loader
ELF_HEADER = "7f45 4c46 0101 0100 0000 0000 0000 0000".replace(" ","").decode('hex')
CGC_HEADER = "7f43 4743 0101 0143 014d 6572 696e 6f00".replace(" ","").decode('hex')
class CGC(ELF):
def __init__(self, path, *args, **kwargs):
self.elf_path = Loader._make_tmp_copy(path)
f = open(self.elf_path, 'r+b')
f.write(ELF_HEADER)
f.close()
super(CGC, self).__init__(self.elf_path, *args, **kwargs)
self.memory.write_bytes(self.get_min_addr(), CGC_HEADER) # repair CGC header
self.binary = self.elf_path
self.os = 'cgc'
self.execstack = True # the stack is always executable in CGC
supported_filetypes = ['cgc']
|
Repair CGC file header after loading
|
Repair CGC file header after loading
|
Python
|
bsd-2-clause
|
chubbymaggie/cle,angr/cle
|
from .elf import ELF
from ..loader import Loader
ELF_HEADER = "7f45 4c46 0101 0100 0000 0000 0000 0000".replace(" ","").decode('hex')
class CGC(ELF):
def __init__(self, path, *args, **kwargs):
self.elf_path = Loader._make_tmp_copy(path)
f = open(self.elf_path, 'r+b')
f.write(ELF_HEADER)
f.close()
super(CGC, self).__init__(self.elf_path, *args, **kwargs)
self.binary = self.elf_path
self.os = 'cgc'
self.execstack = True # the stack is always executable in CGC
supported_filetypes = ['cgc']
Repair CGC file header after loading
|
from .elf import ELF
from ..loader import Loader
ELF_HEADER = "7f45 4c46 0101 0100 0000 0000 0000 0000".replace(" ","").decode('hex')
CGC_HEADER = "7f43 4743 0101 0143 014d 6572 696e 6f00".replace(" ","").decode('hex')
class CGC(ELF):
def __init__(self, path, *args, **kwargs):
self.elf_path = Loader._make_tmp_copy(path)
f = open(self.elf_path, 'r+b')
f.write(ELF_HEADER)
f.close()
super(CGC, self).__init__(self.elf_path, *args, **kwargs)
self.memory.write_bytes(self.get_min_addr(), CGC_HEADER) # repair CGC header
self.binary = self.elf_path
self.os = 'cgc'
self.execstack = True # the stack is always executable in CGC
supported_filetypes = ['cgc']
|
<commit_before>from .elf import ELF
from ..loader import Loader
ELF_HEADER = "7f45 4c46 0101 0100 0000 0000 0000 0000".replace(" ","").decode('hex')
class CGC(ELF):
def __init__(self, path, *args, **kwargs):
self.elf_path = Loader._make_tmp_copy(path)
f = open(self.elf_path, 'r+b')
f.write(ELF_HEADER)
f.close()
super(CGC, self).__init__(self.elf_path, *args, **kwargs)
self.binary = self.elf_path
self.os = 'cgc'
self.execstack = True # the stack is always executable in CGC
supported_filetypes = ['cgc']
<commit_msg>Repair CGC file header after loading<commit_after>
|
from .elf import ELF
from ..loader import Loader
ELF_HEADER = "7f45 4c46 0101 0100 0000 0000 0000 0000".replace(" ","").decode('hex')
CGC_HEADER = "7f43 4743 0101 0143 014d 6572 696e 6f00".replace(" ","").decode('hex')
class CGC(ELF):
def __init__(self, path, *args, **kwargs):
self.elf_path = Loader._make_tmp_copy(path)
f = open(self.elf_path, 'r+b')
f.write(ELF_HEADER)
f.close()
super(CGC, self).__init__(self.elf_path, *args, **kwargs)
self.memory.write_bytes(self.get_min_addr(), CGC_HEADER) # repair CGC header
self.binary = self.elf_path
self.os = 'cgc'
self.execstack = True # the stack is always executable in CGC
supported_filetypes = ['cgc']
|
from .elf import ELF
from ..loader import Loader
ELF_HEADER = "7f45 4c46 0101 0100 0000 0000 0000 0000".replace(" ","").decode('hex')
class CGC(ELF):
def __init__(self, path, *args, **kwargs):
self.elf_path = Loader._make_tmp_copy(path)
f = open(self.elf_path, 'r+b')
f.write(ELF_HEADER)
f.close()
super(CGC, self).__init__(self.elf_path, *args, **kwargs)
self.binary = self.elf_path
self.os = 'cgc'
self.execstack = True # the stack is always executable in CGC
supported_filetypes = ['cgc']
Repair CGC file header after loadingfrom .elf import ELF
from ..loader import Loader
ELF_HEADER = "7f45 4c46 0101 0100 0000 0000 0000 0000".replace(" ","").decode('hex')
CGC_HEADER = "7f43 4743 0101 0143 014d 6572 696e 6f00".replace(" ","").decode('hex')
class CGC(ELF):
def __init__(self, path, *args, **kwargs):
self.elf_path = Loader._make_tmp_copy(path)
f = open(self.elf_path, 'r+b')
f.write(ELF_HEADER)
f.close()
super(CGC, self).__init__(self.elf_path, *args, **kwargs)
self.memory.write_bytes(self.get_min_addr(), CGC_HEADER) # repair CGC header
self.binary = self.elf_path
self.os = 'cgc'
self.execstack = True # the stack is always executable in CGC
supported_filetypes = ['cgc']
|
<commit_before>from .elf import ELF
from ..loader import Loader
ELF_HEADER = "7f45 4c46 0101 0100 0000 0000 0000 0000".replace(" ","").decode('hex')
class CGC(ELF):
def __init__(self, path, *args, **kwargs):
self.elf_path = Loader._make_tmp_copy(path)
f = open(self.elf_path, 'r+b')
f.write(ELF_HEADER)
f.close()
super(CGC, self).__init__(self.elf_path, *args, **kwargs)
self.binary = self.elf_path
self.os = 'cgc'
self.execstack = True # the stack is always executable in CGC
supported_filetypes = ['cgc']
<commit_msg>Repair CGC file header after loading<commit_after>from .elf import ELF
from ..loader import Loader
ELF_HEADER = "7f45 4c46 0101 0100 0000 0000 0000 0000".replace(" ","").decode('hex')
CGC_HEADER = "7f43 4743 0101 0143 014d 6572 696e 6f00".replace(" ","").decode('hex')
class CGC(ELF):
def __init__(self, path, *args, **kwargs):
self.elf_path = Loader._make_tmp_copy(path)
f = open(self.elf_path, 'r+b')
f.write(ELF_HEADER)
f.close()
super(CGC, self).__init__(self.elf_path, *args, **kwargs)
self.memory.write_bytes(self.get_min_addr(), CGC_HEADER) # repair CGC header
self.binary = self.elf_path
self.os = 'cgc'
self.execstack = True # the stack is always executable in CGC
supported_filetypes = ['cgc']
|
e0a5b6d44de699dab48cb9de1f745f77c3674352
|
blackgate/executor.py
|
blackgate/executor.py
|
# -*- coding: utf-8 -*-
import sys
from concurrent.futures import ThreadPoolExecutor
from tornado import gen, queues
from tornado.concurrent import Future, run_on_executor
class WorkItem(object):
executor = ThreadPoolExecutor(20)
def __init__(self, future, fn, args, kwargs):
self.future = future
self.fn = fn
self.args = args
self.kwargs = kwargs
@run_on_executor
def run(self):
try:
result = self.fn(*self.args, **self.kwargs)
except BaseException:
e, tb = sys.exc_info()[1:]
self.future.set_exc_info(e)
else:
self.future.set_result(result)
class QueueExecutor(object):
def __init__(self, pool_key, max_size):
self._pool_key = pool_key
self._max_size = max_size
self._work_queue = queues.Queue(max_size)
def submit(self, fn, *args, **kwargs):
future = Future()
item = WorkItem(future, fn, args, kwargs)
self._work_queue.put_nowait(item)
return future
@gen.coroutine
def consume(self):
while True:
try:
item = yield self._work_queue.get()
yield item.run()
finally:
self._work_queue.task_done()
|
# -*- coding: utf-8 -*-
import sys
from concurrent.futures import ThreadPoolExecutor
from tornado import gen, queues
from tornado.concurrent import Future, run_on_executor
class WorkItem(object):
executor = ThreadPoolExecutor(20)
def __init__(self, future, fn, args, kwargs):
self.future = future
self.fn = fn
self.args = args
self.kwargs = kwargs
@run_on_executor
def run(self):
try:
result = self.fn(*self.args, **self.kwargs)
except BaseException:
self.future.set_exc_info(sys.exc_info())
else:
self.future.set_result(result)
class QueueExecutor(object):
def __init__(self, pool_key, max_size):
self._pool_key = pool_key
self._max_size = max_size
self._work_queue = queues.Queue(max_size)
def submit(self, fn, *args, **kwargs):
future = Future()
item = WorkItem(future, fn, args, kwargs)
self._work_queue.put_nowait(item)
return future
@gen.coroutine
def consume(self):
while True:
try:
item = yield self._work_queue.get()
yield item.run()
finally:
self._work_queue.task_done()
|
Fix exc info setter for future.
|
Fix exc info setter for future.
|
Python
|
mit
|
soasme/blackgate
|
# -*- coding: utf-8 -*-
import sys
from concurrent.futures import ThreadPoolExecutor
from tornado import gen, queues
from tornado.concurrent import Future, run_on_executor
class WorkItem(object):
executor = ThreadPoolExecutor(20)
def __init__(self, future, fn, args, kwargs):
self.future = future
self.fn = fn
self.args = args
self.kwargs = kwargs
@run_on_executor
def run(self):
try:
result = self.fn(*self.args, **self.kwargs)
except BaseException:
e, tb = sys.exc_info()[1:]
self.future.set_exc_info(e)
else:
self.future.set_result(result)
class QueueExecutor(object):
def __init__(self, pool_key, max_size):
self._pool_key = pool_key
self._max_size = max_size
self._work_queue = queues.Queue(max_size)
def submit(self, fn, *args, **kwargs):
future = Future()
item = WorkItem(future, fn, args, kwargs)
self._work_queue.put_nowait(item)
return future
@gen.coroutine
def consume(self):
while True:
try:
item = yield self._work_queue.get()
yield item.run()
finally:
self._work_queue.task_done()
Fix exc info setter for future.
|
# -*- coding: utf-8 -*-
import sys
from concurrent.futures import ThreadPoolExecutor
from tornado import gen, queues
from tornado.concurrent import Future, run_on_executor
class WorkItem(object):
executor = ThreadPoolExecutor(20)
def __init__(self, future, fn, args, kwargs):
self.future = future
self.fn = fn
self.args = args
self.kwargs = kwargs
@run_on_executor
def run(self):
try:
result = self.fn(*self.args, **self.kwargs)
except BaseException:
self.future.set_exc_info(sys.exc_info())
else:
self.future.set_result(result)
class QueueExecutor(object):
def __init__(self, pool_key, max_size):
self._pool_key = pool_key
self._max_size = max_size
self._work_queue = queues.Queue(max_size)
def submit(self, fn, *args, **kwargs):
future = Future()
item = WorkItem(future, fn, args, kwargs)
self._work_queue.put_nowait(item)
return future
@gen.coroutine
def consume(self):
while True:
try:
item = yield self._work_queue.get()
yield item.run()
finally:
self._work_queue.task_done()
|
<commit_before># -*- coding: utf-8 -*-
import sys
from concurrent.futures import ThreadPoolExecutor
from tornado import gen, queues
from tornado.concurrent import Future, run_on_executor
class WorkItem(object):
executor = ThreadPoolExecutor(20)
def __init__(self, future, fn, args, kwargs):
self.future = future
self.fn = fn
self.args = args
self.kwargs = kwargs
@run_on_executor
def run(self):
try:
result = self.fn(*self.args, **self.kwargs)
except BaseException:
e, tb = sys.exc_info()[1:]
self.future.set_exc_info(e)
else:
self.future.set_result(result)
class QueueExecutor(object):
def __init__(self, pool_key, max_size):
self._pool_key = pool_key
self._max_size = max_size
self._work_queue = queues.Queue(max_size)
def submit(self, fn, *args, **kwargs):
future = Future()
item = WorkItem(future, fn, args, kwargs)
self._work_queue.put_nowait(item)
return future
@gen.coroutine
def consume(self):
while True:
try:
item = yield self._work_queue.get()
yield item.run()
finally:
self._work_queue.task_done()
<commit_msg>Fix exc info setter for future.<commit_after>
|
# -*- coding: utf-8 -*-
import sys
from concurrent.futures import ThreadPoolExecutor
from tornado import gen, queues
from tornado.concurrent import Future, run_on_executor
class WorkItem(object):
executor = ThreadPoolExecutor(20)
def __init__(self, future, fn, args, kwargs):
self.future = future
self.fn = fn
self.args = args
self.kwargs = kwargs
@run_on_executor
def run(self):
try:
result = self.fn(*self.args, **self.kwargs)
except BaseException:
self.future.set_exc_info(sys.exc_info())
else:
self.future.set_result(result)
class QueueExecutor(object):
def __init__(self, pool_key, max_size):
self._pool_key = pool_key
self._max_size = max_size
self._work_queue = queues.Queue(max_size)
def submit(self, fn, *args, **kwargs):
future = Future()
item = WorkItem(future, fn, args, kwargs)
self._work_queue.put_nowait(item)
return future
@gen.coroutine
def consume(self):
while True:
try:
item = yield self._work_queue.get()
yield item.run()
finally:
self._work_queue.task_done()
|
# -*- coding: utf-8 -*-
import sys
from concurrent.futures import ThreadPoolExecutor
from tornado import gen, queues
from tornado.concurrent import Future, run_on_executor
class WorkItem(object):
executor = ThreadPoolExecutor(20)
def __init__(self, future, fn, args, kwargs):
self.future = future
self.fn = fn
self.args = args
self.kwargs = kwargs
@run_on_executor
def run(self):
try:
result = self.fn(*self.args, **self.kwargs)
except BaseException:
e, tb = sys.exc_info()[1:]
self.future.set_exc_info(e)
else:
self.future.set_result(result)
class QueueExecutor(object):
def __init__(self, pool_key, max_size):
self._pool_key = pool_key
self._max_size = max_size
self._work_queue = queues.Queue(max_size)
def submit(self, fn, *args, **kwargs):
future = Future()
item = WorkItem(future, fn, args, kwargs)
self._work_queue.put_nowait(item)
return future
@gen.coroutine
def consume(self):
while True:
try:
item = yield self._work_queue.get()
yield item.run()
finally:
self._work_queue.task_done()
Fix exc info setter for future.# -*- coding: utf-8 -*-
import sys
from concurrent.futures import ThreadPoolExecutor
from tornado import gen, queues
from tornado.concurrent import Future, run_on_executor
class WorkItem(object):
executor = ThreadPoolExecutor(20)
def __init__(self, future, fn, args, kwargs):
self.future = future
self.fn = fn
self.args = args
self.kwargs = kwargs
@run_on_executor
def run(self):
try:
result = self.fn(*self.args, **self.kwargs)
except BaseException:
self.future.set_exc_info(sys.exc_info())
else:
self.future.set_result(result)
class QueueExecutor(object):
def __init__(self, pool_key, max_size):
self._pool_key = pool_key
self._max_size = max_size
self._work_queue = queues.Queue(max_size)
def submit(self, fn, *args, **kwargs):
future = Future()
item = WorkItem(future, fn, args, kwargs)
self._work_queue.put_nowait(item)
return future
@gen.coroutine
def consume(self):
while True:
try:
item = yield self._work_queue.get()
yield item.run()
finally:
self._work_queue.task_done()
|
<commit_before># -*- coding: utf-8 -*-
import sys
from concurrent.futures import ThreadPoolExecutor
from tornado import gen, queues
from tornado.concurrent import Future, run_on_executor
class WorkItem(object):
executor = ThreadPoolExecutor(20)
def __init__(self, future, fn, args, kwargs):
self.future = future
self.fn = fn
self.args = args
self.kwargs = kwargs
@run_on_executor
def run(self):
try:
result = self.fn(*self.args, **self.kwargs)
except BaseException:
e, tb = sys.exc_info()[1:]
self.future.set_exc_info(e)
else:
self.future.set_result(result)
class QueueExecutor(object):
def __init__(self, pool_key, max_size):
self._pool_key = pool_key
self._max_size = max_size
self._work_queue = queues.Queue(max_size)
def submit(self, fn, *args, **kwargs):
future = Future()
item = WorkItem(future, fn, args, kwargs)
self._work_queue.put_nowait(item)
return future
@gen.coroutine
def consume(self):
while True:
try:
item = yield self._work_queue.get()
yield item.run()
finally:
self._work_queue.task_done()
<commit_msg>Fix exc info setter for future.<commit_after># -*- coding: utf-8 -*-
import sys
from concurrent.futures import ThreadPoolExecutor
from tornado import gen, queues
from tornado.concurrent import Future, run_on_executor
class WorkItem(object):
executor = ThreadPoolExecutor(20)
def __init__(self, future, fn, args, kwargs):
self.future = future
self.fn = fn
self.args = args
self.kwargs = kwargs
@run_on_executor
def run(self):
try:
result = self.fn(*self.args, **self.kwargs)
except BaseException:
self.future.set_exc_info(sys.exc_info())
else:
self.future.set_result(result)
class QueueExecutor(object):
def __init__(self, pool_key, max_size):
self._pool_key = pool_key
self._max_size = max_size
self._work_queue = queues.Queue(max_size)
def submit(self, fn, *args, **kwargs):
future = Future()
item = WorkItem(future, fn, args, kwargs)
self._work_queue.put_nowait(item)
return future
@gen.coroutine
def consume(self):
while True:
try:
item = yield self._work_queue.get()
yield item.run()
finally:
self._work_queue.task_done()
|
ab2f98539272c8dfb64031cd009c70f7987be359
|
importer/importer/indices.py
|
importer/importer/indices.py
|
import aioes
import os.path
from datetime import datetime
from .utils import read_json_file
from .settings import ELASTICSEARCH_ALIAS
def create_new_index(elastic):
print("Creating new index...")
module_path = os.path.dirname(__file__)
config_filename = os.path.join(module_path, 'configuration', 'index.json')
index_configuration = read_json_file(config_filename)
index_name = generate_index_name()
elastic.indices.create(index_name, index_configuration)
return index_name
def switch_alias_to_index(elastic, alias_name, index_name):
print("Switching alias '%s' to index '%s'..." % (alias_name, index_name))
try:
existing_aliases = elastic.indices.get_alias(name=alias_name)
except aioes.NotFoundError:
existing_aliases = []
actions = []
for existing_index_name in existing_aliases:
actions.append({
'remove': {
'index': existing_index_name,
'alias': alias_name,
}
})
actions.append({
'add': {
'index': index_name,
'alias': alias_name
}
})
elastic.indices.update_aliases({'actions': actions})
def generate_index_name():
return '{}-{}'.format(ELASTICSEARCH_ALIAS, int(datetime.now().timestamp()))
|
import elasticsearch
import os.path
from datetime import datetime
from .utils import read_json_file
from .settings import ELASTICSEARCH_ALIAS
def create_new_index(elastic):
print("Creating new index...")
module_path = os.path.dirname(__file__)
config_filename = os.path.join(module_path, 'configuration', 'index.json')
index_configuration = read_json_file(config_filename)
index_name = generate_index_name()
elastic.indices.create(index_name, index_configuration)
return index_name
def switch_alias_to_index(elastic, alias_name, index_name):
print("Switching alias '%s' to index '%s'..." % (alias_name, index_name))
try:
existing_aliases = elastic.indices.get_alias(name=alias_name)
except elasticsearch.NotFoundError:
existing_aliases = []
actions = []
for existing_index_name in existing_aliases:
actions.append({
'remove': {
'index': existing_index_name,
'alias': alias_name,
}
})
actions.append({
'add': {
'index': index_name,
'alias': alias_name
}
})
elastic.indices.update_aliases({'actions': actions})
def generate_index_name():
return '{}-{}'.format(ELASTICSEARCH_ALIAS, int(datetime.now().timestamp()))
|
Replace one last place where aioes was used
|
Replace one last place where aioes was used
|
Python
|
mit
|
despawnerer/theatrics,despawnerer/theatrics,despawnerer/theatrics
|
import aioes
import os.path
from datetime import datetime
from .utils import read_json_file
from .settings import ELASTICSEARCH_ALIAS
def create_new_index(elastic):
print("Creating new index...")
module_path = os.path.dirname(__file__)
config_filename = os.path.join(module_path, 'configuration', 'index.json')
index_configuration = read_json_file(config_filename)
index_name = generate_index_name()
elastic.indices.create(index_name, index_configuration)
return index_name
def switch_alias_to_index(elastic, alias_name, index_name):
print("Switching alias '%s' to index '%s'..." % (alias_name, index_name))
try:
existing_aliases = elastic.indices.get_alias(name=alias_name)
except aioes.NotFoundError:
existing_aliases = []
actions = []
for existing_index_name in existing_aliases:
actions.append({
'remove': {
'index': existing_index_name,
'alias': alias_name,
}
})
actions.append({
'add': {
'index': index_name,
'alias': alias_name
}
})
elastic.indices.update_aliases({'actions': actions})
def generate_index_name():
return '{}-{}'.format(ELASTICSEARCH_ALIAS, int(datetime.now().timestamp()))
Replace one last place where aioes was used
|
import elasticsearch
import os.path
from datetime import datetime
from .utils import read_json_file
from .settings import ELASTICSEARCH_ALIAS
def create_new_index(elastic):
print("Creating new index...")
module_path = os.path.dirname(__file__)
config_filename = os.path.join(module_path, 'configuration', 'index.json')
index_configuration = read_json_file(config_filename)
index_name = generate_index_name()
elastic.indices.create(index_name, index_configuration)
return index_name
def switch_alias_to_index(elastic, alias_name, index_name):
print("Switching alias '%s' to index '%s'..." % (alias_name, index_name))
try:
existing_aliases = elastic.indices.get_alias(name=alias_name)
except elasticsearch.NotFoundError:
existing_aliases = []
actions = []
for existing_index_name in existing_aliases:
actions.append({
'remove': {
'index': existing_index_name,
'alias': alias_name,
}
})
actions.append({
'add': {
'index': index_name,
'alias': alias_name
}
})
elastic.indices.update_aliases({'actions': actions})
def generate_index_name():
return '{}-{}'.format(ELASTICSEARCH_ALIAS, int(datetime.now().timestamp()))
|
<commit_before>import aioes
import os.path
from datetime import datetime
from .utils import read_json_file
from .settings import ELASTICSEARCH_ALIAS
def create_new_index(elastic):
print("Creating new index...")
module_path = os.path.dirname(__file__)
config_filename = os.path.join(module_path, 'configuration', 'index.json')
index_configuration = read_json_file(config_filename)
index_name = generate_index_name()
elastic.indices.create(index_name, index_configuration)
return index_name
def switch_alias_to_index(elastic, alias_name, index_name):
print("Switching alias '%s' to index '%s'..." % (alias_name, index_name))
try:
existing_aliases = elastic.indices.get_alias(name=alias_name)
except aioes.NotFoundError:
existing_aliases = []
actions = []
for existing_index_name in existing_aliases:
actions.append({
'remove': {
'index': existing_index_name,
'alias': alias_name,
}
})
actions.append({
'add': {
'index': index_name,
'alias': alias_name
}
})
elastic.indices.update_aliases({'actions': actions})
def generate_index_name():
return '{}-{}'.format(ELASTICSEARCH_ALIAS, int(datetime.now().timestamp()))
<commit_msg>Replace one last place where aioes was used<commit_after>
|
import elasticsearch
import os.path
from datetime import datetime
from .utils import read_json_file
from .settings import ELASTICSEARCH_ALIAS
def create_new_index(elastic):
print("Creating new index...")
module_path = os.path.dirname(__file__)
config_filename = os.path.join(module_path, 'configuration', 'index.json')
index_configuration = read_json_file(config_filename)
index_name = generate_index_name()
elastic.indices.create(index_name, index_configuration)
return index_name
def switch_alias_to_index(elastic, alias_name, index_name):
print("Switching alias '%s' to index '%s'..." % (alias_name, index_name))
try:
existing_aliases = elastic.indices.get_alias(name=alias_name)
except elasticsearch.NotFoundError:
existing_aliases = []
actions = []
for existing_index_name in existing_aliases:
actions.append({
'remove': {
'index': existing_index_name,
'alias': alias_name,
}
})
actions.append({
'add': {
'index': index_name,
'alias': alias_name
}
})
elastic.indices.update_aliases({'actions': actions})
def generate_index_name():
return '{}-{}'.format(ELASTICSEARCH_ALIAS, int(datetime.now().timestamp()))
|
import aioes
import os.path
from datetime import datetime
from .utils import read_json_file
from .settings import ELASTICSEARCH_ALIAS
def create_new_index(elastic):
print("Creating new index...")
module_path = os.path.dirname(__file__)
config_filename = os.path.join(module_path, 'configuration', 'index.json')
index_configuration = read_json_file(config_filename)
index_name = generate_index_name()
elastic.indices.create(index_name, index_configuration)
return index_name
def switch_alias_to_index(elastic, alias_name, index_name):
print("Switching alias '%s' to index '%s'..." % (alias_name, index_name))
try:
existing_aliases = elastic.indices.get_alias(name=alias_name)
except aioes.NotFoundError:
existing_aliases = []
actions = []
for existing_index_name in existing_aliases:
actions.append({
'remove': {
'index': existing_index_name,
'alias': alias_name,
}
})
actions.append({
'add': {
'index': index_name,
'alias': alias_name
}
})
elastic.indices.update_aliases({'actions': actions})
def generate_index_name():
return '{}-{}'.format(ELASTICSEARCH_ALIAS, int(datetime.now().timestamp()))
Replace one last place where aioes was usedimport elasticsearch
import os.path
from datetime import datetime
from .utils import read_json_file
from .settings import ELASTICSEARCH_ALIAS
def create_new_index(elastic):
print("Creating new index...")
module_path = os.path.dirname(__file__)
config_filename = os.path.join(module_path, 'configuration', 'index.json')
index_configuration = read_json_file(config_filename)
index_name = generate_index_name()
elastic.indices.create(index_name, index_configuration)
return index_name
def switch_alias_to_index(elastic, alias_name, index_name):
print("Switching alias '%s' to index '%s'..." % (alias_name, index_name))
try:
existing_aliases = elastic.indices.get_alias(name=alias_name)
except elasticsearch.NotFoundError:
existing_aliases = []
actions = []
for existing_index_name in existing_aliases:
actions.append({
'remove': {
'index': existing_index_name,
'alias': alias_name,
}
})
actions.append({
'add': {
'index': index_name,
'alias': alias_name
}
})
elastic.indices.update_aliases({'actions': actions})
def generate_index_name():
return '{}-{}'.format(ELASTICSEARCH_ALIAS, int(datetime.now().timestamp()))
|
<commit_before>import aioes
import os.path
from datetime import datetime
from .utils import read_json_file
from .settings import ELASTICSEARCH_ALIAS
def create_new_index(elastic):
print("Creating new index...")
module_path = os.path.dirname(__file__)
config_filename = os.path.join(module_path, 'configuration', 'index.json')
index_configuration = read_json_file(config_filename)
index_name = generate_index_name()
elastic.indices.create(index_name, index_configuration)
return index_name
def switch_alias_to_index(elastic, alias_name, index_name):
print("Switching alias '%s' to index '%s'..." % (alias_name, index_name))
try:
existing_aliases = elastic.indices.get_alias(name=alias_name)
except aioes.NotFoundError:
existing_aliases = []
actions = []
for existing_index_name in existing_aliases:
actions.append({
'remove': {
'index': existing_index_name,
'alias': alias_name,
}
})
actions.append({
'add': {
'index': index_name,
'alias': alias_name
}
})
elastic.indices.update_aliases({'actions': actions})
def generate_index_name():
return '{}-{}'.format(ELASTICSEARCH_ALIAS, int(datetime.now().timestamp()))
<commit_msg>Replace one last place where aioes was used<commit_after>import elasticsearch
import os.path
from datetime import datetime
from .utils import read_json_file
from .settings import ELASTICSEARCH_ALIAS
def create_new_index(elastic):
print("Creating new index...")
module_path = os.path.dirname(__file__)
config_filename = os.path.join(module_path, 'configuration', 'index.json')
index_configuration = read_json_file(config_filename)
index_name = generate_index_name()
elastic.indices.create(index_name, index_configuration)
return index_name
def switch_alias_to_index(elastic, alias_name, index_name):
print("Switching alias '%s' to index '%s'..." % (alias_name, index_name))
try:
existing_aliases = elastic.indices.get_alias(name=alias_name)
except elasticsearch.NotFoundError:
existing_aliases = []
actions = []
for existing_index_name in existing_aliases:
actions.append({
'remove': {
'index': existing_index_name,
'alias': alias_name,
}
})
actions.append({
'add': {
'index': index_name,
'alias': alias_name
}
})
elastic.indices.update_aliases({'actions': actions})
def generate_index_name():
return '{}-{}'.format(ELASTICSEARCH_ALIAS, int(datetime.now().timestamp()))
|
20450788fd0ddb59f80397542b02a2165333e963
|
intercom/__main__.py
|
intercom/__main__.py
|
import cherrypy
import click
from intercom.roots import IntercomRoot
CLICK_FILE_TYPE = click.Path(exists=True, dir_okay=False)
def run_server(global_config_filename, app_config_filename):
cherrypy.config.update(global_config_filename)
cherrypy.quickstart(root=IntercomRoot(), config=app_config_filename)
@click.command()
@click.argument('global_config', type=CLICK_FILE_TYPE)
@click.argument('app_config', type=CLICK_FILE_TYPE)
def main(global_config, app_config):
run_server(global_config, app_config)
if __name__ == '__main__':
main()
|
import cherrypy
import click
from intercom.roots import IntercomRoot
from cherrypy.process.plugins import Daemonizer, PIDFile
def run_server(
global_config_filename,
app_config_filename,
daemon_pid_filename=None):
if daemon_pid_filename is not None:
Daemonizer(cherrypy.engine).subscribe()
PIDFile(cherrypy.engine, daemon_pid_filename).subscribe()
cherrypy.config.update(global_config_filename)
cherrypy.quickstart(root=IntercomRoot(), config=app_config_filename)
@click.command()
@click.argument('global-config', type=click.Path(exists=True, dir_okay=False))
@click.argument('app-config', type=click.Path(exists=True, dir_okay=False))
@click.option('--daemon-pid-file', type=click.Path(dir_okay=False))
def main(global_config, app_config, daemon_pid_file):
run_server(global_config, app_config, daemon_pid_file)
if __name__ == '__main__':
main()
|
Add support for daemon mode.
|
Add support for daemon mode.
|
Python
|
isc
|
alexhanson/intercom
|
import cherrypy
import click
from intercom.roots import IntercomRoot
CLICK_FILE_TYPE = click.Path(exists=True, dir_okay=False)
def run_server(global_config_filename, app_config_filename):
cherrypy.config.update(global_config_filename)
cherrypy.quickstart(root=IntercomRoot(), config=app_config_filename)
@click.command()
@click.argument('global_config', type=CLICK_FILE_TYPE)
@click.argument('app_config', type=CLICK_FILE_TYPE)
def main(global_config, app_config):
run_server(global_config, app_config)
if __name__ == '__main__':
main()
Add support for daemon mode.
|
import cherrypy
import click
from intercom.roots import IntercomRoot
from cherrypy.process.plugins import Daemonizer, PIDFile
def run_server(
global_config_filename,
app_config_filename,
daemon_pid_filename=None):
if daemon_pid_filename is not None:
Daemonizer(cherrypy.engine).subscribe()
PIDFile(cherrypy.engine, daemon_pid_filename).subscribe()
cherrypy.config.update(global_config_filename)
cherrypy.quickstart(root=IntercomRoot(), config=app_config_filename)
@click.command()
@click.argument('global-config', type=click.Path(exists=True, dir_okay=False))
@click.argument('app-config', type=click.Path(exists=True, dir_okay=False))
@click.option('--daemon-pid-file', type=click.Path(dir_okay=False))
def main(global_config, app_config, daemon_pid_file):
run_server(global_config, app_config, daemon_pid_file)
if __name__ == '__main__':
main()
|
<commit_before>import cherrypy
import click
from intercom.roots import IntercomRoot
CLICK_FILE_TYPE = click.Path(exists=True, dir_okay=False)
def run_server(global_config_filename, app_config_filename):
cherrypy.config.update(global_config_filename)
cherrypy.quickstart(root=IntercomRoot(), config=app_config_filename)
@click.command()
@click.argument('global_config', type=CLICK_FILE_TYPE)
@click.argument('app_config', type=CLICK_FILE_TYPE)
def main(global_config, app_config):
run_server(global_config, app_config)
if __name__ == '__main__':
main()
<commit_msg>Add support for daemon mode.<commit_after>
|
import cherrypy
import click
from intercom.roots import IntercomRoot
from cherrypy.process.plugins import Daemonizer, PIDFile
def run_server(
global_config_filename,
app_config_filename,
daemon_pid_filename=None):
if daemon_pid_filename is not None:
Daemonizer(cherrypy.engine).subscribe()
PIDFile(cherrypy.engine, daemon_pid_filename).subscribe()
cherrypy.config.update(global_config_filename)
cherrypy.quickstart(root=IntercomRoot(), config=app_config_filename)
@click.command()
@click.argument('global-config', type=click.Path(exists=True, dir_okay=False))
@click.argument('app-config', type=click.Path(exists=True, dir_okay=False))
@click.option('--daemon-pid-file', type=click.Path(dir_okay=False))
def main(global_config, app_config, daemon_pid_file):
run_server(global_config, app_config, daemon_pid_file)
if __name__ == '__main__':
main()
|
import cherrypy
import click
from intercom.roots import IntercomRoot
CLICK_FILE_TYPE = click.Path(exists=True, dir_okay=False)
def run_server(global_config_filename, app_config_filename):
cherrypy.config.update(global_config_filename)
cherrypy.quickstart(root=IntercomRoot(), config=app_config_filename)
@click.command()
@click.argument('global_config', type=CLICK_FILE_TYPE)
@click.argument('app_config', type=CLICK_FILE_TYPE)
def main(global_config, app_config):
run_server(global_config, app_config)
if __name__ == '__main__':
main()
Add support for daemon mode.import cherrypy
import click
from intercom.roots import IntercomRoot
from cherrypy.process.plugins import Daemonizer, PIDFile
def run_server(
global_config_filename,
app_config_filename,
daemon_pid_filename=None):
if daemon_pid_filename is not None:
Daemonizer(cherrypy.engine).subscribe()
PIDFile(cherrypy.engine, daemon_pid_filename).subscribe()
cherrypy.config.update(global_config_filename)
cherrypy.quickstart(root=IntercomRoot(), config=app_config_filename)
@click.command()
@click.argument('global-config', type=click.Path(exists=True, dir_okay=False))
@click.argument('app-config', type=click.Path(exists=True, dir_okay=False))
@click.option('--daemon-pid-file', type=click.Path(dir_okay=False))
def main(global_config, app_config, daemon_pid_file):
run_server(global_config, app_config, daemon_pid_file)
if __name__ == '__main__':
main()
|
<commit_before>import cherrypy
import click
from intercom.roots import IntercomRoot
CLICK_FILE_TYPE = click.Path(exists=True, dir_okay=False)
def run_server(global_config_filename, app_config_filename):
cherrypy.config.update(global_config_filename)
cherrypy.quickstart(root=IntercomRoot(), config=app_config_filename)
@click.command()
@click.argument('global_config', type=CLICK_FILE_TYPE)
@click.argument('app_config', type=CLICK_FILE_TYPE)
def main(global_config, app_config):
run_server(global_config, app_config)
if __name__ == '__main__':
main()
<commit_msg>Add support for daemon mode.<commit_after>import cherrypy
import click
from intercom.roots import IntercomRoot
from cherrypy.process.plugins import Daemonizer, PIDFile
def run_server(
global_config_filename,
app_config_filename,
daemon_pid_filename=None):
if daemon_pid_filename is not None:
Daemonizer(cherrypy.engine).subscribe()
PIDFile(cherrypy.engine, daemon_pid_filename).subscribe()
cherrypy.config.update(global_config_filename)
cherrypy.quickstart(root=IntercomRoot(), config=app_config_filename)
@click.command()
@click.argument('global-config', type=click.Path(exists=True, dir_okay=False))
@click.argument('app-config', type=click.Path(exists=True, dir_okay=False))
@click.option('--daemon-pid-file', type=click.Path(dir_okay=False))
def main(global_config, app_config, daemon_pid_file):
run_server(global_config, app_config, daemon_pid_file)
if __name__ == '__main__':
main()
|
a7dc058cf8a1d08d02b16a635b7a05d93ab42c1f
|
shuup/core/utils/db.py
|
shuup/core/utils/db.py
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import math
def extend_sqlite_functions(connection=None, **kwargs):
"""
Extends SQLite with trigonometry functions
"""
if connection and connection.vendor == 'sqlite':
connection.connection.create_function("sin", 1, math.sin)
connection.connection.create_function("cos", 1, math.cos)
connection.connection.create_function("acos", 1, math.acos)
connection.connection.create_function("degrees", 1, math.degrees)
connection.connection.create_function("radians", 1, math.radians)
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import math
def float_wrap(value, func):
try:
return func(float(value))
except:
return None
def extend_sqlite_functions(connection=None, **kwargs):
"""
Extends SQLite with trigonometry functions
"""
if connection and connection.vendor == 'sqlite':
connection.connection.create_function("sin", 1, lambda x: float_wrap(x, math.sin))
connection.connection.create_function("cos", 1, lambda x: float_wrap(x, math.cos))
connection.connection.create_function("acos", 1, lambda x: float_wrap(x, math.acos))
connection.connection.create_function("degrees", 1, lambda x: float_wrap(x, math.degrees))
connection.connection.create_function("radians", 1, lambda x: float_wrap(x, math.radians))
|
Add wrapper to parse values to float in SQLite
|
Add wrapper to parse values to float in SQLite
Refs TREES-359
|
Python
|
agpl-3.0
|
suutari-ai/shoop,shoopio/shoop,shoopio/shoop,suutari-ai/shoop,suutari-ai/shoop,shoopio/shoop
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import math
def extend_sqlite_functions(connection=None, **kwargs):
"""
Extends SQLite with trigonometry functions
"""
if connection and connection.vendor == 'sqlite':
connection.connection.create_function("sin", 1, math.sin)
connection.connection.create_function("cos", 1, math.cos)
connection.connection.create_function("acos", 1, math.acos)
connection.connection.create_function("degrees", 1, math.degrees)
connection.connection.create_function("radians", 1, math.radians)
Add wrapper to parse values to float in SQLite
Refs TREES-359
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import math
def float_wrap(value, func):
    """Apply *func* to ``float(value)``, returning None when that fails.

    SQLite can hand back NULLs or non-numeric text; rather than aborting the
    query, conversion and math-domain failures are mapped to SQL NULL
    (Python ``None``).

    :param value: raw value received from SQLite (may be None or a string).
    :param func: one-argument numeric function, e.g. ``math.sin``.
    :return: ``func(float(value))`` or None on failure.
    """
    try:
        return func(float(value))
    except (TypeError, ValueError, OverflowError):
        # Narrowed from a bare ``except``: only conversion/domain errors are
        # silenced, not KeyboardInterrupt/SystemExit or unrelated bugs.
        return None
def extend_sqlite_functions(connection=None, **kwargs):
    """Register trigonometry helpers on a SQLite connection.

    SQLite ships without sin/cos/acos/degrees/radians, so each one is bridged
    to the matching function from Python's ``math`` module (via ``float_wrap``
    so bad values become SQL NULL).  Connections from other vendors are left
    untouched.
    """
    if not (connection and connection.vendor == 'sqlite'):
        return
    sqlite_funcs = (
        ("sin", math.sin),
        ("cos", math.cos),
        ("acos", math.acos),
        ("degrees", math.degrees),
        ("radians", math.radians),
    )
    for sql_name, py_func in sqlite_funcs:
        # Default-argument binding freezes py_func for each closure.
        connection.connection.create_function(
            sql_name, 1, lambda x, _func=py_func: float_wrap(x, _func))
|
<commit_before># -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import math
def extend_sqlite_functions(connection=None, **kwargs):
"""
Extends SQLite with trigonometry functions
"""
if connection and connection.vendor == 'sqlite':
connection.connection.create_function("sin", 1, math.sin)
connection.connection.create_function("cos", 1, math.cos)
connection.connection.create_function("acos", 1, math.acos)
connection.connection.create_function("degrees", 1, math.degrees)
connection.connection.create_function("radians", 1, math.radians)
<commit_msg>Add wrapper to parse values to float in SQLite
Refs TREES-359<commit_after>
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import math
def float_wrap(value, func):
try:
return func(float(value))
except:
return None
def extend_sqlite_functions(connection=None, **kwargs):
"""
Extends SQLite with trigonometry functions
"""
if connection and connection.vendor == 'sqlite':
connection.connection.create_function("sin", 1, lambda x: float_wrap(x, math.sin))
connection.connection.create_function("cos", 1, lambda x: float_wrap(x, math.cos))
connection.connection.create_function("acos", 1, lambda x: float_wrap(x, math.acos))
connection.connection.create_function("degrees", 1, lambda x: float_wrap(x, math.degrees))
connection.connection.create_function("radians", 1, lambda x: float_wrap(x, math.radians))
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import math
def extend_sqlite_functions(connection=None, **kwargs):
"""
Extends SQLite with trigonometry functions
"""
if connection and connection.vendor == 'sqlite':
connection.connection.create_function("sin", 1, math.sin)
connection.connection.create_function("cos", 1, math.cos)
connection.connection.create_function("acos", 1, math.acos)
connection.connection.create_function("degrees", 1, math.degrees)
connection.connection.create_function("radians", 1, math.radians)
Add wrapper to parse values to float in SQLite
Refs TREES-359# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import math
def float_wrap(value, func):
try:
return func(float(value))
except:
return None
def extend_sqlite_functions(connection=None, **kwargs):
"""
Extends SQLite with trigonometry functions
"""
if connection and connection.vendor == 'sqlite':
connection.connection.create_function("sin", 1, lambda x: float_wrap(x, math.sin))
connection.connection.create_function("cos", 1, lambda x: float_wrap(x, math.cos))
connection.connection.create_function("acos", 1, lambda x: float_wrap(x, math.acos))
connection.connection.create_function("degrees", 1, lambda x: float_wrap(x, math.degrees))
connection.connection.create_function("radians", 1, lambda x: float_wrap(x, math.radians))
|
<commit_before># -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import math
def extend_sqlite_functions(connection=None, **kwargs):
"""
Extends SQLite with trigonometry functions
"""
if connection and connection.vendor == 'sqlite':
connection.connection.create_function("sin", 1, math.sin)
connection.connection.create_function("cos", 1, math.cos)
connection.connection.create_function("acos", 1, math.acos)
connection.connection.create_function("degrees", 1, math.degrees)
connection.connection.create_function("radians", 1, math.radians)
<commit_msg>Add wrapper to parse values to float in SQLite
Refs TREES-359<commit_after># -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import math
def float_wrap(value, func):
try:
return func(float(value))
except:
return None
def extend_sqlite_functions(connection=None, **kwargs):
"""
Extends SQLite with trigonometry functions
"""
if connection and connection.vendor == 'sqlite':
connection.connection.create_function("sin", 1, lambda x: float_wrap(x, math.sin))
connection.connection.create_function("cos", 1, lambda x: float_wrap(x, math.cos))
connection.connection.create_function("acos", 1, lambda x: float_wrap(x, math.acos))
connection.connection.create_function("degrees", 1, lambda x: float_wrap(x, math.degrees))
connection.connection.create_function("radians", 1, lambda x: float_wrap(x, math.radians))
|
3602759b633f0643979c8f0970e088f29644b758
|
icekit/plugins/brightcove/models.py
|
icekit/plugins/brightcove/models.py
|
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from fluent_contents.models import ContentItem
try:
from django_brightcove.fields import BrightcoveField
except ImportError:
raise NotImplementedError(
_(
'Please install `django_brightcove`to use the icekit.plugins.brightcove plugin.'
)
)
@python_2_unicode_compatible
class BrightcoveItem(ContentItem):
"""
Media from brightcove.
Brightcove is a video editing and management product which can be
found at http://brightcove.com/.
They have in built APIs and players.
The BrightcoveField is a django specific implementation to allow
the embedding of videos. It anticipates the video ID will be used
as a lookup value.
In the template to be rendered you will need to include:
<script
type="text/javascript"
src="http://admin.brightcove.com/js/BrightcoveExperiences.js"
>
</script>
"""
video = BrightcoveField(
help_text=_('Provide the video ID from the brightcove video.')
)
class Meta:
verbose_name = _('Brightcove Video')
verbose_name_plural = _('Brightcove Videos')
def __str__(self):
return str(self.video)
|
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from fluent_contents.models import ContentItem
try:
from django_brightcove.fields import BrightcoveField
except ImportError:
raise NotImplementedError(
_(
'Please install `django_brightcove`to use the icekit.plugins.brightcove plugin.'
)
)
@python_2_unicode_compatible
class BrightcoveItem(ContentItem):
"""
Media from brightcove.
Brightcove is a video editing and management product which can be
found at http://brightcove.com/.
They have in built APIs and players.
The BrightcoveField is a django specific implementation to allow
the embedding of videos. It anticipates the video ID will be used
as a lookup value.
"""
video = BrightcoveField(
help_text=_('Provide the video ID from the brightcove video.')
)
class Meta:
verbose_name = _('Brightcove Video')
verbose_name_plural = _('Brightcove Videos')
def __str__(self):
return str(self.video)
|
Remove comment as media addition automatically happens.
|
Remove comment as media addition automatically happens.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from fluent_contents.models import ContentItem
try:
from django_brightcove.fields import BrightcoveField
except ImportError:
raise NotImplementedError(
_(
'Please install `django_brightcove`to use the icekit.plugins.brightcove plugin.'
)
)
@python_2_unicode_compatible
class BrightcoveItem(ContentItem):
"""
Media from brightcove.
Brightcove is a video editing and management product which can be
found at http://brightcove.com/.
They have in built APIs and players.
The BrightcoveField is a django specific implementation to allow
the embedding of videos. It anticipates the video ID will be used
as a lookup value.
In the template to be rendered you will need to include:
<script
type="text/javascript"
src="http://admin.brightcove.com/js/BrightcoveExperiences.js"
>
</script>
"""
video = BrightcoveField(
help_text=_('Provide the video ID from the brightcove video.')
)
class Meta:
verbose_name = _('Brightcove Video')
verbose_name_plural = _('Brightcove Videos')
def __str__(self):
return str(self.video)
Remove comment as media addition automatically happens.
|
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from fluent_contents.models import ContentItem
try:
from django_brightcove.fields import BrightcoveField
except ImportError:
raise NotImplementedError(
_(
'Please install `django_brightcove`to use the icekit.plugins.brightcove plugin.'
)
)
@python_2_unicode_compatible
class BrightcoveItem(ContentItem):
"""
Media from brightcove.
Brightcove is a video editing and management product which can be
found at http://brightcove.com/.
They have in built APIs and players.
The BrightcoveField is a django specific implementation to allow
the embedding of videos. It anticipates the video ID will be used
as a lookup value.
"""
video = BrightcoveField(
help_text=_('Provide the video ID from the brightcove video.')
)
class Meta:
verbose_name = _('Brightcove Video')
verbose_name_plural = _('Brightcove Videos')
def __str__(self):
return str(self.video)
|
<commit_before>from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from fluent_contents.models import ContentItem
try:
from django_brightcove.fields import BrightcoveField
except ImportError:
raise NotImplementedError(
_(
'Please install `django_brightcove`to use the icekit.plugins.brightcove plugin.'
)
)
@python_2_unicode_compatible
class BrightcoveItem(ContentItem):
"""
Media from brightcove.
Brightcove is a video editing and management product which can be
found at http://brightcove.com/.
They have in built APIs and players.
The BrightcoveField is a django specific implementation to allow
the embedding of videos. It anticipates the video ID will be used
as a lookup value.
In the template to be rendered you will need to include:
<script
type="text/javascript"
src="http://admin.brightcove.com/js/BrightcoveExperiences.js"
>
</script>
"""
video = BrightcoveField(
help_text=_('Provide the video ID from the brightcove video.')
)
class Meta:
verbose_name = _('Brightcove Video')
verbose_name_plural = _('Brightcove Videos')
def __str__(self):
return str(self.video)
<commit_msg>Remove comment as media addition automatically happens.<commit_after>
|
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from fluent_contents.models import ContentItem
try:
from django_brightcove.fields import BrightcoveField
except ImportError:
raise NotImplementedError(
_(
'Please install `django_brightcove`to use the icekit.plugins.brightcove plugin.'
)
)
@python_2_unicode_compatible
class BrightcoveItem(ContentItem):
    """
    Media from brightcove.
    Brightcove is a video editing and management product which can be
    found at http://brightcove.com/.
    They have in built APIs and players.
    The BrightcoveField is a django specific implementation to allow
    the embedding of videos. It anticipates the video ID will be used
    as a lookup value.
    """
    # Brightcove video ID used by django_brightcove to embed the player.
    video = BrightcoveField(
        help_text=_('Provide the video ID from the brightcove video.')
    )
    class Meta:
        # Human-readable names shown in the Django admin.
        verbose_name = _('Brightcove Video')
        verbose_name_plural = _('Brightcove Videos')
    def __str__(self):
        """Return the stored video ID as a string."""
        return str(self.video)
|
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from fluent_contents.models import ContentItem
try:
from django_brightcove.fields import BrightcoveField
except ImportError:
raise NotImplementedError(
_(
'Please install `django_brightcove`to use the icekit.plugins.brightcove plugin.'
)
)
@python_2_unicode_compatible
class BrightcoveItem(ContentItem):
"""
Media from brightcove.
Brightcove is a video editing and management product which can be
found at http://brightcove.com/.
They have in built APIs and players.
The BrightcoveField is a django specific implementation to allow
the embedding of videos. It anticipates the video ID will be used
as a lookup value.
In the template to be rendered you will need to include:
<script
type="text/javascript"
src="http://admin.brightcove.com/js/BrightcoveExperiences.js"
>
</script>
"""
video = BrightcoveField(
help_text=_('Provide the video ID from the brightcove video.')
)
class Meta:
verbose_name = _('Brightcove Video')
verbose_name_plural = _('Brightcove Videos')
def __str__(self):
return str(self.video)
Remove comment as media addition automatically happens.from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from fluent_contents.models import ContentItem
try:
from django_brightcove.fields import BrightcoveField
except ImportError:
raise NotImplementedError(
_(
'Please install `django_brightcove`to use the icekit.plugins.brightcove plugin.'
)
)
@python_2_unicode_compatible
class BrightcoveItem(ContentItem):
"""
Media from brightcove.
Brightcove is a video editing and management product which can be
found at http://brightcove.com/.
They have in built APIs and players.
The BrightcoveField is a django specific implementation to allow
the embedding of videos. It anticipates the video ID will be used
as a lookup value.
"""
video = BrightcoveField(
help_text=_('Provide the video ID from the brightcove video.')
)
class Meta:
verbose_name = _('Brightcove Video')
verbose_name_plural = _('Brightcove Videos')
def __str__(self):
return str(self.video)
|
<commit_before>from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from fluent_contents.models import ContentItem
try:
from django_brightcove.fields import BrightcoveField
except ImportError:
raise NotImplementedError(
_(
'Please install `django_brightcove`to use the icekit.plugins.brightcove plugin.'
)
)
@python_2_unicode_compatible
class BrightcoveItem(ContentItem):
"""
Media from brightcove.
Brightcove is a video editing and management product which can be
found at http://brightcove.com/.
They have in built APIs and players.
The BrightcoveField is a django specific implementation to allow
the embedding of videos. It anticipates the video ID will be used
as a lookup value.
In the template to be rendered you will need to include:
<script
type="text/javascript"
src="http://admin.brightcove.com/js/BrightcoveExperiences.js"
>
</script>
"""
video = BrightcoveField(
help_text=_('Provide the video ID from the brightcove video.')
)
class Meta:
verbose_name = _('Brightcove Video')
verbose_name_plural = _('Brightcove Videos')
def __str__(self):
return str(self.video)
<commit_msg>Remove comment as media addition automatically happens.<commit_after>from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from fluent_contents.models import ContentItem
try:
from django_brightcove.fields import BrightcoveField
except ImportError:
raise NotImplementedError(
_(
'Please install `django_brightcove`to use the icekit.plugins.brightcove plugin.'
)
)
@python_2_unicode_compatible
class BrightcoveItem(ContentItem):
"""
Media from brightcove.
Brightcove is a video editing and management product which can be
found at http://brightcove.com/.
They have in built APIs and players.
The BrightcoveField is a django specific implementation to allow
the embedding of videos. It anticipates the video ID will be used
as a lookup value.
"""
video = BrightcoveField(
help_text=_('Provide the video ID from the brightcove video.')
)
class Meta:
verbose_name = _('Brightcove Video')
verbose_name_plural = _('Brightcove Videos')
def __str__(self):
return str(self.video)
|
aa49e27f1dd385b7cfa3706e4e1a25ad8a72f00e
|
tardis/io/tests/test_decay.py
|
tardis/io/tests/test_decay.py
|
import pytest
import pandas as pd
from tardis.io.decay import IsotopeAbundances
@pytest.fixture
def simple_abundance_model():
index = pd.MultiIndex.from_tuples([(28, 56)],
names=['atomic_number', 'mass_number'])
return IsotopeAbundances([[1.0, 1.0]], index=index)
def test_simple_decay(simple_abundance_model):
1/0
|
import pytest
import pandas as pd
from tardis.io.decay import IsotopeAbundances
from numpy.testing import assert_almost_equal
@pytest.fixture
def simple_abundance_model():
index = pd.MultiIndex.from_tuples([(28, 56)],
names=['atomic_number', 'mass_number'])
return IsotopeAbundances([[1.0, 1.0]], index=index)
def test_simple_decay(simple_abundance_model):
decayed_abundance = simple_abundance_model.decay(100)
assert_almost_equal(decayed_abundance.ix[26, 56][0], 0.55752)
assert_almost_equal(decayed_abundance.ix[26, 56][1], 0.55752)
assert_almost_equal(decayed_abundance.ix[27, 56][0], 0.4423791)
assert_almost_equal(decayed_abundance.ix[27, 56][1], 0.4423791)
assert_almost_equal(decayed_abundance.ix[28, 56][0], 1.1086e-05)
assert_almost_equal(decayed_abundance.ix[28, 56][1], 1.1086e-05)
|
Add unit test for decay method
|
Add unit test for decay method
|
Python
|
bsd-3-clause
|
kaushik94/tardis,kaushik94/tardis,kaushik94/tardis,kaushik94/tardis
|
import pytest
import pandas as pd
from tardis.io.decay import IsotopeAbundances
@pytest.fixture
def simple_abundance_model():
index = pd.MultiIndex.from_tuples([(28, 56)],
names=['atomic_number', 'mass_number'])
return IsotopeAbundances([[1.0, 1.0]], index=index)
def test_simple_decay(simple_abundance_model):
1/0
Add unit test for decay method
|
import pytest
import pandas as pd
from tardis.io.decay import IsotopeAbundances
from numpy.testing import assert_almost_equal
@pytest.fixture
def simple_abundance_model():
index = pd.MultiIndex.from_tuples([(28, 56)],
names=['atomic_number', 'mass_number'])
return IsotopeAbundances([[1.0, 1.0]], index=index)
def test_simple_decay(simple_abundance_model):
decayed_abundance = simple_abundance_model.decay(100)
assert_almost_equal(decayed_abundance.ix[26, 56][0], 0.55752)
assert_almost_equal(decayed_abundance.ix[26, 56][1], 0.55752)
assert_almost_equal(decayed_abundance.ix[27, 56][0], 0.4423791)
assert_almost_equal(decayed_abundance.ix[27, 56][1], 0.4423791)
assert_almost_equal(decayed_abundance.ix[28, 56][0], 1.1086e-05)
assert_almost_equal(decayed_abundance.ix[28, 56][1], 1.1086e-05)
|
<commit_before>import pytest
import pandas as pd
from tardis.io.decay import IsotopeAbundances
@pytest.fixture
def simple_abundance_model():
index = pd.MultiIndex.from_tuples([(28, 56)],
names=['atomic_number', 'mass_number'])
return IsotopeAbundances([[1.0, 1.0]], index=index)
def test_simple_decay(simple_abundance_model):
1/0
<commit_msg>Add unit test for decay method<commit_after>
|
import pytest
import pandas as pd
from tardis.io.decay import IsotopeAbundances
from numpy.testing import assert_almost_equal
@pytest.fixture
def simple_abundance_model():
    """Two-shell abundance model containing only Ni56 at unit abundance."""
    isotope_index = pd.MultiIndex.from_arrays(
        [[28], [56]], names=['atomic_number', 'mass_number'])
    return IsotopeAbundances([[1.0, 1.0]], index=isotope_index)
def test_simple_decay(simple_abundance_model):
    """Decaying pure Ni56 for 100 days yields the expected Ni/Co/Fe 56 mix.

    Checks both shells (columns 0 and 1) for each daughter isotope.
    """
    decayed_abundance = simple_abundance_model.decay(100)
    # ``.ix`` is deprecated and removed in pandas >= 1.0; ``.loc`` performs
    # the same label-based (atomic_number, mass_number) MultiIndex lookup.
    assert_almost_equal(decayed_abundance.loc[(26, 56)][0], 0.55752)
    assert_almost_equal(decayed_abundance.loc[(26, 56)][1], 0.55752)
    assert_almost_equal(decayed_abundance.loc[(27, 56)][0], 0.4423791)
    assert_almost_equal(decayed_abundance.loc[(27, 56)][1], 0.4423791)
    assert_almost_equal(decayed_abundance.loc[(28, 56)][0], 1.1086e-05)
    assert_almost_equal(decayed_abundance.loc[(28, 56)][1], 1.1086e-05)
|
import pytest
import pandas as pd
from tardis.io.decay import IsotopeAbundances
@pytest.fixture
def simple_abundance_model():
index = pd.MultiIndex.from_tuples([(28, 56)],
names=['atomic_number', 'mass_number'])
return IsotopeAbundances([[1.0, 1.0]], index=index)
def test_simple_decay(simple_abundance_model):
1/0
Add unit test for decay methodimport pytest
import pandas as pd
from tardis.io.decay import IsotopeAbundances
from numpy.testing import assert_almost_equal
@pytest.fixture
def simple_abundance_model():
index = pd.MultiIndex.from_tuples([(28, 56)],
names=['atomic_number', 'mass_number'])
return IsotopeAbundances([[1.0, 1.0]], index=index)
def test_simple_decay(simple_abundance_model):
decayed_abundance = simple_abundance_model.decay(100)
assert_almost_equal(decayed_abundance.ix[26, 56][0], 0.55752)
assert_almost_equal(decayed_abundance.ix[26, 56][1], 0.55752)
assert_almost_equal(decayed_abundance.ix[27, 56][0], 0.4423791)
assert_almost_equal(decayed_abundance.ix[27, 56][1], 0.4423791)
assert_almost_equal(decayed_abundance.ix[28, 56][0], 1.1086e-05)
assert_almost_equal(decayed_abundance.ix[28, 56][1], 1.1086e-05)
|
<commit_before>import pytest
import pandas as pd
from tardis.io.decay import IsotopeAbundances
@pytest.fixture
def simple_abundance_model():
index = pd.MultiIndex.from_tuples([(28, 56)],
names=['atomic_number', 'mass_number'])
return IsotopeAbundances([[1.0, 1.0]], index=index)
def test_simple_decay(simple_abundance_model):
1/0
<commit_msg>Add unit test for decay method<commit_after>import pytest
import pandas as pd
from tardis.io.decay import IsotopeAbundances
from numpy.testing import assert_almost_equal
@pytest.fixture
def simple_abundance_model():
index = pd.MultiIndex.from_tuples([(28, 56)],
names=['atomic_number', 'mass_number'])
return IsotopeAbundances([[1.0, 1.0]], index=index)
def test_simple_decay(simple_abundance_model):
decayed_abundance = simple_abundance_model.decay(100)
assert_almost_equal(decayed_abundance.ix[26, 56][0], 0.55752)
assert_almost_equal(decayed_abundance.ix[26, 56][1], 0.55752)
assert_almost_equal(decayed_abundance.ix[27, 56][0], 0.4423791)
assert_almost_equal(decayed_abundance.ix[27, 56][1], 0.4423791)
assert_almost_equal(decayed_abundance.ix[28, 56][0], 1.1086e-05)
assert_almost_equal(decayed_abundance.ix[28, 56][1], 1.1086e-05)
|
f86c925604356b25a8c5c0c71644f0df6f1b48f8
|
setup_directory.py
|
setup_directory.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
@contextmanager
def change_directory(path):
old_cwd = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(old_cwd)
def download_install_script():
location = os.path.join(
tempfile.gettempdir(),
os.path.split(MINICONDA_URL)[-1])
with open(location, 'wb') as outfile:
response = urllib2.urlopen(MINICONDA_URL)
data = response.read()
outfile.write(data)
return location
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
@contextmanager
def change_directory(path):
old_cwd = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(old_cwd)
def download_install_script():
location = os.path.join(
tempfile.gettempdir(),
os.path.split(MINICONDA_URL)[-1])
with open(location, 'wb') as outfile:
response = urllib2.urlopen(MINICONDA_URL)
data = response.read()
outfile.write(data)
return location
def install_miniconda(script_path, name):
dest = os.path.join(
os.getcwd(), name)
cmd = ['bash', script_path, '-b', '-f', '-p', dest]
sp.check_call(cmd)
|
Add function to install miniconda
|
Add function to install miniconda
|
Python
|
mit
|
NGTS/pipeline-output-analysis-setup-script
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
@contextmanager
def change_directory(path):
old_cwd = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(old_cwd)
def download_install_script():
location = os.path.join(
tempfile.gettempdir(),
os.path.split(MINICONDA_URL)[-1])
with open(location, 'wb') as outfile:
response = urllib2.urlopen(MINICONDA_URL)
data = response.read()
outfile.write(data)
return location
Add function to install miniconda
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
@contextmanager
def change_directory(path):
old_cwd = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(old_cwd)
def download_install_script():
location = os.path.join(
tempfile.gettempdir(),
os.path.split(MINICONDA_URL)[-1])
with open(location, 'wb') as outfile:
response = urllib2.urlopen(MINICONDA_URL)
data = response.read()
outfile.write(data)
return location
def install_miniconda(script_path, name):
dest = os.path.join(
os.getcwd(), name)
cmd = ['bash', script_path, '-b', '-f', '-p', dest]
sp.check_call(cmd)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
@contextmanager
def change_directory(path):
old_cwd = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(old_cwd)
def download_install_script():
location = os.path.join(
tempfile.gettempdir(),
os.path.split(MINICONDA_URL)[-1])
with open(location, 'wb') as outfile:
response = urllib2.urlopen(MINICONDA_URL)
data = response.read()
outfile.write(data)
return location
<commit_msg>Add function to install miniconda<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
@contextmanager
def change_directory(path):
old_cwd = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(old_cwd)
def download_install_script():
location = os.path.join(
tempfile.gettempdir(),
os.path.split(MINICONDA_URL)[-1])
with open(location, 'wb') as outfile:
response = urllib2.urlopen(MINICONDA_URL)
data = response.read()
outfile.write(data)
return location
def install_miniconda(script_path, name):
dest = os.path.join(
os.getcwd(), name)
cmd = ['bash', script_path, '-b', '-f', '-p', dest]
sp.check_call(cmd)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
@contextmanager
def change_directory(path):
old_cwd = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(old_cwd)
def download_install_script():
location = os.path.join(
tempfile.gettempdir(),
os.path.split(MINICONDA_URL)[-1])
with open(location, 'wb') as outfile:
response = urllib2.urlopen(MINICONDA_URL)
data = response.read()
outfile.write(data)
return location
Add function to install miniconda#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
@contextmanager
def change_directory(path):
old_cwd = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(old_cwd)
def download_install_script():
location = os.path.join(
tempfile.gettempdir(),
os.path.split(MINICONDA_URL)[-1])
with open(location, 'wb') as outfile:
response = urllib2.urlopen(MINICONDA_URL)
data = response.read()
outfile.write(data)
return location
def install_miniconda(script_path, name):
dest = os.path.join(
os.getcwd(), name)
cmd = ['bash', script_path, '-b', '-f', '-p', dest]
sp.check_call(cmd)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
@contextmanager
def change_directory(path):
old_cwd = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(old_cwd)
def download_install_script():
location = os.path.join(
tempfile.gettempdir(),
os.path.split(MINICONDA_URL)[-1])
with open(location, 'wb') as outfile:
response = urllib2.urlopen(MINICONDA_URL)
data = response.read()
outfile.write(data)
return location
<commit_msg>Add function to install miniconda<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import os
import subprocess as sp
from contextlib import contextmanager
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
MINICONDA_URL = 'https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh'
@contextmanager
def change_directory(path):
old_cwd = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(old_cwd)
def download_install_script():
location = os.path.join(
tempfile.gettempdir(),
os.path.split(MINICONDA_URL)[-1])
with open(location, 'wb') as outfile:
response = urllib2.urlopen(MINICONDA_URL)
data = response.read()
outfile.write(data)
return location
def install_miniconda(script_path, name):
dest = os.path.join(
os.getcwd(), name)
cmd = ['bash', script_path, '-b', '-f', '-p', dest]
sp.check_call(cmd)
|
f965e5d32e79789cac0ac2f2bae80a399cbeea8a
|
sirius/__init__.py
|
sirius/__init__.py
|
import os as _os
from . import LI_V01_01
from . import TB_V01_03
from . import BO_V03_02
from . import TS_V03_03
from . import SI_V21_02
from . import coordinate_system
from . import discs
with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f:
__version__ = _f.read().strip()
__all__ = ['LI_V01_01', 'TB_V01_03', 'BO_V03_02', 'TS_V03_03', 'SI_V21_02']
li = LI_V01_01
tb = TB_V01_03
bo = BO_V03_02
ts = TS_V03_03
si = SI_V21_02
|
import os as _os
from . import LI_V01_01
from . import TB_V01_03
from . import BO_V03_02
from . import TS_V03_03
from . import SI_V21_02
from . import coordinate_system
with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f:
__version__ = _f.read().strip()
__all__ = ['LI_V01_01', 'TB_V01_03', 'BO_V03_02', 'TS_V03_03', 'SI_V21_02']
li = LI_V01_01
tb = TB_V01_03
bo = BO_V03_02
ts = TS_V03_03
si = SI_V21_02
|
Fix bug in deletion of discs
|
Fix bug in deletion of discs
|
Python
|
mit
|
lnls-fac/sirius
|
import os as _os
from . import LI_V01_01
from . import TB_V01_03
from . import BO_V03_02
from . import TS_V03_03
from . import SI_V21_02
from . import coordinate_system
from . import discs
with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f:
__version__ = _f.read().strip()
__all__ = ['LI_V01_01', 'TB_V01_03', 'BO_V03_02', 'TS_V03_03', 'SI_V21_02']
li = LI_V01_01
tb = TB_V01_03
bo = BO_V03_02
ts = TS_V03_03
si = SI_V21_02
Fix bug in deletion of discs
|
import os as _os
from . import LI_V01_01
from . import TB_V01_03
from . import BO_V03_02
from . import TS_V03_03
from . import SI_V21_02
from . import coordinate_system
with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f:
__version__ = _f.read().strip()
__all__ = ['LI_V01_01', 'TB_V01_03', 'BO_V03_02', 'TS_V03_03', 'SI_V21_02']
li = LI_V01_01
tb = TB_V01_03
bo = BO_V03_02
ts = TS_V03_03
si = SI_V21_02
|
<commit_before>import os as _os
from . import LI_V01_01
from . import TB_V01_03
from . import BO_V03_02
from . import TS_V03_03
from . import SI_V21_02
from . import coordinate_system
from . import discs
with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f:
__version__ = _f.read().strip()
__all__ = ['LI_V01_01', 'TB_V01_03', 'BO_V03_02', 'TS_V03_03', 'SI_V21_02']
li = LI_V01_01
tb = TB_V01_03
bo = BO_V03_02
ts = TS_V03_03
si = SI_V21_02
<commit_msg>Fix bug in deletion of discs<commit_after>
|
import os as _os
from . import LI_V01_01
from . import TB_V01_03
from . import BO_V03_02
from . import TS_V03_03
from . import SI_V21_02
from . import coordinate_system
with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f:
__version__ = _f.read().strip()
__all__ = ['LI_V01_01', 'TB_V01_03', 'BO_V03_02', 'TS_V03_03', 'SI_V21_02']
li = LI_V01_01
tb = TB_V01_03
bo = BO_V03_02
ts = TS_V03_03
si = SI_V21_02
|
import os as _os
from . import LI_V01_01
from . import TB_V01_03
from . import BO_V03_02
from . import TS_V03_03
from . import SI_V21_02
from . import coordinate_system
from . import discs
with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f:
__version__ = _f.read().strip()
__all__ = ['LI_V01_01', 'TB_V01_03', 'BO_V03_02', 'TS_V03_03', 'SI_V21_02']
li = LI_V01_01
tb = TB_V01_03
bo = BO_V03_02
ts = TS_V03_03
si = SI_V21_02
Fix bug in deletion of discsimport os as _os
from . import LI_V01_01
from . import TB_V01_03
from . import BO_V03_02
from . import TS_V03_03
from . import SI_V21_02
from . import coordinate_system
with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f:
__version__ = _f.read().strip()
__all__ = ['LI_V01_01', 'TB_V01_03', 'BO_V03_02', 'TS_V03_03', 'SI_V21_02']
li = LI_V01_01
tb = TB_V01_03
bo = BO_V03_02
ts = TS_V03_03
si = SI_V21_02
|
<commit_before>import os as _os
from . import LI_V01_01
from . import TB_V01_03
from . import BO_V03_02
from . import TS_V03_03
from . import SI_V21_02
from . import coordinate_system
from . import discs
with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f:
__version__ = _f.read().strip()
__all__ = ['LI_V01_01', 'TB_V01_03', 'BO_V03_02', 'TS_V03_03', 'SI_V21_02']
li = LI_V01_01
tb = TB_V01_03
bo = BO_V03_02
ts = TS_V03_03
si = SI_V21_02
<commit_msg>Fix bug in deletion of discs<commit_after>import os as _os
from . import LI_V01_01
from . import TB_V01_03
from . import BO_V03_02
from . import TS_V03_03
from . import SI_V21_02
from . import coordinate_system
with open(_os.path.join(__path__[0], 'VERSION'), 'r') as _f:
__version__ = _f.read().strip()
__all__ = ['LI_V01_01', 'TB_V01_03', 'BO_V03_02', 'TS_V03_03', 'SI_V21_02']
li = LI_V01_01
tb = TB_V01_03
bo = BO_V03_02
ts = TS_V03_03
si = SI_V21_02
|
f0e76bbe2f73514b5663f65dfdc394e02485bc33
|
hello.py
|
hello.py
|
# Hello World
import sys
def hello(thing):
print "Hello {}".format(thing)
return
if __name__ == "__main__":
sys.exit(hello())
|
# Hello World
import sys
def hello(thing):
print "Hello {}".format(thing)
return
if __name__ == "__main__":
user_thing = sys.argv[-1]
sys.exit(hello(user_thing))
|
Allow user to specify thing.
|
Allow user to specify thing.
|
Python
|
mit
|
fgalloway/hg-git-test
|
# Hello World
import sys
def hello(thing):
print "Hello {}".format(thing)
return
if __name__ == "__main__":
sys.exit(hello())
Allow user to specify thing.
|
# Hello World
import sys
def hello(thing):
print "Hello {}".format(thing)
return
if __name__ == "__main__":
user_thing = sys.argv[-1]
sys.exit(hello(user_thing))
|
<commit_before># Hello World
import sys
def hello(thing):
print "Hello {}".format(thing)
return
if __name__ == "__main__":
sys.exit(hello())
<commit_msg>Allow user to specify thing.<commit_after>
|
# Hello World
import sys
def hello(thing):
print "Hello {}".format(thing)
return
if __name__ == "__main__":
user_thing = sys.argv[-1]
sys.exit(hello(user_thing))
|
# Hello World
import sys
def hello(thing):
print "Hello {}".format(thing)
return
if __name__ == "__main__":
sys.exit(hello())
Allow user to specify thing.# Hello World
import sys
def hello(thing):
print "Hello {}".format(thing)
return
if __name__ == "__main__":
user_thing = sys.argv[-1]
sys.exit(hello(user_thing))
|
<commit_before># Hello World
import sys
def hello(thing):
print "Hello {}".format(thing)
return
if __name__ == "__main__":
sys.exit(hello())
<commit_msg>Allow user to specify thing.<commit_after># Hello World
import sys
def hello(thing):
print "Hello {}".format(thing)
return
if __name__ == "__main__":
user_thing = sys.argv[-1]
sys.exit(hello(user_thing))
|
9f64d5e2f9447233df8d3b841c519196c3213e05
|
pyflation/analysis/tests/test_deltaprel.py
|
pyflation/analysis/tests/test_deltaprel.py
|
''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_calc(self):
"""Test results of calculation."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
|
''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_scalar(self):
"""Test results of 1x1x1 calculation."""
arr = deltaprel.soundspeeds(3, 2, 0.5)
assert_(arr == 3)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
|
Add test for scalar values.
|
Add test for scalar values.
|
Python
|
bsd-3-clause
|
ihuston/pyflation,ihuston/pyflation
|
''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_calc(self):
"""Test results of calculation."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
Add test for scalar values.
|
''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_scalar(self):
"""Test results of 1x1x1 calculation."""
arr = deltaprel.soundspeeds(3, 2, 0.5)
assert_(arr == 3)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
|
<commit_before>''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_calc(self):
"""Test results of calculation."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
<commit_msg>Add test for scalar values.<commit_after>
|
''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_scalar(self):
"""Test results of 1x1x1 calculation."""
arr = deltaprel.soundspeeds(3, 2, 0.5)
assert_(arr == 3)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
|
''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_calc(self):
"""Test results of calculation."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
Add test for scalar values.''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_scalar(self):
"""Test results of 1x1x1 calculation."""
arr = deltaprel.soundspeeds(3, 2, 0.5)
assert_(arr == 3)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
|
<commit_before>''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_calc(self):
"""Test results of calculation."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
<commit_msg>Add test for scalar values.<commit_after>''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_scalar(self):
"""Test results of 1x1x1 calculation."""
arr = deltaprel.soundspeeds(3, 2, 0.5)
assert_(arr == 3)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
|
aad8bf692884a2074ad74601e2547ced4ae569bd
|
comics/comics/nemi.py
|
comics/comics/nemi.py
|
from comics.aggregator.crawler import DagbladetCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = 'Nemi (db.no)'
language = 'no'
url = 'http://www.dagbladet.no/tegneserie/nemi/'
start_date = '1997-01-01'
rights = 'Lise Myhre'
class Crawler(DagbladetCrawlerBase):
history_capable_days = 14
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = 'Europe/Oslo'
def crawl(self, pub_date):
return self.crawl_helper('nemi', pub_date)
|
from comics.aggregator.crawler import DagbladetCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = 'Nemi (db.no)'
language = 'no'
url = 'http://www.dagbladet.no/tegneserie/nemi/'
start_date = '1997-01-01'
rights = 'Lise Myhre'
class Crawler(DagbladetCrawlerBase):
history_capable_days = 30
schedule = 'Mo,Tu,We,Th,Fr,Sa'
time_zone = 'Europe/Oslo'
def crawl(self, pub_date):
return self.crawl_helper('nemi', pub_date)
|
Increase history capability and correct schedule
|
Increase history capability and correct schedule
|
Python
|
agpl-3.0
|
jodal/comics,jodal/comics,jodal/comics,datagutten/comics,datagutten/comics,datagutten/comics,jodal/comics,datagutten/comics
|
from comics.aggregator.crawler import DagbladetCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = 'Nemi (db.no)'
language = 'no'
url = 'http://www.dagbladet.no/tegneserie/nemi/'
start_date = '1997-01-01'
rights = 'Lise Myhre'
class Crawler(DagbladetCrawlerBase):
history_capable_days = 14
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = 'Europe/Oslo'
def crawl(self, pub_date):
return self.crawl_helper('nemi', pub_date)
Increase history capability and correct schedule
|
from comics.aggregator.crawler import DagbladetCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = 'Nemi (db.no)'
language = 'no'
url = 'http://www.dagbladet.no/tegneserie/nemi/'
start_date = '1997-01-01'
rights = 'Lise Myhre'
class Crawler(DagbladetCrawlerBase):
history_capable_days = 30
schedule = 'Mo,Tu,We,Th,Fr,Sa'
time_zone = 'Europe/Oslo'
def crawl(self, pub_date):
return self.crawl_helper('nemi', pub_date)
|
<commit_before>from comics.aggregator.crawler import DagbladetCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = 'Nemi (db.no)'
language = 'no'
url = 'http://www.dagbladet.no/tegneserie/nemi/'
start_date = '1997-01-01'
rights = 'Lise Myhre'
class Crawler(DagbladetCrawlerBase):
history_capable_days = 14
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = 'Europe/Oslo'
def crawl(self, pub_date):
return self.crawl_helper('nemi', pub_date)
<commit_msg>Increase history capability and correct schedule<commit_after>
|
from comics.aggregator.crawler import DagbladetCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = 'Nemi (db.no)'
language = 'no'
url = 'http://www.dagbladet.no/tegneserie/nemi/'
start_date = '1997-01-01'
rights = 'Lise Myhre'
class Crawler(DagbladetCrawlerBase):
history_capable_days = 30
schedule = 'Mo,Tu,We,Th,Fr,Sa'
time_zone = 'Europe/Oslo'
def crawl(self, pub_date):
return self.crawl_helper('nemi', pub_date)
|
from comics.aggregator.crawler import DagbladetCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = 'Nemi (db.no)'
language = 'no'
url = 'http://www.dagbladet.no/tegneserie/nemi/'
start_date = '1997-01-01'
rights = 'Lise Myhre'
class Crawler(DagbladetCrawlerBase):
history_capable_days = 14
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = 'Europe/Oslo'
def crawl(self, pub_date):
return self.crawl_helper('nemi', pub_date)
Increase history capability and correct schedulefrom comics.aggregator.crawler import DagbladetCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = 'Nemi (db.no)'
language = 'no'
url = 'http://www.dagbladet.no/tegneserie/nemi/'
start_date = '1997-01-01'
rights = 'Lise Myhre'
class Crawler(DagbladetCrawlerBase):
history_capable_days = 30
schedule = 'Mo,Tu,We,Th,Fr,Sa'
time_zone = 'Europe/Oslo'
def crawl(self, pub_date):
return self.crawl_helper('nemi', pub_date)
|
<commit_before>from comics.aggregator.crawler import DagbladetCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = 'Nemi (db.no)'
language = 'no'
url = 'http://www.dagbladet.no/tegneserie/nemi/'
start_date = '1997-01-01'
rights = 'Lise Myhre'
class Crawler(DagbladetCrawlerBase):
history_capable_days = 14
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = 'Europe/Oslo'
def crawl(self, pub_date):
return self.crawl_helper('nemi', pub_date)
<commit_msg>Increase history capability and correct schedule<commit_after>from comics.aggregator.crawler import DagbladetCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = 'Nemi (db.no)'
language = 'no'
url = 'http://www.dagbladet.no/tegneserie/nemi/'
start_date = '1997-01-01'
rights = 'Lise Myhre'
class Crawler(DagbladetCrawlerBase):
history_capable_days = 30
schedule = 'Mo,Tu,We,Th,Fr,Sa'
time_zone = 'Europe/Oslo'
def crawl(self, pub_date):
return self.crawl_helper('nemi', pub_date)
|
5cb049385aa3d3ae57d18dc4b7d12f3d5e6f1ae4
|
tests/intervaltrigger_test.py
|
tests/intervaltrigger_test.py
|
import time
import mock
import pytest
import spreadsplug.intervaltrigger as intervaltrigger
@pytest.fixture
def plugin(config):
config['intervaltrigger']['interval'] = 0.1
return intervaltrigger.IntervalTrigger(config)
def test_trigger_loop(plugin):
cbmock = mock.Mock()
plugin.start_trigger_loop(cbmock)
time.sleep(0.55)
plugin.stop_trigger_loop()
assert cbmock.call_count == 5
|
import time
import mock
import pytest
import spreadsplug.intervaltrigger as intervaltrigger
@pytest.fixture
def plugin(config):
config['intervaltrigger']['interval'] = 0.1
return intervaltrigger.IntervalTrigger(config)
def test_trigger_loop(plugin):
cbmock = mock.Mock()
plugin.start_trigger_loop(cbmock)
time.sleep(0.6)
plugin.stop_trigger_loop()
assert cbmock.call_count == 5
|
Increase sleep-time in intervaltrigger tests
|
Increase sleep-time in intervaltrigger tests
|
Python
|
agpl-3.0
|
jbaiter/spreads,DIYBookScanner/spreads,nafraf/spreads,adongy/spreads,jbaiter/spreads,miloh/spreads,gareth8118/spreads,gareth8118/spreads,nafraf/spreads,gareth8118/spreads,jbaiter/spreads,DIYBookScanner/spreads,DIYBookScanner/spreads,nafraf/spreads,adongy/spreads,miloh/spreads,adongy/spreads,miloh/spreads
|
import time
import mock
import pytest
import spreadsplug.intervaltrigger as intervaltrigger
@pytest.fixture
def plugin(config):
config['intervaltrigger']['interval'] = 0.1
return intervaltrigger.IntervalTrigger(config)
def test_trigger_loop(plugin):
cbmock = mock.Mock()
plugin.start_trigger_loop(cbmock)
time.sleep(0.55)
plugin.stop_trigger_loop()
assert cbmock.call_count == 5
Increase sleep-time in intervaltrigger tests
|
import time
import mock
import pytest
import spreadsplug.intervaltrigger as intervaltrigger
@pytest.fixture
def plugin(config):
config['intervaltrigger']['interval'] = 0.1
return intervaltrigger.IntervalTrigger(config)
def test_trigger_loop(plugin):
cbmock = mock.Mock()
plugin.start_trigger_loop(cbmock)
time.sleep(0.6)
plugin.stop_trigger_loop()
assert cbmock.call_count == 5
|
<commit_before>import time
import mock
import pytest
import spreadsplug.intervaltrigger as intervaltrigger
@pytest.fixture
def plugin(config):
config['intervaltrigger']['interval'] = 0.1
return intervaltrigger.IntervalTrigger(config)
def test_trigger_loop(plugin):
cbmock = mock.Mock()
plugin.start_trigger_loop(cbmock)
time.sleep(0.55)
plugin.stop_trigger_loop()
assert cbmock.call_count == 5
<commit_msg>Increase sleep-time in intervaltrigger tests<commit_after>
|
import time
import mock
import pytest
import spreadsplug.intervaltrigger as intervaltrigger
@pytest.fixture
def plugin(config):
config['intervaltrigger']['interval'] = 0.1
return intervaltrigger.IntervalTrigger(config)
def test_trigger_loop(plugin):
cbmock = mock.Mock()
plugin.start_trigger_loop(cbmock)
time.sleep(0.6)
plugin.stop_trigger_loop()
assert cbmock.call_count == 5
|
import time
import mock
import pytest
import spreadsplug.intervaltrigger as intervaltrigger
@pytest.fixture
def plugin(config):
config['intervaltrigger']['interval'] = 0.1
return intervaltrigger.IntervalTrigger(config)
def test_trigger_loop(plugin):
cbmock = mock.Mock()
plugin.start_trigger_loop(cbmock)
time.sleep(0.55)
plugin.stop_trigger_loop()
assert cbmock.call_count == 5
Increase sleep-time in intervaltrigger testsimport time
import mock
import pytest
import spreadsplug.intervaltrigger as intervaltrigger
@pytest.fixture
def plugin(config):
config['intervaltrigger']['interval'] = 0.1
return intervaltrigger.IntervalTrigger(config)
def test_trigger_loop(plugin):
cbmock = mock.Mock()
plugin.start_trigger_loop(cbmock)
time.sleep(0.6)
plugin.stop_trigger_loop()
assert cbmock.call_count == 5
|
<commit_before>import time
import mock
import pytest
import spreadsplug.intervaltrigger as intervaltrigger
@pytest.fixture
def plugin(config):
config['intervaltrigger']['interval'] = 0.1
return intervaltrigger.IntervalTrigger(config)
def test_trigger_loop(plugin):
cbmock = mock.Mock()
plugin.start_trigger_loop(cbmock)
time.sleep(0.55)
plugin.stop_trigger_loop()
assert cbmock.call_count == 5
<commit_msg>Increase sleep-time in intervaltrigger tests<commit_after>import time
import mock
import pytest
import spreadsplug.intervaltrigger as intervaltrigger
@pytest.fixture
def plugin(config):
config['intervaltrigger']['interval'] = 0.1
return intervaltrigger.IntervalTrigger(config)
def test_trigger_loop(plugin):
cbmock = mock.Mock()
plugin.start_trigger_loop(cbmock)
time.sleep(0.6)
plugin.stop_trigger_loop()
assert cbmock.call_count == 5
|
9f514f3bde4fd5ca901a08d59e76735861cb9bc9
|
wsgi.py
|
wsgi.py
|
import os
import sys
current_directory = os.path.dirname(__file__)
module_name = os.path.basename(current_directory)
activate_this = '/home/met/venv/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
sys.path.append(current_directory)
os.environ['DJANGO_SETTINGS_MODULE'] = 'met.settings'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
|
import os
import sys
current_directory = os.path.dirname(__file__)
module_name = os.path.basename(current_directory)
activate_this = '/home/met/met-venv/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
sys.path.append(current_directory)
os.environ['DJANGO_SETTINGS_MODULE'] = 'met.settings'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
|
Relocate virtualenv location to documentation
|
Relocate virtualenv location to documentation
|
Python
|
bsd-2-clause
|
biancini/met,biancini/met,biancini/met,GEANT/met,TERENA/met,TERENA/met,biancini/met,GEANT/met,TERENA/met,TERENA/met,GEANT/met,GEANT/met
|
import os
import sys
current_directory = os.path.dirname(__file__)
module_name = os.path.basename(current_directory)
activate_this = '/home/met/venv/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
sys.path.append(current_directory)
os.environ['DJANGO_SETTINGS_MODULE'] = 'met.settings'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
Relocate virtualenv location to documentation
|
import os
import sys
current_directory = os.path.dirname(__file__)
module_name = os.path.basename(current_directory)
activate_this = '/home/met/met-venv/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
sys.path.append(current_directory)
os.environ['DJANGO_SETTINGS_MODULE'] = 'met.settings'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
|
<commit_before>import os
import sys
current_directory = os.path.dirname(__file__)
module_name = os.path.basename(current_directory)
activate_this = '/home/met/venv/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
sys.path.append(current_directory)
os.environ['DJANGO_SETTINGS_MODULE'] = 'met.settings'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
<commit_msg>Relocate virtualenv location to documentation<commit_after>
|
import os
import sys
current_directory = os.path.dirname(__file__)
module_name = os.path.basename(current_directory)
activate_this = '/home/met/met-venv/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
sys.path.append(current_directory)
os.environ['DJANGO_SETTINGS_MODULE'] = 'met.settings'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
|
import os
import sys
current_directory = os.path.dirname(__file__)
module_name = os.path.basename(current_directory)
activate_this = '/home/met/venv/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
sys.path.append(current_directory)
os.environ['DJANGO_SETTINGS_MODULE'] = 'met.settings'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
Relocate virtualenv location to documentationimport os
import sys
current_directory = os.path.dirname(__file__)
module_name = os.path.basename(current_directory)
activate_this = '/home/met/met-venv/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
sys.path.append(current_directory)
os.environ['DJANGO_SETTINGS_MODULE'] = 'met.settings'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
|
<commit_before>import os
import sys
current_directory = os.path.dirname(__file__)
module_name = os.path.basename(current_directory)
activate_this = '/home/met/venv/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
sys.path.append(current_directory)
os.environ['DJANGO_SETTINGS_MODULE'] = 'met.settings'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
<commit_msg>Relocate virtualenv location to documentation<commit_after>import os
import sys
current_directory = os.path.dirname(__file__)
module_name = os.path.basename(current_directory)
activate_this = '/home/met/met-venv/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
sys.path.append(current_directory)
os.environ['DJANGO_SETTINGS_MODULE'] = 'met.settings'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
|
28e00395cd29dee1449ec522b55d08f68518eb70
|
pyoctree/__init__.py
|
pyoctree/__init__.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2015 Michael Hogg
# This file is part of pyoctree - See LICENSE.txt for information on usage and redistribution
# Version
import version
__version__ = version.__version__
|
# -*- coding: utf-8 -*-
# Copyright (C) 2017 Michael Hogg
# This file is part of pyoctree - See LICENSE.txt for information on usage and redistribution
# Version
from .version import __version__
__version__ = version.__version__
|
Fix import bug in Python 3
|
Fix import bug in Python 3
|
Python
|
mit
|
mhogg/pyoctree,mhogg/pyoctree
|
# -*- coding: utf-8 -*-
# Copyright (C) 2015 Michael Hogg
# This file is part of pyoctree - See LICENSE.txt for information on usage and redistribution
# Version
import version
__version__ = version.__version__
Fix import bug in Python 3
|
# -*- coding: utf-8 -*-
# Copyright (C) 2017 Michael Hogg
# This file is part of pyoctree - See LICENSE.txt for information on usage and redistribution
# Version
from .version import __version__
__version__ = version.__version__
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (C) 2015 Michael Hogg
# This file is part of pyoctree - See LICENSE.txt for information on usage and redistribution
# Version
import version
__version__ = version.__version__
<commit_msg>Fix import bug in Python 3<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (C) 2017 Michael Hogg
# This file is part of pyoctree - See LICENSE.txt for information on usage and redistribution
# Version
from .version import __version__
__version__ = version.__version__
|
# -*- coding: utf-8 -*-
# Copyright (C) 2015 Michael Hogg
# This file is part of pyoctree - See LICENSE.txt for information on usage and redistribution
# Version
import version
__version__ = version.__version__
Fix import bug in Python 3# -*- coding: utf-8 -*-
# Copyright (C) 2017 Michael Hogg
# This file is part of pyoctree - See LICENSE.txt for information on usage and redistribution
# Version
from .version import __version__
__version__ = version.__version__
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (C) 2015 Michael Hogg
# This file is part of pyoctree - See LICENSE.txt for information on usage and redistribution
# Version
import version
__version__ = version.__version__
<commit_msg>Fix import bug in Python 3<commit_after># -*- coding: utf-8 -*-
# Copyright (C) 2017 Michael Hogg
# This file is part of pyoctree - See LICENSE.txt for information on usage and redistribution
# Version
from .version import __version__
__version__ = version.__version__
|
4a1bb5f658eb3fa7b419ca5eb23217cc21db003a
|
ssl-cert-parse.py
|
ssl-cert-parse.py
|
#!/usr/bin/env python3
import datetime
import ssl
import OpenSSL
def GetCert(SiteName, Port):
return ssl.get_server_certificate((SiteName, Port))
def ParseCert(CertRaw):
Cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, CertRaw)
CertSubject = str(Cert.get_subject())[18:-2]
CertStartDate = datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1],
'%Y%m%d%H%M%SZ')
CertEndDate = datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1],
'%Y%m%d%H%M%SZ')
CertIssuer = str(Cert.get_issuer())[18:-2]
return {'CertSubject': CertIssuer, 'CertStartDate': CertStartDate,
'CertEndDate': CertEndDate, 'CertIssuer': CertIssuer}
CertRaw = GetCert('some.domain.tld', 443)
print(CertRaw)
Out = ParseCert(CertRaw)
print(Out)
print(Out['CertSubject'])
print(Out['CertStartDate'])
|
#!/usr/bin/env python3
import datetime
import ssl
import OpenSSL
def GetCert(SiteName, Port):
return ssl.get_server_certificate((SiteName, Port))
def ParseCert(CertRaw):
Cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, CertRaw)
CertSubject = str(Cert.get_subject())[18:-2]
CertStartDate = datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1],
'%Y%m%d%H%M%SZ')
CertEndDate = datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1],
'%Y%m%d%H%M%SZ')
CertIssuer = str(Cert.get_issuer())[18:-2]
return {'CertSubject': CertSubject, 'CertStartDate': CertStartDate,
'CertEndDate': CertEndDate, 'CertIssuer': CertIssuer}
CertRaw = GetCert('some.domain.tld', 443)
print(CertRaw)
Out = ParseCert(CertRaw)
print(Out)
print(Out['CertSubject'])
print(Out['CertStartDate'])
|
Fix wrong value assignment for CertSubject in the return statement of ParseCert() function
|
Fix wrong value assignment for CertSubject in the return statement of ParseCert() function
|
Python
|
apache-2.0
|
ivuk/ssl-cert-parse
|
#!/usr/bin/env python3
import datetime
import ssl
import OpenSSL
def GetCert(SiteName, Port):
return ssl.get_server_certificate((SiteName, Port))
def ParseCert(CertRaw):
Cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, CertRaw)
CertSubject = str(Cert.get_subject())[18:-2]
CertStartDate = datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1],
'%Y%m%d%H%M%SZ')
CertEndDate = datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1],
'%Y%m%d%H%M%SZ')
CertIssuer = str(Cert.get_issuer())[18:-2]
return {'CertSubject': CertIssuer, 'CertStartDate': CertStartDate,
'CertEndDate': CertEndDate, 'CertIssuer': CertIssuer}
CertRaw = GetCert('some.domain.tld', 443)
print(CertRaw)
Out = ParseCert(CertRaw)
print(Out)
print(Out['CertSubject'])
print(Out['CertStartDate'])
Fix wrong value assignment for CertSubject in the return statement of ParseCert() function
|
#!/usr/bin/env python3
import datetime
import ssl
import OpenSSL
def GetCert(SiteName, Port):
return ssl.get_server_certificate((SiteName, Port))
def ParseCert(CertRaw):
Cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, CertRaw)
CertSubject = str(Cert.get_subject())[18:-2]
CertStartDate = datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1],
'%Y%m%d%H%M%SZ')
CertEndDate = datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1],
'%Y%m%d%H%M%SZ')
CertIssuer = str(Cert.get_issuer())[18:-2]
return {'CertSubject': CertSubject, 'CertStartDate': CertStartDate,
'CertEndDate': CertEndDate, 'CertIssuer': CertIssuer}
CertRaw = GetCert('some.domain.tld', 443)
print(CertRaw)
Out = ParseCert(CertRaw)
print(Out)
print(Out['CertSubject'])
print(Out['CertStartDate'])
|
<commit_before>#!/usr/bin/env python3
import datetime
import ssl
import OpenSSL
def GetCert(SiteName, Port):
return ssl.get_server_certificate((SiteName, Port))
def ParseCert(CertRaw):
Cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, CertRaw)
CertSubject = str(Cert.get_subject())[18:-2]
CertStartDate = datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1],
'%Y%m%d%H%M%SZ')
CertEndDate = datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1],
'%Y%m%d%H%M%SZ')
CertIssuer = str(Cert.get_issuer())[18:-2]
return {'CertSubject': CertIssuer, 'CertStartDate': CertStartDate,
'CertEndDate': CertEndDate, 'CertIssuer': CertIssuer}
CertRaw = GetCert('some.domain.tld', 443)
print(CertRaw)
Out = ParseCert(CertRaw)
print(Out)
print(Out['CertSubject'])
print(Out['CertStartDate'])
<commit_msg>Fix wrong value assignment for CertSubject in the return statement of ParseCert() function<commit_after>
|
#!/usr/bin/env python3
import datetime
import ssl
import OpenSSL
def GetCert(SiteName, Port):
return ssl.get_server_certificate((SiteName, Port))
def ParseCert(CertRaw):
Cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, CertRaw)
CertSubject = str(Cert.get_subject())[18:-2]
CertStartDate = datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1],
'%Y%m%d%H%M%SZ')
CertEndDate = datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1],
'%Y%m%d%H%M%SZ')
CertIssuer = str(Cert.get_issuer())[18:-2]
return {'CertSubject': CertSubject, 'CertStartDate': CertStartDate,
'CertEndDate': CertEndDate, 'CertIssuer': CertIssuer}
CertRaw = GetCert('some.domain.tld', 443)
print(CertRaw)
Out = ParseCert(CertRaw)
print(Out)
print(Out['CertSubject'])
print(Out['CertStartDate'])
|
#!/usr/bin/env python3
import datetime
import ssl
import OpenSSL
def GetCert(SiteName, Port):
return ssl.get_server_certificate((SiteName, Port))
def ParseCert(CertRaw):
Cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, CertRaw)
CertSubject = str(Cert.get_subject())[18:-2]
CertStartDate = datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1],
'%Y%m%d%H%M%SZ')
CertEndDate = datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1],
'%Y%m%d%H%M%SZ')
CertIssuer = str(Cert.get_issuer())[18:-2]
return {'CertSubject': CertIssuer, 'CertStartDate': CertStartDate,
'CertEndDate': CertEndDate, 'CertIssuer': CertIssuer}
CertRaw = GetCert('some.domain.tld', 443)
print(CertRaw)
Out = ParseCert(CertRaw)
print(Out)
print(Out['CertSubject'])
print(Out['CertStartDate'])
Fix wrong value assignment for CertSubject in the return statement of ParseCert() function#!/usr/bin/env python3
import datetime
import ssl
import OpenSSL
def GetCert(SiteName, Port):
return ssl.get_server_certificate((SiteName, Port))
def ParseCert(CertRaw):
Cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, CertRaw)
CertSubject = str(Cert.get_subject())[18:-2]
CertStartDate = datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1],
'%Y%m%d%H%M%SZ')
CertEndDate = datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1],
'%Y%m%d%H%M%SZ')
CertIssuer = str(Cert.get_issuer())[18:-2]
return {'CertSubject': CertSubject, 'CertStartDate': CertStartDate,
'CertEndDate': CertEndDate, 'CertIssuer': CertIssuer}
CertRaw = GetCert('some.domain.tld', 443)
print(CertRaw)
Out = ParseCert(CertRaw)
print(Out)
print(Out['CertSubject'])
print(Out['CertStartDate'])
|
<commit_before>#!/usr/bin/env python3
import datetime
import ssl
import OpenSSL
def GetCert(SiteName, Port):
return ssl.get_server_certificate((SiteName, Port))
def ParseCert(CertRaw):
Cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, CertRaw)
CertSubject = str(Cert.get_subject())[18:-2]
CertStartDate = datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1],
'%Y%m%d%H%M%SZ')
CertEndDate = datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1],
'%Y%m%d%H%M%SZ')
CertIssuer = str(Cert.get_issuer())[18:-2]
return {'CertSubject': CertIssuer, 'CertStartDate': CertStartDate,
'CertEndDate': CertEndDate, 'CertIssuer': CertIssuer}
CertRaw = GetCert('some.domain.tld', 443)
print(CertRaw)
Out = ParseCert(CertRaw)
print(Out)
print(Out['CertSubject'])
print(Out['CertStartDate'])
<commit_msg>Fix wrong value assignment for CertSubject in the return statement of ParseCert() function<commit_after>#!/usr/bin/env python3
import datetime
import ssl
import OpenSSL
def GetCert(SiteName, Port):
return ssl.get_server_certificate((SiteName, Port))
def ParseCert(CertRaw):
Cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, CertRaw)
CertSubject = str(Cert.get_subject())[18:-2]
CertStartDate = datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1],
'%Y%m%d%H%M%SZ')
CertEndDate = datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1],
'%Y%m%d%H%M%SZ')
CertIssuer = str(Cert.get_issuer())[18:-2]
return {'CertSubject': CertSubject, 'CertStartDate': CertStartDate,
'CertEndDate': CertEndDate, 'CertIssuer': CertIssuer}
CertRaw = GetCert('some.domain.tld', 443)
print(CertRaw)
Out = ParseCert(CertRaw)
print(Out)
print(Out['CertSubject'])
print(Out['CertStartDate'])
|
bfd887bdb77ea2c8fb4d67895d98d8c923135045
|
Lib/dbhash.py
|
Lib/dbhash.py
|
"""Provide a (g)dbm-compatible interface to bsdhash.hashopen."""
import bsddb
error = bsddb.error
def open(file, flag, mode=0666):
return bsddb.hashopen(file, flag, mode)
|
"""Provide a (g)dbm-compatible interface to bsdhash.hashopen."""
import bsddb
error = bsddb.error # Exported for anydbm
def open(file, flag, mode=0666):
return bsddb.hashopen(file, flag, mode)
|
Clarify why we define error. Suggested by Andrew Dalke.
|
Clarify why we define error. Suggested by Andrew Dalke.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
"""Provide a (g)dbm-compatible interface to bsdhash.hashopen."""
import bsddb
error = bsddb.error
def open(file, flag, mode=0666):
return bsddb.hashopen(file, flag, mode)
Clarify why we define error. Suggested by Andrew Dalke.
|
"""Provide a (g)dbm-compatible interface to bsdhash.hashopen."""
import bsddb
error = bsddb.error # Exported for anydbm
def open(file, flag, mode=0666):
return bsddb.hashopen(file, flag, mode)
|
<commit_before>"""Provide a (g)dbm-compatible interface to bsdhash.hashopen."""
import bsddb
error = bsddb.error
def open(file, flag, mode=0666):
return bsddb.hashopen(file, flag, mode)
<commit_msg>Clarify why we define error. Suggested by Andrew Dalke.<commit_after>
|
"""Provide a (g)dbm-compatible interface to bsdhash.hashopen."""
import bsddb
error = bsddb.error # Exported for anydbm
def open(file, flag, mode=0666):
return bsddb.hashopen(file, flag, mode)
|
"""Provide a (g)dbm-compatible interface to bsdhash.hashopen."""
import bsddb
error = bsddb.error
def open(file, flag, mode=0666):
return bsddb.hashopen(file, flag, mode)
Clarify why we define error. Suggested by Andrew Dalke."""Provide a (g)dbm-compatible interface to bsdhash.hashopen."""
import bsddb
error = bsddb.error # Exported for anydbm
def open(file, flag, mode=0666):
return bsddb.hashopen(file, flag, mode)
|
<commit_before>"""Provide a (g)dbm-compatible interface to bsdhash.hashopen."""
import bsddb
error = bsddb.error
def open(file, flag, mode=0666):
return bsddb.hashopen(file, flag, mode)
<commit_msg>Clarify why we define error. Suggested by Andrew Dalke.<commit_after>"""Provide a (g)dbm-compatible interface to bsdhash.hashopen."""
import bsddb
error = bsddb.error # Exported for anydbm
def open(file, flag, mode=0666):
return bsddb.hashopen(file, flag, mode)
|
785892356bde4c20da265844ae77773266d7c01b
|
tests/iris_test.py
|
tests/iris_test.py
|
#!/usr/bin/env python
from sklearn import datasets
from random import shuffle
# This example loads the IRIS dataset and classifies
# using our neural network implementation.
# The results are visualized in a 2D-plot.
def main():
iris = datasets.load_iris()
X = iris.data
Y = iris.target
# Randomize (shuffle) the indexes
# Shuffle randomizes idx in place
idx = range(len(X))
shuffle(idx)
# Split the shuffled indexes into half, to training and test
# The int conversion is needed in python 3, I think (for odd number of indexes)
train_idx = idx[:int(len(idx)/2)]
test_idx = idx[int(len(X)/2):]
if __name__=='__main__':
main()
|
#!/usr/bin/env python
from sklearn import datasets
from random import shuffle
import numpy as np
from neuralnet import NeuralNet
# This example loads the IRIS dataset and classifies
# using our neural network implementation.
# The results are visualized in a 2D-plot.
def main():
iris = datasets.load_iris()
X = iris.data
Y = iris.target
# Randomize (shuffle) the indexes
# Shuffle randomizes idx in place
idx = range(len(X))
shuffle(idx)
# Split the shuffled indexes into half, to training and test
# The int conversion is needed in python 3, I think (for odd number of indexes)
train_idx = idx[:int(len(idx)/2)]
test_idx = idx[int(len(X)/2):]
# Initialize zero matrix for outputs in binary form
Y_bin = np.zeros((len(Y),3),dtype=np.int)
# Convert output from int to binary representation for neural network
for i in range(len(Y)):
Y_bin[i][Y[i]] = 1
if __name__=='__main__':
main()
|
Convert output values from int to binary for neural network
|
Convert output values from int to binary for neural network
|
Python
|
mit
|
akajuvonen/simple-neuralnet-python
|
#!/usr/bin/env python
from sklearn import datasets
from random import shuffle
# This example loads the IRIS dataset and classifies
# using our neural network implementation.
# The results are visualized in a 2D-plot.
def main():
iris = datasets.load_iris()
X = iris.data
Y = iris.target
# Randomize (shuffle) the indexes
# Shuffle randomizes idx in place
idx = range(len(X))
shuffle(idx)
# Split the shuffled indexes into half, to training and test
# The int conversion is needed in python 3, I think (for odd number of indexes)
train_idx = idx[:int(len(idx)/2)]
test_idx = idx[int(len(X)/2):]
if __name__=='__main__':
main()
Convert output values from int to binary for neural network
|
#!/usr/bin/env python
from sklearn import datasets
from random import shuffle
import numpy as np
from neuralnet import NeuralNet
# This example loads the IRIS dataset and classifies
# using our neural network implementation.
# The results are visualized in a 2D-plot.
def main():
iris = datasets.load_iris()
X = iris.data
Y = iris.target
# Randomize (shuffle) the indexes
# Shuffle randomizes idx in place
idx = range(len(X))
shuffle(idx)
# Split the shuffled indexes into half, to training and test
# The int conversion is needed in python 3, I think (for odd number of indexes)
train_idx = idx[:int(len(idx)/2)]
test_idx = idx[int(len(X)/2):]
# Initialize zero matrix for outputs in binary form
Y_bin = np.zeros((len(Y),3),dtype=np.int)
# Convert output from int to binary representation for neural network
for i in range(len(Y)):
Y_bin[i][Y[i]] = 1
if __name__=='__main__':
main()
|
<commit_before>#!/usr/bin/env python
from sklearn import datasets
from random import shuffle
# This example loads the IRIS dataset and classifies
# using our neural network implementation.
# The results are visualized in a 2D-plot.
def main():
iris = datasets.load_iris()
X = iris.data
Y = iris.target
# Randomize (shuffle) the indexes
# Shuffle randomizes idx in place
idx = range(len(X))
shuffle(idx)
# Split the shuffled indexes into half, to training and test
# The int conversion is needed in python 3, I think (for odd number of indexes)
train_idx = idx[:int(len(idx)/2)]
test_idx = idx[int(len(X)/2):]
if __name__=='__main__':
main()
<commit_msg>Convert output values from int to binary for neural network<commit_after>
|
#!/usr/bin/env python
from sklearn import datasets
from random import shuffle
import numpy as np
from neuralnet import NeuralNet
# This example loads the IRIS dataset and classifies
# using our neural network implementation.
# The results are visualized in a 2D-plot.
def main():
iris = datasets.load_iris()
X = iris.data
Y = iris.target
# Randomize (shuffle) the indexes
# Shuffle randomizes idx in place
idx = range(len(X))
shuffle(idx)
# Split the shuffled indexes into half, to training and test
# The int conversion is needed in python 3, I think (for odd number of indexes)
train_idx = idx[:int(len(idx)/2)]
test_idx = idx[int(len(X)/2):]
# Initialize zero matrix for outputs in binary form
Y_bin = np.zeros((len(Y),3),dtype=np.int)
# Convert output from int to binary representation for neural network
for i in range(len(Y)):
Y_bin[i][Y[i]] = 1
if __name__=='__main__':
main()
|
#!/usr/bin/env python
from sklearn import datasets
from random import shuffle
# This example loads the IRIS dataset and classifies
# using our neural network implementation.
# The results are visualized in a 2D-plot.
def main():
iris = datasets.load_iris()
X = iris.data
Y = iris.target
# Randomize (shuffle) the indexes
# Shuffle randomizes idx in place
idx = range(len(X))
shuffle(idx)
# Split the shuffled indexes into half, to training and test
# The int conversion is needed in python 3, I think (for odd number of indexes)
train_idx = idx[:int(len(idx)/2)]
test_idx = idx[int(len(X)/2):]
if __name__=='__main__':
main()
Convert output values from int to binary for neural network#!/usr/bin/env python
from sklearn import datasets
from random import shuffle
import numpy as np
from neuralnet import NeuralNet
# This example loads the IRIS dataset and classifies
# using our neural network implementation.
# The results are visualized in a 2D-plot.
def main():
iris = datasets.load_iris()
X = iris.data
Y = iris.target
# Randomize (shuffle) the indexes
# Shuffle randomizes idx in place
idx = range(len(X))
shuffle(idx)
# Split the shuffled indexes into half, to training and test
# The int conversion is needed in python 3, I think (for odd number of indexes)
train_idx = idx[:int(len(idx)/2)]
test_idx = idx[int(len(X)/2):]
# Initialize zero matrix for outputs in binary form
Y_bin = np.zeros((len(Y),3),dtype=np.int)
# Convert output from int to binary representation for neural network
for i in range(len(Y)):
Y_bin[i][Y[i]] = 1
if __name__=='__main__':
main()
|
<commit_before>#!/usr/bin/env python
from sklearn import datasets
from random import shuffle
# This example loads the IRIS dataset and classifies
# using our neural network implementation.
# The results are visualized in a 2D-plot.
def main():
iris = datasets.load_iris()
X = iris.data
Y = iris.target
# Randomize (shuffle) the indexes
# Shuffle randomizes idx in place
idx = range(len(X))
shuffle(idx)
# Split the shuffled indexes into half, to training and test
# The int conversion is needed in python 3, I think (for odd number of indexes)
train_idx = idx[:int(len(idx)/2)]
test_idx = idx[int(len(X)/2):]
if __name__=='__main__':
main()
<commit_msg>Convert output values from int to binary for neural network<commit_after>#!/usr/bin/env python
from sklearn import datasets
from random import shuffle
import numpy as np
from neuralnet import NeuralNet
# This example loads the IRIS dataset and classifies
# using our neural network implementation.
# The results are visualized in a 2D-plot.
def main():
iris = datasets.load_iris()
X = iris.data
Y = iris.target
# Randomize (shuffle) the indexes
# Shuffle randomizes idx in place
idx = range(len(X))
shuffle(idx)
# Split the shuffled indexes into half, to training and test
# The int conversion is needed in python 3, I think (for odd number of indexes)
train_idx = idx[:int(len(idx)/2)]
test_idx = idx[int(len(X)/2):]
# Initialize zero matrix for outputs in binary form
Y_bin = np.zeros((len(Y),3),dtype=np.int)
# Convert output from int to binary representation for neural network
for i in range(len(Y)):
Y_bin[i][Y[i]] = 1
if __name__=='__main__':
main()
|
5e4e246b5b30afc946e202696e84000a715bc895
|
bookmarks/__init__.py
|
bookmarks/__init__.py
|
VERSION = (0, 1, 3, "dev")
def get_version():
if VERSION[3] != "final":
return "%s.%s.%s%s" % (VERSION[0], VERSION[1], VERSION[2], VERSION[3])
else:
return "%s.%s.%s" % (VERSION[0], VERSION[1], VERSION[2])
__version__ = get_version()
|
VERSION = (0, 1, 3,)
def get_version():
if VERSION[3] != "final":
return "%s.%s.%s%s" % (VERSION[0], VERSION[1], VERSION[2], VERSION[3])
else:
return "%s.%s.%s" % (VERSION[0], VERSION[1], VERSION[2])
__version__ = get_version()
|
Remove dev from version string
|
Remove dev from version string
|
Python
|
mit
|
incuna/incuna-bookmarks,incuna/incuna-bookmarks
|
VERSION = (0, 1, 3, "dev")
def get_version():
if VERSION[3] != "final":
return "%s.%s.%s%s" % (VERSION[0], VERSION[1], VERSION[2], VERSION[3])
else:
return "%s.%s.%s" % (VERSION[0], VERSION[1], VERSION[2])
__version__ = get_version()
Remove dev from version string
|
VERSION = (0, 1, 3,)
def get_version():
if VERSION[3] != "final":
return "%s.%s.%s%s" % (VERSION[0], VERSION[1], VERSION[2], VERSION[3])
else:
return "%s.%s.%s" % (VERSION[0], VERSION[1], VERSION[2])
__version__ = get_version()
|
<commit_before>VERSION = (0, 1, 3, "dev")
def get_version():
if VERSION[3] != "final":
return "%s.%s.%s%s" % (VERSION[0], VERSION[1], VERSION[2], VERSION[3])
else:
return "%s.%s.%s" % (VERSION[0], VERSION[1], VERSION[2])
__version__ = get_version()
<commit_msg>Remove dev from version string<commit_after>
|
VERSION = (0, 1, 3,)
def get_version():
if VERSION[3] != "final":
return "%s.%s.%s%s" % (VERSION[0], VERSION[1], VERSION[2], VERSION[3])
else:
return "%s.%s.%s" % (VERSION[0], VERSION[1], VERSION[2])
__version__ = get_version()
|
VERSION = (0, 1, 3, "dev")
def get_version():
if VERSION[3] != "final":
return "%s.%s.%s%s" % (VERSION[0], VERSION[1], VERSION[2], VERSION[3])
else:
return "%s.%s.%s" % (VERSION[0], VERSION[1], VERSION[2])
__version__ = get_version()
Remove dev from version stringVERSION = (0, 1, 3,)
def get_version():
if VERSION[3] != "final":
return "%s.%s.%s%s" % (VERSION[0], VERSION[1], VERSION[2], VERSION[3])
else:
return "%s.%s.%s" % (VERSION[0], VERSION[1], VERSION[2])
__version__ = get_version()
|
<commit_before>VERSION = (0, 1, 3, "dev")
def get_version():
if VERSION[3] != "final":
return "%s.%s.%s%s" % (VERSION[0], VERSION[1], VERSION[2], VERSION[3])
else:
return "%s.%s.%s" % (VERSION[0], VERSION[1], VERSION[2])
__version__ = get_version()
<commit_msg>Remove dev from version string<commit_after>VERSION = (0, 1, 3,)
def get_version():
if VERSION[3] != "final":
return "%s.%s.%s%s" % (VERSION[0], VERSION[1], VERSION[2], VERSION[3])
else:
return "%s.%s.%s" % (VERSION[0], VERSION[1], VERSION[2])
__version__ = get_version()
|
3906e118f56b3bcd158c35426955b9f827ad0de8
|
teams/viewsets.py
|
teams/viewsets.py
|
from rest_framework.viewsets import ModelViewSet
from . import models
from rest_framework.permissions import BasePermission
class IsOwnerPermission(BasePermission):
def has_object_permission(self, request, view, obj):
return request.user == obj.owner
class TeamViewSet(ModelViewSet):
model = models.Team
permission_classes = (IsOwnerPermission, )
class PlayerViewSet(ModelViewSet):
model = models.Player
|
from rest_framework.viewsets import ModelViewSet
from . import models
from . import permisssions
class TeamViewSet(ModelViewSet):
model = models.Team
permission_classes = (permisssions.IsOwnerPermission, )
class PlayerViewSet(ModelViewSet):
model = models.Player
|
Move to a module permissions
|
Move to a module permissions
|
Python
|
mit
|
mfernandezmsistemas/phyton1,migonzalvar/teamroulette
|
from rest_framework.viewsets import ModelViewSet
from . import models
from rest_framework.permissions import BasePermission
class IsOwnerPermission(BasePermission):
def has_object_permission(self, request, view, obj):
return request.user == obj.owner
class TeamViewSet(ModelViewSet):
model = models.Team
permission_classes = (IsOwnerPermission, )
class PlayerViewSet(ModelViewSet):
model = models.Player
Move to a module permissions
|
from rest_framework.viewsets import ModelViewSet
from . import models
from . import permisssions
class TeamViewSet(ModelViewSet):
model = models.Team
permission_classes = (permisssions.IsOwnerPermission, )
class PlayerViewSet(ModelViewSet):
model = models.Player
|
<commit_before>from rest_framework.viewsets import ModelViewSet
from . import models
from rest_framework.permissions import BasePermission
class IsOwnerPermission(BasePermission):
def has_object_permission(self, request, view, obj):
return request.user == obj.owner
class TeamViewSet(ModelViewSet):
model = models.Team
permission_classes = (IsOwnerPermission, )
class PlayerViewSet(ModelViewSet):
model = models.Player
<commit_msg>Move to a module permissions<commit_after>
|
from rest_framework.viewsets import ModelViewSet
from . import models
from . import permisssions
class TeamViewSet(ModelViewSet):
model = models.Team
permission_classes = (permisssions.IsOwnerPermission, )
class PlayerViewSet(ModelViewSet):
model = models.Player
|
from rest_framework.viewsets import ModelViewSet
from . import models
from rest_framework.permissions import BasePermission
class IsOwnerPermission(BasePermission):
def has_object_permission(self, request, view, obj):
return request.user == obj.owner
class TeamViewSet(ModelViewSet):
model = models.Team
permission_classes = (IsOwnerPermission, )
class PlayerViewSet(ModelViewSet):
model = models.Player
Move to a module permissionsfrom rest_framework.viewsets import ModelViewSet
from . import models
from . import permisssions
class TeamViewSet(ModelViewSet):
model = models.Team
permission_classes = (permisssions.IsOwnerPermission, )
class PlayerViewSet(ModelViewSet):
model = models.Player
|
<commit_before>from rest_framework.viewsets import ModelViewSet
from . import models
from rest_framework.permissions import BasePermission
class IsOwnerPermission(BasePermission):
def has_object_permission(self, request, view, obj):
return request.user == obj.owner
class TeamViewSet(ModelViewSet):
model = models.Team
permission_classes = (IsOwnerPermission, )
class PlayerViewSet(ModelViewSet):
model = models.Player
<commit_msg>Move to a module permissions<commit_after>from rest_framework.viewsets import ModelViewSet
from . import models
from . import permisssions
class TeamViewSet(ModelViewSet):
model = models.Team
permission_classes = (permisssions.IsOwnerPermission, )
class PlayerViewSet(ModelViewSet):
model = models.Player
|
8fef7f60ed99e3dd92b3d5ef04a1a9ce3140ffaf
|
measurator/main.py
|
measurator/main.py
|
import argparse, csv, datetime, time
def run_main():
fails = 0
succeeds = 0
not_yet = list()
path = file_path()
# read file
with open(path) as f:
reader = csv.reader(f)
for row in reader:
status = row[0]
if status == 'F':
fails = fails + 1
elif status == 'S':
succeeds = succeeds + 1
else:
not_yet.append(row)
# evaluate measurements
now = datetime.datetime.now()
for row in list(not_yet):
evaluate_time = datetime.datetime(*(time.strptime(row[1], '%Y-%m-%d %H:%M:%S')[:6]))
if evaluate_time < now:
print "Time to evaluate:", row[2], "\n Is it true?"
user_input = raw_input()
if user_input.capitalize().startswith('Y'):
succeeds = succeeds + 1
else:
fails = fails + 1
not_yet.remove(row)
# print total statistics
total_done = fails + succeeds
if total_done > 0:
percentage = '%d%%' % (float(100 * succeeds) / float(total_done))
else:
percentage = 'N/A'
print "Succesful predictions:", percentage, ", not done yet:", len(not_yet)
def file_path():
parser = argparse.ArgumentParser()
parser.add_argument("path")
args = parser.parse_args()
return args.path
|
import argparse, csv, datetime, time
def run_main():
fails = list()
succeeds = list()
not_yet = list()
path = file_path()
# read file
with open(path) as f:
reader = csv.reader(f)
for row in reader:
status = row[0]
if status == 'F':
fails.append(row)
elif status == 'S':
succeeds.append(row)
else:
not_yet.append(row)
# evaluate measurements
now = datetime.datetime.now()
for row in list(not_yet):
evaluate_time = datetime.datetime(*(time.strptime(row[1], '%Y-%m-%d %H:%M:%S')[:6]))
if evaluate_time < now:
print "Time to evaluate:", row[2], "\n Is it true?"
user_input = raw_input()
if user_input.capitalize().startswith('Y'):
succeeds.append(row)
else:
fails .append(row)
not_yet.remove(row)
# print total statistics
total_done = len(fails) + len(succeeds)
if total_done > 0:
percentage = '%d%%' % (float(100 * len(succeeds)) / float(total_done))
else:
percentage = 'N/A'
print "Succesful predictions:", percentage, ", not done yet:", len(not_yet)
def file_path():
parser = argparse.ArgumentParser()
parser.add_argument("path")
args = parser.parse_args()
return args.path
|
Read all records from the file
|
Read all records from the file
|
Python
|
mit
|
ahitrin-attic/measurator-proto
|
import argparse, csv, datetime, time
def run_main():
fails = 0
succeeds = 0
not_yet = list()
path = file_path()
# read file
with open(path) as f:
reader = csv.reader(f)
for row in reader:
status = row[0]
if status == 'F':
fails = fails + 1
elif status == 'S':
succeeds = succeeds + 1
else:
not_yet.append(row)
# evaluate measurements
now = datetime.datetime.now()
for row in list(not_yet):
evaluate_time = datetime.datetime(*(time.strptime(row[1], '%Y-%m-%d %H:%M:%S')[:6]))
if evaluate_time < now:
print "Time to evaluate:", row[2], "\n Is it true?"
user_input = raw_input()
if user_input.capitalize().startswith('Y'):
succeeds = succeeds + 1
else:
fails = fails + 1
not_yet.remove(row)
# print total statistics
total_done = fails + succeeds
if total_done > 0:
percentage = '%d%%' % (float(100 * succeeds) / float(total_done))
else:
percentage = 'N/A'
print "Succesful predictions:", percentage, ", not done yet:", len(not_yet)
def file_path():
parser = argparse.ArgumentParser()
parser.add_argument("path")
args = parser.parse_args()
return args.path
Read all records from the file
|
import argparse, csv, datetime, time
def run_main():
fails = list()
succeeds = list()
not_yet = list()
path = file_path()
# read file
with open(path) as f:
reader = csv.reader(f)
for row in reader:
status = row[0]
if status == 'F':
fails.append(row)
elif status == 'S':
succeeds.append(row)
else:
not_yet.append(row)
# evaluate measurements
now = datetime.datetime.now()
for row in list(not_yet):
evaluate_time = datetime.datetime(*(time.strptime(row[1], '%Y-%m-%d %H:%M:%S')[:6]))
if evaluate_time < now:
print "Time to evaluate:", row[2], "\n Is it true?"
user_input = raw_input()
if user_input.capitalize().startswith('Y'):
succeeds.append(row)
else:
fails .append(row)
not_yet.remove(row)
# print total statistics
total_done = len(fails) + len(succeeds)
if total_done > 0:
percentage = '%d%%' % (float(100 * len(succeeds)) / float(total_done))
else:
percentage = 'N/A'
print "Succesful predictions:", percentage, ", not done yet:", len(not_yet)
def file_path():
parser = argparse.ArgumentParser()
parser.add_argument("path")
args = parser.parse_args()
return args.path
|
<commit_before>import argparse, csv, datetime, time
def run_main():
fails = 0
succeeds = 0
not_yet = list()
path = file_path()
# read file
with open(path) as f:
reader = csv.reader(f)
for row in reader:
status = row[0]
if status == 'F':
fails = fails + 1
elif status == 'S':
succeeds = succeeds + 1
else:
not_yet.append(row)
# evaluate measurements
now = datetime.datetime.now()
for row in list(not_yet):
evaluate_time = datetime.datetime(*(time.strptime(row[1], '%Y-%m-%d %H:%M:%S')[:6]))
if evaluate_time < now:
print "Time to evaluate:", row[2], "\n Is it true?"
user_input = raw_input()
if user_input.capitalize().startswith('Y'):
succeeds = succeeds + 1
else:
fails = fails + 1
not_yet.remove(row)
# print total statistics
total_done = fails + succeeds
if total_done > 0:
percentage = '%d%%' % (float(100 * succeeds) / float(total_done))
else:
percentage = 'N/A'
print "Succesful predictions:", percentage, ", not done yet:", len(not_yet)
def file_path():
parser = argparse.ArgumentParser()
parser.add_argument("path")
args = parser.parse_args()
return args.path
<commit_msg>Read all records from the file<commit_after>
|
import argparse, csv, datetime, time
def run_main():
fails = list()
succeeds = list()
not_yet = list()
path = file_path()
# read file
with open(path) as f:
reader = csv.reader(f)
for row in reader:
status = row[0]
if status == 'F':
fails.append(row)
elif status == 'S':
succeeds.append(row)
else:
not_yet.append(row)
# evaluate measurements
now = datetime.datetime.now()
for row in list(not_yet):
evaluate_time = datetime.datetime(*(time.strptime(row[1], '%Y-%m-%d %H:%M:%S')[:6]))
if evaluate_time < now:
print "Time to evaluate:", row[2], "\n Is it true?"
user_input = raw_input()
if user_input.capitalize().startswith('Y'):
succeeds.append(row)
else:
fails .append(row)
not_yet.remove(row)
# print total statistics
total_done = len(fails) + len(succeeds)
if total_done > 0:
percentage = '%d%%' % (float(100 * len(succeeds)) / float(total_done))
else:
percentage = 'N/A'
print "Succesful predictions:", percentage, ", not done yet:", len(not_yet)
def file_path():
parser = argparse.ArgumentParser()
parser.add_argument("path")
args = parser.parse_args()
return args.path
|
import argparse, csv, datetime, time
def run_main():
fails = 0
succeeds = 0
not_yet = list()
path = file_path()
# read file
with open(path) as f:
reader = csv.reader(f)
for row in reader:
status = row[0]
if status == 'F':
fails = fails + 1
elif status == 'S':
succeeds = succeeds + 1
else:
not_yet.append(row)
# evaluate measurements
now = datetime.datetime.now()
for row in list(not_yet):
evaluate_time = datetime.datetime(*(time.strptime(row[1], '%Y-%m-%d %H:%M:%S')[:6]))
if evaluate_time < now:
print "Time to evaluate:", row[2], "\n Is it true?"
user_input = raw_input()
if user_input.capitalize().startswith('Y'):
succeeds = succeeds + 1
else:
fails = fails + 1
not_yet.remove(row)
# print total statistics
total_done = fails + succeeds
if total_done > 0:
percentage = '%d%%' % (float(100 * succeeds) / float(total_done))
else:
percentage = 'N/A'
print "Succesful predictions:", percentage, ", not done yet:", len(not_yet)
def file_path():
parser = argparse.ArgumentParser()
parser.add_argument("path")
args = parser.parse_args()
return args.path
Read all records from the fileimport argparse, csv, datetime, time
def run_main():
fails = list()
succeeds = list()
not_yet = list()
path = file_path()
# read file
with open(path) as f:
reader = csv.reader(f)
for row in reader:
status = row[0]
if status == 'F':
fails.append(row)
elif status == 'S':
succeeds.append(row)
else:
not_yet.append(row)
# evaluate measurements
now = datetime.datetime.now()
for row in list(not_yet):
evaluate_time = datetime.datetime(*(time.strptime(row[1], '%Y-%m-%d %H:%M:%S')[:6]))
if evaluate_time < now:
print "Time to evaluate:", row[2], "\n Is it true?"
user_input = raw_input()
if user_input.capitalize().startswith('Y'):
succeeds.append(row)
else:
fails .append(row)
not_yet.remove(row)
# print total statistics
total_done = len(fails) + len(succeeds)
if total_done > 0:
percentage = '%d%%' % (float(100 * len(succeeds)) / float(total_done))
else:
percentage = 'N/A'
print "Succesful predictions:", percentage, ", not done yet:", len(not_yet)
def file_path():
parser = argparse.ArgumentParser()
parser.add_argument("path")
args = parser.parse_args()
return args.path
|
<commit_before>import argparse, csv, datetime, time
def run_main():
fails = 0
succeeds = 0
not_yet = list()
path = file_path()
# read file
with open(path) as f:
reader = csv.reader(f)
for row in reader:
status = row[0]
if status == 'F':
fails = fails + 1
elif status == 'S':
succeeds = succeeds + 1
else:
not_yet.append(row)
# evaluate measurements
now = datetime.datetime.now()
for row in list(not_yet):
evaluate_time = datetime.datetime(*(time.strptime(row[1], '%Y-%m-%d %H:%M:%S')[:6]))
if evaluate_time < now:
print "Time to evaluate:", row[2], "\n Is it true?"
user_input = raw_input()
if user_input.capitalize().startswith('Y'):
succeeds = succeeds + 1
else:
fails = fails + 1
not_yet.remove(row)
# print total statistics
total_done = fails + succeeds
if total_done > 0:
percentage = '%d%%' % (float(100 * succeeds) / float(total_done))
else:
percentage = 'N/A'
print "Succesful predictions:", percentage, ", not done yet:", len(not_yet)
def file_path():
parser = argparse.ArgumentParser()
parser.add_argument("path")
args = parser.parse_args()
return args.path
<commit_msg>Read all records from the file<commit_after>import argparse, csv, datetime, time
def run_main():
fails = list()
succeeds = list()
not_yet = list()
path = file_path()
# read file
with open(path) as f:
reader = csv.reader(f)
for row in reader:
status = row[0]
if status == 'F':
fails.append(row)
elif status == 'S':
succeeds.append(row)
else:
not_yet.append(row)
# evaluate measurements
now = datetime.datetime.now()
for row in list(not_yet):
evaluate_time = datetime.datetime(*(time.strptime(row[1], '%Y-%m-%d %H:%M:%S')[:6]))
if evaluate_time < now:
print "Time to evaluate:", row[2], "\n Is it true?"
user_input = raw_input()
if user_input.capitalize().startswith('Y'):
succeeds.append(row)
else:
fails .append(row)
not_yet.remove(row)
# print total statistics
total_done = len(fails) + len(succeeds)
if total_done > 0:
percentage = '%d%%' % (float(100 * len(succeeds)) / float(total_done))
else:
percentage = 'N/A'
print "Succesful predictions:", percentage, ", not done yet:", len(not_yet)
def file_path():
parser = argparse.ArgumentParser()
parser.add_argument("path")
args = parser.parse_args()
return args.path
|
0106ca8c4204aaa818ad14878452b29e7dd62f8c
|
tests/__init__.py
|
tests/__init__.py
|
# -*- coding: utf-8 -*-
import os
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
path = os.path.join("tests/resources", path)
if os.path.isdir(path):
os.rmdir(path)
else:
os.remove(path)
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
|
# -*- coding: utf-8 -*-
import os
import shutil
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
path = os.path.join("tests/resources", path)
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
|
Change to use 'shutil.rmtree' instead of 'os.rmdir'
|
Change to use 'shutil.rmtree' instead of 'os.rmdir'
|
Python
|
mit
|
koji-kojiro/hylang-hycc
|
# -*- coding: utf-8 -*-
import os
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
path = os.path.join("tests/resources", path)
if os.path.isdir(path):
os.rmdir(path)
else:
os.remove(path)
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
Change to use 'shutil.rmtree' instead of 'os.rmdir'
|
# -*- coding: utf-8 -*-
import os
import shutil
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
path = os.path.join("tests/resources", path)
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
|
<commit_before># -*- coding: utf-8 -*-
import os
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
path = os.path.join("tests/resources", path)
if os.path.isdir(path):
os.rmdir(path)
else:
os.remove(path)
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
<commit_msg>Change to use 'shutil.rmtree' instead of 'os.rmdir'<commit_after>
|
# -*- coding: utf-8 -*-
import os
import shutil
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
path = os.path.join("tests/resources", path)
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
|
# -*- coding: utf-8 -*-
import os
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
path = os.path.join("tests/resources", path)
if os.path.isdir(path):
os.rmdir(path)
else:
os.remove(path)
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
Change to use 'shutil.rmtree' instead of 'os.rmdir'# -*- coding: utf-8 -*-
import os
import shutil
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
path = os.path.join("tests/resources", path)
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
|
<commit_before># -*- coding: utf-8 -*-
import os
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
path = os.path.join("tests/resources", path)
if os.path.isdir(path):
os.rmdir(path)
else:
os.remove(path)
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
<commit_msg>Change to use 'shutil.rmtree' instead of 'os.rmdir'<commit_after># -*- coding: utf-8 -*-
import os
import shutil
from hycc.util import hycc_main
def clean():
for path in os.listdir("tests/resources"):
if path not in ["hello.hy", "__init__.py"]:
path = os.path.join("tests/resources", path)
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
def test_build_executable():
hycc_main("tests/resources/hello.hy".split())
assert os.path.exists("tests/resources/hello")
clean()
def test_shared_library():
hycc_main("tests/resources/hello.hy --shared".split())
from tests.resources.hello import hello
assert hello() == "hello"
clean()
|
ad53de29a29d495fb8958c4e5d1a2cfe206de03e
|
tests/settings.py
|
tests/settings.py
|
import warnings
warnings.simplefilter('always')
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
USE_I18N = True
USE_L10N = True
INSTALLED_APPS = [
'django_superform',
'tests',
]
STATIC_URL = '/static/'
SECRET_KEY = '0'
import django
if django.VERSION < (1, 6):
TEST_RUNNER = 'discover_runner.DiscoverRunner'
|
import warnings
warnings.simplefilter('always')
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
USE_I18N = True
USE_L10N = True
INSTALLED_APPS = [
'django_superform',
'tests',
]
MIDDLEWARE_CLASSES = ()
STATIC_URL = '/static/'
SECRET_KEY = '0'
import django
if django.VERSION < (1, 6):
TEST_RUNNER = 'discover_runner.DiscoverRunner'
|
Remove warning about not-set MIDDLEWARE_CLASSES setting.
|
Remove warning about not-set MIDDLEWARE_CLASSES setting.
|
Python
|
bsd-3-clause
|
gregmuellegger/django-superform,gregmuellegger/django-superform
|
import warnings
warnings.simplefilter('always')
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
USE_I18N = True
USE_L10N = True
INSTALLED_APPS = [
'django_superform',
'tests',
]
STATIC_URL = '/static/'
SECRET_KEY = '0'
import django
if django.VERSION < (1, 6):
TEST_RUNNER = 'discover_runner.DiscoverRunner'
Remove warning about not-set MIDDLEWARE_CLASSES setting.
|
import warnings
warnings.simplefilter('always')
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
USE_I18N = True
USE_L10N = True
INSTALLED_APPS = [
'django_superform',
'tests',
]
MIDDLEWARE_CLASSES = ()
STATIC_URL = '/static/'
SECRET_KEY = '0'
import django
if django.VERSION < (1, 6):
TEST_RUNNER = 'discover_runner.DiscoverRunner'
|
<commit_before>import warnings
warnings.simplefilter('always')
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
USE_I18N = True
USE_L10N = True
INSTALLED_APPS = [
'django_superform',
'tests',
]
STATIC_URL = '/static/'
SECRET_KEY = '0'
import django
if django.VERSION < (1, 6):
TEST_RUNNER = 'discover_runner.DiscoverRunner'
<commit_msg>Remove warning about not-set MIDDLEWARE_CLASSES setting.<commit_after>
|
import warnings
warnings.simplefilter('always')
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
USE_I18N = True
USE_L10N = True
INSTALLED_APPS = [
'django_superform',
'tests',
]
MIDDLEWARE_CLASSES = ()
STATIC_URL = '/static/'
SECRET_KEY = '0'
import django
if django.VERSION < (1, 6):
TEST_RUNNER = 'discover_runner.DiscoverRunner'
|
import warnings
warnings.simplefilter('always')
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
USE_I18N = True
USE_L10N = True
INSTALLED_APPS = [
'django_superform',
'tests',
]
STATIC_URL = '/static/'
SECRET_KEY = '0'
import django
if django.VERSION < (1, 6):
TEST_RUNNER = 'discover_runner.DiscoverRunner'
Remove warning about not-set MIDDLEWARE_CLASSES setting.import warnings
warnings.simplefilter('always')
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
USE_I18N = True
USE_L10N = True
INSTALLED_APPS = [
'django_superform',
'tests',
]
MIDDLEWARE_CLASSES = ()
STATIC_URL = '/static/'
SECRET_KEY = '0'
import django
if django.VERSION < (1, 6):
TEST_RUNNER = 'discover_runner.DiscoverRunner'
|
<commit_before>import warnings
warnings.simplefilter('always')
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
USE_I18N = True
USE_L10N = True
INSTALLED_APPS = [
'django_superform',
'tests',
]
STATIC_URL = '/static/'
SECRET_KEY = '0'
import django
if django.VERSION < (1, 6):
TEST_RUNNER = 'discover_runner.DiscoverRunner'
<commit_msg>Remove warning about not-set MIDDLEWARE_CLASSES setting.<commit_after>import warnings
warnings.simplefilter('always')
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
USE_I18N = True
USE_L10N = True
INSTALLED_APPS = [
'django_superform',
'tests',
]
MIDDLEWARE_CLASSES = ()
STATIC_URL = '/static/'
SECRET_KEY = '0'
import django
if django.VERSION < (1, 6):
TEST_RUNNER = 'discover_runner.DiscoverRunner'
|
ab434ccb502a5bc0dc0581fb64a0d00ec5f08f0f
|
tests/urls.py
|
tests/urls.py
|
from django.conf.urls import include, url
urlpatterns = [
url('', include('rest_friendship.urls', namespace='rest_friendship')),
]
|
from django.urls import path, include
urlpatterns = [
path('', include(('rest_friendship.urls', 'rest_friendship'), namespace='rest_friendship')),
]
|
Fix deprecated url import, (to path)
|
Fix deprecated url import, (to path)
|
Python
|
isc
|
dnmellen/django-rest-friendship
|
from django.conf.urls import include, url
urlpatterns = [
url('', include('rest_friendship.urls', namespace='rest_friendship')),
]
Fix deprecated url import, (to path)
|
from django.urls import path, include
urlpatterns = [
path('', include(('rest_friendship.urls', 'rest_friendship'), namespace='rest_friendship')),
]
|
<commit_before>from django.conf.urls import include, url
urlpatterns = [
url('', include('rest_friendship.urls', namespace='rest_friendship')),
]
<commit_msg>Fix deprecated url import, (to path)<commit_after>
|
from django.urls import path, include
urlpatterns = [
path('', include(('rest_friendship.urls', 'rest_friendship'), namespace='rest_friendship')),
]
|
from django.conf.urls import include, url
urlpatterns = [
url('', include('rest_friendship.urls', namespace='rest_friendship')),
]
Fix deprecated url import, (to path)from django.urls import path, include
urlpatterns = [
path('', include(('rest_friendship.urls', 'rest_friendship'), namespace='rest_friendship')),
]
|
<commit_before>from django.conf.urls import include, url
urlpatterns = [
url('', include('rest_friendship.urls', namespace='rest_friendship')),
]
<commit_msg>Fix deprecated url import, (to path)<commit_after>from django.urls import path, include
urlpatterns = [
path('', include(('rest_friendship.urls', 'rest_friendship'), namespace='rest_friendship')),
]
|
dab4ca109b852154bc34c468ffe598b22abf7040
|
marathon_acme/cli.py
|
marathon_acme/cli.py
|
import click
import sys
@click.command()
@click.option('--acme',
help='The address for the ACME Directory Resource',
default='https://acme-v01.api.letsencrypt.org/directory',
show_default=True)
@click.option('--email',
help=("Email address for Let's Encrypt certificate registration "
"and recovery contact"),
required=True)
@click.option('--storage-dir',
help='Path to directory for storing certificates')
@click.option('--marathon', default='http://marathon.mesos:8080',
help='The address for the Marathon HTTP API',
show_default=True)
@click.option('--listen',
help=("The address of the interface and port to bind to to "
"receive Marathon's event stream"),
default='0.0.0.0:7000',
show_default=True)
@click.option('--advertise', default='http://marathon-acme.marathon.mesos',
help=('The address to advertise to Marathon when registering '
'for the event stream'),
show_default=True)
@click.option('--poll',
help=("Periodically check Marathon's state every _n_ seconds "
"[default: disabled]"),
type=int)
@click.option('--logfile',
help='Where to log output to [default: stdout]',
type=click.File('a'),
default=sys.stdout)
@click.option('--debug',
help='Log debug output',
is_flag=True)
def main(acme, email, storage_dir, # ACME
marathon, listen, advertise, poll, # Marathon
logfile, debug): # Logging
"""
A tool to automatically request, renew and distribute Let's Encrypt
certificates for apps running on Seed Stack.
"""
|
import click
import sys
@click.command()
@click.option('--acme',
help='The address for the ACME Directory Resource',
default='https://acme-v01.api.letsencrypt.org/directory',
show_default=True)
@click.option('--email',
help=("Email address for Let's Encrypt certificate registration "
"and recovery contact"),
required=True)
@click.option('--storage-dir',
help='Path to directory for storing certificates')
@click.option('--marathon', default='http://marathon.mesos:8080',
help='The address for the Marathon HTTP API',
show_default=True)
@click.option('--poll',
help=("Periodically check Marathon's state every _n_ seconds "
"[default: disabled]"),
type=int)
@click.option('--logfile',
help='Where to log output to [default: stdout]',
type=click.File('a'),
default=sys.stdout)
@click.option('--debug',
help='Log debug output',
is_flag=True)
def main(acme, email, storage_dir, # ACME/certificates
marathon, poll, # Marathon
logfile, debug): # Logging
"""
A tool to automatically request, renew and distribute Let's Encrypt
certificates for apps running on Seed Stack.
"""
|
Remove imaginary event server CLI options
|
Remove imaginary event server CLI options
|
Python
|
mit
|
praekeltfoundation/certbot,praekeltfoundation/certbot
|
import click
import sys
@click.command()
@click.option('--acme',
help='The address for the ACME Directory Resource',
default='https://acme-v01.api.letsencrypt.org/directory',
show_default=True)
@click.option('--email',
help=("Email address for Let's Encrypt certificate registration "
"and recovery contact"),
required=True)
@click.option('--storage-dir',
help='Path to directory for storing certificates')
@click.option('--marathon', default='http://marathon.mesos:8080',
help='The address for the Marathon HTTP API',
show_default=True)
@click.option('--listen',
help=("The address of the interface and port to bind to to "
"receive Marathon's event stream"),
default='0.0.0.0:7000',
show_default=True)
@click.option('--advertise', default='http://marathon-acme.marathon.mesos',
help=('The address to advertise to Marathon when registering '
'for the event stream'),
show_default=True)
@click.option('--poll',
help=("Periodically check Marathon's state every _n_ seconds "
"[default: disabled]"),
type=int)
@click.option('--logfile',
help='Where to log output to [default: stdout]',
type=click.File('a'),
default=sys.stdout)
@click.option('--debug',
help='Log debug output',
is_flag=True)
def main(acme, email, storage_dir, # ACME
marathon, listen, advertise, poll, # Marathon
logfile, debug): # Logging
"""
A tool to automatically request, renew and distribute Let's Encrypt
certificates for apps running on Seed Stack.
"""
Remove imaginary event server CLI options
|
import click
import sys
@click.command()
@click.option('--acme',
help='The address for the ACME Directory Resource',
default='https://acme-v01.api.letsencrypt.org/directory',
show_default=True)
@click.option('--email',
help=("Email address for Let's Encrypt certificate registration "
"and recovery contact"),
required=True)
@click.option('--storage-dir',
help='Path to directory for storing certificates')
@click.option('--marathon', default='http://marathon.mesos:8080',
help='The address for the Marathon HTTP API',
show_default=True)
@click.option('--poll',
help=("Periodically check Marathon's state every _n_ seconds "
"[default: disabled]"),
type=int)
@click.option('--logfile',
help='Where to log output to [default: stdout]',
type=click.File('a'),
default=sys.stdout)
@click.option('--debug',
help='Log debug output',
is_flag=True)
def main(acme, email, storage_dir, # ACME/certificates
marathon, poll, # Marathon
logfile, debug): # Logging
"""
A tool to automatically request, renew and distribute Let's Encrypt
certificates for apps running on Seed Stack.
"""
|
<commit_before>import click
import sys
@click.command()
@click.option('--acme',
help='The address for the ACME Directory Resource',
default='https://acme-v01.api.letsencrypt.org/directory',
show_default=True)
@click.option('--email',
help=("Email address for Let's Encrypt certificate registration "
"and recovery contact"),
required=True)
@click.option('--storage-dir',
help='Path to directory for storing certificates')
@click.option('--marathon', default='http://marathon.mesos:8080',
help='The address for the Marathon HTTP API',
show_default=True)
@click.option('--listen',
help=("The address of the interface and port to bind to to "
"receive Marathon's event stream"),
default='0.0.0.0:7000',
show_default=True)
@click.option('--advertise', default='http://marathon-acme.marathon.mesos',
help=('The address to advertise to Marathon when registering '
'for the event stream'),
show_default=True)
@click.option('--poll',
help=("Periodically check Marathon's state every _n_ seconds "
"[default: disabled]"),
type=int)
@click.option('--logfile',
help='Where to log output to [default: stdout]',
type=click.File('a'),
default=sys.stdout)
@click.option('--debug',
help='Log debug output',
is_flag=True)
def main(acme, email, storage_dir, # ACME
marathon, listen, advertise, poll, # Marathon
logfile, debug): # Logging
"""
A tool to automatically request, renew and distribute Let's Encrypt
certificates for apps running on Seed Stack.
"""
<commit_msg>Remove imaginary event server CLI options<commit_after>
|
import click
import sys
@click.command()
@click.option('--acme',
help='The address for the ACME Directory Resource',
default='https://acme-v01.api.letsencrypt.org/directory',
show_default=True)
@click.option('--email',
help=("Email address for Let's Encrypt certificate registration "
"and recovery contact"),
required=True)
@click.option('--storage-dir',
help='Path to directory for storing certificates')
@click.option('--marathon', default='http://marathon.mesos:8080',
help='The address for the Marathon HTTP API',
show_default=True)
@click.option('--poll',
help=("Periodically check Marathon's state every _n_ seconds "
"[default: disabled]"),
type=int)
@click.option('--logfile',
help='Where to log output to [default: stdout]',
type=click.File('a'),
default=sys.stdout)
@click.option('--debug',
help='Log debug output',
is_flag=True)
def main(acme, email, storage_dir, # ACME/certificates
marathon, poll, # Marathon
logfile, debug): # Logging
"""
A tool to automatically request, renew and distribute Let's Encrypt
certificates for apps running on Seed Stack.
"""
|
import click
import sys
@click.command()
@click.option('--acme',
help='The address for the ACME Directory Resource',
default='https://acme-v01.api.letsencrypt.org/directory',
show_default=True)
@click.option('--email',
help=("Email address for Let's Encrypt certificate registration "
"and recovery contact"),
required=True)
@click.option('--storage-dir',
help='Path to directory for storing certificates')
@click.option('--marathon', default='http://marathon.mesos:8080',
help='The address for the Marathon HTTP API',
show_default=True)
@click.option('--listen',
help=("The address of the interface and port to bind to to "
"receive Marathon's event stream"),
default='0.0.0.0:7000',
show_default=True)
@click.option('--advertise', default='http://marathon-acme.marathon.mesos',
help=('The address to advertise to Marathon when registering '
'for the event stream'),
show_default=True)
@click.option('--poll',
help=("Periodically check Marathon's state every _n_ seconds "
"[default: disabled]"),
type=int)
@click.option('--logfile',
help='Where to log output to [default: stdout]',
type=click.File('a'),
default=sys.stdout)
@click.option('--debug',
help='Log debug output',
is_flag=True)
def main(acme, email, storage_dir, # ACME
marathon, listen, advertise, poll, # Marathon
logfile, debug): # Logging
"""
A tool to automatically request, renew and distribute Let's Encrypt
certificates for apps running on Seed Stack.
"""
Remove imaginary event server CLI optionsimport click
import sys
@click.command()
@click.option('--acme',
help='The address for the ACME Directory Resource',
default='https://acme-v01.api.letsencrypt.org/directory',
show_default=True)
@click.option('--email',
help=("Email address for Let's Encrypt certificate registration "
"and recovery contact"),
required=True)
@click.option('--storage-dir',
help='Path to directory for storing certificates')
@click.option('--marathon', default='http://marathon.mesos:8080',
help='The address for the Marathon HTTP API',
show_default=True)
@click.option('--poll',
help=("Periodically check Marathon's state every _n_ seconds "
"[default: disabled]"),
type=int)
@click.option('--logfile',
help='Where to log output to [default: stdout]',
type=click.File('a'),
default=sys.stdout)
@click.option('--debug',
help='Log debug output',
is_flag=True)
def main(acme, email, storage_dir, # ACME/certificates
marathon, poll, # Marathon
logfile, debug): # Logging
"""
A tool to automatically request, renew and distribute Let's Encrypt
certificates for apps running on Seed Stack.
"""
|
<commit_before>import click
import sys
@click.command()
@click.option('--acme',
help='The address for the ACME Directory Resource',
default='https://acme-v01.api.letsencrypt.org/directory',
show_default=True)
@click.option('--email',
help=("Email address for Let's Encrypt certificate registration "
"and recovery contact"),
required=True)
@click.option('--storage-dir',
help='Path to directory for storing certificates')
@click.option('--marathon', default='http://marathon.mesos:8080',
help='The address for the Marathon HTTP API',
show_default=True)
@click.option('--listen',
help=("The address of the interface and port to bind to to "
"receive Marathon's event stream"),
default='0.0.0.0:7000',
show_default=True)
@click.option('--advertise', default='http://marathon-acme.marathon.mesos',
help=('The address to advertise to Marathon when registering '
'for the event stream'),
show_default=True)
@click.option('--poll',
help=("Periodically check Marathon's state every _n_ seconds "
"[default: disabled]"),
type=int)
@click.option('--logfile',
help='Where to log output to [default: stdout]',
type=click.File('a'),
default=sys.stdout)
@click.option('--debug',
help='Log debug output',
is_flag=True)
def main(acme, email, storage_dir, # ACME
marathon, listen, advertise, poll, # Marathon
logfile, debug): # Logging
"""
A tool to automatically request, renew and distribute Let's Encrypt
certificates for apps running on Seed Stack.
"""
<commit_msg>Remove imaginary event server CLI options<commit_after>import click
import sys
@click.command()
@click.option('--acme',
help='The address for the ACME Directory Resource',
default='https://acme-v01.api.letsencrypt.org/directory',
show_default=True)
@click.option('--email',
help=("Email address for Let's Encrypt certificate registration "
"and recovery contact"),
required=True)
@click.option('--storage-dir',
help='Path to directory for storing certificates')
@click.option('--marathon', default='http://marathon.mesos:8080',
help='The address for the Marathon HTTP API',
show_default=True)
@click.option('--poll',
help=("Periodically check Marathon's state every _n_ seconds "
"[default: disabled]"),
type=int)
@click.option('--logfile',
help='Where to log output to [default: stdout]',
type=click.File('a'),
default=sys.stdout)
@click.option('--debug',
help='Log debug output',
is_flag=True)
def main(acme, email, storage_dir, # ACME/certificates
marathon, poll, # Marathon
logfile, debug): # Logging
"""
A tool to automatically request, renew and distribute Let's Encrypt
certificates for apps running on Seed Stack.
"""
|
86066890322e3c3654946a49c8d1cd2e1a1c2980
|
celery/tests/test_backends/__init__.py
|
celery/tests/test_backends/__init__.py
|
import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.pyredis import RedisBackend
class TestBackends(unittest.TestCase):
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("redis", RedisBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
|
import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.database import DatabaseBackend
class TestBackends(unittest.TestCase):
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("database", DatabaseBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
|
Test using DatabaseBackend instead of RedisBackend, as the latter requires the redis module to be installed.
|
tests.backends: Test using DatabaseBackend instead of RedisBackend, as the latter requires the redis module to be installed.
|
Python
|
bsd-3-clause
|
WoLpH/celery,ask/celery,mitsuhiko/celery,mitsuhiko/celery,WoLpH/celery,frac/celery,cbrepo/celery,cbrepo/celery,frac/celery,ask/celery
|
import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.pyredis import RedisBackend
class TestBackends(unittest.TestCase):
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("redis", RedisBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
tests.backends: Test using DatabaseBackend instead of RedisBackend, as the latter requires the redis module to be installed.
|
import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.database import DatabaseBackend
class TestBackends(unittest.TestCase):
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("database", DatabaseBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
|
<commit_before>import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.pyredis import RedisBackend
class TestBackends(unittest.TestCase):
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("redis", RedisBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
<commit_msg>tests.backends: Test using DatabaseBackend instead of RedisBackend, as the latter requires the redis module to be installed.<commit_after>
|
import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.database import DatabaseBackend
class TestBackends(unittest.TestCase):
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("database", DatabaseBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
|
import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.pyredis import RedisBackend
class TestBackends(unittest.TestCase):
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("redis", RedisBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
tests.backends: Test using DatabaseBackend instead of RedisBackend, as the latter requires the redis module to be installed.import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.database import DatabaseBackend
class TestBackends(unittest.TestCase):
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("database", DatabaseBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
|
<commit_before>import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.pyredis import RedisBackend
class TestBackends(unittest.TestCase):
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("redis", RedisBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
<commit_msg>tests.backends: Test using DatabaseBackend instead of RedisBackend, as the latter requires the redis module to be installed.<commit_after>import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.database import DatabaseBackend
class TestBackends(unittest.TestCase):
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("database", DatabaseBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
|
dc87c5d000f06e1618c7fbd0daae602131e0602e
|
commands/globaladd.py
|
commands/globaladd.py
|
from devbot import chat
def call(message: str, name, protocol, cfg, commands):
if ' ' in message:
chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name))
return
chat.say_wrap('/msg {}',
'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.'.format(
message))
chat.say('/nlip GlobalChat {}'.format(message))
|
from devbot import chat
def call(message: str, name, protocol, cfg, commands):
if ' ' in message:
chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name))
return
chat.say('/msg {} Invited {} to GlobalChat'.format(name, message))
chat.say_wrap('/msg {}'.format(message),
'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.')
chat.say('/nlip GlobalChat {}'.format(message))
|
Fix formatting issues with gadd
|
Fix formatting issues with gadd
|
Python
|
mit
|
Ameliorate/DevotedBot,Ameliorate/DevotedBot
|
from devbot import chat
def call(message: str, name, protocol, cfg, commands):
if ' ' in message:
chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name))
return
chat.say_wrap('/msg {}',
'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.'.format(
message))
chat.say('/nlip GlobalChat {}'.format(message))
Fix formatting issues with gadd
|
from devbot import chat
def call(message: str, name, protocol, cfg, commands):
if ' ' in message:
chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name))
return
chat.say('/msg {} Invited {} to GlobalChat'.format(name, message))
chat.say_wrap('/msg {}'.format(message),
'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.')
chat.say('/nlip GlobalChat {}'.format(message))
|
<commit_before>from devbot import chat
def call(message: str, name, protocol, cfg, commands):
if ' ' in message:
chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name))
return
chat.say_wrap('/msg {}',
'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.'.format(
message))
chat.say('/nlip GlobalChat {}'.format(message))
<commit_msg>Fix formatting issues with gadd<commit_after>
|
from devbot import chat
def call(message: str, name, protocol, cfg, commands):
if ' ' in message:
chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name))
return
chat.say('/msg {} Invited {} to GlobalChat'.format(name, message))
chat.say_wrap('/msg {}'.format(message),
'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.')
chat.say('/nlip GlobalChat {}'.format(message))
|
from devbot import chat
def call(message: str, name, protocol, cfg, commands):
if ' ' in message:
chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name))
return
chat.say_wrap('/msg {}',
'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.'.format(
message))
chat.say('/nlip GlobalChat {}'.format(message))
Fix formatting issues with gaddfrom devbot import chat
def call(message: str, name, protocol, cfg, commands):
if ' ' in message:
chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name))
return
chat.say('/msg {} Invited {} to GlobalChat'.format(name, message))
chat.say_wrap('/msg {}'.format(message),
'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.')
chat.say('/nlip GlobalChat {}'.format(message))
|
<commit_before>from devbot import chat
def call(message: str, name, protocol, cfg, commands):
if ' ' in message:
chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name))
return
chat.say_wrap('/msg {}',
'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.'.format(
message))
chat.say('/nlip GlobalChat {}'.format(message))
<commit_msg>Fix formatting issues with gadd<commit_after>from devbot import chat
def call(message: str, name, protocol, cfg, commands):
if ' ' in message:
chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name))
return
chat.say('/msg {} Invited {} to GlobalChat'.format(name, message))
chat.say_wrap('/msg {}'.format(message),
'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.')
chat.say('/nlip GlobalChat {}'.format(message))
|
551ab95d733ed89fb88c9d6b91659499e0efcd7a
|
twilix/app.py
|
twilix/app.py
|
from flask import Flask, request
from flask.ext.sqlalchemy import SQLAlchemy
from twilio import twiml
import subprocess
import os
from cmd import cmds
app = Flask(__name__)
#app.config.from_object('config')
db = SQLAlchemy(app)
ACCOUNT_SID = "" #os.environ['ACCOUNT_SID']
AUTH_TOKEN = "" #os.environ['AUTH_TOKEN']
APP_SID = "Twilix" #os.environ['APP_SID']
CALLER_ID = "+14389855700" #os.environ['CALLER_ID']
#CALLER_ID = "+17038955689" #os.environ['CALLER_ID']
#CALLER_ID = "+18175985398" #os.environ['CALLER_ID']
@app.route("/")
def index():
return "Hello, world"
@app.route('/sms', methods=['POST'])
def sms():
response = twiml.Response()
user_input = request.form['Body']
if '|' in user_input:
pass
else:
args = user_input.lower().split()
output = cmds[args[0]](args[1:])
response.sms(output)
return str(response)
if __name__ == "__main__":
#app.run(debug=True)
app.debug = True
app.run(host='0.0.0.0')
|
from flask import Flask, request
from flask.ext.sqlalchemy import SQLAlchemy
from twilio import twiml
import subprocess
import os
from cmd import cmds
app = Flask(__name__)
#app.config.from_object('config')
db = SQLAlchemy(app)
ACCOUNT_SID = os.environ['ACCOUNT_SID']
AUTH_TOKEN = os.environ['AUTH_TOKEN']
APP_SID = os.environ['APP_SID']
CALLER_ID = os.environ['CALLER_ID']
@app.route("/")
def index():
return "Hello, world"
@app.route('/sms', methods=['POST'])
def sms():
response = twiml.Response()
user_input = request.form['Body']
if '|' in user_input:
args = user_input.split('|')
for index, arg in enumerate(args):
args[index] = arg.lower().split()
output = cmds['pipe'](args)
else:
args = user_input.lower().split()
output = cmds[args[0]](args)
response.sms(output)
return str(response)
if __name__ == "__main__":
#app.run(debug=True)
app.debug = True
app.run(host='0.0.0.0')
|
Replace all keys/tokens/passwords by env variables
|
Replace all keys/tokens/passwords by env variables
|
Python
|
mit
|
ueg1990/twilix,ueg1990/twilix
|
from flask import Flask, request
from flask.ext.sqlalchemy import SQLAlchemy
from twilio import twiml
import subprocess
import os
from cmd import cmds
app = Flask(__name__)
#app.config.from_object('config')
db = SQLAlchemy(app)
ACCOUNT_SID = "" #os.environ['ACCOUNT_SID']
AUTH_TOKEN = "" #os.environ['AUTH_TOKEN']
APP_SID = "Twilix" #os.environ['APP_SID']
CALLER_ID = "+14389855700" #os.environ['CALLER_ID']
#CALLER_ID = "+17038955689" #os.environ['CALLER_ID']
#CALLER_ID = "+18175985398" #os.environ['CALLER_ID']
@app.route("/")
def index():
return "Hello, world"
@app.route('/sms', methods=['POST'])
def sms():
response = twiml.Response()
user_input = request.form['Body']
if '|' in user_input:
pass
else:
args = user_input.lower().split()
output = cmds[args[0]](args[1:])
response.sms(output)
return str(response)
if __name__ == "__main__":
#app.run(debug=True)
app.debug = True
app.run(host='0.0.0.0')
Replace all keys/tokens/passwords by env variables
|
from flask import Flask, request
from flask.ext.sqlalchemy import SQLAlchemy
from twilio import twiml
import subprocess
import os
from cmd import cmds
app = Flask(__name__)
#app.config.from_object('config')
db = SQLAlchemy(app)
ACCOUNT_SID = os.environ['ACCOUNT_SID']
AUTH_TOKEN = os.environ['AUTH_TOKEN']
APP_SID = os.environ['APP_SID']
CALLER_ID = os.environ['CALLER_ID']
@app.route("/")
def index():
return "Hello, world"
@app.route('/sms', methods=['POST'])
def sms():
response = twiml.Response()
user_input = request.form['Body']
if '|' in user_input:
args = user_input.split('|')
for index, arg in enumerate(args):
args[index] = arg.lower().split()
output = cmds['pipe'](args)
else:
args = user_input.lower().split()
output = cmds[args[0]](args)
response.sms(output)
return str(response)
if __name__ == "__main__":
#app.run(debug=True)
app.debug = True
app.run(host='0.0.0.0')
|
<commit_before>from flask import Flask, request
from flask.ext.sqlalchemy import SQLAlchemy
from twilio import twiml
import subprocess
import os
from cmd import cmds
app = Flask(__name__)
#app.config.from_object('config')
db = SQLAlchemy(app)
ACCOUNT_SID = "" #os.environ['ACCOUNT_SID']
AUTH_TOKEN = "" #os.environ['AUTH_TOKEN']
APP_SID = "Twilix" #os.environ['APP_SID']
CALLER_ID = "+14389855700" #os.environ['CALLER_ID']
#CALLER_ID = "+17038955689" #os.environ['CALLER_ID']
#CALLER_ID = "+18175985398" #os.environ['CALLER_ID']
@app.route("/")
def index():
return "Hello, world"
@app.route('/sms', methods=['POST'])
def sms():
response = twiml.Response()
user_input = request.form['Body']
if '|' in user_input:
pass
else:
args = user_input.lower().split()
output = cmds[args[0]](args[1:])
response.sms(output)
return str(response)
if __name__ == "__main__":
#app.run(debug=True)
app.debug = True
app.run(host='0.0.0.0')
<commit_msg>Replace all keys/tokens/passwords by env variables<commit_after>
|
from flask import Flask, request
from flask.ext.sqlalchemy import SQLAlchemy
from twilio import twiml
import subprocess
import os
from cmd import cmds
app = Flask(__name__)
#app.config.from_object('config')
db = SQLAlchemy(app)
ACCOUNT_SID = os.environ['ACCOUNT_SID']
AUTH_TOKEN = os.environ['AUTH_TOKEN']
APP_SID = os.environ['APP_SID']
CALLER_ID = os.environ['CALLER_ID']
@app.route("/")
def index():
return "Hello, world"
@app.route('/sms', methods=['POST'])
def sms():
response = twiml.Response()
user_input = request.form['Body']
if '|' in user_input:
args = user_input.split('|')
for index, arg in enumerate(args):
args[index] = arg.lower().split()
output = cmds['pipe'](args)
else:
args = user_input.lower().split()
output = cmds[args[0]](args)
response.sms(output)
return str(response)
if __name__ == "__main__":
#app.run(debug=True)
app.debug = True
app.run(host='0.0.0.0')
|
from flask import Flask, request
from flask.ext.sqlalchemy import SQLAlchemy
from twilio import twiml
import subprocess
import os
from cmd import cmds
app = Flask(__name__)
#app.config.from_object('config')
db = SQLAlchemy(app)
ACCOUNT_SID = "" #os.environ['ACCOUNT_SID']
AUTH_TOKEN = "" #os.environ['AUTH_TOKEN']
APP_SID = "Twilix" #os.environ['APP_SID']
CALLER_ID = "+14389855700" #os.environ['CALLER_ID']
#CALLER_ID = "+17038955689" #os.environ['CALLER_ID']
#CALLER_ID = "+18175985398" #os.environ['CALLER_ID']
@app.route("/")
def index():
return "Hello, world"
@app.route('/sms', methods=['POST'])
def sms():
response = twiml.Response()
user_input = request.form['Body']
if '|' in user_input:
pass
else:
args = user_input.lower().split()
output = cmds[args[0]](args[1:])
response.sms(output)
return str(response)
if __name__ == "__main__":
#app.run(debug=True)
app.debug = True
app.run(host='0.0.0.0')
Replace all keys/tokens/passwords by env variablesfrom flask import Flask, request
from flask.ext.sqlalchemy import SQLAlchemy
from twilio import twiml
import subprocess
import os
from cmd import cmds
app = Flask(__name__)
#app.config.from_object('config')
db = SQLAlchemy(app)
ACCOUNT_SID = os.environ['ACCOUNT_SID']
AUTH_TOKEN = os.environ['AUTH_TOKEN']
APP_SID = os.environ['APP_SID']
CALLER_ID = os.environ['CALLER_ID']
@app.route("/")
def index():
return "Hello, world"
@app.route('/sms', methods=['POST'])
def sms():
response = twiml.Response()
user_input = request.form['Body']
if '|' in user_input:
args = user_input.split('|')
for index, arg in enumerate(args):
args[index] = arg.lower().split()
output = cmds['pipe'](args)
else:
args = user_input.lower().split()
output = cmds[args[0]](args)
response.sms(output)
return str(response)
if __name__ == "__main__":
#app.run(debug=True)
app.debug = True
app.run(host='0.0.0.0')
|
<commit_before>from flask import Flask, request
from flask.ext.sqlalchemy import SQLAlchemy
from twilio import twiml
import subprocess
import os
from cmd import cmds
app = Flask(__name__)
#app.config.from_object('config')
db = SQLAlchemy(app)
ACCOUNT_SID = "" #os.environ['ACCOUNT_SID']
AUTH_TOKEN = "" #os.environ['AUTH_TOKEN']
APP_SID = "Twilix" #os.environ['APP_SID']
CALLER_ID = "+14389855700" #os.environ['CALLER_ID']
#CALLER_ID = "+17038955689" #os.environ['CALLER_ID']
#CALLER_ID = "+18175985398" #os.environ['CALLER_ID']
@app.route("/")
def index():
return "Hello, world"
@app.route('/sms', methods=['POST'])
def sms():
response = twiml.Response()
user_input = request.form['Body']
if '|' in user_input:
pass
else:
args = user_input.lower().split()
output = cmds[args[0]](args[1:])
response.sms(output)
return str(response)
if __name__ == "__main__":
#app.run(debug=True)
app.debug = True
app.run(host='0.0.0.0')
<commit_msg>Replace all keys/tokens/passwords by env variables<commit_after>from flask import Flask, request
from flask.ext.sqlalchemy import SQLAlchemy
from twilio import twiml
import subprocess
import os
from cmd import cmds
app = Flask(__name__)
#app.config.from_object('config')
db = SQLAlchemy(app)
ACCOUNT_SID = os.environ['ACCOUNT_SID']
AUTH_TOKEN = os.environ['AUTH_TOKEN']
APP_SID = os.environ['APP_SID']
CALLER_ID = os.environ['CALLER_ID']
@app.route("/")
def index():
return "Hello, world"
@app.route('/sms', methods=['POST'])
def sms():
response = twiml.Response()
user_input = request.form['Body']
if '|' in user_input:
args = user_input.split('|')
for index, arg in enumerate(args):
args[index] = arg.lower().split()
output = cmds['pipe'](args)
else:
args = user_input.lower().split()
output = cmds[args[0]](args)
response.sms(output)
return str(response)
if __name__ == "__main__":
#app.run(debug=True)
app.debug = True
app.run(host='0.0.0.0')
|
2913ab59af16fe1d2861c11fb12420b2bbd4a880
|
udp_client.py
|
udp_client.py
|
import socket
import time
UDP_IP = "127.0.0.1"
UDP_PORT = 1234
MESSAGES = ["start_horn", "stop_horn", "classic_destroy"]
for message in MESSAGES:
print "UDP target IP:", UDP_IP
print "UDP target port:", UDP_PORT
print "message:", message
sock = socket.socket(socket.AF_INET, # Internet
socket.SOCK_DGRAM) # UDP
sock.sendto(message, (UDP_IP, UDP_PORT))
time.sleep(5)
|
import socket
import time
UDP_IP = "127.0.0.1"
UDP_PORT = 1234
MESSAGES = ["start_horn", "classic_destroy", "stop_horn"]
for message in MESSAGES:
print "UDP target IP:", UDP_IP
print "UDP target port:", UDP_PORT
print "message:", message
sock = socket.socket(socket.AF_INET, # Internet
socket.SOCK_DGRAM) # UDP
sock.sendto(message, (UDP_IP, UDP_PORT))
time.sleep(5)
|
Put them in the right order
|
Put them in the right order
|
Python
|
mit
|
BenIanGifford/Key-Button,BenIanGifford/Key-Button
|
import socket
import time
UDP_IP = "127.0.0.1"
UDP_PORT = 1234
MESSAGES = ["start_horn", "stop_horn", "classic_destroy"]
for message in MESSAGES:
print "UDP target IP:", UDP_IP
print "UDP target port:", UDP_PORT
print "message:", message
sock = socket.socket(socket.AF_INET, # Internet
socket.SOCK_DGRAM) # UDP
sock.sendto(message, (UDP_IP, UDP_PORT))
time.sleep(5)Put them in the right order
|
import socket
import time
UDP_IP = "127.0.0.1"
UDP_PORT = 1234
MESSAGES = ["start_horn", "classic_destroy", "stop_horn"]
for message in MESSAGES:
print "UDP target IP:", UDP_IP
print "UDP target port:", UDP_PORT
print "message:", message
sock = socket.socket(socket.AF_INET, # Internet
socket.SOCK_DGRAM) # UDP
sock.sendto(message, (UDP_IP, UDP_PORT))
time.sleep(5)
|
<commit_before>import socket
import time
UDP_IP = "127.0.0.1"
UDP_PORT = 1234
MESSAGES = ["start_horn", "stop_horn", "classic_destroy"]
for message in MESSAGES:
print "UDP target IP:", UDP_IP
print "UDP target port:", UDP_PORT
print "message:", message
sock = socket.socket(socket.AF_INET, # Internet
socket.SOCK_DGRAM) # UDP
sock.sendto(message, (UDP_IP, UDP_PORT))
time.sleep(5)<commit_msg>Put them in the right order<commit_after>
|
import socket
import time
UDP_IP = "127.0.0.1"
UDP_PORT = 1234
MESSAGES = ["start_horn", "classic_destroy", "stop_horn"]
for message in MESSAGES:
print "UDP target IP:", UDP_IP
print "UDP target port:", UDP_PORT
print "message:", message
sock = socket.socket(socket.AF_INET, # Internet
socket.SOCK_DGRAM) # UDP
sock.sendto(message, (UDP_IP, UDP_PORT))
time.sleep(5)
|
import socket
import time
UDP_IP = "127.0.0.1"
UDP_PORT = 1234
MESSAGES = ["start_horn", "stop_horn", "classic_destroy"]
for message in MESSAGES:
print "UDP target IP:", UDP_IP
print "UDP target port:", UDP_PORT
print "message:", message
sock = socket.socket(socket.AF_INET, # Internet
socket.SOCK_DGRAM) # UDP
sock.sendto(message, (UDP_IP, UDP_PORT))
time.sleep(5)Put them in the right orderimport socket
import time
UDP_IP = "127.0.0.1"
UDP_PORT = 1234
MESSAGES = ["start_horn", "classic_destroy", "stop_horn"]
for message in MESSAGES:
print "UDP target IP:", UDP_IP
print "UDP target port:", UDP_PORT
print "message:", message
sock = socket.socket(socket.AF_INET, # Internet
socket.SOCK_DGRAM) # UDP
sock.sendto(message, (UDP_IP, UDP_PORT))
time.sleep(5)
|
<commit_before>import socket
import time
UDP_IP = "127.0.0.1"
UDP_PORT = 1234
MESSAGES = ["start_horn", "stop_horn", "classic_destroy"]
for message in MESSAGES:
print "UDP target IP:", UDP_IP
print "UDP target port:", UDP_PORT
print "message:", message
sock = socket.socket(socket.AF_INET, # Internet
socket.SOCK_DGRAM) # UDP
sock.sendto(message, (UDP_IP, UDP_PORT))
time.sleep(5)<commit_msg>Put them in the right order<commit_after>import socket
import time
UDP_IP = "127.0.0.1"
UDP_PORT = 1234
MESSAGES = ["start_horn", "classic_destroy", "stop_horn"]
for message in MESSAGES:
print "UDP target IP:", UDP_IP
print "UDP target port:", UDP_PORT
print "message:", message
sock = socket.socket(socket.AF_INET, # Internet
socket.SOCK_DGRAM) # UDP
sock.sendto(message, (UDP_IP, UDP_PORT))
time.sleep(5)
|
39a16e50ad5f4164aed6cce58fb828cc78a9e4f3
|
myhome/blog/tests.py
|
myhome/blog/tests.py
|
from django.test import SimpleTestCase, Client
from .models import BlogPost
class BlogTestCase(SimpleTestCase):
def setUp(self):
BlogPost.objects.create(
datetime='2014-01-01 12:00:00',
title='title',
content='content',
live=True)
def _test_get(self, url, *, ins=[], not_ins=[]):
g = self.client.get(url)
for in_ in ins:
self.assertContains(g, in_)
for nin_ in not_ins:
self.assertNotContains(g, nin_)
def _test_404(self, url):
g = self.client.get(url)
self.assertEqual(g.status_code, 404)
def test_view(self):
self._test_get('/blog/', ins=['title', 'content'], not_ins=['No Items'])
def test_view_one(self):
self._test_get('/blog/post/1/', ins=['title', 'content'])
def test_view_miss(self):
self._test_404('/blog/post/100/')
|
from test_base import MyHomeTest
from .models import BlogPost
class BlogTestCase(MyHomeTest):
def setUp(self):
BlogPost.objects.create(
datetime='2014-01-01T12:00:00Z',
title='livetitle',
content='livecontent',
live=True)
BlogPost.objects.create(
datetime='2014-01-01T12:00:00Z',
title='hiddentitle',
content='hiddencontent',
live=False)
def _test_404(self, url):
g = self.client.get(url)
self.assertEqual(g.status_code, 404)
def test_view(self):
self._test_get('/blog/', ins=['livetitle'], notins=['No Items', 'hiddentitle'])
def test_view_one(self):
self._test_get('/blog/post/1/', ins=['livetitle', 'livecontent'])
def test_view_one_nonlive(self):
self._test_404('/blog/post/2/')
def test_view_miss(self):
self._test_404('/blog/post/100/')
|
Adjust blog test to use the base class
|
Adjust blog test to use the base class
|
Python
|
mit
|
plumdog/myhome,plumdog/myhome,plumdog/myhome,plumdog/myhome
|
from django.test import SimpleTestCase, Client
from .models import BlogPost
class BlogTestCase(SimpleTestCase):
def setUp(self):
BlogPost.objects.create(
datetime='2014-01-01 12:00:00',
title='title',
content='content',
live=True)
def _test_get(self, url, *, ins=[], not_ins=[]):
g = self.client.get(url)
for in_ in ins:
self.assertContains(g, in_)
for nin_ in not_ins:
self.assertNotContains(g, nin_)
def _test_404(self, url):
g = self.client.get(url)
self.assertEqual(g.status_code, 404)
def test_view(self):
self._test_get('/blog/', ins=['title', 'content'], not_ins=['No Items'])
def test_view_one(self):
self._test_get('/blog/post/1/', ins=['title', 'content'])
def test_view_miss(self):
self._test_404('/blog/post/100/')
Adjust blog test to use the base class
|
from test_base import MyHomeTest
from .models import BlogPost
class BlogTestCase(MyHomeTest):
def setUp(self):
BlogPost.objects.create(
datetime='2014-01-01T12:00:00Z',
title='livetitle',
content='livecontent',
live=True)
BlogPost.objects.create(
datetime='2014-01-01T12:00:00Z',
title='hiddentitle',
content='hiddencontent',
live=False)
def _test_404(self, url):
g = self.client.get(url)
self.assertEqual(g.status_code, 404)
def test_view(self):
self._test_get('/blog/', ins=['livetitle'], notins=['No Items', 'hiddentitle'])
def test_view_one(self):
self._test_get('/blog/post/1/', ins=['livetitle', 'livecontent'])
def test_view_one_nonlive(self):
self._test_404('/blog/post/2/')
def test_view_miss(self):
self._test_404('/blog/post/100/')
|
<commit_before>from django.test import SimpleTestCase, Client
from .models import BlogPost
class BlogTestCase(SimpleTestCase):
def setUp(self):
BlogPost.objects.create(
datetime='2014-01-01 12:00:00',
title='title',
content='content',
live=True)
def _test_get(self, url, *, ins=[], not_ins=[]):
g = self.client.get(url)
for in_ in ins:
self.assertContains(g, in_)
for nin_ in not_ins:
self.assertNotContains(g, nin_)
def _test_404(self, url):
g = self.client.get(url)
self.assertEqual(g.status_code, 404)
def test_view(self):
self._test_get('/blog/', ins=['title', 'content'], not_ins=['No Items'])
def test_view_one(self):
self._test_get('/blog/post/1/', ins=['title', 'content'])
def test_view_miss(self):
self._test_404('/blog/post/100/')
<commit_msg>Adjust blog test to use the base class<commit_after>
|
from test_base import MyHomeTest
from .models import BlogPost
class BlogTestCase(MyHomeTest):
def setUp(self):
BlogPost.objects.create(
datetime='2014-01-01T12:00:00Z',
title='livetitle',
content='livecontent',
live=True)
BlogPost.objects.create(
datetime='2014-01-01T12:00:00Z',
title='hiddentitle',
content='hiddencontent',
live=False)
def _test_404(self, url):
g = self.client.get(url)
self.assertEqual(g.status_code, 404)
def test_view(self):
self._test_get('/blog/', ins=['livetitle'], notins=['No Items', 'hiddentitle'])
def test_view_one(self):
self._test_get('/blog/post/1/', ins=['livetitle', 'livecontent'])
def test_view_one_nonlive(self):
self._test_404('/blog/post/2/')
def test_view_miss(self):
self._test_404('/blog/post/100/')
|
from django.test import SimpleTestCase, Client
from .models import BlogPost
class BlogTestCase(SimpleTestCase):
def setUp(self):
BlogPost.objects.create(
datetime='2014-01-01 12:00:00',
title='title',
content='content',
live=True)
def _test_get(self, url, *, ins=[], not_ins=[]):
g = self.client.get(url)
for in_ in ins:
self.assertContains(g, in_)
for nin_ in not_ins:
self.assertNotContains(g, nin_)
def _test_404(self, url):
g = self.client.get(url)
self.assertEqual(g.status_code, 404)
def test_view(self):
self._test_get('/blog/', ins=['title', 'content'], not_ins=['No Items'])
def test_view_one(self):
self._test_get('/blog/post/1/', ins=['title', 'content'])
def test_view_miss(self):
self._test_404('/blog/post/100/')
Adjust blog test to use the base classfrom test_base import MyHomeTest
from .models import BlogPost
class BlogTestCase(MyHomeTest):
def setUp(self):
BlogPost.objects.create(
datetime='2014-01-01T12:00:00Z',
title='livetitle',
content='livecontent',
live=True)
BlogPost.objects.create(
datetime='2014-01-01T12:00:00Z',
title='hiddentitle',
content='hiddencontent',
live=False)
def _test_404(self, url):
g = self.client.get(url)
self.assertEqual(g.status_code, 404)
def test_view(self):
self._test_get('/blog/', ins=['livetitle'], notins=['No Items', 'hiddentitle'])
def test_view_one(self):
self._test_get('/blog/post/1/', ins=['livetitle', 'livecontent'])
def test_view_one_nonlive(self):
self._test_404('/blog/post/2/')
def test_view_miss(self):
self._test_404('/blog/post/100/')
|
<commit_before>from django.test import SimpleTestCase, Client
from .models import BlogPost
class BlogTestCase(SimpleTestCase):
def setUp(self):
BlogPost.objects.create(
datetime='2014-01-01 12:00:00',
title='title',
content='content',
live=True)
def _test_get(self, url, *, ins=[], not_ins=[]):
g = self.client.get(url)
for in_ in ins:
self.assertContains(g, in_)
for nin_ in not_ins:
self.assertNotContains(g, nin_)
def _test_404(self, url):
g = self.client.get(url)
self.assertEqual(g.status_code, 404)
def test_view(self):
self._test_get('/blog/', ins=['title', 'content'], not_ins=['No Items'])
def test_view_one(self):
self._test_get('/blog/post/1/', ins=['title', 'content'])
def test_view_miss(self):
self._test_404('/blog/post/100/')
<commit_msg>Adjust blog test to use the base class<commit_after>from test_base import MyHomeTest
from .models import BlogPost
class BlogTestCase(MyHomeTest):
def setUp(self):
BlogPost.objects.create(
datetime='2014-01-01T12:00:00Z',
title='livetitle',
content='livecontent',
live=True)
BlogPost.objects.create(
datetime='2014-01-01T12:00:00Z',
title='hiddentitle',
content='hiddencontent',
live=False)
def _test_404(self, url):
g = self.client.get(url)
self.assertEqual(g.status_code, 404)
def test_view(self):
self._test_get('/blog/', ins=['livetitle'], notins=['No Items', 'hiddentitle'])
def test_view_one(self):
self._test_get('/blog/post/1/', ins=['livetitle', 'livecontent'])
def test_view_one_nonlive(self):
self._test_404('/blog/post/2/')
def test_view_miss(self):
self._test_404('/blog/post/100/')
|
27923a5490e5e5d2c0503c84fd979b5af6bcba13
|
nn/tests/mlp_test.py
|
nn/tests/mlp_test.py
|
from sklearn.datasets import make_classification, make_regression
from sklearn.metrics import log_loss
from ..mlp import MLP
class TestFitting(object):
def __init__(self):
self.X_cl, self.y_cl = make_classification(100)
self.X_re, self.y_re = make_classification(100)
def test_if_fit_classification():
model = MLP()
model.fit(self.X_cl, self.y_cl)
assert(model.type_of_target_ == 'binary')
def test_if_fit_regression(self):
model = MLP()
model.fit(self.X_re, self.y_re)
assert(model.type_of_target_ == 'continuous')
def test_sigmoid(self):
model = MLP(hidden_layer_type='sigm')
model.fit(self.X_cl, self.y_cl)
def test_dropout(self):
model = MLP(dropout='True')
model.fit(self.X_cl, self.y_cl)
def test_accuracy(self):
model = MLP()
model.fit(self.X_cl[:50], self.y_cl[:50])
y_pred = model.predict_proba(self.X_cl[50:])
ll = log_loss(self.y_cl[50:], y_pred)
assert(ll < .05)
|
from sklearn.datasets import make_classification, make_regression
from sklearn.metrics import log_loss
from sklearn.preprocessing import StandardScaler
from ..mlp import MLP
class TestFitting(object):
def __init__(self):
sc = StandardScaler()
self.X_cl, self.y_cl = make_classification(100)
self.X_cl = sc.fit_transform(self.X_cl)
self.X_re, self.y_re = make_regression(100)
self.X_re = sc.fit_transform(self.X_re)
def test_if_fit_classification(self):
model = MLP()
model.fit(self.X_cl, self.y_cl)
assert(model.type_of_target_ == 'binary')
def test_if_fit_regression(self):
model = MLP()
model.fit(self.X_re, self.y_re)
assert(model.type_of_target_ == 'continuous')
def test_sigmoid(self):
model = MLP(hidden_layer_type='sigm')
model.fit(self.X_cl, self.y_cl)
def test_dropout(self):
model = MLP(dropout='True')
model.fit(self.X_cl, self.y_cl)
def test_accuracy(self):
model = MLP()
model.fit(self.X_cl[:50], self.y_cl[:50])
y_pred = model.predict_proba(self.X_cl[50:])
ll = log_loss(self.y_cl[50:], y_pred)
assert(ll < .05)
|
Scale inputs in test cases
|
Scale inputs in test cases
|
Python
|
mit
|
JakeMick/graymatter
|
from sklearn.datasets import make_classification, make_regression
from sklearn.metrics import log_loss
from ..mlp import MLP
class TestFitting(object):
def __init__(self):
self.X_cl, self.y_cl = make_classification(100)
self.X_re, self.y_re = make_classification(100)
def test_if_fit_classification():
model = MLP()
model.fit(self.X_cl, self.y_cl)
assert(model.type_of_target_ == 'binary')
def test_if_fit_regression(self):
model = MLP()
model.fit(self.X_re, self.y_re)
assert(model.type_of_target_ == 'continuous')
def test_sigmoid(self):
model = MLP(hidden_layer_type='sigm')
model.fit(self.X_cl, self.y_cl)
def test_dropout(self):
model = MLP(dropout='True')
model.fit(self.X_cl, self.y_cl)
def test_accuracy(self):
model = MLP()
model.fit(self.X_cl[:50], self.y_cl[:50])
y_pred = model.predict_proba(self.X_cl[50:])
ll = log_loss(self.y_cl[50:], y_pred)
assert(ll < .05)
Scale inputs in test cases
|
from sklearn.datasets import make_classification, make_regression
from sklearn.metrics import log_loss
from sklearn.preprocessing import StandardScaler
from ..mlp import MLP
class TestFitting(object):
def __init__(self):
sc = StandardScaler()
self.X_cl, self.y_cl = make_classification(100)
self.X_cl = sc.fit_transform(self.X_cl)
self.X_re, self.y_re = make_regression(100)
self.X_re = sc.fit_transform(self.X_re)
def test_if_fit_classification(self):
model = MLP()
model.fit(self.X_cl, self.y_cl)
assert(model.type_of_target_ == 'binary')
def test_if_fit_regression(self):
model = MLP()
model.fit(self.X_re, self.y_re)
assert(model.type_of_target_ == 'continuous')
def test_sigmoid(self):
model = MLP(hidden_layer_type='sigm')
model.fit(self.X_cl, self.y_cl)
def test_dropout(self):
model = MLP(dropout='True')
model.fit(self.X_cl, self.y_cl)
def test_accuracy(self):
model = MLP()
model.fit(self.X_cl[:50], self.y_cl[:50])
y_pred = model.predict_proba(self.X_cl[50:])
ll = log_loss(self.y_cl[50:], y_pred)
assert(ll < .05)
|
<commit_before>from sklearn.datasets import make_classification, make_regression
from sklearn.metrics import log_loss
from ..mlp import MLP
class TestFitting(object):
def __init__(self):
self.X_cl, self.y_cl = make_classification(100)
self.X_re, self.y_re = make_classification(100)
def test_if_fit_classification():
model = MLP()
model.fit(self.X_cl, self.y_cl)
assert(model.type_of_target_ == 'binary')
def test_if_fit_regression(self):
model = MLP()
model.fit(self.X_re, self.y_re)
assert(model.type_of_target_ == 'continuous')
def test_sigmoid(self):
model = MLP(hidden_layer_type='sigm')
model.fit(self.X_cl, self.y_cl)
def test_dropout(self):
model = MLP(dropout='True')
model.fit(self.X_cl, self.y_cl)
def test_accuracy(self):
model = MLP()
model.fit(self.X_cl[:50], self.y_cl[:50])
y_pred = model.predict_proba(self.X_cl[50:])
ll = log_loss(self.y_cl[50:], y_pred)
assert(ll < .05)
<commit_msg>Scale inputs in test cases<commit_after>
|
from sklearn.datasets import make_classification, make_regression
from sklearn.metrics import log_loss
from sklearn.preprocessing import StandardScaler
from ..mlp import MLP
class TestFitting(object):
def __init__(self):
sc = StandardScaler()
self.X_cl, self.y_cl = make_classification(100)
self.X_cl = sc.fit_transform(self.X_cl)
self.X_re, self.y_re = make_regression(100)
self.X_re = sc.fit_transform(self.X_re)
def test_if_fit_classification(self):
model = MLP()
model.fit(self.X_cl, self.y_cl)
assert(model.type_of_target_ == 'binary')
def test_if_fit_regression(self):
model = MLP()
model.fit(self.X_re, self.y_re)
assert(model.type_of_target_ == 'continuous')
def test_sigmoid(self):
model = MLP(hidden_layer_type='sigm')
model.fit(self.X_cl, self.y_cl)
def test_dropout(self):
model = MLP(dropout='True')
model.fit(self.X_cl, self.y_cl)
def test_accuracy(self):
model = MLP()
model.fit(self.X_cl[:50], self.y_cl[:50])
y_pred = model.predict_proba(self.X_cl[50:])
ll = log_loss(self.y_cl[50:], y_pred)
assert(ll < .05)
|
from sklearn.datasets import make_classification, make_regression
from sklearn.metrics import log_loss
from ..mlp import MLP
class TestFitting(object):
def __init__(self):
self.X_cl, self.y_cl = make_classification(100)
self.X_re, self.y_re = make_classification(100)
def test_if_fit_classification():
model = MLP()
model.fit(self.X_cl, self.y_cl)
assert(model.type_of_target_ == 'binary')
def test_if_fit_regression(self):
model = MLP()
model.fit(self.X_re, self.y_re)
assert(model.type_of_target_ == 'continuous')
def test_sigmoid(self):
model = MLP(hidden_layer_type='sigm')
model.fit(self.X_cl, self.y_cl)
def test_dropout(self):
model = MLP(dropout='True')
model.fit(self.X_cl, self.y_cl)
def test_accuracy(self):
model = MLP()
model.fit(self.X_cl[:50], self.y_cl[:50])
y_pred = model.predict_proba(self.X_cl[50:])
ll = log_loss(self.y_cl[50:], y_pred)
assert(ll < .05)
Scale inputs in test casesfrom sklearn.datasets import make_classification, make_regression
from sklearn.metrics import log_loss
from sklearn.preprocessing import StandardScaler
from ..mlp import MLP
class TestFitting(object):
def __init__(self):
sc = StandardScaler()
self.X_cl, self.y_cl = make_classification(100)
self.X_cl = sc.fit_transform(self.X_cl)
self.X_re, self.y_re = make_regression(100)
self.X_re = sc.fit_transform(self.X_re)
def test_if_fit_classification(self):
model = MLP()
model.fit(self.X_cl, self.y_cl)
assert(model.type_of_target_ == 'binary')
def test_if_fit_regression(self):
model = MLP()
model.fit(self.X_re, self.y_re)
assert(model.type_of_target_ == 'continuous')
def test_sigmoid(self):
model = MLP(hidden_layer_type='sigm')
model.fit(self.X_cl, self.y_cl)
def test_dropout(self):
model = MLP(dropout='True')
model.fit(self.X_cl, self.y_cl)
def test_accuracy(self):
model = MLP()
model.fit(self.X_cl[:50], self.y_cl[:50])
y_pred = model.predict_proba(self.X_cl[50:])
ll = log_loss(self.y_cl[50:], y_pred)
assert(ll < .05)
|
<commit_before>from sklearn.datasets import make_classification, make_regression
from sklearn.metrics import log_loss
from ..mlp import MLP
class TestFitting(object):
def __init__(self):
self.X_cl, self.y_cl = make_classification(100)
self.X_re, self.y_re = make_classification(100)
def test_if_fit_classification():
model = MLP()
model.fit(self.X_cl, self.y_cl)
assert(model.type_of_target_ == 'binary')
def test_if_fit_regression(self):
model = MLP()
model.fit(self.X_re, self.y_re)
assert(model.type_of_target_ == 'continuous')
def test_sigmoid(self):
model = MLP(hidden_layer_type='sigm')
model.fit(self.X_cl, self.y_cl)
def test_dropout(self):
model = MLP(dropout='True')
model.fit(self.X_cl, self.y_cl)
def test_accuracy(self):
model = MLP()
model.fit(self.X_cl[:50], self.y_cl[:50])
y_pred = model.predict_proba(self.X_cl[50:])
ll = log_loss(self.y_cl[50:], y_pred)
assert(ll < .05)
<commit_msg>Scale inputs in test cases<commit_after>from sklearn.datasets import make_classification, make_regression
from sklearn.metrics import log_loss
from sklearn.preprocessing import StandardScaler
from ..mlp import MLP
class TestFitting(object):
def __init__(self):
sc = StandardScaler()
self.X_cl, self.y_cl = make_classification(100)
self.X_cl = sc.fit_transform(self.X_cl)
self.X_re, self.y_re = make_regression(100)
self.X_re = sc.fit_transform(self.X_re)
def test_if_fit_classification(self):
model = MLP()
model.fit(self.X_cl, self.y_cl)
assert(model.type_of_target_ == 'binary')
def test_if_fit_regression(self):
model = MLP()
model.fit(self.X_re, self.y_re)
assert(model.type_of_target_ == 'continuous')
def test_sigmoid(self):
model = MLP(hidden_layer_type='sigm')
model.fit(self.X_cl, self.y_cl)
def test_dropout(self):
model = MLP(dropout='True')
model.fit(self.X_cl, self.y_cl)
def test_accuracy(self):
model = MLP()
model.fit(self.X_cl[:50], self.y_cl[:50])
y_pred = model.predict_proba(self.X_cl[50:])
ll = log_loss(self.y_cl[50:], y_pred)
assert(ll < .05)
|
9667e0c6737334ca8ceb4347792e3df39ae52b3a
|
app/errors.py
|
app/errors.py
|
import bugsnag
from aiohttp.client_exceptions import ClientPayloadError
from sanic.exceptions import MethodNotSupported, NotFound
from sanic.handlers import ErrorHandler
from . import settings
IGNORED_EXCEPTIONS = (NotFound, MethodNotSupported, ClientPayloadError)
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_stage=settings.RELEASE_STAGE,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
if self._should_notify(exception):
bugsnag.notify(exception, meta_data={"request": request.url})
return super().default(request, exception)
def _should_notify(self, exception) -> bool:
if not settings.BUGSNAG_API_KEY:
return False
if isinstance(exception, IGNORED_EXCEPTIONS):
return False
return True
|
import bugsnag
from aiohttp.client_exceptions import ClientPayloadError
from sanic.exceptions import MethodNotSupported, NotFound
from sanic.handlers import ErrorHandler
from . import settings
IGNORED_EXCEPTIONS = (NotFound, MethodNotSupported, ClientPayloadError)
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_stage=settings.RELEASE_STAGE,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
if self._should_notify(exception):
bugsnag.notify(exception, meta_data={"request": request.url})
return super().default(request, exception)
@staticmethod
def _should_notify(exception) -> bool:
if not settings.BUGSNAG_API_KEY:
return False
if isinstance(exception, IGNORED_EXCEPTIONS):
return False
return True
|
Change methods not using its bound instance to staticmethods
|
Change methods not using its bound instance to staticmethods
Signed-off-by: shubhendra <c6e99d273726c3152867e7854a0feab4e6ae83c6@gmail.com>
|
Python
|
mit
|
jacebrowning/memegen,jacebrowning/memegen
|
import bugsnag
from aiohttp.client_exceptions import ClientPayloadError
from sanic.exceptions import MethodNotSupported, NotFound
from sanic.handlers import ErrorHandler
from . import settings
IGNORED_EXCEPTIONS = (NotFound, MethodNotSupported, ClientPayloadError)
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_stage=settings.RELEASE_STAGE,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
if self._should_notify(exception):
bugsnag.notify(exception, meta_data={"request": request.url})
return super().default(request, exception)
def _should_notify(self, exception) -> bool:
if not settings.BUGSNAG_API_KEY:
return False
if isinstance(exception, IGNORED_EXCEPTIONS):
return False
return True
Change methods not using its bound instance to staticmethods
Signed-off-by: shubhendra <c6e99d273726c3152867e7854a0feab4e6ae83c6@gmail.com>
|
import bugsnag
from aiohttp.client_exceptions import ClientPayloadError
from sanic.exceptions import MethodNotSupported, NotFound
from sanic.handlers import ErrorHandler
from . import settings
IGNORED_EXCEPTIONS = (NotFound, MethodNotSupported, ClientPayloadError)
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_stage=settings.RELEASE_STAGE,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
if self._should_notify(exception):
bugsnag.notify(exception, meta_data={"request": request.url})
return super().default(request, exception)
@staticmethod
def _should_notify(exception) -> bool:
if not settings.BUGSNAG_API_KEY:
return False
if isinstance(exception, IGNORED_EXCEPTIONS):
return False
return True
|
<commit_before>import bugsnag
from aiohttp.client_exceptions import ClientPayloadError
from sanic.exceptions import MethodNotSupported, NotFound
from sanic.handlers import ErrorHandler
from . import settings
IGNORED_EXCEPTIONS = (NotFound, MethodNotSupported, ClientPayloadError)
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_stage=settings.RELEASE_STAGE,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
if self._should_notify(exception):
bugsnag.notify(exception, meta_data={"request": request.url})
return super().default(request, exception)
def _should_notify(self, exception) -> bool:
if not settings.BUGSNAG_API_KEY:
return False
if isinstance(exception, IGNORED_EXCEPTIONS):
return False
return True
<commit_msg>Change methods not using its bound instance to staticmethods
Signed-off-by: shubhendra <c6e99d273726c3152867e7854a0feab4e6ae83c6@gmail.com><commit_after>
|
import bugsnag
from aiohttp.client_exceptions import ClientPayloadError
from sanic.exceptions import MethodNotSupported, NotFound
from sanic.handlers import ErrorHandler
from . import settings
IGNORED_EXCEPTIONS = (NotFound, MethodNotSupported, ClientPayloadError)
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_stage=settings.RELEASE_STAGE,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
if self._should_notify(exception):
bugsnag.notify(exception, meta_data={"request": request.url})
return super().default(request, exception)
@staticmethod
def _should_notify(exception) -> bool:
if not settings.BUGSNAG_API_KEY:
return False
if isinstance(exception, IGNORED_EXCEPTIONS):
return False
return True
|
import bugsnag
from aiohttp.client_exceptions import ClientPayloadError
from sanic.exceptions import MethodNotSupported, NotFound
from sanic.handlers import ErrorHandler
from . import settings
IGNORED_EXCEPTIONS = (NotFound, MethodNotSupported, ClientPayloadError)
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_stage=settings.RELEASE_STAGE,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
if self._should_notify(exception):
bugsnag.notify(exception, meta_data={"request": request.url})
return super().default(request, exception)
def _should_notify(self, exception) -> bool:
if not settings.BUGSNAG_API_KEY:
return False
if isinstance(exception, IGNORED_EXCEPTIONS):
return False
return True
Change methods not using its bound instance to staticmethods
Signed-off-by: shubhendra <c6e99d273726c3152867e7854a0feab4e6ae83c6@gmail.com>import bugsnag
from aiohttp.client_exceptions import ClientPayloadError
from sanic.exceptions import MethodNotSupported, NotFound
from sanic.handlers import ErrorHandler
from . import settings
IGNORED_EXCEPTIONS = (NotFound, MethodNotSupported, ClientPayloadError)
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_stage=settings.RELEASE_STAGE,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
if self._should_notify(exception):
bugsnag.notify(exception, meta_data={"request": request.url})
return super().default(request, exception)
@staticmethod
def _should_notify(exception) -> bool:
if not settings.BUGSNAG_API_KEY:
return False
if isinstance(exception, IGNORED_EXCEPTIONS):
return False
return True
|
<commit_before>import bugsnag
from aiohttp.client_exceptions import ClientPayloadError
from sanic.exceptions import MethodNotSupported, NotFound
from sanic.handlers import ErrorHandler
from . import settings
IGNORED_EXCEPTIONS = (NotFound, MethodNotSupported, ClientPayloadError)
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_stage=settings.RELEASE_STAGE,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
if self._should_notify(exception):
bugsnag.notify(exception, meta_data={"request": request.url})
return super().default(request, exception)
def _should_notify(self, exception) -> bool:
if not settings.BUGSNAG_API_KEY:
return False
if isinstance(exception, IGNORED_EXCEPTIONS):
return False
return True
<commit_msg>Change methods not using its bound instance to staticmethods
Signed-off-by: shubhendra <c6e99d273726c3152867e7854a0feab4e6ae83c6@gmail.com><commit_after>import bugsnag
from aiohttp.client_exceptions import ClientPayloadError
from sanic.exceptions import MethodNotSupported, NotFound
from sanic.handlers import ErrorHandler
from . import settings
IGNORED_EXCEPTIONS = (NotFound, MethodNotSupported, ClientPayloadError)
bugsnag.configure(
api_key=settings.BUGSNAG_API_KEY,
project_root="/app",
release_stage=settings.RELEASE_STAGE,
)
class BugsnagErrorHandler(ErrorHandler):
def default(self, request, exception):
if self._should_notify(exception):
bugsnag.notify(exception, meta_data={"request": request.url})
return super().default(request, exception)
@staticmethod
def _should_notify(exception) -> bool:
if not settings.BUGSNAG_API_KEY:
return False
if isinstance(exception, IGNORED_EXCEPTIONS):
return False
return True
|
5432ba5b64902f3dd2b491504d06af8237926e37
|
aws/awslib.py
|
aws/awslib.py
|
import boto3
def await_volume(client, volumeId, waitingState, finishedState):
while True:
volumes = client.describe_volumes(VolumeIds=[volumeId])
state = volumes['Volumes'][0]['State']
if state != waitingState:
break
if state != finishedState:
print 'Unexpected volume state (expected {}): {}'.format(finishedState, volumes)
sys.exit(1)
def await_instance(client, instanceId, waitingState, finishedState):
while True:
instances = client.describe_instances(InstanceIds=[instanceId])
state = instances['Reservations'][0]['Instances'][0]['State']['Name']
if waitingState and state != waitingState:
break
if state == finishedState:
break
if state != finishedState:
print 'Unexpected instance state (expected {}): {}'.format(finishedState, instances)
sys.exit(1)
|
import boto3
import time
def await_volume(client, volumeId, waitingState, finishedState):
while True:
volumes = client.describe_volumes(VolumeIds=[volumeId])
state = volumes['Volumes'][0]['State']
if state != waitingState:
break
time.sleep(1)
if state != finishedState:
print 'Unexpected volume state (expected {}): {}'.format(finishedState, volumes)
sys.exit(1)
def await_instance(client, instanceId, waitingState, finishedState):
while True:
instances = client.describe_instances(InstanceIds=[instanceId])
state = instances['Reservations'][0]['Instances'][0]['State']['Name']
if waitingState and state != waitingState:
break
if state == finishedState:
break
time.sleep(1)
if state != finishedState:
print 'Unexpected instance state (expected {}): {}'.format(finishedState, instances)
sys.exit(1)
|
Add sleep calls for AWS loops
|
Add sleep calls for AWS loops
|
Python
|
mpl-2.0
|
bill-mccloskey/searchfox,bill-mccloskey/mozsearch,bill-mccloskey/mozsearch,bill-mccloskey/searchfox,bill-mccloskey/mozsearch,bill-mccloskey/mozsearch,bill-mccloskey/searchfox,bill-mccloskey/searchfox,bill-mccloskey/mozsearch,bill-mccloskey/mozsearch,bill-mccloskey/searchfox,bill-mccloskey/searchfox
|
import boto3
def await_volume(client, volumeId, waitingState, finishedState):
while True:
volumes = client.describe_volumes(VolumeIds=[volumeId])
state = volumes['Volumes'][0]['State']
if state != waitingState:
break
if state != finishedState:
print 'Unexpected volume state (expected {}): {}'.format(finishedState, volumes)
sys.exit(1)
def await_instance(client, instanceId, waitingState, finishedState):
while True:
instances = client.describe_instances(InstanceIds=[instanceId])
state = instances['Reservations'][0]['Instances'][0]['State']['Name']
if waitingState and state != waitingState:
break
if state == finishedState:
break
if state != finishedState:
print 'Unexpected instance state (expected {}): {}'.format(finishedState, instances)
sys.exit(1)
Add sleep calls for AWS loops
|
import boto3
import time
def await_volume(client, volumeId, waitingState, finishedState):
while True:
volumes = client.describe_volumes(VolumeIds=[volumeId])
state = volumes['Volumes'][0]['State']
if state != waitingState:
break
time.sleep(1)
if state != finishedState:
print 'Unexpected volume state (expected {}): {}'.format(finishedState, volumes)
sys.exit(1)
def await_instance(client, instanceId, waitingState, finishedState):
while True:
instances = client.describe_instances(InstanceIds=[instanceId])
state = instances['Reservations'][0]['Instances'][0]['State']['Name']
if waitingState and state != waitingState:
break
if state == finishedState:
break
time.sleep(1)
if state != finishedState:
print 'Unexpected instance state (expected {}): {}'.format(finishedState, instances)
sys.exit(1)
|
<commit_before>import boto3
def await_volume(client, volumeId, waitingState, finishedState):
while True:
volumes = client.describe_volumes(VolumeIds=[volumeId])
state = volumes['Volumes'][0]['State']
if state != waitingState:
break
if state != finishedState:
print 'Unexpected volume state (expected {}): {}'.format(finishedState, volumes)
sys.exit(1)
def await_instance(client, instanceId, waitingState, finishedState):
while True:
instances = client.describe_instances(InstanceIds=[instanceId])
state = instances['Reservations'][0]['Instances'][0]['State']['Name']
if waitingState and state != waitingState:
break
if state == finishedState:
break
if state != finishedState:
print 'Unexpected instance state (expected {}): {}'.format(finishedState, instances)
sys.exit(1)
<commit_msg>Add sleep calls for AWS loops<commit_after>
|
import boto3
import time
def await_volume(client, volumeId, waitingState, finishedState):
while True:
volumes = client.describe_volumes(VolumeIds=[volumeId])
state = volumes['Volumes'][0]['State']
if state != waitingState:
break
time.sleep(1)
if state != finishedState:
print 'Unexpected volume state (expected {}): {}'.format(finishedState, volumes)
sys.exit(1)
def await_instance(client, instanceId, waitingState, finishedState):
while True:
instances = client.describe_instances(InstanceIds=[instanceId])
state = instances['Reservations'][0]['Instances'][0]['State']['Name']
if waitingState and state != waitingState:
break
if state == finishedState:
break
time.sleep(1)
if state != finishedState:
print 'Unexpected instance state (expected {}): {}'.format(finishedState, instances)
sys.exit(1)
|
import boto3
def await_volume(client, volumeId, waitingState, finishedState):
while True:
volumes = client.describe_volumes(VolumeIds=[volumeId])
state = volumes['Volumes'][0]['State']
if state != waitingState:
break
if state != finishedState:
print 'Unexpected volume state (expected {}): {}'.format(finishedState, volumes)
sys.exit(1)
def await_instance(client, instanceId, waitingState, finishedState):
while True:
instances = client.describe_instances(InstanceIds=[instanceId])
state = instances['Reservations'][0]['Instances'][0]['State']['Name']
if waitingState and state != waitingState:
break
if state == finishedState:
break
if state != finishedState:
print 'Unexpected instance state (expected {}): {}'.format(finishedState, instances)
sys.exit(1)
Add sleep calls for AWS loopsimport boto3
import time
def await_volume(client, volumeId, waitingState, finishedState):
while True:
volumes = client.describe_volumes(VolumeIds=[volumeId])
state = volumes['Volumes'][0]['State']
if state != waitingState:
break
time.sleep(1)
if state != finishedState:
print 'Unexpected volume state (expected {}): {}'.format(finishedState, volumes)
sys.exit(1)
def await_instance(client, instanceId, waitingState, finishedState):
while True:
instances = client.describe_instances(InstanceIds=[instanceId])
state = instances['Reservations'][0]['Instances'][0]['State']['Name']
if waitingState and state != waitingState:
break
if state == finishedState:
break
time.sleep(1)
if state != finishedState:
print 'Unexpected instance state (expected {}): {}'.format(finishedState, instances)
sys.exit(1)
|
<commit_before>import boto3
def await_volume(client, volumeId, waitingState, finishedState):
while True:
volumes = client.describe_volumes(VolumeIds=[volumeId])
state = volumes['Volumes'][0]['State']
if state != waitingState:
break
if state != finishedState:
print 'Unexpected volume state (expected {}): {}'.format(finishedState, volumes)
sys.exit(1)
def await_instance(client, instanceId, waitingState, finishedState):
while True:
instances = client.describe_instances(InstanceIds=[instanceId])
state = instances['Reservations'][0]['Instances'][0]['State']['Name']
if waitingState and state != waitingState:
break
if state == finishedState:
break
if state != finishedState:
print 'Unexpected instance state (expected {}): {}'.format(finishedState, instances)
sys.exit(1)
<commit_msg>Add sleep calls for AWS loops<commit_after>import boto3
import time
def await_volume(client, volumeId, waitingState, finishedState):
while True:
volumes = client.describe_volumes(VolumeIds=[volumeId])
state = volumes['Volumes'][0]['State']
if state != waitingState:
break
time.sleep(1)
if state != finishedState:
print 'Unexpected volume state (expected {}): {}'.format(finishedState, volumes)
sys.exit(1)
def await_instance(client, instanceId, waitingState, finishedState):
while True:
instances = client.describe_instances(InstanceIds=[instanceId])
state = instances['Reservations'][0]['Instances'][0]['State']['Name']
if waitingState and state != waitingState:
break
if state == finishedState:
break
time.sleep(1)
if state != finishedState:
print 'Unexpected instance state (expected {}): {}'.format(finishedState, instances)
sys.exit(1)
|
2bcf457d03610b5b4ade891446b7645721b74480
|
sum.py
|
sum.py
|
import sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
sum_view = self.view.window().new_file()
sum_view.set_name('Sum')
file_text = self.view.substr(sublime.Region(0, self.view.size()))
numbers = []
for s in file_text.split():
try:
numbers.append(int(s))
except ValueError:
try:
numbers.append(float(s))
except ValueError:
pass
result = sum(numbers)
sum_view.insert(edit, 0, str(result))
sum_view.set_read_only(True)
sum_view.set_scratch(True)
|
import sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
sum_view = self.view.window().new_file()
sum_view.set_name('Sum')
file_text = self.view.substr(sublime.Region(0, self.view.size()))
numbers = [to_number(s) for s in file_text.split() if is_number(s)]
result = sum(numbers)
sum_view.insert(edit, 0, str(result))
sum_view.set_read_only(True)
sum_view.set_scratch(True)
def is_int(s):
"""Return boolean indicating whether a string can be parsed to an int."""
try:
int(s)
return True
except ValueError:
return False
def is_float(s):
"""Return boolean indicating whether a string can be parsed to an float."""
try:
float(s)
return True
except ValueError:
return False
def is_number(s):
"""Return boolean indicating whether a string can be parsed to an int or float."""
return is_int(s) or is_float(s)
def to_number(s):
"""
Parse and return number from string.
Return float only if number is not an int. Assume number can be parsed from string.
"""
try:
return int(s)
except ValueError:
return float(s)
|
Refactor using functions and a list comprehension
|
Refactor using functions and a list comprehension
|
Python
|
mit
|
jbrudvik/sublime-sum,jbrudvik/sublime-sum
|
import sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
sum_view = self.view.window().new_file()
sum_view.set_name('Sum')
file_text = self.view.substr(sublime.Region(0, self.view.size()))
numbers = []
for s in file_text.split():
try:
numbers.append(int(s))
except ValueError:
try:
numbers.append(float(s))
except ValueError:
pass
result = sum(numbers)
sum_view.insert(edit, 0, str(result))
sum_view.set_read_only(True)
sum_view.set_scratch(True)
Refactor using functions and a list comprehension
|
import sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
sum_view = self.view.window().new_file()
sum_view.set_name('Sum')
file_text = self.view.substr(sublime.Region(0, self.view.size()))
numbers = [to_number(s) for s in file_text.split() if is_number(s)]
result = sum(numbers)
sum_view.insert(edit, 0, str(result))
sum_view.set_read_only(True)
sum_view.set_scratch(True)
def is_int(s):
"""Return boolean indicating whether a string can be parsed to an int."""
try:
int(s)
return True
except ValueError:
return False
def is_float(s):
"""Return boolean indicating whether a string can be parsed to an float."""
try:
float(s)
return True
except ValueError:
return False
def is_number(s):
"""Return boolean indicating whether a string can be parsed to an int or float."""
return is_int(s) or is_float(s)
def to_number(s):
"""
Parse and return number from string.
Return float only if number is not an int. Assume number can be parsed from string.
"""
try:
return int(s)
except ValueError:
return float(s)
|
<commit_before>import sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
sum_view = self.view.window().new_file()
sum_view.set_name('Sum')
file_text = self.view.substr(sublime.Region(0, self.view.size()))
numbers = []
for s in file_text.split():
try:
numbers.append(int(s))
except ValueError:
try:
numbers.append(float(s))
except ValueError:
pass
result = sum(numbers)
sum_view.insert(edit, 0, str(result))
sum_view.set_read_only(True)
sum_view.set_scratch(True)
<commit_msg>Refactor using functions and a list comprehension<commit_after>
|
import sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
sum_view = self.view.window().new_file()
sum_view.set_name('Sum')
file_text = self.view.substr(sublime.Region(0, self.view.size()))
numbers = [to_number(s) for s in file_text.split() if is_number(s)]
result = sum(numbers)
sum_view.insert(edit, 0, str(result))
sum_view.set_read_only(True)
sum_view.set_scratch(True)
def is_int(s):
"""Return boolean indicating whether a string can be parsed to an int."""
try:
int(s)
return True
except ValueError:
return False
def is_float(s):
"""Return boolean indicating whether a string can be parsed to an float."""
try:
float(s)
return True
except ValueError:
return False
def is_number(s):
"""Return boolean indicating whether a string can be parsed to an int or float."""
return is_int(s) or is_float(s)
def to_number(s):
"""
Parse and return number from string.
Return float only if number is not an int. Assume number can be parsed from string.
"""
try:
return int(s)
except ValueError:
return float(s)
|
import sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
sum_view = self.view.window().new_file()
sum_view.set_name('Sum')
file_text = self.view.substr(sublime.Region(0, self.view.size()))
numbers = []
for s in file_text.split():
try:
numbers.append(int(s))
except ValueError:
try:
numbers.append(float(s))
except ValueError:
pass
result = sum(numbers)
sum_view.insert(edit, 0, str(result))
sum_view.set_read_only(True)
sum_view.set_scratch(True)
Refactor using functions and a list comprehensionimport sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
sum_view = self.view.window().new_file()
sum_view.set_name('Sum')
file_text = self.view.substr(sublime.Region(0, self.view.size()))
numbers = [to_number(s) for s in file_text.split() if is_number(s)]
result = sum(numbers)
sum_view.insert(edit, 0, str(result))
sum_view.set_read_only(True)
sum_view.set_scratch(True)
def is_int(s):
"""Return boolean indicating whether a string can be parsed to an int."""
try:
int(s)
return True
except ValueError:
return False
def is_float(s):
"""Return boolean indicating whether a string can be parsed to an float."""
try:
float(s)
return True
except ValueError:
return False
def is_number(s):
"""Return boolean indicating whether a string can be parsed to an int or float."""
return is_int(s) or is_float(s)
def to_number(s):
"""
Parse and return number from string.
Return float only if number is not an int. Assume number can be parsed from string.
"""
try:
return int(s)
except ValueError:
return float(s)
|
<commit_before>import sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
sum_view = self.view.window().new_file()
sum_view.set_name('Sum')
file_text = self.view.substr(sublime.Region(0, self.view.size()))
numbers = []
for s in file_text.split():
try:
numbers.append(int(s))
except ValueError:
try:
numbers.append(float(s))
except ValueError:
pass
result = sum(numbers)
sum_view.insert(edit, 0, str(result))
sum_view.set_read_only(True)
sum_view.set_scratch(True)
<commit_msg>Refactor using functions and a list comprehension<commit_after>import sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
sum_view = self.view.window().new_file()
sum_view.set_name('Sum')
file_text = self.view.substr(sublime.Region(0, self.view.size()))
numbers = [to_number(s) for s in file_text.split() if is_number(s)]
result = sum(numbers)
sum_view.insert(edit, 0, str(result))
sum_view.set_read_only(True)
sum_view.set_scratch(True)
def is_int(s):
"""Return boolean indicating whether a string can be parsed to an int."""
try:
int(s)
return True
except ValueError:
return False
def is_float(s):
"""Return boolean indicating whether a string can be parsed to an float."""
try:
float(s)
return True
except ValueError:
return False
def is_number(s):
"""Return boolean indicating whether a string can be parsed to an int or float."""
return is_int(s) or is_float(s)
def to_number(s):
"""
Parse and return number from string.
Return float only if number is not an int. Assume number can be parsed from string.
"""
try:
return int(s)
except ValueError:
return float(s)
|
49af73f2903580d55093e8e001585010fb3a3c46
|
locarise_drf_oauth2_support/users/factories.py
|
locarise_drf_oauth2_support/users/factories.py
|
from datetime import timedelta
from django.utils import timezone
from locarise_drf_oauth2_support.users.models import User
try:
import factory
class UserF(factory.DjangoModelFactory):
first_name = factory.Sequence(lambda n: "first_name%s" % n)
last_name = factory.Sequence(lambda n: "last_name%s" % n)
email = factory.Sequence(lambda n: "email%s@example.com" % n)
is_staff = False
is_active = True
is_superuser = False
last_login = timezone.now() - timedelta(days=2)
password = factory.PostGenerationMethodCall('set_password', 'pass')
class Meta:
model = User
except ImportError: # pragma: no cover
pass
|
from datetime import timedelta
from django.utils import timezone
from locarise_drf_oauth2_support.users.models import User
try:
import factory
class UserF(factory.DjangoModelFactory):
first_name = factory.Sequence(lambda n: "first_name%s" % n)
last_name = factory.Sequence(lambda n: "last_name%s" % n)
email = factory.Sequence(lambda n: "email%s@example.com" % n)
is_staff = False
is_active = True
is_superuser = False
last_login = timezone.now() - timedelta(days=2)
password = factory.PostGenerationMethodCall('set_password', 'pass')
organization_set = [{
"uid": "6tbgzDKyZYLCMzDarN7ga8",
"name": "Organization Demo",
"role": "organization-manager",
"is_active": True
}]
class Meta:
model = User
except ImportError: # pragma: no cover
pass
|
Add organization_set field in UserF
|
Add organization_set field in UserF
|
Python
|
mit
|
locarise/locarise-drf-oauth2-support,locarise/locarise-drf-oauth2-support
|
from datetime import timedelta
from django.utils import timezone
from locarise_drf_oauth2_support.users.models import User
try:
import factory
class UserF(factory.DjangoModelFactory):
first_name = factory.Sequence(lambda n: "first_name%s" % n)
last_name = factory.Sequence(lambda n: "last_name%s" % n)
email = factory.Sequence(lambda n: "email%s@example.com" % n)
is_staff = False
is_active = True
is_superuser = False
last_login = timezone.now() - timedelta(days=2)
password = factory.PostGenerationMethodCall('set_password', 'pass')
class Meta:
model = User
except ImportError: # pragma: no cover
pass
Add organization_set field in UserF
|
from datetime import timedelta
from django.utils import timezone
from locarise_drf_oauth2_support.users.models import User
try:
import factory
class UserF(factory.DjangoModelFactory):
first_name = factory.Sequence(lambda n: "first_name%s" % n)
last_name = factory.Sequence(lambda n: "last_name%s" % n)
email = factory.Sequence(lambda n: "email%s@example.com" % n)
is_staff = False
is_active = True
is_superuser = False
last_login = timezone.now() - timedelta(days=2)
password = factory.PostGenerationMethodCall('set_password', 'pass')
organization_set = [{
"uid": "6tbgzDKyZYLCMzDarN7ga8",
"name": "Organization Demo",
"role": "organization-manager",
"is_active": True
}]
class Meta:
model = User
except ImportError: # pragma: no cover
pass
|
<commit_before>from datetime import timedelta
from django.utils import timezone
from locarise_drf_oauth2_support.users.models import User
try:
import factory
class UserF(factory.DjangoModelFactory):
first_name = factory.Sequence(lambda n: "first_name%s" % n)
last_name = factory.Sequence(lambda n: "last_name%s" % n)
email = factory.Sequence(lambda n: "email%s@example.com" % n)
is_staff = False
is_active = True
is_superuser = False
last_login = timezone.now() - timedelta(days=2)
password = factory.PostGenerationMethodCall('set_password', 'pass')
class Meta:
model = User
except ImportError: # pragma: no cover
pass
<commit_msg>Add organization_set field in UserF<commit_after>
|
from datetime import timedelta
from django.utils import timezone
from locarise_drf_oauth2_support.users.models import User
try:
import factory
class UserF(factory.DjangoModelFactory):
first_name = factory.Sequence(lambda n: "first_name%s" % n)
last_name = factory.Sequence(lambda n: "last_name%s" % n)
email = factory.Sequence(lambda n: "email%s@example.com" % n)
is_staff = False
is_active = True
is_superuser = False
last_login = timezone.now() - timedelta(days=2)
password = factory.PostGenerationMethodCall('set_password', 'pass')
organization_set = [{
"uid": "6tbgzDKyZYLCMzDarN7ga8",
"name": "Organization Demo",
"role": "organization-manager",
"is_active": True
}]
class Meta:
model = User
except ImportError: # pragma: no cover
pass
|
from datetime import timedelta
from django.utils import timezone
from locarise_drf_oauth2_support.users.models import User
try:
import factory
class UserF(factory.DjangoModelFactory):
first_name = factory.Sequence(lambda n: "first_name%s" % n)
last_name = factory.Sequence(lambda n: "last_name%s" % n)
email = factory.Sequence(lambda n: "email%s@example.com" % n)
is_staff = False
is_active = True
is_superuser = False
last_login = timezone.now() - timedelta(days=2)
password = factory.PostGenerationMethodCall('set_password', 'pass')
class Meta:
model = User
except ImportError: # pragma: no cover
pass
Add organization_set field in UserFfrom datetime import timedelta
from django.utils import timezone
from locarise_drf_oauth2_support.users.models import User
try:
import factory
class UserF(factory.DjangoModelFactory):
first_name = factory.Sequence(lambda n: "first_name%s" % n)
last_name = factory.Sequence(lambda n: "last_name%s" % n)
email = factory.Sequence(lambda n: "email%s@example.com" % n)
is_staff = False
is_active = True
is_superuser = False
last_login = timezone.now() - timedelta(days=2)
password = factory.PostGenerationMethodCall('set_password', 'pass')
organization_set = [{
"uid": "6tbgzDKyZYLCMzDarN7ga8",
"name": "Organization Demo",
"role": "organization-manager",
"is_active": True
}]
class Meta:
model = User
except ImportError: # pragma: no cover
pass
|
<commit_before>from datetime import timedelta
from django.utils import timezone
from locarise_drf_oauth2_support.users.models import User
try:
import factory
class UserF(factory.DjangoModelFactory):
first_name = factory.Sequence(lambda n: "first_name%s" % n)
last_name = factory.Sequence(lambda n: "last_name%s" % n)
email = factory.Sequence(lambda n: "email%s@example.com" % n)
is_staff = False
is_active = True
is_superuser = False
last_login = timezone.now() - timedelta(days=2)
password = factory.PostGenerationMethodCall('set_password', 'pass')
class Meta:
model = User
except ImportError: # pragma: no cover
pass
<commit_msg>Add organization_set field in UserF<commit_after>from datetime import timedelta
from django.utils import timezone
from locarise_drf_oauth2_support.users.models import User
try:
import factory
class UserF(factory.DjangoModelFactory):
first_name = factory.Sequence(lambda n: "first_name%s" % n)
last_name = factory.Sequence(lambda n: "last_name%s" % n)
email = factory.Sequence(lambda n: "email%s@example.com" % n)
is_staff = False
is_active = True
is_superuser = False
last_login = timezone.now() - timedelta(days=2)
password = factory.PostGenerationMethodCall('set_password', 'pass')
organization_set = [{
"uid": "6tbgzDKyZYLCMzDarN7ga8",
"name": "Organization Demo",
"role": "organization-manager",
"is_active": True
}]
class Meta:
model = User
except ImportError: # pragma: no cover
pass
|
18c4ba9325fe7a460c9c92ffd7bae3ce5d257332
|
tests/test_database.py
|
tests/test_database.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
from io import StringIO
import pytest
from django.core.management import call_command
def test_for_missing_migrations():
output = StringIO()
try:
call_command(
'makemigrations', interactive=False, dry_run=True, exit_code=True,
stdout=output)
except SystemExit as exc:
# The exit code will be 1 when there are no missing migrations
assert exc.code == 1
else:
pytest.fail("There are missing migrations:\n %s" % output.getvalue())
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
from io import StringIO
import pytest
from django.core.management import call_command
def test_for_missing_migrations():
output = StringIO()
try:
call_command(
'makemigrations',
interactive=False,
check=True,
stdout=output,
)
except SystemExit as exc:
# The exit code will be 0 when there are no missing migrations
assert exc.code == 1
pytest.fail("There are missing migrations:\n %s" % output.getvalue())
|
Use new command line option for checking if all migrations have been applied.
|
Use new command line option for checking if all migrations have been applied.
|
Python
|
mpl-2.0
|
mozilla/telemetry-analysis-service,mozilla/telemetry-analysis-service,mozilla/telemetry-analysis-service,mozilla/telemetry-analysis-service
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
from io import StringIO
import pytest
from django.core.management import call_command
def test_for_missing_migrations():
output = StringIO()
try:
call_command(
'makemigrations', interactive=False, dry_run=True, exit_code=True,
stdout=output)
except SystemExit as exc:
# The exit code will be 1 when there are no missing migrations
assert exc.code == 1
else:
pytest.fail("There are missing migrations:\n %s" % output.getvalue())
Use new command line option for checking if all migrations have been applied.
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
from io import StringIO
import pytest
from django.core.management import call_command
def test_for_missing_migrations():
output = StringIO()
try:
call_command(
'makemigrations',
interactive=False,
check=True,
stdout=output,
)
except SystemExit as exc:
# The exit code will be 0 when there are no missing migrations
assert exc.code == 1
pytest.fail("There are missing migrations:\n %s" % output.getvalue())
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
from io import StringIO
import pytest
from django.core.management import call_command
def test_for_missing_migrations():
output = StringIO()
try:
call_command(
'makemigrations', interactive=False, dry_run=True, exit_code=True,
stdout=output)
except SystemExit as exc:
# The exit code will be 1 when there are no missing migrations
assert exc.code == 1
else:
pytest.fail("There are missing migrations:\n %s" % output.getvalue())
<commit_msg>Use new command line option for checking if all migrations have been applied.<commit_after>
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
from io import StringIO
import pytest
from django.core.management import call_command
def test_for_missing_migrations():
output = StringIO()
try:
call_command(
'makemigrations',
interactive=False,
check=True,
stdout=output,
)
except SystemExit as exc:
# The exit code will be 0 when there are no missing migrations
assert exc.code == 1
pytest.fail("There are missing migrations:\n %s" % output.getvalue())
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
from io import StringIO
import pytest
from django.core.management import call_command
def test_for_missing_migrations():
output = StringIO()
try:
call_command(
'makemigrations', interactive=False, dry_run=True, exit_code=True,
stdout=output)
except SystemExit as exc:
# The exit code will be 1 when there are no missing migrations
assert exc.code == 1
else:
pytest.fail("There are missing migrations:\n %s" % output.getvalue())
Use new command line option for checking if all migrations have been applied.# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
from io import StringIO
import pytest
from django.core.management import call_command
def test_for_missing_migrations():
output = StringIO()
try:
call_command(
'makemigrations',
interactive=False,
check=True,
stdout=output,
)
except SystemExit as exc:
# The exit code will be 0 when there are no missing migrations
assert exc.code == 1
pytest.fail("There are missing migrations:\n %s" % output.getvalue())
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
from io import StringIO
import pytest
from django.core.management import call_command
def test_for_missing_migrations():
output = StringIO()
try:
call_command(
'makemigrations', interactive=False, dry_run=True, exit_code=True,
stdout=output)
except SystemExit as exc:
# The exit code will be 1 when there are no missing migrations
assert exc.code == 1
else:
pytest.fail("There are missing migrations:\n %s" % output.getvalue())
<commit_msg>Use new command line option for checking if all migrations have been applied.<commit_after># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
from io import StringIO
import pytest
from django.core.management import call_command
def test_for_missing_migrations():
output = StringIO()
try:
call_command(
'makemigrations',
interactive=False,
check=True,
stdout=output,
)
except SystemExit as exc:
# The exit code will be 0 when there are no missing migrations
assert exc.code == 1
pytest.fail("There are missing migrations:\n %s" % output.getvalue())
|
ee7b5353c039d6e1d2aeabcb084aee79e07b71f8
|
emoji/templatetags/emoji_tags.py
|
emoji/templatetags/emoji_tags.py
|
from django import template
from django.core.urlresolvers import reverse, NoReverseMatch
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from emoji import Emoji
register = template.Library()
@register.filter(name='emoji_replace', is_safe=True)
@stringfilter
def emoji_replace(value):
return mark_safe(Emoji.replace(value))
@register.filter(name='emoji_replace_unicode', is_safe=True)
@stringfilter
def emoji_replace_unicode(value):
return mark_safe(Emoji.replace_unicode(value))
@register.filter(name='emoji_replace_html_entities', is_safe=True)
@stringfilter
def emoji_replace_html_entities(value):
return mark_safe(Emoji.replace_html_entities(value))
@register.simple_tag
def emoji_load():
try:
url = reverse('emoji:list.json')
except NoReverseMatch:
return ''
else:
return "Emoji.setDataUrl('{0}').load();".format(url)
|
from django import template
from django.core.urlresolvers import reverse, NoReverseMatch
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe, SafeData
from django.utils.html import escape
from emoji import Emoji
register = template.Library()
@register.filter(name='emoji_replace', is_safe=True, needs_autoescape=True)
@stringfilter
def emoji_replace(value, autoescape=None):
autoescape = autoescape and not isinstance(value, SafeData)
if autoescape:
value = escape(value)
return mark_safe(Emoji.replace(value))
@register.filter(name='emoji_replace_unicode', is_safe=True, needs_autoescape=True)
@stringfilter
def emoji_replace_unicode(value, autoescape=None):
autoescape = autoescape and not isinstance(value, SafeData)
if autoescape:
value = escape(value)
return mark_safe(Emoji.replace_unicode(value))
@register.filter(name='emoji_replace_html_entities',
is_safe=True, needs_autoescape=True)
@stringfilter
def emoji_replace_html_entities(value, autoescape=None):
autoescape = autoescape and not isinstance(value, SafeData)
if autoescape:
value = escape(value)
return mark_safe(Emoji.replace_html_entities(value))
@register.simple_tag
def emoji_load():
try:
url = reverse('emoji:list.json')
except NoReverseMatch:
return ''
else:
return "Emoji.setDataUrl('{0}').load();".format(url)
|
Update the filters to escape the characters before replacing them.
|
Update the filters to escape the characters before replacing them.
This prevents the filters from allowing XSS attacks. This follows
the pattern used by django's linebreaksbr filter.
|
Python
|
mit
|
gaqzi/django-emoji,gaqzi/django-emoji,gaqzi/django-emoji
|
from django import template
from django.core.urlresolvers import reverse, NoReverseMatch
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from emoji import Emoji
register = template.Library()
@register.filter(name='emoji_replace', is_safe=True)
@stringfilter
def emoji_replace(value):
return mark_safe(Emoji.replace(value))
@register.filter(name='emoji_replace_unicode', is_safe=True)
@stringfilter
def emoji_replace_unicode(value):
return mark_safe(Emoji.replace_unicode(value))
@register.filter(name='emoji_replace_html_entities', is_safe=True)
@stringfilter
def emoji_replace_html_entities(value):
return mark_safe(Emoji.replace_html_entities(value))
@register.simple_tag
def emoji_load():
try:
url = reverse('emoji:list.json')
except NoReverseMatch:
return ''
else:
return "Emoji.setDataUrl('{0}').load();".format(url)
Update the filters to escape the characters before replacing them.
This prevents the filters from allowing XSS attacks. This follows
the pattern used by django's linebreaksbr filter.
|
from django import template
from django.core.urlresolvers import reverse, NoReverseMatch
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe, SafeData
from django.utils.html import escape
from emoji import Emoji
register = template.Library()
@register.filter(name='emoji_replace', is_safe=True, needs_autoescape=True)
@stringfilter
def emoji_replace(value, autoescape=None):
autoescape = autoescape and not isinstance(value, SafeData)
if autoescape:
value = escape(value)
return mark_safe(Emoji.replace(value))
@register.filter(name='emoji_replace_unicode', is_safe=True, needs_autoescape=True)
@stringfilter
def emoji_replace_unicode(value, autoescape=None):
autoescape = autoescape and not isinstance(value, SafeData)
if autoescape:
value = escape(value)
return mark_safe(Emoji.replace_unicode(value))
@register.filter(name='emoji_replace_html_entities',
is_safe=True, needs_autoescape=True)
@stringfilter
def emoji_replace_html_entities(value, autoescape=None):
autoescape = autoescape and not isinstance(value, SafeData)
if autoescape:
value = escape(value)
return mark_safe(Emoji.replace_html_entities(value))
@register.simple_tag
def emoji_load():
try:
url = reverse('emoji:list.json')
except NoReverseMatch:
return ''
else:
return "Emoji.setDataUrl('{0}').load();".format(url)
|
<commit_before>from django import template
from django.core.urlresolvers import reverse, NoReverseMatch
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from emoji import Emoji
register = template.Library()
@register.filter(name='emoji_replace', is_safe=True)
@stringfilter
def emoji_replace(value):
return mark_safe(Emoji.replace(value))
@register.filter(name='emoji_replace_unicode', is_safe=True)
@stringfilter
def emoji_replace_unicode(value):
return mark_safe(Emoji.replace_unicode(value))
@register.filter(name='emoji_replace_html_entities', is_safe=True)
@stringfilter
def emoji_replace_html_entities(value):
return mark_safe(Emoji.replace_html_entities(value))
@register.simple_tag
def emoji_load():
try:
url = reverse('emoji:list.json')
except NoReverseMatch:
return ''
else:
return "Emoji.setDataUrl('{0}').load();".format(url)
<commit_msg>Update the filters to escape the characters before replacing them.
This prevents the filters from allowing XSS attacks. This follows
the pattern used by django's linebreaksbr filter.<commit_after>
|
from django import template
from django.core.urlresolvers import reverse, NoReverseMatch
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe, SafeData
from django.utils.html import escape
from emoji import Emoji
register = template.Library()
@register.filter(name='emoji_replace', is_safe=True, needs_autoescape=True)
@stringfilter
def emoji_replace(value, autoescape=None):
autoescape = autoescape and not isinstance(value, SafeData)
if autoescape:
value = escape(value)
return mark_safe(Emoji.replace(value))
@register.filter(name='emoji_replace_unicode', is_safe=True, needs_autoescape=True)
@stringfilter
def emoji_replace_unicode(value, autoescape=None):
autoescape = autoescape and not isinstance(value, SafeData)
if autoescape:
value = escape(value)
return mark_safe(Emoji.replace_unicode(value))
@register.filter(name='emoji_replace_html_entities',
is_safe=True, needs_autoescape=True)
@stringfilter
def emoji_replace_html_entities(value, autoescape=None):
autoescape = autoescape and not isinstance(value, SafeData)
if autoescape:
value = escape(value)
return mark_safe(Emoji.replace_html_entities(value))
@register.simple_tag
def emoji_load():
try:
url = reverse('emoji:list.json')
except NoReverseMatch:
return ''
else:
return "Emoji.setDataUrl('{0}').load();".format(url)
|
from django import template
from django.core.urlresolvers import reverse, NoReverseMatch
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from emoji import Emoji
register = template.Library()
@register.filter(name='emoji_replace', is_safe=True)
@stringfilter
def emoji_replace(value):
return mark_safe(Emoji.replace(value))
@register.filter(name='emoji_replace_unicode', is_safe=True)
@stringfilter
def emoji_replace_unicode(value):
return mark_safe(Emoji.replace_unicode(value))
@register.filter(name='emoji_replace_html_entities', is_safe=True)
@stringfilter
def emoji_replace_html_entities(value):
return mark_safe(Emoji.replace_html_entities(value))
@register.simple_tag
def emoji_load():
try:
url = reverse('emoji:list.json')
except NoReverseMatch:
return ''
else:
return "Emoji.setDataUrl('{0}').load();".format(url)
Update the filters to escape the characters before replacing them.
This prevents the filters from allowing XSS attacks. This follows
the pattern used by django's linebreaksbr filter.from django import template
from django.core.urlresolvers import reverse, NoReverseMatch
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe, SafeData
from django.utils.html import escape
from emoji import Emoji
register = template.Library()
@register.filter(name='emoji_replace', is_safe=True, needs_autoescape=True)
@stringfilter
def emoji_replace(value, autoescape=None):
autoescape = autoescape and not isinstance(value, SafeData)
if autoescape:
value = escape(value)
return mark_safe(Emoji.replace(value))
@register.filter(name='emoji_replace_unicode', is_safe=True, needs_autoescape=True)
@stringfilter
def emoji_replace_unicode(value, autoescape=None):
autoescape = autoescape and not isinstance(value, SafeData)
if autoescape:
value = escape(value)
return mark_safe(Emoji.replace_unicode(value))
@register.filter(name='emoji_replace_html_entities',
is_safe=True, needs_autoescape=True)
@stringfilter
def emoji_replace_html_entities(value, autoescape=None):
autoescape = autoescape and not isinstance(value, SafeData)
if autoescape:
value = escape(value)
return mark_safe(Emoji.replace_html_entities(value))
@register.simple_tag
def emoji_load():
try:
url = reverse('emoji:list.json')
except NoReverseMatch:
return ''
else:
return "Emoji.setDataUrl('{0}').load();".format(url)
|
<commit_before>from django import template
from django.core.urlresolvers import reverse, NoReverseMatch
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from emoji import Emoji
register = template.Library()
@register.filter(name='emoji_replace', is_safe=True)
@stringfilter
def emoji_replace(value):
return mark_safe(Emoji.replace(value))
@register.filter(name='emoji_replace_unicode', is_safe=True)
@stringfilter
def emoji_replace_unicode(value):
return mark_safe(Emoji.replace_unicode(value))
@register.filter(name='emoji_replace_html_entities', is_safe=True)
@stringfilter
def emoji_replace_html_entities(value):
return mark_safe(Emoji.replace_html_entities(value))
@register.simple_tag
def emoji_load():
try:
url = reverse('emoji:list.json')
except NoReverseMatch:
return ''
else:
return "Emoji.setDataUrl('{0}').load();".format(url)
<commit_msg>Update the filters to escape the characters before replacing them.
This prevents the filters from allowing XSS attacks. This follows
the pattern used by django's linebreaksbr filter.<commit_after>from django import template
from django.core.urlresolvers import reverse, NoReverseMatch
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe, SafeData
from django.utils.html import escape
from emoji import Emoji
register = template.Library()
@register.filter(name='emoji_replace', is_safe=True, needs_autoescape=True)
@stringfilter
def emoji_replace(value, autoescape=None):
autoescape = autoescape and not isinstance(value, SafeData)
if autoescape:
value = escape(value)
return mark_safe(Emoji.replace(value))
@register.filter(name='emoji_replace_unicode', is_safe=True, needs_autoescape=True)
@stringfilter
def emoji_replace_unicode(value, autoescape=None):
autoescape = autoescape and not isinstance(value, SafeData)
if autoescape:
value = escape(value)
return mark_safe(Emoji.replace_unicode(value))
@register.filter(name='emoji_replace_html_entities',
is_safe=True, needs_autoescape=True)
@stringfilter
def emoji_replace_html_entities(value, autoescape=None):
autoescape = autoescape and not isinstance(value, SafeData)
if autoescape:
value = escape(value)
return mark_safe(Emoji.replace_html_entities(value))
@register.simple_tag
def emoji_load():
try:
url = reverse('emoji:list.json')
except NoReverseMatch:
return ''
else:
return "Emoji.setDataUrl('{0}').load();".format(url)
|
649c87174711de93261cd7703e67032281e2e8ee
|
salt/modules/scsi.py
|
salt/modules/scsi.py
|
# -*- coding: utf-8 -*-
"""
SCSI administration module
"""
import logging
log = logging.getLogger(__name__)
def lsscsi():
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.lsscsi
'''
cmd = 'lsscsi'
return __salt__['cmd.run'](cmd).splitlines()
def rescan_all(host):
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.rescan_all(0)
'''
if os.path.isdir("/sys/class/scsi_host/host{0}").format(host):
cmd = 'echo "- - -" > /sys/class/scsi_host/host{0}/scan'.format(host)
else:
return 'Host {0} does not exist'.format(host)
return __salt__['cmd.run'](cmd).splitlines()
|
# -*- coding: utf-8 -*-
'''
SCSI administration module
'''
import os.path
import logging
log = logging.getLogger(__name__)
def lsscsi():
'''
List SCSI devices
CLI Example:
.. code-block:: bash
salt '*' scsi.lsscsi
'''
cmd = 'lsscsi'
return __salt__['cmd.run'](cmd).splitlines()
def rescan_all(host):
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.rescan_all(0)
'''
if os.path.isdir('/sys/class/scsi_host/host{0}').format(host):
cmd = 'echo "- - -" > /sys/class/scsi_host/host{0}/scan'.format(host)
else:
return 'Host {0} does not exist'.format(host)
return __salt__['cmd.run'](cmd).splitlines()
|
Update formatting to Salt guidelines
|
Update formatting to Salt guidelines
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
# -*- coding: utf-8 -*-
"""
SCSI administration module
"""
import logging
log = logging.getLogger(__name__)
def lsscsi():
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.lsscsi
'''
cmd = 'lsscsi'
return __salt__['cmd.run'](cmd).splitlines()
def rescan_all(host):
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.rescan_all(0)
'''
if os.path.isdir("/sys/class/scsi_host/host{0}").format(host):
cmd = 'echo "- - -" > /sys/class/scsi_host/host{0}/scan'.format(host)
else:
return 'Host {0} does not exist'.format(host)
return __salt__['cmd.run'](cmd).splitlines()
Update formatting to Salt guidelines
|
# -*- coding: utf-8 -*-
'''
SCSI administration module
'''
import os.path
import logging
log = logging.getLogger(__name__)
def lsscsi():
'''
List SCSI devices
CLI Example:
.. code-block:: bash
salt '*' scsi.lsscsi
'''
cmd = 'lsscsi'
return __salt__['cmd.run'](cmd).splitlines()
def rescan_all(host):
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.rescan_all(0)
'''
if os.path.isdir('/sys/class/scsi_host/host{0}').format(host):
cmd = 'echo "- - -" > /sys/class/scsi_host/host{0}/scan'.format(host)
else:
return 'Host {0} does not exist'.format(host)
return __salt__['cmd.run'](cmd).splitlines()
|
<commit_before># -*- coding: utf-8 -*-
"""
SCSI administration module
"""
import logging
log = logging.getLogger(__name__)
def lsscsi():
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.lsscsi
'''
cmd = 'lsscsi'
return __salt__['cmd.run'](cmd).splitlines()
def rescan_all(host):
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.rescan_all(0)
'''
if os.path.isdir("/sys/class/scsi_host/host{0}").format(host):
cmd = 'echo "- - -" > /sys/class/scsi_host/host{0}/scan'.format(host)
else:
return 'Host {0} does not exist'.format(host)
return __salt__['cmd.run'](cmd).splitlines()
<commit_msg>Update formatting to Salt guidelines<commit_after>
|
# -*- coding: utf-8 -*-
'''
SCSI administration module
'''
import os.path
import logging
log = logging.getLogger(__name__)
def lsscsi():
'''
List SCSI devices
CLI Example:
.. code-block:: bash
salt '*' scsi.lsscsi
'''
cmd = 'lsscsi'
return __salt__['cmd.run'](cmd).splitlines()
def rescan_all(host):
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.rescan_all(0)
'''
if os.path.isdir('/sys/class/scsi_host/host{0}').format(host):
cmd = 'echo "- - -" > /sys/class/scsi_host/host{0}/scan'.format(host)
else:
return 'Host {0} does not exist'.format(host)
return __salt__['cmd.run'](cmd).splitlines()
|
# -*- coding: utf-8 -*-
"""
SCSI administration module
"""
import logging
log = logging.getLogger(__name__)
def lsscsi():
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.lsscsi
'''
cmd = 'lsscsi'
return __salt__['cmd.run'](cmd).splitlines()
def rescan_all(host):
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.rescan_all(0)
'''
if os.path.isdir("/sys/class/scsi_host/host{0}").format(host):
cmd = 'echo "- - -" > /sys/class/scsi_host/host{0}/scan'.format(host)
else:
return 'Host {0} does not exist'.format(host)
return __salt__['cmd.run'](cmd).splitlines()
Update formatting to Salt guidelines# -*- coding: utf-8 -*-
'''
SCSI administration module
'''
import os.path
import logging
log = logging.getLogger(__name__)
def lsscsi():
'''
List SCSI devices
CLI Example:
.. code-block:: bash
salt '*' scsi.lsscsi
'''
cmd = 'lsscsi'
return __salt__['cmd.run'](cmd).splitlines()
def rescan_all(host):
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.rescan_all(0)
'''
if os.path.isdir('/sys/class/scsi_host/host{0}').format(host):
cmd = 'echo "- - -" > /sys/class/scsi_host/host{0}/scan'.format(host)
else:
return 'Host {0} does not exist'.format(host)
return __salt__['cmd.run'](cmd).splitlines()
|
<commit_before># -*- coding: utf-8 -*-
"""
SCSI administration module
"""
import logging
log = logging.getLogger(__name__)
def lsscsi():
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.lsscsi
'''
cmd = 'lsscsi'
return __salt__['cmd.run'](cmd).splitlines()
def rescan_all(host):
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.rescan_all(0)
'''
if os.path.isdir("/sys/class/scsi_host/host{0}").format(host):
cmd = 'echo "- - -" > /sys/class/scsi_host/host{0}/scan'.format(host)
else:
return 'Host {0} does not exist'.format(host)
return __salt__['cmd.run'](cmd).splitlines()
<commit_msg>Update formatting to Salt guidelines<commit_after># -*- coding: utf-8 -*-
'''
SCSI administration module
'''
import os.path
import logging
log = logging.getLogger(__name__)
def lsscsi():
'''
List SCSI devices
CLI Example:
.. code-block:: bash
salt '*' scsi.lsscsi
'''
cmd = 'lsscsi'
return __salt__['cmd.run'](cmd).splitlines()
def rescan_all(host):
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.rescan_all(0)
'''
if os.path.isdir('/sys/class/scsi_host/host{0}').format(host):
cmd = 'echo "- - -" > /sys/class/scsi_host/host{0}/scan'.format(host)
else:
return 'Host {0} does not exist'.format(host)
return __salt__['cmd.run'](cmd).splitlines()
|
a3b8297ca5eafd7694f4fde3f2ee351cdb599a9d
|
derrida/__init__.py
|
derrida/__init__.py
|
__version_info__ = (1, 3, 1, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
__version_info__ = (1, 3, 2, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
Update version number for hotfix 1.3.2
|
Update version number for hotfix 1.3.2
|
Python
|
apache-2.0
|
Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django
|
__version_info__ = (1, 3, 1, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
Update version number for hotfix 1.3.2
|
__version_info__ = (1, 3, 2, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
<commit_before>__version_info__ = (1, 3, 1, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
<commit_msg>Update version number for hotfix 1.3.2<commit_after>
|
__version_info__ = (1, 3, 2, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
__version_info__ = (1, 3, 1, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
Update version number for hotfix 1.3.2__version_info__ = (1, 3, 2, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
<commit_before>__version_info__ = (1, 3, 1, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
<commit_msg>Update version number for hotfix 1.3.2<commit_after>__version_info__ = (1, 3, 2, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
eb5944d9e55f01687aaf53992bc975c2cac5047a
|
code/marv/marv_webapi/__init__.py
|
code/marv/marv_webapi/__init__.py
|
# -*- coding: utf-8 -*-
#
# Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
from __future__ import absolute_import, division, print_function
from .auth import auth
from .comment import comment
from .dataset import dataset
from .delete import delete
from .tag import tag
from .collection import collection, meta
from .tooling import api_group as marv_api_group
@marv_api_group()
def webapi(app):
pass
# Groups and endpoints are all the same for now
webapi.add_endpoint(auth)
webapi.add_endpoint(comment)
webapi.add_endpoint(dataset)
webapi.add_endpoint(delete)
webapi.add_endpoint(collection)
webapi.add_endpoint(meta)
webapi.add_endpoint(tag)
from marv_robotics.webapi import robotics
webapi.add_endpoint(robotics)
|
# -*- coding: utf-8 -*-
#
# Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
from __future__ import absolute_import, division, print_function
from pkg_resources import iter_entry_points
from .auth import auth
from .comment import comment
from .dataset import dataset
from .delete import delete
from .tag import tag
from .collection import collection, meta
from .tooling import api_group as marv_api_group
@marv_api_group()
def webapi(app):
pass
# Groups and endpoints are all the same for now
webapi.add_endpoint(auth)
webapi.add_endpoint(comment)
webapi.add_endpoint(dataset)
webapi.add_endpoint(delete)
webapi.add_endpoint(collection)
webapi.add_endpoint(meta)
webapi.add_endpoint(tag)
from marv_robotics.webapi import robotics
webapi.add_endpoint(robotics)
for ep in iter_entry_points(group='marv_webapi'):
endpoint = ep.load()
webapi.add_endpoint(endpoint)
|
Support webapi extension via entry points
|
[marv] Support webapi extension via entry points
|
Python
|
agpl-3.0
|
ternaris/marv-robotics,ternaris/marv-robotics
|
# -*- coding: utf-8 -*-
#
# Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
from __future__ import absolute_import, division, print_function
from .auth import auth
from .comment import comment
from .dataset import dataset
from .delete import delete
from .tag import tag
from .collection import collection, meta
from .tooling import api_group as marv_api_group
@marv_api_group()
def webapi(app):
pass
# Groups and endpoints are all the same for now
webapi.add_endpoint(auth)
webapi.add_endpoint(comment)
webapi.add_endpoint(dataset)
webapi.add_endpoint(delete)
webapi.add_endpoint(collection)
webapi.add_endpoint(meta)
webapi.add_endpoint(tag)
from marv_robotics.webapi import robotics
webapi.add_endpoint(robotics)
[marv] Support webapi extension via entry points
|
# -*- coding: utf-8 -*-
#
# Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
from __future__ import absolute_import, division, print_function
from pkg_resources import iter_entry_points
from .auth import auth
from .comment import comment
from .dataset import dataset
from .delete import delete
from .tag import tag
from .collection import collection, meta
from .tooling import api_group as marv_api_group
@marv_api_group()
def webapi(app):
pass
# Groups and endpoints are all the same for now
webapi.add_endpoint(auth)
webapi.add_endpoint(comment)
webapi.add_endpoint(dataset)
webapi.add_endpoint(delete)
webapi.add_endpoint(collection)
webapi.add_endpoint(meta)
webapi.add_endpoint(tag)
from marv_robotics.webapi import robotics
webapi.add_endpoint(robotics)
for ep in iter_entry_points(group='marv_webapi'):
endpoint = ep.load()
webapi.add_endpoint(endpoint)
|
<commit_before># -*- coding: utf-8 -*-
#
# Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
from __future__ import absolute_import, division, print_function
from .auth import auth
from .comment import comment
from .dataset import dataset
from .delete import delete
from .tag import tag
from .collection import collection, meta
from .tooling import api_group as marv_api_group
@marv_api_group()
def webapi(app):
pass
# Groups and endpoints are all the same for now
webapi.add_endpoint(auth)
webapi.add_endpoint(comment)
webapi.add_endpoint(dataset)
webapi.add_endpoint(delete)
webapi.add_endpoint(collection)
webapi.add_endpoint(meta)
webapi.add_endpoint(tag)
from marv_robotics.webapi import robotics
webapi.add_endpoint(robotics)
<commit_msg>[marv] Support webapi extension via entry points<commit_after>
|
# -*- coding: utf-8 -*-
#
# Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
from __future__ import absolute_import, division, print_function
from pkg_resources import iter_entry_points
from .auth import auth
from .comment import comment
from .dataset import dataset
from .delete import delete
from .tag import tag
from .collection import collection, meta
from .tooling import api_group as marv_api_group
@marv_api_group()
def webapi(app):
pass
# Groups and endpoints are all the same for now
webapi.add_endpoint(auth)
webapi.add_endpoint(comment)
webapi.add_endpoint(dataset)
webapi.add_endpoint(delete)
webapi.add_endpoint(collection)
webapi.add_endpoint(meta)
webapi.add_endpoint(tag)
from marv_robotics.webapi import robotics
webapi.add_endpoint(robotics)
for ep in iter_entry_points(group='marv_webapi'):
endpoint = ep.load()
webapi.add_endpoint(endpoint)
|
# -*- coding: utf-8 -*-
#
# Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
from __future__ import absolute_import, division, print_function
from .auth import auth
from .comment import comment
from .dataset import dataset
from .delete import delete
from .tag import tag
from .collection import collection, meta
from .tooling import api_group as marv_api_group
@marv_api_group()
def webapi(app):
pass
# Groups and endpoints are all the same for now
webapi.add_endpoint(auth)
webapi.add_endpoint(comment)
webapi.add_endpoint(dataset)
webapi.add_endpoint(delete)
webapi.add_endpoint(collection)
webapi.add_endpoint(meta)
webapi.add_endpoint(tag)
from marv_robotics.webapi import robotics
webapi.add_endpoint(robotics)
[marv] Support webapi extension via entry points# -*- coding: utf-8 -*-
#
# Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
from __future__ import absolute_import, division, print_function
from pkg_resources import iter_entry_points
from .auth import auth
from .comment import comment
from .dataset import dataset
from .delete import delete
from .tag import tag
from .collection import collection, meta
from .tooling import api_group as marv_api_group
@marv_api_group()
def webapi(app):
pass
# Groups and endpoints are all the same for now
webapi.add_endpoint(auth)
webapi.add_endpoint(comment)
webapi.add_endpoint(dataset)
webapi.add_endpoint(delete)
webapi.add_endpoint(collection)
webapi.add_endpoint(meta)
webapi.add_endpoint(tag)
from marv_robotics.webapi import robotics
webapi.add_endpoint(robotics)
for ep in iter_entry_points(group='marv_webapi'):
endpoint = ep.load()
webapi.add_endpoint(endpoint)
|
<commit_before># -*- coding: utf-8 -*-
#
# Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
from __future__ import absolute_import, division, print_function
from .auth import auth
from .comment import comment
from .dataset import dataset
from .delete import delete
from .tag import tag
from .collection import collection, meta
from .tooling import api_group as marv_api_group
@marv_api_group()
def webapi(app):
pass
# Groups and endpoints are all the same for now
webapi.add_endpoint(auth)
webapi.add_endpoint(comment)
webapi.add_endpoint(dataset)
webapi.add_endpoint(delete)
webapi.add_endpoint(collection)
webapi.add_endpoint(meta)
webapi.add_endpoint(tag)
from marv_robotics.webapi import robotics
webapi.add_endpoint(robotics)
<commit_msg>[marv] Support webapi extension via entry points<commit_after># -*- coding: utf-8 -*-
#
# Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
from __future__ import absolute_import, division, print_function
from pkg_resources import iter_entry_points
from .auth import auth
from .comment import comment
from .dataset import dataset
from .delete import delete
from .tag import tag
from .collection import collection, meta
from .tooling import api_group as marv_api_group
@marv_api_group()
def webapi(app):
pass
# Groups and endpoints are all the same for now
webapi.add_endpoint(auth)
webapi.add_endpoint(comment)
webapi.add_endpoint(dataset)
webapi.add_endpoint(delete)
webapi.add_endpoint(collection)
webapi.add_endpoint(meta)
webapi.add_endpoint(tag)
from marv_robotics.webapi import robotics
webapi.add_endpoint(robotics)
for ep in iter_entry_points(group='marv_webapi'):
endpoint = ep.load()
webapi.add_endpoint(endpoint)
|
ecb0922db7aca55bde6177e37b5e20f94fb59b60
|
copy/opt/core/bin/remove_mailbox.py
|
copy/opt/core/bin/remove_mailbox.py
|
#!/usr/bin/env python
import redis
import os.path
import shutil
import glob
""" ***** CONFIG START ***** """
REDIS_SOCKET = '/tmp/redis.sock'
MAILDIR = '/var/mail'
""" ***** CONFIG END ***** """
os.chdir(MAILDIR)
r = redis.Redis(unix_socket_path=REDIS_SOCKET)
filesDepth = glob.glob('*/*')
dirsDepth = filter(lambda f: os.path.isdir(f), filesDepth)
for dirDepth in dirsDepth:
domain, user = dirDepth.split('/')
mail = user + '@' + domain
exists = r.exists('mail.account:obj:' + mail)
if not exists:
mdir = MAILDIR + "/" + domain + "/" + user
shutil.rmtree(mdir)
print '%s,%s' % (mail, mdir)
|
#!/usr/bin/env python
# Thomas Merkel <tm@core.io>
import redis
import os
import shutil
import glob
import time
""" ***** CONFIG START ***** """
REDIS_SOCKET = '/tmp/redis.sock'
MAILDIR = '/var/mail'
ARCHIVE = '/var/mail/.archive'
RM_FILE = 'core-remove-mailbox'
""" ***** CONFIG END ***** """
os.chdir(MAILDIR)
r = redis.Redis(unix_socket_path=REDIS_SOCKET)
filesDepth = glob.glob('*/*')
dirsDepth = filter(lambda f: os.path.isdir(f), filesDepth)
today = str(time.time())
for dirDepth in dirsDepth:
# get all information
domain, user = dirDepth.split('/')
mail = user + '@' + domain
mdir = MAILDIR + "/" + domain + "/" + user
rm_file = mdir + "/" + RM_FILE
# check if redis account exists
exists = r.exists('mail.account:obj:' + mail)
if not exists:
if os.path.exists(rm_file):
# RM_FILE file exists archive maildir
shutil.move(mdir, ARCHIVE + "/" + mail + "_" + today)
print 'Archived: %s,%s' % (mail, mdir)
else:
# create RM_FILE file in maildir
open(rm_file, 'a').close()
print 'Tagged: %s,%s' % (mail, mdir)
else:
# remove RM_FILE because account still exists in redis
if os.path.exists(rm_file):
os.remove(rm_file)
|
Rebuild mailbox remove script with some verify checks
|
Rebuild mailbox remove script with some verify checks
We should not remove a mailbox and only archive it. Also it should
be good to be sure the mailbox doesn't exists in redis anymore after
two runs.
|
Python
|
mit
|
skylime/mi-core-mbox,skylime/mi-core-mbox
|
#!/usr/bin/env python
import redis
import os.path
import shutil
import glob
""" ***** CONFIG START ***** """
REDIS_SOCKET = '/tmp/redis.sock'
MAILDIR = '/var/mail'
""" ***** CONFIG END ***** """
os.chdir(MAILDIR)
r = redis.Redis(unix_socket_path=REDIS_SOCKET)
filesDepth = glob.glob('*/*')
dirsDepth = filter(lambda f: os.path.isdir(f), filesDepth)
for dirDepth in dirsDepth:
domain, user = dirDepth.split('/')
mail = user + '@' + domain
exists = r.exists('mail.account:obj:' + mail)
if not exists:
mdir = MAILDIR + "/" + domain + "/" + user
shutil.rmtree(mdir)
print '%s,%s' % (mail, mdir)
Rebuild mailbox remove script with some verify checks
We should not remove a mailbox and only archive it. Also it should
be good to be sure the mailbox doesn't exists in redis anymore after
two runs.
|
#!/usr/bin/env python
# Thomas Merkel <tm@core.io>
import redis
import os
import shutil
import glob
import time
""" ***** CONFIG START ***** """
REDIS_SOCKET = '/tmp/redis.sock'
MAILDIR = '/var/mail'
ARCHIVE = '/var/mail/.archive'
RM_FILE = 'core-remove-mailbox'
""" ***** CONFIG END ***** """
os.chdir(MAILDIR)
r = redis.Redis(unix_socket_path=REDIS_SOCKET)
filesDepth = glob.glob('*/*')
dirsDepth = filter(lambda f: os.path.isdir(f), filesDepth)
today = str(time.time())
for dirDepth in dirsDepth:
# get all information
domain, user = dirDepth.split('/')
mail = user + '@' + domain
mdir = MAILDIR + "/" + domain + "/" + user
rm_file = mdir + "/" + RM_FILE
# check if redis account exists
exists = r.exists('mail.account:obj:' + mail)
if not exists:
if os.path.exists(rm_file):
# RM_FILE file exists archive maildir
shutil.move(mdir, ARCHIVE + "/" + mail + "_" + today)
print 'Archived: %s,%s' % (mail, mdir)
else:
# create RM_FILE file in maildir
open(rm_file, 'a').close()
print 'Tagged: %s,%s' % (mail, mdir)
else:
# remove RM_FILE because account still exists in redis
if os.path.exists(rm_file):
os.remove(rm_file)
|
<commit_before>#!/usr/bin/env python
import redis
import os.path
import shutil
import glob
""" ***** CONFIG START ***** """
REDIS_SOCKET = '/tmp/redis.sock'
MAILDIR = '/var/mail'
""" ***** CONFIG END ***** """
os.chdir(MAILDIR)
r = redis.Redis(unix_socket_path=REDIS_SOCKET)
filesDepth = glob.glob('*/*')
dirsDepth = filter(lambda f: os.path.isdir(f), filesDepth)
for dirDepth in dirsDepth:
domain, user = dirDepth.split('/')
mail = user + '@' + domain
exists = r.exists('mail.account:obj:' + mail)
if not exists:
mdir = MAILDIR + "/" + domain + "/" + user
shutil.rmtree(mdir)
print '%s,%s' % (mail, mdir)
<commit_msg>Rebuild mailbox remove script with some verify checks
We should not remove a mailbox and only archive it. Also it should
be good to be sure the mailbox doesn't exists in redis anymore after
two runs.<commit_after>
|
#!/usr/bin/env python
# Thomas Merkel <tm@core.io>
import redis
import os
import shutil
import glob
import time
""" ***** CONFIG START ***** """
REDIS_SOCKET = '/tmp/redis.sock'
MAILDIR = '/var/mail'
ARCHIVE = '/var/mail/.archive'
RM_FILE = 'core-remove-mailbox'
""" ***** CONFIG END ***** """
os.chdir(MAILDIR)
r = redis.Redis(unix_socket_path=REDIS_SOCKET)
filesDepth = glob.glob('*/*')
dirsDepth = filter(lambda f: os.path.isdir(f), filesDepth)
today = str(time.time())
for dirDepth in dirsDepth:
# get all information
domain, user = dirDepth.split('/')
mail = user + '@' + domain
mdir = MAILDIR + "/" + domain + "/" + user
rm_file = mdir + "/" + RM_FILE
# check if redis account exists
exists = r.exists('mail.account:obj:' + mail)
if not exists:
if os.path.exists(rm_file):
# RM_FILE file exists archive maildir
shutil.move(mdir, ARCHIVE + "/" + mail + "_" + today)
print 'Archived: %s,%s' % (mail, mdir)
else:
# create RM_FILE file in maildir
open(rm_file, 'a').close()
print 'Tagged: %s,%s' % (mail, mdir)
else:
# remove RM_FILE because account still exists in redis
if os.path.exists(rm_file):
os.remove(rm_file)
|
#!/usr/bin/env python
import redis
import os.path
import shutil
import glob
""" ***** CONFIG START ***** """
REDIS_SOCKET = '/tmp/redis.sock'
MAILDIR = '/var/mail'
""" ***** CONFIG END ***** """
os.chdir(MAILDIR)
r = redis.Redis(unix_socket_path=REDIS_SOCKET)
filesDepth = glob.glob('*/*')
dirsDepth = filter(lambda f: os.path.isdir(f), filesDepth)
for dirDepth in dirsDepth:
domain, user = dirDepth.split('/')
mail = user + '@' + domain
exists = r.exists('mail.account:obj:' + mail)
if not exists:
mdir = MAILDIR + "/" + domain + "/" + user
shutil.rmtree(mdir)
print '%s,%s' % (mail, mdir)
Rebuild mailbox remove script with some verify checks
We should not remove a mailbox but only archive it. Also, we should
make sure that the mailbox no longer exists in redis after
two runs.#!/usr/bin/env python
# Thomas Merkel <tm@core.io>
import redis
import os
import shutil
import glob
import time
""" ***** CONFIG START ***** """
REDIS_SOCKET = '/tmp/redis.sock'
MAILDIR = '/var/mail'
ARCHIVE = '/var/mail/.archive'
RM_FILE = 'core-remove-mailbox'
""" ***** CONFIG END ***** """
os.chdir(MAILDIR)
r = redis.Redis(unix_socket_path=REDIS_SOCKET)
filesDepth = glob.glob('*/*')
dirsDepth = filter(lambda f: os.path.isdir(f), filesDepth)
today = str(time.time())
for dirDepth in dirsDepth:
# get all information
domain, user = dirDepth.split('/')
mail = user + '@' + domain
mdir = MAILDIR + "/" + domain + "/" + user
rm_file = mdir + "/" + RM_FILE
# check if redis account exists
exists = r.exists('mail.account:obj:' + mail)
if not exists:
if os.path.exists(rm_file):
# RM_FILE file exists archive maildir
shutil.move(mdir, ARCHIVE + "/" + mail + "_" + today)
print 'Archived: %s,%s' % (mail, mdir)
else:
# create RM_FILE file in maildir
open(rm_file, 'a').close()
print 'Tagged: %s,%s' % (mail, mdir)
else:
# remove RM_FILE because account still exists in redis
if os.path.exists(rm_file):
os.remove(rm_file)
|
<commit_before>#!/usr/bin/env python
import redis
import os.path
import shutil
import glob
""" ***** CONFIG START ***** """
REDIS_SOCKET = '/tmp/redis.sock'
MAILDIR = '/var/mail'
""" ***** CONFIG END ***** """
os.chdir(MAILDIR)
r = redis.Redis(unix_socket_path=REDIS_SOCKET)
filesDepth = glob.glob('*/*')
dirsDepth = filter(lambda f: os.path.isdir(f), filesDepth)
for dirDepth in dirsDepth:
domain, user = dirDepth.split('/')
mail = user + '@' + domain
exists = r.exists('mail.account:obj:' + mail)
if not exists:
mdir = MAILDIR + "/" + domain + "/" + user
shutil.rmtree(mdir)
print '%s,%s' % (mail, mdir)
<commit_msg>Rebuild mailbox remove script with some verify checks
We should not remove a mailbox but only archive it. Also, we should
make sure that the mailbox no longer exists in redis after
two runs.<commit_after>#!/usr/bin/env python
# Thomas Merkel <tm@core.io>
import redis
import os
import shutil
import glob
import time
""" ***** CONFIG START ***** """
REDIS_SOCKET = '/tmp/redis.sock'
MAILDIR = '/var/mail'
ARCHIVE = '/var/mail/.archive'
RM_FILE = 'core-remove-mailbox'
""" ***** CONFIG END ***** """
os.chdir(MAILDIR)
r = redis.Redis(unix_socket_path=REDIS_SOCKET)
filesDepth = glob.glob('*/*')
dirsDepth = filter(lambda f: os.path.isdir(f), filesDepth)
today = str(time.time())
for dirDepth in dirsDepth:
# get all information
domain, user = dirDepth.split('/')
mail = user + '@' + domain
mdir = MAILDIR + "/" + domain + "/" + user
rm_file = mdir + "/" + RM_FILE
# check if redis account exists
exists = r.exists('mail.account:obj:' + mail)
if not exists:
if os.path.exists(rm_file):
# RM_FILE file exists archive maildir
shutil.move(mdir, ARCHIVE + "/" + mail + "_" + today)
print 'Archived: %s,%s' % (mail, mdir)
else:
# create RM_FILE file in maildir
open(rm_file, 'a').close()
print 'Tagged: %s,%s' % (mail, mdir)
else:
# remove RM_FILE because account still exists in redis
if os.path.exists(rm_file):
os.remove(rm_file)
|
c44c0751dbf1ffdeeb050f6ef00762f8dec9e51c
|
boot.py
|
boot.py
|
# Only .py files at the top-level of a Sublime package are considered "plugins".
# Make Sublime aware of our *{Command,Listener,Handler} classes by importing them:
from .src.blame import * # noqa: F401,F403
from .src.blame_all import * # noqa: F401,F403
from .src.blame_inline import * # noqa: F401,F403
from .src.blame_instadiff import * # noqa: F401,F403
def plugin_loaded():
    """Sublime Text load hook; nothing to initialize yet."""
def plugin_unloaded():
    """Sublime Text unload hook; nothing to tear down yet."""
|
# Only .py files at the top-level of a Sublime package are considered "plugins".
# Make Sublime aware of our *{Command,Listener,Handler} classes by importing them:
from .src.blame import * # noqa: F401,F403
from .src.blame_all import * # noqa: F401,F403
from .src.blame_inline import * # noqa: F401,F403
from .src.blame_instadiff import * # noqa: F401,F403
# def plugin_loaded():
# pass
# def plugin_unloaded():
# pass
|
Stop defining plugin_{,un}loaded functions that aren't doing anything
|
Stop defining plugin_{,un}loaded functions that aren't doing anything
|
Python
|
mit
|
psykzz/st3-gitblame
|
# Only .py files at the top-level of a Sublime package are considered "plugins".
# Make Sublime aware of our *{Command,Listener,Handler} classes by importing them:
from .src.blame import * # noqa: F401,F403
from .src.blame_all import * # noqa: F401,F403
from .src.blame_inline import * # noqa: F401,F403
from .src.blame_instadiff import * # noqa: F401,F403
def plugin_loaded():
pass
def plugin_unloaded():
pass
Stop defining plugin_{,un}loaded functions that aren't doing anything
|
# Only .py files at the top-level of a Sublime package are considered "plugins".
# Make Sublime aware of our *{Command,Listener,Handler} classes by importing them:
from .src.blame import * # noqa: F401,F403
from .src.blame_all import * # noqa: F401,F403
from .src.blame_inline import * # noqa: F401,F403
from .src.blame_instadiff import * # noqa: F401,F403
# def plugin_loaded():
# pass
# def plugin_unloaded():
# pass
|
<commit_before># Only .py files at the top-level of a Sublime package are considered "plugins".
# Make Sublime aware of our *{Command,Listener,Handler} classes by importing them:
from .src.blame import * # noqa: F401,F403
from .src.blame_all import * # noqa: F401,F403
from .src.blame_inline import * # noqa: F401,F403
from .src.blame_instadiff import * # noqa: F401,F403
def plugin_loaded():
pass
def plugin_unloaded():
pass
<commit_msg>Stop defining plugin_{,un}loaded functions that aren't doing anything<commit_after>
|
# Only .py files at the top-level of a Sublime package are considered "plugins".
# Make Sublime aware of our *{Command,Listener,Handler} classes by importing them:
from .src.blame import * # noqa: F401,F403
from .src.blame_all import * # noqa: F401,F403
from .src.blame_inline import * # noqa: F401,F403
from .src.blame_instadiff import * # noqa: F401,F403
# def plugin_loaded():
# pass
# def plugin_unloaded():
# pass
|
# Only .py files at the top-level of a Sublime package are considered "plugins".
# Make Sublime aware of our *{Command,Listener,Handler} classes by importing them:
from .src.blame import * # noqa: F401,F403
from .src.blame_all import * # noqa: F401,F403
from .src.blame_inline import * # noqa: F401,F403
from .src.blame_instadiff import * # noqa: F401,F403
def plugin_loaded():
pass
def plugin_unloaded():
pass
Stop defining plugin_{,un}loaded functions that aren't doing anything# Only .py files at the top-level of a Sublime package are considered "plugins".
# Make Sublime aware of our *{Command,Listener,Handler} classes by importing them:
from .src.blame import * # noqa: F401,F403
from .src.blame_all import * # noqa: F401,F403
from .src.blame_inline import * # noqa: F401,F403
from .src.blame_instadiff import * # noqa: F401,F403
# def plugin_loaded():
# pass
# def plugin_unloaded():
# pass
|
<commit_before># Only .py files at the top-level of a Sublime package are considered "plugins".
# Make Sublime aware of our *{Command,Listener,Handler} classes by importing them:
from .src.blame import * # noqa: F401,F403
from .src.blame_all import * # noqa: F401,F403
from .src.blame_inline import * # noqa: F401,F403
from .src.blame_instadiff import * # noqa: F401,F403
def plugin_loaded():
pass
def plugin_unloaded():
pass
<commit_msg>Stop defining plugin_{,un}loaded functions that aren't doing anything<commit_after># Only .py files at the top-level of a Sublime package are considered "plugins".
# Make Sublime aware of our *{Command,Listener,Handler} classes by importing them:
from .src.blame import * # noqa: F401,F403
from .src.blame_all import * # noqa: F401,F403
from .src.blame_inline import * # noqa: F401,F403
from .src.blame_instadiff import * # noqa: F401,F403
# def plugin_loaded():
# pass
# def plugin_unloaded():
# pass
|
514997ba994f19b7933f9794e16f0668c7c64502
|
kismetclient/handlers.py
|
kismetclient/handlers.py
|
from kismetclient.utils import csv
from kismetclient.exceptions import ServerError
def kismet(server, version, starttime, servername, dumpfiles, uid):
""" Handle server startup string. """
print version, servername, uid
def capability(server, CAPABILITY, capabilities):
""" Register a server's capability. """
server.capabilities[CAPABILITY] = csv(capabilities)
def protocols(server, protocols):
""" Enumerate capabilities so they can be registered. """
for protocol in csv(protocols):
server.cmd('CAPABILITY', protocol)
def ack(server, cmdid, text):
""" Handle ack messages in response to commands. """
# Simply remove from the in_progress queue
server.in_progress.pop(cmdid)
def error(server, cmdid, text):
""" Handle error messages in response to commands. """
cmd = server.in_progress.pop(cmdid)
raise ServerError(cmd, text)
def print_fields(server, **fields):
""" A generic handler which prints all the fields. """
for k, v in fields.items():
print '%s: %s' % (k, v)
print '-' * 80
|
from kismetclient.utils import csv
from kismetclient.exceptions import ServerError
def kismet(client, version, starttime, servername, dumpfiles, uid):
    """ Handle server startup string.

    Receives the fields of the KISMET sentence the server sends on
    connect; starttime and dumpfiles are accepted but currently unused.
    """
    # Python 2 print statement: log basic server identity on connect.
    print version, servername, uid
def capability(client, CAPABILITY, capabilities):
    """ Record the field list a server protocol supports.

    Parses the comma-separated *capabilities* field and stores it on the
    client, keyed by the protocol name in the CAPABILITY field.
    """
    parsed = csv(capabilities)
    client.capabilities[CAPABILITY] = parsed
def protocols(client, protocols):
    """ Ask the server to describe every protocol it announced.

    Issues one CAPABILITY command per name in the comma-separated
    *protocols* field so each protocol's fields can be registered.
    """
    for name in csv(protocols):
        client.cmd('CAPABILITY', name)
def ack(client, cmdid, text):
    """ Handle an ACK reply: the acknowledged command completed.

    Drops the command from the client's table of in-flight commands;
    the text payload is not used.
    """
    # del is equivalent to .pop() here -- the popped value was discarded.
    del client.in_progress[cmdid]
def error(client, cmdid, text):
    """ Handle an ERROR reply by raising ServerError.

    The failing command is removed from the in-progress table and
    reported together with the server's error text.
    """
    failed_cmd = client.in_progress.pop(cmdid)
    raise ServerError(failed_cmd, text)
def print_fields(client, **fields):
    """ A generic handler which prints all the fields.

    Dumps every field as "name: value" followed by a separator line;
    usable as a catch-all handler for any protocol sentence.
    """
    for k, v in fields.items():
        print '%s: %s' % (k, v)
    print '-' * 80
|
Switch first handler arg from "server" to "client".
|
Switch first handler arg from "server" to "client".
|
Python
|
mit
|
PaulMcMillan/kismetclient
|
from kismetclient.utils import csv
from kismetclient.exceptions import ServerError
def kismet(server, version, starttime, servername, dumpfiles, uid):
""" Handle server startup string. """
print version, servername, uid
def capability(server, CAPABILITY, capabilities):
""" Register a server's capability. """
server.capabilities[CAPABILITY] = csv(capabilities)
def protocols(server, protocols):
""" Enumerate capabilities so they can be registered. """
for protocol in csv(protocols):
server.cmd('CAPABILITY', protocol)
def ack(server, cmdid, text):
""" Handle ack messages in response to commands. """
# Simply remove from the in_progress queue
server.in_progress.pop(cmdid)
def error(server, cmdid, text):
""" Handle error messages in response to commands. """
cmd = server.in_progress.pop(cmdid)
raise ServerError(cmd, text)
def print_fields(server, **fields):
""" A generic handler which prints all the fields. """
for k, v in fields.items():
print '%s: %s' % (k, v)
print '-' * 80
Switch first handler arg from "server" to "client".
|
from kismetclient.utils import csv
from kismetclient.exceptions import ServerError
def kismet(client, version, starttime, servername, dumpfiles, uid):
""" Handle server startup string. """
print version, servername, uid
def capability(client, CAPABILITY, capabilities):
""" Register a server capability. """
client.capabilities[CAPABILITY] = csv(capabilities)
def protocols(client, protocols):
""" Enumerate capabilities so they can be registered. """
for protocol in csv(protocols):
client.cmd('CAPABILITY', protocol)
def ack(client, cmdid, text):
""" Handle ack messages in response to commands. """
# Simply remove from the in_progress queue
client.in_progress.pop(cmdid)
def error(client, cmdid, text):
""" Handle error messages in response to commands. """
cmd = client.in_progress.pop(cmdid)
raise ServerError(cmd, text)
def print_fields(client, **fields):
""" A generic handler which prints all the fields. """
for k, v in fields.items():
print '%s: %s' % (k, v)
print '-' * 80
|
<commit_before>from kismetclient.utils import csv
from kismetclient.exceptions import ServerError
def kismet(server, version, starttime, servername, dumpfiles, uid):
""" Handle server startup string. """
print version, servername, uid
def capability(server, CAPABILITY, capabilities):
""" Register a server's capability. """
server.capabilities[CAPABILITY] = csv(capabilities)
def protocols(server, protocols):
""" Enumerate capabilities so they can be registered. """
for protocol in csv(protocols):
server.cmd('CAPABILITY', protocol)
def ack(server, cmdid, text):
""" Handle ack messages in response to commands. """
# Simply remove from the in_progress queue
server.in_progress.pop(cmdid)
def error(server, cmdid, text):
""" Handle error messages in response to commands. """
cmd = server.in_progress.pop(cmdid)
raise ServerError(cmd, text)
def print_fields(server, **fields):
""" A generic handler which prints all the fields. """
for k, v in fields.items():
print '%s: %s' % (k, v)
print '-' * 80
<commit_msg>Switch first handler arg from "server" to "client".<commit_after>
|
from kismetclient.utils import csv
from kismetclient.exceptions import ServerError
def kismet(client, version, starttime, servername, dumpfiles, uid):
""" Handle server startup string. """
print version, servername, uid
def capability(client, CAPABILITY, capabilities):
""" Register a server capability. """
client.capabilities[CAPABILITY] = csv(capabilities)
def protocols(client, protocols):
""" Enumerate capabilities so they can be registered. """
for protocol in csv(protocols):
client.cmd('CAPABILITY', protocol)
def ack(client, cmdid, text):
""" Handle ack messages in response to commands. """
# Simply remove from the in_progress queue
client.in_progress.pop(cmdid)
def error(client, cmdid, text):
""" Handle error messages in response to commands. """
cmd = client.in_progress.pop(cmdid)
raise ServerError(cmd, text)
def print_fields(client, **fields):
""" A generic handler which prints all the fields. """
for k, v in fields.items():
print '%s: %s' % (k, v)
print '-' * 80
|
from kismetclient.utils import csv
from kismetclient.exceptions import ServerError
def kismet(server, version, starttime, servername, dumpfiles, uid):
""" Handle server startup string. """
print version, servername, uid
def capability(server, CAPABILITY, capabilities):
""" Register a server's capability. """
server.capabilities[CAPABILITY] = csv(capabilities)
def protocols(server, protocols):
""" Enumerate capabilities so they can be registered. """
for protocol in csv(protocols):
server.cmd('CAPABILITY', protocol)
def ack(server, cmdid, text):
""" Handle ack messages in response to commands. """
# Simply remove from the in_progress queue
server.in_progress.pop(cmdid)
def error(server, cmdid, text):
""" Handle error messages in response to commands. """
cmd = server.in_progress.pop(cmdid)
raise ServerError(cmd, text)
def print_fields(server, **fields):
""" A generic handler which prints all the fields. """
for k, v in fields.items():
print '%s: %s' % (k, v)
print '-' * 80
Switch first handler arg from "server" to "client".from kismetclient.utils import csv
from kismetclient.exceptions import ServerError
def kismet(client, version, starttime, servername, dumpfiles, uid):
""" Handle server startup string. """
print version, servername, uid
def capability(client, CAPABILITY, capabilities):
""" Register a server capability. """
client.capabilities[CAPABILITY] = csv(capabilities)
def protocols(client, protocols):
""" Enumerate capabilities so they can be registered. """
for protocol in csv(protocols):
client.cmd('CAPABILITY', protocol)
def ack(client, cmdid, text):
""" Handle ack messages in response to commands. """
# Simply remove from the in_progress queue
client.in_progress.pop(cmdid)
def error(client, cmdid, text):
""" Handle error messages in response to commands. """
cmd = client.in_progress.pop(cmdid)
raise ServerError(cmd, text)
def print_fields(client, **fields):
""" A generic handler which prints all the fields. """
for k, v in fields.items():
print '%s: %s' % (k, v)
print '-' * 80
|
<commit_before>from kismetclient.utils import csv
from kismetclient.exceptions import ServerError
def kismet(server, version, starttime, servername, dumpfiles, uid):
""" Handle server startup string. """
print version, servername, uid
def capability(server, CAPABILITY, capabilities):
""" Register a server's capability. """
server.capabilities[CAPABILITY] = csv(capabilities)
def protocols(server, protocols):
""" Enumerate capabilities so they can be registered. """
for protocol in csv(protocols):
server.cmd('CAPABILITY', protocol)
def ack(server, cmdid, text):
""" Handle ack messages in response to commands. """
# Simply remove from the in_progress queue
server.in_progress.pop(cmdid)
def error(server, cmdid, text):
""" Handle error messages in response to commands. """
cmd = server.in_progress.pop(cmdid)
raise ServerError(cmd, text)
def print_fields(server, **fields):
""" A generic handler which prints all the fields. """
for k, v in fields.items():
print '%s: %s' % (k, v)
print '-' * 80
<commit_msg>Switch first handler arg from "server" to "client".<commit_after>from kismetclient.utils import csv
from kismetclient.exceptions import ServerError
def kismet(client, version, starttime, servername, dumpfiles, uid):
""" Handle server startup string. """
print version, servername, uid
def capability(client, CAPABILITY, capabilities):
""" Register a server capability. """
client.capabilities[CAPABILITY] = csv(capabilities)
def protocols(client, protocols):
""" Enumerate capabilities so they can be registered. """
for protocol in csv(protocols):
client.cmd('CAPABILITY', protocol)
def ack(client, cmdid, text):
""" Handle ack messages in response to commands. """
# Simply remove from the in_progress queue
client.in_progress.pop(cmdid)
def error(client, cmdid, text):
""" Handle error messages in response to commands. """
cmd = client.in_progress.pop(cmdid)
raise ServerError(cmd, text)
def print_fields(client, **fields):
""" A generic handler which prints all the fields. """
for k, v in fields.items():
print '%s: %s' % (k, v)
print '-' * 80
|
6ccd9722a6db66666a9400caf7d124c5ac25ab08
|
post_pizza_slices.py
|
post_pizza_slices.py
|
import boto3, json
sdb = boto3.client('sdb')
def lambda_handler(data, context):
    """
    Handler for posting data to SimpleDB.

    Adds the submitted slice counts to the running totals stored in the
    SimpleDB 'pizza' domain, one item per person.

    Args:
        data -- Data to be stored (Dictionary); expects 'Password' plus an
                integer slice count per person ('Sharon', 'Ryan').
        context -- AWS context for the request (Object).

    Returns:
        A JSON-encoded string carrying a 'Success' or 'Error' message.
    """
    # .get() avoids a KeyError (an opaque Lambda crash) when the request
    # omits the password entirely.
    if data.get('Password') == 'INSERT PASSWORD':
        try:
            for person in ['Sharon', 'Ryan']:
                # SimpleDB stores values as strings; convert explicitly.
                old_slice_count = int(
                    sdb.get_attributes(
                        DomainName='pizza',
                        ItemName=person,
                        AttributeNames=['Slices']
                    )['Attributes'][0]['Value']
                )
                new_slice_count = old_slice_count + data[person]
                sdb.put_attributes(
                    DomainName='pizza',
                    ItemName=person,
                    Attributes=[{
                        'Name': 'Slices',
                        'Value': str(new_slice_count),
                        'Replace': True
                    }]
                )
            return json.dumps({'Success': 'Your data was submitted!'})
        except Exception:
            # Exception (not a bare except) so SystemExit and
            # KeyboardInterrupt are not swallowed.
            return json.dumps({'Error': 'Your data was not submitted.'})
    else:
        return json.dumps({
            'Error': 'Ah ah ah, you didn\'t say the magic word.'
        })
|
import boto3
sdb = boto3.client('sdb')
def lambda_handler(data, context):
    """
    Handler for posting data to SimpleDB.

    Adds the submitted slice counts to the running totals stored in the
    SimpleDB 'pizza' domain, one item per person.

    Args:
        data -- Data to be stored (Dictionary); expects 'Password' plus an
                integer slice count per person ('Sharon', 'Ryan').
        context -- AWS context for the request (Object).

    Returns:
        A dictionary carrying a 'Success' or 'Error' message.
    """
    # .get() avoids a KeyError (an opaque Lambda crash) when the request
    # omits the password entirely.
    if data.get('Password') == 'INSERT PASSWORD':
        try:
            for person in ['Sharon', 'Ryan']:
                # SimpleDB stores values as strings; convert explicitly.
                old_slice_count = int(
                    sdb.get_attributes(
                        DomainName='pizza',
                        ItemName=person,
                        AttributeNames=['Slices']
                    )['Attributes'][0]['Value']
                )
                new_slice_count = old_slice_count + data[person]
                sdb.put_attributes(
                    DomainName='pizza',
                    ItemName=person,
                    Attributes=[{
                        'Name': 'Slices',
                        'Value': str(new_slice_count),
                        'Replace': True
                    }]
                )
            return {'Success': 'Your data was submitted!'}
        except Exception:
            # Exception (not a bare except) so SystemExit and
            # KeyboardInterrupt are not swallowed.
            return {'Error': 'Your data was not submitted.'}
    else:
        return {'Error': 'Ah ah ah, you didn\'t say the magic word.'}
|
Remove JSON dependency in POST logic.
|
Remove JSON dependency in POST logic.
|
Python
|
mit
|
ryandasher/pizza-tracker,ryandasher/pizza-tracker,ryandasher/pizza-tracker,ryandasher/pizza-tracker
|
import boto3, json
sdb = boto3.client('sdb')
def lambda_handler(data, context):
"""
Handler for posting data to SimpleDB.
Args:
data -- Data to be stored (Dictionary).
context -- AWS context for the request (Object).
"""
if data['Password'] and data['Password'] == 'INSERT PASSWORD':
try:
for person in ['Sharon', 'Ryan']:
old_slice_count = int(
sdb.get_attributes(
DomainName='pizza',
ItemName=person,
AttributeNames=['Slices']
)['Attributes'][0]['Value']
)
new_slice_count = old_slice_count + data[person]
sdb.put_attributes(
DomainName='pizza',
ItemName=person,
Attributes=[{
'Name': 'Slices',
'Value': str(new_slice_count),
'Replace': True
}]
)
return json.dumps({'Success': 'Your data was submitted!'})
except:
return json.dumps({'Error': 'Your data was not submitted.'})
else:
return json.dumps({
'Error': 'Ah ah ah, you didn\'t say the magic word.'
})
Remove JSON dependency in POST logic.
|
import boto3
sdb = boto3.client('sdb')
def lambda_handler(data, context):
"""
Handler for posting data to SimpleDB.
Args:
data -- Data to be stored (Dictionary).
context -- AWS context for the request (Object).
"""
if data['Password'] and data['Password'] == 'INSERT PASSWORD':
try:
for person in ['Sharon', 'Ryan']:
old_slice_count = int(
sdb.get_attributes(
DomainName='pizza',
ItemName=person,
AttributeNames=['Slices']
)['Attributes'][0]['Value']
)
new_slice_count = old_slice_count + data[person]
sdb.put_attributes(
DomainName='pizza',
ItemName=person,
Attributes=[{
'Name': 'Slices',
'Value': str(new_slice_count),
'Replace': True
}]
)
return {'Success': 'Your data was submitted!'}
except:
return {'Error': 'Your data was not submitted.'}
else:
return {'Error': 'Ah ah ah, you didn\'t say the magic word.'}
|
<commit_before>import boto3, json
sdb = boto3.client('sdb')
def lambda_handler(data, context):
"""
Handler for posting data to SimpleDB.
Args:
data -- Data to be stored (Dictionary).
context -- AWS context for the request (Object).
"""
if data['Password'] and data['Password'] == 'INSERT PASSWORD':
try:
for person in ['Sharon', 'Ryan']:
old_slice_count = int(
sdb.get_attributes(
DomainName='pizza',
ItemName=person,
AttributeNames=['Slices']
)['Attributes'][0]['Value']
)
new_slice_count = old_slice_count + data[person]
sdb.put_attributes(
DomainName='pizza',
ItemName=person,
Attributes=[{
'Name': 'Slices',
'Value': str(new_slice_count),
'Replace': True
}]
)
return json.dumps({'Success': 'Your data was submitted!'})
except:
return json.dumps({'Error': 'Your data was not submitted.'})
else:
return json.dumps({
'Error': 'Ah ah ah, you didn\'t say the magic word.'
})
<commit_msg>Remove JSON dependency in POST logic.<commit_after>
|
import boto3
sdb = boto3.client('sdb')
def lambda_handler(data, context):
"""
Handler for posting data to SimpleDB.
Args:
data -- Data to be stored (Dictionary).
context -- AWS context for the request (Object).
"""
if data['Password'] and data['Password'] == 'INSERT PASSWORD':
try:
for person in ['Sharon', 'Ryan']:
old_slice_count = int(
sdb.get_attributes(
DomainName='pizza',
ItemName=person,
AttributeNames=['Slices']
)['Attributes'][0]['Value']
)
new_slice_count = old_slice_count + data[person]
sdb.put_attributes(
DomainName='pizza',
ItemName=person,
Attributes=[{
'Name': 'Slices',
'Value': str(new_slice_count),
'Replace': True
}]
)
return {'Success': 'Your data was submitted!'}
except:
return {'Error': 'Your data was not submitted.'}
else:
return {'Error': 'Ah ah ah, you didn\'t say the magic word.'}
|
import boto3, json
sdb = boto3.client('sdb')
def lambda_handler(data, context):
"""
Handler for posting data to SimpleDB.
Args:
data -- Data to be stored (Dictionary).
context -- AWS context for the request (Object).
"""
if data['Password'] and data['Password'] == 'INSERT PASSWORD':
try:
for person in ['Sharon', 'Ryan']:
old_slice_count = int(
sdb.get_attributes(
DomainName='pizza',
ItemName=person,
AttributeNames=['Slices']
)['Attributes'][0]['Value']
)
new_slice_count = old_slice_count + data[person]
sdb.put_attributes(
DomainName='pizza',
ItemName=person,
Attributes=[{
'Name': 'Slices',
'Value': str(new_slice_count),
'Replace': True
}]
)
return json.dumps({'Success': 'Your data was submitted!'})
except:
return json.dumps({'Error': 'Your data was not submitted.'})
else:
return json.dumps({
'Error': 'Ah ah ah, you didn\'t say the magic word.'
})
Remove JSON dependency in POST logic.import boto3
sdb = boto3.client('sdb')
def lambda_handler(data, context):
"""
Handler for posting data to SimpleDB.
Args:
data -- Data to be stored (Dictionary).
context -- AWS context for the request (Object).
"""
if data['Password'] and data['Password'] == 'INSERT PASSWORD':
try:
for person in ['Sharon', 'Ryan']:
old_slice_count = int(
sdb.get_attributes(
DomainName='pizza',
ItemName=person,
AttributeNames=['Slices']
)['Attributes'][0]['Value']
)
new_slice_count = old_slice_count + data[person]
sdb.put_attributes(
DomainName='pizza',
ItemName=person,
Attributes=[{
'Name': 'Slices',
'Value': str(new_slice_count),
'Replace': True
}]
)
return {'Success': 'Your data was submitted!'}
except:
return {'Error': 'Your data was not submitted.'}
else:
return {'Error': 'Ah ah ah, you didn\'t say the magic word.'}
|
<commit_before>import boto3, json
sdb = boto3.client('sdb')
def lambda_handler(data, context):
"""
Handler for posting data to SimpleDB.
Args:
data -- Data to be stored (Dictionary).
context -- AWS context for the request (Object).
"""
if data['Password'] and data['Password'] == 'INSERT PASSWORD':
try:
for person in ['Sharon', 'Ryan']:
old_slice_count = int(
sdb.get_attributes(
DomainName='pizza',
ItemName=person,
AttributeNames=['Slices']
)['Attributes'][0]['Value']
)
new_slice_count = old_slice_count + data[person]
sdb.put_attributes(
DomainName='pizza',
ItemName=person,
Attributes=[{
'Name': 'Slices',
'Value': str(new_slice_count),
'Replace': True
}]
)
return json.dumps({'Success': 'Your data was submitted!'})
except:
return json.dumps({'Error': 'Your data was not submitted.'})
else:
return json.dumps({
'Error': 'Ah ah ah, you didn\'t say the magic word.'
})
<commit_msg>Remove JSON dependency in POST logic.<commit_after>import boto3
sdb = boto3.client('sdb')
def lambda_handler(data, context):
"""
Handler for posting data to SimpleDB.
Args:
data -- Data to be stored (Dictionary).
context -- AWS context for the request (Object).
"""
if data['Password'] and data['Password'] == 'INSERT PASSWORD':
try:
for person in ['Sharon', 'Ryan']:
old_slice_count = int(
sdb.get_attributes(
DomainName='pizza',
ItemName=person,
AttributeNames=['Slices']
)['Attributes'][0]['Value']
)
new_slice_count = old_slice_count + data[person]
sdb.put_attributes(
DomainName='pizza',
ItemName=person,
Attributes=[{
'Name': 'Slices',
'Value': str(new_slice_count),
'Replace': True
}]
)
return {'Success': 'Your data was submitted!'}
except:
return {'Error': 'Your data was not submitted.'}
else:
return {'Error': 'Ah ah ah, you didn\'t say the magic word.'}
|
e128a1c594bc4ad826249420ed53a60841fecd5c
|
main.py
|
main.py
|
import logging
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from caaas import app
from caaas.cleanup_thread import cleanup_task
from caaas.config_parser import config
DEBUG = True
log = logging.getLogger("caaas")
def main():
    """Configure logging, start the periodic cleanup task and serve the
    app through Tornado's HTTP server on port 5000 (blocks forever)."""
    # NOTE(review): indentation was lost in this copy; only basicConfig is
    # assumed to sit under the DEBUG guard -- confirm against the repo.
    if DEBUG:
        logging.basicConfig(level=logging.DEBUG)
    # Silence noisy third-party loggers regardless of our own log level.
    logging.getLogger("requests").setLevel(logging.WARNING)
    logging.getLogger("tornado").setLevel(logging.WARNING)
    print("Starting app...")
    # Cap request bodies at 16 MiB.
    app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
    # Wrap the WSGI app so Tornado's event loop can serve it.
    http_server = HTTPServer(WSGIContainer(app))
    http_server.listen(5000, "0.0.0.0")
    ioloop = IOLoop.instance()
    # Interval is configured in seconds; PeriodicCallback expects ms.
    PeriodicCallback(cleanup_task, int(config.cleanup_thread_interval) * 1000).start()
    ioloop.start()
if __name__ == "__main__":
main()
|
import logging
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from caaas import app
from caaas.cleanup_thread import cleanup_task
from caaas.config_parser import config
DEBUG = True
log = logging.getLogger("caaas")
def main():
    """Configure logging, start the periodic cleanup task and serve the
    app through Tornado's HTTP server on port 4000 (blocks forever)."""
    # NOTE(review): indentation was lost in this copy; only basicConfig is
    # assumed to sit under the DEBUG guard -- confirm against the repo.
    if DEBUG:
        logging.basicConfig(level=logging.DEBUG)
    # Silence noisy third-party loggers regardless of our own log level.
    logging.getLogger("requests").setLevel(logging.WARNING)
    logging.getLogger("tornado").setLevel(logging.WARNING)
    print("Starting app...")
    # Cap request bodies at 16 MiB.
    app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
    # Wrap the WSGI app so Tornado's event loop can serve it.
    http_server = HTTPServer(WSGIContainer(app))
    http_server.listen(4000, "0.0.0.0")
    ioloop = IOLoop.instance()
    # Interval is configured in seconds; PeriodicCallback expects ms.
    PeriodicCallback(cleanup_task, int(config.cleanup_thread_interval) * 1000).start()
    ioloop.start()
if __name__ == "__main__":
main()
|
Change port from 5000 to 4000
|
Change port from 5000 to 4000
|
Python
|
apache-2.0
|
DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe
|
import logging
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from caaas import app
from caaas.cleanup_thread import cleanup_task
from caaas.config_parser import config
DEBUG = True
log = logging.getLogger("caaas")
def main():
if DEBUG:
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("tornado").setLevel(logging.WARNING)
print("Starting app...")
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(5000, "0.0.0.0")
ioloop = IOLoop.instance()
PeriodicCallback(cleanup_task, int(config.cleanup_thread_interval) * 1000).start()
ioloop.start()
if __name__ == "__main__":
main()
Change port from 5000 to 4000
|
import logging
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from caaas import app
from caaas.cleanup_thread import cleanup_task
from caaas.config_parser import config
DEBUG = True
log = logging.getLogger("caaas")
def main():
if DEBUG:
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("tornado").setLevel(logging.WARNING)
print("Starting app...")
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(4000, "0.0.0.0")
ioloop = IOLoop.instance()
PeriodicCallback(cleanup_task, int(config.cleanup_thread_interval) * 1000).start()
ioloop.start()
if __name__ == "__main__":
main()
|
<commit_before>import logging
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from caaas import app
from caaas.cleanup_thread import cleanup_task
from caaas.config_parser import config
DEBUG = True
log = logging.getLogger("caaas")
def main():
if DEBUG:
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("tornado").setLevel(logging.WARNING)
print("Starting app...")
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(5000, "0.0.0.0")
ioloop = IOLoop.instance()
PeriodicCallback(cleanup_task, int(config.cleanup_thread_interval) * 1000).start()
ioloop.start()
if __name__ == "__main__":
main()
<commit_msg>Change port from 5000 to 4000<commit_after>
|
import logging
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from caaas import app
from caaas.cleanup_thread import cleanup_task
from caaas.config_parser import config
DEBUG = True
log = logging.getLogger("caaas")
def main():
if DEBUG:
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("tornado").setLevel(logging.WARNING)
print("Starting app...")
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(4000, "0.0.0.0")
ioloop = IOLoop.instance()
PeriodicCallback(cleanup_task, int(config.cleanup_thread_interval) * 1000).start()
ioloop.start()
if __name__ == "__main__":
main()
|
import logging
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from caaas import app
from caaas.cleanup_thread import cleanup_task
from caaas.config_parser import config
DEBUG = True
log = logging.getLogger("caaas")
def main():
if DEBUG:
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("tornado").setLevel(logging.WARNING)
print("Starting app...")
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(5000, "0.0.0.0")
ioloop = IOLoop.instance()
PeriodicCallback(cleanup_task, int(config.cleanup_thread_interval) * 1000).start()
ioloop.start()
if __name__ == "__main__":
main()
Change port from 5000 to 4000import logging
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from caaas import app
from caaas.cleanup_thread import cleanup_task
from caaas.config_parser import config
DEBUG = True
log = logging.getLogger("caaas")
def main():
if DEBUG:
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("tornado").setLevel(logging.WARNING)
print("Starting app...")
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(4000, "0.0.0.0")
ioloop = IOLoop.instance()
PeriodicCallback(cleanup_task, int(config.cleanup_thread_interval) * 1000).start()
ioloop.start()
if __name__ == "__main__":
main()
|
<commit_before>import logging
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from caaas import app
from caaas.cleanup_thread import cleanup_task
from caaas.config_parser import config
DEBUG = True
log = logging.getLogger("caaas")
def main():
if DEBUG:
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("tornado").setLevel(logging.WARNING)
print("Starting app...")
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(5000, "0.0.0.0")
ioloop = IOLoop.instance()
PeriodicCallback(cleanup_task, int(config.cleanup_thread_interval) * 1000).start()
ioloop.start()
if __name__ == "__main__":
main()
<commit_msg>Change port from 5000 to 4000<commit_after>import logging
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from caaas import app
from caaas.cleanup_thread import cleanup_task
from caaas.config_parser import config
DEBUG = True
log = logging.getLogger("caaas")
def main():
if DEBUG:
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("tornado").setLevel(logging.WARNING)
print("Starting app...")
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(4000, "0.0.0.0")
ioloop = IOLoop.instance()
PeriodicCallback(cleanup_task, int(config.cleanup_thread_interval) * 1000).start()
ioloop.start()
if __name__ == "__main__":
main()
|
68db590eb373ea2b293f5619dcd7d6515f454507
|
sparkback/__init__.py
|
sparkback/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import division
import argparse
ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(data):
m = min(data)
n = (max(data) - m) / (len(ticks) - 1)
# if every element is the same height return all lower ticks, else compute
# the tick height
if n == 0:
return [ ticks[0] for t in data]
else:
return [ ticks[int((t - m) / n)] for t in data ]
def print_ansi_spark(d):
print ''.join(d)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Process numbers')
parser.add_argument('numbers', metavar='N', type=float, nargs='+',
help='series of data to plot')
args = parser.parse_args()
print_ansi_spark(scale_data(args.numbers))
|
# -*- coding: utf-8 -*-
from __future__ import division
import argparse
ansi_ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(data, ticks):
m = min(data)
n = (max(data) - m) / (len(ticks) - 1)
# if every element is the same height return all lower ticks, else compute
# the tick height
if n == 0:
return [ ticks[0] for t in data]
else:
return [ ticks[int((t - m) / n)] for t in data ]
def print_ansi_spark(d):
print ''.join(d)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Process numbers')
parser.add_argument('numbers', metavar='N', type=float, nargs='+',
help='series of data to plot')
args = parser.parse_args()
print_ansi_spark(scale_data(args.numbers, ansi_ticks))
|
Make ticks passable argument to data
|
Make ticks passable argument to data
|
Python
|
mit
|
mmichie/sparkback
|
# -*- coding: utf-8 -*-
from __future__ import division
import argparse
ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(data):
m = min(data)
n = (max(data) - m) / (len(ticks) - 1)
# if every element is the same height return all lower ticks, else compute
# the tick height
if n == 0:
return [ ticks[0] for t in data]
else:
return [ ticks[int((t - m) / n)] for t in data ]
def print_ansi_spark(d):
print ''.join(d)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Process numbers')
parser.add_argument('numbers', metavar='N', type=float, nargs='+',
help='series of data to plot')
args = parser.parse_args()
print_ansi_spark(scale_data(args.numbers))
Make ticks passable argument to data
|
# -*- coding: utf-8 -*-
from __future__ import division
import argparse
ansi_ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(data, ticks):
m = min(data)
n = (max(data) - m) / (len(ticks) - 1)
# if every element is the same height return all lower ticks, else compute
# the tick height
if n == 0:
return [ ticks[0] for t in data]
else:
return [ ticks[int((t - m) / n)] for t in data ]
def print_ansi_spark(d):
print ''.join(d)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Process numbers')
parser.add_argument('numbers', metavar='N', type=float, nargs='+',
help='series of data to plot')
args = parser.parse_args()
print_ansi_spark(scale_data(args.numbers, ansi_ticks))
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import division
import argparse
ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(data):
m = min(data)
n = (max(data) - m) / (len(ticks) - 1)
# if every element is the same height return all lower ticks, else compute
# the tick height
if n == 0:
return [ ticks[0] for t in data]
else:
return [ ticks[int((t - m) / n)] for t in data ]
def print_ansi_spark(d):
print ''.join(d)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Process numbers')
parser.add_argument('numbers', metavar='N', type=float, nargs='+',
help='series of data to plot')
args = parser.parse_args()
print_ansi_spark(scale_data(args.numbers))
<commit_msg>Make ticks passable argument to data<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import division
import argparse
ansi_ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(data, ticks):
m = min(data)
n = (max(data) - m) / (len(ticks) - 1)
# if every element is the same height return all lower ticks, else compute
# the tick height
if n == 0:
return [ ticks[0] for t in data]
else:
return [ ticks[int((t - m) / n)] for t in data ]
def print_ansi_spark(d):
print ''.join(d)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Process numbers')
parser.add_argument('numbers', metavar='N', type=float, nargs='+',
help='series of data to plot')
args = parser.parse_args()
print_ansi_spark(scale_data(args.numbers, ansi_ticks))
|
# -*- coding: utf-8 -*-
from __future__ import division
import argparse
ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(data):
m = min(data)
n = (max(data) - m) / (len(ticks) - 1)
# if every element is the same height return all lower ticks, else compute
# the tick height
if n == 0:
return [ ticks[0] for t in data]
else:
return [ ticks[int((t - m) / n)] for t in data ]
def print_ansi_spark(d):
print ''.join(d)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Process numbers')
parser.add_argument('numbers', metavar='N', type=float, nargs='+',
help='series of data to plot')
args = parser.parse_args()
print_ansi_spark(scale_data(args.numbers))
Make ticks passable argument to data# -*- coding: utf-8 -*-
from __future__ import division
import argparse
ansi_ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(data, ticks):
m = min(data)
n = (max(data) - m) / (len(ticks) - 1)
# if every element is the same height return all lower ticks, else compute
# the tick height
if n == 0:
return [ ticks[0] for t in data]
else:
return [ ticks[int((t - m) / n)] for t in data ]
def print_ansi_spark(d):
print ''.join(d)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Process numbers')
parser.add_argument('numbers', metavar='N', type=float, nargs='+',
help='series of data to plot')
args = parser.parse_args()
print_ansi_spark(scale_data(args.numbers, ansi_ticks))
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import division
import argparse
ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(data):
m = min(data)
n = (max(data) - m) / (len(ticks) - 1)
# if every element is the same height return all lower ticks, else compute
# the tick height
if n == 0:
return [ ticks[0] for t in data]
else:
return [ ticks[int((t - m) / n)] for t in data ]
def print_ansi_spark(d):
print ''.join(d)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Process numbers')
parser.add_argument('numbers', metavar='N', type=float, nargs='+',
help='series of data to plot')
args = parser.parse_args()
print_ansi_spark(scale_data(args.numbers))
<commit_msg>Make ticks passable argument to data<commit_after># -*- coding: utf-8 -*-
from __future__ import division
import argparse
ansi_ticks = ('▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def scale_data(data, ticks):
m = min(data)
n = (max(data) - m) / (len(ticks) - 1)
# if every element is the same height return all lower ticks, else compute
# the tick height
if n == 0:
return [ ticks[0] for t in data]
else:
return [ ticks[int((t - m) / n)] for t in data ]
def print_ansi_spark(d):
print ''.join(d)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Process numbers')
parser.add_argument('numbers', metavar='N', type=float, nargs='+',
help='series of data to plot')
args = parser.parse_args()
print_ansi_spark(scale_data(args.numbers, ansi_ticks))
|
017182e317aa33c0bb4c13541ef19b11bb48e250
|
members/views.py
|
members/views.py
|
from django.shortcuts import render
from django.http import HttpResponse
from .models import User
def homepage(request):
return render(request, "index.html", {})
def search(request, name):
members = User.objects.filter(first_name__icontains=name) or \
User.objects.filter(last_name__icontains=name) or \
User.objects.filter(username__icontains=name)
json_data = [dict(
id=member.id,
full_name=' '.join([member.first_name, member.last_name]))
for member in members]
return HttpResponse(json_data, mimetype='application/json')
|
from django.shortcuts import render
from django.http import HttpResponse
from hackfmi.utils import json_view
from .models import User
def homepage(request):
return render(request, "index.html", {})
@json_view
def search(request, name):
members = User.objects.filter(first_name__icontains=name) or \
User.objects.filter(last_name__icontains=name) or \
User.objects.filter(username__icontains=name)
json_data = [dict(
id=member.id,
full_name=' '.join([member.first_name, member.last_name]))
for member in members]
return json_data
|
Add view for searching users and return json format
|
Add view for searching users and return json format
|
Python
|
mit
|
Hackfmi/Diaphanum,Hackfmi/Diaphanum
|
from django.shortcuts import render
from django.http import HttpResponse
from .models import User
def homepage(request):
return render(request, "index.html", {})
def search(request, name):
members = User.objects.filter(first_name__icontains=name) or \
User.objects.filter(last_name__icontains=name) or \
User.objects.filter(username__icontains=name)
json_data = [dict(
id=member.id,
full_name=' '.join([member.first_name, member.last_name]))
for member in members]
return HttpResponse(json_data, mimetype='application/json')
Add view for searching users and return json format
|
from django.shortcuts import render
from django.http import HttpResponse
from hackfmi.utils import json_view
from .models import User
def homepage(request):
return render(request, "index.html", {})
@json_view
def search(request, name):
members = User.objects.filter(first_name__icontains=name) or \
User.objects.filter(last_name__icontains=name) or \
User.objects.filter(username__icontains=name)
json_data = [dict(
id=member.id,
full_name=' '.join([member.first_name, member.last_name]))
for member in members]
return json_data
|
<commit_before>from django.shortcuts import render
from django.http import HttpResponse
from .models import User
def homepage(request):
return render(request, "index.html", {})
def search(request, name):
members = User.objects.filter(first_name__icontains=name) or \
User.objects.filter(last_name__icontains=name) or \
User.objects.filter(username__icontains=name)
json_data = [dict(
id=member.id,
full_name=' '.join([member.first_name, member.last_name]))
for member in members]
return HttpResponse(json_data, mimetype='application/json')
<commit_msg>Add view for searching users and return json format<commit_after>
|
from django.shortcuts import render
from django.http import HttpResponse
from hackfmi.utils import json_view
from .models import User
def homepage(request):
return render(request, "index.html", {})
@json_view
def search(request, name):
members = User.objects.filter(first_name__icontains=name) or \
User.objects.filter(last_name__icontains=name) or \
User.objects.filter(username__icontains=name)
json_data = [dict(
id=member.id,
full_name=' '.join([member.first_name, member.last_name]))
for member in members]
return json_data
|
from django.shortcuts import render
from django.http import HttpResponse
from .models import User
def homepage(request):
return render(request, "index.html", {})
def search(request, name):
members = User.objects.filter(first_name__icontains=name) or \
User.objects.filter(last_name__icontains=name) or \
User.objects.filter(username__icontains=name)
json_data = [dict(
id=member.id,
full_name=' '.join([member.first_name, member.last_name]))
for member in members]
return HttpResponse(json_data, mimetype='application/json')
Add view for searching users and return json formatfrom django.shortcuts import render
from django.http import HttpResponse
from hackfmi.utils import json_view
from .models import User
def homepage(request):
return render(request, "index.html", {})
@json_view
def search(request, name):
members = User.objects.filter(first_name__icontains=name) or \
User.objects.filter(last_name__icontains=name) or \
User.objects.filter(username__icontains=name)
json_data = [dict(
id=member.id,
full_name=' '.join([member.first_name, member.last_name]))
for member in members]
return json_data
|
<commit_before>from django.shortcuts import render
from django.http import HttpResponse
from .models import User
def homepage(request):
return render(request, "index.html", {})
def search(request, name):
members = User.objects.filter(first_name__icontains=name) or \
User.objects.filter(last_name__icontains=name) or \
User.objects.filter(username__icontains=name)
json_data = [dict(
id=member.id,
full_name=' '.join([member.first_name, member.last_name]))
for member in members]
return HttpResponse(json_data, mimetype='application/json')
<commit_msg>Add view for searching users and return json format<commit_after>from django.shortcuts import render
from django.http import HttpResponse
from hackfmi.utils import json_view
from .models import User
def homepage(request):
return render(request, "index.html", {})
@json_view
def search(request, name):
members = User.objects.filter(first_name__icontains=name) or \
User.objects.filter(last_name__icontains=name) or \
User.objects.filter(username__icontains=name)
json_data = [dict(
id=member.id,
full_name=' '.join([member.first_name, member.last_name]))
for member in members]
return json_data
|
375cff83beb17f52ead364eef6e690a8bd118be4
|
app/assets.py
|
app/assets.py
|
from flask_assets import Bundle, Environment, Filter
# fixes missing semicolon in last statement of jquery.pjax.js
class ConcatFilter(Filter):
def concat(self, out, hunks, **kw):
out.write(';'.join([h.data() for h, info in hunks]))
js = Bundle(
'node_modules/jquery/dist/jquery.js',
'node_modules/jquery-pjax/jquery.pjax.js',
'node_modules/bootbox/bootbox.js',
'node_modules/bootstrap/dist/js/bootstrap.min.js',
'js/application.js',
filters=(ConcatFilter, 'jsmin'),
output='gen/packed.js'
)
css = Bundle(
'node_modules/bootstrap/dist/css/bootstrap.css',
'node_modules/font-awesome/css/font-awesome.css',
'css/style.css',
filters=('cssmin','cssrewrite'),
output='gen/packed.css'
)
assets = Environment()
assets.register('js_all', js)
assets.register('css_all', css)
|
from flask_assets import Bundle, Environment, Filter
class ConcatFilter(Filter):
"""
Filter that merges files, placing a semicolon between them.
Fixes issues caused by missing semicolons at end of JS assets, for example
with last statement of jquery.pjax.js.
"""
def concat(self, out, hunks, **kw):
out.write(';'.join([h.data() for h, info in hunks]))
js = Bundle(
'node_modules/jquery/dist/jquery.js',
'node_modules/jquery-pjax/jquery.pjax.js',
'node_modules/bootbox/bootbox.js',
'node_modules/bootstrap/dist/js/bootstrap.min.js',
'js/application.js',
filters=(ConcatFilter, 'jsmin'),
output='gen/packed.js'
)
css = Bundle(
'node_modules/bootstrap/dist/css/bootstrap.css',
'node_modules/font-awesome/css/font-awesome.css',
'css/style.css',
filters=('cssmin','cssrewrite'),
output='gen/packed.css'
)
assets = Environment()
assets.register('js_all', js)
assets.register('css_all', css)
|
Move inline comment to class docstring
|
Move inline comment to class docstring
|
Python
|
mit
|
cburmeister/flask-bones,cburmeister/flask-bones,cburmeister/flask-bones
|
from flask_assets import Bundle, Environment, Filter
# fixes missing semicolon in last statement of jquery.pjax.js
class ConcatFilter(Filter):
def concat(self, out, hunks, **kw):
out.write(';'.join([h.data() for h, info in hunks]))
js = Bundle(
'node_modules/jquery/dist/jquery.js',
'node_modules/jquery-pjax/jquery.pjax.js',
'node_modules/bootbox/bootbox.js',
'node_modules/bootstrap/dist/js/bootstrap.min.js',
'js/application.js',
filters=(ConcatFilter, 'jsmin'),
output='gen/packed.js'
)
css = Bundle(
'node_modules/bootstrap/dist/css/bootstrap.css',
'node_modules/font-awesome/css/font-awesome.css',
'css/style.css',
filters=('cssmin','cssrewrite'),
output='gen/packed.css'
)
assets = Environment()
assets.register('js_all', js)
assets.register('css_all', css)
Move inline comment to class docstring
|
from flask_assets import Bundle, Environment, Filter
class ConcatFilter(Filter):
"""
Filter that merges files, placing a semicolon between them.
Fixes issues caused by missing semicolons at end of JS assets, for example
with last statement of jquery.pjax.js.
"""
def concat(self, out, hunks, **kw):
out.write(';'.join([h.data() for h, info in hunks]))
js = Bundle(
'node_modules/jquery/dist/jquery.js',
'node_modules/jquery-pjax/jquery.pjax.js',
'node_modules/bootbox/bootbox.js',
'node_modules/bootstrap/dist/js/bootstrap.min.js',
'js/application.js',
filters=(ConcatFilter, 'jsmin'),
output='gen/packed.js'
)
css = Bundle(
'node_modules/bootstrap/dist/css/bootstrap.css',
'node_modules/font-awesome/css/font-awesome.css',
'css/style.css',
filters=('cssmin','cssrewrite'),
output='gen/packed.css'
)
assets = Environment()
assets.register('js_all', js)
assets.register('css_all', css)
|
<commit_before>from flask_assets import Bundle, Environment, Filter
# fixes missing semicolon in last statement of jquery.pjax.js
class ConcatFilter(Filter):
def concat(self, out, hunks, **kw):
out.write(';'.join([h.data() for h, info in hunks]))
js = Bundle(
'node_modules/jquery/dist/jquery.js',
'node_modules/jquery-pjax/jquery.pjax.js',
'node_modules/bootbox/bootbox.js',
'node_modules/bootstrap/dist/js/bootstrap.min.js',
'js/application.js',
filters=(ConcatFilter, 'jsmin'),
output='gen/packed.js'
)
css = Bundle(
'node_modules/bootstrap/dist/css/bootstrap.css',
'node_modules/font-awesome/css/font-awesome.css',
'css/style.css',
filters=('cssmin','cssrewrite'),
output='gen/packed.css'
)
assets = Environment()
assets.register('js_all', js)
assets.register('css_all', css)
<commit_msg>Move inline comment to class docstring<commit_after>
|
from flask_assets import Bundle, Environment, Filter
class ConcatFilter(Filter):
"""
Filter that merges files, placing a semicolon between them.
Fixes issues caused by missing semicolons at end of JS assets, for example
with last statement of jquery.pjax.js.
"""
def concat(self, out, hunks, **kw):
out.write(';'.join([h.data() for h, info in hunks]))
js = Bundle(
'node_modules/jquery/dist/jquery.js',
'node_modules/jquery-pjax/jquery.pjax.js',
'node_modules/bootbox/bootbox.js',
'node_modules/bootstrap/dist/js/bootstrap.min.js',
'js/application.js',
filters=(ConcatFilter, 'jsmin'),
output='gen/packed.js'
)
css = Bundle(
'node_modules/bootstrap/dist/css/bootstrap.css',
'node_modules/font-awesome/css/font-awesome.css',
'css/style.css',
filters=('cssmin','cssrewrite'),
output='gen/packed.css'
)
assets = Environment()
assets.register('js_all', js)
assets.register('css_all', css)
|
from flask_assets import Bundle, Environment, Filter
# fixes missing semicolon in last statement of jquery.pjax.js
class ConcatFilter(Filter):
def concat(self, out, hunks, **kw):
out.write(';'.join([h.data() for h, info in hunks]))
js = Bundle(
'node_modules/jquery/dist/jquery.js',
'node_modules/jquery-pjax/jquery.pjax.js',
'node_modules/bootbox/bootbox.js',
'node_modules/bootstrap/dist/js/bootstrap.min.js',
'js/application.js',
filters=(ConcatFilter, 'jsmin'),
output='gen/packed.js'
)
css = Bundle(
'node_modules/bootstrap/dist/css/bootstrap.css',
'node_modules/font-awesome/css/font-awesome.css',
'css/style.css',
filters=('cssmin','cssrewrite'),
output='gen/packed.css'
)
assets = Environment()
assets.register('js_all', js)
assets.register('css_all', css)
Move inline comment to class docstringfrom flask_assets import Bundle, Environment, Filter
class ConcatFilter(Filter):
"""
Filter that merges files, placing a semicolon between them.
Fixes issues caused by missing semicolons at end of JS assets, for example
with last statement of jquery.pjax.js.
"""
def concat(self, out, hunks, **kw):
out.write(';'.join([h.data() for h, info in hunks]))
js = Bundle(
'node_modules/jquery/dist/jquery.js',
'node_modules/jquery-pjax/jquery.pjax.js',
'node_modules/bootbox/bootbox.js',
'node_modules/bootstrap/dist/js/bootstrap.min.js',
'js/application.js',
filters=(ConcatFilter, 'jsmin'),
output='gen/packed.js'
)
css = Bundle(
'node_modules/bootstrap/dist/css/bootstrap.css',
'node_modules/font-awesome/css/font-awesome.css',
'css/style.css',
filters=('cssmin','cssrewrite'),
output='gen/packed.css'
)
assets = Environment()
assets.register('js_all', js)
assets.register('css_all', css)
|
<commit_before>from flask_assets import Bundle, Environment, Filter
# fixes missing semicolon in last statement of jquery.pjax.js
class ConcatFilter(Filter):
def concat(self, out, hunks, **kw):
out.write(';'.join([h.data() for h, info in hunks]))
js = Bundle(
'node_modules/jquery/dist/jquery.js',
'node_modules/jquery-pjax/jquery.pjax.js',
'node_modules/bootbox/bootbox.js',
'node_modules/bootstrap/dist/js/bootstrap.min.js',
'js/application.js',
filters=(ConcatFilter, 'jsmin'),
output='gen/packed.js'
)
css = Bundle(
'node_modules/bootstrap/dist/css/bootstrap.css',
'node_modules/font-awesome/css/font-awesome.css',
'css/style.css',
filters=('cssmin','cssrewrite'),
output='gen/packed.css'
)
assets = Environment()
assets.register('js_all', js)
assets.register('css_all', css)
<commit_msg>Move inline comment to class docstring<commit_after>from flask_assets import Bundle, Environment, Filter
class ConcatFilter(Filter):
"""
Filter that merges files, placing a semicolon between them.
Fixes issues caused by missing semicolons at end of JS assets, for example
with last statement of jquery.pjax.js.
"""
def concat(self, out, hunks, **kw):
out.write(';'.join([h.data() for h, info in hunks]))
js = Bundle(
'node_modules/jquery/dist/jquery.js',
'node_modules/jquery-pjax/jquery.pjax.js',
'node_modules/bootbox/bootbox.js',
'node_modules/bootstrap/dist/js/bootstrap.min.js',
'js/application.js',
filters=(ConcatFilter, 'jsmin'),
output='gen/packed.js'
)
css = Bundle(
'node_modules/bootstrap/dist/css/bootstrap.css',
'node_modules/font-awesome/css/font-awesome.css',
'css/style.css',
filters=('cssmin','cssrewrite'),
output='gen/packed.css'
)
assets = Environment()
assets.register('js_all', js)
assets.register('css_all', css)
|
5363224395b26528465417ff550d6a2163cbe8e6
|
spacy/zh/__init__.py
|
spacy/zh/__init__.py
|
from ..language import Language
from ..tokenizer import Tokenizer
from ..tagger import Tagger
class CharacterTokenizer(Tokenizer):
def __call__(self, text):
return self.tokens_from_list(list(text))
class Chinese(Language):
lang = u'zh'
def __call__(self, text):
doc = self.tokenizer.tokens_from_list(list(text))
self.tagger(doc)
self.merge_characters(doc)
return doc
def merge_characters(self, doc):
start = 0
chunks = []
for token in doc:
if token.tag_ != 'CHAR':
chunk = doc[start : token.i + 1]
chunks.append(chunk)
start = token.i + 1
text = doc.text
for chunk in chunks:
chunk.merge(chunk[-1].tag_, chunk.text, u'')
|
import jieba
from ..language import Language
from ..tokens import Doc
class Chinese(Language):
lang = u'zh'
def make_doc(self, text):
words = list(jieba.cut(text, cut_all=True))
return Doc(self.vocab, words=words, spaces=[False]*len(words))
|
Add draft Jieba tokenizer for Chinese
|
Add draft Jieba tokenizer for Chinese
|
Python
|
mit
|
spacy-io/spaCy,explosion/spaCy,oroszgy/spaCy.hu,explosion/spaCy,recognai/spaCy,honnibal/spaCy,banglakit/spaCy,explosion/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,recognai/spaCy,banglakit/spaCy,recognai/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,spacy-io/spaCy,aikramer2/spaCy,aikramer2/spaCy,explosion/spaCy,honnibal/spaCy,raphael0202/spaCy,spacy-io/spaCy,raphael0202/spaCy,honnibal/spaCy,raphael0202/spaCy,aikramer2/spaCy,honnibal/spaCy,spacy-io/spaCy,recognai/spaCy,Gregory-Howard/spaCy,recognai/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,raphael0202/spaCy,banglakit/spaCy,recognai/spaCy,explosion/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,aikramer2/spaCy,explosion/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,banglakit/spaCy,spacy-io/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy
|
from ..language import Language
from ..tokenizer import Tokenizer
from ..tagger import Tagger
class CharacterTokenizer(Tokenizer):
def __call__(self, text):
return self.tokens_from_list(list(text))
class Chinese(Language):
lang = u'zh'
def __call__(self, text):
doc = self.tokenizer.tokens_from_list(list(text))
self.tagger(doc)
self.merge_characters(doc)
return doc
def merge_characters(self, doc):
start = 0
chunks = []
for token in doc:
if token.tag_ != 'CHAR':
chunk = doc[start : token.i + 1]
chunks.append(chunk)
start = token.i + 1
text = doc.text
for chunk in chunks:
chunk.merge(chunk[-1].tag_, chunk.text, u'')
Add draft Jieba tokenizer for Chinese
|
import jieba
from ..language import Language
from ..tokens import Doc
class Chinese(Language):
lang = u'zh'
def make_doc(self, text):
words = list(jieba.cut(text, cut_all=True))
return Doc(self.vocab, words=words, spaces=[False]*len(words))
|
<commit_before>from ..language import Language
from ..tokenizer import Tokenizer
from ..tagger import Tagger
class CharacterTokenizer(Tokenizer):
def __call__(self, text):
return self.tokens_from_list(list(text))
class Chinese(Language):
lang = u'zh'
def __call__(self, text):
doc = self.tokenizer.tokens_from_list(list(text))
self.tagger(doc)
self.merge_characters(doc)
return doc
def merge_characters(self, doc):
start = 0
chunks = []
for token in doc:
if token.tag_ != 'CHAR':
chunk = doc[start : token.i + 1]
chunks.append(chunk)
start = token.i + 1
text = doc.text
for chunk in chunks:
chunk.merge(chunk[-1].tag_, chunk.text, u'')
<commit_msg>Add draft Jieba tokenizer for Chinese<commit_after>
|
import jieba
from ..language import Language
from ..tokens import Doc
class Chinese(Language):
lang = u'zh'
def make_doc(self, text):
words = list(jieba.cut(text, cut_all=True))
return Doc(self.vocab, words=words, spaces=[False]*len(words))
|
from ..language import Language
from ..tokenizer import Tokenizer
from ..tagger import Tagger
class CharacterTokenizer(Tokenizer):
def __call__(self, text):
return self.tokens_from_list(list(text))
class Chinese(Language):
lang = u'zh'
def __call__(self, text):
doc = self.tokenizer.tokens_from_list(list(text))
self.tagger(doc)
self.merge_characters(doc)
return doc
def merge_characters(self, doc):
start = 0
chunks = []
for token in doc:
if token.tag_ != 'CHAR':
chunk = doc[start : token.i + 1]
chunks.append(chunk)
start = token.i + 1
text = doc.text
for chunk in chunks:
chunk.merge(chunk[-1].tag_, chunk.text, u'')
Add draft Jieba tokenizer for Chineseimport jieba
from ..language import Language
from ..tokens import Doc
class Chinese(Language):
lang = u'zh'
def make_doc(self, text):
words = list(jieba.cut(text, cut_all=True))
return Doc(self.vocab, words=words, spaces=[False]*len(words))
|
<commit_before>from ..language import Language
from ..tokenizer import Tokenizer
from ..tagger import Tagger
class CharacterTokenizer(Tokenizer):
def __call__(self, text):
return self.tokens_from_list(list(text))
class Chinese(Language):
lang = u'zh'
def __call__(self, text):
doc = self.tokenizer.tokens_from_list(list(text))
self.tagger(doc)
self.merge_characters(doc)
return doc
def merge_characters(self, doc):
start = 0
chunks = []
for token in doc:
if token.tag_ != 'CHAR':
chunk = doc[start : token.i + 1]
chunks.append(chunk)
start = token.i + 1
text = doc.text
for chunk in chunks:
chunk.merge(chunk[-1].tag_, chunk.text, u'')
<commit_msg>Add draft Jieba tokenizer for Chinese<commit_after>import jieba
from ..language import Language
from ..tokens import Doc
class Chinese(Language):
lang = u'zh'
def make_doc(self, text):
words = list(jieba.cut(text, cut_all=True))
return Doc(self.vocab, words=words, spaces=[False]*len(words))
|
37fa646f4ddafd4db36e5c29b738408f0e26346f
|
rejected/__init__.py
|
rejected/__init__.py
|
"""
Rejected, a Python AMQP Consumer daemon
"""
__author__ = 'Gavin M. Roy <gmr@myyearbook.com>'
__date__ = "2009-09-10"
__version__ = "1.0.0"
__all__ = ['cli', 'common', 'mcp', 'utils']
|
"""
Rejected, a Python AMQP Consumer daemon
"""
__author__ = 'Gavin M. Roy <gmr@myyearbook.com>'
__since__ = "2009-09-10"
__version__ = "2.0.1"
__all__ = ['cli', 'common', 'mcp', 'utils']
|
Fix the variable name for since and update the version number
|
Fix the variable name for since and update the version number
|
Python
|
bsd-3-clause
|
gmr/rejected,gmr/rejected
|
"""
Rejected, a Python AMQP Consumer daemon
"""
__author__ = 'Gavin M. Roy <gmr@myyearbook.com>'
__date__ = "2009-09-10"
__version__ = "1.0.0"
__all__ = ['cli', 'common', 'mcp', 'utils']
Fix the variable name for since and update the version number
|
"""
Rejected, a Python AMQP Consumer daemon
"""
__author__ = 'Gavin M. Roy <gmr@myyearbook.com>'
__since__ = "2009-09-10"
__version__ = "2.0.1"
__all__ = ['cli', 'common', 'mcp', 'utils']
|
<commit_before>"""
Rejected, a Python AMQP Consumer daemon
"""
__author__ = 'Gavin M. Roy <gmr@myyearbook.com>'
__date__ = "2009-09-10"
__version__ = "1.0.0"
__all__ = ['cli', 'common', 'mcp', 'utils']
<commit_msg>Fix the variable name for since and update the version number<commit_after>
|
"""
Rejected, a Python AMQP Consumer daemon
"""
__author__ = 'Gavin M. Roy <gmr@myyearbook.com>'
__since__ = "2009-09-10"
__version__ = "2.0.1"
__all__ = ['cli', 'common', 'mcp', 'utils']
|
"""
Rejected, a Python AMQP Consumer daemon
"""
__author__ = 'Gavin M. Roy <gmr@myyearbook.com>'
__date__ = "2009-09-10"
__version__ = "1.0.0"
__all__ = ['cli', 'common', 'mcp', 'utils']
Fix the variable name for since and update the version number"""
Rejected, a Python AMQP Consumer daemon
"""
__author__ = 'Gavin M. Roy <gmr@myyearbook.com>'
__since__ = "2009-09-10"
__version__ = "2.0.1"
__all__ = ['cli', 'common', 'mcp', 'utils']
|
<commit_before>"""
Rejected, a Python AMQP Consumer daemon
"""
__author__ = 'Gavin M. Roy <gmr@myyearbook.com>'
__date__ = "2009-09-10"
__version__ = "1.0.0"
__all__ = ['cli', 'common', 'mcp', 'utils']
<commit_msg>Fix the variable name for since and update the version number<commit_after>"""
Rejected, a Python AMQP Consumer daemon
"""
__author__ = 'Gavin M. Roy <gmr@myyearbook.com>'
__since__ = "2009-09-10"
__version__ = "2.0.1"
__all__ = ['cli', 'common', 'mcp', 'utils']
|
2bd4adcb742fbe2d59e18828f0445dff2fba68d9
|
main.py
|
main.py
|
#!/usr/bin/env python
"""
This is the main file. The script finds the game window and sets up the
coordinates for each block.
The MIT License (MIT)
(c) 2016
"""
import pyautogui, logging, time
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s.%(msecs)03d: %(message)s', datefmt='%H:%M:%S')
# logging.disable(logging.DEBUG) # uncomment to block debug log messages
__author__ = "Alex Flores Escarcega"
__copyright__ = "Copyright 2007, Alex Flores Escarcega"
__credits__ = ["Alex Flores Escarcega"]
__license__ = "MIT"
__version__ = "1.0.1"
__maintainer__ = "Alex Flores Escarcega"
__email__ = "alex.floresescar@gmail.com"
__status__ = "Development"
def main():
"""
No inputs
No outputs
Starts up the gameregionfinder
"""
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
"""
This is the main file. The script finds the game window and sets up the
coordinates for each block.
The MIT License (MIT)
(c) 2016
"""
import pyautogui, logging, time
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s.%(msecs)03d: %(message)s', datefmt='%H:%M:%S')
# logging.disable(logging.DEBUG) # uncomment to block debug log messages
__author__ = "Alex Flores Escarcega"
__copyright__ = "Copyright 2007, Alex Flores Escarcega"
__credits__ = ["Alex Flores Escarcega"]
__license__ = "MIT"
__version__ = "1.0.1"
__maintainer__ = "Alex Flores Escarcega"
__email__ = "alex.floresescar@gmail.com"
__status__ = "Development"
def find_game_region():
"""
Uses image inactive_window.png to find the coordinates of the game
Inputs: None
Outputs: the tuple of the coordinates of the game window
"""
logging.debug("About to take a screenshot and look for inactive_window.png")
coors = pyautogui.locateOnScreen("images/inactive_window.png")
if coors is None:
logging.debug("Did not find inactive_window.png")
logging.debug("Maybe the window is active instead. Will look for active_window.png")
coors = pyauto.locateOnscreen("images/inactive_window.png")
if coors is None:
raise Exception("The game as not found on this screen. Make sure it is visible.")
def main():
"""
No inputs
No outputs
Starts up the gameregionfinder
"""
if __name__ == "__main__":
main()
|
Add find_game_region() to return coors of game window
|
Add find_game_region() to return coors of game window
|
Python
|
mit
|
hydrophilicsun/Automating-Minesweeper-
|
#!/usr/bin/env python
"""
This is the main file. The script finds the game window and sets up the
coordinates for each block.
The MIT License (MIT)
(c) 2016
"""
import pyautogui, logging, time
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s.%(msecs)03d: %(message)s', datefmt='%H:%M:%S')
# logging.disable(logging.DEBUG) # uncomment to block debug log messages
__author__ = "Alex Flores Escarcega"
__copyright__ = "Copyright 2007, Alex Flores Escarcega"
__credits__ = ["Alex Flores Escarcega"]
__license__ = "MIT"
__version__ = "1.0.1"
__maintainer__ = "Alex Flores Escarcega"
__email__ = "alex.floresescar@gmail.com"
__status__ = "Development"
def main():
"""
No inputs
No outputs
Starts up the gameregionfinder
"""
if __name__ == "__main__":
main()
Add find_game_region() to return coors of game window
|
#!/usr/bin/env python
"""
This is the main file. The script finds the game window and sets up the
coordinates for each block.
The MIT License (MIT)
(c) 2016
"""
import pyautogui, logging, time
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s.%(msecs)03d: %(message)s', datefmt='%H:%M:%S')
# logging.disable(logging.DEBUG) # uncomment to block debug log messages
__author__ = "Alex Flores Escarcega"
__copyright__ = "Copyright 2007, Alex Flores Escarcega"
__credits__ = ["Alex Flores Escarcega"]
__license__ = "MIT"
__version__ = "1.0.1"
__maintainer__ = "Alex Flores Escarcega"
__email__ = "alex.floresescar@gmail.com"
__status__ = "Development"
def find_game_region():
"""
Uses image inactive_window.png to find the coordinates of the game
Inputs: None
Outputs: the tuple of the coordinates of the game window
"""
logging.debug("About to take a screenshot and look for inactive_window.png")
coors = pyautogui.locateOnScreen("images/inactive_window.png")
if coors is None:
logging.debug("Did not find inactive_window.png")
logging.debug("Maybe the window is active instead. Will look for active_window.png")
coors = pyauto.locateOnscreen("images/inactive_window.png")
if coors is None:
raise Exception("The game as not found on this screen. Make sure it is visible.")
def main():
"""
No inputs
No outputs
Starts up the gameregionfinder
"""
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
"""
This is the main file. The script finds the game window and sets up the
coordinates for each block.
The MIT License (MIT)
(c) 2016
"""
import pyautogui, logging, time
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s.%(msecs)03d: %(message)s', datefmt='%H:%M:%S')
# logging.disable(logging.DEBUG) # uncomment to block debug log messages
__author__ = "Alex Flores Escarcega"
__copyright__ = "Copyright 2007, Alex Flores Escarcega"
__credits__ = ["Alex Flores Escarcega"]
__license__ = "MIT"
__version__ = "1.0.1"
__maintainer__ = "Alex Flores Escarcega"
__email__ = "alex.floresescar@gmail.com"
__status__ = "Development"
def main():
"""
No inputs
No outputs
Starts up the gameregionfinder
"""
if __name__ == "__main__":
main()
<commit_msg>Add find_game_region() to return coors of game window<commit_after>
|
#!/usr/bin/env python
"""
This is the main file. The script finds the game window and sets up the
coordinates for each block.
The MIT License (MIT)
(c) 2016
"""
import pyautogui, logging, time
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s.%(msecs)03d: %(message)s', datefmt='%H:%M:%S')
# logging.disable(logging.DEBUG) # uncomment to block debug log messages
__author__ = "Alex Flores Escarcega"
__copyright__ = "Copyright 2007, Alex Flores Escarcega"
__credits__ = ["Alex Flores Escarcega"]
__license__ = "MIT"
__version__ = "1.0.1"
__maintainer__ = "Alex Flores Escarcega"
__email__ = "alex.floresescar@gmail.com"
__status__ = "Development"
def find_game_region():
"""
Uses image inactive_window.png to find the coordinates of the game
Inputs: None
Outputs: the tuple of the coordinates of the game window
"""
logging.debug("About to take a screenshot and look for inactive_window.png")
coors = pyautogui.locateOnScreen("images/inactive_window.png")
if coors is None:
logging.debug("Did not find inactive_window.png")
logging.debug("Maybe the window is active instead. Will look for active_window.png")
coors = pyauto.locateOnscreen("images/inactive_window.png")
if coors is None:
raise Exception("The game as not found on this screen. Make sure it is visible.")
def main():
"""
No inputs
No outputs
Starts up the gameregionfinder
"""
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
"""
This is the main file. The script finds the game window and sets up the
coordinates for each block.
The MIT License (MIT)
(c) 2016
"""
import pyautogui, logging, time
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s.%(msecs)03d: %(message)s', datefmt='%H:%M:%S')
# logging.disable(logging.DEBUG) # uncomment to block debug log messages
__author__ = "Alex Flores Escarcega"
__copyright__ = "Copyright 2007, Alex Flores Escarcega"
__credits__ = ["Alex Flores Escarcega"]
__license__ = "MIT"
__version__ = "1.0.1"
__maintainer__ = "Alex Flores Escarcega"
__email__ = "alex.floresescar@gmail.com"
__status__ = "Development"
def main():
"""
No inputs
No outputs
Starts up the gameregionfinder
"""
if __name__ == "__main__":
main()
Add find_game_region() to return coors of game window#!/usr/bin/env python
"""
This is the main file. The script finds the game window and sets up the
coordinates for each block.
The MIT License (MIT)
(c) 2016
"""
import pyautogui, logging, time
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s.%(msecs)03d: %(message)s', datefmt='%H:%M:%S')
# logging.disable(logging.DEBUG) # uncomment to block debug log messages
__author__ = "Alex Flores Escarcega"
__copyright__ = "Copyright 2007, Alex Flores Escarcega"
__credits__ = ["Alex Flores Escarcega"]
__license__ = "MIT"
__version__ = "1.0.1"
__maintainer__ = "Alex Flores Escarcega"
__email__ = "alex.floresescar@gmail.com"
__status__ = "Development"
def find_game_region():
"""
Uses image inactive_window.png to find the coordinates of the game
Inputs: None
Outputs: the tuple of the coordinates of the game window
"""
logging.debug("About to take a screenshot and look for inactive_window.png")
coors = pyautogui.locateOnScreen("images/inactive_window.png")
if coors is None:
logging.debug("Did not find inactive_window.png")
logging.debug("Maybe the window is active instead. Will look for active_window.png")
coors = pyauto.locateOnscreen("images/inactive_window.png")
if coors is None:
raise Exception("The game as not found on this screen. Make sure it is visible.")
def main():
"""
No inputs
No outputs
Starts up the gameregionfinder
"""
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
"""
This is the main file. The script finds the game window and sets up the
coordinates for each block.
The MIT License (MIT)
(c) 2016
"""
import pyautogui, logging, time
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s.%(msecs)03d: %(message)s', datefmt='%H:%M:%S')
# logging.disable(logging.DEBUG) # uncomment to block debug log messages
__author__ = "Alex Flores Escarcega"
__copyright__ = "Copyright 2007, Alex Flores Escarcega"
__credits__ = ["Alex Flores Escarcega"]
__license__ = "MIT"
__version__ = "1.0.1"
__maintainer__ = "Alex Flores Escarcega"
__email__ = "alex.floresescar@gmail.com"
__status__ = "Development"
def main():
"""
No inputs
No outputs
Starts up the gameregionfinder
"""
if __name__ == "__main__":
main()
<commit_msg>Add find_game_region() to return coors of game window<commit_after>#!/usr/bin/env python
"""
This is the main file. The script finds the game window and sets up the
coordinates for each block.
The MIT License (MIT)
(c) 2016
"""
import pyautogui, logging, time
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s.%(msecs)03d: %(message)s', datefmt='%H:%M:%S')
# logging.disable(logging.DEBUG) # uncomment to block debug log messages
__author__ = "Alex Flores Escarcega"
__copyright__ = "Copyright 2007, Alex Flores Escarcega"
__credits__ = ["Alex Flores Escarcega"]
__license__ = "MIT"
__version__ = "1.0.1"
__maintainer__ = "Alex Flores Escarcega"
__email__ = "alex.floresescar@gmail.com"
__status__ = "Development"
def find_game_region():
"""
Uses image inactive_window.png to find the coordinates of the game
Inputs: None
Outputs: the tuple of the coordinates of the game window
"""
logging.debug("About to take a screenshot and look for inactive_window.png")
coors = pyautogui.locateOnScreen("images/inactive_window.png")
if coors is None:
logging.debug("Did not find inactive_window.png")
logging.debug("Maybe the window is active instead. Will look for active_window.png")
coors = pyauto.locateOnscreen("images/inactive_window.png")
if coors is None:
raise Exception("The game as not found on this screen. Make sure it is visible.")
def main():
"""
No inputs
No outputs
Starts up the gameregionfinder
"""
if __name__ == "__main__":
main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.