commit: stringlengths 40-40
old_file: stringlengths 4-150
new_file: stringlengths 4-150
old_contents: stringlengths 0-3.26k
new_contents: stringlengths 1-4.43k
subject: stringlengths 15-501
message: stringlengths 15-4.06k
lang: stringclasses (4 values)
license: stringclasses (13 values)
repos: stringlengths 5-91.5k
diff: stringlengths 0-4.35k
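Each record below carries these eleven fields, one value per line, in the order given above. As a rough illustration only, the following sketch shows how such records could be read and inspected if they were stored as JSON Lines; the file name commits.jsonl and the assumption of one JSON object per row are not part of this dump and are purely hypothetical.

import json

# Hypothetical input file: one JSON object per line, carrying the eleven
# columns listed in the schema above. Adjust the path/format to wherever
# these rows actually live.
FIELDS = ("commit", "old_file", "new_file", "old_contents", "new_contents",
          "subject", "message", "lang", "license", "repos", "diff")

with open("commits.jsonl", encoding="utf-8") as fh:
    for line in fh:
        record = json.loads(line)
        # Sanity-check that every schema column is present in the record.
        missing = [name for name in FIELDS if name not in record]
        if missing:
            raise KeyError("record %s is missing fields: %s"
                           % (record.get("commit"), missing))
        # Print a compact summary: abbreviated SHA, language, license,
        # the commit subject, and the unified diff.
        print(record["commit"][:8], record["lang"], record["license"])
        print(record["subject"])
        print(record["diff"])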
13f1f2643c6f00a3f890d735e1ae2c8ac757fe75
setup.py
setup.py
import sys import codecs from setuptools import setup, find_packages from setuptools.command.test import test as TestCommand import dmp setup( name='dmp', version=dmp.__version__, description='MuG DMP API', url='http://www.multiscalegenomics.eu', download_url='https://github.com/Multiscale-Genomics/mg-dm-api', author=dmp.__author__, author_email='mcdowall@ebi.ac.uk', license=dmp.__license__, #packages=find_packages(), include_package_data=True, install_requires = [ 'pymongo>=3.3', 'monogomock>=3.7.0' ] )
import sys import codecs from setuptools import setup, find_packages from setuptools.command.test import test as TestCommand import dmp setup( name='dmp', version=dmp.__version__, description='MuG DMP API', url='http://www.multiscalegenomics.eu', download_url='https://github.com/Multiscale-Genomics/mg-dm-api', author=dmp.__author__, author_email='mcdowall@ebi.ac.uk', license=dmp.__license__, #packages=find_packages(), include_package_data=True, install_requires = [ 'pymongo>=3.3', 'mongomock>=3.7' ] )
Fix to the name of the required module
Fix to the name of the required module
Python
apache-2.0
Multiscale-Genomics/mg-dm-api,Multiscale-Genomics/mg-dm-api
--- +++ @@ -18,6 +18,6 @@ #packages=find_packages(), include_package_data=True, install_requires = [ - 'pymongo>=3.3', 'monogomock>=3.7.0' + 'pymongo>=3.3', 'mongomock>=3.7' ] )
1479e89d65c3d3c3b3ce3da9f81dae73e91b5ae3
rororo/__init__.py
rororo/__init__.py
""" ====== rororo ====== Collection of utilities, helpers, and principles for building Python backend applications. Supports `aiohttp.web <http://aiohttp.readthedocs.org/>`_, `Flask <http://flask.pocoo.org/>`_, and your web-framework. """ __author__ = 'Igor Davydenko' __license__ = 'BSD' __version__ = '1.0.dev0'
""" ====== rororo ====== Collection of utilities, helpers, and principles for building Python backend applications. Supports `aiohttp.web <http://aiohttp.readthedocs.org/>`_, `Flask <http://flask.pocoo.org/>`_, and your web-framework. """ __author__ = 'Igor Davydenko' __license__ = 'BSD' __version__ = '1.0.0.dev0'
Use semantic versioning for rororo.
Use semantic versioning for rororo.
Python
bsd-3-clause
playpauseandstop/rororo,playpauseandstop/rororo
--- +++ @@ -11,4 +11,4 @@ __author__ = 'Igor Davydenko' __license__ = 'BSD' -__version__ = '1.0.dev0' +__version__ = '1.0.0.dev0'
9967d14a10c0c53094b65e0946beb2d1c1200916
python/helpers/pydev/pydevd_attach_to_process/linux/lldb_threads_settrace.py
python/helpers/pydev/pydevd_attach_to_process/linux/lldb_threads_settrace.py
# This file is meant to be run inside lldb as a command after # the attach_linux.dylib dll has already been loaded to settrace for all threads. def __lldb_init_module(debugger, internal_dict): # Command Initialization code goes here # print('Startup LLDB in Python!') import lldb try: show_debug_info = 0 is_debug = 0 target = debugger.GetSelectedTarget() if target: process = target.GetProcess() if process: for thread in process: # Get the first frame # print('Thread %s, suspended %s\n'%(thread, thread.IsStopped())) process.SetSelectedThread(thread) if internal_dict.get('_thread_%d' % thread.GetThreadID(), False): frame = thread.GetSelectedFrame() if frame: print('Will settrace in: %s' % (frame,)) res = frame.EvaluateExpression("(int) SetSysTraceFunc(%s, %s)" % ( show_debug_info, is_debug), lldb.eDynamicCanRunTarget) error = res.GetError() if error: print(error) thread.Resume() except: import traceback;traceback.print_exc()
# This file is meant to be run inside lldb as a command after # the attach_linux.dylib dll has already been loaded to settrace for all threads. def __lldb_init_module(debugger, internal_dict): # Command Initialization code goes here # print('Startup LLDB in Python!') import lldb try: show_debug_info = 1 is_debug = 0 options = lldb.SBExpressionOptions() options.SetFetchDynamicValue() options.SetTryAllThreads(run_others=False) options.SetTimeoutInMicroSeconds(timeout=10000000) target = debugger.GetSelectedTarget() if target: process = target.GetProcess() if process: for thread in process: # Get the first frame # print('Thread %s, suspended %s\n'%(thread, thread.IsStopped())) if internal_dict.get('_thread_%d' % thread.GetThreadID(), False): process.SetSelectedThread(thread) if not thread.IsStopped(): # thread.Suspend() error = process.Stop() frame = thread.GetSelectedFrame() if frame: print('Will settrace in: %s' % (frame,)) res = frame.EvaluateExpression("(int) SetSysTraceFunc(%s, %s)" % ( show_debug_info, is_debug), options) error = res.GetError() if error: print(error) thread.Resume() except: import traceback;traceback.print_exc()
Use timeout for lldb expr eval (PY-14252).
Use timeout for lldb expr eval (PY-14252).
Python
apache-2.0
fnouama/intellij-community,samthor/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,dslomov/intellij-community,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,fnouama/intellij-community,holmes/intellij-community,MER-GROUP/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,SerCeMan/intellij-community,ivan-fedorov/intellij-community,kdwink/intellij-community,dslomov/intellij-community,petteyg/intellij-community,allotria/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,ol-loginov/intellij-community,blademainer/intellij-community,ibinti/intellij-community,vladmm/intellij-community,asedunov/intellij-community,orekyuu/intellij-community,muntasirsyed/intellij-community,dslomov/intellij-community,ThiagoGarciaAlves/intellij-community,ol-loginov/intellij-community,dslomov/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,kool79/intellij-community,vvv1559/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,Lekanich/intellij-community,MER-GROUP/intellij-community,muntasirsyed/intellij-community,da1z/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,salguarnieri/intellij-community,allotria/intellij-community,da1z/intellij-community,Distrotech/intellij-community,youdonghai/intellij-community,retomerz/intellij-community,dslomov/intellij-community,izonder/intellij-community,Distrotech/intellij-community,samthor/intellij-community,kool79/intellij-community,adedayo/intellij-community,akosyakov/intellij-community,caot/intellij-community,retomerz/intellij-community,xfournet/intellij-community,nicolargo/intellij-community,ibinti/intellij-community,supersven/intellij-community,idea4bsd/idea4bsd,wreckJ/intellij-community,hurricup/intellij-community,TangHao1987/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,slisson/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,retomerz/intellij-community,akosyakov/intellij-community,fengbaicanhe/intellij-community,kdwink/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,adedayo/intellij-community,ryano144/intellij-community,pwoodworth/intellij-community,ahb0327/intellij-community,fitermay/intellij-community,ftomassetti/intellij-community,ivan-fedorov/intellij-community,adedayo/intellij-community,apixandru/intellij-community,xfournet/intellij-community,izonder/intellij-community,jagguli/intellij-community,kdwink/intellij-community,wreckJ/intellij-community,vvv1559/intellij-community,blademainer/intellij-community,michaelgallacher/intellij-community,MER-GROUP/intellij-community,ryano144/intellij-community,orekyuu/intellij-community,izonder/intellij-community,fengbaicanhe/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,wreckJ/intellij-community,orekyuu/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,MichaelNedzelsky/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,izonder/intellij-community,lucafav
atella/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,vladmm/intellij-community,ftomassetti/intellij-community,fitermay/intellij-community,retomerz/intellij-community,petteyg/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,fnouama/intellij-community,youdonghai/intellij-community,akosyakov/intellij-community,fitermay/intellij-community,tmpgit/intellij-community,ftomassetti/intellij-community,TangHao1987/intellij-community,tmpgit/intellij-community,jagguli/intellij-community,diorcety/intellij-community,holmes/intellij-community,ibinti/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,suncycheng/intellij-community,vladmm/intellij-community,TangHao1987/intellij-community,blademainer/intellij-community,jagguli/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,robovm/robovm-studio,wreckJ/intellij-community,petteyg/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,ftomassetti/intellij-community,muntasirsyed/intellij-community,da1z/intellij-community,izonder/intellij-community,fitermay/intellij-community,nicolargo/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,vvv1559/intellij-community,semonte/intellij-community,orekyuu/intellij-community,FHannes/intellij-community,holmes/intellij-community,supersven/intellij-community,fengbaicanhe/intellij-community,Distrotech/intellij-community,ahb0327/intellij-community,signed/intellij-community,ibinti/intellij-community,supersven/intellij-community,apixandru/intellij-community,adedayo/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,salguarnieri/intellij-community,petteyg/intellij-community,gnuhub/intellij-community,apixandru/intellij-community,ibinti/intellij-community,ryano144/intellij-community,apixandru/intellij-community,pwoodworth/intellij-community,nicolargo/intellij-community,diorcety/intellij-community,slisson/intellij-community,xfournet/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,ahb0327/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,youdonghai/intellij-community,akosyakov/intellij-community,fitermay/intellij-community,retomerz/intellij-community,caot/intellij-community,ThiagoGarciaAlves/intellij-community,MER-GROUP/intellij-community,gnuhub/intellij-community,kdwink/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,dslomov/intellij-community,petteyg/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,samthor/intellij-community,SerCeMan/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,akosyakov/intellij-community,signed/intellij-community,petteyg/intellij-community,semonte/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,samthor/intellij-community,caot/intellij-community,Lekanich/intellij-community,samthor/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,gnuhub/intellij-community,blademainer/intellij-community,SerCeMan/intellij-community,signed/intellij-community,supersven/intellij-community,izonder/intellij-community,samthor/intellij-community,pwoodworth/intellij-community,blademainer/intellij-community,semonte/intellij-community,ryano144/intellij-community,xfournet/i
ntellij-community,Lekanich/intellij-community,xfournet/intellij-community,MichaelNedzelsky/intellij-community,allotria/intellij-community,petteyg/intellij-community,wreckJ/intellij-community,michaelgallacher/intellij-community,caot/intellij-community,ibinti/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,MER-GROUP/intellij-community,amith01994/intellij-community,semonte/intellij-community,da1z/intellij-community,clumsy/intellij-community,robovm/robovm-studio,kool79/intellij-community,ol-loginov/intellij-community,signed/intellij-community,ryano144/intellij-community,amith01994/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,hurricup/intellij-community,muntasirsyed/intellij-community,jagguli/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,kool79/intellij-community,supersven/intellij-community,ryano144/intellij-community,robovm/robovm-studio,orekyuu/intellij-community,ryano144/intellij-community,hurricup/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,asedunov/intellij-community,vladmm/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,ThiagoGarciaAlves/intellij-community,nicolargo/intellij-community,da1z/intellij-community,holmes/intellij-community,robovm/robovm-studio,vladmm/intellij-community,samthor/intellij-community,MichaelNedzelsky/intellij-community,tmpgit/intellij-community,MichaelNedzelsky/intellij-community,idea4bsd/idea4bsd,petteyg/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,blademainer/intellij-community,muntasirsyed/intellij-community,amith01994/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,ryano144/intellij-community,Distrotech/intellij-community,petteyg/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,nicolargo/intellij-community,holmes/intellij-community,salguarnieri/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,tmpgit/intellij-community,ftomassetti/intellij-community,youdonghai/intellij-community,petteyg/intellij-community,dslomov/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,samthor/intellij-community,slisson/intellij-community,ibinti/intellij-community,xfournet/intellij-community,dslomov/intellij-community,izonder/intellij-community,kool79/intellij-community,Lekanich/intellij-community,amith01994/intellij-community,holmes/intellij-community,caot/intellij-community,signed/intellij-community,apixandru/intellij-community,ivan-fedorov/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,fnouama/intellij-community,MER-GROUP/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,holmes/intellij-community,youdonghai/intellij-community,tmpgit/intellij-community,vladmm/intellij-community,blademainer/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,wreckJ/intellij-community,idea4bsd/idea4bsd,diorcety/intellij-community,mglukhikh/i
ntellij-community,apixandru/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,robovm/robovm-studio,FHannes/intellij-community,FHannes/intellij-community,alphafoobar/intellij-community,ol-loginov/intellij-community,vvv1559/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,TangHao1987/intellij-community,Lekanich/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,wreckJ/intellij-community,gnuhub/intellij-community,idea4bsd/idea4bsd,gnuhub/intellij-community,salguarnieri/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,idea4bsd/idea4bsd,fengbaicanhe/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,muntasirsyed/intellij-community,ftomassetti/intellij-community,holmes/intellij-community,signed/intellij-community,alphafoobar/intellij-community,kool79/intellij-community,diorcety/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,hurricup/intellij-community,xfournet/intellij-community,jagguli/intellij-community,dslomov/intellij-community,TangHao1987/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,da1z/intellij-community,alphafoobar/intellij-community,amith01994/intellij-community,vladmm/intellij-community,asedunov/intellij-community,semonte/intellij-community,ol-loginov/intellij-community,FHannes/intellij-community,retomerz/intellij-community,clumsy/intellij-community,jagguli/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,fengbaicanhe/intellij-community,diorcety/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,MER-GROUP/intellij-community,MER-GROUP/intellij-community,suncycheng/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,holmes/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,signed/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,robovm/robovm-studio,caot/intellij-community,caot/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,clumsy/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,signed/intellij-community,alphafoobar/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,SerCeMan/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,nicolargo/intellij-community,alphafoobar/intellij-community,asedunov/intellij-community,slisson/intellij-community,clumsy/intellij-community,kool79/intellij-community,samthor/intellij-community,gnuhub/intellij-community,lucafavatella/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,nicolargo/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,slisson/intellij-community,caot/intellij-community,fitermay/intellij-community,ahb0327/intellij-community,apixandru/intellij-community,amith01994/intellij-community,TangHao1987/intellij-community,blademainer/intellij-community,caot/intellij-community,signed/intel
lij-community,ahb0327/intellij-community,Distrotech/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,gnuhub/intellij-community,vvv1559/intellij-community,dslomov/intellij-community,ThiagoGarciaAlves/intellij-community,holmes/intellij-community,suncycheng/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,asedunov/intellij-community,asedunov/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,ibinti/intellij-community,TangHao1987/intellij-community,gnuhub/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,kool79/intellij-community,supersven/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,robovm/robovm-studio,FHannes/intellij-community,retomerz/intellij-community,SerCeMan/intellij-community,clumsy/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,apixandru/intellij-community,fengbaicanhe/intellij-community,tmpgit/intellij-community,SerCeMan/intellij-community,da1z/intellij-community,vvv1559/intellij-community,allotria/intellij-community,suncycheng/intellij-community,jagguli/intellij-community,fnouama/intellij-community,holmes/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,fitermay/intellij-community,adedayo/intellij-community,fnouama/intellij-community,ol-loginov/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,supersven/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,hurricup/intellij-community,slisson/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,tmpgit/intellij-community,diorcety/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,adedayo/intellij-community,slisson/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,Lekanich/intellij-community,fitermay/intellij-community,blademainer/intellij-community,FHannes/intellij-community,muntasirsyed/intellij-community,hurricup/intellij-community,caot/intellij-community,orekyuu/intellij-community,allotria/intellij-community,retomerz/intellij-community,adedayo/intellij-community,supersven/intellij-community,adedayo/intellij-community,fnouama/intellij-community,ibinti/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,semonte/intellij-community,ahb0327/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,apixandru/intellij-community,caot/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,ol-loginov/intellij-community,gnuhub/intellij-community,hurricup/intellij-community,asedunov/intellij-community,diorcety/intellij-community,Distrotech/intellij-community,allotria/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,izonder/intellij-community,dslomov/intellij-community,vvv1559/intellij-community,amith01994/intellij-community,supersven/intellij-community,MER-GROUP/intellij-community,amith01994/intellij-community,nicolargo/intellij-community,kdwink/intellij-community,xfournet/intellij-community,blademainer/intellij-community,fengbaicanhe/intellij-community,jagguli/intellij-commun
ity,lucafavatella/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,petteyg/intellij-community,Lekanich/intellij-community,semonte/intellij-community,nicolargo/intellij-community,izonder/intellij-community,ivan-fedorov/intellij-community,ivan-fedorov/intellij-community,kdwink/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,SerCeMan/intellij-community,holmes/intellij-community,semonte/intellij-community,FHannes/intellij-community,michaelgallacher/intellij-community,fnouama/intellij-community,semonte/intellij-community,FHannes/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,robovm/robovm-studio,vladmm/intellij-community,amith01994/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,semonte/intellij-community,asedunov/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,ivan-fedorov/intellij-community,blademainer/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,orekyuu/intellij-community,orekyuu/intellij-community,vvv1559/intellij-community,alphafoobar/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,Lekanich/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,wreckJ/intellij-community,lucafavatella/intellij-community,ol-loginov/intellij-community,signed/intellij-community,tmpgit/intellij-community,supersven/intellij-community,vvv1559/intellij-community,samthor/intellij-community,allotria/intellij-community,vladmm/intellij-community,fengbaicanhe/intellij-community,akosyakov/intellij-community,FHannes/intellij-community,amith01994/intellij-community,suncycheng/intellij-community,izonder/intellij-community,kdwink/intellij-community,jagguli/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,caot/intellij-community,amith01994/intellij-community,ryano144/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,ThiagoGarciaAlves/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,tmpgit/intellij-community,signed/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,TangHao1987/intellij-community,FHannes/intellij-community,apixandru/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,tmpgit/intellij-community,nicolargo/intellij-community,robovm/robovm-studio,da1z/intellij-community,clumsy/intellij-community,michaelgallacher/intellij-community,izonder/intellij-community,da1z/intellij-community,salguarnieri/intellij-community,kool79/intellij-community,alphafoobar/intellij-community,fnouama/intellij-community,signed/intellij-community,blademainer/intellij-community,idea4bsd/idea4bsd,salguarnieri/intellij-community,vladmm/intellij-community,robovm/robovm-studio,SerCeMan/intellij-community,fnouama/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,orekyuu/intellij-community,clumsy/intellij-community,ahb0327/intellij-community,SerCeMan/intellij-community
--- +++ @@ -6,8 +6,14 @@ import lldb try: - show_debug_info = 0 + show_debug_info = 1 is_debug = 0 + + options = lldb.SBExpressionOptions() + options.SetFetchDynamicValue() + options.SetTryAllThreads(run_others=False) + options.SetTimeoutInMicroSeconds(timeout=10000000) + target = debugger.GetSelectedTarget() if target: process = target.GetProcess() @@ -16,17 +22,21 @@ # Get the first frame # print('Thread %s, suspended %s\n'%(thread, thread.IsStopped())) - process.SetSelectedThread(thread) - if internal_dict.get('_thread_%d' % thread.GetThreadID(), False): + process.SetSelectedThread(thread) + if not thread.IsStopped(): + # thread.Suspend() + error = process.Stop() frame = thread.GetSelectedFrame() if frame: print('Will settrace in: %s' % (frame,)) + res = frame.EvaluateExpression("(int) SetSysTraceFunc(%s, %s)" % ( - show_debug_info, is_debug), lldb.eDynamicCanRunTarget) + show_debug_info, is_debug), options) error = res.GetError() if error: print(error) + thread.Resume() except: import traceback;traceback.print_exc()
90012f9fb9a256e6086a0b421661fd74cd8ef880
sedlex/AddCocoricoVoteVisitor.py
sedlex/AddCocoricoVoteVisitor.py
# -*- coding: utf-8 -*- from AbstractVisitor import AbstractVisitor from duralex.alinea_parser import * import requests class AddCocoricoVoteVisitor(AbstractVisitor): def __init__(self, args): self.url = 'https://local.cocorico.cc' r = requests.post( self.url + '/api/oauth/token', auth=(args.cocorico_app_id, args.cocorico_secret), data={ 'grant_type': 'client_credentials' }, verify=self.url != 'https://local.cocorico.cc' ) self.access_token = r.json()['access_token'] super(AddCocoricoVoteVisitor, self).__init__() def visit_node(self, node): if not self.access_token: return # if on root node if 'parent' not in node and 'type' not in node: r = requests.post( self.url + '/api/vote', headers={'Authorization': 'Bearer ' + self.access_token}, data={ 'title': 'test de vote', 'description': 'ceci est un test', 'url': 'https://legilibre.fr/?test=49' }, verify=self.url != 'https://local.cocorico.cc' ) node['cocoricoVote'] = r.json()['vote']['id']
# -*- coding: utf-8 -*- from AbstractVisitor import AbstractVisitor from duralex.alinea_parser import * import requests class AddCocoricoVoteVisitor(AbstractVisitor): def __init__(self, args): self.url = args.cocorico_url if not self.url: self.url = 'https://cocorico.cc' r = requests.post( self.url + '/api/oauth/token', auth=(args.cocorico_app_id, args.cocorico_secret), data={ 'grant_type': 'client_credentials' }, verify=self.url != 'https://local.cocorico.cc' ) self.access_token = r.json()['access_token'] super(AddCocoricoVoteVisitor, self).__init__() def visit_node(self, node): if not self.access_token: return # if on root node if 'parent' not in node and 'type' not in node: r = requests.post( self.url + '/api/vote', headers={'Authorization': 'Bearer ' + self.access_token}, data={ 'title': 'test de vote', 'description': 'ceci est un test', 'url': 'https://legilibre.fr/?test=49' }, verify=self.url != 'https://local.cocorico.cc' ) node['cocoricoVote'] = r.json()['vote']['id']
Handle the --cocorico-url command line option.
Handle the --cocorico-url command line option.
Python
agpl-3.0
Legilibre/SedLex
--- +++ @@ -8,7 +8,9 @@ class AddCocoricoVoteVisitor(AbstractVisitor): def __init__(self, args): - self.url = 'https://local.cocorico.cc' + self.url = args.cocorico_url + if not self.url: + self.url = 'https://cocorico.cc' r = requests.post( self.url + '/api/oauth/token',
4c5cf98be65ee2564062cce2a43b7833eef1a6c9
AFQ/utils/volume.py
AFQ/utils/volume.py
import scipy.ndimage as ndim from skimage.filters import gaussian def patch_up_roi(roi, sigma=0.5, truncate=2): """ After being non-linearly transformed, ROIs tend to have holes in them. We perform a couple of computational geometry operations on the ROI to fix that up. Parameters ---------- roi : 3D binary array The ROI after it has been transformed. sigma : float The sigma for initial Gaussian smoothing. truncate : float The truncation for the Gaussian Returns ------- ROI after dilation and hole-filling """ return (ndim.binary_fill_holes( gaussian(roi, sigma=sigma, truncate=truncate)).astype(float) > 0.1)
import scipy.ndimage as ndim from skimage.filters import gaussian from skimage.morphology import convex_hull_image def patch_up_roi(roi, sigma=0.5, truncate=2): """ After being non-linearly transformed, ROIs tend to have holes in them. We perform a couple of computational geometry operations on the ROI to fix that up. Parameters ---------- roi : 3D binary array The ROI after it has been transformed. sigma : float The sigma for initial Gaussian smoothing. truncate : float The truncation for the Gaussian Returns ------- ROI after dilation and hole-filling """ return convex_hull_image(gaussian(ndim.binary_fill_holes(roi), sigma=sigma, truncate=truncate) > 0.1)
Add a convex hull operation to really close this up.
Add a convex hull operation to really close this up.
Python
bsd-2-clause
yeatmanlab/pyAFQ,arokem/pyAFQ,arokem/pyAFQ,yeatmanlab/pyAFQ
--- +++ @@ -1,6 +1,6 @@ import scipy.ndimage as ndim from skimage.filters import gaussian - +from skimage.morphology import convex_hull_image def patch_up_roi(roi, sigma=0.5, truncate=2): """ @@ -24,5 +24,5 @@ ROI after dilation and hole-filling """ - return (ndim.binary_fill_holes( - gaussian(roi, sigma=sigma, truncate=truncate)).astype(float) > 0.1) + return convex_hull_image(gaussian(ndim.binary_fill_holes(roi), + sigma=sigma, truncate=truncate) > 0.1)
788229f43eab992d6f4d79681604336e4d721b0c
gameserver/api/endpoints/players.py
gameserver/api/endpoints/players.py
import logging from flask import request from flask_restplus import Resource from gameserver.game import Game from gameserver.models import Player from gameserver.api.restplus import api from gameserver.api.serializers import player_get, player_post from gameserver.database import db db_session = db.session log = logging.getLogger(__name__) ns = api.namespace('players', description='Operations related to players') game = Game() @ns.route('/') class PlayerCollection(Resource): @api.response(200, 'Success') @api.marshal_list_with(player_get) def get(self): """ Returns list of players. """ players = game.get_players() return players @api.response(201, 'Player successfully created.') @api.expect(player_post) def post(self): """ Creates a new game player. """ data = request.json player = game.create_player(data['name']) db_session.commit() return player.id, 201 @ns.route('/<string:id>') @ns.param('id', 'The player id') class Player(Resource): @api.response(404, 'Player not found') @api.response(200, 'Success') @api.marshal_with(player_get) def get(self, id): """ Returns the specified player. """ player = game.get_player(id) if not player: api.abort(404) return player
import logging from flask import request from flask_restplus import Resource from gameserver.game import Game from gameserver.models import Player from gameserver.api.restplus import api from gameserver.api.serializers import player_get, player_post from gameserver.database import db db_session = db.session log = logging.getLogger(__name__) ns = api.namespace('players', description='Operations related to players') game = Game() @ns.route('/') class PlayerCollection(Resource): @api.response(200, 'Success') @api.marshal_list_with(player_get) def get(self): """ Returns list of players. """ players = game.get_players() return players @api.response(201, 'Player successfully created.') @api.expect(player_post) def post(self): """ Creates a new game player. """ data = request.json player = game.create_player(data['name']) db_session.commit() return dict(id=player.id), 201 @ns.route('/<string:id>') @ns.param('id', 'The player id') class Player(Resource): @api.response(404, 'Player not found') @api.response(200, 'Success') @api.marshal_with(player_get) def get(self, id): """ Returns the specified player. """ player = game.get_player(id) if not player: api.abort(404) return player
Return json, not just a string id
Return json, not just a string id
Python
apache-2.0
hammertoe/didactic-spork,hammertoe/didactic-spork,hammertoe/didactic-spork,hammertoe/didactic-spork
--- +++ @@ -40,7 +40,7 @@ data = request.json player = game.create_player(data['name']) db_session.commit() - return player.id, 201 + return dict(id=player.id), 201 @ns.route('/<string:id>') @@ -51,8 +51,8 @@ @api.response(200, 'Success') @api.marshal_with(player_get) def get(self, id): - """ - Returns the specified player. + """ + Returns the specified player. """ player = game.get_player(id) if not player:
99e164e2e4cf732485e6692a67c732cb657a1c11
test/util.py
test/util.py
'''Helper code for theanets unit tests.''' import numpy as np import skdata.mnist class MNIST: NUM_DIGITS = 100 DIGIT_SIZE = 784 def setUp(self): np.random.seed(3) mnist = skdata.mnist.dataset.MNIST() mnist.meta # trigger download if needed. def arr(n, dtype): arr = mnist.arrays[n] return arr.reshape((len(arr), -1)).astype(dtype) self.images = arr('train_images', 'f')[:MNIST.NUM_DIGITS] / 255. self.labels = arr('train_labels', 'b')[:MNIST.NUM_DIGITS]
'''Helper code for theanets unit tests.''' import numpy as np import skdata.mnist class MNIST(object): NUM_DIGITS = 100 DIGIT_SIZE = 784 def setUp(self): np.random.seed(3) mnist = skdata.mnist.dataset.MNIST() mnist.meta # trigger download if needed. def arr(n, dtype): arr = mnist.arrays[n] return arr.reshape((len(arr), -1)).astype(dtype) self.images = arr('train_images', 'f')[:MNIST.NUM_DIGITS] / 255. self.labels = arr('train_labels', 'b')[:MNIST.NUM_DIGITS]
Make MNIST inherit from object for py2k.
Make MNIST inherit from object for py2k.
Python
mit
devdoer/theanets,chrinide/theanets,lmjohns3/theanets
--- +++ @@ -4,7 +4,7 @@ import skdata.mnist -class MNIST: +class MNIST(object): NUM_DIGITS = 100 DIGIT_SIZE = 784
9e110d998532600a351ae35f79b0f46bce576c9f
humfrey/desc/rdf_processors.py
humfrey/desc/rdf_processors.py
import rdflib from humfrey.utils.namespaces import NS from humfrey.linkeddata.uri import doc_forwards def formats(request, context): graph, subject, doc_uri = context['graph'], context['subject'], context['doc_uri'] formats_for_context = [] for renderer in context['renderers']: url = rdflib.URIRef(renderer['url']) graph += [ (doc_uri, NS['dcterms'].hasFormat, url), (url, NS['dcterms']['title'], rdflib.Literal('%s description of %s' % (renderer['name'], subject.label))), ] graph += [(url, NS['dc']['format'], rdflib.Literal(mimetype)) for mimetype in renderer['mimetypes']] formats_for_context.sort(key=lambda f:f['name']) return { 'formats': formats_for_context, } def doc_meta(request, context): doc_uri = context['doc_uri'] context['graph'] += [ (doc_uri, NS['foaf'].primaryTopic, context['subject']._identifier), (doc_uri, NS['rdf'].type, NS['foaf'].Document), (doc_uri, NS['dcterms']['title'], rdflib.Literal('Description of {0}'.format(context['subject'].label)),) ]
import rdflib from humfrey.utils.namespaces import NS from humfrey.linkeddata.uri import doc_forwards def formats(request, context): graph, subject, doc_uri = context['graph'], context['subject'], context['doc_uri'] formats_for_context = [] for renderer in context['renderers']: url = rdflib.URIRef(renderer['url']) graph += [ (doc_uri, NS['dcterms'].hasFormat, url), (url, NS['dcterms']['title'], rdflib.Literal('%s description of %s' % (renderer['name'], subject.label))), ] graph += [(url, NS['dc']['format'], rdflib.Literal(mimetype)) for mimetype in renderer['mimetypes']] formats_for_context.sort(key=lambda f:f['name']) return { 'formats': formats_for_context, } def doc_meta(request, context): doc_uri = context['doc_uri'] context['graph'] += [ (doc_uri, NS['foaf'].primaryTopic, context['subject']._identifier), (doc_uri, NS['rdf'].type, NS['foaf'].Document), (doc_uri, NS['dcterms']['title'], rdflib.Literal(u'Description of {0}'.format(context['subject'].label)),) ]
Fix ascii-only bug when something's label contains non-ascii characters.
Fix ascii-only bug when something's label contains non-ascii characters.
Python
bsd-3-clause
ox-it/humfrey,ox-it/humfrey,ox-it/humfrey
--- +++ @@ -24,5 +24,5 @@ context['graph'] += [ (doc_uri, NS['foaf'].primaryTopic, context['subject']._identifier), (doc_uri, NS['rdf'].type, NS['foaf'].Document), - (doc_uri, NS['dcterms']['title'], rdflib.Literal('Description of {0}'.format(context['subject'].label)),) + (doc_uri, NS['dcterms']['title'], rdflib.Literal(u'Description of {0}'.format(context['subject'].label)),) ]
a7e64e1594ffc1dba32e55f99779c79d85fd05b9
app.py
app.py
from flask import Flask, render_template, flash from flask_wtf import Form from flask_wtf.file import FileField from tools import s3_upload, store_locally import json app = Flask(__name__) app.config.from_object('config') class UploadForm(Form): example = FileField('Example File') @app.route('/', methods=['POST', 'GET']) def upload_page(): form = UploadForm(csrf_enabled=False) upload_file = form.example if form.validate_on_submit(): output = store_locally(upload_file) response = {} if output is not None: response['url'] = output return json.dumps(response, indent=4) else: response['url'] = None return json.dumps(response, indent=4), app.config["INVALID_DATA"] return render_template('example.html', form=form) if __name__ == '__main__': app.run(host=app.config["HOST"], port=app.config["PORT"], debug=app.config["DEBUG"])
from os import environ from flask import Flask, render_template, flash from flask_wtf import Form from flask_wtf.file import FileField from tools import s3_upload, store_locally import json app = Flask(__name__) app.config.from_object('config') class UploadForm(Form): example = FileField('Example File') @app.route('/', methods=['POST', 'GET']) def upload_page(): form = UploadForm(csrf_enabled=False) upload_file = form.example if form.validate_on_submit(): output = store_locally(upload_file) response = {} if output is not None: response['url'] = output return json.dumps(response, indent=4) else: response['url'] = None return json.dumps(response, indent=4), app.config["INVALID_DATA"] return render_template('example.html', form=form) if __name__ == '__main__': if environ.has_key('FLASK_ENV') and environ['FLASK_ENV'] == 'production': from gevent.wsgi import WSGIServer WSGIServer((app.config["HOST"], app.config["PORT"]), app).serve_forever() else: app.run(host=app.config["HOST"], port=app.config["PORT"], debug=app.config["DEBUG"])
Use a production ready server; no disk polling
Use a production ready server; no disk polling
Python
mit
Mouleshwar/Flask-S3-Uploader,themouli/Flask-S3-Uploader,themouli/Flask-S3-Uploader,Mouleshwar/Flask-S3-Uploader
--- +++ @@ -1,3 +1,4 @@ +from os import environ from flask import Flask, render_template, flash from flask_wtf import Form from flask_wtf.file import FileField @@ -28,4 +29,8 @@ return render_template('example.html', form=form) if __name__ == '__main__': - app.run(host=app.config["HOST"], port=app.config["PORT"], debug=app.config["DEBUG"]) + if environ.has_key('FLASK_ENV') and environ['FLASK_ENV'] == 'production': + from gevent.wsgi import WSGIServer + WSGIServer((app.config["HOST"], app.config["PORT"]), app).serve_forever() + else: + app.run(host=app.config["HOST"], port=app.config["PORT"], debug=app.config["DEBUG"])
0dc84650b2929d31c054882ad67570fda6f1ffb9
incuna_test_utils/testcases/urls.py
incuna_test_utils/testcases/urls.py
from django.core.urlresolvers import resolve, reverse from django.test import TestCase class URLsTestCase(TestCase): """A TestCase with a check_url helper method for testing urls""" def check_url(self, view_class, url, url_name, url_args=None, url_kwargs=None): """ Assert a view's url is correctly configured Check the url_name reverses to give a correctly formated url. Check the url resolves to the correct view. """ reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs) self.assertEqual(reversed_url, url) resolved_view_class = resolve(url).func.cls self.assertEqual(resolved_view_class, view_class)
from django.core.urlresolvers import resolve, reverse from django.test import TestCase class URLsMixin(object): """A TestCase Mixin with a check_url helper method for testing urls""" def check_url(self, view_class, expected_url, url_name, url_args=None, url_kwargs=None): """ Assert a view's url is correctly configured Check the url_name reverses to give a correctly formated expected_url. Check the expected_url resolves to the correct view. """ reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs) self.assertEqual(reversed_url, expected_url) resolved_view_class = resolve(expected_url).func.cls self.assertEqual(resolved_view_class, view_class) class URLsTestCase(URLsMixin, TestCase): pass
Rename url -> expected_url; Add URLsMixin
Rename url -> expected_url; Add URLsMixin
Python
bsd-2-clause
incuna/incuna-test-utils,incuna/incuna-test-utils
--- +++ @@ -2,19 +2,24 @@ from django.test import TestCase -class URLsTestCase(TestCase): - """A TestCase with a check_url helper method for testing urls""" +class URLsMixin(object): + """A TestCase Mixin with a check_url helper method for testing urls""" - def check_url(self, view_class, url, url_name, url_args=None, url_kwargs=None): + def check_url(self, view_class, expected_url, url_name, + url_args=None, url_kwargs=None): """ Assert a view's url is correctly configured - Check the url_name reverses to give a correctly formated url. - Check the url resolves to the correct view. + Check the url_name reverses to give a correctly formated expected_url. + Check the expected_url resolves to the correct view. """ reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs) - self.assertEqual(reversed_url, url) + self.assertEqual(reversed_url, expected_url) - resolved_view_class = resolve(url).func.cls + resolved_view_class = resolve(expected_url).func.cls self.assertEqual(resolved_view_class, view_class) + + +class URLsTestCase(URLsMixin, TestCase): + pass
a0348f21ce7abb577b93913c6f1c805cc6ccc75f
knowit2019/13.py
knowit2019/13.py
import json def navigate_maze_struct(strategy, f='input/MAZE.txt'): rooms = json.load(open(f)) for row in rooms: for room in row: room['visited'] = False queue = [(0, 0)] while queue: y, x = queue.pop() room = rooms[y][x] if room['visited']: continue room['visited'] = True if room['y'] == 499 and room['x'] == 499: return sum_visited(rooms) for d in strategy: if d == 'D' and room['y'] < 499 and not room['syd']: queue.append((y + 1, x), ) elif d == 'U' and y > 0 and not room['nord']: queue.append((y - 1, x), ) elif d == 'R' and x < 499 and not room['aust']: queue.append((y, x + 1), ) elif d == 'L' and x > 0 and not room['vest']: queue.append((y, x - 1), ) return None def sum_visited(rooms): visited = 0 for row in rooms: for room in row: visited += 1 if room['visited'] else 0 return visited print(str(navigate_maze_struct('DRLU')) + " woop") print(navigate_maze_struct('RDLU'))
import json def navigate_maze_struct(strategy, f='input/MAZE.txt'): rooms = json.load(open(f)) for row in rooms: for room in row: room['visited'] = False queue = [(0, 0)] while queue: y, x = queue.pop() room = rooms[y][x] if room['visited']: continue room['visited'] = True if room['y'] == 499 and room['x'] == 499: return sum_visited(rooms) for d in strategy: if d == 'D' and room['y'] < 499 and not room['syd']: queue.append((y + 1, x), ) elif d == 'U' and y > 0 and not room['nord']: queue.append((y - 1, x), ) elif d == 'R' and x < 499 and not room['aust']: queue.append((y, x + 1), ) elif d == 'L' and x > 0 and not room['vest']: queue.append((y, x - 1), ) return None def sum_visited(rooms): visited = 0 for row in rooms: for room in row: visited += 1 if room['visited'] else 0 return visited print(abs(navigate_maze_struct('ULRD') - navigate_maze_struct('ULDR')))
Update strategy (was wrong way around before)
Update strategy (was wrong way around before)
Python
mit
matslindh/codingchallenges,matslindh/codingchallenges
--- +++ @@ -45,5 +45,4 @@ return visited -print(str(navigate_maze_struct('DRLU')) + " woop") -print(navigate_maze_struct('RDLU')) +print(abs(navigate_maze_struct('ULRD') - navigate_maze_struct('ULDR')))
a6a95afca2964756a7777ea43839da1709187a27
planetstack/openstack_observer/backend.py
planetstack/openstack_observer/backend.py
import threading import time from observer.event_loop import PlanetStackObserver from observer.event_manager import EventListener from util.logger import Logger, logging logger = Logger(level=logging.INFO) class Backend: def run(self): try: # start the openstack observer observer = PlanetStackObserver() observer_thread = threading.Thread(target=observer.run) observer_thread.start() # start event listene event_manager = EventListener(wake_up=observer.wake_up) event_manager_thread = threading.Thread(target=event_manager.run) event_manager_thread.start() except: logger.log_exc("Exception in child thread")
import threading import time from observer.event_loop import PlanetStackObserver from observer.event_manager import EventListener from util.logger import Logger, logging logger = Logger(level=logging.INFO) class Backend: def run(self): # start the openstack observer observer = PlanetStackObserver() observer_thread = threading.Thread(target=observer.run) observer_thread.start() # start event listene event_manager = EventListener(wake_up=observer.wake_up) event_manager_thread = threading.Thread(target=event_manager.run) event_manager_thread.start()
Drop try/catch that causes uncaught errors in the Observer to be silently ignored
Drop try/catch that causes uncaught errors in the Observer to be silently ignored
Python
apache-2.0
opencord/xos,opencord/xos,zdw/xos,open-cloud/xos,cboling/xos,zdw/xos,cboling/xos,opencord/xos,open-cloud/xos,zdw/xos,cboling/xos,cboling/xos,cboling/xos,open-cloud/xos,zdw/xos
--- +++ @@ -9,7 +9,6 @@ class Backend: def run(self): - try: # start the openstack observer observer = PlanetStackObserver() observer_thread = threading.Thread(target=observer.run) @@ -19,6 +18,4 @@ event_manager = EventListener(wake_up=observer.wake_up) event_manager_thread = threading.Thread(target=event_manager.run) event_manager_thread.start() - except: - logger.log_exc("Exception in child thread")
131a6d6a60b975b45cd551c1b52c059c857cf1e5
user/views.py
user/views.py
# -*- coding: utf-8 -*- from django.contrib.auth.decorators import login_required from django.shortcuts import render_to_response from django.template import RequestContext from django.contrib.auth.models import User from cronos.announcements.models import Id if request.user.email[-21:] == 'notapplicablemail.com': mail = 'unset' elif request.user.get_profile().webmail_username: mail = webmail_username + '@teilar.gr' else: '' for item in Id.objects.filter(urlid__exact = request.user.get_profile().school): school = str(item.name) @login_required def user(request): return render_to_response('user.html', { 'mail': mail, }, context_instance = RequestContext(request)) @login_required def user_settings(request): return render_to_response('settings.html', { 'school': school, 'mail': mail, }, context_instance = RequestContext(request))
# -*- coding: utf-8 -*- from django.contrib.auth.decorators import login_required from django.shortcuts import render_to_response from django.template import RequestContext from django.contrib.auth.models import User from cronos.announcements.models import Id def getmail(request): if request.user.email[-21:] == 'notapplicablemail.com': mail = 'unset' elif request.user.get_profile().webmail_username: mail = webmail_username + '@teilar.gr' else: '' return mail def getschool(request): for item in Id.objects.filter(urlid__exact = request.user.get_profile().school): school = str(item.name) return school @login_required def user(request): return render_to_response('user.html', { 'mail': getmail(request), }, context_instance = RequestContext(request)) @login_required def user_settings(request): return render_to_response('settings.html', { 'school': getschool(request), 'mail': getmail(request), }, context_instance = RequestContext(request))
Create functions getmail and getschool so they can be used in both sites user and settings
Create functions getmail and getschool so they can be used in both sites user and settings
Python
agpl-3.0
LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr
--- +++ @@ -5,25 +5,30 @@ from django.template import RequestContext from django.contrib.auth.models import User from cronos.announcements.models import Id - -if request.user.email[-21:] == 'notapplicablemail.com': - mail = 'unset' -elif request.user.get_profile().webmail_username: - mail = webmail_username + '@teilar.gr' -else: - '' -for item in Id.objects.filter(urlid__exact = request.user.get_profile().school): - school = str(item.name) + +def getmail(request): + if request.user.email[-21:] == 'notapplicablemail.com': + mail = 'unset' + elif request.user.get_profile().webmail_username: + mail = webmail_username + '@teilar.gr' + else: + '' + return mail + +def getschool(request): + for item in Id.objects.filter(urlid__exact = request.user.get_profile().school): + school = str(item.name) + return school @login_required def user(request): return render_to_response('user.html', { - 'mail': mail, + 'mail': getmail(request), }, context_instance = RequestContext(request)) @login_required def user_settings(request): return render_to_response('settings.html', { - 'school': school, - 'mail': mail, + 'school': getschool(request), + 'mail': getmail(request), }, context_instance = RequestContext(request))
283ba7e4a08aeac07b030700b58e672f3f54ed12
utils/migrate.py
utils/migrate.py
import settings import os import yoyo import yoyo.connections def path(): return os.path.join(os.path.dirname(__file__), 'migrations') if __name__ == '__main__': conn, paramstyle = yoyo.connections.connect(settings.DATABASE_PATH) migrations = yoyo.read_migrations(conn, paramstyle, path()) migrations.to_apply().apply() conn.commit()
import os import psycopg2 import yoyo import yoyo.connections import settings def path(): return os.path.join(os.path.dirname(__file__), '..', 'migrations') def run_migrations(dbconn=None, names=[]): if dbconn is None: dbconn, paramstyle = yoyo.connections.connect(settings.DATABASE_PATH) else: paramstyle = psycopg2.paramstyle migrations = yoyo.read_migrations(dbconn, paramstyle, path(), names=names) migrations.to_apply().apply() dbconn.commit() if __name__ == '__main__': run_migrations()
Make migrations runnable from external modules
Make migrations runnable from external modules These will be used in tests to setup the database.
Python
mit
Storj/accounts
--- +++ @@ -1,16 +1,24 @@ -import settings import os +import psycopg2 import yoyo import yoyo.connections +import settings + def path(): - return os.path.join(os.path.dirname(__file__), 'migrations') + return os.path.join(os.path.dirname(__file__), '..', 'migrations') + +def run_migrations(dbconn=None, names=[]): + if dbconn is None: + dbconn, paramstyle = yoyo.connections.connect(settings.DATABASE_PATH) + else: + paramstyle = psycopg2.paramstyle + + migrations = yoyo.read_migrations(dbconn, paramstyle, path(), names=names) + migrations.to_apply().apply() + + dbconn.commit() if __name__ == '__main__': - conn, paramstyle = yoyo.connections.connect(settings.DATABASE_PATH) - - migrations = yoyo.read_migrations(conn, paramstyle, path()) - migrations.to_apply().apply() - - conn.commit() + run_migrations()
52c0b5d678e062384b6d4682b85b632bdc0ab093
ktbs_bench/utils/decorators.py
ktbs_bench/utils/decorators.py
from functools import wraps from inspect import getcallargs from timer import Timer def bench(f): """Times a function given specific arguments.""" timer = Timer(tick_now=False) @wraps(f) def wrapped(*args, **kwargs): timer.start() f(*args, **kwargs) timer.stop() res = {call_signature(f, *args, **kwargs): timer.get_times()['real']} # TODO penser a quel temps garder return res return wrapped def call_signature(f, *args, **kwargs): """Return a string representation of a function call.""" call_args = getcallargs(f, *args, **kwargs) return ';'.join(["%s=%s" % (k, v) for k, v in call_args.items()]) @bench def lala(a, b, c="default c", d="default d"): print("lala est appelee") if __name__ == '__main__': print(lala("cest a", "cest b", d="change d"))
from functools import wraps from inspect import getcallargs from timer import Timer def bench(f): """Times a function given specific arguments.""" timer = Timer(tick_now=False) @wraps(f) def wrapped(*args, **kwargs): timer.start() f(*args, **kwargs) timer.stop() res = [call_signature(f, *args, **kwargs), timer.get_times()['real']] # TODO penser a quel temps garder return res return wrapped def call_signature(f, *args, **kwargs): """Return a string representation of a function call.""" call_args = getcallargs(f, *args, **kwargs) return ';'.join(["%s=%s" % (k, v) for k, v in call_args.items()]) @bench def lala(a, b, c="default c", d="default d"): print("lala est appelee") if __name__ == '__main__': print(lala("cest a", "cest b", d="change d"))
Change @bench to return a list, because there will never be more than 1 key in the dict
Change @bench to return a list, because there will never be more than 1 key in the dict
Python
mit
ktbs/ktbs-bench,ktbs/ktbs-bench
--- +++ @@ -15,7 +15,8 @@ f(*args, **kwargs) timer.stop() - res = {call_signature(f, *args, **kwargs): timer.get_times()['real']} # TODO penser a quel temps garder + res = [call_signature(f, *args, **kwargs), + timer.get_times()['real']] # TODO penser a quel temps garder return res return wrapped
6520fde5be81eb3d1a91662edeef8bd2a1f6389c
stonemason/service/tileserver/helper.py
stonemason/service/tileserver/helper.py
# -*- encoding: utf-8 -*- __author__ = 'ray' __date__ = '4/4/15' from stonemason.mason import Portrayal from stonemason.mason.theme import Theme def jsonify_portrayal(portrayal): assert isinstance(portrayal, Portrayal) template = { 'name': portrayal.name, 'metadata': { 'version': portrayal.metadata.version, 'abstract': portrayal.metadata.abstract, 'attribution': portrayal.metadata.attribution, 'center': portrayal.metadata.center, 'center_zoom': portrayal.metadata.center_zoom }, 'maptype': portrayal.bundle.map_type, 'tileformat': portrayal.bundle.tile_format, 'pyramid': portrayal.pyramid, 'schemas': [] } for tag in portrayal.iter_schema(): template['schemas'].append(tag) return template def jsonify_map_theme(map_theme): assert isinstance(map_theme, Theme) return repr(map_theme)
# -*- encoding: utf-8 -*- __author__ = 'ray' __date__ = '4/4/15' from stonemason.mason import Portrayal from stonemason.mason.theme import Theme def jsonify_portrayal(portrayal): assert isinstance(portrayal, Portrayal) template = { 'name': portrayal.name, 'metadata': { 'title': portrayal.metadata.title, 'version': portrayal.metadata.version, 'abstract': portrayal.metadata.abstract, 'attribution': portrayal.metadata.attribution, 'center': portrayal.metadata.center, 'center_zoom': portrayal.metadata.center_zoom }, 'maptype': portrayal.bundle.map_type, 'tileformat': portrayal.bundle.tile_format, 'pyramid': portrayal.pyramid, 'schemas': [] } for tag in portrayal.iter_schema(): template['schemas'].append(tag) return template def jsonify_map_theme(map_theme): assert isinstance(map_theme, Theme) return repr(map_theme)
Add metadata title in portrayal view
FEATURE: Add metadata title in portrayal view
Python
mit
Kotaimen/stonemason,Kotaimen/stonemason
--- +++ @@ -12,6 +12,7 @@ template = { 'name': portrayal.name, 'metadata': { + 'title': portrayal.metadata.title, 'version': portrayal.metadata.version, 'abstract': portrayal.metadata.abstract, 'attribution': portrayal.metadata.attribution,
3785b2804c88215114e0bb21f1aab6dc0554b30c
django_react_templatetags/ssr/hypernova.py
django_react_templatetags/ssr/hypernova.py
import logging import json from django.conf import settings import hypernova from hypernova.plugins.dev_mode import DevModePlugin logger = logging.getLogger(__name__) class HypernovaService(): def load_or_empty(self, component, headers={}, ssr_context=None): renderer = hypernova.Renderer( settings.REACT_RENDER_HOST, [DevModePlugin(logger)] if settings.DEBUG else [], timeout=get_request_timeout(), headers=headers, ) inner_html = "" try: inner_html = renderer.render({component['name']: component['json']}) except Exception as e: msg = "SSR request to '{}' failed: {}".format( settings.REACT_RENDER_HOST, e.__class__.__name__ ) logger.exception(msg) return inner_html def get_request_timeout(): if not hasattr(settings, 'REACT_RENDER_TIMEOUT'): return 20 return settings.REACT_RENDER_TIMEOUT
import logging import json from django.conf import settings import hypernova logger = logging.getLogger(__name__) class HypernovaService(): def load_or_empty(self, component, headers={}, ssr_context=None): # from hypernova.plugins.dev_mode import DevModePlugin renderer = hypernova.Renderer( settings.REACT_RENDER_HOST, # [DevModePlugin(logger)] if settings.DEBUG else [], [], timeout=get_request_timeout(), headers=headers, ) inner_html = "" try: inner_html = renderer.render({component['name']: component['json']}) except Exception as e: msg = "SSR request to '{}' failed: {}".format( settings.REACT_RENDER_HOST, e.__class__.__name__ ) logger.exception(msg) return inner_html def get_request_timeout(): if not hasattr(settings, 'REACT_RENDER_TIMEOUT'): return 20 return settings.REACT_RENDER_TIMEOUT
Disable DevModePlugin until the py3 issue is fixed upstream
Disable DevModePlugin until the py3 issue is fixed upstream
Python
mit
Frojd/django-react-templatetags,Frojd/django-react-templatetags,Frojd/django-react-templatetags
--- +++ @@ -3,7 +3,6 @@ from django.conf import settings import hypernova -from hypernova.plugins.dev_mode import DevModePlugin logger = logging.getLogger(__name__) @@ -11,9 +10,12 @@ class HypernovaService(): def load_or_empty(self, component, headers={}, ssr_context=None): + # from hypernova.plugins.dev_mode import DevModePlugin + renderer = hypernova.Renderer( settings.REACT_RENDER_HOST, - [DevModePlugin(logger)] if settings.DEBUG else [], + # [DevModePlugin(logger)] if settings.DEBUG else [], + [], timeout=get_request_timeout(), headers=headers, )
d0c775dd7f7964db608dd56d1899aa4e3697cd1e
life/__main__.py
life/__main__.py
import pyglet from life import WIDTH, HEIGHT, CELL_SIZE, DISPLAY_FPS, FULLSCREEN from life.creator import Creator from life.view import Field creator = Creator(width=WIDTH, height=HEIGHT) if FULLSCREEN: window = pyglet.window.Window(fullscreen=True) cell_size = min(window.width // WIDTH, window.height // HEIGHT) field = Field( field_creator=creator, cell_size=cell_size, dx=(window.width - WIDTH * cell_size) // 2, dy=(window.height - HEIGHT * cell_size) // 2) else: field = Field(field_creator=creator, cell_size=CELL_SIZE) window = pyglet.window.Window(width=field.width, height=field.height) if DISPLAY_FPS: fps_display = pyglet.clock.ClockDisplay() else: fps_display = None @window.event def on_draw(): window.clear() field.draw() if fps_display: fps_display.draw() creator.start() pyglet.app.run()
import pyglet from life import WIDTH, HEIGHT, CELL_SIZE, DISPLAY_FPS, FULLSCREEN from life.creator import Creator from life.view import Field creator = Creator(width=WIDTH, height=HEIGHT) if FULLSCREEN: window = pyglet.window.Window(fullscreen=True) cell_size = min(window.width // WIDTH, window.height // HEIGHT) field = Field( field_creator=creator, cell_size=cell_size, dx=(window.width - WIDTH * cell_size) // 2, dy=(window.height - HEIGHT * cell_size) // 2) else: field = Field(field_creator=creator, cell_size=CELL_SIZE) window = pyglet.window.Window(width=field.width, height=field.height) if DISPLAY_FPS: fps_display = pyglet.window.FPSDisplay(window) fps_display.update_period = 1. else: fps_display = None @window.event def on_draw(): window.clear() field.draw() if fps_display: fps_display.draw() creator.start() pyglet.app.run()
Use correct FPS display implementation.
Use correct FPS display implementation.
Python
bsd-2-clause
lig/life
--- +++ @@ -20,7 +20,9 @@ window = pyglet.window.Window(width=field.width, height=field.height) if DISPLAY_FPS: - fps_display = pyglet.clock.ClockDisplay() + fps_display = pyglet.window.FPSDisplay(window) + fps_display.update_period = 1. + else: fps_display = None
69d22e9e7ff574d4f510269e589dafa45132047f
stdnum/br/__init__.py
stdnum/br/__init__.py
# __init__.py - collection of Brazilian numbers # coding: utf-8 # # Copyright (C) 2012 Arthur de Jong # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301 USA """Collection of Brazilian numbers."""
# __init__.py - collection of Brazilian numbers # coding: utf-8 # # Copyright (C) 2012 Arthur de Jong # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301 USA """Collection of Brazilian numbers.""" from stdnum.br import cnpj as vat # noqa: F401
Add missing vat alias for Brazil
Add missing vat alias for Brazil
Python
lgpl-2.1
arthurdejong/python-stdnum,arthurdejong/python-stdnum,arthurdejong/python-stdnum
--- +++ @@ -19,3 +19,4 @@ # 02110-1301 USA """Collection of Brazilian numbers.""" +from stdnum.br import cnpj as vat # noqa: F401
c22894e1ac7071e19515321df8eaa639045c9ae5
dlux/api.py
dlux/api.py
# Copyright 2014 Hewlett-Packard Development Company, L.P. # # Author: Endre Karlson <endre.karlson@hp.com> # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import requests from django.conf import settings from odlclient.v2 import client as odl_client def get_client(request): session = requests.Session() session.cookies.update({ 'JSESSIONID': request.user.jsessionid, 'JSESSIONIDSSO': request.user.jsessionidsso }) url = request.user.controller + '/controller/nb/v2' http = odl_client.HTTPClient(url, http=session, debug=settings.DEBUG) client = odl_client.Client(http) return client
# Copyright 2014 Hewlett-Packard Development Company, L.P. # # Author: Endre Karlson <endre.karlson@hp.com> # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import requests from django.conf import settings from odlclient.v2 import client as odl_client def get_client(request): session = requests.Session() session.cookies.update({ 'JSESSIONID': request.user.jsessionid, 'JSESSIONIDSSO': request.user.jsessionidsso }) http = odl_client.HTTPClient(request.user.controller, http=session, debug=settings.DEBUG) client = odl_client.Client(http) return client
Update according to latest odlclient
Update according to latest odlclient
Python
apache-2.0
ekarlso/dlux-horizon,ekarlso/dlux-horizon,ekarlso/dlux-horizon
--- +++ @@ -25,7 +25,7 @@ 'JSESSIONID': request.user.jsessionid, 'JSESSIONIDSSO': request.user.jsessionidsso }) - url = request.user.controller + '/controller/nb/v2' - http = odl_client.HTTPClient(url, http=session, debug=settings.DEBUG) + http = odl_client.HTTPClient(request.user.controller, http=session, + debug=settings.DEBUG) client = odl_client.Client(http) return client
ba499556cf3a1f09c55ba2631c1dbb988e95fb82
web/test/test_web.py
web/test/test_web.py
def test_web(app): client = app.test_client() response = client.post( '/user/sign-in?next=/', follow_redirects=True, data=dict( username='test@test.com', password='Password1' ) ) response = client.get('/') assert response.status_code == 200 assert b"<title>Training stats</title>" in response.data
def login(client): response = client.post( '/user/sign-in?next=/', follow_redirects=True, data=dict( username='test@test.com', password='Password1' ) ) return response def test_navigating_to_startpage(app): client = app.test_client() login(client) response = client.get('/') assert response.status_code == 200 assert b"<title>Training stats</title>" in response.data
Refactor test to be more clear
Refactor test to be more clear
Python
apache-2.0
vinntreus/training_stats,vinntreus/training_stats
--- +++ @@ -1,5 +1,4 @@ -def test_web(app): - client = app.test_client() +def login(client): response = client.post( '/user/sign-in?next=/', follow_redirects=True, @@ -8,6 +7,11 @@ password='Password1' ) ) + return response + +def test_navigating_to_startpage(app): + client = app.test_client() + login(client) response = client.get('/') assert response.status_code == 200 assert b"<title>Training stats</title>" in response.data
d8168185aa0153fac55e3c59761a5e561a5b0137
src/ocspdash/__init__.py
src/ocspdash/__init__.py
"""A dashboard for the status of the top certificate authorities' OCSP responders.""" # metadata __version__ = '0.1.0-dev' __title__ = 'OCSPdash' # keep the __description__ synchronized with the package docstring __description__ = "A dashboard for the status of the top certificate authorities' OCSP responders." __url__ = 'https://github.com/scolby33/OCSPdash' __author__ = 'Scott Colby' __email__ = 'scolby33@gmail.com' __license__ = 'MIT' __copyright__ = 'Copyright (c) 2017 Scott Colby' from .util import install_custom_json_encoder install_custom_json_encoder()
"""A dashboard for the status of the top certificate authorities' OCSP responders.""" # metadata __version__ = '0.1.0-dev' __title__ = 'OCSPdash' # keep the __description__ synchronized with the package docstring __description__ = "A dashboard for the status of the top certificate authorities' OCSP responders." __url__ = 'https://github.com/scolby33/OCSPdash' __author__ = 'Scott Colby' __email__ = 'scolby33@gmail.com' __license__ = 'MIT' __copyright__ = 'Copyright (c) 2017 Scott Colby and Charles Tapley Hoyt' from .util import install_custom_json_encoder install_custom_json_encoder()
Add @cthoyt to package __copyright__
Add @cthoyt to package __copyright__
Python
mit
scolby33/OCSPdash,scolby33/OCSPdash,scolby33/OCSPdash
--- +++ @@ -11,7 +11,7 @@ __email__ = 'scolby33@gmail.com' __license__ = 'MIT' -__copyright__ = 'Copyright (c) 2017 Scott Colby' +__copyright__ = 'Copyright (c) 2017 Scott Colby and Charles Tapley Hoyt' from .util import install_custom_json_encoder install_custom_json_encoder()
9bc3c6ef95bd3b229b019dcd0d39aa9940528cf4
config/development.py
config/development.py
# Secret passphrase # FIXME: Temporarily SECRET must have the same value as SECRET_KEYS # due to the current spring boot implementation SECRET = '/etc/keyczar/keys' # Secret keyczar keys SECRET_KEYS = '/etc/keyczar/keys' # Fill as needed DEBUG = True IGNORE_AUTH = True MESOS_FRAMEWORK_ENDPOINT = 'http://127.0.0.1:5004/bigdata/mesos_framework/v1/clusters' ORQUESTRATOR_ENDPOINT = 'http://127.0.0.1:5005/orquestrator/v1/clusters' #MESOS_FRAMEWORK_ENDPOINT = 'http://mesos_framework:5000/bigdata/mesos_framework/v1/clusters' #ORQUESTRATOR_ENDPOINT = 'http://orquestrator:5005/orquestrator/v1/clusters' CONSUL_ENDPOINT = 'http://consul:8500/v1/kv'
# Secret passphrase # FIXME: Temporarily SECRET must have the same value as SECRET_KEYS # due to the current spring boot implementation SECRET = '/etc/keyczar/keys' # Secret keyczar keys SECRET_KEYS = '/etc/keyczar/keys' # Fill as needed DEBUG = True IGNORE_AUTH = True #MESOS_FRAMEWORK_ENDPOINT = 'http://127.0.0.1:5004/bigdata/mesos_framework/v1/clusters' #ORQUESTRATOR_ENDPOINT = 'http://127.0.0.1:5005/orquestrator/v1/clusters' MESOS_FRAMEWORK_ENDPOINT = 'http://mesos_framework:5000/bigdata/mesos_framework/v1/clusters' ORQUESTRATOR_ENDPOINT = 'http://orquestrator:5005/orquestrator/v1/clusters' CONSUL_ENDPOINT = 'http://consul:8500/v1/kv'
Fix de mis endpoints que uso para desarrollo, disculpas T_T
Fix for my endpoints that I use for development, sorry T_T
Fix for my endpoints that I use for development, sorry T_T
apache-2.0
bigdatacesga/paas-service,bigdatacesga/paas-service,bigdatacesga/paas-service
--- +++ @@ -8,8 +8,8 @@ # Fill as needed DEBUG = True IGNORE_AUTH = True -MESOS_FRAMEWORK_ENDPOINT = 'http://127.0.0.1:5004/bigdata/mesos_framework/v1/clusters' -ORQUESTRATOR_ENDPOINT = 'http://127.0.0.1:5005/orquestrator/v1/clusters' -#MESOS_FRAMEWORK_ENDPOINT = 'http://mesos_framework:5000/bigdata/mesos_framework/v1/clusters' -#ORQUESTRATOR_ENDPOINT = 'http://orquestrator:5005/orquestrator/v1/clusters' +#MESOS_FRAMEWORK_ENDPOINT = 'http://127.0.0.1:5004/bigdata/mesos_framework/v1/clusters' +#ORQUESTRATOR_ENDPOINT = 'http://127.0.0.1:5005/orquestrator/v1/clusters' +MESOS_FRAMEWORK_ENDPOINT = 'http://mesos_framework:5000/bigdata/mesos_framework/v1/clusters' +ORQUESTRATOR_ENDPOINT = 'http://orquestrator:5005/orquestrator/v1/clusters' CONSUL_ENDPOINT = 'http://consul:8500/v1/kv'
00d835c3b4512b407033af280600d9428a155b22
noah/noah.py
noah/noah.py
import json import random import pprint class Noah(object): def __init__(self, dictionary_file): self.dictionary = json.load(dictionary_file) def list(self): return '\n'.join([entry['word'] for entry in self.dictionary]) def define(self, word): return self.output(filter(lambda x: x['word'] == word, self.dictionary)) if not entry is None: return self.output(entry) def random(self): return self.output(random.choice(self.dictionary)) def output(self, data): return json.dumps(data, indent=4) def main(): with open('../dictionaries/english.json') as dictionary: n = Noah(dictionary) print n.list() print n.define('run') print n.random() if __name__ == '__main__': main()
import json import random import pprint class Noah(object): def __init__(self, dictionary_file): self.dictionary = json.load(dictionary_file) def list(self): return '\n'.join([entry['word'] for entry in self.dictionary]) def define(self, word): return self.output(filter(lambda x: x['word'] == word, self.dictionary)) def random(self): return self.output(random.choice(self.dictionary)) def output(self, data): return json.dumps(data, indent=4) def main(): with open('../dictionaries/english.json') as dictionary: n = Noah(dictionary) print n.list() print n.define('run') print n.random() if __name__ == '__main__': main()
Remove unneeded block in define.
Remove unneeded block in define.
Python
mit
maxdeviant/noah
--- +++ @@ -11,9 +11,6 @@ def define(self, word): return self.output(filter(lambda x: x['word'] == word, self.dictionary)) - - if not entry is None: - return self.output(entry) def random(self): return self.output(random.choice(self.dictionary))
4bcf7f83351bc64ed47c5531cb66ccb20f762dd0
pyMKL/__init__.py
pyMKL/__init__.py
from __future__ import absolute_import from __future__ import unicode_literals from __future__ import print_function from __future__ import division from future import standard_library standard_library.install_aliases() import numpy as np import scipy.sparse as sp from ctypes import CDLL, cdll, RTLD_GLOBAL from ctypes import POINTER, byref, c_int, c_longlong path = 'libmkl_intel_lp64.dylib' MKLlib = CDLL(path, RTLD_GLOBAL) from .pardisoInterface import pardisoinit, pardiso from .pardisoSolver import pardisoSolver
from __future__ import absolute_import from __future__ import unicode_literals from __future__ import print_function from __future__ import division from future import standard_library standard_library.install_aliases() import platform import numpy as np import scipy.sparse as sp from ctypes import CDLL, cdll, RTLD_GLOBAL from ctypes import POINTER, byref, c_int, c_longlong systype = platform.system() if systype == 'Darwin': path = 'libmkl_intel_lp64.dylib' elif systype == 'Linux': path = 'libmkl_intel_lp64.so' elif systype == 'Windows': path = 'mkl_intel_lp64_dll.lib' else: raise ImportError('Cannot determine MKL library to import') MKLlib = CDLL(path, RTLD_GLOBAL) from .pardisoInterface import pardisoinit, pardiso from .pardisoSolver import pardisoSolver
Add experimental support for other OS versions.
Add experimental support for other OS versions.
Python
mit
dwfmarchant/pyMKL
--- +++ @@ -5,12 +5,21 @@ from future import standard_library standard_library.install_aliases() +import platform import numpy as np import scipy.sparse as sp from ctypes import CDLL, cdll, RTLD_GLOBAL from ctypes import POINTER, byref, c_int, c_longlong -path = 'libmkl_intel_lp64.dylib' +systype = platform.system() +if systype == 'Darwin': + path = 'libmkl_intel_lp64.dylib' +elif systype == 'Linux': + path = 'libmkl_intel_lp64.so' +elif systype == 'Windows': + path = 'mkl_intel_lp64_dll.lib' +else: + raise ImportError('Cannot determine MKL library to import') MKLlib = CDLL(path, RTLD_GLOBAL) from .pardisoInterface import pardisoinit, pardiso
6f1dc606b4c4f2702e0a5b48338488ac2eec197c
scripts/utils.py
scripts/utils.py
#!/usr/bin/env python3 # Touhou Community Reliant Automatic Patcher # Scripts # # ---- # """Utility functions shared among all the scripts.""" from collections import OrderedDict import json import os json_load_params = { 'object_pairs_hook': OrderedDict } def patch_files_filter(files): """Filters all file names that can not be among the content of a patch.""" for i in files: if i != 'files.js': yield i json_dump_params = { 'ensure_ascii': False, 'indent': '\t', 'separators': (',', ': '), 'sort_keys': True } # Default parameters for JSON input and output def json_load(fn, json_kwargs=json_load_params): with open(fn, 'r', encoding='utf-8') as file: return json.load(file, **json_kwargs) def json_store(fn, obj, dirs=[''], json_kwargs=json_dump_params): """Saves the JSON object [obj] to [fn], creating all necessary directories in the process. If [dirs] is given, the function is executed for every root directory in the array.""" for i in dirs: full_fn = os.path.join(i, fn) os.makedirs(os.path.dirname(full_fn), exist_ok=True) with open(full_fn, 'w', encoding='utf-8') as file: json.dump(obj, file, **json_kwargs) file.write('\n')
#!/usr/bin/env python3 # Touhou Community Reliant Automatic Patcher # Scripts # # ---- # """Utility functions shared among all the scripts.""" from collections import OrderedDict import json import os json_load_params = { 'object_pairs_hook': OrderedDict } def patch_files_filter(files): """Filters all file names that can not be among the content of a patch.""" for i in files: if i != 'files.js': yield i json_dump_params = { 'ensure_ascii': False, 'indent': '\t', 'separators': (',', ': '), 'sort_keys': True } # Default parameters for JSON input and output def json_load(fn, json_kwargs=json_load_params): with open(fn, 'r', encoding='utf-8') as file: return json.load(file, **json_kwargs) def json_store(fn, obj, dirs=[''], json_kwargs=json_dump_params): """Saves the JSON object [obj] to [fn], creating all necessary directories in the process. If [dirs] is given, the function is executed for every root directory in the array.""" for i in dirs: full_fn = os.path.join(i, fn) os.makedirs(os.path.dirname(full_fn), exist_ok=True) with open(full_fn, 'w', newline='\n', encoding='utf-8') as file: json.dump(obj, file, **json_kwargs) file.write('\n')
Enforce Unix newlines when writing JSON files.
scripts: Enforce Unix newlines when writing JSON files.
Python
unlicense
VBChunguk/thcrap,thpatch/thcrap,thpatch/thcrap,thpatch/thcrap,VBChunguk/thcrap,VBChunguk/thcrap,thpatch/thcrap,thpatch/thcrap
--- +++ @@ -42,6 +42,6 @@ for i in dirs: full_fn = os.path.join(i, fn) os.makedirs(os.path.dirname(full_fn), exist_ok=True) - with open(full_fn, 'w', encoding='utf-8') as file: + with open(full_fn, 'w', newline='\n', encoding='utf-8') as file: json.dump(obj, file, **json_kwargs) file.write('\n')
89929acbb2ee3c5617758966d8916139726d7b74
app/state.py
app/state.py
import multiprocessing import unicornhathd as unicornhat import importlib import sys import os import app.programs.hd class State: ''' Handles the Unicorn HAT state''' def __init__(self): self._process = None def start_program(self, name, params={}): try: program = getattr(app.programs.hd, name) except AttributeError: raise ProgramNotFound(name) self.stop_program() if params.get("brightness"): unicornhat.brightness(float(params["brightness"])) if params.get("rotation"): unicornhat.rotation(int(params["rotation"])) self._process = multiprocessing.Process(target=program.run, args=(params,)) self._process.start() def stop_program(self): if self._process is not None: self._process.terminate() unicornhat.show() class ProgramNotFound(Exception): pass
import multiprocessing import unicornhathd as unicornhat import importlib import sys import os import app.programs.hd class State: ''' Handles the Unicorn HAT state''' def __init__(self): self._process = None def start_program(self, name, params={}): try: program = getattr(app.programs.hd, name) except AttributeError: raise ProgramNotFound(name) self.stop_program() if params.get("brightness") is not None: unicornhat.brightness(float(params["brightness"])) if params.get("rotation") is not None: unicornhat.rotation(int(params["rotation"])) self._process = multiprocessing.Process(target=program.run, args=(params,)) self._process.start() def stop_program(self): if self._process is not None: self._process.terminate() unicornhat.show() class ProgramNotFound(Exception): pass
Fix setting rotation to 0
Fix setting rotation to 0
Python
mit
njbbaer/unicorn-remote,njbbaer/unicorn-remote,njbbaer/unicorn-remote
--- +++ @@ -21,10 +21,10 @@ self.stop_program() - if params.get("brightness"): + if params.get("brightness") is not None: unicornhat.brightness(float(params["brightness"])) - if params.get("rotation"): + if params.get("rotation") is not None: unicornhat.rotation(int(params["rotation"])) self._process = multiprocessing.Process(target=program.run, args=(params,))
013ed651c3e8e7cfa4b8babefc2664644b928852
pybtex/bibtex/exceptions.py
pybtex/bibtex/exceptions.py
# Copyright (C) 2006, 2007, 2008 Andrey Golovizin # # This file is part of pybtex. # # pybtex is free software; you can redistribute it and/or modify # under the terms of the GNU General Public License as published by the # Free Software Foundation; either version 2 of the License, or (at your # option) any later version. # # pybtex is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with pybtex; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 # USA class BibTeXError(Exception): pass
# Copyright (C) 2006, 2007, 2008 Andrey Golovizin # # This file is part of pybtex. # # pybtex is free software; you can redistribute it and/or modify # under the terms of the GNU General Public License as published by the # Free Software Foundation; either version 2 of the License, or (at your # option) any later version. # # pybtex is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with pybtex; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 # USA from pybtex.exceptions import PybtexError class BibTeXError(PybtexError): pass
Make BibTeXError a subclass of PybtexError.
Make BibTeXError a subclass of PybtexError.
Python
mit
andreas-h/pybtex,chbrown/pybtex,andreas-h/pybtex,chbrown/pybtex
--- +++ @@ -17,5 +17,7 @@ # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 # USA -class BibTeXError(Exception): +from pybtex.exceptions import PybtexError + +class BibTeXError(PybtexError): pass
e75e6ec300e1127f7010d36ef63343e522318f90
sunpy/instr/iris/iris.py
sunpy/instr/iris/iris.py
""" Some very beta tools for IRIS """ import sunpy.io import sunpy.time import sunpy.map __all__ = ['SJI_to_cube'] def SJI_to_cube(filename, start=0, stop=None): """ Read a SJI file and return a MapCube ..warning:: This function is a very early beta and is not stable. Further work is on going to improve SunPy IRIS support. Parameters ---------- filename: string File to read start: Temporal axis index to create MapCube from stop: Temporal index to stop MapCube at Returns ------- iris_cube: sunpy.map.MapCube A map cube of the SJI sequence """ hdus = sunpy.io.read_file(filename) #Get the time delta time_range = sunpy.time.TimeRange(hdus[0][1]['STARTOBS'], hdus[0][1]['ENDOBS']) splits = time_range.split(hdus[0][0].shape[0]) if not stop: stop = len(splits) headers = [hdus[0][1]]*(stop-start) datas = hdus[0][0][start:stop] #Make the cube: iris_cube = sunpy.map.Map(zip(datas,headers),cube=True) #Set the date/time for i,m in enumerate(iris_cube): m.meta['DATE-OBS'] = splits[i].center().isoformat() return iris_cube
""" Some very beta tools for IRIS """ import sunpy.io import sunpy.time import sunpy.map __all__ = ['SJI_to_cube'] def SJI_to_cube(filename, start=0, stop=None, hdu=0): """ Read a SJI file and return a MapCube ..warning:: This function is a very early beta and is not stable. Further work is on going to improve SunPy IRIS support. Parameters ---------- filename: string File to read start: Temporal axis index to create MapCube from stop: Temporal index to stop MapCube at hdu: Choose hdu index Returns ------- iris_cube: sunpy.map.MapCube A map cube of the SJI sequence """ hdus = sunpy.io.read_file(filename) #Get the time delta time_range = sunpy.time.TimeRange(hdus[hdu][1]['STARTOBS'], hdus[hdu][1]['ENDOBS']) splits = time_range.split(hdus[hdu][0].shape[0]) if not stop: stop = len(splits) headers = [hdus[hdu][1]]*(stop-start) datas = hdus[hdu][0][start:stop] #Make the cube: iris_cube = sunpy.map.Map(zip(datas,headers),cube=True) #Set the date/time for i,m in enumerate(iris_cube): m.meta['DATE-OBS'] = splits[i].center().isoformat() return iris_cube
Change hdu[0] to hdu for optional indexing
Change hdu[0] to hdu for optional indexing
Python
bsd-2-clause
Alex-Ian-Hamilton/sunpy,dpshelio/sunpy,dpshelio/sunpy,dpshelio/sunpy,Alex-Ian-Hamilton/sunpy,Alex-Ian-Hamilton/sunpy
--- +++ @@ -8,7 +8,7 @@ __all__ = ['SJI_to_cube'] -def SJI_to_cube(filename, start=0, stop=None): +def SJI_to_cube(filename, start=0, stop=None, hdu=0): """ Read a SJI file and return a MapCube @@ -27,6 +27,9 @@ stop: Temporal index to stop MapCube at + hdu: + Choose hdu index + Returns ------- @@ -36,14 +39,14 @@ hdus = sunpy.io.read_file(filename) #Get the time delta - time_range = sunpy.time.TimeRange(hdus[0][1]['STARTOBS'], hdus[0][1]['ENDOBS']) - splits = time_range.split(hdus[0][0].shape[0]) + time_range = sunpy.time.TimeRange(hdus[hdu][1]['STARTOBS'], hdus[hdu][1]['ENDOBS']) + splits = time_range.split(hdus[hdu][0].shape[0]) if not stop: stop = len(splits) - headers = [hdus[0][1]]*(stop-start) - datas = hdus[0][0][start:stop] + headers = [hdus[hdu][1]]*(stop-start) + datas = hdus[hdu][0][start:stop] #Make the cube: iris_cube = sunpy.map.Map(zip(datas,headers),cube=True)
230bb0a09146cd0b696b528b3ad6dd9ccf057113
tests/test_checker.py
tests/test_checker.py
import pytest import os, stat from botbot import checker, problems def test_fastq_checker(): assert checker.is_fastq("bad.fastq") == problems.PROB_FILE_IS_FASTQ assert checker.is_fastq("good.py") == problems.PROB_NO_PROBLEM assert checker.is_fastq("fastq.actually_ok_too") == problems.PROB_NO_PROBLEM def test_permission_checker(tmpdir): # Create a test file p = tmpdir.join("bad_permissions.txt") p.write('') prev = tmpdir.chdir() # Change its permissions a bunch... maybe this is too expensive? for m in range(0o300, 0o700, 0o010): p.chmod(m) prob = checker.has_permission_issues(os.path.abspath(p.basename)) if not bool(0o040 & m): # octal Unix permission for 'group readable' assert prob == problems.PROB_FILE_NOT_GRPRD else: assert prob == problems.PROB_NO_PROBLEM prev.chdir()
import pytest import os, stat from botbot import checker, problems def test_fastq_checker_path_names(): assert checker.is_fastq("bad.fastq") == problems.PROB_FILE_IS_FASTQ assert checker.is_fastq("good.py") == problems.PROB_NO_PROBLEM assert checker.is_fastq("fastq.actually_ok_too") == problems.PROB_NO_PROBLEM def test_fastq_checker_symlinks(tmpdir): prev = tmpdir.chdir() # Make a test file p = tmpdir.join("bad.fastq") p.write('') os.symlink(p.basename, "good.fastq") assert checker.is_fastq("bad.fastq") == problems.PROB_FILE_IS_FASTQ assert checker.is_fastq("good.fastq") == problems.PROB_NO_PROBLEM prev.chdir() def test_permission_checker(tmpdir): # Create a test file p = tmpdir.join("bad_permissions.txt") p.write('') prev = tmpdir.chdir() # Change its permissions a bunch... maybe this is too expensive? for m in range(0o300, 0o700, 0o010): p.chmod(m) prob = checker.has_permission_issues(os.path.abspath(p.basename)) if not bool(0o040 & m): # octal Unix permission for 'group readable' assert prob == problems.PROB_FILE_NOT_GRPRD else: assert prob == problems.PROB_NO_PROBLEM prev.chdir()
Add test for symlink detection
Add test for symlink detection
Python
mit
jackstanek/BotBot,jackstanek/BotBot
--- +++ @@ -3,10 +3,22 @@ from botbot import checker, problems -def test_fastq_checker(): +def test_fastq_checker_path_names(): assert checker.is_fastq("bad.fastq") == problems.PROB_FILE_IS_FASTQ assert checker.is_fastq("good.py") == problems.PROB_NO_PROBLEM assert checker.is_fastq("fastq.actually_ok_too") == problems.PROB_NO_PROBLEM + +def test_fastq_checker_symlinks(tmpdir): + prev = tmpdir.chdir() + + # Make a test file + p = tmpdir.join("bad.fastq") + p.write('') + os.symlink(p.basename, "good.fastq") + + assert checker.is_fastq("bad.fastq") == problems.PROB_FILE_IS_FASTQ + assert checker.is_fastq("good.fastq") == problems.PROB_NO_PROBLEM + prev.chdir() def test_permission_checker(tmpdir): # Create a test file
faed82947209b34ccb4063e8244a9da019fa52a2
bills/urls.py
bills/urls.py
from . import views from django.conf.urls import url urlpatterns = [ url(r'^by_topic/', views.bill_list_by_topic), url(r'^by_location', views.bill_list_by_location), url(r'^latest_activity/', views.latest_bill_activity), url(r'^latest/', views.latest_bill_actions), url(r'^detail/(?P<bill_session>(.*))/(?P<bill_identifier>(.*))/$', views.bill_detail, name='bill_detail'), ]
from . import views from django.conf.urls import url urlpatterns = [ url(r'^by_topic/', views.bill_list_by_topic), url(r'^by_location', views.bill_list_by_location), url(r'^current_session/', views.bill_list_current_session), url(r'^latest_activity/', views.bill_list_latest), url(r'^detail/(?P<bill_session>(.*))/(?P<bill_identifier>(.*))/$', views.bill_detail, name='bill_detail'), ]
Update bills added by current session
Update bills added by current session
Python
mit
jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot
--- +++ @@ -5,7 +5,7 @@ urlpatterns = [ url(r'^by_topic/', views.bill_list_by_topic), url(r'^by_location', views.bill_list_by_location), - url(r'^latest_activity/', views.latest_bill_activity), - url(r'^latest/', views.latest_bill_actions), + url(r'^current_session/', views.bill_list_current_session), + url(r'^latest_activity/', views.bill_list_latest), url(r'^detail/(?P<bill_session>(.*))/(?P<bill_identifier>(.*))/$', views.bill_detail, name='bill_detail'), ]
b631dadb54f90e4abb251f7680f883f2e3e0e914
radar/radar/validation/patient_numbers.py
radar/radar/validation/patient_numbers.py
from radar.groups import is_radar_group from radar.validation.core import Validation, pass_call, ValidationError, Field from radar.validation.sources import RadarSourceValidationMixin from radar.validation.meta import MetaValidationMixin from radar.validation.patients import PatientValidationMixin from radar.validation.validators import required, max_length, not_empty, normalise_whitespace from radar.validation.number_validators import NUMBER_VALIDATORS class PatientNumberValidation(PatientValidationMixin, RadarSourceValidationMixin, MetaValidationMixin, Validation): number = Field([not_empty(), normalise_whitespace(), max_length(50)]) number_group = Field([required()]) def validate_number_group(self, number_group): if is_radar_group(number_group): raise ValidationError("Can't add RaDaR numbers.") return number_group @pass_call def validate(self, call, obj): number_group = obj.number_group number_validators = NUMBER_VALIDATORS.get((number_group.type, number_group.code)) if number_validators is not None: call.validators_for_field(number_validators, obj, self.number) return obj
from radar.groups import is_radar_group, get_radar_group from radar.validation.core import Validation, pass_call, ValidationError, Field from radar.validation.sources import RadarSourceValidationMixin from radar.validation.meta import MetaValidationMixin from radar.validation.patients import PatientValidationMixin from radar.validation.validators import required, max_length, not_empty, normalise_whitespace from radar.validation.number_validators import NUMBER_VALIDATORS from radar.models.patient_numbers import PatientNumber from radar.database import db class PatientNumberValidation(PatientValidationMixin, RadarSourceValidationMixin, MetaValidationMixin, Validation): number = Field([not_empty(), normalise_whitespace(), max_length(50)]) number_group = Field([required()]) def validate_number_group(self, number_group): if is_radar_group(number_group): raise ValidationError("Can't add RaDaR numbers.") return number_group @classmethod def is_duplicate(cls, obj): q = PatientNumber.query q = q.filter(PatientNumber.source_group == get_radar_group()) q = q.filter(PatientNumber.number_group == obj.number_group) q = q.filter(PatientNumber.number == obj.number) if obj.id is not None: q = q.filter(PatientNumber.id != obj.id) q = q.exists() duplicate = db.session.query(q).scalar() return duplicate @pass_call def validate(self, call, obj): number_group = obj.number_group number_validators = NUMBER_VALIDATORS.get((number_group.type, number_group.code)) if number_validators is not None: call.validators_for_field(number_validators, obj, self.number) if self.is_duplicate(obj): raise ValidationError({'number': 'A patient already exists with this number.'}) return obj
Check for duplicate patient numbers
Check for duplicate patient numbers Fixes #286
Python
agpl-3.0
renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar
--- +++ @@ -1,10 +1,12 @@ -from radar.groups import is_radar_group +from radar.groups import is_radar_group, get_radar_group from radar.validation.core import Validation, pass_call, ValidationError, Field from radar.validation.sources import RadarSourceValidationMixin from radar.validation.meta import MetaValidationMixin from radar.validation.patients import PatientValidationMixin from radar.validation.validators import required, max_length, not_empty, normalise_whitespace from radar.validation.number_validators import NUMBER_VALIDATORS +from radar.models.patient_numbers import PatientNumber +from radar.database import db class PatientNumberValidation(PatientValidationMixin, RadarSourceValidationMixin, MetaValidationMixin, Validation): @@ -17,6 +19,22 @@ return number_group + @classmethod + def is_duplicate(cls, obj): + q = PatientNumber.query + q = q.filter(PatientNumber.source_group == get_radar_group()) + q = q.filter(PatientNumber.number_group == obj.number_group) + q = q.filter(PatientNumber.number == obj.number) + + if obj.id is not None: + q = q.filter(PatientNumber.id != obj.id) + + q = q.exists() + + duplicate = db.session.query(q).scalar() + + return duplicate + @pass_call def validate(self, call, obj): number_group = obj.number_group @@ -26,4 +44,7 @@ if number_validators is not None: call.validators_for_field(number_validators, obj, self.number) + if self.is_duplicate(obj): + raise ValidationError({'number': 'A patient already exists with this number.'}) + return obj
6a01e99585db3ea38a8d8325dd4f826e78fc0f1d
test_project/settings.py
test_project/settings.py
import os import sys PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) sys.path.insert(0, os.path.join(PROJECT_ROOT, '..')) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', } } INSTALLED_APPS = ( 'djcelery_email', 'appconf', 'tester', ) SECRET_KEY = 'unique snowflake' TEST_RUNNER = "test_runner.DJCETestSuiteRunner" # Not set here - see 'test_runner.py' # EMAIL_BACKEND = 'djcelery_email.backends.CeleryEmailBackend' CELERY_EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' CELERY_EMAIL_TASK_CONFIG = { 'queue' : 'django_email', 'delivery_mode' : 1, # non persistent 'rate_limit' : '50/m', # 50 chunks per minute }
import os import sys PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) sys.path.insert(0, os.path.join(PROJECT_ROOT, '..')) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', } } INSTALLED_APPS = ( 'djcelery_email', 'appconf', 'tester', ) SECRET_KEY = 'unique snowflake' # Django 1.7 throws dire warnings if this is not set. # We don't actually use any middleware, given that there are no views. MIDDLEWARE_CLASSES = () TEST_RUNNER = "test_runner.DJCETestSuiteRunner" # Not set here - see 'test_runner.py' # EMAIL_BACKEND = 'djcelery_email.backends.CeleryEmailBackend' CELERY_EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' CELERY_EMAIL_TASK_CONFIG = { 'queue' : 'django_email', 'delivery_mode' : 1, # non persistent 'rate_limit' : '50/m', # 50 chunks per minute }
Set MIDDLEWARE_CLASSES to stop Django 1.7 warnings.
Set MIDDLEWARE_CLASSES to stop Django 1.7 warnings.
Python
bsd-3-clause
pmclanahan/django-celery-email,pmclanahan/django-celery-email
--- +++ @@ -19,6 +19,9 @@ SECRET_KEY = 'unique snowflake' +# Django 1.7 throws dire warnings if this is not set. +# We don't actually use any middleware, given that there are no views. +MIDDLEWARE_CLASSES = () TEST_RUNNER = "test_runner.DJCETestSuiteRunner"
f59b249cf2b149f96833d9e1025a98819bf5f62a
sharepa/search.py
sharepa/search.py
import json import requests from elasticsearch_dsl import Search from elasticsearch_dsl.result import Response class ShareSearch(Search): BASE_URL = 'https://osf.io/api/v1/share/search/' HEADERS = {'content-type': 'application/json'} PARAMS = dict(raw=True) def execute(self): return Response( self._query(self.to_dict()), callbacks=self._doc_type_map ) def count(self): d = self.to_dict() if d.get('aggs'): del d['aggs'] self = ShareSearch.from_dict(d) return self._query(self.to_dict(), params=dict(count=True))['count'] def scan(self, size=100): count = 0 page = list(self[0:size].execute()) while(page): for hit in page: count += 1 yield hit page = list(self[count:count + size].execute()) def _query(self, data, params=None): return requests.post( self.BASE_URL, headers=self.HEADERS, data=json.dumps(self.to_dict()), params=params or self.PARAMS ).json() basic_search = ShareSearch() basic_search.aggs.bucket( 'sourceAgg', 'terms', field='_type', size=0, min_doc_count=0 )
import json import requests from elasticsearch_dsl import Search from elasticsearch_dsl.result import Response class ShareSearch(Search): BASE_URL = 'http://localhost:8000/api/search/abstractcreativework/_search' HEADERS = {'content-type': 'application/json'} PARAMS = dict(raw=True) def execute(self): return Response( self._query(self.to_dict()), callbacks=self._doc_type_map ) def count(self): d = self.to_dict() if d.get('aggs'): del d['aggs'] self = ShareSearch.from_dict(d) return self._query(self.to_dict(), params=dict(size=0))['hits']['total'] def scan(self, size=100): count = 0 page = list(self[0:size].execute()) while(page): for hit in page: count += 1 yield hit page = list(self[count:count + size].execute()) def _query(self, data, params=None): return requests.post( self.BASE_URL, headers=self.HEADERS, data=json.dumps(self.to_dict()), params=params or self.PARAMS ).json() basic_search = ShareSearch() basic_search.aggs.bucket( 'sourceAgg', 'terms', field='_type', size=0, min_doc_count=0 )
Fix count param, use local es for now
Fix count param, use local es for now
Python
mit
CenterForOpenScience/sharepa,fabianvf/sharepa
--- +++ @@ -6,7 +6,7 @@ class ShareSearch(Search): - BASE_URL = 'https://osf.io/api/v1/share/search/' + BASE_URL = 'http://localhost:8000/api/search/abstractcreativework/_search' HEADERS = {'content-type': 'application/json'} PARAMS = dict(raw=True) @@ -21,7 +21,7 @@ if d.get('aggs'): del d['aggs'] self = ShareSearch.from_dict(d) - return self._query(self.to_dict(), params=dict(count=True))['count'] + return self._query(self.to_dict(), params=dict(size=0))['hits']['total'] def scan(self, size=100): count = 0
7e883fcfc539f18cd29c2babaf083583495f46d3
migrations/versions/1f9c61031fa_.py
migrations/versions/1f9c61031fa_.py
"""empty message Revision ID: 1f9c61031fa Revises: 1f872d11bbf Create Date: 2016-01-24 17:46:54.879784 """ # revision identifiers, used by Alembic. revision = '1f9c61031fa' down_revision = '1f872d11bbf' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('setting', sa.Column('key', sa.String(length=100))) op.drop_column('setting', 'id') ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('setting', sa.Column('id', sa.INTEGER(), nullable=False)) op.drop_column('setting', 'key') ### end Alembic commands ###
"""empty message Revision ID: 1f9c61031fa Revises: 1f872d11bbf Create Date: 2016-01-24 17:46:54.879784 """ # revision identifiers, used by Alembic. revision = '1f9c61031fa' down_revision = '1f872d11bbf' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('setting', sa.Column('key', sa.String(length=100))) op.alter_column('setting', 'name', existing_type=sa.VARCHAR(length=100), nullable=True) op.drop_column('setting', 'id') ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('setting', sa.Column('id', sa.INTEGER(), nullable=False)) op.alter_column('setting', 'name', existing_type=sa.VARCHAR(length=100), nullable=False) op.drop_column('setting', 'key') ### end Alembic commands ###
Fix NOT NULL constraint on Setting name not being removed
Fix NOT NULL constraint on Setting name not being removed
Python
mit
Encrylize/flask-blogger,Encrylize/flask-blogger,Encrylize/flask-blogger
--- +++ @@ -17,6 +17,9 @@ def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('setting', sa.Column('key', sa.String(length=100))) + op.alter_column('setting', 'name', + existing_type=sa.VARCHAR(length=100), + nullable=True) op.drop_column('setting', 'id') ### end Alembic commands ### @@ -24,5 +27,8 @@ def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('setting', sa.Column('id', sa.INTEGER(), nullable=False)) + op.alter_column('setting', 'name', + existing_type=sa.VARCHAR(length=100), + nullable=False) op.drop_column('setting', 'key') ### end Alembic commands ###
26c1daab6095c6110995104b94ad5b6260557c70
aiortp/sdp.py
aiortp/sdp.py
class SDP: def __init__(self, local_addr, ptime): self.local_addr = local_addr self.ptime = ptime local_addr_desc = f'IN IP4 {self.local_addr[0]}' self.payload = '\r\n'.join([ 'v=0', f'o=user1 53655765 2353687637 {local_addr_desc}', 's=-', 't=0 0', 'i=aiortp media stream', f'm=audio {self.local_addr[1]} RTP/AVP 0 101 13', f'c={local_addr_desc}', 'a=rtpmap:0 PCMU/8000/1', 'a=rtpmap:101 telephone-event/8000', 'a=fmtp:101 0-15', f'a=ptime:{self.ptime}', 'a=sendrecv', '', ]) def __str__(self): return self.payload
class SDP: def __init__(self, local_addr, ptime): self.local_addr = local_addr self.ptime = ptime local_addr_desc = 'IN IP4 {}'.format(self.local_addr[0]) self.payload = '\r\n'.join([ 'v=0', 'o=user1 53655765 2353687637 {local_addr_desc}', 's=-', 't=0 0', 'i=aiortp media stream', 'm=audio {local_port} RTP/AVP 0 101 13', 'c={local_addr_desc}', 'a=rtpmap:0 PCMU/8000/1', 'a=rtpmap:101 telephone-event/8000', 'a=fmtp:101 0-15', 'a=ptime:{ptime}', 'a=sendrecv', '', ]).format(local_addr_desc=local_addr_desc, local_port=self.local_addr[1], ptime=self.ptime) def __str__(self): return self.payload
Remove python 3.6 only format strings
Remove python 3.6 only format strings
Python
apache-2.0
vodik/aiortp
--- +++ @@ -3,22 +3,24 @@ self.local_addr = local_addr self.ptime = ptime - local_addr_desc = f'IN IP4 {self.local_addr[0]}' + local_addr_desc = 'IN IP4 {}'.format(self.local_addr[0]) self.payload = '\r\n'.join([ 'v=0', - f'o=user1 53655765 2353687637 {local_addr_desc}', + 'o=user1 53655765 2353687637 {local_addr_desc}', 's=-', 't=0 0', 'i=aiortp media stream', - f'm=audio {self.local_addr[1]} RTP/AVP 0 101 13', - f'c={local_addr_desc}', + 'm=audio {local_port} RTP/AVP 0 101 13', + 'c={local_addr_desc}', 'a=rtpmap:0 PCMU/8000/1', 'a=rtpmap:101 telephone-event/8000', 'a=fmtp:101 0-15', - f'a=ptime:{self.ptime}', + 'a=ptime:{ptime}', 'a=sendrecv', '', - ]) + ]).format(local_addr_desc=local_addr_desc, + local_port=self.local_addr[1], + ptime=self.ptime) def __str__(self): return self.payload
e2ee9045c59e3f03c5342ee41d23e4adece43535
weather/admin.py
weather/admin.py
from django.contrib.admin import ModelAdmin, register from django.contrib.gis.admin import GeoModelAdmin from weather.models import WeatherStation, Location @register(Location) class LocationAdmin(GeoModelAdmin): pass @register(WeatherStation) class WeatherStationAdmin(ModelAdmin): list_display = ( 'name', 'abbreviation', 'ip_address', 'last_reading', 'battery_voltage', 'connect_every', 'active')
from django.contrib.admin import ModelAdmin, register from django.contrib.gis.admin import GeoModelAdmin from weather.models import WeatherStation, Location @register(Location) class LocationAdmin(GeoModelAdmin): openlayers_url = '//static.dpaw.wa.gov.au/static/libs/openlayers/2.13.1/OpenLayers.js' @register(WeatherStation) class WeatherStationAdmin(ModelAdmin): list_display = ( 'name', 'abbreviation', 'ip_address', 'last_reading', 'battery_voltage', 'connect_every', 'active')
Define URL for OpenLayers.js to DPaW CDN.
Define URL for OpenLayers.js to DPaW CDN.
Python
bsd-3-clause
parksandwildlife/resource_tracking,parksandwildlife/resource_tracking,ropable/resource_tracking,ropable/resource_tracking,ropable/resource_tracking,parksandwildlife/resource_tracking
--- +++ @@ -5,7 +5,7 @@ @register(Location) class LocationAdmin(GeoModelAdmin): - pass + openlayers_url = '//static.dpaw.wa.gov.au/static/libs/openlayers/2.13.1/OpenLayers.js' @register(WeatherStation)
a6bd1cfc5f87d6f9a7ac846665fcab5b02c33c1d
tubular/scripts/hipchat/submit_hipchat_msg.py
tubular/scripts/hipchat/submit_hipchat_msg.py
import os import sys import requests import click HIPCHAT_API_URL = "http://api.hipchat.com" NOTIFICATION_POST = "/v2/room/{}/notification" AUTH_HEADER = "Authorization: Bearer {}" @click.command() @click.option('--auth_token_env_var', '-a', help="Environment variable containing authentication token to use for HipChat REST API.", ) @click.option('--channel', '-c', default="release pipeline", help="Channel to which the script should post a message.", ) def cli(auth_token_env_var, channel): """ Post a message to a HipChat channel. """ msg = "Test message from the demo GoCD release pipeline." headers = { "Authorization": "Bearer {}".format(os.environ[auth_token_env_var]) } msg_payload = { "color": "green", "message": msg, "notify": False, "message_format": "text" } post_url = HIPCHAT_API_URL + NOTIFICATION_POST.format(channel) r = requests.post(post_url, headers=headers, json=msg_payload) # An exit code of 0 means success and non-zero means failure. success = r.status_code in (200, 201, 204) sys.exit(not success) if __name__ == '__main__': cli()
import os import sys import requests import click HIPCHAT_API_URL = "http://api.hipchat.com" NOTIFICATION_POST = "/v2/room/{}/notification" AUTH_HEADER = "Authorization: Bearer {}" @click.command() @click.option('--auth_token_env_var', '-a', help="Environment variable containing authentication token to use for HipChat REST API.", ) @click.option('--channel', '-c', default="release pipeline", help="Channel to which the script should post a message.", ) @click.option('--message', '-m', default="Default message.", help="Message to send to HipChat channel.", ) def cli(auth_token_env_var, channel, message): """ Post a message to a HipChat channel. """ headers = { "Authorization": "Bearer {}".format(os.environ[auth_token_env_var]) } msg_payload = { "color": "green", "message": message, "notify": False, "message_format": "text" } post_url = HIPCHAT_API_URL + NOTIFICATION_POST.format(channel) r = requests.post(post_url, headers=headers, json=msg_payload) # An exit code of 0 means success and non-zero means failure. success = r.status_code in (200, 201, 204) sys.exit(not success) if __name__ == '__main__': cli()
Add ability to set HipChat message contents.
Add ability to set HipChat message contents.
Python
agpl-3.0
eltoncarr/tubular,eltoncarr/tubular
--- +++ @@ -17,18 +17,20 @@ default="release pipeline", help="Channel to which the script should post a message.", ) -def cli(auth_token_env_var, channel): +@click.option('--message', '-m', + default="Default message.", + help="Message to send to HipChat channel.", + ) +def cli(auth_token_env_var, channel, message): """ Post a message to a HipChat channel. """ - msg = "Test message from the demo GoCD release pipeline." - headers = { "Authorization": "Bearer {}".format(os.environ[auth_token_env_var]) } msg_payload = { "color": "green", - "message": msg, + "message": message, "notify": False, "message_format": "text" }
e58e33fbce322ba94ce60afc053b3610ad5bf993
opps/__init__.py
opps/__init__.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import pkg_resources pkg_resources.declare_namespace(__name__) VERSION = (0, 1, 4) __version__ = ".".join(map(str, VERSION)) __status__ = "Development" __description__ = u"Opps CMS websites magazines and high-traffic" __author__ = u"Thiago Avelino" __credits__ = [] __email__ = u"opps-developers@googlegroups.com" __license__ = u"MIT License" __copyright__ = u"Copyright 2013, YACOWS"
#!/usr/bin/env python # -*- coding: utf-8 -*- import pkg_resources pkg_resources.declare_namespace(__name__) VERSION = (0, 1, 5) __version__ = ".".join(map(str, VERSION)) __status__ = "Development" __description__ = u"Opps CMS websites magazines and high-traffic" __author__ = u"Thiago Avelino" __credits__ = [] __email__ = u"opps-developers@googlegroups.com" __license__ = u"MIT License" __copyright__ = u"Copyright 2013, YACOWS"
Upgrade version 0.1.4 to 0.1.5
Upgrade version 0.1.4 to 0.1.5
Python
mit
jeanmask/opps,YACOWS/opps,opps/opps,williamroot/opps,williamroot/opps,YACOWS/opps,opps/opps,williamroot/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,jeanmask/opps,opps/opps,opps/opps
--- +++ @@ -4,7 +4,7 @@ pkg_resources.declare_namespace(__name__) -VERSION = (0, 1, 4) +VERSION = (0, 1, 5) __version__ = ".".join(map(str, VERSION)) __status__ = "Development"
283f4d0dc1896b35e1c6be3458a99c87b9296659
amaascore/asset_managers/enums.py
amaascore/asset_managers/enums.py
from __future__ import absolute_import, division, print_function, unicode_literals ASSET_MANAGER_TYPES = {'Accredited Investor', 'Bank', 'Broker', 'Corporate Treasury', 'Family Office', 'Fund Administrator', 'Fund Manager', 'Hedge Fund', 'Private Equity', 'Retail', 'Venture Capital'} RELATIONSHIP_TYPES = {'Administrator', 'External', 'Front Office', 'Employee'} ACCOUNT_TYPES = {'Test', 'Basic', 'Professional'}
from __future__ import absolute_import, division, print_function, unicode_literals ASSET_MANAGER_TYPES = {'Accredited Investor', 'Bank', 'Broker', 'Corporate Treasury', 'Family Office', 'Fund Administrator', 'Fund Manager', 'Hedge Fund', 'Individual', 'Private Equity', 'Venture Capital'} RELATIONSHIP_TYPES = {'Administrator', 'External', 'Front Office', 'Employee'} ACCOUNT_TYPES = {'Test', 'Basic', 'Professional'}
Migrate “Retail” to “Individual” for clarity. AMAAS-764.
Migrate “Retail” to “Individual” for clarity. AMAAS-764.
Python
apache-2.0
paul-rs/amaas-core-sdk-python,amaas-fintech/amaas-core-sdk-python,nedlowe/amaas-core-sdk-python,nedlowe/amaas-core-sdk-python,amaas-fintech/amaas-core-sdk-python,paul-rs/amaas-core-sdk-python
--- +++ @@ -2,7 +2,7 @@ ASSET_MANAGER_TYPES = {'Accredited Investor', 'Bank', 'Broker', 'Corporate Treasury', 'Family Office', - 'Fund Administrator', 'Fund Manager', 'Hedge Fund', 'Private Equity', 'Retail', + 'Fund Administrator', 'Fund Manager', 'Hedge Fund', 'Individual', 'Private Equity', 'Venture Capital'} RELATIONSHIP_TYPES = {'Administrator', 'External', 'Front Office', 'Employee'} ACCOUNT_TYPES = {'Test', 'Basic', 'Professional'}
b17104be53389604b4b7f5f109895bdaa6389e43
hic/flow.py
hic/flow.py
# -*- coding: utf-8 -*- from __future__ import division import numpy as np import numexpr as ne __all__ = 'qn', 'FlowCumulant' def qn(n, phi): return ne.evaluate('sum(exp(1j*n*phi))') class FlowCumulant(object): def __init__(self, multiplicities, qn): self.multiplicities = np.asarray(multiplicities) self._qn = dict(qn) self._corr2 = {} self._corr4 = {} def _calculate_corr2(self, n): try: qn = self._qn[n] # noqa except KeyError: raise M = self.multiplicities # noqa self._corr[n][2] = ne.evaluate( 'sum(qn*conj(qn) - M) / sum(M*(M-1))' ) def _calculate_corr4(self, n): pass def _get_corr(self, n, k): pass def correlation(self, n, k): pass def cumulant(self, n, k, error=False, negative_imaginary=False): pass
# -*- coding: utf-8 -*- from __future__ import division import numpy as np import numexpr as ne __all__ = 'qn', 'FlowCumulant' # If a variable is only ever used by numexpr, flake8 will flag it as unused. # The comment 'noqa' prevents this warning. def qn(n, phi): return ne.evaluate('sum(exp(1j*n*phi))') class FlowCumulant(object): def __init__(self, multiplicities, qn): self.multiplicities = np.asarray(multiplicities) self._qn = dict(qn) self._corr2 = {} self._corr4 = {} def _calculate_corr2(self, n): try: qn = self._qn[n] # noqa except KeyError: raise M = self.multiplicities # noqa self._corr[n][2] = ne.evaluate( 'sum(qn*conj(qn) - M) / sum(M*(M-1))' ) def _calculate_corr4(self, n): pass def _get_corr(self, n, k): pass def correlation(self, n, k): pass def cumulant(self, n, k, error=False, negative_imaginary=False): pass
Add note about flake8 ignore flag.
Add note about flake8 ignore flag.
Python
mit
jbernhard/hic,Duke-QCD/hic
--- +++ @@ -6,6 +6,10 @@ import numexpr as ne __all__ = 'qn', 'FlowCumulant' + + +# If a variable is only ever used by numexpr, flake8 will flag it as unused. +# The comment 'noqa' prevents this warning. def qn(n, phi):
bbd3190b31a3751d9173b81d6f53c937208969a7
tests/main_test.py
tests/main_test.py
#!/usr/bin/env python3

from libpals.util import xor_find_singlechar_key, hamming_distance, fixed_xor


def test_xor_find_singlechar_key():
    input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'
    ciphertext = bytes.fromhex(input)
    result = xor_find_singlechar_key(ciphertext)
    assert result['key'] == 88
    assert result['plaintext'] == b"Cooking MC's like a pound of bacon"


def test_hamming_distance():
    assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37


def test_fixed_xor():
    input = bytes.fromhex("1c0111001f010100061a024b53535009181c")
    key = bytes.fromhex("686974207468652062756c6c277320657965")
    assert fixed_xor(input, key) == b"the kid don't play"
#!/usr/bin/env python3

from libpals.util import (
    xor_find_singlechar_key,
    hamming_distance,
    fixed_xor
)


def test_xor_find_singlechar_key():
    input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'
    ciphertext = bytes.fromhex(input)
    result = xor_find_singlechar_key(ciphertext)
    assert result['key'] == 88
    assert result['plaintext'] == b"Cooking MC's like a pound of bacon"


def test_hamming_distance():
    assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37


def test_fixed_xor():
    input = bytes.fromhex("1c0111001f010100061a024b53535009181c")
    key = bytes.fromhex("686974207468652062756c6c277320657965")
    assert fixed_xor(input, key) == b"the kid don't play"
Change to multi-line imports in the test suite
Change to multi-line imports in the test suite
Python
bsd-2-clause
cpach/cryptopals-python3
--- +++ @@ -1,6 +1,10 @@ #!/usr/bin/env python3 -from libpals.util import xor_find_singlechar_key, hamming_distance, fixed_xor +from libpals.util import ( + xor_find_singlechar_key, + hamming_distance, + fixed_xor +) def test_xor_find_singlechar_key(): input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'
6a54876a30ec92c0243cf758b7ddf35d7ad3b926
pgup/__init__.py
pgup/__init__.py
# -*- coding: utf-8 -*-
__version__ = "0.2.3"
from config import Config
from build_init import build_init
from build_diff import build_diff
# -*- coding: utf-8 -*-
__version__ = "0.2.4"
from config import Config
from build_init import build_init
from build_diff import build_diff
Add skipping of files that are not Table or Procedure files
Add skipping of files that are not Table or Procedure files
Python
mit
stepan-perlov/pgup
--- +++ @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -__version__ = "0.2.3" +__version__ = "0.2.4" from config import Config from build_init import build_init from build_diff import build_diff
e44eb0bd99b4dec1b78707c7343fc6d9b647c7bb
scripts/write_antenna_location_file.py
scripts/write_antenna_location_file.py
#! /usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2016 the HERA Collaboration
# Licensed under the 2-clause BSD license.

"""
Script to write out antenna locations for use in cal files.
"""

import pandas as pd
from hera_mc import mc, geo_handling
import datetime

parser = mc.get_mc_argument_parser()
parser.add_argument('--file', help="file name to save antenna locations to",
                    default='hera_ant_locs_' +
                    datetime.date.today().strftime("%m_%d_%Y") + '.csv')
args = parser.parse_args()
filename = args.file

db = mc.connect_to_mc_db(args)

locations = geo_handling.get_all_locations(args)

df = pd.DataFrame(locations)
df = df[['station_name', 'station_type', 'longitude', 'latitude', 'elevation',
         'antenna_number', 'start_date', 'stop_date']]
df.to_csv(filename, index=False)
#! /usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2016 the HERA Collaboration
# Licensed under the 2-clause BSD license.

"""
Script to write out antenna locations for use in cal files.
"""

import pandas as pd
from hera_mc import mc, geo_handling
import datetime

parser = mc.get_mc_argument_parser()
parser.add_argument('--file', help="file name to save antenna locations to",
                    default='hera_ant_locs_' +
                    datetime.date.today().strftime("%m_%d_%Y") + '.csv')
args = parser.parse_args()
filename = args.file

db = mc.connect_to_mc_db(args)

locations = geo_handling.get_all_locations(args)
cofa_loc = geo_handling.cofa()
locations.append({'station_name': cofa_loc.station_name,
                  'station_type': cofa_loc.station_type_name,
                  'longitude': cofa_loc.lon,
                  'latitude': cofa_loc.lat,
                  'elevation': cofa_loc.elevation,
                  'antenna_number': None,
                  'start_date': cofa_loc.created_date,
                  'stop_date': None})

df = pd.DataFrame(locations)
df = df[['station_name', 'station_type', 'longitude', 'latitude', 'elevation',
         'antenna_number', 'start_date', 'stop_date']]
df.to_csv(filename, index=False)
Add cofa information to antenna location files
Add cofa information to antenna location files
Python
bsd-2-clause
HERA-Team/hera_mc,HERA-Team/Monitor_and_Control,HERA-Team/hera_mc
--- +++ @@ -18,6 +18,15 @@ db = mc.connect_to_mc_db(args) locations = geo_handling.get_all_locations(args) +cofa_loc = geo_handling.cofa() +locations.append({'station_name': cofa_loc.station_name, + 'station_type': cofa_loc.station_type_name, + 'longitude': cofa_loc.lon, + 'latitude': cofa_loc.lat, + 'elevation': cofa_loc.elevation, + 'antenna_number': None, + 'start_date': cofa_loc.created_date, + 'stop_date': None}) df = pd.DataFrame(locations) df = df[['station_name', 'station_type', 'longitude', 'latitude', 'elevation', 'antenna_number', 'start_date', 'stop_date']]
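As background on the pandas calls in this commit: a DataFrame built from a list of dicts does not guarantee column order, so the script re-indexes with an explicit column list before calling to_csv. A minimal, self-contained sketch of that pattern (the rows and file name below are made up, not HERA data):

import pandas as pd

rows = [
    {'station_name': 'HH0', 'longitude': 21.42, 'latitude': -30.72, 'elevation': 1051.0},
    {'station_name': 'HH1', 'longitude': 21.43, 'latitude': -30.72, 'elevation': 1052.0},
]

df = pd.DataFrame(rows)
# Select and order the columns explicitly before export.
df = df[['station_name', 'longitude', 'latitude', 'elevation']]
df.to_csv('stations_example.csv', index=False)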
6bbee1abf5e53c3e3ce9cc84f35820ee9fe11500
dsub/_dsub_version.py
dsub/_dsub_version.py
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.

This must remain small and dependency-free so that any dsub module may import
it without creating circular dependencies.

Note that this module is parsed as a text file by setup.py and changes to
the format of this file could break setup.py.

The version should follow formatting requirements specified in PEP-440.
  - https://www.python.org/dev/peps/pep-0440

A typical release sequence will be versioned as:
  0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""

DSUB_VERSION = '0.3.9.dev0'
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.

This must remain small and dependency-free so that any dsub module may import
it without creating circular dependencies.

Note that this module is parsed as a text file by setup.py and changes to
the format of this file could break setup.py.

The version should follow formatting requirements specified in PEP-440.
  - https://www.python.org/dev/peps/pep-0440

A typical release sequence will be versioned as:
  0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""

DSUB_VERSION = '0.3.9'
Update dsub version to 0.3.9
Update dsub version to 0.3.9 PiperOrigin-RevId: 319808345
Python
apache-2.0
DataBiosphere/dsub,DataBiosphere/dsub
--- +++ @@ -26,4 +26,4 @@ 0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ... """ -DSUB_VERSION = '0.3.9.dev0' +DSUB_VERSION = '0.3.9'
e3cba925ea106baa99951ac7b3ee72599ee7277d
demos/fs-demo/main.py
demos/fs-demo/main.py
import random
import os

from microbit import *


if 'messages.txt' in os.listdir():
    with open('messages.txt') as message_file:
        messages = message_file.read().split('\n')

while True:
    if button_a.was_pressed():
        display.scroll(random.choice(messages))
import random
import os
import speech

from microbit import *


if 'messages.txt' in os.listdir():
    with open('messages.txt') as message_file:
        messages = message_file.read().split('\n')

while True:
    if button_a.was_pressed():
        speech.say(random.choice(messages))
Change output in fs-demo to voice.
Change output in fs-demo to voice.
Python
mit
mathisgerdes/microbit-macau
--- +++ @@ -1,5 +1,6 @@ import random import os +import speech from microbit import * @@ -10,4 +11,4 @@ while True: if button_a.was_pressed(): - display.scroll(random.choice(messages)) + speech.say(random.choice(messages))
4545d11c2462ccb6d7848d185f5fe358a51af5f6
Trimmer.py
Trimmer.py
import sublime
import sublime_plugin


class TrimmerCommand(sublime_plugin.TextCommand):
    def run(self, edit):
        view = self.view

        trailing_white_space = view.find_all("[\t ]+$")
        trailing_white_space.reverse()

        edit = view.begin_edit()

        for r in trailing_white_space:
            view.erase(edit, r)

        view.end_edit(edit)
        sublime.set_timeout(lambda: self.save(view), 10)

    def save(self, view):
        if view.file_name() is None:
            view.run_command('prompt_save_as')
        else:
            view.run_command('save')
            sublime.status_message('Trimmer: Removed trailing whitespace and saved.')
import sublime
import sublime_plugin


class TrimmerCommand(sublime_plugin.TextCommand):
    def run(self, edit):
        view = self.view

        trailing_white_space = view.find_all("[\t ]+$")
        trailing_white_space.reverse()

        for r in trailing_white_space:
            view.erase(edit, r)

        sublime.set_timeout(lambda: self.save(view), 10)

    def save(self, view):
        if view.file_name() is None:
            view.run_command('prompt_save_as')
        else:
            view.run_command('save')
            sublime.status_message('Trimmer: Removed trailing whitespace and saved.')
Remove calls to begin, end edit object.
Remove calls to begin, end edit object.
Python
mit
jonlabelle/Trimmer,jonlabelle/Trimmer
--- +++ @@ -9,12 +9,9 @@ trailing_white_space = view.find_all("[\t ]+$") trailing_white_space.reverse() - edit = view.begin_edit() - for r in trailing_white_space: view.erase(edit, r) - view.end_edit(edit) sublime.set_timeout(lambda: self.save(view), 10) def save(self, view):
ca6891f3b867fd691c0b682566ffec1fd7f0ac2a
pryvate/blueprints/simple/simple.py
pryvate/blueprints/simple/simple.py
"""Simple blueprint.""" import os from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('simple', __name__, url_prefix='/simple', template_folder='templates') @blueprint.route('', methods=['POST']) def search_simple(): """Handling pip search.""" return make_response('Not implemented', 501) @blueprint.route('', methods=['GET']) def get_simple(): """List all packages.""" packages = os.listdir(current_app.config['BASEDIR']) return render_template('simple.html', packages=packages) @blueprint.route('/<package>', methods=['GET']) @blueprint.route('/<package>/', methods=['GET']) def get_package(package): """List versions of a package.""" package_path = os.path.join(current_app.config['BASEDIR'], package.lower()) files = os.listdir(package_path) packages = [] for filename in files: if filename.endswith('md5'): with open(os.path.join(package_path, filename), 'r') as md5_digest: item = { 'name': package, 'version': filename.replace('.md5', ''), 'digest': md5_digest.read() } packages.append(item) return render_template('simple_package.html', packages=packages, letter=package[:1].lower())
"""Simple blueprint.""" import os from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('simple', __name__, url_prefix='/simple', template_folder='templates') @blueprint.route('', methods=['POST']) def search_simple(): """Handling pip search.""" return make_response('Not implemented', 501) @blueprint.route('', methods=['GET']) def get_simple(): """List all packages.""" packages = os.listdir(current_app.config['BASEDIR']) return render_template('simple.html', packages=packages) @blueprint.route('/<package>', methods=['GET']) @blueprint.route('/<package>/', methods=['GET']) def get_package(package): """List versions of a package.""" package_path = os.path.join(current_app.config['BASEDIR'], package.lower()) if os.path.isdir(package_path): files = os.listdir(package_path) packages = [] for filename in files: if filename.endswith('md5'): digest_file = os.path.join(package_path, filename) with open(digest_file, 'r') as md5_digest: item = { 'name': package, 'version': filename.replace('.md5', ''), 'digest': md5_digest.read() } packages.append(item) return render_template('simple_package.html', packages=packages, letter=package[:1].lower()) else: return make_response('404', 404)
Return 404 if package was not found instead of raising an exception
Return 404 if package was not found instead of raising an exception
Python
mit
Dinoshauer/pryvate,Dinoshauer/pryvate
--- +++ @@ -26,17 +26,21 @@ """List versions of a package.""" package_path = os.path.join(current_app.config['BASEDIR'], package.lower()) - files = os.listdir(package_path) + if os.path.isdir(package_path): + files = os.listdir(package_path) - packages = [] - for filename in files: - if filename.endswith('md5'): - with open(os.path.join(package_path, filename), 'r') as md5_digest: - item = { - 'name': package, - 'version': filename.replace('.md5', ''), - 'digest': md5_digest.read() - } - packages.append(item) - return render_template('simple_package.html', packages=packages, - letter=package[:1].lower()) + packages = [] + for filename in files: + if filename.endswith('md5'): + digest_file = os.path.join(package_path, filename) + with open(digest_file, 'r') as md5_digest: + item = { + 'name': package, + 'version': filename.replace('.md5', ''), + 'digest': md5_digest.read() + } + packages.append(item) + return render_template('simple_package.html', packages=packages, + letter=package[:1].lower()) + else: + return make_response('404', 404)
37c08c15ecc31f90429b20ee2d4561c2886036c2
pyaavso/utils.py
pyaavso/utils.py
from __future__ import unicode_literals

import logging

import requests

from .parsers import WebObsResultsParser


logger = logging.getLogger(__name__)

WEBOBS_RESULTS_URL = 'http://www.aavso.org/apps/webobs/results/'


def download_observations(observer_code):
    """
    Downloads all variable star observations by a given observer.

    Performs a series of HTTP requests to AAVSO's WebObs search and
    downloads the results page by page. Each page is then passed to
    :py:class:`~pyaavso.parsers.webobs.WebObsResultsParser` and parse results
    are added to the final observation list.
    """
    page_number = 1
    observations = []
    while True:
        logger.info('Downloading page %d...', page_number)
        response = requests.get(WEBOBS_RESULTS_URL, params={
            'obscode': observer_code,
            'num_results': 200,
            'obs_types': 'all',
            'page': page_number,
        })
        parser = WebObsResultsParser(response.content)
        observations.extend(parser.get_observations())
        # kinda silly, but there's no need for lxml machinery here
        if '>Next</a>' not in response.content:
            break
        page_number += 1
    return observations
from __future__ import unicode_literals

import logging

import requests

from .parsers import WebObsResultsParser


logger = logging.getLogger(__name__)

WEBOBS_RESULTS_URL = 'http://www.aavso.org/apps/webobs/results/'


def download_observations(observer_code):
    """
    Downloads all variable star observations by a given observer.

    Performs a series of HTTP requests to AAVSO's WebObs search and
    downloads the results page by page. Each page is then passed to
    :py:class:`~pyaavso.parsers.webobs.WebObsResultsParser` and parse results
    are added to the final observation list.
    """
    page_number = 1
    observations = []
    while True:
        logger.info('Downloading page %d...', page_number)
        response = requests.get(WEBOBS_RESULTS_URL, params={
            'obscode': observer_code,
            'num_results': 200,
            'obs_types': 'all',
            'page': page_number,
        })
        parser = WebObsResultsParser(response.text)
        observations.extend(parser.get_observations())
        # kinda silly, but there's no need for lxml machinery here
        if '>Next</a>' not in response.text:
            break
        page_number += 1
    return observations
Use response.text for automatic decoding.
Use response.text for automatic decoding.
Python
mit
zsiciarz/pyaavso
--- +++ @@ -31,10 +31,10 @@ 'obs_types': 'all', 'page': page_number, }) - parser = WebObsResultsParser(response.content) + parser = WebObsResultsParser(response.text) observations.extend(parser.get_observations()) # kinda silly, but there's no need for lxml machinery here - if '>Next</a>' not in response.content: + if '>Next</a>' not in response.text: break page_number += 1 return observations
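For context on why response.text is preferred here: in the requests library, response.content is the raw byte string while response.text is decoded to unicode using the detected charset, so substring checks against ordinary string literals behave consistently. A small sketch of the difference (the URL is a placeholder):

import requests

response = requests.get('https://example.com/')
print(type(response.content))   # bytes
print(type(response.text))      # str, decoded using the response encoding
if '</html>' in response.text:  # str compared with str, no implicit decoding
    print('received a complete HTML page')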
52e675ec6789d8ecaddae98a6b36bc8b0c3f6e1e
socketio/sdjango.py
socketio/sdjango.py
import logging

from socketio import socketio_manage
from django.conf.urls import patterns, url, include
from django.http import HttpResponse

SOCKETIO_NS = {}


class namespace(object):
    def __init__(self, name=''):
        self.name = name

    def __call__(self, handler):
        SOCKETIO_NS[self.name] = handler


def socketio(request):
    try:
        socketio_manage(request.environ, SOCKETIO_NS, request)
    except:
        logging.getLogger("socketio").error("Exception while handling socketio connection", exc_info=True)
    return HttpResponse("")


urls = patterns("", (r'', socketio))
import logging

from socketio import socketio_manage
from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt

SOCKETIO_NS = {}


class namespace(object):
    def __init__(self, name=''):
        self.name = name

    def __call__(self, handler):
        SOCKETIO_NS[self.name] = handler

@csrf_exempt
def socketio(request):
    try:
        socketio_manage(request.environ, SOCKETIO_NS, request)
    except:
        logging.getLogger("socketio").error("Exception while handling socketio connection", exc_info=True)
    return HttpResponse("")


urls = patterns("", (r'', socketio))
Remove django CSRF protection for socket.io view
Remove django CSRF protection for socket.io view
Python
bsd-3-clause
abourget/gevent-socketio,yacneyac/gevent-socketio,kazmiruk/gevent-socketio,kazmiruk/gevent-socketio,yacneyac/gevent-socketio,arnuschky/gevent-socketio,gutomaia/gevent-socketio,smurfix/gevent-socketio,arnuschky/gevent-socketio,hzruandd/gevent-socketio,theskumar-archive/gevent-socketio,abourget/gevent-socketio,gutomaia/gevent-socketio,theskumar-archive/gevent-socketio,smurfix/gevent-socketio,smurfix/gevent-socketio,gutomaia/gevent-socketio,Eugeny/gevent-socketio,bobvandevijver/gevent-socketio,Eugeny/gevent-socketio,bobvandevijver/gevent-socketio,hzruandd/gevent-socketio
--- +++ @@ -3,6 +3,7 @@ from socketio import socketio_manage from django.conf.urls import patterns, url, include from django.http import HttpResponse +from django.views.decorators.csrf import csrf_exempt SOCKETIO_NS = {} @@ -14,7 +15,7 @@ def __call__(self, handler): SOCKETIO_NS[self.name] = handler - +@csrf_exempt def socketio(request): try: socketio_manage(request.environ, SOCKETIO_NS, request)
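A minimal sketch of the csrf_exempt pattern used above, written against a generic Django function view inside a configured project; the view name and response body are illustrative, not part of gevent-socketio:

from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt


@csrf_exempt
def handshake(request):
    # CSRF middleware is skipped for this view, so POSTs that carry no
    # CSRF token (such as a socket.io handshake) are not rejected.
    return HttpResponse("ok")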
1fdceb6b8072317eff901a8f2b74db60269ca373
python/grade-school/grade_school.py
python/grade-school/grade_school.py
from collections import defaultdict
class School:

    def __init__(self):
        self.db = defaultdict(list)

    def add_student(self, name, grade):
        self.db[grade].append(name)
        self.db[grade] = sorted(self.db[grade])

    def roster(self):
        all_student_names = []
        for grade_number in sorted(self.db.keys()):
            all_student_names.extend(self.db[grade_number])
        return all_student_names

    def grade(self, grade_number):
        return self.db[grade_number]
from collections import defaultdict

class School:

    def __init__(self):
        self.db = defaultdict(list)

    def add_student(self, name, grade):
        self.db[grade].append(name)
        self.db[grade] = sorted(self.db[grade])

    def roster(self):
        all_student_names = []
        for grade_number in sorted(self.db.keys()):
            all_student_names.extend(self.db[grade_number])
        return all_student_names

    def grade(self, grade_number):
        return self.db[grade_number]
Add newline to fix lint warning
Add newline to fix lint warning
Python
mit
rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism
--- +++ @@ -1,4 +1,5 @@ from collections import defaultdict + class School: def __init__(self):
8ff8b9400adf24e082908befed7788099b01f328
bench/pact-suite/scripts/opcount_merge.py
bench/pact-suite/scripts/opcount_merge.py
#!/usr/bin/env python2.7

import sys

files = sys.argv[1:]

keys = set()
fileVals = []

for file in files:
    vals = {}
    fileVals.append(vals)
    try:
        for line in open(file).readlines():
            k, v = line.split()
            vals[k] = v
            keys.add(k)
    except Exception, e:
        print "Error in line \"%s\" of file %s" % (line, file)
        raise e

for key in sorted(keys):
    sys.stdout.write(key)
    for vals in fileVals:
        sys.stdout.write("\t")
        sys.stdout.write(str(vals.get(key, 0)))
    sys.stdout.write("\n")
#!/usr/bin/env python2.7

import sys

files = sys.argv[1:]

keys = set()
fileVals = []

for file in files:
    vals = {}
    fileVals.append(vals)
    try:
        for line in open(file).readlines():
            toks = line.split()
            if len(toks) != 2:
                print >> sys.stderr, "Bad line: %s" % repr(toks)
            else:
                k, v = toks
                vals[k] = v
                keys.add(k)
    except Exception, e:
        print >> sys.stderr, "Error in line \"%s\" of file %s" % (line, file)
        raise e

for key in sorted(keys):
    sys.stdout.write(key)
    for vals in fileVals:
        sys.stdout.write("\t")
        sys.stdout.write(str(vals.get(key, 0)))
    sys.stdout.write("\n")
Add initial data for operations counts
Add initial data for operations counts git-svn-id: 0c5512015aa96f7d3f5c3ad598bd98edc52008b1@12204 dc4e9af1-7f46-4ead-bba6-71afc04862de
Python
apache-2.0
basheersubei/swift-t,blue42u/swift-t,swift-lang/swift-t,JohnPJenkins/swift-t,swift-lang/swift-t,blue42u/swift-t,swift-lang/swift-t,swift-lang/swift-t,JohnPJenkins/swift-t,basheersubei/swift-t,JohnPJenkins/swift-t,JohnPJenkins/swift-t,JohnPJenkins/swift-t,basheersubei/swift-t,basheersubei/swift-t,basheersubei/swift-t,JohnPJenkins/swift-t,JohnPJenkins/swift-t,swift-lang/swift-t,basheersubei/swift-t,blue42u/swift-t,blue42u/swift-t,swift-lang/swift-t,blue42u/swift-t,swift-lang/swift-t,blue42u/swift-t,blue42u/swift-t,basheersubei/swift-t
--- +++ @@ -10,11 +10,15 @@ fileVals.append(vals) try: for line in open(file).readlines(): - k, v = line.split() - vals[k] = v - keys.add(k) + toks = line.split() + if len(toks) != 2: + print >> sys.stderr, "Bad line: %s" % repr(toks) + else: + k, v = toks + vals[k] = v + keys.add(k) except Exception, e: - print "Error in line \"%s\" of file %s" % (line, file) + print >> sys.stderr, "Error in line \"%s\" of file %s" % (line, file) raise e
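The print >> sys.stderr form above is Python 2's chevron syntax for sending print output to another stream; in Python 3 the same effect comes from the file= keyword. A two-line comparison, purely illustrative:

import sys

# Python 2: print >> sys.stderr, "Bad line: %s" % repr(toks)
print("Bad line: %s" % repr(['only-one-token']), file=sys.stderr)  # Python 3 spelling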
b6099b64efb6e74e754d0911bd64512b6b6b631b
quokka/modules/accounts/tests/test_model.py
quokka/modules/accounts/tests/test_model.py
# coding: utf-8

from flask.ext.testing import TestCase
from quokka import create_app
from flask.ext.security.utils import encrypt_password
from ..models import User

class TestAuthModels(TestCase):

    def setUp(self):
        self.db = self.app.extensions.get('mongoengine')
        self.user_dict = {
            'name': u'Guybrush Treepwood',
            'email': u'guybrush@monkeyisland.com',
            'password': encrypt_password(u'lechucksucks'),
        }
        self.user = User.objects.create(**self.user_dict)

    def tearDown(self):
        User.objects.all().delete()

    def create_app(self):
        return create_app(config='quokka.test_settings',
                          DEBUG=False,
                          test=True)

    def test_user_fields(self):
        self.assertIsInstance(self.user, User)
        self.assertEqual(self.user.username, u'guybrush_monkeyisland_com')
        self.assertEqual(self.user.name, u'Guybrush Treepwood')
        self.assertEqual(self.user.email, u'guybrush@monkeyisland.com')
        self.assertEqual(self.user.password, self.user_dict['password'])
        self.assertEqual(self.user.display_name, self.user.name)
# coding: utf-8

from flask.ext.testing import TestCase
from quokka import create_app
from flask.ext.security.utils import encrypt_password
from ..models import User

class TestAuthModels(TestCase):

    def setUp(self):
        self.user_dict = {
            'name': u'Guybrush Treepwood',
            'email': u'guybrush@monkeyisland.com',
            'password': encrypt_password(u'lechucksucks'),
        }
        self.user = User.objects.create(**self.user_dict)

    def tearDown(self):
        User.objects.all().delete()

    def create_app(self):
        return create_app(config='quokka.test_settings',
                          DEBUG=False,
                          test=True)

    def test_user_fields(self):
        self.assertIsInstance(self.user, User)
        self.assertEqual(self.user.username, u'guybrush_monkeyisland_com')
        self.assertEqual(self.user.name, u'Guybrush Treepwood')
        self.assertEqual(self.user.email, u'guybrush@monkeyisland.com')
        self.assertEqual(self.user.password, self.user_dict['password'])
        self.assertEqual(self.user.display_name, self.user.name)
Remove unused config on accounts tests
Remove unused config on accounts tests
Python
mit
maurobaraldi/quokka,ChengChiongWah/quokka,wushuyi/quokka,cbeloni/quokka,alexandre/quokka,wushuyi/quokka,cbeloni/quokka,wushuyi/quokka,fdumpling/quokka,fdumpling/quokka,lnick/quokka,maurobaraldi/quokka,fdumpling/quokka,ChengChiongWah/quokka,romulocollopy/quokka,CoolCloud/quokka,romulocollopy/quokka,CoolCloud/quokka,Ckai1991/quokka,cbeloni/quokka,lnick/quokka,felipevolpone/quokka,Ckai1991/quokka,romulocollopy/quokka,felipevolpone/quokka,maurobaraldi/quokka,cbeloni/quokka,lnick/quokka,romulocollopy/quokka,Ckai1991/quokka,ChengChiongWah/quokka,fdumpling/quokka,wushuyi/quokka,felipevolpone/quokka,maurobaraldi/quokka,lnick/quokka,CoolCloud/quokka,felipevolpone/quokka,ChengChiongWah/quokka,Ckai1991/quokka,CoolCloud/quokka,alexandre/quokka
--- +++ @@ -8,7 +8,6 @@ class TestAuthModels(TestCase): def setUp(self): - self.db = self.app.extensions.get('mongoengine') self.user_dict = { 'name': u'Guybrush Treepwood', 'email': u'guybrush@monkeyisland.com',
24e780dd0f30e4bf9696a6fd185d20fb297f0bd0
rsk_mind/transformer/transformer.py
rsk_mind/transformer/transformer.py
class Transformer(object):
    class Feats():
        exclude = None

    def __init__(self):
        for field in self.get_feats():
            getattr(self.Feats, field).bind(field, self)

    def get_feats(self):
        return [x for x in dir(self.Feats) if not (x.startswith('__') or x in ['exclude'])]

    def get_transformer_func(self, feat_name):
        return getattr(self.Feats, feat_name).transform
class Transformer(object):
    """
    Base class for all transformer
    """

    class Feats:
        """
        Define feats on dataset
        """
        exclude = None

    def __init__(self):
        for field in self.get_feats():
            getattr(self.Feats, field).bind(field, self)

    def get_feats(self):
        """

        :return: a list of feats
        """
        return [x for x in dir(self.Feats) if not (x.startswith('__') or x in ['exclude'])]

    def get_transformer_func(self, feat_name):
        """

        :param feat_name: name of feat
        :return: a transformer function on feat
        """
        return getattr(self.Feats, feat_name).transform

    def get_excluded_feats(self):
        """

        :return: a list with excluded feats
        """
        return self.Feats.exclude
Add documentation and some methods
Add documentation and some methods
Python
mit
rsk-mind/rsk-mind-framework
--- +++ @@ -1,5 +1,12 @@ class Transformer(object): - class Feats(): + """ + Base class for all transformer + """ + + class Feats: + """ + Define feats on dataset + """ exclude = None def __init__(self): @@ -7,7 +14,23 @@ getattr(self.Feats, field).bind(field, self) def get_feats(self): + """ + + :return: a list of feats + """ return [x for x in dir(self.Feats) if not (x.startswith('__') or x in ['exclude'])] def get_transformer_func(self, feat_name): + """ + + :param feat_name: name of feat + :return: a transformer function on feat + """ return getattr(self.Feats, feat_name).transform + + def get_excluded_feats(self): + """ + + :return: a list with excluded feats + """ + return self.Feats.exclude
644c69a25d81ae4473d19bfe1faa6b7fe10e8afd
dsub/_dsub_version.py
dsub/_dsub_version.py
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.

This must remain small and dependency-free so that any dsub module may import
it without creating circular dependencies.

Note that this module is parsed as a text file by setup.py and changes to
the format of this file could break setup.py.

The version should follow formatting requirements specified in PEP-440.
  - https://www.python.org/dev/peps/pep-0440

A typical release sequence will be versioned as:
  0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""

DSUB_VERSION = '0.3.8'
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.

This must remain small and dependency-free so that any dsub module may import
it without creating circular dependencies.

Note that this module is parsed as a text file by setup.py and changes to
the format of this file could break setup.py.

The version should follow formatting requirements specified in PEP-440.
  - https://www.python.org/dev/peps/pep-0440

A typical release sequence will be versioned as:
  0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""

DSUB_VERSION = '0.3.9.dev0'
Update dsub version to 0.3.9.dev0
Update dsub version to 0.3.9.dev0 PiperOrigin-RevId: 313432033
Python
apache-2.0
DataBiosphere/dsub,DataBiosphere/dsub
--- +++ @@ -26,4 +26,4 @@ 0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ... """ -DSUB_VERSION = '0.3.8' +DSUB_VERSION = '0.3.9.dev0'
a6ae4171de33dd77e9109523380c1330d4037f9f
gengine/app/tests/runner.py
gengine/app/tests/runner.py
from gengine.app.tests import db as db
from gengine.metadata import init_declarative_base, init_session
import unittest
import os
import pkgutil
import testing.redis
import logging

log = logging.getLogger(__name__)

init_session()
init_declarative_base()

__path__ = [x[0] for x in os.walk(os.path.dirname(__file__))]

def create_test_suite():
    suite = unittest.TestSuite()
    for imp, modname, _ in pkgutil.walk_packages(__path__):
        mod = imp.find_module(modname).load_module(modname)
        for test in unittest.defaultTestLoader.loadTestsFromModule(mod):
            suite.addTests(test)
    return suite

if __name__=="__main__":
    exit = 1
    try:
        redis = testing.redis.RedisServer()
        from gengine.base.cache import setup_redis_cache
        dsn = redis.dsn()
        setup_redis_cache(dsn["host"], dsn["port"], dsn["db"])

        from gengine.app.cache import init_caches
        init_caches()

        db.setupDB()
        testSuite = create_test_suite()
        text_runner = unittest.TextTestRunner(failfast=True).run(testSuite)
        if text_runner.wasSuccessful():
            exit = 0
    finally:
        try:
            db.unsetupDB()
        except:
            log.exception()
        try:
            redis.stop()
        except:
            log.exception()
    sys.exit(exit)
from gengine.app.tests import db as db
from gengine.metadata import init_declarative_base, init_session
import unittest
import os
import pkgutil
import testing.redis
import logging
import sys

log = logging.getLogger(__name__)

init_session()
init_declarative_base()

__path__ = [x[0] for x in os.walk(os.path.dirname(__file__))]

def create_test_suite():
    suite = unittest.TestSuite()
    for imp, modname, _ in pkgutil.walk_packages(__path__):
        mod = imp.find_module(modname).load_module(modname)
        for test in unittest.defaultTestLoader.loadTestsFromModule(mod):
            suite.addTests(test)
    return suite

if __name__=="__main__":
    exit = 1
    try:
        redis = testing.redis.RedisServer()
        from gengine.base.cache import setup_redis_cache
        dsn = redis.dsn()
        setup_redis_cache(dsn["host"], dsn["port"], dsn["db"])

        from gengine.app.cache import init_caches
        init_caches()

        db.setupDB()
        testSuite = create_test_suite()
        text_runner = unittest.TextTestRunner(failfast=True).run(testSuite)
        if text_runner.wasSuccessful():
            exit = 0
    finally:
        try:
            db.unsetupDB()
        except:
            log.exception()
        try:
            redis.stop()
        except:
            log.exception()
    sys.exit(exit)
Add missing import for sys
Add missing import for sys
Python
mit
ActiDoo/gamification-engine,ActiDoo/gamification-engine,ActiDoo/gamification-engine,ActiDoo/gamification-engine
--- +++ @@ -5,6 +5,7 @@ import pkgutil import testing.redis import logging +import sys log = logging.getLogger(__name__)
2ebb667b38b3d74003948347f411f177ca584834
boardinghouse/contrib/template/models.py
boardinghouse/contrib/template/models.py
from django.db import models
from django.utils import six

from boardinghouse.base import SharedSchemaMixin
from boardinghouse.schema import activate_schema, deactivate_schema


@six.python_2_unicode_compatible
class SchemaTemplate(SharedSchemaMixin, models.Model):
    """
    A ``boardinghouse.contrib.template.models.SchemaTemplate`` can be used
    for creating a new schema complete with some initial data.
    """
    template_schema_id = models.AutoField(primary_key=True)
    name = models.CharField(max_length=128, unique=True)
    is_active = models.BooleanField(default=True)
    description = models.TextField(null=True, blank=True)

    class Meta:
        default_permissions = ('add', 'change', 'delete', 'view', 'activate', 'clone')
        verbose_name_plural = u'template schemata'

    def __str__(self):
        return self.name

    @property
    def schema(self):
        return '__template_{}'.format(self.pk)

    def activate(self):
        activate_schema(self.schema)

    def deactivate(self):
        deactivate_schema()
from django.db import models
from django.utils import six
from django.utils.functional import lazy

from boardinghouse.base import SharedSchemaMixin
from boardinghouse.schema import activate_schema, deactivate_schema, get_schema_model


def verbose_name_plural():
    return u'template {}'.format(get_schema_model()._meta.verbose_name_plural)


def verbose_name():
    return u'template {}'.format(get_schema_model()._meta.verbose_name)


@six.python_2_unicode_compatible
class SchemaTemplate(SharedSchemaMixin, models.Model):
    """
    A ``boardinghouse.contrib.template.models.SchemaTemplate`` can be used
    for creating a new schema complete with some initial data.
    """
    template_schema_id = models.AutoField(primary_key=True)
    name = models.CharField(max_length=128, unique=True)
    is_active = models.BooleanField(default=True)
    description = models.TextField(null=True, blank=True)

    class Meta:
        default_permissions = ('add', 'change', 'delete', 'view', 'activate', 'clone')
        verbose_name = lazy(verbose_name, six.text_type)()
        verbose_name_plural = lazy(verbose_name_plural, six.text_type)()

    def __str__(self):
        return self.name

    @property
    def schema(self):
        return '__template_{}'.format(self.pk)

    def activate(self):
        activate_schema(self.schema)

    def deactivate(self):
        deactivate_schema()
Use 'template ...' for the SchemaTemplate verbose_name*
Use 'template ...' for the SchemaTemplate verbose_name*
Python
bsd-3-clause
schinckel/django-boardinghouse,schinckel/django-boardinghouse,schinckel/django-boardinghouse
--- +++ @@ -1,8 +1,17 @@ from django.db import models from django.utils import six +from django.utils.functional import lazy from boardinghouse.base import SharedSchemaMixin -from boardinghouse.schema import activate_schema, deactivate_schema +from boardinghouse.schema import activate_schema, deactivate_schema, get_schema_model + + +def verbose_name_plural(): + return u'template {}'.format(get_schema_model()._meta.verbose_name_plural) + + +def verbose_name(): + return u'template {}'.format(get_schema_model()._meta.verbose_name) @six.python_2_unicode_compatible @@ -18,7 +27,8 @@ class Meta: default_permissions = ('add', 'change', 'delete', 'view', 'activate', 'clone') - verbose_name_plural = u'template schemata' + verbose_name = lazy(verbose_name, six.text_type)() + verbose_name_plural = lazy(verbose_name_plural, six.text_type)() def __str__(self): return self.name
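The lazy() helper used above wraps a callable so evaluation is postponed until the value is actually needed, which keeps get_schema_model() from being called at import time. A small sketch of the idiom with an invented function; it only needs django.utils.functional, not a configured project:

from django.utils.functional import lazy


def plural_label():
    # Stand-in for a lookup that is not safe to run at import time.
    return u'template schemata'

# lazy(func, str)() returns a promise; forcing it with str() calls the function.
label = lazy(plural_label, str)()
print(str(label))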
0ea4abe8b2e44bdd02308ad590ffb1e846201300
terms/sitemaps.py
terms/sitemaps.py
from django.contrib.sitemaps import Sitemap

from .models import Term

class TermsSitemap(Sitemap):
    changefreq = 'yearly'
    priority = 0.1

    def items(self):
        return Term.objects.all()
from django.contrib.sitemaps import Sitemap
from django.db.models import Q

from .models import Term

class TermsSitemap(Sitemap):
    changefreq = 'yearly'
    priority = 0.1

    def items(self):
        return Term.objects.filter(Q(url__startswith='/') | Q(url=''))
Exclude external urls from the sitemap.
Exclude external urls from the sitemap.
Python
bsd-3-clause
philippeowagner/django-terms,BertrandBordage/django-terms,philippeowagner/django-terms,BertrandBordage/django-terms
--- +++ @@ -1,4 +1,5 @@ from django.contrib.sitemaps import Sitemap +from django.db.models import Q from .models import Term @@ -7,4 +8,4 @@ priority = 0.1 def items(self): - return Term.objects.all() + return Term.objects.filter(Q(url__startswith='/') | Q(url=''))
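For reference, Q objects are Django's way to express OR conditions inside a single filter call, which is what lets the sitemap keep relative URLs and empty URLs while dropping external links. A hedged sketch of just the condition, inside a Django project (building a Q does not hit the database):

from django.db.models import Q

# Matches terms whose url starts with '/' (local) or is empty; anything
# pointing at another site falls outside the sitemap.
local_or_empty = Q(url__startswith='/') | Q(url='')
print(local_or_empty)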
ddf2075228a8c250cf75ec85914801262cb73177
zerver/migrations/0032_verify_all_medium_avatar_images.py
zerver/migrations/0032_verify_all_medium_avatar_images.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps

from zerver.lib.upload import upload_backend


def verify_medium_avatar_image(apps, schema_editor):
    # type: (StateApps, DatabaseSchemaEditor) -> None
    user_profile_model = apps.get_model('zerver', 'UserProfile')
    for user_profile in user_profile_model.objects.filter(avatar_source=u"U"):
        upload_backend.ensure_medium_avatar_image(user_profile)


class Migration(migrations.Migration):

    dependencies = [
        ('zerver', '0031_remove_system_avatar_source'),
    ]

    operations = [
        migrations.RunPython(verify_medium_avatar_image)
    ]
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.conf import settings
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from mock import patch
from zerver.lib.utils import make_safe_digest

from zerver.lib.upload import upload_backend
from zerver.models import UserProfile
from typing import Text
import hashlib
# We hackishly patch this function in order to revert it to the state
# it had when this migration was first written. This is a balance
# between copying in a historical version of hundreds of lines of code
# from zerver.lib.upload (which would pretty annoying, but would be a
# pain) and just using the current version, which doesn't work
# since we rearranged the avatars in Zulip 1.6.
def patched_user_avatar_path(user_profile):
    # type: (UserProfile) -> Text
    email = user_profile.email
    user_key = email.lower() + settings.AVATAR_SALT
    return make_safe_digest(user_key, hashlib.sha1)


@patch('zerver.lib.upload.user_avatar_path', patched_user_avatar_path)
def verify_medium_avatar_image(apps, schema_editor):
    # type: (StateApps, DatabaseSchemaEditor) -> None
    user_profile_model = apps.get_model('zerver', 'UserProfile')
    for user_profile in user_profile_model.objects.filter(avatar_source=u"U"):
        upload_backend.ensure_medium_avatar_image(user_profile)


class Migration(migrations.Migration):

    dependencies = [
        ('zerver', '0031_remove_system_avatar_source'),
    ]

    operations = [
        migrations.RunPython(verify_medium_avatar_image)
    ]
Make migration 0032 use an old version of user_avatar_path.
Make migration 0032 use an old version of user_avatar_path. This fixes upgrading from very old Zulip servers (e.g. 1.4.3) all the way to current. Fixes: #6516.
Python
apache-2.0
hackerkid/zulip,kou/zulip,amanharitsh123/zulip,brockwhittaker/zulip,showell/zulip,hackerkid/zulip,rishig/zulip,verma-varsha/zulip,synicalsyntax/zulip,zulip/zulip,amanharitsh123/zulip,showell/zulip,punchagan/zulip,amanharitsh123/zulip,punchagan/zulip,timabbott/zulip,rht/zulip,tommyip/zulip,eeshangarg/zulip,Galexrt/zulip,eeshangarg/zulip,rishig/zulip,rishig/zulip,Galexrt/zulip,dhcrzf/zulip,rht/zulip,shubhamdhama/zulip,rht/zulip,tommyip/zulip,mahim97/zulip,kou/zulip,kou/zulip,timabbott/zulip,zulip/zulip,brainwane/zulip,verma-varsha/zulip,verma-varsha/zulip,punchagan/zulip,brainwane/zulip,zulip/zulip,eeshangarg/zulip,timabbott/zulip,synicalsyntax/zulip,brockwhittaker/zulip,brockwhittaker/zulip,brainwane/zulip,showell/zulip,brainwane/zulip,brainwane/zulip,rishig/zulip,tommyip/zulip,Galexrt/zulip,tommyip/zulip,synicalsyntax/zulip,zulip/zulip,jackrzhang/zulip,eeshangarg/zulip,andersk/zulip,kou/zulip,punchagan/zulip,rht/zulip,kou/zulip,andersk/zulip,kou/zulip,timabbott/zulip,jackrzhang/zulip,synicalsyntax/zulip,eeshangarg/zulip,brockwhittaker/zulip,tommyip/zulip,brainwane/zulip,dhcrzf/zulip,dhcrzf/zulip,rishig/zulip,showell/zulip,showell/zulip,andersk/zulip,hackerkid/zulip,brockwhittaker/zulip,andersk/zulip,showell/zulip,timabbott/zulip,dhcrzf/zulip,mahim97/zulip,jackrzhang/zulip,andersk/zulip,hackerkid/zulip,jackrzhang/zulip,jackrzhang/zulip,amanharitsh123/zulip,shubhamdhama/zulip,amanharitsh123/zulip,mahim97/zulip,kou/zulip,tommyip/zulip,dhcrzf/zulip,shubhamdhama/zulip,rht/zulip,rht/zulip,eeshangarg/zulip,shubhamdhama/zulip,jackrzhang/zulip,shubhamdhama/zulip,zulip/zulip,punchagan/zulip,rishig/zulip,shubhamdhama/zulip,hackerkid/zulip,shubhamdhama/zulip,dhcrzf/zulip,timabbott/zulip,rishig/zulip,punchagan/zulip,zulip/zulip,verma-varsha/zulip,showell/zulip,Galexrt/zulip,mahim97/zulip,Galexrt/zulip,verma-varsha/zulip,punchagan/zulip,hackerkid/zulip,jackrzhang/zulip,Galexrt/zulip,brainwane/zulip,hackerkid/zulip,synicalsyntax/zulip,eeshangarg/zulip,zulip/zulip,dhcrzf/zulip,andersk/zulip,brockwhittaker/zulip,mahim97/zulip,amanharitsh123/zulip,timabbott/zulip,synicalsyntax/zulip,rht/zulip,tommyip/zulip,synicalsyntax/zulip,verma-varsha/zulip,mahim97/zulip,Galexrt/zulip,andersk/zulip
--- +++ @@ -1,13 +1,31 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals +from django.conf import settings from django.db import migrations from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor from django.db.migrations.state import StateApps +from mock import patch +from zerver.lib.utils import make_safe_digest from zerver.lib.upload import upload_backend +from zerver.models import UserProfile +from typing import Text +import hashlib +# We hackishly patch this function in order to revert it to the state +# it had when this migration was first written. This is a balance +# between copying in a historical version of hundreds of lines of code +# from zerver.lib.upload (which would pretty annoying, but would be a +# pain) and just using the current version, which doesn't work +# since we rearranged the avatars in Zulip 1.6. +def patched_user_avatar_path(user_profile): + # type: (UserProfile) -> Text + email = user_profile.email + user_key = email.lower() + settings.AVATAR_SALT + return make_safe_digest(user_key, hashlib.sha1) +@patch('zerver.lib.upload.user_avatar_path', patched_user_avatar_path) def verify_medium_avatar_image(apps, schema_editor): # type: (StateApps, DatabaseSchemaEditor) -> None user_profile_model = apps.get_model('zerver', 'UserProfile')
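A self-contained sketch of the patching technique that migration relies on: mock.patch swaps a module attribute for the duration of the decorated call, so the caller sees the historical helper instead of the current one. All names below are invented for illustration:

import hashlib
from unittest import mock


def current_digest(value):
    return hashlib.sha256(value.encode()).hexdigest()


def legacy_digest(value):
    # Historical behaviour that the patched caller should observe.
    return hashlib.sha1(value.encode()).hexdigest()


@mock.patch(__name__ + '.current_digest', legacy_digest)
def run_migration_step():
    # While this function runs, current_digest resolves to legacy_digest.
    return current_digest('avatar-key')


print(run_migration_step())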
674dfb000cca79998674cd0b490ae6f3f992b313
blazarclient/tests/__init__.py
blazarclient/tests/__init__.py
# Copyright (c) 2014 Mirantis.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.


from oslotest import base
from oslotest import mockpatch


class TestCase(base.BaseTestCase):
    """Test case base class for all unit tests."""

    def patch(self, obj, attr):
        """Returns a Mocked object on the patched attribute."""
        mockfixture = self.useFixture(mockpatch.PatchObject(obj, attr))
        return mockfixture.mock
# Copyright (c) 2014 Mirantis.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import fixtures
from oslotest import base


class TestCase(base.BaseTestCase):
    """Test case base class for all unit tests."""

    def patch(self, obj, attr):
        """Returns a Mocked object on the patched attribute."""
        mockfixture = self.useFixture(fixtures.MockPatchObject(obj, attr))
        return mockfixture.mock
Use fixtures instead of deprecated mockpatch module
Use fixtures instead of deprecated mockpatch module The mockpatch module of oslotest is deprecated since version 1.13 and may be removed in version 2.0. Use fixtures.Mock* classes instead. Change-Id: I0ea834d41664efe84aa28ef2362467e2ad8b1928
Python
apache-2.0
openstack/python-blazarclient,ChameleonCloud/python-blazarclient,stackforge/python-blazarclient
--- +++ @@ -14,9 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. - +import fixtures from oslotest import base -from oslotest import mockpatch class TestCase(base.BaseTestCase): @@ -24,5 +23,5 @@ def patch(self, obj, attr): """Returns a Mocked object on the patched attribute.""" - mockfixture = self.useFixture(mockpatch.PatchObject(obj, attr)) + mockfixture = self.useFixture(fixtures.MockPatchObject(obj, attr)) return mockfixture.mock
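A rough usage sketch of the replacement fixture named above, assuming the fixtures and testtools packages are available; the class under test is fabricated:

import fixtures
import testtools


class Service(object):
    def fetch(self):
        return 'real value'


class ServiceTest(testtools.TestCase):
    def test_fetch_is_patched(self):
        # MockPatchObject swaps Service.fetch for a mock for the duration of
        # the test and restores the original during cleanup.
        patched = self.useFixture(fixtures.MockPatchObject(Service, 'fetch'))
        patched.mock.return_value = 'stubbed value'
        self.assertEqual('stubbed value', Service().fetch())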
191d73fb6d30b691da8d9c55bfd36f055aea19d5
backend/pokehelper.py
backend/pokehelper.py
import json
import os


class Pokehelper(object):

    def __init__(self):
        basepath = os.path.dirname(__file__)
        filepath = os.path.abspath(os.path.join(basepath, 'data/pokemon.json' ))
        with open(filepath) as pokejson:
            self.pokelist = json.load(pokejson)

###
### LIST STARTS AT 0, EVERY PokeNumber needs a -1
###

    def get_pokefamily(self, pokemon_number):
        return self.pokelist[pokemon_number-1]['family']

    def get_evolution_name(self, pokemon_number):
        # NOT a safe method to use, just for testing purposes
        return self.pokelist[pokemon_number]['name']

    def get_pokename(self, poke_number):
        return self.pokelist[poke_number-1]['name']

    def get_base_attack(self, poke_number):
        return self.pokelist[poke_number-1]['stats']['attack']

    def get_base_defense(self, poke_number):
        return self.pokelist[poke_number-1]['stats']['defense']
import json
import os


emptymon = {'moves1': [], 'family': 1, 'name': 'not-in-database', 'moves2': [], 'type2': 'nil', 'id': -1, 'candy': -1, 'type1': 'nil', 'stats': {'stamina': -1, 'attack': -1, 'defense': -1}}


class Pokehelper(object):

    def __init__(self):
        basepath = os.path.dirname(__file__)
        filepath = os.path.abspath(os.path.join(basepath, 'data/pokemon.json'))
        with open(filepath) as pokejson:
            self.pokelist = json.load(pokejson)

    ###
    # LIST STARTS AT 0, EVERY PokeNumber needs a -1
    ###

    def get_pokefamily(self, poke_number):
        if (poke_number > 151):
            return emptymon['family']
        return self.pokelist[poke_number-1]['family']

    def get_evolution_name(self, poke_number):
        # NOT a safe method to use, just for testing purposes
        if (poke_number > 151):
            return emptymon['name']
        return self.pokelist[poke_number]['name']

    def get_pokename(self, poke_number):
        if (poke_number > 151):
            return emptymon['name']
        return self.pokelist[poke_number-1]['name']

    def get_base_attack(self, poke_number):
        if (poke_number > 151):
            return emptymon['stats']['attack']
        return self.pokelist[poke_number-1]['stats']['attack']

    def get_base_defense(self, poke_number):
        if (poke_number > 151):
            return emptymon['stats']['defense']
        return self.pokelist[poke_number-1]['stats']['defense']
Add fallback if pokemon_id > 151
Add fallback if pokemon_id > 151
Python
mit
Phaetec/pogo-cruncher,Phaetec/pogo-cruncher,Phaetec/pogo-cruncher
--- +++ @@ -1,31 +1,44 @@ import json import os + +emptymon = {'moves1': [], 'family': 1, 'name': 'not-in-database', 'moves2': [], 'type2': 'nil', 'id': -1, 'candy': -1, 'type1': 'nil', 'stats': {'stamina': -1, 'attack': -1, 'defense': -1}} + class Pokehelper(object): def __init__(self): basepath = os.path.dirname(__file__) - filepath = os.path.abspath(os.path.join(basepath, 'data/pokemon.json' )) + filepath = os.path.abspath(os.path.join(basepath, 'data/pokemon.json')) with open(filepath) as pokejson: self.pokelist = json.load(pokejson) -### -### LIST STARTS AT 0, EVERY PokeNumber needs a -1 -### + ### + # LIST STARTS AT 0, EVERY PokeNumber needs a -1 + ### - def get_pokefamily(self, pokemon_number): - return self.pokelist[pokemon_number-1]['family'] + def get_pokefamily(self, poke_number): + if (poke_number > 151): + return emptymon['family'] + return self.pokelist[poke_number-1]['family'] - def get_evolution_name(self, pokemon_number): + def get_evolution_name(self, poke_number): # NOT a safe method to use, just for testing purposes - return self.pokelist[pokemon_number]['name'] + if (poke_number > 151): + return emptymon['name'] + return self.pokelist[poke_number]['name'] def get_pokename(self, poke_number): + if (poke_number > 151): + return emptymon['name'] return self.pokelist[poke_number-1]['name'] def get_base_attack(self, poke_number): + if (poke_number > 151): + return emptymon['stats']['attack'] return self.pokelist[poke_number-1]['stats']['attack'] def get_base_defense(self, poke_number): + if (poke_number > 151): + return emptymon['stats']['defense'] return self.pokelist[poke_number-1]['stats']['defense']
4b3e2289dbf20c0e2a7e0f83c7bd5963f2aa311f
longshot.py
longshot.py
HOME_URL = 'https://github.com/ftobia/longshot/blob/master/longshot.py'


def upgrade():
    backup_self()
    download_and_overwrite()
    restart()


def backup_self():
    import shutil
    new_name = __file__ + '.bak'
    shutil.copy(__file__, new_name)


def download_and_overwrite():
    import urllib2
    response = urllib2.urlopen(HOME_URL)
    with open(__file__, 'w') as f:
        f.write(response.read())


def restart():
    import os
    os.execlp('python', __name__)


if __name__ == '__main__':
    backup_self()
    download_and_overwrite()
HOME_URL = 'https://raw.githubusercontent.com/ftobia/longshot/develop/longshot.py'


def upgrade():
    backup_self()
    download_and_overwrite()
    restart()


def backup_self():
    import shutil
    new_name = __file__ + '.bak'
    shutil.copy(__file__, new_name)


def download_and_overwrite():
    import urllib2
    response = urllib2.urlopen(HOME_URL)
    with open(__file__, 'w') as f:
        f.write(response.read())


def restart():
    import os
    os.execlp('python', __name__)


if __name__ == '__main__':
    backup_self()
    download_and_overwrite()
Use the right download URL.
Use the right download URL.
Python
bsd-3-clause
ftobia/longshot
--- +++ @@ -1,6 +1,6 @@ -HOME_URL = 'https://github.com/ftobia/longshot/blob/master/longshot.py' +HOME_URL = 'https://raw.githubusercontent.com/ftobia/longshot/develop/longshot.py' def upgrade():
7574528d839dc627ea53032b547e0e1c23a51f6b
rdioexport/_client/__init__.py
rdioexport/_client/__init__.py
import json

from ._base import get_base_rdio_client


class _RdioExportClient(object):
    def __init__(self, base_client):
        self.base_client = base_client

    def get_current_user_key(self):
        return self.base_client.call('currentUser')['key']

    def get_collection_by_album(self, batch_size=100):
        current_user_key = self.get_current_user_key()

        start = 0
        result = []
        while True:
            batch = self.base_client.call(
                'getAlbumsInCollection',
                user=current_user_key,
                sort='dateAdded',
                start=start,
                count=batch_size,
                extras=json.dumps([
                    {'field': '*', 'exclude': True},
                    {'field': 'key'},
                    {'field': 'artist'},
                    {'field': 'trackKeys'},
                ]),
            )

            for album in batch:
                yield album

            if (len(batch) < batch_size):
                break
            else:
                start += batch_size

    def get_album_data(self, album_key):
        return self.base_client.call(
            'get',
            keys=album_key,
            extras=json.dumps([
                {'field': '*'},
                {
                    'field': 'track',
                    'extras': [
                        {'field': '*'},
                    ],
                },
            ]),
        )


def get_rdio_client():
    base_client = get_base_rdio_client()
    return _RdioExportClient(base_client)
import json

from ._base import get_base_rdio_client


class _RdioExportClient(object):
    def __init__(self, base_client):
        self.base_client = base_client

    def get_current_user_key(self):
        return self.base_client.call('currentUser')['key']

    def get_collection_by_album(self, batch_size=100):
        current_user_key = self.get_current_user_key()

        start = 0
        result = []
        while True:
            batch = self.base_client.call(
                'getAlbumsInCollection',
                user=current_user_key,
                sort='dateAdded',
                start=start,
                count=batch_size,
                extras=json.dumps([
                    {'field': '*', 'exclude': True},
                    {'field': 'key'},
                    {'field': 'trackKeys'},
                ]),
            )

            for album in batch:
                yield album

            if (len(batch) < batch_size):
                break
            else:
                start += batch_size

    def get_album_data(self, album_key):
        return self.base_client.call(
            'get',
            keys=album_key,
            extras=json.dumps([
                {'field': '*'},
                {
                    'field': 'track',
                    'extras': [
                        {'field': '*'},
                    ],
                },
            ]),
        )


def get_rdio_client():
    base_client = get_base_rdio_client()
    return _RdioExportClient(base_client)
Remove unused field from request.
Remove unused field from request.
Python
isc
alexhanson/rdio-export
--- +++ @@ -25,7 +25,6 @@ extras=json.dumps([ {'field': '*', 'exclude': True}, {'field': 'key'}, - {'field': 'artist'}, {'field': 'trackKeys'}, ]), )
e5d78dcfca7afffda7126e4e71944f40cdd9c272
tests/__init__.py
tests/__init__.py
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#

# All tests in the test suite.
__all__ = ( "bitfield_tests", "zscii_tests" )
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#

# All tests in the test suite.
__all__ = ( "bitfield_tests", "zscii_tests", "lexer_tests", "glk_tests" )
Make run_tests run all tests if no arguments are provided.
Make run_tests run all tests if no arguments are provided.
Python
bsd-3-clause
sussman/zvm,sussman/zvm
--- +++ @@ -4,4 +4,4 @@ # # All tests in the test suite. -__all__ = ( "bitfield_tests", "zscii_tests" ) +__all__ = ( "bitfield_tests", "zscii_tests", "lexer_tests", "glk_tests" )
65cd819b73c4a28b67a30b46b264b330d9967582
flicks/users/forms.py
flicks/users/forms.py
from django import forms

from tower import ugettext_lazy as _lazy

from flicks.base.util import country_choices
from flicks.users.models import UserProfile


class UserProfileForm(forms.ModelForm):
    # L10n: Used in a choice field where users can choose between receiving
    # L10n: HTML-based or Text-only newsletter emails.
    NEWSLETTER_FORMATS = (('html', 'HTML'), ('text', _lazy('Text')))

    privacy_policy_agree = forms.BooleanField(required=True)
    mailing_list_signup = forms.BooleanField(required=False)
    mailing_list_format = forms.ChoiceField(required=False,
                                            choices=NEWSLETTER_FORMATS,
                                            initial='html')

    class Meta:
        model = UserProfile
        fields = ('full_name', 'nickname', 'country', 'address1', 'address2',
                  'city', 'mailing_country', 'state', 'postal_code')
        widgets = {
            'full_name': forms.TextInput(attrs={'required': 'required'}),
            'privacy_policy_agree': forms.CheckboxInput(
                attrs={'required': 'required'}),
        }

    def __init__(self, *args, **kwargs):
        super(UserProfileForm, self).__init__(*args, **kwargs)

        # Localize countries list
        self.fields['country'].choices = country_choices(allow_empty=False)
        self.fields['mailing_country'].choices = country_choices()
from django import forms

from tower import ugettext_lazy as _lazy

from flicks.base.util import country_choices
from flicks.users.models import UserProfile


class UserProfileForm(forms.ModelForm):
    # L10n: Used in a choice field where users can choose between receiving
    # L10n: HTML-based or Text-only newsletter emails.
    NEWSLETTER_FORMATS = (('html', 'HTML'), ('text', _lazy('Text')))

    privacy_policy_agree = forms.BooleanField(
        required=True,
        widget=forms.CheckboxInput(attrs={'required': 'required'}))

    mailing_list_signup = forms.BooleanField(required=False)
    mailing_list_format = forms.ChoiceField(required=False,
                                            choices=NEWSLETTER_FORMATS,
                                            initial='html')

    class Meta:
        model = UserProfile
        fields = ('full_name', 'nickname', 'country', 'address1', 'address2',
                  'city', 'mailing_country', 'state', 'postal_code')
        widgets = {
            'full_name': forms.TextInput(attrs={'required': 'required'}),
        }

    def __init__(self, *args, **kwargs):
        super(UserProfileForm, self).__init__(*args, **kwargs)

        # Localize countries list
        self.fields['country'].choices = country_choices(allow_empty=False)
        self.fields['mailing_country'].choices = country_choices()
Make privacy checkbox on user form required via required attribute.
Make privacy checkbox on user form required via required attribute.
Python
bsd-3-clause
mozilla/firefox-flicks,mozilla/firefox-flicks,mozilla/firefox-flicks,mozilla/firefox-flicks
--- +++ @@ -11,7 +11,10 @@ # L10n: HTML-based or Text-only newsletter emails. NEWSLETTER_FORMATS = (('html', 'HTML'), ('text', _lazy('Text'))) - privacy_policy_agree = forms.BooleanField(required=True) + privacy_policy_agree = forms.BooleanField( + required=True, + widget=forms.CheckboxInput(attrs={'required': 'required'})) + mailing_list_signup = forms.BooleanField(required=False) mailing_list_format = forms.ChoiceField(required=False, choices=NEWSLETTER_FORMATS, @@ -23,8 +26,6 @@ 'city', 'mailing_country', 'state', 'postal_code') widgets = { 'full_name': forms.TextInput(attrs={'required': 'required'}), - 'privacy_policy_agree': forms.CheckboxInput( - attrs={'required': 'required'}), } def __init__(self, *args, **kwargs):
1e6ccfe615ee5d3e873e341a3d38553c3b07a3a0
athumb/validators.py
athumb/validators.py
from django.conf import settings from django.core.validators import ValidationError # A list of allowable thumbnail file extensions. ALLOWABLE_THUMBNAIL_EXTENSIONS = getattr( settings, 'ALLOWABLE_THUMBNAIL_EXTENSIONS', ['png', 'jpg', 'jpeg', 'gif']) class ImageUploadExtensionValidator(object): """ Perform some basic image uploading extension validation. """ compare = lambda self, a, b: a is not b clean = lambda self, x: x def __call__(self, value): filename = value.name filename_split = filename.split('.') extension = filename_split[-1] # Decided to require file extensions. if len(filename_split) < 2: raise ValidationError( "Your file lacks an extension such as .jpg or .png. " "Please re-name it on your computer and re-upload it.", code='no_extension' ) # Restrict allowable extensions. if extension.lower() not in ALLOWABLE_THUMBNAIL_EXTENSIONS: # Format for your viewing pleasure. allowable_str = ' '.join(ALLOWABLE_THUMBNAIL_EXTENSIONS) raise ValidationError( "Your file is not one of the allowable types: %s" % allowable_str, code='extension_not_allowed' )
from django.conf import settings from django.core.validators import ValidationError # A list of allowable thumbnail file extensions. ALLOWABLE_THUMBNAIL_EXTENSIONS = getattr( settings, 'ALLOWABLE_THUMBNAIL_EXTENSIONS', ['png', 'jpg', 'jpeg', 'gif']) class ImageUploadExtensionValidator(object): """ Perform some basic image uploading extension validation. """ compare = lambda self, a, b: a is not b clean = lambda self, x: x def deconstruct(self): path = "athumb.validators.ImageUploadExtensionValidator" args = [] kwargs = {} return (path, args, kwargs) def __call__(self, value): filename = value.name filename_split = filename.split('.') extension = filename_split[-1] # Decided to require file extensions. if len(filename_split) < 2: raise ValidationError( "Your file lacks an extension such as .jpg or .png. " "Please re-name it on your computer and re-upload it.", code='no_extension' ) # Restrict allowable extensions. if extension.lower() not in ALLOWABLE_THUMBNAIL_EXTENSIONS: # Format for your viewing pleasure. allowable_str = ' '.join(ALLOWABLE_THUMBNAIL_EXTENSIONS) raise ValidationError( "Your file is not one of the allowable types: %s" % allowable_str, code='extension_not_allowed' )
Make ImageUploadExtensionValidator work with django 1.7 migrations
Make ImageUploadExtensionValidator work with django 1.7 migrations
Python
bsd-3-clause
ligonier/django-athumb
--- +++ @@ -11,6 +11,14 @@ """ compare = lambda self, a, b: a is not b clean = lambda self, x: x + + + def deconstruct(self): + path = "athumb.validators.ImageUploadExtensionValidator" + args = [] + kwargs = {} + return (path, args, kwargs) + def __call__(self, value): filename = value.name
a5f3aa3d1098645f1b24539714bf22244141811c
src/pyprobe/sensors/system/sensor_uptime.py
src/pyprobe/sensors/system/sensor_uptime.py
# coding=utf-8 from datetime import datetime import psutil from pyprobe.sensors import * __author__ = 'Dirk Dittert' class UptimeSensor(BaseSensor): KIND = u'uptime' def define(self, configuration): result = SensorDescription(u"Laufzeit", self.KIND) result.description = u"Monitort die Laufzeit eines Geräts." return result def execute(self, sensorid, host, parameters, configuration): uptime = datetime.now() - datetime.fromtimestamp(psutil.get_boot_time()) result = SensorResult(sensorid) channel = SensorChannel(u"System-Laufzeit", ModeType.FLOAT, ValueType.TIME_SECONDS, uptime.total_seconds()) result.channel.append(channel) return result
# coding=utf-8 from datetime import datetime import psutil from pyprobe.sensors import * __author__ = 'Dirk Dittert' class UptimeSensor(BaseSensor): KIND = u'uptime' def define(self, configuration): result = SensorDescription(u"Laufzeit", self.KIND) result.description = u"Monitort die Laufzeit eines Geräts." return result def execute(self, sensorid, host, parameters, configuration): uptime = datetime.now() - datetime.fromtimestamp(psutil.boot_time()) result = SensorResult(sensorid) channel = SensorChannel(u"System-Laufzeit", ModeType.FLOAT, ValueType.TIME_SECONDS, uptime.total_seconds()) result.channel.append(channel) return result
Use proper method to determine system uptime.
Use proper method to determine system uptime. Fixes https://github.com/dittert/pyprobe/issues/9
Python
apache-2.0
dittert/pyprobe,dittert/pyprobe
--- +++ @@ -18,7 +18,7 @@ return result def execute(self, sensorid, host, parameters, configuration): - uptime = datetime.now() - datetime.fromtimestamp(psutil.get_boot_time()) + uptime = datetime.now() - datetime.fromtimestamp(psutil.boot_time()) result = SensorResult(sensorid) channel = SensorChannel(u"System-Laufzeit", ModeType.FLOAT, ValueType.TIME_SECONDS, uptime.total_seconds()) result.channel.append(channel)
bb575cfdf4a6781c878a12f80987fb3e62fe56d4
chandl/model/posts.py
chandl/model/posts.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals class Posts(list): """ Represents a list of posts in a thread. """ def __init__(self, *args): """ Initialise a new posts list. :param args: The list of posts. """ super(Posts, self).__init__(*args) def filter(self, predicate): """ Take a subset of this list of posts. :param predicate: The predicate to use to choose which posts make the cut. :return: The filtered posts. """ return Posts(filter(predicate, self)) def map(self, transformation): """ Applies a transformation function to each post, returning a list of this function's returned values. :param transformation: The transformation function. :return: The transformed list of posts. """ return map(transformation, self) def foreach(self, function): """ Call a function for each post. :param function: A function taking a post argument. Return values are ignored. """ for post in self: function(post)
# -*- coding: utf-8 -*- from __future__ import unicode_literals class Posts(list): """ Represents a list of posts in a thread. """ def __init__(self, *args): """ Initialise a new posts list. :param args: The list of posts. """ super(Posts, self).__init__(*args) def filter(self, predicate): """ Take a subset of this list of posts. :param predicate: The predicate to use to choose which posts make the cut. :return: The filtered posts. """ return Posts([post for post in self if predicate(post)]) def map(self, transform): """ Applies a transformation function to each post, returning a list of this function's returned values. :param transform: The transformation function. :return: The transformed list of posts. """ return [transform(post) for post in self] def foreach(self, function): """ Call a function for each post. :param function: A function taking a post argument. Return values are ignored. """ for post in self: function(post)
Make post filtering and mapping more pythonic
Make post filtering and mapping more pythonic
Python
mit
gebn/chandl,gebn/chandl
--- +++ @@ -25,18 +25,18 @@ :return: The filtered posts. """ - return Posts(filter(predicate, self)) + return Posts([post for post in self if predicate(post)]) - def map(self, transformation): + def map(self, transform): """ Applies a transformation function to each post, returning a list of this function's returned values. - :param transformation: The transformation function. + :param transform: The transformation function. :return: The transformed list of posts. """ - return map(transformation, self) + return [transform(post) for post in self] def foreach(self, function): """
1fa7fed7d63fc7819ae5378f9a9addf7439e9b92
messages.py
messages.py
class Dispatched(object): def __init__(self, w=None, cb=None): self.w = w self.cb = cb self.retval = None self.output = None class DispatchInquiry(object): def __init__(self, msg=None, payload=None, resp=None): self.msg = msg self.resp = resp self.payload = payload
class Dispatched(object): def __init__(self, w=None, _id=None): self.w = w self.id = _id if _id != None else id(self) class DispatchedState(object): def __init__(self, retval=None, output=None, exc=None, _id=None): self.retval = retval self.output = output self.exc = exc self.id = _id if _id != None else id(self) class DispatchInquiry(object): def __init__(self, msg=None, payload=None, tgt=None, resp=None, _id=None): self.id = _id if _id != None else id(self) self.msg = msg self.tgt = tgt self.resp = resp self.payload = payload
Add DispatchedState, add target, add id
Add DispatchedState, add target, add id
Python
mit
joushou/dispatch,joushou/dispatch
--- +++ @@ -1,12 +1,19 @@ class Dispatched(object): - def __init__(self, w=None, cb=None): + def __init__(self, w=None, _id=None): self.w = w - self.cb = cb - self.retval = None - self.output = None + self.id = _id if _id != None else id(self) + +class DispatchedState(object): + def __init__(self, retval=None, output=None, exc=None, _id=None): + self.retval = retval + self.output = output + self.exc = exc + self.id = _id if _id != None else id(self) class DispatchInquiry(object): - def __init__(self, msg=None, payload=None, resp=None): + def __init__(self, msg=None, payload=None, tgt=None, resp=None, _id=None): + self.id = _id if _id != None else id(self) self.msg = msg + self.tgt = tgt self.resp = resp self.payload = payload
074dcf9c822827c6609dc11c0047aa79dd8c1ad8
tests/test_cli.py
tests/test_cli.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """Tests for `pyutrack` package.""" import unittest from click.testing import CliRunner from pyutrack import cli class TestYoutrack_cli(unittest.TestCase): def test_improt(self): import pyutrack def test_command_line_interface(self): runner = CliRunner() result = runner.invoke(cli.cli) assert result.exit_code == 0 assert 'YouTrack' in result.output help_result = runner.invoke(cli.cli, ['--help']) assert help_result.exit_code == 0 assert 'Show this message and exit.' in help_result.output
#!/usr/bin/env python # -*- coding: utf-8 -*- """Tests for `pyutrack` package.""" import unittest from click.testing import CliRunner from pyutrack import cli from tests import PyutrackTest class TestYoutrack_cli(PyutrackTest): def test_improt(self): import pyutrack def test_command_line_interface(self): runner = CliRunner() result = runner.invoke(cli.cli) assert result.exit_code == 0 assert 'YouTrack' in result.output help_result = runner.invoke(cli.cli, ['--help']) assert help_result.exit_code == 0 assert 'Show this message and exit.' in help_result.output
Set cli tests to base test class
Set cli tests to base test class
Python
mit
alisaifee/pyutrack,alisaifee/pyutrack
--- +++ @@ -1,17 +1,15 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- - """Tests for `pyutrack` package.""" - import unittest from click.testing import CliRunner from pyutrack import cli +from tests import PyutrackTest -class TestYoutrack_cli(unittest.TestCase): - +class TestYoutrack_cli(PyutrackTest): def test_improt(self): import pyutrack
5bd56894f3f4bbb3fadc3430238c394660b249c4
core/bootstrap.py
core/bootstrap.py
from __future__ import print_function from tinydb import TinyDB from core.models import Movie TABLE_POPULAR = "popular" TABLE_NAME_TO_ID = "name_to_id_mapping" TABLE_MOVIES = "movies" class Application(object): def __init__(self, settings): database = TinyDB(settings["DATABASE"]) self.Movie = Movie(database, TABLE_MOVIES) self.settings = settings def setting(self, key): return self.settings[key] def debug(self, message): if self.settings.get("DEBUG", False): print(message) def output(self, message): print(message) def debug_or_dot(self, message): if self.settings.get("DEBUG", False): print(message) else: print(".", end="")
from __future__ import print_function from tinydb import TinyDB from core.models import Movie TABLE_POPULAR = "popular" TABLE_NAME_TO_ID = "name_to_id_mapping" TABLE_MOVIES = "movies" class Application(object): def __init__(self, settings): database = TinyDB(settings["DATABASE"], indent=4) self.Movie = Movie(database, TABLE_MOVIES) self.settings = settings def setting(self, key): return self.settings[key] def debug(self, message): if self.settings.get("DEBUG", False): print(message) def output(self, message): print(message) def debug_or_dot(self, message): if self.settings.get("DEBUG", False): print(message) else: print(".", end="")
Make the database JSON easier to read.
Make the database JSON easier to read.
Python
mit
EmilStenstrom/nephele
--- +++ @@ -8,7 +8,7 @@ class Application(object): def __init__(self, settings): - database = TinyDB(settings["DATABASE"]) + database = TinyDB(settings["DATABASE"], indent=4) self.Movie = Movie(database, TABLE_MOVIES) self.settings = settings
c9c0aace029dd07a96ceed4f14303d5f0eadee13
blackjax/__init__.py
blackjax/__init__.py
from .mcmc import hmc, nuts, rmh from .mcmc_adaptation import window_adaptation __version__ = "0.3.0" __all__ = [ "hmc", "nuts", "rmh", "window_adaptation", "adaptive_tempered_smc", "tempered_smc", "inference", "adaptation", "diagnostics", ]
from .diagnostics import effective_sample_size as ess from .diagnostics import potential_scale_reduction as rhat from .mcmc import hmc, nuts, rmh from .mcmc_adaptation import window_adaptation __version__ = "0.3.0" __all__ = [ "hmc", "nuts", "rmh", "window_adaptation", "adaptive_tempered_smc", "tempered_smc", "ess", "rhat", ]
Add diagnostics to blackjax namespace
Add diagnostics to blackjax namespace
Python
apache-2.0
blackjax-devs/blackjax
--- +++ @@ -1,3 +1,5 @@ +from .diagnostics import effective_sample_size as ess +from .diagnostics import potential_scale_reduction as rhat from .mcmc import hmc, nuts, rmh from .mcmc_adaptation import window_adaptation @@ -10,7 +12,6 @@ "window_adaptation", "adaptive_tempered_smc", "tempered_smc", - "inference", - "adaptation", - "diagnostics", + "ess", + "rhat", ]
d3992b1677a5186b8b4072c9fdf50e4cb44dc5ef
base_accounts/models.py
base_accounts/models.py
from django.db import models from django.contrib.auth.models import AbstractUser from django.utils.translation import ugettext_lazy as _ from django.template.defaultfilters import slugify class BaseUser(AbstractUser): slug = models.SlugField(_('slug'), max_length=255) name = models.CharField(_('name'), max_length=255, blank=True) first_login = models.BooleanField(_('first login'), default=True) image = models.ImageField(_('image'), blank=True, null=True, upload_to="images/avatars/%Y/%m/%d", max_length=255) class Meta: abstract = True def save(self, *args, **kwargs): if not self.id: self.slug = slugify(self.username) if not self.name.strip(): self.name = "%s %s" % (self.first_name, self.last_name) super(BaseUser, self).save(*args, **kwargs) def get_display_name(self): return self.name or self.username
from django.db import models from django.contrib.auth.models import AbstractUser from django.utils.translation import ugettext_lazy as _ from django.template.defaultfilters import slugify class BaseUser(AbstractUser): slug = models.SlugField(_('slug'), max_length=255) name = models.CharField(_('name'), max_length=255, blank=True) first_login = models.BooleanField(_('first login'), default=True) image = models.ImageField(_('image'), blank=True, null=True, upload_to="images/avatars/%Y/%m/%d", max_length=255) class Meta: abstract = True def save(self, *args, **kwargs): # Create slug from username. Altough field is not unique at database # level, it will be as long as username stays unique as well. if not self.id: self.slug = slugify(self.username) # Assign username as name if empty if not self.name.strip(): if not self.first_name: self.first_name = self.username name = "%s %s" % (self.first_name, self.last_name) self.name = name.strip() super(BaseUser, self).save(*args, **kwargs) def get_display_name(self): return self.name or self.username
Fix name field for empty values
Fix name field for empty values
Python
bsd-3-clause
Nomadblue/django-nomad-base-accounts,Nomadblue/django-nomad-base-accounts
--- +++ @@ -14,10 +14,19 @@ abstract = True def save(self, *args, **kwargs): + + # Create slug from username. Altough field is not unique at database + # level, it will be as long as username stays unique as well. if not self.id: self.slug = slugify(self.username) + + # Assign username as name if empty if not self.name.strip(): - self.name = "%s %s" % (self.first_name, self.last_name) + if not self.first_name: + self.first_name = self.username + name = "%s %s" % (self.first_name, self.last_name) + self.name = name.strip() + super(BaseUser, self).save(*args, **kwargs) def get_display_name(self):
e1bfa7170d4cf6a78cd0f2ca9c3d5302e04323f5
utensils/forms.py
utensils/forms.py
# encoding: utf-8 from django import forms class SearchForm(forms.Form): search = forms.CharField( label='', required=False, widget=forms.widgets.TextInput())
# encoding: utf-8 from django import forms from django.utils.functional import curry class SearchForm(forms.Form): search = forms.CharField( label='', required=False, widget=forms.widgets.TextInput()) class UniqueModelFieldsMixin(object): """ Mixin that enforces unique fields on ModelForm forms. Must be left of ModelForm when defining the form class (see https://code.djangoproject.com/ticket/13075). unique_fields = ['name', 'username'] unique_fields = ['name', {'field': 'username', case_insensitive=True}] """ unique_fields = [] def __init__(self, *args, **kwargs): super(UniqueModelFieldsMixin, self).__init__(*args, **kwargs) def _make_validator(field, case_insensitive): model = self.Meta.model value = self.cleaned_data.get(field) case = 'i' if case_insensitive else '' qs = model.objects.filter( **{field + '__{}exact'.format(case): value}) if self.instance.pk: qs = qs.exclude(pk=self.instance.pk) if qs.exists(): raise forms.ValidationError( "That {} is not available.".format(field)) return value for field in self.unique_fields: if isinstance(field, dict): case_insensitive = field.get('case_insensitive', False) field_name = field['field'] else: field_name = field case_insensitive = False func_name = "clean_{}".format(field_name) setattr(self, func_name, curry(_make_validator, field_name, case_insensitive))
Add unique model fields form mixin.
Add unique model fields form mixin.
Python
mit
code-kitchen/django-utensils,code-kitchen/django-utensils,code-kitchen/django-utensils
--- +++ @@ -1,8 +1,51 @@ # encoding: utf-8 from django import forms +from django.utils.functional import curry class SearchForm(forms.Form): search = forms.CharField( label='', required=False, widget=forms.widgets.TextInput()) + + +class UniqueModelFieldsMixin(object): + """ + Mixin that enforces unique fields on ModelForm forms. + + Must be left of ModelForm when defining the form class (see + https://code.djangoproject.com/ticket/13075). + + unique_fields = ['name', 'username'] + unique_fields = ['name', {'field': 'username', case_insensitive=True}] + """ + unique_fields = [] + + def __init__(self, *args, **kwargs): + super(UniqueModelFieldsMixin, self).__init__(*args, **kwargs) + + def _make_validator(field, case_insensitive): + model = self.Meta.model + value = self.cleaned_data.get(field) + + case = 'i' if case_insensitive else '' + qs = model.objects.filter( + **{field + '__{}exact'.format(case): value}) + if self.instance.pk: + qs = qs.exclude(pk=self.instance.pk) + + if qs.exists(): + raise forms.ValidationError( + "That {} is not available.".format(field)) + return value + + for field in self.unique_fields: + if isinstance(field, dict): + case_insensitive = field.get('case_insensitive', False) + field_name = field['field'] + else: + field_name = field + case_insensitive = False + func_name = "clean_{}".format(field_name) + setattr(self, func_name, + curry(_make_validator, field_name, case_insensitive))
3d84e8e871b1049102815136ef23e3e630461918
connman_dispatcher/utils.py
connman_dispatcher/utils.py
import os import subprocess import logbook logger = logbook.Logger('connman-dispatcher') def execute_scripts_in_dirs(paths, state): for path in sorted(paths): if os.path.exists(path) and os.path.isdir(path): execute_scripts_in_dir(path, state) def execute_scripts_in_dir(path, state): for script in sorted(os.listdir(path)): full_scirpt_path = os.path.join(path, script) if os.path.exists(full_scirpt_path): logger.info('executing: %s %s' % (full_scirpt_path, state)) subprocess.Popen([full_scirpt_path, state])
import os import subprocess import logbook logger = logbook.Logger('connman-dispatcher') def is_executable(path): return all([os.path.isfile(path), os.access(path, os.X_OK)]) def execute_scripts_in_dirs(paths, state): for path in sorted(paths): if os.path.exists(path) and os.path.isdir(path): execute_scripts_in_dir(path, state) def execute_scripts_in_dir(path, state): for script in sorted(os.listdir(path)): full_scirpt_path = os.path.join(path, script) if os.path.exists(full_scirpt_path): if is_executable(full_scirpt_path): logger.info('executing: %s %s' % (full_scirpt_path, state)) subprocess.Popen([full_scirpt_path, state]) else: logger.error('%s is not executable file' % full_scirpt_path)
Check if file is executable, before executing it
Check if file is executable, before executing it
Python
isc
a-sk/connman-dispatcher
--- +++ @@ -3,6 +3,9 @@ import logbook logger = logbook.Logger('connman-dispatcher') + +def is_executable(path): + return all([os.path.isfile(path), os.access(path, os.X_OK)]) def execute_scripts_in_dirs(paths, state): for path in sorted(paths): @@ -13,6 +16,9 @@ for script in sorted(os.listdir(path)): full_scirpt_path = os.path.join(path, script) if os.path.exists(full_scirpt_path): - logger.info('executing: %s %s' % (full_scirpt_path, state)) - subprocess.Popen([full_scirpt_path, state]) + if is_executable(full_scirpt_path): + logger.info('executing: %s %s' % (full_scirpt_path, state)) + subprocess.Popen([full_scirpt_path, state]) + else: + logger.error('%s is not executable file' % full_scirpt_path)
7a901a8edd850dc5e2e75c89362444768722592c
svs_interface.py
svs_interface.py
#!/usr/bin/env python import subprocess from Tkinter import * from tkFileDialog import * import os class GpgApp(object): def __init__(self, master): frame = Frame(master) frame.pack() self.text = Text() self.text.pack() menu = Menu(master) root.config(menu=menu) filemenu = Menu(menu, tearoff=0) menu.add_cascade(label="File", menu=filemenu) filemenu.add_command(label="Open", command=self.filename_open) filemenu.add_separator() filemenu.add_command(label="Exit", command=self.do_exit) def filename_open(self): fin = askopenfilenames() if fin: self.text.insert(END,fin) return fin def do_exit(self): root.destroy() root = Tk() root.title("a simple GnuPG interface") app = GpgApp(root) root.mainloop()
#!/usr/bin/env python import subprocess from Tkinter import * from tkFileDialog import * import os GPG = 'gpg2' SERVER_KEY = '' # replace with gpg key ID of server key class GpgApp(object): def __init__(self, master): frame = Frame(master) frame.pack() self.text = Text() self.text.pack() menu = Menu(master) root.config(menu=menu) filemenu = Menu(menu, tearoff=0) menu.add_cascade(label="File", menu=filemenu) filemenu.add_command(label="Open", command=self.filename_open) filemenu.add_separator() filemenu.add_command(label="Exit", command=self.do_exit) def filename_open(self): fin = askopenfilenames() if fin: self.text.insert(END,fin) return fin def encrypt_file(self, input_file, output_file, recipient): args = [GPG, '--output', output_file, '--recipient', recipient, '-sea', input_file] subprocess.call(args) def do_exit(self): root.destroy() root = Tk() root.title("a simple GnuPG interface") app = GpgApp(root) root.mainloop()
Add method to encrypt files
Add method to encrypt files
Python
agpl-3.0
jeann2013/securedrop,pwplus/securedrop,chadmiller/securedrop,jrosco/securedrop,GabeIsman/securedrop,GabeIsman/securedrop,jaseg/securedrop,chadmiller/securedrop,jrosco/securedrop,micahflee/securedrop,jaseg/securedrop,conorsch/securedrop,kelcecil/securedrop,chadmiller/securedrop,pwplus/securedrop,jaseg/securedrop,ehartsuyker/securedrop,ehartsuyker/securedrop,micahflee/securedrop,garrettr/securedrop,harlo/securedrop,pwplus/securedrop,micahflee/securedrop,harlo/securedrop,jaseg/securedrop,jrosco/securedrop,ageis/securedrop,jeann2013/securedrop,jaseg/securedrop,jeann2013/securedrop,GabeIsman/securedrop,heartsucker/securedrop,harlo/securedrop,kelcecil/securedrop,pwplus/securedrop,harlo/securedrop,jrosco/securedrop,kelcecil/securedrop,ageis/securedrop,ehartsuyker/securedrop,ageis/securedrop,conorsch/securedrop,jrosco/securedrop,micahflee/securedrop,jeann2013/securedrop,jeann2013/securedrop,chadmiller/securedrop,heartsucker/securedrop,garrettr/securedrop,kelcecil/securedrop,kelcecil/securedrop,harlo/securedrop,garrettr/securedrop,ehartsuyker/securedrop,heartsucker/securedrop,GabeIsman/securedrop,heartsucker/securedrop,GabeIsman/securedrop,ehartsuyker/securedrop,garrettr/securedrop,pwplus/securedrop,conorsch/securedrop,chadmiller/securedrop,heartsucker/securedrop,GabeIsman/securedrop,ehartsuyker/securedrop,jrosco/securedrop,conorsch/securedrop,chadmiller/securedrop,pwplus/securedrop,kelcecil/securedrop,conorsch/securedrop,harlo/securedrop,ageis/securedrop,jeann2013/securedrop,jaseg/securedrop
--- +++ @@ -4,6 +4,9 @@ from Tkinter import * from tkFileDialog import * import os + +GPG = 'gpg2' +SERVER_KEY = '' # replace with gpg key ID of server key class GpgApp(object): def __init__(self, master): @@ -24,6 +27,9 @@ if fin: self.text.insert(END,fin) return fin + def encrypt_file(self, input_file, output_file, recipient): + args = [GPG, '--output', output_file, '--recipient', recipient, '-sea', input_file] + subprocess.call(args) def do_exit(self): root.destroy()
f3cbe52e0d65e8d6647815b25c79a836db93fb41
gitcd/Cli/Command.py
gitcd/Cli/Command.py
import subprocess import string class Command(object): def execute(self, command: str): cliArgs = self.parseCliArgs(command) process = subprocess.Popen(cliArgs, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) output, err = process.communicate() if process.returncode != 0: return False return output.decode("utf-8").strip() def parseCliArgs(self, command: str): rawArgs = command.split(" ") parsedArgs = [] tmpString = False isSingle = False isDouble = False for arg in rawArgs: # handle strings in single quotes if arg.startswith("'") and isSingle == False and isDouble == False: isSingle = True tmpString = arg elif arg.endswith("'") and isSingle == True: arg = "%s %s" % (tmpString, arg) parsedArgs.append(arg) isSingle = False tmpString = False # handle strings in double quotes elif arg.startswith('"') and isDouble == False and isSingle == False: isDouble = True tmpString = arg elif arg.endswith('"') and isDouble == True: arg = "%s %s" % (tmpString, arg) parsedArgs.append(arg) isDouble = False tmpString = False # extend current string elif tmpString != False: tmpString = "%s %s" % (tmpString, arg) else: parsedArgs.append(arg) return parsedArgs
import subprocess import string from pprint import pprint class Command(object): def execute(self, command: str): cliArgs = self.parseCliArgs(command) pprint(cliArgs) process = subprocess.Popen(cliArgs, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) output, err = process.communicate() if process.returncode != 0: return False return output.decode("utf-8").strip() def parseCliArgs(self, command: str): rawArgs = command.split(" ") parsedArgs = [] tmpString = False isSingle = False isDouble = False for arg in rawArgs: # handle strings in single quotes if arg.startswith("'") and isSingle == False and isDouble == False: isSingle = True tmpString = arg elif arg.endswith("'") and isSingle == True: arg = "%s %s" % (tmpString, arg) parsedArgs.append(arg) isSingle = False tmpString = False # handle strings in double quotes elif arg.startswith('"') and isDouble == False and isSingle == False: isDouble = True tmpString = arg elif arg.endswith('"') and isDouble == True: arg = "%s %s" % (tmpString, arg) parsedArgs.append(arg) isDouble = False tmpString = False # extend current string elif tmpString != False: tmpString = "%s %s" % (tmpString, arg) else: parsedArgs.append(arg) return parsedArgs
Add some debug for debian box
Add some debug for debian box
Python
apache-2.0
claudio-walser/gitcd,claudio-walser/gitcd
--- +++ @@ -1,10 +1,14 @@ import subprocess import string + +from pprint import pprint class Command(object): def execute(self, command: str): cliArgs = self.parseCliArgs(command) + + pprint(cliArgs) process = subprocess.Popen(cliArgs, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) output, err = process.communicate()
c72b712cf84e63dd2d72fdc6d64c50a65b8a88a0
courant/core/search/urls.py
courant/core/search/urls.py
from django.conf.urls.defaults import * from courant.core.search.views import * from haystack.forms import ModelSearchForm from haystack.query import SearchQuerySet from haystack.views import SearchView urlpatterns = patterns('', url(r'', CourantSearchView(template='search/results_page.html', form_class=ModelSearchForm, searchqueryset=SearchQuerySet().all()), name="search"), )
from django.conf.urls.defaults import * from courant.core.search.views import * from haystack.forms import ModelSearchForm from haystack.query import SearchQuerySet from haystack.views import SearchView urlpatterns = patterns('', url(r'', SearchView(template='search/results_page.html', load_all=True, form_class=ModelSearchForm, searchqueryset=SearchQuerySet().all()), name="search"), )
Remove all Haystack customization of search view pending further investigations.
Remove all Haystack customization of search view pending further investigations.
Python
bsd-3-clause
maxcutler/Courant-News,maxcutler/Courant-News
--- +++ @@ -6,7 +6,8 @@ from haystack.views import SearchView urlpatterns = patterns('', - url(r'', CourantSearchView(template='search/results_page.html', - form_class=ModelSearchForm, - searchqueryset=SearchQuerySet().all()), name="search"), + url(r'', SearchView(template='search/results_page.html', + load_all=True, + form_class=ModelSearchForm, + searchqueryset=SearchQuerySet().all()), name="search"), )
1504710d748a86bbd4eed717b4bcc2f5d15ec1b7
SatNOGS/base/management/commands/initialize.py
SatNOGS/base/management/commands/initialize.py
from orbit import satellite from django.core.management.base import BaseCommand from base.tests import ObservationFactory, StationFactory from base.models import Satellite class Command(BaseCommand): help = 'Create initial fixtures' def handle(self, *args, **options): ObservationFactory.create_batch(200) StationFactory.create_batch(200) satellites = Satellite.objects.all() for obj in satellites: try: sat = satellite(obj.norad_cat_id) except: self.stdout.write(('Satellite {} with Identifier {} does ' 'not exist [deleted]').format(obj.name, obj.norad_cat_id)) obj.delete() continue obj.name = sat.name() tle = sat.tle() obj.tle0 = tle[0] obj.tle1 = tle[1] obj.tle2 = tle[2] obj.save() self.stdout.write(('Satellite {} with Identifier {} ' 'found [updated]').format(obj.norad_cat_id, obj.name))
from orbit import satellite from django.core.management.base import BaseCommand from base.tests import ObservationFactory, StationFactory from base.models import Satellite class Command(BaseCommand): help = 'Create initial fixtures' def handle(self, *args, **options): ObservationFactory.create_batch(20) StationFactory.create_batch(20) satellites = Satellite.objects.all() for obj in satellites: try: sat = satellite(obj.norad_cat_id) except: self.stdout.write(('Satellite {} with Identifier {} does ' 'not exist [deleted]').format(obj.name, obj.norad_cat_id)) obj.delete() continue obj.name = sat.name() tle = sat.tle() obj.tle0 = tle[0] obj.tle1 = tle[1] obj.tle2 = tle[2] obj.save() self.stdout.write(('Satellite {} with Identifier {} ' 'found [updated]').format(obj.norad_cat_id, obj.name))
Use more sane numbers for initial data
Use more sane numbers for initial data
Python
agpl-3.0
cshields/satnogs-network,cshields/satnogs-network,cshields/satnogs-network,cshields/satnogs-network
--- +++ @@ -10,8 +10,8 @@ help = 'Create initial fixtures' def handle(self, *args, **options): - ObservationFactory.create_batch(200) - StationFactory.create_batch(200) + ObservationFactory.create_batch(20) + StationFactory.create_batch(20) satellites = Satellite.objects.all()
d413345197abe9092979e324498c766f7410d34b
bazaar/goods/utils.py
bazaar/goods/utils.py
from __future__ import unicode_literals from .models import Product, PriceList def create_product_for_good(good, price, quantity=1): """ Creates a product for the specified `good` with `quantity`. `price` is set to the default price list. Returns the new product instance """ product = Product.objects.create(name=good.name, description=good.description) product.save() # Add good to product elements list product.elements.create(good=good, quantity=quantity) # Set product's base price on default price list default_price_list = PriceList.objects.get_default() product.prices.create(product=product, price_list=default_price_list, price=price) return product
from __future__ import unicode_literals from .models import Product, PriceList def create_product_for_good(good, price, quantity=1, name=None): """ Creates a product for the specified `good` with `quantity`. `price` is set to the default price list. Returns the new product instance """ product_name = name or good.name product = Product.objects.create(name=product_name, description=good.description) product.save() # Add good to product elements list product.elements.create(good=good, quantity=quantity) # Set product's base price on default price list default_price_list = PriceList.objects.get_default() product.prices.create(product=product, price_list=default_price_list, price=price) return product
Add name parameter to create_product_for_good which defaults to good.name
Add name parameter to create_product_for_good which defaults to good.name
Python
bsd-2-clause
evonove/django-bazaar,evonove/django-bazaar,meghabhoj/NEWBAZAAR,evonove/django-bazaar,meghabhoj/NEWBAZAAR,meghabhoj/NEWBAZAAR
--- +++ @@ -3,13 +3,14 @@ from .models import Product, PriceList -def create_product_for_good(good, price, quantity=1): +def create_product_for_good(good, price, quantity=1, name=None): """ Creates a product for the specified `good` with `quantity`. `price` is set to the default price list. Returns the new product instance """ + product_name = name or good.name - product = Product.objects.create(name=good.name, description=good.description) + product = Product.objects.create(name=product_name, description=good.description) product.save() # Add good to product elements list
f21da23d45c328acffaba69a6f2fbf2056ca326b
datapipe/denoising/__init__.py
datapipe/denoising/__init__.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org) # This script is provided under the terms and conditions of the MIT license: # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. __all__ = ['abstract_cleaning_algorithm', 'fft', 'null', 'null_ref', 'tailcut', 'tailcut_jd', 'wavelets_mrfilter', 'wavelets_mrtransform']
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org) # This script is provided under the terms and conditions of the MIT license: # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. __all__ = ['abstract_cleaning_algorithm', 'fft', 'null', 'null_ref', 'tailcut', 'tailcut_jd', 'wavelets_mrfilter', 'wavelets_mrtransform', 'inverse_transform_sampling']
Add a module to the __all__ list.
Add a module to the __all__ list.
Python
mit
jdhp-sap/sap-cta-data-pipeline,jdhp-sap/sap-cta-data-pipeline,jdhp-sap/data-pipeline-standalone-scripts,jdhp-sap/data-pipeline-standalone-scripts
--- +++ @@ -27,5 +27,6 @@ 'tailcut', 'tailcut_jd', 'wavelets_mrfilter', - 'wavelets_mrtransform'] + 'wavelets_mrtransform', + 'inverse_transform_sampling']
c6a56604562460b2a8a72b25b23ce2ff4958d184
test/test_api.py
test/test_api.py
import unittest import timevis class TestAPIs(unittest.TestCase): def setUp(self): self.app = timevis.app.test_client() def test_api(self): resp = self.app.get('/api/v2/experiment') self.assertIsNotNone(resp.data) if __name__ == '__main__': unittest.main()
import unittest import timevis import os.path import json # The folder holding the test data test_path = os.path.dirname(__file__) class TestExperiment(unittest.TestCase): def setUp(self): self.app = timevis.app.test_client() self.url = '/api/v2/experiment' # TODO create test db timevis.models.init_db() def test_post(self): with open(test_path + '/post_exp.json') as file: obj = json.load(file) resp = self.app.post(self.url, data=json.dumps(obj), content_type='application/json') self.assertIsNotNone(resp.data) if __name__ == '__main__': unittest.main()
Add test case: Post method for experiment
Add test case: Post method for experiment
Python
mit
gaoce/TimeVis,gaoce/TimeVis,gaoce/TimeVis
--- +++ @@ -1,13 +1,25 @@ import unittest import timevis +import os.path +import json -class TestAPIs(unittest.TestCase): +# The folder holding the test data +test_path = os.path.dirname(__file__) + + +class TestExperiment(unittest.TestCase): def setUp(self): self.app = timevis.app.test_client() + self.url = '/api/v2/experiment' + # TODO create test db + timevis.models.init_db() - def test_api(self): - resp = self.app.get('/api/v2/experiment') + def test_post(self): + with open(test_path + '/post_exp.json') as file: + obj = json.load(file) + resp = self.app.post(self.url, data=json.dumps(obj), + content_type='application/json') self.assertIsNotNone(resp.data)
c9b38972486b588790371ab41c961e68609e0b4b
fabfile.py
fabfile.py
from fabric.api import sudo, cd, env, run, local env.hosts = ['ibadaw@sableamd2.cs.mcgill.ca'] DEPLOY_DIR = '/var/www/mcbench/mcbench' def deploy(): with cd(DEPLOY_DIR): run('git pull origin master') restart() def restart(): sudo('service httpd restart') def test(): local('nosetests') def coverage(): nose_flags = [ '--with-coverage', '--cover-html', '--cover-package=app,manage,mcbench' ] local('nosetests ' + ' '.join(nose_flags))
from fabric.api import sudo, cd, env, run, local env.hosts = ['ibadaw@sableamd2.cs.mcgill.ca'] DEPLOY_DIR = '/var/www/mcbench/mcbench' def deploy(): with cd(DEPLOY_DIR): run('git pull origin master') restart() def restart(): sudo('service httpd restart') def test(): local('nosetests') def coverage(): nose_flags = [ '--with-coverage', '--cover-html', '--cover-package=app,manage,mcbench' ] local('nosetests ' + ' '.join(nose_flags)) def up(): local('python manage.py runserver')
Add command to bring up dev server.
Add command to bring up dev server.
Python
mit
isbadawi/mcbench,isbadawi/mcbench
--- +++ @@ -26,3 +26,7 @@ '--cover-package=app,manage,mcbench' ] local('nosetests ' + ' '.join(nose_flags)) + + +def up(): + local('python manage.py runserver')
0257d01e53a314b176f3a3b97259b46a271a08be
tests/test_tx.py
tests/test_tx.py
from __future__ import absolute_import, division, print_function import pytest pytest.importorskip("twisted") from twisted.internet.defer import Deferred, succeed, fail from prometheus_async import tx class TestTime(object): @pytest.inlineCallbacks def test_decorator(self, fo, patch_timer): """ time works with functions returning Deferreds. """ @tx.time(fo) def func(): return succeed(42) rv = func() # Twisted runs fires callbacks immediately. assert [1] == fo._observed assert 42 == (yield rv) assert [1] == fo._observed @pytest.inlineCallbacks def test_decorator_exc(self, fo, patch_timer): """ Does not swallow exceptions. """ v = ValueError("foo") @tx.time(fo) def func(): return fail(v) with pytest.raises(ValueError) as e: yield func() assert v is e.value @pytest.inlineCallbacks def test_deferred(self, fo, patch_timer): """ time works with Deferreds. """ d = tx.time(fo, Deferred()) assert [] == fo._observed d.callback(42) assert 42 == (yield d) assert [1] == fo._observed
from __future__ import absolute_import, division, print_function import pytest pytest.importorskip("twisted") from twisted.internet.defer import Deferred, succeed, fail from prometheus_async import tx class TestTime(object): @pytest.inlineCallbacks def test_decorator_sync(self, fo, patch_timer): """ time works with sync results functions. """ @tx.time(fo) def func(): return 42 assert 42 == (yield func()) assert [1] == fo._observed @pytest.inlineCallbacks def test_decorator(self, fo, patch_timer): """ time works with functions returning Deferreds. """ @tx.time(fo) def func(): return succeed(42) rv = func() # Twisted runs fires callbacks immediately. assert [1] == fo._observed assert 42 == (yield rv) assert [1] == fo._observed @pytest.inlineCallbacks def test_decorator_exc(self, fo, patch_timer): """ Does not swallow exceptions. """ v = ValueError("foo") @tx.time(fo) def func(): return fail(v) with pytest.raises(ValueError) as e: yield func() assert v is e.value @pytest.inlineCallbacks def test_deferred(self, fo, patch_timer): """ time works with Deferreds. """ d = tx.time(fo, Deferred()) assert [] == fo._observed d.callback(42) assert 42 == (yield d) assert [1] == fo._observed
Test sync return for Twisted too
Test sync return for Twisted too
Python
apache-2.0
hynek/prometheus_async
--- +++ @@ -10,6 +10,18 @@ class TestTime(object): + @pytest.inlineCallbacks + def test_decorator_sync(self, fo, patch_timer): + """ + time works with sync results functions. + """ + @tx.time(fo) + def func(): + return 42 + + assert 42 == (yield func()) + assert [1] == fo._observed + @pytest.inlineCallbacks def test_decorator(self, fo, patch_timer): """
7ef23761c64c1e1b1ac47c72a78d5109c36761d0
tests/testing.py
tests/testing.py
import os import os.path import subprocess class HelloWorld(object): BUILD = r"""#!/bin/sh set -e cd $1 cat > hello << EOF #!/bin/sh echo Hello world! EOF chmod +x hello """ EXPECTED_OUTPUT = "Hello world!\n" def write_package_source(package_dir, scripts): whack_dir = os.path.join(package_dir, "whack") os.makedirs(whack_dir) for name, contents in scripts.iteritems(): _write_script(os.path.join(whack_dir, name), contents) def _write_script(path, contents): _write_file(path, contents) _make_executable(path) def _make_executable(path): subprocess.check_call(["chmod", "u+x", path]) def _write_file(path, contents): open(path, "w").write(contents)
import os import os.path import subprocess from whack.files import write_file class HelloWorld(object): BUILD = r"""#!/bin/sh set -e cd $1 cat > hello << EOF #!/bin/sh echo Hello world! EOF chmod +x hello """ EXPECTED_OUTPUT = "Hello world!\n" def write_package_source(package_dir, scripts): whack_dir = os.path.join(package_dir, "whack") os.makedirs(whack_dir) for name, contents in scripts.iteritems(): _write_script(os.path.join(whack_dir, name), contents) def _write_script(path, contents): write_file(path, contents) _make_executable(path) def _make_executable(path): subprocess.check_call(["chmod", "u+x", path])
Remove duplicate definition of write_file
Remove duplicate definition of write_file
Python
bsd-2-clause
mwilliamson/whack
--- +++ @@ -1,6 +1,8 @@ import os import os.path import subprocess + +from whack.files import write_file class HelloWorld(object): @@ -25,12 +27,8 @@ _write_script(os.path.join(whack_dir, name), contents) def _write_script(path, contents): - _write_file(path, contents) + write_file(path, contents) _make_executable(path) def _make_executable(path): subprocess.check_call(["chmod", "u+x", path]) - -def _write_file(path, contents): - open(path, "w").write(contents) -
d8444cec60f38baa75b89892dda6163bf63917af
todo/__init__.py
todo/__init__.py
"""django todo""" __version__ = '1.5.dev' __author__ = 'Scot Hacker' __email__ = 'shacker@birdhouse.org' __url__ = 'https://github.com/shacker/django-todo' __license__ = 'BSD License'
"""django todo""" __version__ = '1.5' __author__ = 'Scot Hacker' __email__ = 'shacker@birdhouse.org' __url__ = 'https://github.com/shacker/django-todo' __license__ = 'BSD License'
Bump version number for release 1.5
Bump version number for release 1.5
Python
bsd-3-clause
jwiltshire/django-todo,shacker/django-todo,jwiltshire/django-todo,shacker/django-todo,jwiltshire/django-todo,shacker/django-todo
--- +++ @@ -1,5 +1,5 @@ """django todo""" -__version__ = '1.5.dev' +__version__ = '1.5' __author__ = 'Scot Hacker' __email__ = 'shacker@birdhouse.org'
f5728e24ba6dec2d2d7c2eff7888137e91469094
overlay/Data.py
overlay/Data.py
import time class Data: def __init__(self, secs_since_epoch, depth_chart, temperature_chart, frame_path): # general settings self.width = 1296 self.height = 972 self.padding = 5 self.frame_path = frame_path # date/time settings self.time = time.localtime(secs_since_epoch) self.frame_date = time.strftime("%B %d, %Y", self.time) self.frame_time = time.strftime("%I:%M:%S %p", self.time) self.font_size = 22 self.text_color = "rgb(255,255,255)" # charts self.depth_chart = depth_chart.to_svg() self.temperature_chart = temperature_chart.to_svg() @property def datetime_x(self): return self.width - self.padding @property def depth_background_y(self): return self.height - 3 * self.padding - self.depth_graph_height @property def depth_background_width(self): return self.depth_graph_width + 2 * self.padding @property def depth_background_height(self): return self.depth_graph_height + 2 * self.padding @property def depth_text_x(self): return self.depth_background_width * 0.5 @property def depth_text_y(self): return self.depth_background_height - self.padding
import time class Data: def __init__(self, secs_since_epoch, depth_chart, temperature_chart, frame_path): # general settings self.width = 1296 self.height = 972 self.padding = 5 self.frame_path = frame_path # date/time settings local_time = time.localtime(secs_since_epoch) self.frame_date = time.strftime("%B %d, %Y", local_time) self.frame_time = time.strftime("%I:%M:%S %p", local_time) self.font_size = 22 self.text_color = "rgb(255,255,255)" self.datetime_x = self.width - self.padding # charts self.depth_chart = depth_chart.to_svg() self.temperature_chart = temperature_chart.to_svg()
Remove unneeded properties from main data object
Remove unneeded properties from main data object
Python
mit
thelonious/g2x,gizmo-cda/g2x,gizmo-cda/g2x,gizmo-cda/g2x,gizmo-cda/g2x,thelonious/g2x
--- +++ @@ -10,36 +10,13 @@ self.frame_path = frame_path # date/time settings - self.time = time.localtime(secs_since_epoch) - self.frame_date = time.strftime("%B %d, %Y", self.time) - self.frame_time = time.strftime("%I:%M:%S %p", self.time) + local_time = time.localtime(secs_since_epoch) + self.frame_date = time.strftime("%B %d, %Y", local_time) + self.frame_time = time.strftime("%I:%M:%S %p", local_time) self.font_size = 22 self.text_color = "rgb(255,255,255)" + self.datetime_x = self.width - self.padding # charts self.depth_chart = depth_chart.to_svg() self.temperature_chart = temperature_chart.to_svg() - - @property - def datetime_x(self): - return self.width - self.padding - - @property - def depth_background_y(self): - return self.height - 3 * self.padding - self.depth_graph_height - - @property - def depth_background_width(self): - return self.depth_graph_width + 2 * self.padding - - @property - def depth_background_height(self): - return self.depth_graph_height + 2 * self.padding - - @property - def depth_text_x(self): - return self.depth_background_width * 0.5 - - @property - def depth_text_y(self): - return self.depth_background_height - self.padding
29f3bb4fc549f78771294f90f5168b20f9ea7b5e
sdi/corestick.py
sdi/corestick.py
def read(filename): """ Reads in a corestick file and returns a dictionary keyed by core_id. Layer interface depths are positive and are relative to the lake bottom. depths are returned in meters. Northing and Easting are typically in the coordinate system used in the rest of the lake survey. We ignore the display related color and width fields in the file. """ cores = {} with open(filename) as f: units = f.readline().strip('\r\n').lower() if units not in ['feet', 'meters', 'meter']: raise NotImplementedError('Only units of FEET and METERS/METER are supported ') conv_factor = 1.0 if units == 'feet': conv_factor = 0.3048 f.readline() for line in f.readlines(): fields = line.split() core_id = fields[2] data = {} data['easting'] = float(fields[0]) data['northing'] = float(fields[1]) data['layer_interface_depths'] = [ float(fields[i]) * conv_factor for i in range(5, len(fields), 4) ] cores[core_id] = data return cores
def read(filename): """ Reads in a corestick file and returns a dictionary keyed by core_id. Layer interface depths are positive and are relative to the lake bottom. depths are returned in meters. Northing and Easting are typically in the coordinate system used in the rest of the lake survey. We ignore the width fields in the file. """ cores = {} with open(filename) as f: units = f.readline().strip('\r\n').lower() if units not in ['feet', 'meters', 'meter']: raise NotImplementedError('Only units of FEET and METERS/METER are supported ') conv_factor = 1.0 if units == 'feet': conv_factor = 0.3048 f.readline() for line in f.readlines(): fields = line.split() core_id = fields[2] data = {} data['easting'] = float(fields[0]) data['northing'] = float(fields[1]) data['layer_interface_depths'] = [ float(fields[i]) * conv_factor for i in range(5, len(fields), 4) ] data['layer_colors'] = [i for i in range(6, len(fields), 4)] cores[core_id] = data return cores
Modify to read layer colors.
Modify to read layer colors.
Python
bsd-3-clause
twdb/sdi
--- +++ @@ -3,8 +3,8 @@ Reads in a corestick file and returns a dictionary keyed by core_id. Layer interface depths are positive and are relative to the lake bottom. depths are returned in meters. Northing and Easting are typically in the - coordinate system used in the rest of the lake survey. We ignore the display - related color and width fields in the file. + coordinate system used in the rest of the lake survey. We ignore the + width fields in the file. """ cores = {} @@ -30,6 +30,7 @@ float(fields[i]) * conv_factor for i in range(5, len(fields), 4) ] + data['layer_colors'] = [i for i in range(6, len(fields), 4)] cores[core_id] = data return cores
51aaf9c19f92db4b3ad5d7aa646bf6ef8d5e62a6
runtests.py
runtests.py
#!/usr/bin/env python import sys from optparse import OptionParser from django.conf import settings if not settings.configured: settings.configure( DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'django_celery_rpc', 'USER': '', 'PASSWORD': '', 'HOST': '', 'PORT': '', } }, INSTALLED_APPS=[ 'django.contrib.contenttypes', 'celery_rpc', 'celery_rpc.tests', ], ROOT_URLCONF='', DEBUG=False, ) from django_nose import NoseTestSuiteRunner def runtests(*test_args, **kwargs): if 'south' in settings.INSTALLED_APPS: from south.management.commands import patch_for_test_db_setup patch_for_test_db_setup() if not test_args: test_args = ['celery_rpc'] test_runner = NoseTestSuiteRunner(**kwargs) failures = test_runner.run_tests(test_args) sys.exit(failures) if __name__ == '__main__': parser = OptionParser() parser.add_option('--verbosity', dest='verbosity', action='store', default=1, type=int) parser.add_options(NoseTestSuiteRunner.options) (options, args) = parser.parse_args() runtests(*args, **options.__dict__)
#!/usr/bin/env python import sys from optparse import OptionParser from django.conf import settings if not settings.configured: settings.configure( DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', 'USER': '', 'PASSWORD': '', 'HOST': '', 'PORT': '', } }, INSTALLED_APPS=[ 'django.contrib.contenttypes', 'celery_rpc', 'celery_rpc.tests', ], ROOT_URLCONF='', DEBUG=True, CELERY_RPC_CONFIG = { 'CELERY_ALWAYS_EAGER': True }, ) from django_nose import NoseTestSuiteRunner def runtests(*test_args, **kwargs): if 'south' in settings.INSTALLED_APPS: from south.management.commands import patch_for_test_db_setup patch_for_test_db_setup() if not test_args: test_args = ['celery_rpc'] test_runner = NoseTestSuiteRunner(**kwargs) failures = test_runner.run_tests(test_args) sys.exit(failures) if __name__ == '__main__': parser = OptionParser() parser.add_option('--verbosity', dest='verbosity', action='store', default=1, type=int) parser.add_options(NoseTestSuiteRunner.options) (options, args) = parser.parse_args() runtests(*args, **options.__dict__)
Fix run tests with Celery
Fix run tests with Celery
Python
unlicense
bourivouh/django-celery-rpc,ttyS15/django-celery-rpc,tumb1er/django-celery-rpc
--- +++ @@ -9,7 +9,7 @@ DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': 'django_celery_rpc', + 'NAME': ':memory:', 'USER': '', 'PASSWORD': '', 'HOST': '', @@ -23,7 +23,11 @@ 'celery_rpc.tests', ], ROOT_URLCONF='', - DEBUG=False, + DEBUG=True, + + CELERY_RPC_CONFIG = { + 'CELERY_ALWAYS_EAGER': True + }, )
6290d72458a470947c5da651968cf20f8714c646
fancypages/contrib/oscar_fancypages/views.py
fancypages/contrib/oscar_fancypages/views.py
from oscar.core.loading import load_class from . import mixins ProductCategoryView = load_class('catalogue.views', 'ProductCategoryView') class FancyPageDetailView(mixins.OscarFancyPageMixin, ProductCategoryView): pass
from oscar.core.loading import get_class from . import mixins ProductCategoryView = get_class('catalogue.views', 'ProductCategoryView') class FancyPageDetailView(mixins.OscarFancyPageMixin, ProductCategoryView): pass
Fix class loader called in Oscar contrib package
Fix class loader called in Oscar contrib package
Python
bsd-3-clause
socradev/django-fancypages,socradev/django-fancypages,socradev/django-fancypages,tangentlabs/django-fancypages,tangentlabs/django-fancypages,tangentlabs/django-fancypages
--- +++ @@ -1,9 +1,9 @@ -from oscar.core.loading import load_class +from oscar.core.loading import get_class from . import mixins -ProductCategoryView = load_class('catalogue.views', 'ProductCategoryView') +ProductCategoryView = get_class('catalogue.views', 'ProductCategoryView') class FancyPageDetailView(mixins.OscarFancyPageMixin, ProductCategoryView):
9f5ed14f24aecdd46699e84e13e9fa1f90cbf793
script/lib/config.py
script/lib/config.py
#!/usr/bin/env python

import platform
import sys

BASE_URL = 'http://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '17a0e24666d0198810752284690bc2d0d87094d7'

ARCH = {
  'cygwin': '32bit',
  'darwin': '64bit',
  'linux2': platform.architecture()[0],
  'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
  '32bit': 'ia32',
  '64bit': 'x64',
}[ARCH]

TARGET_PLATFORM = {
  'cygwin': 'win32',
  'darwin': 'darwin',
  'linux2': 'linux',
  'win32': 'win32',
}[sys.platform]

verbose_mode = False
def enable_verbose_mode():
  print 'Running in verbose mode'
  global verbose_mode
  verbose_mode = True

def is_verbose_mode():
  return verbose_mode
#!/usr/bin/env python

import platform
import sys

BASE_URL = 'http://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '6300862b4b16bd171f00ae566b697098c29743f7'

ARCH = {
  'cygwin': '32bit',
  'darwin': '64bit',
  'linux2': platform.architecture()[0],
  'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
  '32bit': 'ia32',
  '64bit': 'x64',
}[ARCH]

TARGET_PLATFORM = {
  'cygwin': 'win32',
  'darwin': 'darwin',
  'linux2': 'linux',
  'win32': 'win32',
}[sys.platform]

verbose_mode = False
def enable_verbose_mode():
  print 'Running in verbose mode'
  global verbose_mode
  verbose_mode = True

def is_verbose_mode():
  return verbose_mode
Upgrade libchromiumcontent to fix linking error
mac: Upgrade libchromiumcontent to fix linking error
Python
mit
brave/muon,chriskdon/electron,Gerhut/electron,twolfson/electron,icattlecoder/electron,the-ress/electron,bruce/electron,soulteary/electron,nekuz0r/electron,tylergibson/electron,Zagorakiss/electron,tomashanacek/electron,chrisswk/electron,destan/electron,roadev/electron,simongregory/electron,gbn972/electron,jiaz/electron,leolujuyi/electron,seanchas116/electron,baiwyc119/electron,GoooIce/electron,arturts/electron,bruce/electron,rhencke/electron,nicholasess/electron,tincan24/electron,John-Lin/electron,kcrt/electron,seanchas116/electron,mirrh/electron,destan/electron,fffej/electron,bwiggs/electron,chriskdon/electron,jaanus/electron,voidbridge/electron,webmechanicx/electron,joaomoreno/atom-shell,Andrey-Pavlov/electron,wolfflow/electron,mirrh/electron,mattdesl/electron,pandoraui/electron,Neron-X5/electron,bbondy/electron,sircharleswatson/electron,tinydew4/electron,jacksondc/electron,IonicaBizauKitchen/electron,GoooIce/electron,seanchas116/electron,jaanus/electron,webmechanicx/electron,JesselJohn/electron,simonfork/electron,wolfflow/electron,nekuz0r/electron,fffej/electron,dahal/electron,simongregory/electron,fritx/electron,bitemyapp/electron,rreimann/electron,MaxGraey/electron,kazupon/electron,sshiting/electron,shiftkey/electron,coderhaoxin/electron,leftstick/electron,lzpfmh/electron,stevekinney/electron,jacksondc/electron,dongjoon-hyun/electron,felixrieseberg/electron,preco21/electron,nicholasess/electron,kikong/electron,synaptek/electron,jsutcodes/electron,arusakov/electron,trigrass2/electron,kokdemo/electron,setzer777/electron,christian-bromann/electron,voidbridge/electron,gstack/infinium-shell,timruffles/electron,faizalpribadi/electron,jaanus/electron,aichingm/electron,brave/electron,shockone/electron,egoist/electron,michaelchiche/electron,hokein/atom-shell,minggo/electron,noikiy/electron,bwiggs/electron,MaxWhere/electron,LadyNaggaga/electron,aliib/electron,synaptek/electron,joneit/electron,Jonekee/electron,rajatsingla28/electron,iftekeriba/electron,jsutcodes/electron,dahal/electron,stevemao/electron,benweissmann/electron,kokdemo/electron,stevekinney/electron,abhishekgahlot/electron,John-Lin/electron,Faiz7412/electron,fomojola/electron,hokein/atom-shell,astoilkov/electron,mrwizard82d1/electron,kostia/electron,matiasinsaurralde/electron,maxogden/atom-shell,Jacobichou/electron,neutrous/electron,d-salas/electron,joaomoreno/atom-shell,astoilkov/electron,Ivshti/electron,xiruibing/electron,mattotodd/electron,stevemao/electron,tomashanacek/electron,nicobot/electron,anko/electron,bwiggs/electron,setzer777/electron,RobertJGabriel/electron,smczk/electron,MaxWhere/electron,digideskio/electron,jlhbaseball15/electron,jaanus/electron,biblerule/UMCTelnetHub,beni55/electron,gerhardberger/electron,meowlab/electron,zhakui/electron,jcblw/electron,deed02392/electron,renaesop/electron,wolfflow/electron,leethomas/electron,adamjgray/electron,Floato/electron,leftstick/electron,eric-seekas/electron,rajatsingla28/electron,BionicClick/electron,leethomas/electron,kokdemo/electron,gstack/infinium-shell,MaxWhere/electron,coderhaoxin/electron,saronwei/electron,robinvandernoord/electron,xfstudio/electron,darwin/electron,aichingm/electron,vipulroxx/electron,jonatasfreitasv/electron,d-salas/electron,LadyNaggaga/electron,chriskdon/electron,pirafrank/electron,Gerhut/electron,fritx/electron,mubassirhayat/electron,edulan/electron,egoist/electron,minggo/electron,brave/electron,minggo/electron,jsutcodes/electron,yalexx/electron,kazupon/electron,jsutcodes/electron,bright-sparks/electron,mrwizard82d1/electron,greyhwndz/electron,IonicaB
izauKitchen/electron,MaxWhere/electron,gstack/infinium-shell,vipulroxx/electron,tincan24/electron,leftstick/electron,howmuchcomputer/electron,BionicClick/electron,faizalpribadi/electron,voidbridge/electron,adcentury/electron,edulan/electron,renaesop/electron,synaptek/electron,noikiy/electron,stevekinney/electron,jannishuebl/electron,minggo/electron,eriser/electron,oiledCode/electron,leolujuyi/electron,nicobot/electron,eriser/electron,egoist/electron,Jonekee/electron,farmisen/electron,yalexx/electron,sshiting/electron,biblerule/UMCTelnetHub,aecca/electron,shiftkey/electron,leftstick/electron,mhkeller/electron,egoist/electron,RIAEvangelist/electron,mjaniszew/electron,aecca/electron,d-salas/electron,brave/electron,vaginessa/electron,icattlecoder/electron,bbondy/electron,destan/electron,xfstudio/electron,aliib/electron,bbondy/electron,joaomoreno/atom-shell,gamedevsam/electron,jjz/electron,gabrielPeart/electron,posix4e/electron,wan-qy/electron,lrlna/electron,robinvandernoord/electron,kostia/electron,DivyaKMenon/electron,robinvandernoord/electron,bpasero/electron,zhakui/electron,kazupon/electron,simongregory/electron,astoilkov/electron,tinydew4/electron,SufianHassan/electron,michaelchiche/electron,gbn972/electron,gerhardberger/electron,arturts/electron,digideskio/electron,cqqccqc/electron,posix4e/electron,MaxGraey/electron,Neron-X5/electron,joneit/electron,dkfiresky/electron,Ivshti/electron,thingsinjars/electron,yalexx/electron,Evercoder/electron,tonyganch/electron,wan-qy/electron,pandoraui/electron,thomsonreuters/electron,rhencke/electron,cos2004/electron,xfstudio/electron,fireball-x/atom-shell,jlord/electron,kostia/electron,fireball-x/atom-shell,shiftkey/electron,wan-qy/electron,arusakov/electron,medixdev/electron,kostia/electron,deepak1556/atom-shell,neutrous/electron,bobwol/electron,gerhardberger/electron,lrlna/electron,jjz/electron,synaptek/electron,systembugtj/electron,the-ress/electron,mirrh/electron,sky7sea/electron,bpasero/electron,mhkeller/electron,faizalpribadi/electron,wolfflow/electron,takashi/electron,natgolov/electron,kenmozi/electron,jacksondc/electron,digideskio/electron,brenca/electron,vHanda/electron,Jonekee/electron,jlord/electron,Andrey-Pavlov/electron,dongjoon-hyun/electron,beni55/electron,Rokt33r/electron,aaron-goshine/electron,icattlecoder/electron,felixrieseberg/electron,soulteary/electron,simonfork/electron,stevemao/electron,coderhaoxin/electron,adcentury/electron,rsvip/electron,mjaniszew/electron,oiledCode/electron,gamedevsam/electron,d-salas/electron,timruffles/electron,yan-foto/electron,natgolov/electron,egoist/electron,farmisen/electron,lzpfmh/electron,bitemyapp/electron,twolfson/electron,posix4e/electron,davazp/electron,mattotodd/electron,biblerule/UMCTelnetHub,matiasinsaurralde/electron,jannishuebl/electron,chriskdon/electron,anko/electron,SufianHassan/electron,davazp/electron,joneit/electron,SufianHassan/electron,natgolov/electron,ianscrivener/electron,webmechanicx/electron,nekuz0r/electron,christian-bromann/electron,miniak/electron,yalexx/electron,ianscrivener/electron,fireball-x/atom-shell,synaptek/electron,neutrous/electron,lrlna/electron,etiktin/electron,mubassirhayat/electron,sky7sea/electron,jtburke/electron,d-salas/electron,mhkeller/electron,kokdemo/electron,electron/electron,subblue/electron,destan/electron,leethomas/electron,eric-seekas/electron,deed02392/electron,yan-foto/electron,astoilkov/electron,Zagorakiss/electron,zhakui/electron,smczk/electron,beni55/electron,meowlab/electron,bbondy/electron,GoooIce/electron,hokein/atom-shell,bobwol/electron,arusak
ov/electron,aaron-goshine/electron,mattotodd/electron,shennushi/electron,bright-sparks/electron,MaxGraey/electron,shaundunne/electron,ervinb/electron,gstack/infinium-shell,Jacobichou/electron,aliib/electron,bitemyapp/electron,carsonmcdonald/electron,sky7sea/electron,JesselJohn/electron,sircharleswatson/electron,jonatasfreitasv/electron,fffej/electron,jonatasfreitasv/electron,mirrh/electron,MaxGraey/electron,ankitaggarwal011/electron,RobertJGabriel/electron,robinvandernoord/electron,Floato/electron,xfstudio/electron,rreimann/electron,thomsonreuters/electron,eriser/electron,coderhaoxin/electron,deed02392/electron,xiruibing/electron,aaron-goshine/electron,lzpfmh/electron,edulan/electron,eriser/electron,edulan/electron,mjaniszew/electron,baiwyc119/electron,rreimann/electron,simongregory/electron,fritx/electron,jlhbaseball15/electron,cqqccqc/electron,fffej/electron,zhakui/electron,gamedevsam/electron,evgenyzinoviev/electron,Zagorakiss/electron,deepak1556/atom-shell,aichingm/electron,gabrielPeart/electron,aaron-goshine/electron,natgolov/electron,gamedevsam/electron,brave/electron,arturts/electron,stevekinney/electron,aichingm/electron,sshiting/electron,zhakui/electron,pombredanne/electron,Rokt33r/electron,thompsonemerson/electron,jlhbaseball15/electron,leethomas/electron,maxogden/atom-shell,ankitaggarwal011/electron,neutrous/electron,nicholasess/electron,thompsonemerson/electron,darwin/electron,subblue/electron,jiaz/electron,trankmichael/electron,joaomoreno/atom-shell,icattlecoder/electron,ianscrivener/electron,bitemyapp/electron,adamjgray/electron,ankitaggarwal011/electron,etiktin/electron,brenca/electron,jiaz/electron,jlord/electron,seanchas116/electron,electron/electron,ankitaggarwal011/electron,jjz/electron,subblue/electron,jonatasfreitasv/electron,greyhwndz/electron,anko/electron,takashi/electron,voidbridge/electron,fffej/electron,bruce/electron,arusakov/electron,GoooIce/electron,adcentury/electron,egoist/electron,micalan/electron,JussMee15/electron,roadev/electron,aichingm/electron,michaelchiche/electron,bitemyapp/electron,darwin/electron,xfstudio/electron,SufianHassan/electron,Neron-X5/electron,Evercoder/electron,bruce/electron,trankmichael/electron,evgenyzinoviev/electron,chrisswk/electron,lzpfmh/electron,rreimann/electron,farmisen/electron,thomsonreuters/electron,jcblw/electron,Floato/electron,destan/electron,setzer777/electron,lrlna/electron,pandoraui/electron,tylergibson/electron,GoooIce/electron,etiktin/electron,Jonekee/electron,eric-seekas/electron,BionicClick/electron,jhen0409/electron,renaesop/electron,stevemao/electron,miniak/electron,Gerhut/electron,webmechanicx/electron,leftstick/electron,rajatsingla28/electron,pandoraui/electron,tylergibson/electron,Andrey-Pavlov/electron,nagyistoce/electron-atom-shell,vaginessa/electron,Jacobichou/electron,Ivshti/electron,brave/muon,shockone/electron,oiledCode/electron,LadyNaggaga/electron,edulan/electron,miniak/electron,simonfork/electron,leolujuyi/electron,ervinb/electron,posix4e/electron,deepak1556/atom-shell,beni55/electron,seanchas116/electron,setzer777/electron,baiwyc119/electron,kikong/electron,DivyaKMenon/electron,jonatasfreitasv/electron,JussMee15/electron,roadev/electron,leolujuyi/electron,destan/electron,bobwol/electron,saronwei/electron,brenca/electron,rsvip/electron,deed02392/electron,kokdemo/electron,mhkeller/electron,Jacobichou/electron,maxogden/atom-shell,kcrt/electron,preco21/electron,kenmozi/electron,fomojola/electron,jlhbaseball15/electron,gabriel/electron,IonicaBizauKitchen/electron,RobertJGabriel/electron,dkfiresky/electro
n,lzpfmh/electron,IonicaBizauKitchen/electron,yalexx/electron,michaelchiche/electron,Andrey-Pavlov/electron,trigrass2/electron,tomashanacek/electron,pombredanne/electron,nekuz0r/electron,DivyaKMenon/electron,fomojola/electron,simonfork/electron,xfstudio/electron,tinydew4/electron,tinydew4/electron,baiwyc119/electron,trigrass2/electron,wan-qy/electron,ervinb/electron,SufianHassan/electron,stevekinney/electron,rsvip/electron,lzpfmh/electron,rhencke/electron,kenmozi/electron,xiruibing/electron,micalan/electron,mrwizard82d1/electron,RobertJGabriel/electron,leolujuyi/electron,RIAEvangelist/electron,shaundunne/electron,fabien-d/electron,voidbridge/electron,bwiggs/electron,John-Lin/electron,jtburke/electron,simonfork/electron,cos2004/electron,evgenyzinoviev/electron,faizalpribadi/electron,gbn972/electron,mrwizard82d1/electron,electron/electron,shiftkey/electron,carsonmcdonald/electron,kikong/electron,brenca/electron,kcrt/electron,mhkeller/electron,gamedevsam/electron,thompsonemerson/electron,kazupon/electron,xiruibing/electron,jcblw/electron,jlord/electron,joneit/electron,beni55/electron,tincan24/electron,brave/electron,matiasinsaurralde/electron,gbn972/electron,tonyganch/electron,saronwei/electron,gabrielPeart/electron,etiktin/electron,shockone/electron,rajatsingla28/electron,icattlecoder/electron,shiftkey/electron,the-ress/electron,sircharleswatson/electron,shaundunne/electron,tonyganch/electron,nagyistoce/electron-atom-shell,arusakov/electron,chrisswk/electron,webmechanicx/electron,kazupon/electron,ervinb/electron,meowlab/electron,gerhardberger/electron,shennushi/electron,mjaniszew/electron,the-ress/electron,christian-bromann/electron,Rokt33r/electron,RobertJGabriel/electron,matiasinsaurralde/electron,pirafrank/electron,bruce/electron,RIAEvangelist/electron,adamjgray/electron,miniak/electron,cos2004/electron,jiaz/electron,JesselJohn/electron,brave/electron,maxogden/atom-shell,Faiz7412/electron,iftekeriba/electron,dahal/electron,stevemao/electron,adamjgray/electron,evgenyzinoviev/electron,leethomas/electron,nicobot/electron,zhakui/electron,kcrt/electron,yan-foto/electron,BionicClick/electron,shennushi/electron,RIAEvangelist/electron,abhishekgahlot/electron,tonyganch/electron,vHanda/electron,nicholasess/electron,BionicClick/electron,eriser/electron,maxogden/atom-shell,wan-qy/electron,carsonmcdonald/electron,jhen0409/electron,rsvip/electron,aichingm/electron,jannishuebl/electron,Jacobichou/electron,jtburke/electron,voidbridge/electron,kenmozi/electron,vaginessa/electron,oiledCode/electron,shockone/electron,LadyNaggaga/electron,Zagorakiss/electron,thomsonreuters/electron,nicobot/electron,cos2004/electron,MaxWhere/electron,noikiy/electron,christian-bromann/electron,mattotodd/electron,anko/electron,Gerhut/electron,felixrieseberg/electron,Floato/electron,saronwei/electron,thingsinjars/electron,trankmichael/electron,oiledCode/electron,timruffles/electron,mjaniszew/electron,rreimann/electron,gstack/infinium-shell,neutrous/electron,Floato/electron,benweissmann/electron,takashi/electron,micalan/electron,brave/muon,hokein/atom-shell,vaginessa/electron,pirafrank/electron,vHanda/electron,sircharleswatson/electron,jjz/electron,greyhwndz/electron,tincan24/electron,yalexx/electron,roadev/electron,gabriel/electron,Faiz7412/electron,takashi/electron,kcrt/electron,jsutcodes/electron,chrisswk/electron,greyhwndz/electron,tylergibson/electron,roadev/electron,chriskdon/electron,pirafrank/electron,jhen0409/electron,jtburke/electron,mirrh/electron,Neron-X5/electron,faizalpribadi/electron,bobwol/electron,shockone/electro
n,shaundunne/electron,Rokt33r/electron,shiftkey/electron,ervinb/electron,sky7sea/electron,setzer777/electron,John-Lin/electron,gabriel/electron,evgenyzinoviev/electron,icattlecoder/electron,bwiggs/electron,vaginessa/electron,cos2004/electron,medixdev/electron,twolfson/electron,hokein/atom-shell,deed02392/electron,posix4e/electron,aliib/electron,mrwizard82d1/electron,gerhardberger/electron,Evercoder/electron,dongjoon-hyun/electron,Jonekee/electron,pandoraui/electron,timruffles/electron,twolfson/electron,Ivshti/electron,benweissmann/electron,thingsinjars/electron,bbondy/electron,joaomoreno/atom-shell,cqqccqc/electron,bitemyapp/electron,abhishekgahlot/electron,BionicClick/electron,bruce/electron,jhen0409/electron,iftekeriba/electron,mubassirhayat/electron,miniak/electron,smczk/electron,gbn972/electron,vipulroxx/electron,davazp/electron,digideskio/electron,dkfiresky/electron,nagyistoce/electron-atom-shell,simongregory/electron,d-salas/electron,nicobot/electron,jhen0409/electron,Evercoder/electron,vHanda/electron,eric-seekas/electron,jsutcodes/electron,sky7sea/electron,thingsinjars/electron,howmuchcomputer/electron,iftekeriba/electron,bright-sparks/electron,Andrey-Pavlov/electron,posix4e/electron,electron/electron,bpasero/electron,twolfson/electron,fireball-x/atom-shell,kokdemo/electron,jiaz/electron,greyhwndz/electron,meowlab/electron,John-Lin/electron,chriskdon/electron,farmisen/electron,micalan/electron,nekuz0r/electron,fabien-d/electron,nicholasess/electron,rsvip/electron,LadyNaggaga/electron,fabien-d/electron,aecca/electron,cqqccqc/electron,tinydew4/electron,natgolov/electron,rajatsingla28/electron,fomojola/electron,brave/muon,bpasero/electron,kcrt/electron,smczk/electron,bobwol/electron,eric-seekas/electron,gabrielPeart/electron,sshiting/electron,Neron-X5/electron,twolfson/electron,renaesop/electron,ianscrivener/electron,adcentury/electron,kenmozi/electron,synaptek/electron,gamedevsam/electron,rhencke/electron,Evercoder/electron,jaanus/electron,aecca/electron,howmuchcomputer/electron,davazp/electron,iftekeriba/electron,electron/electron,aliib/electron,joneit/electron,trigrass2/electron,dkfiresky/electron,takashi/electron,biblerule/UMCTelnetHub,medixdev/electron,sky7sea/electron,fffej/electron,bobwol/electron,jlord/electron,benweissmann/electron,fabien-d/electron,thompsonemerson/electron,gabriel/electron,simonfork/electron,xiruibing/electron,jcblw/electron,mattotodd/electron,John-Lin/electron,fabien-d/electron,electron/electron,ianscrivener/electron,digideskio/electron,pombredanne/electron,stevemao/electron,davazp/electron,jhen0409/electron,edulan/electron,joaomoreno/atom-shell,mrwizard82d1/electron,carsonmcdonald/electron,Rokt33r/electron,bpasero/electron,aaron-goshine/electron,electron/electron,Jonekee/electron,benweissmann/electron,gabriel/electron,mhkeller/electron,bright-sparks/electron,JesselJohn/electron,rreimann/electron,kikong/electron,astoilkov/electron,christian-bromann/electron,soulteary/electron,coderhaoxin/electron,Ivshti/electron,mattotodd/electron,shennushi/electron,pombredanne/electron,eric-seekas/electron,DivyaKMenon/electron,ervinb/electron,nicholasess/electron,jjz/electron,JussMee15/electron,jiaz/electron,mubassirhayat/electron,thomsonreuters/electron,anko/electron,GoooIce/electron,davazp/electron,arturts/electron,leolujuyi/electron,etiktin/electron,DivyaKMenon/electron,stevekinney/electron,wan-qy/electron,jacksondc/electron,mattdesl/electron,Zagorakiss/electron,robinvandernoord/electron,rajatsingla28/electron,brave/muon,tincan24/electron,brave/muon,gabrielPeart/electron
,jonatasfreitasv/electron,yan-foto/electron,thingsinjars/electron,mjaniszew/electron,setzer777/electron,simongregory/electron,kenmozi/electron,biblerule/UMCTelnetHub,pirafrank/electron,yan-foto/electron,felixrieseberg/electron,smczk/electron,medixdev/electron,bright-sparks/electron,kostia/electron,leftstick/electron,Neron-X5/electron,carsonmcdonald/electron,JussMee15/electron,ianscrivener/electron,oiledCode/electron,jcblw/electron,etiktin/electron,fireball-x/atom-shell,meowlab/electron,Floato/electron,RobertJGabriel/electron,gbn972/electron,nicobot/electron,noikiy/electron,jlhbaseball15/electron,miniak/electron,pirafrank/electron,tonyganch/electron,adamjgray/electron,preco21/electron,tomashanacek/electron,Rokt33r/electron,bwiggs/electron,gabrielPeart/electron,dongjoon-hyun/electron,iftekeriba/electron,xiruibing/electron,jtburke/electron,deepak1556/atom-shell,mattdesl/electron,evgenyzinoviev/electron,jannishuebl/electron,medixdev/electron,saronwei/electron,Andrey-Pavlov/electron,seanchas116/electron,joneit/electron,vipulroxx/electron,Evercoder/electron,tonyganch/electron,shennushi/electron,Zagorakiss/electron,yan-foto/electron,astoilkov/electron,MaxGraey/electron,systembugtj/electron,sshiting/electron,thompsonemerson/electron,thompsonemerson/electron,howmuchcomputer/electron,dongjoon-hyun/electron,aliib/electron,michaelchiche/electron,Gerhut/electron,carsonmcdonald/electron,ankitaggarwal011/electron,the-ress/electron,tinydew4/electron,bpasero/electron,vipulroxx/electron,shaundunne/electron,deed02392/electron,greyhwndz/electron,jjz/electron,jacksondc/electron,DivyaKMenon/electron,Jacobichou/electron,subblue/electron,webmechanicx/electron,brenca/electron,timruffles/electron,micalan/electron,robinvandernoord/electron,adcentury/electron,rhencke/electron,baiwyc119/electron,Faiz7412/electron,jlhbaseball15/electron,matiasinsaurralde/electron,faizalpribadi/electron,systembugtj/electron,nagyistoce/electron-atom-shell,vaginessa/electron,coderhaoxin/electron,jannishuebl/electron,soulteary/electron,lrlna/electron,roadev/electron,baiwyc119/electron,RIAEvangelist/electron,fritx/electron,preco21/electron,micalan/electron,beni55/electron,ankitaggarwal011/electron,dahal/electron,mattdesl/electron,SufianHassan/electron,trigrass2/electron,shennushi/electron,howmuchcomputer/electron,the-ress/electron,biblerule/UMCTelnetHub,kikong/electron,preco21/electron,howmuchcomputer/electron,soulteary/electron,JussMee15/electron,neutrous/electron,farmisen/electron,adamjgray/electron,saronwei/electron,smczk/electron,dongjoon-hyun/electron,lrlna/electron,IonicaBizauKitchen/electron,nagyistoce/electron-atom-shell,jcblw/electron,kazupon/electron,sshiting/electron,brenca/electron,Faiz7412/electron,medixdev/electron,thomsonreuters/electron,fomojola/electron,MaxWhere/electron,systembugtj/electron,chrisswk/electron,aecca/electron,dkfiresky/electron,sircharleswatson/electron,digideskio/electron,pandoraui/electron,mattdesl/electron,shaundunne/electron,fritx/electron,tomashanacek/electron,bright-sparks/electron,cos2004/electron,thingsinjars/electron,tylergibson/electron,Gerhut/electron,minggo/electron,abhishekgahlot/electron,dahal/electron,abhishekgahlot/electron,jacksondc/electron,arusakov/electron,RIAEvangelist/electron,wolfflow/electron,vHanda/electron,anko/electron,matiasinsaurralde/electron,natgolov/electron,cqqccqc/electron,IonicaBizauKitchen/electron,gerhardberger/electron,subblue/electron,felixrieseberg/electron,bpasero/electron,bbondy/electron,jaanus/electron,trankmichael/electron,christian-bromann/electron,dkfiresky/electro
n,soulteary/electron,pombredanne/electron,aaron-goshine/electron,mirrh/electron,eriser/electron,subblue/electron,vipulroxx/electron,michaelchiche/electron,tincan24/electron,deepak1556/atom-shell,noikiy/electron,gerhardberger/electron,noikiy/electron,preco21/electron,gabriel/electron,fritx/electron,vHanda/electron,cqqccqc/electron,the-ress/electron,pombredanne/electron,JesselJohn/electron,trankmichael/electron,rhencke/electron,arturts/electron,trigrass2/electron,takashi/electron,JesselJohn/electron,trankmichael/electron,JussMee15/electron,dahal/electron,renaesop/electron,nekuz0r/electron,mubassirhayat/electron,jannishuebl/electron,abhishekgahlot/electron,darwin/electron,felixrieseberg/electron,wolfflow/electron,darwin/electron,jtburke/electron,farmisen/electron,systembugtj/electron,LadyNaggaga/electron,fomojola/electron,shockone/electron,meowlab/electron,renaesop/electron,tylergibson/electron,leethomas/electron,aecca/electron,mattdesl/electron,sircharleswatson/electron,tomashanacek/electron,benweissmann/electron,arturts/electron,minggo/electron,kostia/electron,adcentury/electron,systembugtj/electron
--- 
+++ 
@@ -4,7 +4,7 @@
 import sys

 BASE_URL = 'http://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
-LIBCHROMIUMCONTENT_COMMIT = '17a0e24666d0198810752284690bc2d0d87094d7'
+LIBCHROMIUMCONTENT_COMMIT = '6300862b4b16bd171f00ae566b697098c29743f7'

 ARCH = {
   'cygwin': '32bit',
4146be648f04ed409eb82e43528bc700751ef03c
src/qtlayoutbuilder/builderror_test.py
src/qtlayoutbuilder/builderror_test.py
from unittest import TestCase


class TestBuildError(TestCase):
    def test_push_message(self):
        self.fail()

    def test_format_as_single_string(self):
        self.faildoo()
from unittest import TestCase
from builderror import BuildError


class TestBuildError(TestCase):
    def test_that_multiple_pushed_messages_are_formatted_properly_when_asked_for(self):
        err = BuildError()
        err.push_message('message about error details')
        err.push_message('message about error context')
        formatted_message = err.format_as_single_string()
        self.assertEquals(formatted_message, 'message about error context\nmessage about error details')
Put in first proper tiny package class and unit test.
Put in first proper tiny package class and unit test.
Python
mit
peterhoward42/qt-layout-gen
--- 
+++ 
@@ -1,9 +1,13 @@
 from unittest import TestCase
+from builderror import BuildError


 class TestBuildError(TestCase):
-    def test_push_message(self):
-        self.fail()

-    def test_format_as_single_string(self):
-        self.faildoo()
+    def test_that_multiple_pushed_messages_are_formatted_properly_when_asked_for(self):
+        err = BuildError()
+        err.push_message('message about error details')
+        err.push_message('message about error context')
+        formatted_message = err.format_as_single_string()
+        self.assertEquals(formatted_message, 'message about error context\nmessage about error details')
+
1713cf8553d7f21d1192ed58138ecf7875c4b181
icebergsdk/front_modules.py
icebergsdk/front_modules.py
# -*- coding: utf-8 -*-
import logging

from icebergsdk.mixins.request_mixin import IcebergRequestBase

logger = logging.getLogger('icebergsdk.frontmodules')

class FrontModules(IcebergRequestBase):
    cache_key = "icebergsdk:frontmodule:data"
    cache_expire = 60*60 # one hour

    def __init__(self, *args, **kwargs):
        super(FrontModules, self).__init__(*args, **kwargs)
        self.cache = kwargs.get('cache', None)
        self.lang = kwargs.get('lang', "en")


    def get_module_data(self, module_name):
        return self.modules_data['modules'][module_name]

    ####
    # Loader
    ####
    @property
    def modules_data(self):
        """
        Helper to fetch Iceberg client side javascript templates
        """
        if hasattr(self, "_modules_data"):
            return getattr(self, "_modules_data")

        if self.cache:
            data = self.cache.get("%s:%s" % (self.cache_key, self.lang), False)
            if data:
                setattr(self, '_modules_data', data)
                return data

        data = self.request(self.conf.ICEBERG_MODULES_URL) # Do to, add lang
        setattr(self, '_modules_data', data)
        if self.cache:
            self.cache.set("%s:%s" % (self.cache_key, self.lang), data, self.cache_expire)

        return data
# -*- coding: utf-8 -*-
import logging

from icebergsdk.mixins.request_mixin import IcebergRequestBase

logger = logging.getLogger('icebergsdk.frontmodules')

class FrontModules(IcebergRequestBase):
    cache_key = "icebergsdk:frontmodule:data"
    cache_expire = 60*60 # one hour

    def __init__(self, *args, **kwargs):
        super(FrontModules, self).__init__(*args, **kwargs)
        self.cache = kwargs.get('cache', None)
        self.lang = kwargs.get('lang', "en")
        self.debug = kwargs.get('debug', False)


    def get_module_data(self, module_name):
        return self.modules_data['modules'][module_name]

    ####
    # Loader
    ####
    @property
    def modules_data(self):
        """
        Helper to fetch Iceberg client side javascript templates
        """
        if hasattr(self, "_modules_data"):
            return getattr(self, "_modules_data")

        if self.cache:
            data = self.cache.get("%s:%s" % (self.cache_key, self.lang), False)
            if data:
                setattr(self, '_modules_data', data)
                return data

        data = self.request(self.conf.ICEBERG_MODULES_URL, args = {
            "lang": self.lang,
            "enviro": self.conf.ICEBERG_ENV,
            "debug": self.debug
        }) # Do to, add lang
        setattr(self, '_modules_data', data)
        if self.cache:
            self.cache.set("%s:%s" % (self.cache_key, self.lang), data, self.cache_expire)

        return data
Add lang, enviro in request
Add lang, enviro in request
Python
mit
izberg-marketplace/izberg-api-python,Iceberg-Marketplace/Iceberg-API-PYTHON
--- 
+++ 
@@ -13,6 +13,7 @@
         super(FrontModules, self).__init__(*args, **kwargs)
         self.cache = kwargs.get('cache', None)
         self.lang = kwargs.get('lang', "en")
+        self.debug = kwargs.get('debug', False)


     def get_module_data(self, module_name):
@@ -35,7 +36,11 @@
                 setattr(self, '_modules_data', data)
                 return data

-        data = self.request(self.conf.ICEBERG_MODULES_URL) # Do to, add lang
+        data = self.request(self.conf.ICEBERG_MODULES_URL, args = {
+            "lang": self.lang,
+            "enviro": self.conf.ICEBERG_ENV,
+            "debug": self.debug
+        }) # Do to, add lang
         setattr(self, '_modules_data', data)
         if self.cache:
             self.cache.set("%s:%s" % (self.cache_key, self.lang), data, self.cache_expire)
adf747998641b1aeb75feada25470aa2a072bd37
examples/test-mh/policies/participant_3.py
examples/test-mh/policies/participant_3.py
{
    "inbound": [
        {
            "cookie": 1,

            "match": {
                "tcp_dst": 4321
            },

            "action": {
                "fwd": 0
            }
        },
        {
            "cookie": 2,

            "match": {
                "tcp_dst": 4322
            },

            "action": {
                "fwd": 1
            }
        },
        {
            "cookie": 3,

            "match": {
                "tcp_dst": 4323
            },

            "action": {
                "drop": 0
            }
        }
    ]
}
{
    "inbound": [
        {
            "cookie": 1,

            "match": {
                "tcp_dst": 4321
            },

            "action": {
                "fwd": 0
            }
        },
        {
            "cookie": 2,

            "match": {
                "tcp_dst": 4322
            },

            "action": {
                "fwd": 1
            }
        },
        {
            "cookie": 3,

            "match": {
                "eth_src": '08:00:27:89:3b:9f'
            },

            "action": {
                "drop": 0
            }
        }
    ]
}
Add inbound drop policy for participant 3 based on eth_src of participant 1
Add inbound drop policy for participant 3 based on eth_src of participant 1
Python
apache-2.0
h2020-endeavour/endeavour,h2020-endeavour/endeavour
--- 
+++ 
@@ -26,7 +26,7 @@
             "cookie": 3,

             "match": {
-                "tcp_dst": 4323
+                "eth_src": '08:00:27:89:3b:9f'
             },

             "action": {
626345a10ee3d0aa90b7791e88c9b09544bdfa88
daiquiri/files/views.py
daiquiri/files/views.py
import logging

from django.contrib.auth.views import redirect_to_login
from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.views.generic import View

from .utils import file_exists, get_directory, render_with_layout, send_file

logger = logging.getLogger(__name__)


class FileView(View):

    def get(self, request, file_path):
        # append 'index.html' when the file_path is a directory
        if not file_path or file_path.endswith('/'):
            file_path += 'index.html'

        if not file_exists(file_path):
            logger.debug('%s not found', file_path)
            raise Http404

        directory = get_directory(request.user, file_path)
        if directory is None:
            logger.debug('%s if forbidden', file_path)
            if request.user.is_authenticated:
                raise PermissionDenied
            else:
                return redirect_to_login(request.path_info)

        if directory.layout:
            return render_with_layout(request, file_path)
        else:
            return send_file(request, file_path)
import logging

from django.contrib.auth.views import redirect_to_login
from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.views.generic import View

from .utils import file_exists, get_directory, render_with_layout, send_file

logger = logging.getLogger(__name__)


class FileView(View):

    def get(self, request, file_path):
        # append 'index.html' when the file_path is a directory
        if not file_path or file_path.endswith('/'):
            file_path += 'index.html'

        if not file_exists(file_path):
            logger.debug('%s not found', file_path)
            raise Http404

        directory = get_directory(request.user, file_path)
        if directory is None:
            logger.debug('%s if forbidden', file_path)
            if request.user.is_authenticated:
                raise PermissionDenied
            else:
                return redirect_to_login(request.path_info)

        if file_path.endswith('.html') and directory.layout:
            return render_with_layout(request, file_path)
        else:
            return send_file(request, file_path)
Remove layout for non-html files
Remove layout for non-html files
Python
apache-2.0
aipescience/django-daiquiri,aipescience/django-daiquiri,aipescience/django-daiquiri
--- 
+++ 
@@ -29,7 +29,7 @@
             else:
                 return redirect_to_login(request.path_info)

-        if directory.layout:
+        if file_path.endswith('.html') and directory.layout:
             return render_with_layout(request, file_path)
         else:
             return send_file(request, file_path)
b71f076bbb745764d6ed5724d494e878a8fbd785
syncplay/__init__.py
syncplay/__init__.py
version = '1.6.5'
revision = ' development'
milestone = 'Yoitsu'
release_number = '85'
projectURL = 'https://syncplay.pl/'
version = '1.6.5'
revision = ' release'
milestone = 'Yoitsu'
release_number = '86'
projectURL = 'https://syncplay.pl/'
Mark build 86 and v1.6.5 release
Mark build 86 and v1.6.5 release
Python
apache-2.0
Syncplay/syncplay,Syncplay/syncplay,alby128/syncplay,alby128/syncplay
--- 
+++ 
@@ -1,5 +1,5 @@
 version = '1.6.5'
-revision = ' development'
+revision = ' release'
 milestone = 'Yoitsu'
-release_number = '85'
+release_number = '86'
 projectURL = 'https://syncplay.pl/'
b5206de1d6a99cd4a22f1ef5681e7964a7289b2e
debug_toolbar_multilang/pseudo/expander_pseudo_language.py
debug_toolbar_multilang/pseudo/expander_pseudo_language.py
from django.utils import six

from debug_toolbar_multilang.pseudo import STR_FORMAT_PATTERN, \
    STR_FORMAT_NAMED_PATTERN
from debug_toolbar_multilang.pseudo.pseudo_language import PseudoLanguage


class ExpanderPseudoLanguage(PseudoLanguage):
    """
    Pseudo Language for expanding the strings. This is useful for verifying
    that the message still fits on the screen. Remember that some words are
    much more longer in other languages than in English. For instance,
    German words that 30% more space in average.
    """

    def make_pseudo(self, message):
        # message without %s or {} in it.
        # {test} or %(test)s is allowed, though.
        safeMessage = list(message)

        # find every matching string
        for match in reversed(list(STR_FORMAT_PATTERN.finditer(message))):
            # Check if string uses the "named format".
            # If not, the string will be replaced and saved
            # into safeMessage
            if not STR_FORMAT_NAMED_PATTERN.match(match.group()):
                start, end = match.span()
                safeMessage[start:end] = "???"

        # create complete message by using the original, appending
        # a space and finally converting the safeMessage to a string
        # again.
        return six.u("%s %s" % (message, "".join(safeMessage)))

    def language(self):
        return "pse-expander"

    @property
    def name(self):
        return "Pseudo-Expander Language"
from django.utils import six

from debug_toolbar_multilang.pseudo import STR_FORMAT_PATTERN, \
    STR_FORMAT_NAMED_PATTERN
from debug_toolbar_multilang.pseudo.pseudo_language import PseudoLanguage


class ExpanderPseudoLanguage(PseudoLanguage):
    """
    Pseudo Language for expanding the strings. This is useful for verifying
    that the message still fits on the screen. Remember that some words are
    much more longer in other languages than in English. For instance,
    German words that 30% more space in average.
    """

    def make_pseudo(self, message):
        # message without %s or {} in it.
        # {test} or %(test)s is allowed, though.
        safeMessage = list(message)

        # find every matching string
        for match in reversed(list(STR_FORMAT_PATTERN.finditer(message))):
            # Check if string uses the "named format".
            # If not, the string will be replaced and saved
            # into safeMessage
            if not STR_FORMAT_NAMED_PATTERN.match(match.group()):
                start, end = match.span()
                safeMessage[start:end] = "???"

        # create complete message by using the original, appending
        # a space and finally converting the safeMessage to a string
        # again.
        return "%s %s" % (message, "".join(safeMessage))

    def language(self):
        return "pse-expander"

    @property
    def name(self):
        return "Pseudo-Expander Language"
Fix python 2 unicode issue.
Fix python 2 unicode issue.
Python
mit
Matt3o12/django-debug-toolbar-multilang,Matt3o12/django-debug-toolbar-multilang
--- 
+++ 
@@ -30,7 +30,7 @@
         # create complete message by using the original, appending
         # a space and finally converting the safeMessage to a string
         # again.
-        return six.u("%s %s" % (message, "".join(safeMessage)))
+        return "%s %s" % (message, "".join(safeMessage))

     def language(self):
         return "pse-expander"
c27010a3d5265d9eb783f627adca7cb0c25dcb9a
ctypeslib/test/stdio.py
ctypeslib/test/stdio.py
import os
from ctypeslib.dynamic_module import include
from ctypes import *
import logging
logging.basicConfig(level=logging.INFO)

if os.name == "nt":
    _libc = CDLL("msvcrt")
else:
    _libc = CDLL(None)

include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
""", persist=False)
import os
from ctypeslib.dynamic_module import include
from ctypes import *

if os.name == "nt":
    _libc = CDLL("msvcrt")
else:
    _libc = CDLL(None)

include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
""", persist=False)
Remove the logging setup call.
Remove the logging setup call.
Python
mit
sugarmanz/ctypeslib
--- 
+++ 
@@ -1,8 +1,6 @@
 import os
 from ctypeslib.dynamic_module import include
 from ctypes import *
-import logging
-logging.basicConfig(level=logging.INFO)

 if os.name == "nt":
     _libc = CDLL("msvcrt")
5d0541f5b5b8cc18b2e3f86b237c01ed915d5c0a
dhcp2nest/util.py
dhcp2nest/util.py
""" Utility functions for dhcp2nest """ from queue import Queue from subprocess import Popen, PIPE from threading import Thread def follow_file(fn, max_lines=100): """ Return a Queue that is fed lines (up to max_lines) from the given file (fn) continuously The implementation given here was inspired by http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python """ fq = Queue(maxsize=max_lines) # Declare the helper routine def _follow_file_thread(fn, fq): # Use system tail with name-based following and retry p = Popen(["tail", "-F", fn], stdout=PIPE) # Loop forever on pulling data from tail line = True while line: line = p.stdout.readline() fq.put(line) # Spawn a thread to read data from tail Thread(target=_follow_file_thread, args=(fn, fq)).start() # Return the queue return fq
""" Utility functions for dhcp2nest """ from queue import Queue from subprocess import Popen, PIPE from threading import Thread def follow_file(fn, max_lines=100): """ Return a Queue that is fed lines (up to max_lines) from the given file (fn) continuously The implementation given here was inspired by http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python """ fq = Queue(maxsize=max_lines) # Declare the helper routine def _follow_file_thread(fn, fq): # Use system tail with name-based following and retry p = Popen(["tail", "-F", fn], stdout=PIPE) # Loop forever on pulling data from tail line = True while line: line = p.stdout.readline().decode('utf-8') fq.put(line) # Spawn a thread to read data from tail Thread(target=_follow_file_thread, args=(fn, fq)).start() # Return the queue return fq
Make sure that follow-file decodes utf-8 from its input
Make sure that follow-file decodes utf-8 from its input Signed-off-by: Jason Bernardino Alonso <f71c42a1353bbcdbe07e24c2a1c893f8ea1d05ee@hackorp.com>
Python
mit
jbalonso/dhcp2nest
--- 
+++ 
@@ -24,7 +24,7 @@
         # Loop forever on pulling data from tail
         line = True
         while line:
-            line = p.stdout.readline()
+            line = p.stdout.readline().decode('utf-8')
             fq.put(line)

    # Spawn a thread to read data from tail
cb75a7ad69b273a57d2b10378712388f179abca3
pande_gas/features/tests/test_fingerprints.py
pande_gas/features/tests/test_fingerprints.py
""" Test topological fingerprints. """ import unittest from rdkit import Chem from pande_gas.features import fingerprints as fp class TestCircularFingerprint(unittest.TestCase): """ Tests for CircularFingerprint. """ def setUp(self): """ Set up tests. """ smiles = 'CC(=O)OC1=CC=CC=C1C(=O)O' self.mol = Chem.MolFromSmiles(smiles) self.engine = fp.CircularFingerprint() def test_circular_fingerprints(self): """ Test CircularFingerprint. """ rval = self.engine([self.mol]) assert rval.shape == (1, self.engine.size) def test_sparse_circular_fingerprints(self): """ Test CircularFingerprint with sparse encoding. """ self.engine = fp.CircularFingerprint(sparse=True) rval = self.engine([self.mol]) assert rval.shape == (1,) assert isinstance(rval[0], dict) assert len(rval[0])
""" Test topological fingerprints. """ import unittest from rdkit import Chem from pande_gas.features import fingerprints as fp class TestCircularFingerprint(unittest.TestCase): """ Tests for CircularFingerprint. """ def setUp(self): """ Set up tests. """ smiles = 'CC(=O)OC1=CC=CC=C1C(=O)O' self.mol = Chem.MolFromSmiles(smiles) self.engine = fp.CircularFingerprint() def test_circular_fingerprints(self): """ Test CircularFingerprint. """ rval = self.engine([self.mol]) assert rval.shape == (1, self.engine.size) def test_sparse_circular_fingerprints(self): """ Test CircularFingerprint with sparse encoding. """ self.engine = fp.CircularFingerprint(sparse=True) rval = self.engine([self.mol]) assert rval.shape == (1,) assert isinstance(rval[0], dict) assert len(rval[0]) def test_sparse_circular_fingerprints_with_smiles(self): """ Test CircularFingerprint with sparse encoding and SMILES for each fragment. """ self.engine = fp.CircularFingerprint(sparse=True, smiles=True) rval = self.engine([self.mol]) assert rval.shape == (1,) assert isinstance(rval[0], dict) assert len(rval[0]) # check for separate count and SMILES entries for each fragment for fragment_id, value in rval[0].items(): assert 'count' in value assert 'smiles' in value
Add test for fragment SMILES
Add test for fragment SMILES
Python
bsd-3-clause
rbharath/pande-gas,rbharath/pande-gas
--- 
+++ 
@@ -36,3 +36,19 @@
         assert rval.shape == (1,)
         assert isinstance(rval[0], dict)
         assert len(rval[0])
+
+    def test_sparse_circular_fingerprints_with_smiles(self):
+        """
+        Test CircularFingerprint with sparse encoding and SMILES for each
+        fragment.
+        """
+        self.engine = fp.CircularFingerprint(sparse=True, smiles=True)
+        rval = self.engine([self.mol])
+        assert rval.shape == (1,)
+        assert isinstance(rval[0], dict)
+        assert len(rval[0])
+
+        # check for separate count and SMILES entries for each fragment
+        for fragment_id, value in rval[0].items():
+            assert 'count' in value
+            assert 'smiles' in value