Dataset columns (string length ranges, or number of classes for categorical columns):

| column          | dtype         | lengths / classes |
|-----------------|---------------|-------------------|
| commit          | stringlengths | 40 to 40          |
| old_file        | stringlengths | 4 to 118          |
| new_file        | stringlengths | 4 to 118          |
| old_contents    | stringlengths | 0 to 2.94k        |
| new_contents    | stringlengths | 1 to 4.43k        |
| subject         | stringlengths | 15 to 444         |
| message         | stringlengths | 16 to 3.45k       |
| lang            | stringclasses | 1 value           |
| license         | stringclasses | 13 values         |
| repos           | stringlengths | 5 to 43.2k        |
| prompt          | stringlengths | 17 to 4.58k       |
| response        | stringlengths | 1 to 4.43k        |
| prompt_tagged   | stringlengths | 58 to 4.62k       |
| response_tagged | stringlengths | 1 to 4.43k        |
| text            | stringlengths | 132 to 7.29k      |
| text_tagged     | stringlengths | 173 to 7.33k      |

The last six columns are derived from the base fields: `prompt` is `old_contents` followed by the commit message, `response` is `new_contents`, the `_tagged` variants wrap the same strings in `<commit_before>`, `<commit_msg>` and `<commit_after>` markers, and `text`/`text_tagged` concatenate the corresponding prompt and response. Since their values repeat the base fields verbatim, the records below are listed with the base columns only.
commit: 219eddef46d17486324240856005dc2be40083a4
old_file: newparp/tasks/__init__.py
new_file: newparp/tasks/__init__.py
old_contents:
```python
import os

import raven
from celery import Celery, Task
from classtools import reify
from redis import StrictRedis
from raven.contrib.celery import register_signal, register_logger_signal

from newparp.model import sm
from newparp.model.connections import redis_pool

celery = Celery("newparp", include=[
    "newparp.tasks.background",
    "newparp.tasks.matchmaker",
    "newparp.tasks.reaper",
    "newparp.tasks.roulette_matchmaker",
    "newparp.tasks.chat",
])

# Sentry exception logging if there is a sentry object.
if "SENTRY_PRIVATE_DSN" in os.environ:
    sentry = raven.Client(
        dsn=os.environ["SENTRY_PRIVATE_DSN"],
        include_paths=["newparp"],
    )
    register_logger_signal(sentry)
    register_signal(sentry)

celery.config_from_object('newparp.tasks.config')


class WorkerTask(Task):
    abstract = True

    @reify
    def db(self):
        return sm()

    @reify
    def redis(self):
        return StrictRedis(connection_pool=redis_pool)

    def after_return(self, *args, **kwargs):
        if hasattr(self, "db"):
            self.db.close()
            del self.db
        if hasattr(self, "redis"):
            del self.redis
```
new_contents:
```python
import os

import raven
from celery import Celery, Task
from classtools import reify
from redis import StrictRedis
from raven.contrib.celery import register_signal, register_logger_signal

from newparp.model import sm
from newparp.model.connections import redis_pool

celery = Celery("newparp", include=[
    "newparp.tasks.background",
    "newparp.tasks.matchmaker",
    "newparp.tasks.reaper",
    "newparp.tasks.roulette_matchmaker",
    "newparp.tasks.chat",
    "newparp.tasks.test",
])

# Sentry exception logging if there is a sentry object.
if "SENTRY_PRIVATE_DSN" in os.environ:
    sentry = raven.Client(
        dsn=os.environ["SENTRY_PRIVATE_DSN"],
        include_paths=["newparp"],
    )
    register_logger_signal(sentry)
    register_signal(sentry)

celery.config_from_object('newparp.tasks.config')


class WorkerTask(Task):
    abstract = True

    @reify
    def db(self):
        return sm()

    @reify
    def redis(self):
        return StrictRedis(connection_pool=redis_pool)

    def after_return(self, *args, **kwargs):
        if hasattr(self, "db"):
            self.db.close()
            del self.db
        if hasattr(self, "redis"):
            del self.redis
```
subject: Add test tasks to the celery includes.
message: Add test tasks to the celery includes.
lang: Python
license: agpl-3.0
repos: MSPARP/newparp
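This commit only registers `newparp.tasks.test` in the worker's include list; the test module itself is not part of the diff. For orientation, a minimal sketch of what a module registered this way could contain, assuming a hypothetical `ping` task built on the `WorkerTask` base:

```python
# Hypothetical newparp/tasks/test.py -- not shown in the commit; only its
# registration in Celery's include list is.
from newparp.tasks import celery, WorkerTask


@celery.task(base=WorkerTask, bind=True)
def ping(self):
    # WorkerTask reifies `redis` on first access and drops it again in
    # after_return(), so tasks can treat it as a plain attribute.
    return self.redis.ping()
```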
commit: 455783a2ef4c47a5bc9933d48e7d44dcf3c41dc0
old_file: tests/integration/grains/test_core.py
new_file: tests/integration/grains/test_core.py
old_contents:
```python
# -*- coding: utf-8 -*-
'''
Test the core grains
'''

# Import python libs
from __future__ import absolute_import

# Import Salt Testing libs
import tests.integration as integration
from tests.support.unit import skipIf

# Import salt libs
import salt.utils

if salt.utils.is_windows():
    try:
        import salt.modules.reg
    except:
        pass


class TestGrainsCore(integration.ModuleCase):
    '''
    Test the core grains grains
    '''
    @skipIf(not salt.utils.is_windows(), 'Only run on Windows')
    def test_win_cpu_model(self):
        '''
        test grains['cpu_model']
        '''
        opts = self.minion_opts
        cpu_model_text = salt.modules.reg.read_value(
            "HKEY_LOCAL_MACHINE",
            "HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0",
            "ProcessorNameString").get('vdata')
        self.assertEqual(
            self.run_function('grains.items')['cpu_model'],
            cpu_model_text
        )
```
new_contents:
```python
# -*- coding: utf-8 -*-
'''
Test the core grains
'''

# Import python libs
from __future__ import absolute_import

# Import Salt Testing libs
import tests.integration as integration
from tests.support.unit import skipIf

# Import salt libs
import salt.utils

if salt.utils.is_windows():
    try:
        import salt.modules.reg
    except ImportError:
        pass


class TestGrainsCore(integration.ModuleCase):
    '''
    Test the core grains grains
    '''
    @skipIf(not salt.utils.is_windows(), 'Only run on Windows')
    def test_win_cpu_model(self):
        '''
        test grains['cpu_model']
        '''
        opts = self.minion_opts
        cpu_model_text = salt.modules.reg.read_value(
            "HKEY_LOCAL_MACHINE",
            "HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0",
            "ProcessorNameString").get('vdata')
        self.assertEqual(
            self.run_function('grains.items')['cpu_model'],
            cpu_model_text
        )
```
subject: Add ImportError to exception instead of bare "except"
message: Add ImportError to exception instead of bare "except"
         Fixes lint error on develop.
lang: Python
license: apache-2.0
repos: saltstack/salt
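The motivation for the one-line change: a bare `except:` silences every exception, not just the missing-module case the guard is meant for. An illustrative contrast (not part of the commit):

```python
try:
    import salt.modules.reg
except:  # too broad: also hides NameError, SystemExit, KeyboardInterrupt, ...
    pass

try:
    import salt.modules.reg
except ImportError:  # only "the module is unavailable" is ignored
    pass
```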
commit: a07c3db369fec32507a7f51b96927bfe383597bc
old_file: tests/PexpectTestCase.py
new_file: tests/PexpectTestCase.py
old_contents:
```python
'''
PEXPECT LICENSE

    This license is approved by the OSI and FSF as GPL-compatible.
        http://opensource.org/licenses/isc-license.txt

    Copyright (c) 2012, Noah Spurrier <noah@noah.org>

    PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY
    PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE
    COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES.
    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
'''
import unittest
import sys
import os


class PexpectTestCase(unittest.TestCase):
    def setUp(self):
        self.PYTHONBIN = sys.executable
        self.original_path = os.getcwd()
        newpath = os.path.join (os.environ['PROJECT_PEXPECT_HOME'], 'tests')
        os.chdir (newpath)
        print '\n', self.id(),
        unittest.TestCase.setUp(self)

    def tearDown(self):
        os.chdir (self.original_path)
```
new_contents:
```python
'''
PEXPECT LICENSE

    This license is approved by the OSI and FSF as GPL-compatible.
        http://opensource.org/licenses/isc-license.txt

    Copyright (c) 2012, Noah Spurrier <noah@noah.org>

    PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY
    PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE
    COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES.
    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
'''
from __future__ import print_function

import unittest
import sys
import os


class PexpectTestCase(unittest.TestCase):
    def setUp(self):
        self.PYTHONBIN = sys.executable
        self.original_path = os.getcwd()
        newpath = os.path.join (os.environ['PROJECT_PEXPECT_HOME'], 'tests')
        os.chdir (newpath)
        print('\n', self.id(), end='')
        unittest.TestCase.setUp(self)

    def tearDown(self):
        os.chdir (self.original_path)
```
subject: Make test case base compatible with Python 3
message: Make test case base compatible with Python 3
lang: Python
license: isc
repos: Wakeupbuddy/pexpect, dongguangming/pexpect, nodish/pexpect, Depado/pexpect,
       bangi123/pexpect, quatanium/pexpect, blink1073/pexpect, crdoconnor/pexpect
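The subtle part of the port is the trailing comma: in Python 2, `print '\n', self.id(),` suppresses the final newline, and under `print_function` the same effect is spelled with `end=''`. A side-by-side sketch (the test id string is a placeholder):

```python
from __future__ import print_function

# Python 2 statement form, newline suppressed by the trailing comma:
#     print '\n', self.id(),
# Equivalent function form, valid on Python 2 (with the __future__ import)
# and on Python 3:
print('\n', 'PexpectTestCase.test_example', end='')
```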
commit: cd59731f1b62265b699f82359a2e3c146feb7845
old_file: oslo_cache/_i18n.py
new_file: oslo_cache/_i18n.py
old_contents:
```python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""oslo.i18n integration module.

See http://docs.openstack.org/developer/oslo.i18n/usage.html

"""

import oslo_i18n


_translators = oslo_i18n.TranslatorFactory(domain='oslo_cache')

# The primary translation function using the well-known name "_"
_ = _translators.primary
```
new_contents:
```python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""oslo.i18n integration module.

See https://docs.openstack.org/oslo.i18n/latest/user/index.html

"""

import oslo_i18n


_translators = oslo_i18n.TranslatorFactory(domain='oslo_cache')

# The primary translation function using the well-known name "_"
_ = _translators.primary
```
subject: Update the documentation link for doc migration
message: Update the documentation link for doc migration
         This patch is proposed according to the Direction 10 of doc
         migration (https://etherpad.openstack.org/p/doc-migration-tracking).
         Change-Id: I62ae28d10f70d63ba693ac0ab6581faf85f1bf6e
lang: Python
license: apache-2.0
repos: openstack/oslo.cache
commit: 8732b76c56b25d77e7972706f3a335acf3986f14
old_file: pod_manager/utils.py
new_file: pod_manager/utils.py
old_contents:
```python
import logging

__all__ = [
    'get_logger'
]


def get_logger(name):
    logger = logging.getLogger(name)
    # TODO: set level, add handler
    return logger
```
new_contents:
```python
import sys
import logging

from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver as get_libcloud_driver

from pod_manager.settings import LOG_LEVEL, LOG_FORMAT
from pod_manager.settings import PROVIDER, PROVIDER_CREDENTIALS, PROVIDER_KWARGS

__all__ = [
    'get_logger',
    'get_driver'
]


def get_logger(name):
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)

    handler = logging.StreamHandler(sys.__stdout__)
    formatter = logging.Formatter(LOG_FORMAT)
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    return logger


def get_driver():
    cls = get_libcloud_driver(PROVIDER)
    driver = cls(*PROVIDER_CREDENTIALS, **PROVIDER_KWARGS)
    return driver
```
subject: Modify get_logger to set level and formatter, add get_driver method.
message: Modify get_logger to set level and formatter, add get_driver method.
lang: Python
license: apache-2.0
repos: racker/pod-manager
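A hedged usage sketch for the two helpers. The settings values are assumptions (any libcloud provider with matching credentials would do); note also that every `get_logger()` call attaches a new handler, so calling it repeatedly for the same name duplicates output lines:

```python
# Assumed pod_manager.settings values, for illustration only:
#     LOG_FORMAT = '%(asctime)s %(name)s %(levelname)s %(message)s'
#     PROVIDER = Provider.DUMMY
#     PROVIDER_CREDENTIALS = ('api-key',)
#     PROVIDER_KWARGS = {}
from pod_manager.utils import get_logger, get_driver

logger = get_logger(__name__)
logger.debug('connecting to provider')

driver = get_driver()
for node in driver.list_nodes():
    logger.debug('found node: %s', node.name)
```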
commit: 7ff0e821b2d5e04f5d4edd198ae913a2e8e1da6e
old_file: micronota/db/test/test_tigrfam.py
new_file: micronota/db/test/test_tigrfam.py
old_contents:
```python
#!/usr/bin/env python

# ----------------------------------------------------------------------------
# Copyright (c) 2015--, micronota development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------

from tempfile import mktemp
from unittest import TestCase, main
from os.path import dirname

from micronota.bfillings.util import _get_data_dir
from micronota.db.tigrfam import prepare_metadata


class TigrfamTests(TestCase):
    def setUp(self):
        self.obs_db_fp = mktemp()
        self.exp_db_fp = _get_data_dir()('tigrfam.db')
        self.d = dirname(self.exp_db_fp)

    def test_prepare_metadata(self):
        prepare_metadata(self.d, self.obs_db_fp)
        with open(self.obs_db_fp, 'rb') as o, open(self.exp_db_fp, 'rb') as e:
            self.assertEqual(o.read(), e.read())


if __name__ == '__main__':
    main()
```
new_contents:
```python
#!/usr/bin/env python

# ----------------------------------------------------------------------------
# Copyright (c) 2015--, micronota development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------

from tempfile import mktemp
from unittest import TestCase, main
from os.path import dirname
from sqlite3 import connect

from micronota.bfillings.util import _get_data_dir
from micronota.db.tigrfam import prepare_metadata


class TigrfamTests(TestCase):
    def setUp(self):
        self.obs_db_fp = mktemp()
        self.exp_db_fp = _get_data_dir()('tigrfam.db')
        self.d = dirname(self.exp_db_fp)

    def test_prepare_metadata(self):
        prepare_metadata(self.d, self.obs_db_fp)
        with connect(self.obs_db_fp) as o, connect(self.exp_db_fp) as e:
            co = o.cursor()
            co.execute('SELECT * from tigrfam')
            ce = e.cursor()
            ce.execute('SELECT * from tigrfam')
            self.assertCountEqual(co.fetchall(), ce.fetchall())


if __name__ == '__main__':
    main()
```
subject: Update the equality test for database files
message: Update the equality test for database files
lang: Python
license: bsd-3-clause
repos: RNAer/micronota, tkosciol/micronota, mortonjt/micronota, biocore/micronota
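The change matters because two SQLite files can hold identical rows yet differ byte for byte (page layout, library version, insertion order), so `open(...).read()` equality is brittle. A generalized sketch of the row-level idea, using a hypothetical helper not taken from the commit:

```python
from sqlite3 import connect


def table_rows(db_fp, table):
    # Dump every row of one table; sorting makes the comparison
    # independent of insertion order. `table` must be a trusted name,
    # since it is interpolated directly into the SQL statement.
    with connect(db_fp) as conn:
        return sorted(conn.execute('SELECT * FROM %s' % table))


assert table_rows('obs.db', 'tigrfam') == table_rows('exp.db', 'tigrfam')
```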
c74b3a4d80b8d7002b6836a421cf2b3032377545
filterable.py
filterable.py
class Filterable:

    no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
    query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
    document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
    combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)

    identity_filter = lambda filterable: True

    @staticmethod
    def combine_filters( *filters ):
        return lambda filterable: all([fil( filterable ) for fil in filters])
class Filterable:

    no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
    query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
    document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
    combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)

    practice_topic_reject_filter = lambda filterable: filterable.topic.record_id != str(367)

    identity_filter = lambda filterable: True

    @staticmethod
    def combine_filters( *filters ):
        return lambda filterable: all([fil( filterable ) for fil in filters])
Add filter for rejecting practice topic
Add filter for rejecting practice topic
Python
mit
fire-uta/iiix-data-parser
class Filterable:

    no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
    query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
    document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
    combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)

    identity_filter = lambda filterable: True

    @staticmethod
    def combine_filters( *filters ):
        return lambda filterable: all([fil( filterable ) for fil in filters])
Add filter for rejecting practice topic
class Filterable:

    no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
    query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
    document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
    combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)

    practice_topic_reject_filter = lambda filterable: filterable.topic.record_id != str(367)

    identity_filter = lambda filterable: True

    @staticmethod
    def combine_filters( *filters ):
        return lambda filterable: all([fil( filterable ) for fil in filters])
<commit_before>class Filterable:

    no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
    query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
    document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
    combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)

    identity_filter = lambda filterable: True

    @staticmethod
    def combine_filters( *filters ):
        return lambda filterable: all([fil( filterable ) for fil in filters])
<commit_msg>Add filter for rejecting practice topic<commit_after>
class Filterable:

    no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
    query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
    document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
    combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)

    practice_topic_reject_filter = lambda filterable: filterable.topic.record_id != str(367)

    identity_filter = lambda filterable: True

    @staticmethod
    def combine_filters( *filters ):
        return lambda filterable: all([fil( filterable ) for fil in filters])
class Filterable:

    no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
    query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
    document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
    combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)

    identity_filter = lambda filterable: True

    @staticmethod
    def combine_filters( *filters ):
        return lambda filterable: all([fil( filterable ) for fil in filters])
Add filter for rejecting practice topicclass Filterable:

    no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
    query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
    document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
    combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)

    practice_topic_reject_filter = lambda filterable: filterable.topic.record_id != str(367)

    identity_filter = lambda filterable: True

    @staticmethod
    def combine_filters( *filters ):
        return lambda filterable: all([fil( filterable ) for fil in filters])
<commit_before>class Filterable:

    no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
    query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
    document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
    combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)

    identity_filter = lambda filterable: True

    @staticmethod
    def combine_filters( *filters ):
        return lambda filterable: all([fil( filterable ) for fil in filters])
<commit_msg>Add filter for rejecting practice topic<commit_after>class Filterable:

    no_delays_filter = lambda filterable: filterable.condition.record_id == str(6)
    query_delay_filter = lambda filterable: filterable.condition.record_id == str(7)
    document_delay_filter = lambda filterable: filterable.condition.record_id == str(8)
    combined_delay_filter = lambda filterable: filterable.condition.record_id == str(9)

    practice_topic_reject_filter = lambda filterable: filterable.topic.record_id != str(367)

    identity_filter = lambda filterable: True

    @staticmethod
    def combine_filters( *filters ):
        return lambda filterable: all([fil( filterable ) for fil in filters])
5f62db4246e67cec6ac39f27960d6f17e9f163c5
test/functional/rpc_deprecated.py
test/functional/rpc_deprecated.py
#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test deprecation of RPC calls."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_raises_rpc_error

class DeprecatedRpcTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True
        self.extra_args = [[], ["-deprecatedrpc=createmultisig"]]

    def run_test(self):
        self.log.info("Make sure that -deprecatedrpc=createmultisig allows it to take addresses")
        assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, [self.nodes[0].getnewaddress()])
        self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])

if __name__ == '__main__':
    DeprecatedRpcTest().main()
#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test deprecation of RPC calls."""
from test_framework.test_framework import BitcoinTestFramework

class DeprecatedRpcTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True
        self.extra_args = [[], []]

    def run_test(self):
        # This test should be used to verify correct behaviour of deprecated
        # RPC methods with and without the -deprecatedrpc flags. For example:
        #
        # self.log.info("Make sure that -deprecatedrpc=createmultisig allows it to take addresses")
        # assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, [self.nodes[0].getnewaddress()])
        # self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
        #
        # There are currently no deprecated RPC methods in master, so this
        # test is currently empty.
        pass

if __name__ == '__main__':
    DeprecatedRpcTest().main()
Remove test for deprecated createmultsig option
[tests] Remove test for deprecated createmultsig option
Python
mit
chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin
#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test deprecation of RPC calls."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_raises_rpc_error

class DeprecatedRpcTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True
        self.extra_args = [[], ["-deprecatedrpc=createmultisig"]]

    def run_test(self):
        self.log.info("Make sure that -deprecatedrpc=createmultisig allows it to take addresses")
        assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, [self.nodes[0].getnewaddress()])
        self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])

if __name__ == '__main__':
    DeprecatedRpcTest().main()
[tests] Remove test for deprecated createmultsig option
#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test deprecation of RPC calls."""
from test_framework.test_framework import BitcoinTestFramework

class DeprecatedRpcTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True
        self.extra_args = [[], []]

    def run_test(self):
        # This test should be used to verify correct behaviour of deprecated
        # RPC methods with and without the -deprecatedrpc flags. For example:
        #
        # self.log.info("Make sure that -deprecatedrpc=createmultisig allows it to take addresses")
        # assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, [self.nodes[0].getnewaddress()])
        # self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
        #
        # There are currently no deprecated RPC methods in master, so this
        # test is currently empty.
        pass

if __name__ == '__main__':
    DeprecatedRpcTest().main()
<commit_before>#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test deprecation of RPC calls."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_raises_rpc_error

class DeprecatedRpcTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True
        self.extra_args = [[], ["-deprecatedrpc=createmultisig"]]

    def run_test(self):
        self.log.info("Make sure that -deprecatedrpc=createmultisig allows it to take addresses")
        assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, [self.nodes[0].getnewaddress()])
        self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])

if __name__ == '__main__':
    DeprecatedRpcTest().main()
<commit_msg>[tests] Remove test for deprecated createmultsig option<commit_after>
#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test deprecation of RPC calls."""
from test_framework.test_framework import BitcoinTestFramework

class DeprecatedRpcTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True
        self.extra_args = [[], []]

    def run_test(self):
        # This test should be used to verify correct behaviour of deprecated
        # RPC methods with and without the -deprecatedrpc flags. For example:
        #
        # self.log.info("Make sure that -deprecatedrpc=createmultisig allows it to take addresses")
        # assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, [self.nodes[0].getnewaddress()])
        # self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
        #
        # There are currently no deprecated RPC methods in master, so this
        # test is currently empty.
        pass

if __name__ == '__main__':
    DeprecatedRpcTest().main()
#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test deprecation of RPC calls."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_raises_rpc_error

class DeprecatedRpcTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True
        self.extra_args = [[], ["-deprecatedrpc=createmultisig"]]

    def run_test(self):
        self.log.info("Make sure that -deprecatedrpc=createmultisig allows it to take addresses")
        assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, [self.nodes[0].getnewaddress()])
        self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])

if __name__ == '__main__':
    DeprecatedRpcTest().main()
[tests] Remove test for deprecated createmultsig option#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test deprecation of RPC calls."""
from test_framework.test_framework import BitcoinTestFramework

class DeprecatedRpcTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True
        self.extra_args = [[], []]

    def run_test(self):
        # This test should be used to verify correct behaviour of deprecated
        # RPC methods with and without the -deprecatedrpc flags. For example:
        #
        # self.log.info("Make sure that -deprecatedrpc=createmultisig allows it to take addresses")
        # assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, [self.nodes[0].getnewaddress()])
        # self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
        #
        # There are currently no deprecated RPC methods in master, so this
        # test is currently empty.
        pass

if __name__ == '__main__':
    DeprecatedRpcTest().main()
<commit_before>#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test deprecation of RPC calls."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_raises_rpc_error

class DeprecatedRpcTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True
        self.extra_args = [[], ["-deprecatedrpc=createmultisig"]]

    def run_test(self):
        self.log.info("Make sure that -deprecatedrpc=createmultisig allows it to take addresses")
        assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, [self.nodes[0].getnewaddress()])
        self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])

if __name__ == '__main__':
    DeprecatedRpcTest().main()
<commit_msg>[tests] Remove test for deprecated createmultsig option<commit_after>#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test deprecation of RPC calls."""
from test_framework.test_framework import BitcoinTestFramework

class DeprecatedRpcTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True
        self.extra_args = [[], []]

    def run_test(self):
        # This test should be used to verify correct behaviour of deprecated
        # RPC methods with and without the -deprecatedrpc flags. For example:
        #
        # self.log.info("Make sure that -deprecatedrpc=createmultisig allows it to take addresses")
        # assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, [self.nodes[0].getnewaddress()])
        # self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
        #
        # There are currently no deprecated RPC methods in master, so this
        # test is currently empty.
        pass

if __name__ == '__main__':
    DeprecatedRpcTest().main()
fd2c03b2e6f48dac071b813b20cc2f70a2658f24
tests/test_path_paths.py
tests/test_path_paths.py
import nose
from nose.tools import raises

import dpath.path
import dpath.exceptions
import dpath.options

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_invalid_keyname():
    tdict = { "I/contain/the/separator": 0 }
    for x in dpath.path.paths(tdict):
        pass

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_empty_key_disallowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    for x in dpath.path.paths(tdict):
        pass

def test_path_paths_empty_key_allowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    parts=[]
    dpath.options.ALLOW_EMPTY_STRING_KEYS=True
    for x in dpath.path.paths(tdict, dirs=False, leaves=True):
        path = x
    for x in path[:-1]:
        parts.append(x[0])
    dpath.options.ALLOW_EMPTY_STRING_KEYS=False
    print "/".join(parts)
    assert("/".join(parts) == "Empty//Key")

def test_path_paths_int_keys():
    dpath.path.validate([ ['I', dict], ['am', dict], ['path', dict], [0, dict], ['of', dict], [2, int] ])
import nose
from nose.tools import raises

import dpath.path
import dpath.exceptions
import dpath.options

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_invalid_keyname():
    tdict = { "I/contain/the/separator": 0 }
    for x in dpath.path.paths(tdict):
        pass

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_empty_key_disallowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    for x in dpath.path.paths(tdict):
        pass

def test_path_paths_empty_key_allowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    parts=[]
    dpath.options.ALLOW_EMPTY_STRING_KEYS=True
    for x in dpath.path.paths(tdict, dirs=False, leaves=True):
        path = x
    for x in path[:-1]:
        parts.append(x[0])
    dpath.options.ALLOW_EMPTY_STRING_KEYS=False
    assert("/".join(parts) == "Empty//Key")

def test_path_paths_int_keys():
    dpath.path.validate([ ['I', dict], ['am', dict], ['path', dict], [0, dict], ['of', dict], [2, int] ])
Print statement was breaking python 3 builds
Print statement was breaking python 3 builds
Python
mit
akesterson/dpath-python,calebcase/dpath-python,benthomasson/dpath-python,lexhung/dpath-python,pombredanne/dpath-python
import nose
from nose.tools import raises

import dpath.path
import dpath.exceptions
import dpath.options

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_invalid_keyname():
    tdict = { "I/contain/the/separator": 0 }
    for x in dpath.path.paths(tdict):
        pass

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_empty_key_disallowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    for x in dpath.path.paths(tdict):
        pass

def test_path_paths_empty_key_allowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    parts=[]
    dpath.options.ALLOW_EMPTY_STRING_KEYS=True
    for x in dpath.path.paths(tdict, dirs=False, leaves=True):
        path = x
    for x in path[:-1]:
        parts.append(x[0])
    dpath.options.ALLOW_EMPTY_STRING_KEYS=False
    print "/".join(parts)
    assert("/".join(parts) == "Empty//Key")

def test_path_paths_int_keys():
    dpath.path.validate([ ['I', dict], ['am', dict], ['path', dict], [0, dict], ['of', dict], [2, int] ])
Print statement was breaking python 3 builds
import nose
from nose.tools import raises

import dpath.path
import dpath.exceptions
import dpath.options

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_invalid_keyname():
    tdict = { "I/contain/the/separator": 0 }
    for x in dpath.path.paths(tdict):
        pass

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_empty_key_disallowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    for x in dpath.path.paths(tdict):
        pass

def test_path_paths_empty_key_allowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    parts=[]
    dpath.options.ALLOW_EMPTY_STRING_KEYS=True
    for x in dpath.path.paths(tdict, dirs=False, leaves=True):
        path = x
    for x in path[:-1]:
        parts.append(x[0])
    dpath.options.ALLOW_EMPTY_STRING_KEYS=False
    assert("/".join(parts) == "Empty//Key")

def test_path_paths_int_keys():
    dpath.path.validate([ ['I', dict], ['am', dict], ['path', dict], [0, dict], ['of', dict], [2, int] ])
<commit_before>import nose
from nose.tools import raises

import dpath.path
import dpath.exceptions
import dpath.options

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_invalid_keyname():
    tdict = { "I/contain/the/separator": 0 }
    for x in dpath.path.paths(tdict):
        pass

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_empty_key_disallowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    for x in dpath.path.paths(tdict):
        pass

def test_path_paths_empty_key_allowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    parts=[]
    dpath.options.ALLOW_EMPTY_STRING_KEYS=True
    for x in dpath.path.paths(tdict, dirs=False, leaves=True):
        path = x
    for x in path[:-1]:
        parts.append(x[0])
    dpath.options.ALLOW_EMPTY_STRING_KEYS=False
    print "/".join(parts)
    assert("/".join(parts) == "Empty//Key")

def test_path_paths_int_keys():
    dpath.path.validate([ ['I', dict], ['am', dict], ['path', dict], [0, dict], ['of', dict], [2, int] ])
<commit_msg>Print statement was breaking python 3 builds<commit_after>
import nose
from nose.tools import raises

import dpath.path
import dpath.exceptions
import dpath.options

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_invalid_keyname():
    tdict = { "I/contain/the/separator": 0 }
    for x in dpath.path.paths(tdict):
        pass

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_empty_key_disallowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    for x in dpath.path.paths(tdict):
        pass

def test_path_paths_empty_key_allowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    parts=[]
    dpath.options.ALLOW_EMPTY_STRING_KEYS=True
    for x in dpath.path.paths(tdict, dirs=False, leaves=True):
        path = x
    for x in path[:-1]:
        parts.append(x[0])
    dpath.options.ALLOW_EMPTY_STRING_KEYS=False
    assert("/".join(parts) == "Empty//Key")

def test_path_paths_int_keys():
    dpath.path.validate([ ['I', dict], ['am', dict], ['path', dict], [0, dict], ['of', dict], [2, int] ])
import nose
from nose.tools import raises

import dpath.path
import dpath.exceptions
import dpath.options

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_invalid_keyname():
    tdict = { "I/contain/the/separator": 0 }
    for x in dpath.path.paths(tdict):
        pass

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_empty_key_disallowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    for x in dpath.path.paths(tdict):
        pass

def test_path_paths_empty_key_allowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    parts=[]
    dpath.options.ALLOW_EMPTY_STRING_KEYS=True
    for x in dpath.path.paths(tdict, dirs=False, leaves=True):
        path = x
    for x in path[:-1]:
        parts.append(x[0])
    dpath.options.ALLOW_EMPTY_STRING_KEYS=False
    print "/".join(parts)
    assert("/".join(parts) == "Empty//Key")

def test_path_paths_int_keys():
    dpath.path.validate([ ['I', dict], ['am', dict], ['path', dict], [0, dict], ['of', dict], [2, int] ])
Print statement was breaking python 3 buildsimport nose
from nose.tools import raises

import dpath.path
import dpath.exceptions
import dpath.options

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_invalid_keyname():
    tdict = { "I/contain/the/separator": 0 }
    for x in dpath.path.paths(tdict):
        pass

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_empty_key_disallowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    for x in dpath.path.paths(tdict):
        pass

def test_path_paths_empty_key_allowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    parts=[]
    dpath.options.ALLOW_EMPTY_STRING_KEYS=True
    for x in dpath.path.paths(tdict, dirs=False, leaves=True):
        path = x
    for x in path[:-1]:
        parts.append(x[0])
    dpath.options.ALLOW_EMPTY_STRING_KEYS=False
    assert("/".join(parts) == "Empty//Key")

def test_path_paths_int_keys():
    dpath.path.validate([ ['I', dict], ['am', dict], ['path', dict], [0, dict], ['of', dict], [2, int] ])
<commit_before>import nose
from nose.tools import raises

import dpath.path
import dpath.exceptions
import dpath.options

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_invalid_keyname():
    tdict = { "I/contain/the/separator": 0 }
    for x in dpath.path.paths(tdict):
        pass

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_empty_key_disallowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    for x in dpath.path.paths(tdict):
        pass

def test_path_paths_empty_key_allowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    parts=[]
    dpath.options.ALLOW_EMPTY_STRING_KEYS=True
    for x in dpath.path.paths(tdict, dirs=False, leaves=True):
        path = x
    for x in path[:-1]:
        parts.append(x[0])
    dpath.options.ALLOW_EMPTY_STRING_KEYS=False
    print "/".join(parts)
    assert("/".join(parts) == "Empty//Key")

def test_path_paths_int_keys():
    dpath.path.validate([ ['I', dict], ['am', dict], ['path', dict], [0, dict], ['of', dict], [2, int] ])
<commit_msg>Print statement was breaking python 3 builds<commit_after>import nose
from nose.tools import raises

import dpath.path
import dpath.exceptions
import dpath.options

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_invalid_keyname():
    tdict = { "I/contain/the/separator": 0 }
    for x in dpath.path.paths(tdict):
        pass

@raises(dpath.exceptions.InvalidKeyName)
def test_path_paths_empty_key_disallowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    for x in dpath.path.paths(tdict):
        pass

def test_path_paths_empty_key_allowed():
    tdict = { "Empty": { "": { "Key": "" } } }
    parts=[]
    dpath.options.ALLOW_EMPTY_STRING_KEYS=True
    for x in dpath.path.paths(tdict, dirs=False, leaves=True):
        path = x
    for x in path[:-1]:
        parts.append(x[0])
    dpath.options.ALLOW_EMPTY_STRING_KEYS=False
    assert("/".join(parts) == "Empty//Key")

def test_path_paths_int_keys():
    dpath.path.validate([ ['I', dict], ['am', dict], ['path', dict], [0, dict], ['of', dict], [2, int] ])
53dd7c112d3f1781e8b7c662ba52c805a6afa568
scripts/3-create-database.py
scripts/3-create-database.py
"""Creates an SQLite database detailing all the K2 target pixel files. TODO ---- * Sort the final table by EPIC ID. * Add an index to the sqlite table? """ import glob import logging import sqlite3 import pandas as pd log = logging.getLogger(__name__) log.setLevel("INFO") CSV_FILENAME = "../k2-target-pixel-files.csv" SQLITE_FILENAME = "../k2-target-pixel-files.db" if __name__ == "__main__": log.info("Reading the data") df = pd.concat([pd.read_csv(fn) for fn in glob.glob("intermediate-data/*metadata.csv")]) # Write to the CSV file log.info("Writing {}".format(CSV_FILENAME)) df.to_csv(CSV_FILENAME, index=False) # Write the SQLite table log.info("Writing {}".format(SQLITE_FILENAME)) con = sqlite3.connect(SQLITE_FILENAME) df.to_sql(name='tpf', con=con, if_exists='replace', index=False)
"""Creates an SQLite database detailing all the K2 target pixel files. TODO ---- * Add an index to the sqlite table? """ import glob import logging import sqlite3 import pandas as pd log = logging.getLogger(__name__) log.setLevel("INFO") CSV_FILENAME = "../k2-target-pixel-files.csv" SQLITE_FILENAME = "../k2-target-pixel-files.db" if __name__ == "__main__": log.info("Reading the data") df = pd.concat([pd.read_csv(fn) for fn in glob.glob("intermediate-data/*metadata.csv")]) df = df.sort_values("keplerid") # Write to the CSV file log.info("Writing {}".format(CSV_FILENAME)) df.to_csv(CSV_FILENAME, index=False) # Write the SQLite table log.info("Writing {}".format(SQLITE_FILENAME)) con = sqlite3.connect(SQLITE_FILENAME) df.to_sql(name='tpf', con=con, if_exists='replace', index=False)
Sort the final table by keplerid
Sort the final table by keplerid
Python
mit
barentsen/K2metadata,KeplerGO/K2metadata,barentsen/k2-target-index
"""Creates an SQLite database detailing all the K2 target pixel files. TODO ---- * Sort the final table by EPIC ID. * Add an index to the sqlite table? """ import glob import logging import sqlite3 import pandas as pd log = logging.getLogger(__name__) log.setLevel("INFO") CSV_FILENAME = "../k2-target-pixel-files.csv" SQLITE_FILENAME = "../k2-target-pixel-files.db" if __name__ == "__main__": log.info("Reading the data") df = pd.concat([pd.read_csv(fn) for fn in glob.glob("intermediate-data/*metadata.csv")]) # Write to the CSV file log.info("Writing {}".format(CSV_FILENAME)) df.to_csv(CSV_FILENAME, index=False) # Write the SQLite table log.info("Writing {}".format(SQLITE_FILENAME)) con = sqlite3.connect(SQLITE_FILENAME) df.to_sql(name='tpf', con=con, if_exists='replace', index=False) Sort the final table by keplerid
"""Creates an SQLite database detailing all the K2 target pixel files. TODO ---- * Add an index to the sqlite table? """ import glob import logging import sqlite3 import pandas as pd log = logging.getLogger(__name__) log.setLevel("INFO") CSV_FILENAME = "../k2-target-pixel-files.csv" SQLITE_FILENAME = "../k2-target-pixel-files.db" if __name__ == "__main__": log.info("Reading the data") df = pd.concat([pd.read_csv(fn) for fn in glob.glob("intermediate-data/*metadata.csv")]) df = df.sort_values("keplerid") # Write to the CSV file log.info("Writing {}".format(CSV_FILENAME)) df.to_csv(CSV_FILENAME, index=False) # Write the SQLite table log.info("Writing {}".format(SQLITE_FILENAME)) con = sqlite3.connect(SQLITE_FILENAME) df.to_sql(name='tpf', con=con, if_exists='replace', index=False)
<commit_before>"""Creates an SQLite database detailing all the K2 target pixel files. TODO ---- * Sort the final table by EPIC ID. * Add an index to the sqlite table? """ import glob import logging import sqlite3 import pandas as pd log = logging.getLogger(__name__) log.setLevel("INFO") CSV_FILENAME = "../k2-target-pixel-files.csv" SQLITE_FILENAME = "../k2-target-pixel-files.db" if __name__ == "__main__": log.info("Reading the data") df = pd.concat([pd.read_csv(fn) for fn in glob.glob("intermediate-data/*metadata.csv")]) # Write to the CSV file log.info("Writing {}".format(CSV_FILENAME)) df.to_csv(CSV_FILENAME, index=False) # Write the SQLite table log.info("Writing {}".format(SQLITE_FILENAME)) con = sqlite3.connect(SQLITE_FILENAME) df.to_sql(name='tpf', con=con, if_exists='replace', index=False) <commit_msg>Sort the final table by keplerid<commit_after>
"""Creates an SQLite database detailing all the K2 target pixel files. TODO ---- * Add an index to the sqlite table? """ import glob import logging import sqlite3 import pandas as pd log = logging.getLogger(__name__) log.setLevel("INFO") CSV_FILENAME = "../k2-target-pixel-files.csv" SQLITE_FILENAME = "../k2-target-pixel-files.db" if __name__ == "__main__": log.info("Reading the data") df = pd.concat([pd.read_csv(fn) for fn in glob.glob("intermediate-data/*metadata.csv")]) df = df.sort_values("keplerid") # Write to the CSV file log.info("Writing {}".format(CSV_FILENAME)) df.to_csv(CSV_FILENAME, index=False) # Write the SQLite table log.info("Writing {}".format(SQLITE_FILENAME)) con = sqlite3.connect(SQLITE_FILENAME) df.to_sql(name='tpf', con=con, if_exists='replace', index=False)
"""Creates an SQLite database detailing all the K2 target pixel files. TODO ---- * Sort the final table by EPIC ID. * Add an index to the sqlite table? """ import glob import logging import sqlite3 import pandas as pd log = logging.getLogger(__name__) log.setLevel("INFO") CSV_FILENAME = "../k2-target-pixel-files.csv" SQLITE_FILENAME = "../k2-target-pixel-files.db" if __name__ == "__main__": log.info("Reading the data") df = pd.concat([pd.read_csv(fn) for fn in glob.glob("intermediate-data/*metadata.csv")]) # Write to the CSV file log.info("Writing {}".format(CSV_FILENAME)) df.to_csv(CSV_FILENAME, index=False) # Write the SQLite table log.info("Writing {}".format(SQLITE_FILENAME)) con = sqlite3.connect(SQLITE_FILENAME) df.to_sql(name='tpf', con=con, if_exists='replace', index=False) Sort the final table by keplerid"""Creates an SQLite database detailing all the K2 target pixel files. TODO ---- * Add an index to the sqlite table? """ import glob import logging import sqlite3 import pandas as pd log = logging.getLogger(__name__) log.setLevel("INFO") CSV_FILENAME = "../k2-target-pixel-files.csv" SQLITE_FILENAME = "../k2-target-pixel-files.db" if __name__ == "__main__": log.info("Reading the data") df = pd.concat([pd.read_csv(fn) for fn in glob.glob("intermediate-data/*metadata.csv")]) df = df.sort_values("keplerid") # Write to the CSV file log.info("Writing {}".format(CSV_FILENAME)) df.to_csv(CSV_FILENAME, index=False) # Write the SQLite table log.info("Writing {}".format(SQLITE_FILENAME)) con = sqlite3.connect(SQLITE_FILENAME) df.to_sql(name='tpf', con=con, if_exists='replace', index=False)
<commit_before>"""Creates an SQLite database detailing all the K2 target pixel files. TODO ---- * Sort the final table by EPIC ID. * Add an index to the sqlite table? """ import glob import logging import sqlite3 import pandas as pd log = logging.getLogger(__name__) log.setLevel("INFO") CSV_FILENAME = "../k2-target-pixel-files.csv" SQLITE_FILENAME = "../k2-target-pixel-files.db" if __name__ == "__main__": log.info("Reading the data") df = pd.concat([pd.read_csv(fn) for fn in glob.glob("intermediate-data/*metadata.csv")]) # Write to the CSV file log.info("Writing {}".format(CSV_FILENAME)) df.to_csv(CSV_FILENAME, index=False) # Write the SQLite table log.info("Writing {}".format(SQLITE_FILENAME)) con = sqlite3.connect(SQLITE_FILENAME) df.to_sql(name='tpf', con=con, if_exists='replace', index=False) <commit_msg>Sort the final table by keplerid<commit_after>"""Creates an SQLite database detailing all the K2 target pixel files. TODO ---- * Add an index to the sqlite table? """ import glob import logging import sqlite3 import pandas as pd log = logging.getLogger(__name__) log.setLevel("INFO") CSV_FILENAME = "../k2-target-pixel-files.csv" SQLITE_FILENAME = "../k2-target-pixel-files.db" if __name__ == "__main__": log.info("Reading the data") df = pd.concat([pd.read_csv(fn) for fn in glob.glob("intermediate-data/*metadata.csv")]) df = df.sort_values("keplerid") # Write to the CSV file log.info("Writing {}".format(CSV_FILENAME)) df.to_csv(CSV_FILENAME, index=False) # Write the SQLite table log.info("Writing {}".format(SQLITE_FILENAME)) con = sqlite3.connect(SQLITE_FILENAME) df.to_sql(name='tpf', con=con, if_exists='replace', index=False)
0da5820816187dd6b6d6ebbd554fc9646853e0fc
tests/git_code_debt/logic_test.py
tests/git_code_debt/logic_test.py

import testify as T

from git_code_debt.create_tables import get_metric_ids
from git_code_debt.discovery import get_metric_parsers
from git_code_debt.logic import get_metric_mapping
from testing.base_classes.sandbox_test_case import SandboxTestCase

class TestLogic(SandboxTestCase):

    def test_get_metric_mapping(self):
        with self.db() as db:
            ret = get_metric_mapping(db)
            T.assert_equal(set(ret.keys()), set(get_metric_ids(get_metric_parsers())))

import testify as T

from git_code_debt.create_tables import get_metric_ids
from git_code_debt.discovery import get_metric_parsers
from git_code_debt.logic import get_metric_mapping
from git_code_debt.logic import get_metric_values
from git_code_debt.logic import get_previous_sha
from git_code_debt.logic import insert_metric_values
from git_code_debt.repo_parser import Commit
from testing.base_classes.sandbox_test_case import SandboxTestCase

class TestLogic(SandboxTestCase):

    sha = 'a' * 40
    repo = 'git@github.com:asottile/git-code-debt'

    def test_get_metric_mapping(self):
        with self.db() as db:
            ret = get_metric_mapping(db)
            T.assert_equal(set(ret.keys()), set(get_metric_ids(get_metric_parsers())))

    def test_get_previous_sha_no_previous_sha(self):
        with self.db() as db:
            ret = get_previous_sha(db, self.repo)
            T.assert_is(ret, None)

    def get_fake_metrics(self, metric_mapping):
        return dict( (metric_name, 1) for metric_name in metric_mapping.keys() )

    def get_fake_commit(self):
        return Commit(self.sha, 1, 'foo')

    def insert_fake_metrics(self, db):
        metric_mapping = get_metric_mapping(db)
        metric_values = self.get_fake_metrics(metric_mapping)
        commit = self.get_fake_commit()
        insert_metric_values(db, metric_values, metric_mapping, self.repo, commit)

    def test_get_previous_sha_previous_existing_sha(self):
        with self.db() as db:
            self.insert_fake_metrics(db)
            ret = get_previous_sha(db, self.repo)
            T.assert_equal(ret, self.sha)

    def test_insert_and_get_metric_values(self):
        with self.db() as db:
            fake_metrics = self.get_fake_metrics(get_metric_mapping(db))
            fake_commit = self.get_fake_commit()
            self.insert_fake_metrics(db)
            T.assert_equal(fake_metrics, get_metric_values(db, fake_commit))
Add more tests to logic test
Add more tests to logic test
Python
mit
ucarion/git-code-debt,ucarion/git-code-debt,Yelp/git-code-debt,Yelp/git-code-debt,Yelp/git-code-debt,ucarion/git-code-debt,Yelp/git-code-debt

import testify as T

from git_code_debt.create_tables import get_metric_ids
from git_code_debt.discovery import get_metric_parsers
from git_code_debt.logic import get_metric_mapping
from testing.base_classes.sandbox_test_case import SandboxTestCase

class TestLogic(SandboxTestCase):

    def test_get_metric_mapping(self):
        with self.db() as db:
            ret = get_metric_mapping(db)
            T.assert_equal(set(ret.keys()), set(get_metric_ids(get_metric_parsers())))
Add more tests to logic test
import testify as T

from git_code_debt.create_tables import get_metric_ids
from git_code_debt.discovery import get_metric_parsers
from git_code_debt.logic import get_metric_mapping
from git_code_debt.logic import get_metric_values
from git_code_debt.logic import get_previous_sha
from git_code_debt.logic import insert_metric_values
from git_code_debt.repo_parser import Commit
from testing.base_classes.sandbox_test_case import SandboxTestCase

class TestLogic(SandboxTestCase):

    sha = 'a' * 40
    repo = 'git@github.com:asottile/git-code-debt'

    def test_get_metric_mapping(self):
        with self.db() as db:
            ret = get_metric_mapping(db)
            T.assert_equal(set(ret.keys()), set(get_metric_ids(get_metric_parsers())))

    def test_get_previous_sha_no_previous_sha(self):
        with self.db() as db:
            ret = get_previous_sha(db, self.repo)
            T.assert_is(ret, None)

    def get_fake_metrics(self, metric_mapping):
        return dict( (metric_name, 1) for metric_name in metric_mapping.keys() )

    def get_fake_commit(self):
        return Commit(self.sha, 1, 'foo')

    def insert_fake_metrics(self, db):
        metric_mapping = get_metric_mapping(db)
        metric_values = self.get_fake_metrics(metric_mapping)
        commit = self.get_fake_commit()
        insert_metric_values(db, metric_values, metric_mapping, self.repo, commit)

    def test_get_previous_sha_previous_existing_sha(self):
        with self.db() as db:
            self.insert_fake_metrics(db)
            ret = get_previous_sha(db, self.repo)
            T.assert_equal(ret, self.sha)

    def test_insert_and_get_metric_values(self):
        with self.db() as db:
            fake_metrics = self.get_fake_metrics(get_metric_mapping(db))
            fake_commit = self.get_fake_commit()
            self.insert_fake_metrics(db)
            T.assert_equal(fake_metrics, get_metric_values(db, fake_commit))
<commit_before>
import testify as T

from git_code_debt.create_tables import get_metric_ids
from git_code_debt.discovery import get_metric_parsers
from git_code_debt.logic import get_metric_mapping
from testing.base_classes.sandbox_test_case import SandboxTestCase

class TestLogic(SandboxTestCase):

    def test_get_metric_mapping(self):
        with self.db() as db:
            ret = get_metric_mapping(db)
            T.assert_equal(set(ret.keys()), set(get_metric_ids(get_metric_parsers())))
<commit_msg>Add more tests to logic test<commit_after>
import testify as T

from git_code_debt.create_tables import get_metric_ids
from git_code_debt.discovery import get_metric_parsers
from git_code_debt.logic import get_metric_mapping
from git_code_debt.logic import get_metric_values
from git_code_debt.logic import get_previous_sha
from git_code_debt.logic import insert_metric_values
from git_code_debt.repo_parser import Commit
from testing.base_classes.sandbox_test_case import SandboxTestCase

class TestLogic(SandboxTestCase):

    sha = 'a' * 40
    repo = 'git@github.com:asottile/git-code-debt'

    def test_get_metric_mapping(self):
        with self.db() as db:
            ret = get_metric_mapping(db)
            T.assert_equal(set(ret.keys()), set(get_metric_ids(get_metric_parsers())))

    def test_get_previous_sha_no_previous_sha(self):
        with self.db() as db:
            ret = get_previous_sha(db, self.repo)
            T.assert_is(ret, None)

    def get_fake_metrics(self, metric_mapping):
        return dict( (metric_name, 1) for metric_name in metric_mapping.keys() )

    def get_fake_commit(self):
        return Commit(self.sha, 1, 'foo')

    def insert_fake_metrics(self, db):
        metric_mapping = get_metric_mapping(db)
        metric_values = self.get_fake_metrics(metric_mapping)
        commit = self.get_fake_commit()
        insert_metric_values(db, metric_values, metric_mapping, self.repo, commit)

    def test_get_previous_sha_previous_existing_sha(self):
        with self.db() as db:
            self.insert_fake_metrics(db)
            ret = get_previous_sha(db, self.repo)
            T.assert_equal(ret, self.sha)

    def test_insert_and_get_metric_values(self):
        with self.db() as db:
            fake_metrics = self.get_fake_metrics(get_metric_mapping(db))
            fake_commit = self.get_fake_commit()
            self.insert_fake_metrics(db)
            T.assert_equal(fake_metrics, get_metric_values(db, fake_commit))

import testify as T

from git_code_debt.create_tables import get_metric_ids
from git_code_debt.discovery import get_metric_parsers
from git_code_debt.logic import get_metric_mapping
from testing.base_classes.sandbox_test_case import SandboxTestCase

class TestLogic(SandboxTestCase):

    def test_get_metric_mapping(self):
        with self.db() as db:
            ret = get_metric_mapping(db)
            T.assert_equal(set(ret.keys()), set(get_metric_ids(get_metric_parsers())))
Add more tests to logic test
import testify as T

from git_code_debt.create_tables import get_metric_ids
from git_code_debt.discovery import get_metric_parsers
from git_code_debt.logic import get_metric_mapping
from git_code_debt.logic import get_metric_values
from git_code_debt.logic import get_previous_sha
from git_code_debt.logic import insert_metric_values
from git_code_debt.repo_parser import Commit
from testing.base_classes.sandbox_test_case import SandboxTestCase

class TestLogic(SandboxTestCase):

    sha = 'a' * 40
    repo = 'git@github.com:asottile/git-code-debt'

    def test_get_metric_mapping(self):
        with self.db() as db:
            ret = get_metric_mapping(db)
            T.assert_equal(set(ret.keys()), set(get_metric_ids(get_metric_parsers())))

    def test_get_previous_sha_no_previous_sha(self):
        with self.db() as db:
            ret = get_previous_sha(db, self.repo)
            T.assert_is(ret, None)

    def get_fake_metrics(self, metric_mapping):
        return dict( (metric_name, 1) for metric_name in metric_mapping.keys() )

    def get_fake_commit(self):
        return Commit(self.sha, 1, 'foo')

    def insert_fake_metrics(self, db):
        metric_mapping = get_metric_mapping(db)
        metric_values = self.get_fake_metrics(metric_mapping)
        commit = self.get_fake_commit()
        insert_metric_values(db, metric_values, metric_mapping, self.repo, commit)

    def test_get_previous_sha_previous_existing_sha(self):
        with self.db() as db:
            self.insert_fake_metrics(db)
            ret = get_previous_sha(db, self.repo)
            T.assert_equal(ret, self.sha)

    def test_insert_and_get_metric_values(self):
        with self.db() as db:
            fake_metrics = self.get_fake_metrics(get_metric_mapping(db))
            fake_commit = self.get_fake_commit()
            self.insert_fake_metrics(db)
            T.assert_equal(fake_metrics, get_metric_values(db, fake_commit))
<commit_before>
import testify as T

from git_code_debt.create_tables import get_metric_ids
from git_code_debt.discovery import get_metric_parsers
from git_code_debt.logic import get_metric_mapping
from testing.base_classes.sandbox_test_case import SandboxTestCase

class TestLogic(SandboxTestCase):

    def test_get_metric_mapping(self):
        with self.db() as db:
            ret = get_metric_mapping(db)
            T.assert_equal(set(ret.keys()), set(get_metric_ids(get_metric_parsers())))
<commit_msg>Add more tests to logic test<commit_after>
import testify as T

from git_code_debt.create_tables import get_metric_ids
from git_code_debt.discovery import get_metric_parsers
from git_code_debt.logic import get_metric_mapping
from git_code_debt.logic import get_metric_values
from git_code_debt.logic import get_previous_sha
from git_code_debt.logic import insert_metric_values
from git_code_debt.repo_parser import Commit
from testing.base_classes.sandbox_test_case import SandboxTestCase

class TestLogic(SandboxTestCase):

    sha = 'a' * 40
    repo = 'git@github.com:asottile/git-code-debt'

    def test_get_metric_mapping(self):
        with self.db() as db:
            ret = get_metric_mapping(db)
            T.assert_equal(set(ret.keys()), set(get_metric_ids(get_metric_parsers())))

    def test_get_previous_sha_no_previous_sha(self):
        with self.db() as db:
            ret = get_previous_sha(db, self.repo)
            T.assert_is(ret, None)

    def get_fake_metrics(self, metric_mapping):
        return dict( (metric_name, 1) for metric_name in metric_mapping.keys() )

    def get_fake_commit(self):
        return Commit(self.sha, 1, 'foo')

    def insert_fake_metrics(self, db):
        metric_mapping = get_metric_mapping(db)
        metric_values = self.get_fake_metrics(metric_mapping)
        commit = self.get_fake_commit()
        insert_metric_values(db, metric_values, metric_mapping, self.repo, commit)

    def test_get_previous_sha_previous_existing_sha(self):
        with self.db() as db:
            self.insert_fake_metrics(db)
            ret = get_previous_sha(db, self.repo)
            T.assert_equal(ret, self.sha)

    def test_insert_and_get_metric_values(self):
        with self.db() as db:
            fake_metrics = self.get_fake_metrics(get_metric_mapping(db))
            fake_commit = self.get_fake_commit()
            self.insert_fake_metrics(db)
            T.assert_equal(fake_metrics, get_metric_values(db, fake_commit))
48e14060eefd09976624e939eb924405a9b247e4
chatterbot/__init__.py
chatterbot/__init__.py
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.1' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot'
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.2' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot'
Update release version to 0.5.2
Update release version to 0.5.2
Python
bsd-3-clause
Reinaesaya/OUIRL-ChatBot,maclogan/VirtualPenPal,Gustavo6046/ChatterBot,Reinaesaya/OUIRL-ChatBot,gunthercox/ChatterBot,davizucon/ChatterBot,vkosuri/ChatterBot
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.1' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' Update release version to 0.5.2
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.2' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot'
<commit_before>""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.1' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' <commit_msg>Update release version to 0.5.2<commit_after>
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.2' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot'
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.1' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' Update release version to 0.5.2""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.2' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot'
<commit_before>""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.1' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' <commit_msg>Update release version to 0.5.2<commit_after>""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '0.5.2' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot'
7a09460a7a15cfc81f35f9944939314bf5255898
corehq/motech/repeaters/const.py
corehq/motech/repeaters/const.py
from datetime import timedelta

MAX_RETRY_WAIT = timedelta(days=7)
MIN_RETRY_WAIT = timedelta(minutes=60)
CHECK_REPEATERS_INTERVAL = timedelta(minutes=5)
CHECK_REPEATERS_KEY = 'check-repeaters-key'
POST_TIMEOUT = 45 # seconds

RECORD_PENDING_STATE = 'PENDING'
RECORD_SUCCESS_STATE = 'SUCCESS'
RECORD_FAILURE_STATE = 'FAIL'
RECORD_CANCELLED_STATE = 'CANCELLED'
from datetime import timedelta

MAX_RETRY_WAIT = timedelta(days=7)
MIN_RETRY_WAIT = timedelta(minutes=60)
CHECK_REPEATERS_INTERVAL = timedelta(minutes=5)
CHECK_REPEATERS_KEY = 'check-repeaters-key'
POST_TIMEOUT = 75 # seconds

RECORD_PENDING_STATE = 'PENDING'
RECORD_SUCCESS_STATE = 'SUCCESS'
RECORD_FAILURE_STATE = 'FAIL'
RECORD_CANCELLED_STATE = 'CANCELLED'
Increase repeater post timeout to 75 seconds
Increase repeater post timeout to 75 seconds

99DOTS responses sometimes take ~60s to process, which leads us to mark those records as failed
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
from datetime import timedelta

MAX_RETRY_WAIT = timedelta(days=7)
MIN_RETRY_WAIT = timedelta(minutes=60)
CHECK_REPEATERS_INTERVAL = timedelta(minutes=5)
CHECK_REPEATERS_KEY = 'check-repeaters-key'
POST_TIMEOUT = 45 # seconds

RECORD_PENDING_STATE = 'PENDING'
RECORD_SUCCESS_STATE = 'SUCCESS'
RECORD_FAILURE_STATE = 'FAIL'
RECORD_CANCELLED_STATE = 'CANCELLED'
Increase repeater post timeout to 75 seconds

99DOTS responses sometimes take ~60s to process, which leads us to mark those records as failed
from datetime import timedelta

MAX_RETRY_WAIT = timedelta(days=7)
MIN_RETRY_WAIT = timedelta(minutes=60)
CHECK_REPEATERS_INTERVAL = timedelta(minutes=5)
CHECK_REPEATERS_KEY = 'check-repeaters-key'
POST_TIMEOUT = 75 # seconds

RECORD_PENDING_STATE = 'PENDING'
RECORD_SUCCESS_STATE = 'SUCCESS'
RECORD_FAILURE_STATE = 'FAIL'
RECORD_CANCELLED_STATE = 'CANCELLED'
<commit_before>from datetime import timedelta

MAX_RETRY_WAIT = timedelta(days=7)
MIN_RETRY_WAIT = timedelta(minutes=60)
CHECK_REPEATERS_INTERVAL = timedelta(minutes=5)
CHECK_REPEATERS_KEY = 'check-repeaters-key'
POST_TIMEOUT = 45 # seconds

RECORD_PENDING_STATE = 'PENDING'
RECORD_SUCCESS_STATE = 'SUCCESS'
RECORD_FAILURE_STATE = 'FAIL'
RECORD_CANCELLED_STATE = 'CANCELLED'
<commit_msg>Increase repeater post timeout to 75 seconds

99DOTS responses sometimes take ~60s to process, which leads us to mark those records as failed<commit_after>
from datetime import timedelta

MAX_RETRY_WAIT = timedelta(days=7)
MIN_RETRY_WAIT = timedelta(minutes=60)
CHECK_REPEATERS_INTERVAL = timedelta(minutes=5)
CHECK_REPEATERS_KEY = 'check-repeaters-key'
POST_TIMEOUT = 75 # seconds

RECORD_PENDING_STATE = 'PENDING'
RECORD_SUCCESS_STATE = 'SUCCESS'
RECORD_FAILURE_STATE = 'FAIL'
RECORD_CANCELLED_STATE = 'CANCELLED'
from datetime import timedelta

MAX_RETRY_WAIT = timedelta(days=7)
MIN_RETRY_WAIT = timedelta(minutes=60)
CHECK_REPEATERS_INTERVAL = timedelta(minutes=5)
CHECK_REPEATERS_KEY = 'check-repeaters-key'
POST_TIMEOUT = 45 # seconds

RECORD_PENDING_STATE = 'PENDING'
RECORD_SUCCESS_STATE = 'SUCCESS'
RECORD_FAILURE_STATE = 'FAIL'
RECORD_CANCELLED_STATE = 'CANCELLED'
Increase repeater post timeout to 75 seconds

99DOTS responses sometimes take ~60s to process, which leads us to mark those records as failedfrom datetime import timedelta

MAX_RETRY_WAIT = timedelta(days=7)
MIN_RETRY_WAIT = timedelta(minutes=60)
CHECK_REPEATERS_INTERVAL = timedelta(minutes=5)
CHECK_REPEATERS_KEY = 'check-repeaters-key'
POST_TIMEOUT = 75 # seconds

RECORD_PENDING_STATE = 'PENDING'
RECORD_SUCCESS_STATE = 'SUCCESS'
RECORD_FAILURE_STATE = 'FAIL'
RECORD_CANCELLED_STATE = 'CANCELLED'
<commit_before>from datetime import timedelta

MAX_RETRY_WAIT = timedelta(days=7)
MIN_RETRY_WAIT = timedelta(minutes=60)
CHECK_REPEATERS_INTERVAL = timedelta(minutes=5)
CHECK_REPEATERS_KEY = 'check-repeaters-key'
POST_TIMEOUT = 45 # seconds

RECORD_PENDING_STATE = 'PENDING'
RECORD_SUCCESS_STATE = 'SUCCESS'
RECORD_FAILURE_STATE = 'FAIL'
RECORD_CANCELLED_STATE = 'CANCELLED'
<commit_msg>Increase repeater post timeout to 75 seconds

99DOTS responses sometimes take ~60s to process, which leads us to mark those records as failed<commit_after>from datetime import timedelta

MAX_RETRY_WAIT = timedelta(days=7)
MIN_RETRY_WAIT = timedelta(minutes=60)
CHECK_REPEATERS_INTERVAL = timedelta(minutes=5)
CHECK_REPEATERS_KEY = 'check-repeaters-key'
POST_TIMEOUT = 75 # seconds

RECORD_PENDING_STATE = 'PENDING'
RECORD_SUCCESS_STATE = 'SUCCESS'
RECORD_FAILURE_STATE = 'FAIL'
RECORD_CANCELLED_STATE = 'CANCELLED'
ce3fa12a6fc497264529d5f44e3f4a20b5317fcd
gapipy/resources/booking/customer.py
gapipy/resources/booking/customer.py
from __future__ import unicode_literals

from ..base import Resource

class Customer(Resource):
    _resource_name = 'customers'

    _is_listable = False
    _is_parent_resource = True

    _as_is_fields = [
        'id', 'href',
        'place_of_birth', 'meal_preference', 'meal_notes',
        'emergency_contacts', 'medical_notes', 'phone_numbers',
        'account_email', 'name', 'passport', 'address', 'nationality',
    ]

    _date_fields = ['date_of_birth', ]

    @property
    def _resource_collection_fields(self):
        from .booking import Booking
        return [
            ('bookings', Booking),
        ]
from __future__ import unicode_literals

from ..base import Resource

class Customer(Resource):
    _resource_name = 'customers'

    _is_listable = False
    _is_parent_resource = True

    _as_is_fields = [
        'id', 'href',
        'place_of_birth', 'meal_preference', 'meal_notes',
        'emergency_contacts', 'medical_notes', 'phone_numbers',
        'account_email', 'name', 'passport', 'address', 'nationality',
        'gender',
    ]

    _date_fields = ['date_of_birth', ]

    @property
    def _resource_collection_fields(self):
        from .booking import Booking
        return [
            ('bookings', Booking),
        ]
Add gender to Customer model.
Add gender to Customer model.
Python
mit
gadventures/gapipy
from __future__ import unicode_literals from ..base import Resource class Customer(Resource): _resource_name = 'customers' _is_listable = False _is_parent_resource = True _as_is_fields = [ 'id', 'href', 'place_of_birth', 'meal_preference', 'meal_notes', 'emergency_contacts', 'medical_notes', 'phone_numbers', 'account_email', 'name', 'passport', 'address', 'nationality', ] _date_fields = ['date_of_birth', ] @property def _resource_collection_fields(self): from .booking import Booking return [ ('bookings', Booking), ] Add gender to Customer model.
from __future__ import unicode_literals from ..base import Resource class Customer(Resource): _resource_name = 'customers' _is_listable = False _is_parent_resource = True _as_is_fields = [ 'id', 'href', 'place_of_birth', 'meal_preference', 'meal_notes', 'emergency_contacts', 'medical_notes', 'phone_numbers', 'account_email', 'name', 'passport', 'address', 'nationality', 'gender', ] _date_fields = ['date_of_birth', ] @property def _resource_collection_fields(self): from .booking import Booking return [ ('bookings', Booking), ]
<commit_before>from __future__ import unicode_literals from ..base import Resource class Customer(Resource): _resource_name = 'customers' _is_listable = False _is_parent_resource = True _as_is_fields = [ 'id', 'href', 'place_of_birth', 'meal_preference', 'meal_notes', 'emergency_contacts', 'medical_notes', 'phone_numbers', 'account_email', 'name', 'passport', 'address', 'nationality', ] _date_fields = ['date_of_birth', ] @property def _resource_collection_fields(self): from .booking import Booking return [ ('bookings', Booking), ] <commit_msg>Add gender to Customer model.<commit_after>
from __future__ import unicode_literals from ..base import Resource class Customer(Resource): _resource_name = 'customers' _is_listable = False _is_parent_resource = True _as_is_fields = [ 'id', 'href', 'place_of_birth', 'meal_preference', 'meal_notes', 'emergency_contacts', 'medical_notes', 'phone_numbers', 'account_email', 'name', 'passport', 'address', 'nationality', 'gender', ] _date_fields = ['date_of_birth', ] @property def _resource_collection_fields(self): from .booking import Booking return [ ('bookings', Booking), ]
from __future__ import unicode_literals from ..base import Resource class Customer(Resource): _resource_name = 'customers' _is_listable = False _is_parent_resource = True _as_is_fields = [ 'id', 'href', 'place_of_birth', 'meal_preference', 'meal_notes', 'emergency_contacts', 'medical_notes', 'phone_numbers', 'account_email', 'name', 'passport', 'address', 'nationality', ] _date_fields = ['date_of_birth', ] @property def _resource_collection_fields(self): from .booking import Booking return [ ('bookings', Booking), ] Add gender to Customer model.from __future__ import unicode_literals from ..base import Resource class Customer(Resource): _resource_name = 'customers' _is_listable = False _is_parent_resource = True _as_is_fields = [ 'id', 'href', 'place_of_birth', 'meal_preference', 'meal_notes', 'emergency_contacts', 'medical_notes', 'phone_numbers', 'account_email', 'name', 'passport', 'address', 'nationality', 'gender', ] _date_fields = ['date_of_birth', ] @property def _resource_collection_fields(self): from .booking import Booking return [ ('bookings', Booking), ]
<commit_before>from __future__ import unicode_literals from ..base import Resource class Customer(Resource): _resource_name = 'customers' _is_listable = False _is_parent_resource = True _as_is_fields = [ 'id', 'href', 'place_of_birth', 'meal_preference', 'meal_notes', 'emergency_contacts', 'medical_notes', 'phone_numbers', 'account_email', 'name', 'passport', 'address', 'nationality', ] _date_fields = ['date_of_birth', ] @property def _resource_collection_fields(self): from .booking import Booking return [ ('bookings', Booking), ] <commit_msg>Add gender to Customer model.<commit_after>from __future__ import unicode_literals from ..base import Resource class Customer(Resource): _resource_name = 'customers' _is_listable = False _is_parent_resource = True _as_is_fields = [ 'id', 'href', 'place_of_birth', 'meal_preference', 'meal_notes', 'emergency_contacts', 'medical_notes', 'phone_numbers', 'account_email', 'name', 'passport', 'address', 'nationality', 'gender', ] _date_fields = ['date_of_birth', ] @property def _resource_collection_fields(self): from .booking import Booking return [ ('bookings', Booking), ]
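Why a one-line list entry is enough here: in gapipy-style resource classes, `_as_is_fields` declares attributes that the base class copies verbatim from the API payload. A minimal sketch of that pattern — the base `Resource` below is a simplified stand-in for illustration, not gapipy's actual implementation:

class Resource:
    _as_is_fields = []

    def __init__(self, data):
        # Copy each declared field straight off the raw payload.
        for field in self._as_is_fields:
            setattr(self, field, data.get(field))

class Customer(Resource):
    _as_is_fields = ['id', 'name', 'gender']

customer = Customer({'id': 1, 'name': 'Ada', 'gender': 'F'})  # hypothetical payload
print(customer.gender)  # 'F'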
aed959a0593558b6063e70c3b594feb6caa4bdda
tests/runner/compose/init_test.py
tests/runner/compose/init_test.py
import os import tempfile import shutil from unittest import TestCase import yaml from dusty import constants from dusty.runner.compose import _write_composefile class TestComposeRunner(TestCase): def setUp(self): self.temp_compose_dir = tempfile.mkdtemp() self.temp_compose_path = os.path.join(self.temp_compose_dir, 'docker-compose.yml') self.old_compose_dir = constants.COMPOSE_DIR constants.COMPOSE_DIR = self.temp_compose_dir self.test_spec = {'app-a': {'image': 'app/a'}} def tearDown(self): constants.COMPOSE_DIR = self.old_compose_dir shutil.rmtree(self.temp_compose_dir) def test_write_composefile(self): _write_composefile(self.test_spec) written = open(self.temp_compose_path, 'r').read() self.assertItemsEqual(yaml.load(written), self.test_spec)
import os import tempfile import shutil from unittest import TestCase from mock import patch import yaml from dusty import constants from dusty.runner.compose import _write_composefile, _get_docker_env class TestComposeRunner(TestCase): def setUp(self): self.temp_compose_dir = tempfile.mkdtemp() self.temp_compose_path = os.path.join(self.temp_compose_dir, 'docker-compose.yml') self.old_compose_dir = constants.COMPOSE_DIR constants.COMPOSE_DIR = self.temp_compose_dir self.test_spec = {'app-a': {'image': 'app/a'}} def tearDown(self): constants.COMPOSE_DIR = self.old_compose_dir shutil.rmtree(self.temp_compose_dir) def test_write_composefile(self): _write_composefile(self.test_spec) written = open(self.temp_compose_path, 'r').read() self.assertItemsEqual(yaml.load(written), self.test_spec) @patch('dusty.runner.compose._check_output_demoted') def test_get_docker_env(self, fake_check_output): fake_check_output.return_value = """ export DOCKER_TLS_VERIFY=1 export DOCKER_HOST=tcp://192.168.59.103:2376 export DOCKER_CERT_PATH=/Users/root/.boot2docker/certs/boot2docker-vm""" expected = {'DOCKER_TLS_VERIFY': '1', 'DOCKER_HOST': 'tcp://192.168.59.103:2376', 'DOCKER_CERT_PATH': '/Users/root/.boot2docker/certs/boot2docker-vm'} result = _get_docker_env() self.assertItemsEqual(result, expected)
Add a test for _get_docker_env
Add a test for _get_docker_env
Python
mit
gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty
import os import tempfile import shutil from unittest import TestCase import yaml from dusty import constants from dusty.runner.compose import _write_composefile class TestComposeRunner(TestCase): def setUp(self): self.temp_compose_dir = tempfile.mkdtemp() self.temp_compose_path = os.path.join(self.temp_compose_dir, 'docker-compose.yml') self.old_compose_dir = constants.COMPOSE_DIR constants.COMPOSE_DIR = self.temp_compose_dir self.test_spec = {'app-a': {'image': 'app/a'}} def tearDown(self): constants.COMPOSE_DIR = self.old_compose_dir shutil.rmtree(self.temp_compose_dir) def test_write_composefile(self): _write_composefile(self.test_spec) written = open(self.temp_compose_path, 'r').read() self.assertItemsEqual(yaml.load(written), self.test_spec) Add a test for _get_docker_env
import os import tempfile import shutil from unittest import TestCase from mock import patch import yaml from dusty import constants from dusty.runner.compose import _write_composefile, _get_docker_env class TestComposeRunner(TestCase): def setUp(self): self.temp_compose_dir = tempfile.mkdtemp() self.temp_compose_path = os.path.join(self.temp_compose_dir, 'docker-compose.yml') self.old_compose_dir = constants.COMPOSE_DIR constants.COMPOSE_DIR = self.temp_compose_dir self.test_spec = {'app-a': {'image': 'app/a'}} def tearDown(self): constants.COMPOSE_DIR = self.old_compose_dir shutil.rmtree(self.temp_compose_dir) def test_write_composefile(self): _write_composefile(self.test_spec) written = open(self.temp_compose_path, 'r').read() self.assertItemsEqual(yaml.load(written), self.test_spec) @patch('dusty.runner.compose._check_output_demoted') def test_get_docker_env(self, fake_check_output): fake_check_output.return_value = """ export DOCKER_TLS_VERIFY=1 export DOCKER_HOST=tcp://192.168.59.103:2376 export DOCKER_CERT_PATH=/Users/root/.boot2docker/certs/boot2docker-vm""" expected = {'DOCKER_TLS_VERIFY': '1', 'DOCKER_HOST': 'tcp://192.168.59.103:2376', 'DOCKER_CERT_PATH': '/Users/root/.boot2docker/certs/boot2docker-vm'} result = _get_docker_env() self.assertItemsEqual(result, expected)
<commit_before>import os import tempfile import shutil from unittest import TestCase import yaml from dusty import constants from dusty.runner.compose import _write_composefile class TestComposeRunner(TestCase): def setUp(self): self.temp_compose_dir = tempfile.mkdtemp() self.temp_compose_path = os.path.join(self.temp_compose_dir, 'docker-compose.yml') self.old_compose_dir = constants.COMPOSE_DIR constants.COMPOSE_DIR = self.temp_compose_dir self.test_spec = {'app-a': {'image': 'app/a'}} def tearDown(self): constants.COMPOSE_DIR = self.old_compose_dir shutil.rmtree(self.temp_compose_dir) def test_write_composefile(self): _write_composefile(self.test_spec) written = open(self.temp_compose_path, 'r').read() self.assertItemsEqual(yaml.load(written), self.test_spec) <commit_msg>Add a test for _get_docker_env<commit_after>
import os import tempfile import shutil from unittest import TestCase from mock import patch import yaml from dusty import constants from dusty.runner.compose import _write_composefile, _get_docker_env class TestComposeRunner(TestCase): def setUp(self): self.temp_compose_dir = tempfile.mkdtemp() self.temp_compose_path = os.path.join(self.temp_compose_dir, 'docker-compose.yml') self.old_compose_dir = constants.COMPOSE_DIR constants.COMPOSE_DIR = self.temp_compose_dir self.test_spec = {'app-a': {'image': 'app/a'}} def tearDown(self): constants.COMPOSE_DIR = self.old_compose_dir shutil.rmtree(self.temp_compose_dir) def test_write_composefile(self): _write_composefile(self.test_spec) written = open(self.temp_compose_path, 'r').read() self.assertItemsEqual(yaml.load(written), self.test_spec) @patch('dusty.runner.compose._check_output_demoted') def test_get_docker_env(self, fake_check_output): fake_check_output.return_value = """ export DOCKER_TLS_VERIFY=1 export DOCKER_HOST=tcp://192.168.59.103:2376 export DOCKER_CERT_PATH=/Users/root/.boot2docker/certs/boot2docker-vm""" expected = {'DOCKER_TLS_VERIFY': '1', 'DOCKER_HOST': 'tcp://192.168.59.103:2376', 'DOCKER_CERT_PATH': '/Users/root/.boot2docker/certs/boot2docker-vm'} result = _get_docker_env() self.assertItemsEqual(result, expected)
import os import tempfile import shutil from unittest import TestCase import yaml from dusty import constants from dusty.runner.compose import _write_composefile class TestComposeRunner(TestCase): def setUp(self): self.temp_compose_dir = tempfile.mkdtemp() self.temp_compose_path = os.path.join(self.temp_compose_dir, 'docker-compose.yml') self.old_compose_dir = constants.COMPOSE_DIR constants.COMPOSE_DIR = self.temp_compose_dir self.test_spec = {'app-a': {'image': 'app/a'}} def tearDown(self): constants.COMPOSE_DIR = self.old_compose_dir shutil.rmtree(self.temp_compose_dir) def test_write_composefile(self): _write_composefile(self.test_spec) written = open(self.temp_compose_path, 'r').read() self.assertItemsEqual(yaml.load(written), self.test_spec) Add a test for _get_docker_envimport os import tempfile import shutil from unittest import TestCase from mock import patch import yaml from dusty import constants from dusty.runner.compose import _write_composefile, _get_docker_env class TestComposeRunner(TestCase): def setUp(self): self.temp_compose_dir = tempfile.mkdtemp() self.temp_compose_path = os.path.join(self.temp_compose_dir, 'docker-compose.yml') self.old_compose_dir = constants.COMPOSE_DIR constants.COMPOSE_DIR = self.temp_compose_dir self.test_spec = {'app-a': {'image': 'app/a'}} def tearDown(self): constants.COMPOSE_DIR = self.old_compose_dir shutil.rmtree(self.temp_compose_dir) def test_write_composefile(self): _write_composefile(self.test_spec) written = open(self.temp_compose_path, 'r').read() self.assertItemsEqual(yaml.load(written), self.test_spec) @patch('dusty.runner.compose._check_output_demoted') def test_get_docker_env(self, fake_check_output): fake_check_output.return_value = """ export DOCKER_TLS_VERIFY=1 export DOCKER_HOST=tcp://192.168.59.103:2376 export DOCKER_CERT_PATH=/Users/root/.boot2docker/certs/boot2docker-vm""" expected = {'DOCKER_TLS_VERIFY': '1', 'DOCKER_HOST': 'tcp://192.168.59.103:2376', 'DOCKER_CERT_PATH': '/Users/root/.boot2docker/certs/boot2docker-vm'} result = _get_docker_env() self.assertItemsEqual(result, expected)
<commit_before>import os import tempfile import shutil from unittest import TestCase import yaml from dusty import constants from dusty.runner.compose import _write_composefile class TestComposeRunner(TestCase): def setUp(self): self.temp_compose_dir = tempfile.mkdtemp() self.temp_compose_path = os.path.join(self.temp_compose_dir, 'docker-compose.yml') self.old_compose_dir = constants.COMPOSE_DIR constants.COMPOSE_DIR = self.temp_compose_dir self.test_spec = {'app-a': {'image': 'app/a'}} def tearDown(self): constants.COMPOSE_DIR = self.old_compose_dir shutil.rmtree(self.temp_compose_dir) def test_write_composefile(self): _write_composefile(self.test_spec) written = open(self.temp_compose_path, 'r').read() self.assertItemsEqual(yaml.load(written), self.test_spec) <commit_msg>Add a test for _get_docker_env<commit_after>import os import tempfile import shutil from unittest import TestCase from mock import patch import yaml from dusty import constants from dusty.runner.compose import _write_composefile, _get_docker_env class TestComposeRunner(TestCase): def setUp(self): self.temp_compose_dir = tempfile.mkdtemp() self.temp_compose_path = os.path.join(self.temp_compose_dir, 'docker-compose.yml') self.old_compose_dir = constants.COMPOSE_DIR constants.COMPOSE_DIR = self.temp_compose_dir self.test_spec = {'app-a': {'image': 'app/a'}} def tearDown(self): constants.COMPOSE_DIR = self.old_compose_dir shutil.rmtree(self.temp_compose_dir) def test_write_composefile(self): _write_composefile(self.test_spec) written = open(self.temp_compose_path, 'r').read() self.assertItemsEqual(yaml.load(written), self.test_spec) @patch('dusty.runner.compose._check_output_demoted') def test_get_docker_env(self, fake_check_output): fake_check_output.return_value = """ export DOCKER_TLS_VERIFY=1 export DOCKER_HOST=tcp://192.168.59.103:2376 export DOCKER_CERT_PATH=/Users/root/.boot2docker/certs/boot2docker-vm""" expected = {'DOCKER_TLS_VERIFY': '1', 'DOCKER_HOST': 'tcp://192.168.59.103:2376', 'DOCKER_CERT_PATH': '/Users/root/.boot2docker/certs/boot2docker-vm'} result = _get_docker_env() self.assertItemsEqual(result, expected)
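The mocked `_check_output_demoted` returns `boot2docker shellinit`-style shell exports, so `_get_docker_env` presumably parses `export KEY=value` lines into a dict. One implementation consistent with this test — an assumption; dusty's real code may differ:

def _get_docker_env():
    # _check_output_demoted is the helper patched in the test above.
    output = _check_output_demoted(['boot2docker', 'shellinit'])
    env = {}
    for line in output.splitlines():
        line = line.strip()
        if line.startswith('export '):
            key, _, value = line[len('export '):].partition('=')
            env[key] = value
    return env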
a75ece6d38ccc5377092b37da9486b39594cae8b
sal/urls.py
sal/urls.py
import django.contrib.auth.views as auth_views from django.conf import settings from django.conf.urls import include, url from django.conf.urls.static import static from django.contrib import admin from django.contrib.staticfiles import views admin.autodiscover() urlpatterns = [ url(r'^login/*$', auth_views.LoginView, name='login'), url(r'^logout/$', auth_views.logout_then_login, name='logout_then_login'), url(r'^changepassword/$', auth_views.PasswordChangeView, name='password_change'), url(r'^changepassword/done/$', auth_views.PasswordChangeDoneView, name='password_change_done'), url(r'^', include('server.urls')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', admin.site.urls), url(r'^api/', include('api.v1.urls')), url(r'^api/v1/', include('api.v1.urls')), url(r'^api/v2/', include('api.v2.urls')), url(r'^inventory/', include('inventory.urls')), url(r'^search/', include('search.urls')), url(r'^licenses/', include('licenses.urls')), url(r'^catalog/', include('catalog.urls')), url(r'^profiles/', include('profiles.urls')), ] if settings.DEBUG: urlpatterns.append(url(r'^static/(?P<path>.*)$', views.serve))
import django.contrib.auth.views as auth_views from django.conf import settings from django.conf.urls import include, url from django.conf.urls.static import static from django.contrib import admin from django.contrib.staticfiles import views admin.autodiscover() urlpatterns = [ url(r'^login/*$', auth_views.LoginView.as_view(), name='login'), url(r'^logout/$', auth_views.logout_then_login, name='logout_then_login'), url(r'^changepassword/$', auth_views.PasswordChangeView.as_view(), name='password_change'), url(r'^changepassword/done/$', auth_views.PasswordChangeDoneView.as_view(), name='password_change_done'), url(r'^', include('server.urls')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', admin.site.urls), url(r'^api/', include('api.v1.urls')), url(r'^api/v1/', include('api.v1.urls')), url(r'^api/v2/', include('api.v2.urls')), url(r'^inventory/', include('inventory.urls')), url(r'^search/', include('search.urls')), url(r'^licenses/', include('licenses.urls')), url(r'^catalog/', include('catalog.urls')), url(r'^profiles/', include('profiles.urls')), ] if settings.DEBUG: urlpatterns.append(url(r'^static/(?P<path>.*)$', views.serve))
Fix URLs for login/logout, and password changes.
Fix URLs for login/logout, and password changes.
Python
apache-2.0
salopensource/sal,salopensource/sal,sheagcraig/sal,salopensource/sal,sheagcraig/sal,sheagcraig/sal,sheagcraig/sal,salopensource/sal
import django.contrib.auth.views as auth_views from django.conf import settings from django.conf.urls import include, url from django.conf.urls.static import static from django.contrib import admin from django.contrib.staticfiles import views admin.autodiscover() urlpatterns = [ url(r'^login/*$', auth_views.LoginView, name='login'), url(r'^logout/$', auth_views.logout_then_login, name='logout_then_login'), url(r'^changepassword/$', auth_views.PasswordChangeView, name='password_change'), url(r'^changepassword/done/$', auth_views.PasswordChangeDoneView, name='password_change_done'), url(r'^', include('server.urls')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', admin.site.urls), url(r'^api/', include('api.v1.urls')), url(r'^api/v1/', include('api.v1.urls')), url(r'^api/v2/', include('api.v2.urls')), url(r'^inventory/', include('inventory.urls')), url(r'^search/', include('search.urls')), url(r'^licenses/', include('licenses.urls')), url(r'^catalog/', include('catalog.urls')), url(r'^profiles/', include('profiles.urls')), ] if settings.DEBUG: urlpatterns.append(url(r'^static/(?P<path>.*)$', views.serve)) Fix URLs for login/logout, and password changes.
import django.contrib.auth.views as auth_views from django.conf import settings from django.conf.urls import include, url from django.conf.urls.static import static from django.contrib import admin from django.contrib.staticfiles import views admin.autodiscover() urlpatterns = [ url(r'^login/*$', auth_views.LoginView.as_view(), name='login'), url(r'^logout/$', auth_views.logout_then_login, name='logout_then_login'), url(r'^changepassword/$', auth_views.PasswordChangeView.as_view(), name='password_change'), url(r'^changepassword/done/$', auth_views.PasswordChangeDoneView.as_view(), name='password_change_done'), url(r'^', include('server.urls')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', admin.site.urls), url(r'^api/', include('api.v1.urls')), url(r'^api/v1/', include('api.v1.urls')), url(r'^api/v2/', include('api.v2.urls')), url(r'^inventory/', include('inventory.urls')), url(r'^search/', include('search.urls')), url(r'^licenses/', include('licenses.urls')), url(r'^catalog/', include('catalog.urls')), url(r'^profiles/', include('profiles.urls')), ] if settings.DEBUG: urlpatterns.append(url(r'^static/(?P<path>.*)$', views.serve))
<commit_before>import django.contrib.auth.views as auth_views from django.conf import settings from django.conf.urls import include, url from django.conf.urls.static import static from django.contrib import admin from django.contrib.staticfiles import views admin.autodiscover() urlpatterns = [ url(r'^login/*$', auth_views.LoginView, name='login'), url(r'^logout/$', auth_views.logout_then_login, name='logout_then_login'), url(r'^changepassword/$', auth_views.PasswordChangeView, name='password_change'), url(r'^changepassword/done/$', auth_views.PasswordChangeDoneView, name='password_change_done'), url(r'^', include('server.urls')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', admin.site.urls), url(r'^api/', include('api.v1.urls')), url(r'^api/v1/', include('api.v1.urls')), url(r'^api/v2/', include('api.v2.urls')), url(r'^inventory/', include('inventory.urls')), url(r'^search/', include('search.urls')), url(r'^licenses/', include('licenses.urls')), url(r'^catalog/', include('catalog.urls')), url(r'^profiles/', include('profiles.urls')), ] if settings.DEBUG: urlpatterns.append(url(r'^static/(?P<path>.*)$', views.serve)) <commit_msg>Fix URLs for login/logout, and password changes.<commit_after>
import django.contrib.auth.views as auth_views from django.conf import settings from django.conf.urls import include, url from django.conf.urls.static import static from django.contrib import admin from django.contrib.staticfiles import views admin.autodiscover() urlpatterns = [ url(r'^login/*$', auth_views.LoginView.as_view(), name='login'), url(r'^logout/$', auth_views.logout_then_login, name='logout_then_login'), url(r'^changepassword/$', auth_views.PasswordChangeView.as_view(), name='password_change'), url(r'^changepassword/done/$', auth_views.PasswordChangeDoneView.as_view(), name='password_change_done'), url(r'^', include('server.urls')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', admin.site.urls), url(r'^api/', include('api.v1.urls')), url(r'^api/v1/', include('api.v1.urls')), url(r'^api/v2/', include('api.v2.urls')), url(r'^inventory/', include('inventory.urls')), url(r'^search/', include('search.urls')), url(r'^licenses/', include('licenses.urls')), url(r'^catalog/', include('catalog.urls')), url(r'^profiles/', include('profiles.urls')), ] if settings.DEBUG: urlpatterns.append(url(r'^static/(?P<path>.*)$', views.serve))
import django.contrib.auth.views as auth_views from django.conf import settings from django.conf.urls import include, url from django.conf.urls.static import static from django.contrib import admin from django.contrib.staticfiles import views admin.autodiscover() urlpatterns = [ url(r'^login/*$', auth_views.LoginView, name='login'), url(r'^logout/$', auth_views.logout_then_login, name='logout_then_login'), url(r'^changepassword/$', auth_views.PasswordChangeView, name='password_change'), url(r'^changepassword/done/$', auth_views.PasswordChangeDoneView, name='password_change_done'), url(r'^', include('server.urls')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', admin.site.urls), url(r'^api/', include('api.v1.urls')), url(r'^api/v1/', include('api.v1.urls')), url(r'^api/v2/', include('api.v2.urls')), url(r'^inventory/', include('inventory.urls')), url(r'^search/', include('search.urls')), url(r'^licenses/', include('licenses.urls')), url(r'^catalog/', include('catalog.urls')), url(r'^profiles/', include('profiles.urls')), ] if settings.DEBUG: urlpatterns.append(url(r'^static/(?P<path>.*)$', views.serve)) Fix URLs for login/logout, and password changes.import django.contrib.auth.views as auth_views from django.conf import settings from django.conf.urls import include, url from django.conf.urls.static import static from django.contrib import admin from django.contrib.staticfiles import views admin.autodiscover() urlpatterns = [ url(r'^login/*$', auth_views.LoginView.as_view(), name='login'), url(r'^logout/$', auth_views.logout_then_login, name='logout_then_login'), url(r'^changepassword/$', auth_views.PasswordChangeView.as_view(), name='password_change'), url(r'^changepassword/done/$', auth_views.PasswordChangeDoneView.as_view(), name='password_change_done'), url(r'^', include('server.urls')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', admin.site.urls), url(r'^api/', include('api.v1.urls')), url(r'^api/v1/', include('api.v1.urls')), url(r'^api/v2/', include('api.v2.urls')), url(r'^inventory/', include('inventory.urls')), url(r'^search/', include('search.urls')), url(r'^licenses/', include('licenses.urls')), url(r'^catalog/', include('catalog.urls')), url(r'^profiles/', include('profiles.urls')), ] if settings.DEBUG: urlpatterns.append(url(r'^static/(?P<path>.*)$', views.serve))
<commit_before>import django.contrib.auth.views as auth_views from django.conf import settings from django.conf.urls import include, url from django.conf.urls.static import static from django.contrib import admin from django.contrib.staticfiles import views admin.autodiscover() urlpatterns = [ url(r'^login/*$', auth_views.LoginView, name='login'), url(r'^logout/$', auth_views.logout_then_login, name='logout_then_login'), url(r'^changepassword/$', auth_views.PasswordChangeView, name='password_change'), url(r'^changepassword/done/$', auth_views.PasswordChangeDoneView, name='password_change_done'), url(r'^', include('server.urls')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', admin.site.urls), url(r'^api/', include('api.v1.urls')), url(r'^api/v1/', include('api.v1.urls')), url(r'^api/v2/', include('api.v2.urls')), url(r'^inventory/', include('inventory.urls')), url(r'^search/', include('search.urls')), url(r'^licenses/', include('licenses.urls')), url(r'^catalog/', include('catalog.urls')), url(r'^profiles/', include('profiles.urls')), ] if settings.DEBUG: urlpatterns.append(url(r'^static/(?P<path>.*)$', views.serve)) <commit_msg>Fix URLs for login/logout, and password changes.<commit_after>import django.contrib.auth.views as auth_views from django.conf import settings from django.conf.urls import include, url from django.conf.urls.static import static from django.contrib import admin from django.contrib.staticfiles import views admin.autodiscover() urlpatterns = [ url(r'^login/*$', auth_views.LoginView.as_view(), name='login'), url(r'^logout/$', auth_views.logout_then_login, name='logout_then_login'), url(r'^changepassword/$', auth_views.PasswordChangeView.as_view(), name='password_change'), url(r'^changepassword/done/$', auth_views.PasswordChangeDoneView.as_view(), name='password_change_done'), url(r'^', include('server.urls')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', admin.site.urls), url(r'^api/', include('api.v1.urls')), url(r'^api/v1/', include('api.v1.urls')), url(r'^api/v2/', include('api.v2.urls')), url(r'^inventory/', include('inventory.urls')), url(r'^search/', include('search.urls')), url(r'^licenses/', include('licenses.urls')), url(r'^catalog/', include('catalog.urls')), url(r'^profiles/', include('profiles.urls')), ] if settings.DEBUG: urlpatterns.append(url(r'^static/(?P<path>.*)$', views.serve))
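The underlying bug: Django's class-based auth views are classes, not view callables, so they must be wired up through `as_view()`; handing `url()` the bare class fails when the route is dispatched. Function-based views such as `logout_then_login` are passed directly, which is why that line was untouched. The pattern in isolation:

from django.conf.urls import url
import django.contrib.auth.views as auth_views

urlpatterns = [
    # Broken: hands Django the class object itself.
    # url(r'^login/$', auth_views.LoginView, name='login'),
    # Fixed: as_view() returns the function Django can dispatch to.
    url(r'^login/$', auth_views.LoginView.as_view(), name='login'),
]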
4c74a02b669efd7ec66dbc217d8e55f149cd94d1
tests/test_parse.py
tests/test_parse.py
from hypothesis_auto import auto_pytest_magic from isort import parse auto_pytest_magic(parse.import_comment)
from hypothesis_auto import auto_pytest_magic from isort import parse from isort.finders import FindersManager from isort.settings import DEFAULT_SECTIONS, default TEST_CONTENTS = """ import xyz import abc def function(): pass """ auto_pytest_magic(parse.import_comment) auto_pytest_magic(parse.import_type) auto_pytest_magic(parse._strip_syntax) auto_pytest_magic(parse.skip_line) def test_file_contents(): ( in_lines, out_lines, import_index, place_imports, import_placements, as_map, imports, categorized_comments, first_comment_index_start, first_comment_index_end, change_count, original_line_count, ) = parse.file_contents( TEST_CONTENTS, line_separator="\n", add_imports=[], force_adds=False, sections=["FIRSTPARTY"], section_comments=[], forced_separate=[], combine_as_imports=False, verbose=False, finder=FindersManager(config=default, sections=DEFAULT_SECTIONS), ) assert "\n".join(in_lines) == TEST_CONTENTS assert "import" not in "\n".join(out_lines) assert import_index == 1 assert change_count == -2 assert original_line_count == len(in_lines)
Add test cases for parse module
Add test cases for parse module
Python
mit
PyCQA/isort,PyCQA/isort
from hypothesis_auto import auto_pytest_magic

from isort import parse

auto_pytest_magic(parse.import_comment)
Add test cases for parse module
from hypothesis_auto import auto_pytest_magic from isort import parse from isort.finders import FindersManager from isort.settings import DEFAULT_SECTIONS, default TEST_CONTENTS = """ import xyz import abc def function(): pass """ auto_pytest_magic(parse.import_comment) auto_pytest_magic(parse.import_type) auto_pytest_magic(parse._strip_syntax) auto_pytest_magic(parse.skip_line) def test_file_contents(): ( in_lines, out_lines, import_index, place_imports, import_placements, as_map, imports, categorized_comments, first_comment_index_start, first_comment_index_end, change_count, original_line_count, ) = parse.file_contents( TEST_CONTENTS, line_separator="\n", add_imports=[], force_adds=False, sections=["FIRSTPARTY"], section_comments=[], forced_separate=[], combine_as_imports=False, verbose=False, finder=FindersManager(config=default, sections=DEFAULT_SECTIONS), ) assert "\n".join(in_lines) == TEST_CONTENTS assert "import" not in "\n".join(out_lines) assert import_index == 1 assert change_count == -2 assert original_line_count == len(in_lines)
<commit_before>from hypothesis_auto import auto_pytest_magic

from isort import parse

auto_pytest_magic(parse.import_comment)
<commit_msg>Add test cases for parse module<commit_after>
from hypothesis_auto import auto_pytest_magic from isort import parse from isort.finders import FindersManager from isort.settings import DEFAULT_SECTIONS, default TEST_CONTENTS = """ import xyz import abc def function(): pass """ auto_pytest_magic(parse.import_comment) auto_pytest_magic(parse.import_type) auto_pytest_magic(parse._strip_syntax) auto_pytest_magic(parse.skip_line) def test_file_contents(): ( in_lines, out_lines, import_index, place_imports, import_placements, as_map, imports, categorized_comments, first_comment_index_start, first_comment_index_end, change_count, original_line_count, ) = parse.file_contents( TEST_CONTENTS, line_separator="\n", add_imports=[], force_adds=False, sections=["FIRSTPARTY"], section_comments=[], forced_separate=[], combine_as_imports=False, verbose=False, finder=FindersManager(config=default, sections=DEFAULT_SECTIONS), ) assert "\n".join(in_lines) == TEST_CONTENTS assert "import" not in "\n".join(out_lines) assert import_index == 1 assert change_count == -2 assert original_line_count == len(in_lines)
from hypothesis_auto import auto_pytest_magic

from isort import parse

auto_pytest_magic(parse.import_comment)
Add test cases for parse module
from hypothesis_auto import auto_pytest_magic

from isort import parse
from isort.finders import FindersManager
from isort.settings import DEFAULT_SECTIONS, default

TEST_CONTENTS = """
import xyz

import abc


def function():
    pass
"""

auto_pytest_magic(parse.import_comment)
auto_pytest_magic(parse.import_type)
auto_pytest_magic(parse._strip_syntax)
auto_pytest_magic(parse.skip_line)


def test_file_contents():
    (
        in_lines,
        out_lines,
        import_index,
        place_imports,
        import_placements,
        as_map,
        imports,
        categorized_comments,
        first_comment_index_start,
        first_comment_index_end,
        change_count,
        original_line_count,
    ) = parse.file_contents(
        TEST_CONTENTS,
        line_separator="\n",
        add_imports=[],
        force_adds=False,
        sections=["FIRSTPARTY"],
        section_comments=[],
        forced_separate=[],
        combine_as_imports=False,
        verbose=False,
        finder=FindersManager(config=default, sections=DEFAULT_SECTIONS),
    )

    assert "\n".join(in_lines) == TEST_CONTENTS
    assert "import" not in "\n".join(out_lines)
    assert import_index == 1
    assert change_count == -2
    assert original_line_count == len(in_lines)
<commit_before>from hypothesis_auto import auto_pytest_magic

from isort import parse

auto_pytest_magic(parse.import_comment)
<commit_msg>Add test cases for parse module<commit_after>from hypothesis_auto import auto_pytest_magic

from isort import parse
from isort.finders import FindersManager
from isort.settings import DEFAULT_SECTIONS, default

TEST_CONTENTS = """
import xyz

import abc


def function():
    pass
"""

auto_pytest_magic(parse.import_comment)
auto_pytest_magic(parse.import_type)
auto_pytest_magic(parse._strip_syntax)
auto_pytest_magic(parse.skip_line)


def test_file_contents():
    (
        in_lines,
        out_lines,
        import_index,
        place_imports,
        import_placements,
        as_map,
        imports,
        categorized_comments,
        first_comment_index_start,
        first_comment_index_end,
        change_count,
        original_line_count,
    ) = parse.file_contents(
        TEST_CONTENTS,
        line_separator="\n",
        add_imports=[],
        force_adds=False,
        sections=["FIRSTPARTY"],
        section_comments=[],
        forced_separate=[],
        combine_as_imports=False,
        verbose=False,
        finder=FindersManager(config=default, sections=DEFAULT_SECTIONS),
    )

    assert "\n".join(in_lines) == TEST_CONTENTS
    assert "import" not in "\n".join(out_lines)
    assert import_index == 1
    assert change_count == -2
    assert original_line_count == len(in_lines)
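For readers unfamiliar with the testing style in this record: `auto_pytest_magic` from the hypothesis-auto package generates property-based pytest cases from a function's type annotations. A self-contained usage sketch with an illustrative function of my own, not one from isort:

from hypothesis_auto import auto_pytest_magic

def add(left: int, right: int) -> int:
    return left + right

# Registers auto-generated test cases for `add` in this test module.
auto_pytest_magic(add)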
78585c783013c6f06f7e20eee6a654759b70e99c
tests/test_ttfmt.py
tests/test_ttfmt.py
import unittest class TestTtFmt(unittest.TestCase): def testName(self): pass if __name__ == "__main__": unittest.main()
import unittest import tt.fmttools.ttfmt as ttfmt class TestTtFmt(unittest.TestCase): def test_get_vars(self): data_provider = { # Simple test cases "F = A and B" : ["F", "A", "B"], "F = A and B or C" : ["F", "A", "B", "C"], } for eq in data_provider: self.assertListEqual(data_provider[eq], ttfmt.get_vars(eq)) if __name__ == "__main__": unittest.main()
Add basic tests for ttfmt get_vars method
Add basic tests for ttfmt get_vars method
Python
mit
welchbj/tt,welchbj/tt,welchbj/tt
import unittest class TestTtFmt(unittest.TestCase): def testName(self): pass if __name__ == "__main__": unittest.main()Add basic tests for ttfmt get_vars method
import unittest import tt.fmttools.ttfmt as ttfmt class TestTtFmt(unittest.TestCase): def test_get_vars(self): data_provider = { # Simple test cases "F = A and B" : ["F", "A", "B"], "F = A and B or C" : ["F", "A", "B", "C"], } for eq in data_provider: self.assertListEqual(data_provider[eq], ttfmt.get_vars(eq)) if __name__ == "__main__": unittest.main()
<commit_before>import unittest class TestTtFmt(unittest.TestCase): def testName(self): pass if __name__ == "__main__": unittest.main()<commit_msg>Add basic tests for ttfmt get_vars method<commit_after>
import unittest import tt.fmttools.ttfmt as ttfmt class TestTtFmt(unittest.TestCase): def test_get_vars(self): data_provider = { # Simple test cases "F = A and B" : ["F", "A", "B"], "F = A and B or C" : ["F", "A", "B", "C"], } for eq in data_provider: self.assertListEqual(data_provider[eq], ttfmt.get_vars(eq)) if __name__ == "__main__": unittest.main()
import unittest class TestTtFmt(unittest.TestCase): def testName(self): pass if __name__ == "__main__": unittest.main()Add basic tests for ttfmt get_vars methodimport unittest import tt.fmttools.ttfmt as ttfmt class TestTtFmt(unittest.TestCase): def test_get_vars(self): data_provider = { # Simple test cases "F = A and B" : ["F", "A", "B"], "F = A and B or C" : ["F", "A", "B", "C"], } for eq in data_provider: self.assertListEqual(data_provider[eq], ttfmt.get_vars(eq)) if __name__ == "__main__": unittest.main()
<commit_before>import unittest class TestTtFmt(unittest.TestCase): def testName(self): pass if __name__ == "__main__": unittest.main()<commit_msg>Add basic tests for ttfmt get_vars method<commit_after>import unittest import tt.fmttools.ttfmt as ttfmt class TestTtFmt(unittest.TestCase): def test_get_vars(self): data_provider = { # Simple test cases "F = A and B" : ["F", "A", "B"], "F = A and B or C" : ["F", "A", "B", "C"], } for eq in data_provider: self.assertListEqual(data_provider[eq], ttfmt.get_vars(eq)) if __name__ == "__main__": unittest.main()
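A `get_vars` that satisfies both cases in this data provider would tokenize the equation and keep identifiers while discarding Boolean operators. The following is an illustrative guess at the implementation under test, not tt's actual code:

import re

_OPERATORS = {'and', 'or', 'not', 'xor', 'nand', 'nor'}

def get_vars(equation):
    # Identifiers in order of appearance, minus Boolean operators.
    tokens = re.findall(r'[A-Za-z_]\w*', equation)
    return [tok for tok in tokens if tok.lower() not in _OPERATORS]

With this sketch, get_vars("F = A and B") yields ['F', 'A', 'B'], matching the expected list above.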
c2eeb0a7d8d3a2692537f2004052b9fad9b1527a
tests/test_utils.py
tests/test_utils.py
import os from unittest import TestCase import requests from furikura import utils def test_request(): requests.get('https://example.com') class TestUtils(TestCase): def test_get_file(self): self.assertEqual(utils.get_file("testfile"), "/usr/share/testfile") def test_check_connection(self): self.addTypeEqualityFunc(type, utils.check_connection(test_request)) def test_autostart(self): utils.autostart('add') self.assertTrue(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop'))) utils.autostart('remove') self.assertFalse(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop')))
import os from unittest import TestCase import requests from furikura import utils def test_request(): requests.get('https://example.com') class TestUtils(TestCase): def test_get_file(self): self.assertEqual(utils.get_file("testfile"), "/usr/share/testfile") def test_check_connection(self): self.addTypeEqualityFunc(type, utils.check_connection(test_request)) def test_autostart(self): os.makedirs(os.path.expanduser('~/.config/autostart/'), exist_ok=True) utils.autostart('add') self.assertTrue(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop'))) utils.autostart('remove') self.assertFalse(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop')))
Create autostart folder for tests
Create autostart folder for tests
Python
mit
benjamindean/furi-kura,benjamindean/furi-kura
import os from unittest import TestCase import requests from furikura import utils def test_request(): requests.get('https://example.com') class TestUtils(TestCase): def test_get_file(self): self.assertEqual(utils.get_file("testfile"), "/usr/share/testfile") def test_check_connection(self): self.addTypeEqualityFunc(type, utils.check_connection(test_request)) def test_autostart(self): utils.autostart('add') self.assertTrue(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop'))) utils.autostart('remove') self.assertFalse(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop'))) Create autostart folder for tests
import os from unittest import TestCase import requests from furikura import utils def test_request(): requests.get('https://example.com') class TestUtils(TestCase): def test_get_file(self): self.assertEqual(utils.get_file("testfile"), "/usr/share/testfile") def test_check_connection(self): self.addTypeEqualityFunc(type, utils.check_connection(test_request)) def test_autostart(self): os.makedirs(os.path.expanduser('~/.config/autostart/'), exist_ok=True) utils.autostart('add') self.assertTrue(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop'))) utils.autostart('remove') self.assertFalse(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop')))
<commit_before>import os from unittest import TestCase import requests from furikura import utils def test_request(): requests.get('https://example.com') class TestUtils(TestCase): def test_get_file(self): self.assertEqual(utils.get_file("testfile"), "/usr/share/testfile") def test_check_connection(self): self.addTypeEqualityFunc(type, utils.check_connection(test_request)) def test_autostart(self): utils.autostart('add') self.assertTrue(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop'))) utils.autostart('remove') self.assertFalse(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop'))) <commit_msg>Create autostart folder for tests<commit_after>
import os from unittest import TestCase import requests from furikura import utils def test_request(): requests.get('https://example.com') class TestUtils(TestCase): def test_get_file(self): self.assertEqual(utils.get_file("testfile"), "/usr/share/testfile") def test_check_connection(self): self.addTypeEqualityFunc(type, utils.check_connection(test_request)) def test_autostart(self): os.makedirs(os.path.expanduser('~/.config/autostart/'), exist_ok=True) utils.autostart('add') self.assertTrue(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop'))) utils.autostart('remove') self.assertFalse(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop')))
import os from unittest import TestCase import requests from furikura import utils def test_request(): requests.get('https://example.com') class TestUtils(TestCase): def test_get_file(self): self.assertEqual(utils.get_file("testfile"), "/usr/share/testfile") def test_check_connection(self): self.addTypeEqualityFunc(type, utils.check_connection(test_request)) def test_autostart(self): utils.autostart('add') self.assertTrue(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop'))) utils.autostart('remove') self.assertFalse(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop'))) Create autostart folder for testsimport os from unittest import TestCase import requests from furikura import utils def test_request(): requests.get('https://example.com') class TestUtils(TestCase): def test_get_file(self): self.assertEqual(utils.get_file("testfile"), "/usr/share/testfile") def test_check_connection(self): self.addTypeEqualityFunc(type, utils.check_connection(test_request)) def test_autostart(self): os.makedirs(os.path.expanduser('~/.config/autostart/'), exist_ok=True) utils.autostart('add') self.assertTrue(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop'))) utils.autostart('remove') self.assertFalse(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop')))
<commit_before>import os from unittest import TestCase import requests from furikura import utils def test_request(): requests.get('https://example.com') class TestUtils(TestCase): def test_get_file(self): self.assertEqual(utils.get_file("testfile"), "/usr/share/testfile") def test_check_connection(self): self.addTypeEqualityFunc(type, utils.check_connection(test_request)) def test_autostart(self): utils.autostart('add') self.assertTrue(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop'))) utils.autostart('remove') self.assertFalse(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop'))) <commit_msg>Create autostart folder for tests<commit_after>import os from unittest import TestCase import requests from furikura import utils def test_request(): requests.get('https://example.com') class TestUtils(TestCase): def test_get_file(self): self.assertEqual(utils.get_file("testfile"), "/usr/share/testfile") def test_check_connection(self): self.addTypeEqualityFunc(type, utils.check_connection(test_request)) def test_autostart(self): os.makedirs(os.path.expanduser('~/.config/autostart/'), exist_ok=True) utils.autostart('add') self.assertTrue(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop'))) utils.autostart('remove') self.assertFalse(os.path.islink(os.path.expanduser('~/.config/autostart/furikura.desktop')))
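The fix here is test-environment setup rather than logic: on a pristine CI image `~/.config/autostart/` may not exist yet, and creating a symlink inside a missing directory raises FileNotFoundError. `os.makedirs(..., exist_ok=True)` is idempotent, so it is safe to run unconditionally:

import os

autostart_dir = os.path.expanduser('~/.config/autostart/')
# No-op if the directory is already there; creates parents otherwise.
os.makedirs(autostart_dir, exist_ok=True)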
fc8c949c58caaa012f166f1d0266e896f4ab0e3f
getTwitter.py
getTwitter.py
print 'This script will be used to get the page and information from tiwtter'
import urllib2 print 'Welcome to the Get Twitter tool. This tool will allow you to download a page from Twitter to be used to extract the data' userResponse = raw_input("Please enter the full URL from the Tweet page") response = urllib2.urlopen(userResponse) html = response.read()
Allow user to input url
Allow user to input url Allows the user to input a URL which will then be retrieved
Python
artistic-2.0
christaylortf/FinalYearProject
print 'This script will be used to get the page and information from tiwtter' Allow user to input url Allows the user to input a URL which will then be retrieved
import urllib2 print 'Welcome to the Get Twitter tool. This tool will allow you to download a page from Twitter to be used to extract the data' userResponse = raw_input("Please enter the full URL from the Tweet page") response = urllib2.urlopen(userResponse) html = response.read()
<commit_before>print 'This script will be used to get the page and information from tiwtter' <commit_msg>Allow user to input url Allows the user to input a URL which will then be retrieved<commit_after>
import urllib2 print 'Welcome to the Get Twitter tool. This tool will allow you to download a page from Twitter to be used to extract the data' userResponse = raw_input("Please enter the full URL from the Tweet page") response = urllib2.urlopen(userResponse) html = response.read()
print 'This script will be used to get the page and information from tiwtter' Allow user to input url Allows the user to input a URL which will then be retrievedimport urllib2 print 'Welcome to the Get Twitter tool. This tool will allow you to download a page from Twitter to be used to extract the data' userResponse = raw_input("Please enter the full URL from the Tweet page") response = urllib2.urlopen(userResponse) html = response.read()
<commit_before>print 'This script will be used to get the page and information from tiwtter' <commit_msg>Allow user to input url Allows the user to input a URL which will then be retrieved<commit_after>import urllib2 print 'Welcome to the Get Twitter tool. This tool will allow you to download a page from Twitter to be used to extract the data' userResponse = raw_input("Please enter the full URL from the Tweet page") response = urllib2.urlopen(userResponse) html = response.read()
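Both the before and after versions of this file are Python 2 (`print` statement, `raw_input`, `urllib2`). For anyone running this on a modern interpreter, a rough Python 3 equivalent of the new version:

from urllib.request import urlopen

print('Welcome to the Get Twitter tool. This tool will allow you to '
      'download a page from Twitter to be used to extract the data')
url = input('Please enter the full URL from the Tweet page: ')
with urlopen(url) as response:
    html = response.read().decode('utf-8')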
dd68fbb86100d0d3da08172505e7c564cc5bd3e7
monitor-notifier-slack.py
monitor-notifier-slack.py
#!/usr/bin/env python import pika import json import requests SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"] RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"] connection = pika.BlockingConnection(pika.ConnectionParameters( RABBIT_MQ_SERVER)) channel = connection.channel() channel.queue_declare(queue='slack') def callback(ch, method, properties, body): payload = {} payload["text"] = body r = requests.post(SLACK_WEBHOOK_URL, data = json.dumps(payload)) channel.basic_consume(callback, queue='slack', no_ack=True) channel.start_consuming()
#!/usr/bin/env python import pika import json import requests import os RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"] RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"] RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"] credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD) connection = pika.BlockingConnection(pika.ConnectionParameters( RABBIT_MQ_SERVER, credentials = credentials)) channel = connection.channel() # channel.queue_declare(queue='slack') def callback(ch, method, properties, body): payload = {} payload["text"] = body req = json.loads(body) webhook_url = json.loads(req["monitor"]["notifier"]["arguments"])["webhook_url"] r = requests.post(webhook_url, data = json.dumps(payload)) channel.basic_consume(callback, queue='slack', no_ack=True) channel.start_consuming()
Add credentials + read webhook_url from notifier arguments
Add credentials + read webhook_url from notifier arguments
Python
mit
observer-hackaton/monitor-notifier-slack
#!/usr/bin/env python import pika import json import requests SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"] RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"] connection = pika.BlockingConnection(pika.ConnectionParameters( RABBIT_MQ_SERVER)) channel = connection.channel() channel.queue_declare(queue='slack') def callback(ch, method, properties, body): payload = {} payload["text"] = body r = requests.post(SLACK_WEBHOOK_URL, data = json.dumps(payload)) channel.basic_consume(callback, queue='slack', no_ack=True) channel.start_consuming() Add credentials + read webhook_url from notifier arguments
#!/usr/bin/env python import pika import json import requests import os RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"] RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"] RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"] credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD) connection = pika.BlockingConnection(pika.ConnectionParameters( RABBIT_MQ_SERVER, credentials = credentials)) channel = connection.channel() # channel.queue_declare(queue='slack') def callback(ch, method, properties, body): payload = {} payload["text"] = body req = json.loads(body) webhook_url = json.loads(req["monitor"]["notifier"]["arguments"])["webhook_url"] r = requests.post(webhook_url, data = json.dumps(payload)) channel.basic_consume(callback, queue='slack', no_ack=True) channel.start_consuming()
<commit_before>#!/usr/bin/env python import pika import json import requests SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"] RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"] connection = pika.BlockingConnection(pika.ConnectionParameters( RABBIT_MQ_SERVER)) channel = connection.channel() channel.queue_declare(queue='slack') def callback(ch, method, properties, body): payload = {} payload["text"] = body r = requests.post(SLACK_WEBHOOK_URL, data = json.dumps(payload)) channel.basic_consume(callback, queue='slack', no_ack=True) channel.start_consuming() <commit_msg>Add credentials + read webhook_url from notifier arguments<commit_after>
#!/usr/bin/env python import pika import json import requests import os RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"] RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"] RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"] credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD) connection = pika.BlockingConnection(pika.ConnectionParameters( RABBIT_MQ_SERVER, credentials = credentials)) channel = connection.channel() # channel.queue_declare(queue='slack') def callback(ch, method, properties, body): payload = {} payload["text"] = body req = json.loads(body) webhook_url = json.loads(req["monitor"]["notifier"]["arguments"])["webhook_url"] r = requests.post(webhook_url, data = json.dumps(payload)) channel.basic_consume(callback, queue='slack', no_ack=True) channel.start_consuming()
#!/usr/bin/env python import pika import json import requests SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"] RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"] connection = pika.BlockingConnection(pika.ConnectionParameters( RABBIT_MQ_SERVER)) channel = connection.channel() channel.queue_declare(queue='slack') def callback(ch, method, properties, body): payload = {} payload["text"] = body r = requests.post(SLACK_WEBHOOK_URL, data = json.dumps(payload)) channel.basic_consume(callback, queue='slack', no_ack=True) channel.start_consuming() Add credentials + read webhook_url from notifier arguments#!/usr/bin/env python import pika import json import requests import os RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"] RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"] RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"] credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD) connection = pika.BlockingConnection(pika.ConnectionParameters( RABBIT_MQ_SERVER, credentials = credentials)) channel = connection.channel() # channel.queue_declare(queue='slack') def callback(ch, method, properties, body): payload = {} payload["text"] = body req = json.loads(body) webhook_url = json.loads(req["monitor"]["notifier"]["arguments"])["webhook_url"] r = requests.post(webhook_url, data = json.dumps(payload)) channel.basic_consume(callback, queue='slack', no_ack=True) channel.start_consuming()
<commit_before>#!/usr/bin/env python import pika import json import requests SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"] RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"] connection = pika.BlockingConnection(pika.ConnectionParameters( RABBIT_MQ_SERVER)) channel = connection.channel() channel.queue_declare(queue='slack') def callback(ch, method, properties, body): payload = {} payload["text"] = body r = requests.post(SLACK_WEBHOOK_URL, data = json.dumps(payload)) channel.basic_consume(callback, queue='slack', no_ack=True) channel.start_consuming() <commit_msg>Add credentials + read webhook_url from notifier arguments<commit_after>#!/usr/bin/env python import pika import json import requests import os RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"] RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"] RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"] credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD) connection = pika.BlockingConnection(pika.ConnectionParameters( RABBIT_MQ_SERVER, credentials = credentials)) channel = connection.channel() # channel.queue_declare(queue='slack') def callback(ch, method, properties, body): payload = {} payload["text"] = body req = json.loads(body) webhook_url = json.loads(req["monitor"]["notifier"]["arguments"])["webhook_url"] r = requests.post(webhook_url, data = json.dumps(payload)) channel.basic_consume(callback, queue='slack', no_ack=True) channel.start_consuming()
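The reworked callback expects a specific JSON envelope on the `slack` queue, with `arguments` stored as a JSON-encoded string inside the outer document — hence the second `json.loads`. An illustrative message body that would satisfy the lookup; the values here are made up:

import json

body = json.dumps({
    "monitor": {
        "notifier": {
            # Nested JSON string, not a nested object.
            "arguments": json.dumps(
                {"webhook_url": "https://hooks.slack.com/services/T000/B000/XXXX"})
        }
    }
})

req = json.loads(body)
webhook_url = json.loads(req["monitor"]["notifier"]["arguments"])["webhook_url"]
print(webhook_url)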
275cec3a846093769eaddda87b753a7e5c224f59
odbc2csv.py
odbc2csv.py
import pypyodbc import csv conn = pypyodbc.connect("DSN=HOSS_DB") cur = conn.cursor() tables = [] cur.execute("select * from sys.tables") for row in cur.fetchall(): tables.append(row[0]) for table in tables: print(table) cur.execute("select * from {}".format(table)) column_names = [] for d in cur.description: column_names.append(d[0]) # file = open("{}.csv".format(table), "w", encoding="ISO-8859-1") file = open("{}.csv".format(table), "w", encoding="utf-8") writer = csv.writer(file) writer.writerow(column_names) for row in cur.fetchall(): writer.writerow(row) file.close()
import pypyodbc import csv conn = pypyodbc.connect("DSN=HOSS_DB") cur = conn.cursor() tables = [] cur.execute("select * from sys.tables") for row in cur.fetchall(): tables.append(row[0]) for table in tables: print(table) cur.execute("select * from {}".format(table)) column_names = [] for d in cur.description: column_names.append(d[0]) # file = open("{}.csv".format(table), "w", encoding="ISO-8859-1") file = open("{}.csv".format(table), "w", encoding="utf-8") writer = csv.writer(file, lineterminator='\n') writer.writerow(column_names) for row in cur.fetchall(): writer.writerow(row) file.close()
Use just newline for file terminator.
Use just newline for file terminator.
Python
isc
wablair/misc_scripts,wablair/misc_scripts,wablair/misc_scripts,wablair/misc_scripts
import pypyodbc import csv conn = pypyodbc.connect("DSN=HOSS_DB") cur = conn.cursor() tables = [] cur.execute("select * from sys.tables") for row in cur.fetchall(): tables.append(row[0]) for table in tables: print(table) cur.execute("select * from {}".format(table)) column_names = [] for d in cur.description: column_names.append(d[0]) # file = open("{}.csv".format(table), "w", encoding="ISO-8859-1") file = open("{}.csv".format(table), "w", encoding="utf-8") writer = csv.writer(file) writer.writerow(column_names) for row in cur.fetchall(): writer.writerow(row) file.close() Use just newline for file terminator.
import pypyodbc import csv conn = pypyodbc.connect("DSN=HOSS_DB") cur = conn.cursor() tables = [] cur.execute("select * from sys.tables") for row in cur.fetchall(): tables.append(row[0]) for table in tables: print(table) cur.execute("select * from {}".format(table)) column_names = [] for d in cur.description: column_names.append(d[0]) # file = open("{}.csv".format(table), "w", encoding="ISO-8859-1") file = open("{}.csv".format(table), "w", encoding="utf-8") writer = csv.writer(file, lineterminator='\n') writer.writerow(column_names) for row in cur.fetchall(): writer.writerow(row) file.close()
<commit_before>import pypyodbc import csv conn = pypyodbc.connect("DSN=HOSS_DB") cur = conn.cursor() tables = [] cur.execute("select * from sys.tables") for row in cur.fetchall(): tables.append(row[0]) for table in tables: print(table) cur.execute("select * from {}".format(table)) column_names = [] for d in cur.description: column_names.append(d[0]) # file = open("{}.csv".format(table), "w", encoding="ISO-8859-1") file = open("{}.csv".format(table), "w", encoding="utf-8") writer = csv.writer(file) writer.writerow(column_names) for row in cur.fetchall(): writer.writerow(row) file.close() <commit_msg>Use just newline for file terminator.<commit_after>
import pypyodbc import csv conn = pypyodbc.connect("DSN=HOSS_DB") cur = conn.cursor() tables = [] cur.execute("select * from sys.tables") for row in cur.fetchall(): tables.append(row[0]) for table in tables: print(table) cur.execute("select * from {}".format(table)) column_names = [] for d in cur.description: column_names.append(d[0]) # file = open("{}.csv".format(table), "w", encoding="ISO-8859-1") file = open("{}.csv".format(table), "w", encoding="utf-8") writer = csv.writer(file, lineterminator='\n') writer.writerow(column_names) for row in cur.fetchall(): writer.writerow(row) file.close()
import pypyodbc import csv conn = pypyodbc.connect("DSN=HOSS_DB") cur = conn.cursor() tables = [] cur.execute("select * from sys.tables") for row in cur.fetchall(): tables.append(row[0]) for table in tables: print(table) cur.execute("select * from {}".format(table)) column_names = [] for d in cur.description: column_names.append(d[0]) # file = open("{}.csv".format(table), "w", encoding="ISO-8859-1") file = open("{}.csv".format(table), "w", encoding="utf-8") writer = csv.writer(file) writer.writerow(column_names) for row in cur.fetchall(): writer.writerow(row) file.close() Use just newline for file terminator.import pypyodbc import csv conn = pypyodbc.connect("DSN=HOSS_DB") cur = conn.cursor() tables = [] cur.execute("select * from sys.tables") for row in cur.fetchall(): tables.append(row[0]) for table in tables: print(table) cur.execute("select * from {}".format(table)) column_names = [] for d in cur.description: column_names.append(d[0]) # file = open("{}.csv".format(table), "w", encoding="ISO-8859-1") file = open("{}.csv".format(table), "w", encoding="utf-8") writer = csv.writer(file, lineterminator='\n') writer.writerow(column_names) for row in cur.fetchall(): writer.writerow(row) file.close()
<commit_before>import pypyodbc import csv conn = pypyodbc.connect("DSN=HOSS_DB") cur = conn.cursor() tables = [] cur.execute("select * from sys.tables") for row in cur.fetchall(): tables.append(row[0]) for table in tables: print(table) cur.execute("select * from {}".format(table)) column_names = [] for d in cur.description: column_names.append(d[0]) # file = open("{}.csv".format(table), "w", encoding="ISO-8859-1") file = open("{}.csv".format(table), "w", encoding="utf-8") writer = csv.writer(file) writer.writerow(column_names) for row in cur.fetchall(): writer.writerow(row) file.close() <commit_msg>Use just newline for file terminator.<commit_after>import pypyodbc import csv conn = pypyodbc.connect("DSN=HOSS_DB") cur = conn.cursor() tables = [] cur.execute("select * from sys.tables") for row in cur.fetchall(): tables.append(row[0]) for table in tables: print(table) cur.execute("select * from {}".format(table)) column_names = [] for d in cur.description: column_names.append(d[0]) # file = open("{}.csv".format(table), "w", encoding="ISO-8859-1") file = open("{}.csv".format(table), "w", encoding="utf-8") writer = csv.writer(file, lineterminator='\n') writer.writerow(column_names) for row in cur.fetchall(): writer.writerow(row) file.close()
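Why the one-argument change fixes the output: `csv.writer` terminates rows with `\r\n` by default, and a file opened in text mode on Windows translates the trailing `\n` into `\r\n` again, producing `\r\r\n` and a blank line between records. Forcing `lineterminator='\n'` sidesteps that; the csv module's documented recommendation is `newline=''` on `open()`. Both approaches shown below:

import csv

# The commit's approach: keep text mode, emit bare newlines.
with open('table_a.csv', 'w', encoding='utf-8') as f:
    csv.writer(f, lineterminator='\n').writerow(['col1', 'col2'])

# The csv docs' approach: disable newline translation instead.
with open('table_b.csv', 'w', encoding='utf-8', newline='') as f:
    csv.writer(f).writerow(['col1', 'col2'])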
c345e73ac22be8dde7e0230121e0e02b581d1209
uncertainty/lib/nlp/summarizer.py
uncertainty/lib/nlp/summarizer.py
from . import chunktagger, lemmatizer, postagger, stemmer, tokenizer class Summarizer(object): def __init__(self, text): self.text = text def execute(self): tokens = tokenizer.NLTKTokenizer(self.text).execute() stems = stemmer.Stemmer(tokens).execute() pos = postagger.PosTagger(tokens).execute() chunk = chunktagger.ChunkTagger().parse(pos) summary = zip(tokens, stems, pos, chunk) return summary
from . import chunktagger, lemmatizer, postagger, stemmer, tokenizer class Summarizer(object): def __init__(self, text): self.text = text def execute(self): tokens = tokenizer.NLTKTokenizer(self.text).execute() stems = stemmer.Stemmer(tokens).execute() pos = postagger.PosTagger(tokens).execute() chunk = chunktagger.ChunkTagger().parse(pos) summary = zip(tokens, stems, pos, chunk) return [(t, s, p[1], c[1]) for (t, s, p, c) in summary]
Fix bug that returned a zip object instead of list
Fix bug that returned a zip object instead of list
Python
mit
meyersbs/uncertainty
from . import chunktagger, lemmatizer, postagger, stemmer, tokenizer class Summarizer(object): def __init__(self, text): self.text = text def execute(self): tokens = tokenizer.NLTKTokenizer(self.text).execute() stems = stemmer.Stemmer(tokens).execute() pos = postagger.PosTagger(tokens).execute() chunk = chunktagger.ChunkTagger().parse(pos) summary = zip(tokens, stems, pos, chunk) return summary Fix bug that returned a zip object instead of list
from . import chunktagger, lemmatizer, postagger, stemmer, tokenizer class Summarizer(object): def __init__(self, text): self.text = text def execute(self): tokens = tokenizer.NLTKTokenizer(self.text).execute() stems = stemmer.Stemmer(tokens).execute() pos = postagger.PosTagger(tokens).execute() chunk = chunktagger.ChunkTagger().parse(pos) summary = zip(tokens, stems, pos, chunk) return [(t, s, p[1], c[1]) for (t, s, p, c) in summary]
<commit_before>from . import chunktagger, lemmatizer, postagger, stemmer, tokenizer class Summarizer(object): def __init__(self, text): self.text = text def execute(self): tokens = tokenizer.NLTKTokenizer(self.text).execute() stems = stemmer.Stemmer(tokens).execute() pos = postagger.PosTagger(tokens).execute() chunk = chunktagger.ChunkTagger().parse(pos) summary = zip(tokens, stems, pos, chunk) return summary <commit_msg>Fix bug that returned a zip object instead of list<commit_after>
from . import chunktagger, lemmatizer, postagger, stemmer, tokenizer class Summarizer(object): def __init__(self, text): self.text = text def execute(self): tokens = tokenizer.NLTKTokenizer(self.text).execute() stems = stemmer.Stemmer(tokens).execute() pos = postagger.PosTagger(tokens).execute() chunk = chunktagger.ChunkTagger().parse(pos) summary = zip(tokens, stems, pos, chunk) return [(t, s, p[1], c[1]) for (t, s, p, c) in summary]
from . import chunktagger, lemmatizer, postagger, stemmer, tokenizer class Summarizer(object): def __init__(self, text): self.text = text def execute(self): tokens = tokenizer.NLTKTokenizer(self.text).execute() stems = stemmer.Stemmer(tokens).execute() pos = postagger.PosTagger(tokens).execute() chunk = chunktagger.ChunkTagger().parse(pos) summary = zip(tokens, stems, pos, chunk) return summary Fix bug that returned a zip object instead of listfrom . import chunktagger, lemmatizer, postagger, stemmer, tokenizer class Summarizer(object): def __init__(self, text): self.text = text def execute(self): tokens = tokenizer.NLTKTokenizer(self.text).execute() stems = stemmer.Stemmer(tokens).execute() pos = postagger.PosTagger(tokens).execute() chunk = chunktagger.ChunkTagger().parse(pos) summary = zip(tokens, stems, pos, chunk) return [(t, s, p[1], c[1]) for (t, s, p, c) in summary]
<commit_before>from . import chunktagger, lemmatizer, postagger, stemmer, tokenizer class Summarizer(object): def __init__(self, text): self.text = text def execute(self): tokens = tokenizer.NLTKTokenizer(self.text).execute() stems = stemmer.Stemmer(tokens).execute() pos = postagger.PosTagger(tokens).execute() chunk = chunktagger.ChunkTagger().parse(pos) summary = zip(tokens, stems, pos, chunk) return summary <commit_msg>Fix bug that returned a zip object instead of list<commit_after>from . import chunktagger, lemmatizer, postagger, stemmer, tokenizer class Summarizer(object): def __init__(self, text): self.text = text def execute(self): tokens = tokenizer.NLTKTokenizer(self.text).execute() stems = stemmer.Stemmer(tokens).execute() pos = postagger.PosTagger(tokens).execute() chunk = chunktagger.ChunkTagger().parse(pos) summary = zip(tokens, stems, pos, chunk) return [(t, s, p[1], c[1]) for (t, s, p, c) in summary]
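The bug fixed above is a common Python 3 pitfall: zip() returns a lazy, one-shot iterator rather than a list, so a caller that iterates the returned summary more than once silently gets nothing the second time. The fix also unpacks just the tag from the (token, tag) pairs produced by the POS and chunk taggers. A standalone two-line illustration of the iterator behavior (not project code):

pairs = zip(["a", "b"], [1, 2])
print(list(pairs))  # [('a', 1), ('b', 2)]
print(list(pairs))  # [] -- the iterator was exhausted by the first list()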
e22886416b04d3900bed76c699bbfcdb20534ea2
semillas_backend/users/serializers.py
semillas_backend/users/serializers.py
#from phonenumber_field.serializerfields import PhoneNumberField from rest_framework import serializers from drf_extra_fields.geo_fields import PointField from .models import User class UserSerializer(serializers.ModelSerializer): """ Usage: from rest_framework.renderers import JSONRenderer from semillas_backend.users.serializers import UserSerializer JSONRenderer().render(UserSerializer(user_instance).data) """ location = PointField() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone', 'faircoin_address', 'telegram_id') class UpdateUserSerializer(serializers.ModelSerializer): name = serializers.CharField(required=False) #phone = PhoneNumberField(required=False) email = serializers.CharField(required=False) picture = serializers.ImageField(required=False) uuid = serializers.CharField(read_only=True) location = PointField(required=False) class Meta: model = User fields = ('name', 'picture', 'phone', 'email', 'uuid', 'faircoin_address', 'telegram_id', 'location', 'location_manually_set') from wallet.serializers import WalletSerializer class FullUserSerializer(UserSerializer): wallet = WalletSerializer() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
#from phonenumber_field.serializerfields import PhoneNumberField from rest_framework import serializers from drf_extra_fields.geo_fields import PointField from .models import User class UserSerializer(serializers.ModelSerializer): """ Usage: from rest_framework.renderers import JSONRenderer from semillas_backend.users.serializers import UserSerializer JSONRenderer().render(UserSerializer(user_instance).data) """ location = PointField() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone', 'faircoin_address', 'telegram_id', 'is_staff') class UpdateUserSerializer(serializers.ModelSerializer): name = serializers.CharField(required=False) #phone = PhoneNumberField(required=False) email = serializers.CharField(required=False) picture = serializers.ImageField(required=False) uuid = serializers.CharField(read_only=True) location = PointField(required=False) class Meta: model = User fields = ('name', 'picture', 'phone', 'email', 'uuid', 'faircoin_address', 'telegram_id', 'location', 'location_manually_set') from wallet.serializers import WalletSerializer class FullUserSerializer(UserSerializer): wallet = WalletSerializer() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
Add is_staff to user serializer
Add is_staff to user serializer
Python
mit
Semillas/semillas_platform,Semillas/semillas_platform,Semillas/semillas_backend,Semillas/semillas_platform,Semillas/semillas_platform,Semillas/semillas_backend,Semillas/semillas_backend,Semillas/semillas_backend
#from phonenumber_field.serializerfields import PhoneNumberField from rest_framework import serializers from drf_extra_fields.geo_fields import PointField from .models import User class UserSerializer(serializers.ModelSerializer): """ Usage: from rest_framework.renderers import JSONRenderer from semillas_backend.users.serializers import UserSerializer JSONRenderer().render(UserSerializer(user_instance).data) """ location = PointField() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone', 'faircoin_address', 'telegram_id') class UpdateUserSerializer(serializers.ModelSerializer): name = serializers.CharField(required=False) #phone = PhoneNumberField(required=False) email = serializers.CharField(required=False) picture = serializers.ImageField(required=False) uuid = serializers.CharField(read_only=True) location = PointField(required=False) class Meta: model = User fields = ('name', 'picture', 'phone', 'email', 'uuid', 'faircoin_address', 'telegram_id', 'location', 'location_manually_set') from wallet.serializers import WalletSerializer class FullUserSerializer(UserSerializer): wallet = WalletSerializer() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone') Add is_staff to user serializer
#from phonenumber_field.serializerfields import PhoneNumberField from rest_framework import serializers from drf_extra_fields.geo_fields import PointField from .models import User class UserSerializer(serializers.ModelSerializer): """ Usage: from rest_framework.renderers import JSONRenderer from semillas_backend.users.serializers import UserSerializer JSONRenderer().render(UserSerializer(user_instance).data) """ location = PointField() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone', 'faircoin_address', 'telegram_id', 'is_staff') class UpdateUserSerializer(serializers.ModelSerializer): name = serializers.CharField(required=False) #phone = PhoneNumberField(required=False) email = serializers.CharField(required=False) picture = serializers.ImageField(required=False) uuid = serializers.CharField(read_only=True) location = PointField(required=False) class Meta: model = User fields = ('name', 'picture', 'phone', 'email', 'uuid', 'faircoin_address', 'telegram_id', 'location', 'location_manually_set') from wallet.serializers import WalletSerializer class FullUserSerializer(UserSerializer): wallet = WalletSerializer() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
<commit_before>#from phonenumber_field.serializerfields import PhoneNumberField from rest_framework import serializers from drf_extra_fields.geo_fields import PointField from .models import User class UserSerializer(serializers.ModelSerializer): """ Usage: from rest_framework.renderers import JSONRenderer from semillas_backend.users.serializers import UserSerializer JSONRenderer().render(UserSerializer(user_instance).data) """ location = PointField() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone', 'faircoin_address', 'telegram_id') class UpdateUserSerializer(serializers.ModelSerializer): name = serializers.CharField(required=False) #phone = PhoneNumberField(required=False) email = serializers.CharField(required=False) picture = serializers.ImageField(required=False) uuid = serializers.CharField(read_only=True) location = PointField(required=False) class Meta: model = User fields = ('name', 'picture', 'phone', 'email', 'uuid', 'faircoin_address', 'telegram_id', 'location', 'location_manually_set') from wallet.serializers import WalletSerializer class FullUserSerializer(UserSerializer): wallet = WalletSerializer() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone') <commit_msg>Add is_staff to user serializer<commit_after>
#from phonenumber_field.serializerfields import PhoneNumberField from rest_framework import serializers from drf_extra_fields.geo_fields import PointField from .models import User class UserSerializer(serializers.ModelSerializer): """ Usage: from rest_framework.renderers import JSONRenderer from semillas_backend.users.serializers import UserSerializer JSONRenderer().render(UserSerializer(user_instance).data) """ location = PointField() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone', 'faircoin_address', 'telegram_id', 'is_staff') class UpdateUserSerializer(serializers.ModelSerializer): name = serializers.CharField(required=False) #phone = PhoneNumberField(required=False) email = serializers.CharField(required=False) picture = serializers.ImageField(required=False) uuid = serializers.CharField(read_only=True) location = PointField(required=False) class Meta: model = User fields = ('name', 'picture', 'phone', 'email', 'uuid', 'faircoin_address', 'telegram_id', 'location', 'location_manually_set') from wallet.serializers import WalletSerializer class FullUserSerializer(UserSerializer): wallet = WalletSerializer() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
#from phonenumber_field.serializerfields import PhoneNumberField from rest_framework import serializers from drf_extra_fields.geo_fields import PointField from .models import User class UserSerializer(serializers.ModelSerializer): """ Usage: from rest_framework.renderers import JSONRenderer from semillas_backend.users.serializers import UserSerializer JSONRenderer().render(UserSerializer(user_instance).data) """ location = PointField() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone', 'faircoin_address', 'telegram_id') class UpdateUserSerializer(serializers.ModelSerializer): name = serializers.CharField(required=False) #phone = PhoneNumberField(required=False) email = serializers.CharField(required=False) picture = serializers.ImageField(required=False) uuid = serializers.CharField(read_only=True) location = PointField(required=False) class Meta: model = User fields = ('name', 'picture', 'phone', 'email', 'uuid', 'faircoin_address', 'telegram_id', 'location', 'location_manually_set') from wallet.serializers import WalletSerializer class FullUserSerializer(UserSerializer): wallet = WalletSerializer() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone') Add is_staff to user serializer#from phonenumber_field.serializerfields import PhoneNumberField from rest_framework import serializers from drf_extra_fields.geo_fields import PointField from .models import User class UserSerializer(serializers.ModelSerializer): """ Usage: from rest_framework.renderers import JSONRenderer from semillas_backend.users.serializers import UserSerializer JSONRenderer().render(UserSerializer(user_instance).data) """ location = PointField() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone', 'faircoin_address', 'telegram_id', 'is_staff') class UpdateUserSerializer(serializers.ModelSerializer): name = serializers.CharField(required=False) #phone = PhoneNumberField(required=False) email = serializers.CharField(required=False) picture = serializers.ImageField(required=False) uuid = serializers.CharField(read_only=True) location = PointField(required=False) class Meta: model = User fields = ('name', 'picture', 'phone', 'email', 'uuid', 'faircoin_address', 'telegram_id', 'location', 'location_manually_set') from wallet.serializers import WalletSerializer class FullUserSerializer(UserSerializer): wallet = WalletSerializer() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
<commit_before>#from phonenumber_field.serializerfields import PhoneNumberField from rest_framework import serializers from drf_extra_fields.geo_fields import PointField from .models import User class UserSerializer(serializers.ModelSerializer): """ Usage: from rest_framework.renderers import JSONRenderer from semillas_backend.users.serializers import UserSerializer JSONRenderer().render(UserSerializer(user_instance).data) """ location = PointField() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone', 'faircoin_address', 'telegram_id') class UpdateUserSerializer(serializers.ModelSerializer): name = serializers.CharField(required=False) #phone = PhoneNumberField(required=False) email = serializers.CharField(required=False) picture = serializers.ImageField(required=False) uuid = serializers.CharField(read_only=True) location = PointField(required=False) class Meta: model = User fields = ('name', 'picture', 'phone', 'email', 'uuid', 'faircoin_address', 'telegram_id', 'location', 'location_manually_set') from wallet.serializers import WalletSerializer class FullUserSerializer(UserSerializer): wallet = WalletSerializer() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone') <commit_msg>Add is_staff to user serializer<commit_after>#from phonenumber_field.serializerfields import PhoneNumberField from rest_framework import serializers from drf_extra_fields.geo_fields import PointField from .models import User class UserSerializer(serializers.ModelSerializer): """ Usage: from rest_framework.renderers import JSONRenderer from semillas_backend.users.serializers import UserSerializer JSONRenderer().render(UserSerializer(user_instance).data) """ location = PointField() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone', 'faircoin_address', 'telegram_id', 'is_staff') class UpdateUserSerializer(serializers.ModelSerializer): name = serializers.CharField(required=False) #phone = PhoneNumberField(required=False) email = serializers.CharField(required=False) picture = serializers.ImageField(required=False) uuid = serializers.CharField(read_only=True) location = PointField(required=False) class Meta: model = User fields = ('name', 'picture', 'phone', 'email', 'uuid', 'faircoin_address', 'telegram_id', 'location', 'location_manually_set') from wallet.serializers import WalletSerializer class FullUserSerializer(UserSerializer): wallet = WalletSerializer() class Meta: model = User fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
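Exposing an extra model field through a Django REST Framework ModelSerializer only requires naming it in Meta.fields, since is_staff already exists on Django's user model. A hedged sketch of the pattern (serializer name and field list are illustrative, not taken from the project):

from django.contrib.auth import get_user_model
from rest_framework import serializers

class StaffAwareUserSerializer(serializers.ModelSerializer):
    class Meta:
        model = get_user_model()
        # Listing a model field in Meta.fields is all that is needed
        # for the serializer to emit it.
        fields = ("username", "email", "is_staff")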
dd50adfa567f7be04b3c000508f3f649147be387
scripts/update_vv.py
scripts/update_vv.py
#!/usr/bin/env python import mica.vv mica.vv.update()
#!/usr/bin/env python import matplotlib matplotlib.use('Agg') import mica.vv mica.vv.update()
Set vv cron script to use Agg backend
Set vv cron script to use Agg backend
Python
bsd-3-clause
sot/mica,sot/mica
#!/usr/bin/env python import mica.vv mica.vv.update() Set vv cron script to use Agg backend
#!/usr/bin/env python import matplotlib matplotlib.use('Agg') import mica.vv mica.vv.update()
<commit_before>#!/usr/bin/env python import mica.vv mica.vv.update() <commit_msg>Set vv cron script to use Agg backend<commit_after>
#!/usr/bin/env python import matplotlib matplotlib.use('Agg') import mica.vv mica.vv.update()
#!/usr/bin/env python import mica.vv mica.vv.update() Set vv cron script to use Agg backend#!/usr/bin/env python import matplotlib matplotlib.use('Agg') import mica.vv mica.vv.update()
<commit_before>#!/usr/bin/env python import mica.vv mica.vv.update() <commit_msg>Set vv cron script to use Agg backend<commit_after>#!/usr/bin/env python import matplotlib matplotlib.use('Agg') import mica.vv mica.vv.update()
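The two added lines matter because cron jobs run without a display: matplotlib's default interactive backend needs one, while Agg renders straight to raster buffers. The backend must be selected before pyplot is imported anywhere in the process, which is presumably why the call sits above the mica import. A standalone sketch of the headless pattern (file name is illustrative):

import matplotlib
matplotlib.use("Agg")            # must precede any pyplot import
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1])
fig.savefig("plot.png")          # render to a file; no display required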
d2520f8834c7010d0896414ed9e49c8c65ef6df5
altair/vegalite/v2/examples/connected_scatterplot.py
altair/vegalite/v2/examples/connected_scatterplot.py
""" Connected Scatterplot (Lines with Custom Paths) ----------------------------------------------- This example show how the order encoding can be used to draw a custom path. The dataset tracks miles driven per capita along with gas prices annually from 1956 to 2010. It is based on Hannah Fairfield's article 'Driving Shifts Into Reverse'. See https://archive.nytimes.com/www.nytimes.com/imagepages/2010/05/02/business/02metrics.html for the original. """ # category: scatter plots import altair as alt from vega_datasets import data driving = data.driving() alt.Chart(driving).mark_line(point=True).encode( alt.X('miles', scale=alt.Scale(zero=False)), alt.Y('gas', scale=alt.Scale(zero=False)), order='year' )
""" Connected Scatterplot (Lines with Custom Paths) ----------------------------------------------- This example show how the order encoding can be used to draw a custom path. The dataset tracks miles driven per capita along with gas prices annually from 1956 to 2010. It is based on Hannah Fairfield's article 'Driving Shifts Into Reverse'. See https://archive.nytimes.com/www.nytimes.com/imagepages/2010/05/02/business/02metrics.html for the original. """ # category: scatter plots import altair as alt from vega_datasets import data driving = data.driving() alt.Chart(driving).mark_line(point=True).encode( alt.X('miles', scale=alt.Scale(zero=False)), alt.Y('gas', scale=alt.Scale(zero=False)), order='year' )
Add line break to connect scatter plot doc string
DOC: Add line break to connect scatter plot doc string
Python
bsd-3-clause
altair-viz/altair,jakevdp/altair
""" Connected Scatterplot (Lines with Custom Paths) ----------------------------------------------- This example show how the order encoding can be used to draw a custom path. The dataset tracks miles driven per capita along with gas prices annually from 1956 to 2010. It is based on Hannah Fairfield's article 'Driving Shifts Into Reverse'. See https://archive.nytimes.com/www.nytimes.com/imagepages/2010/05/02/business/02metrics.html for the original. """ # category: scatter plots import altair as alt from vega_datasets import data driving = data.driving() alt.Chart(driving).mark_line(point=True).encode( alt.X('miles', scale=alt.Scale(zero=False)), alt.Y('gas', scale=alt.Scale(zero=False)), order='year' ) DOC: Add line break to connect scatter plot doc string
""" Connected Scatterplot (Lines with Custom Paths) ----------------------------------------------- This example show how the order encoding can be used to draw a custom path. The dataset tracks miles driven per capita along with gas prices annually from 1956 to 2010. It is based on Hannah Fairfield's article 'Driving Shifts Into Reverse'. See https://archive.nytimes.com/www.nytimes.com/imagepages/2010/05/02/business/02metrics.html for the original. """ # category: scatter plots import altair as alt from vega_datasets import data driving = data.driving() alt.Chart(driving).mark_line(point=True).encode( alt.X('miles', scale=alt.Scale(zero=False)), alt.Y('gas', scale=alt.Scale(zero=False)), order='year' )
<commit_before>""" Connected Scatterplot (Lines with Custom Paths) ----------------------------------------------- This example show how the order encoding can be used to draw a custom path. The dataset tracks miles driven per capita along with gas prices annually from 1956 to 2010. It is based on Hannah Fairfield's article 'Driving Shifts Into Reverse'. See https://archive.nytimes.com/www.nytimes.com/imagepages/2010/05/02/business/02metrics.html for the original. """ # category: scatter plots import altair as alt from vega_datasets import data driving = data.driving() alt.Chart(driving).mark_line(point=True).encode( alt.X('miles', scale=alt.Scale(zero=False)), alt.Y('gas', scale=alt.Scale(zero=False)), order='year' ) <commit_msg>DOC: Add line break to connect scatter plot doc string<commit_after>
""" Connected Scatterplot (Lines with Custom Paths) ----------------------------------------------- This example show how the order encoding can be used to draw a custom path. The dataset tracks miles driven per capita along with gas prices annually from 1956 to 2010. It is based on Hannah Fairfield's article 'Driving Shifts Into Reverse'. See https://archive.nytimes.com/www.nytimes.com/imagepages/2010/05/02/business/02metrics.html for the original. """ # category: scatter plots import altair as alt from vega_datasets import data driving = data.driving() alt.Chart(driving).mark_line(point=True).encode( alt.X('miles', scale=alt.Scale(zero=False)), alt.Y('gas', scale=alt.Scale(zero=False)), order='year' )
""" Connected Scatterplot (Lines with Custom Paths) ----------------------------------------------- This example show how the order encoding can be used to draw a custom path. The dataset tracks miles driven per capita along with gas prices annually from 1956 to 2010. It is based on Hannah Fairfield's article 'Driving Shifts Into Reverse'. See https://archive.nytimes.com/www.nytimes.com/imagepages/2010/05/02/business/02metrics.html for the original. """ # category: scatter plots import altair as alt from vega_datasets import data driving = data.driving() alt.Chart(driving).mark_line(point=True).encode( alt.X('miles', scale=alt.Scale(zero=False)), alt.Y('gas', scale=alt.Scale(zero=False)), order='year' ) DOC: Add line break to connect scatter plot doc string""" Connected Scatterplot (Lines with Custom Paths) ----------------------------------------------- This example show how the order encoding can be used to draw a custom path. The dataset tracks miles driven per capita along with gas prices annually from 1956 to 2010. It is based on Hannah Fairfield's article 'Driving Shifts Into Reverse'. See https://archive.nytimes.com/www.nytimes.com/imagepages/2010/05/02/business/02metrics.html for the original. """ # category: scatter plots import altair as alt from vega_datasets import data driving = data.driving() alt.Chart(driving).mark_line(point=True).encode( alt.X('miles', scale=alt.Scale(zero=False)), alt.Y('gas', scale=alt.Scale(zero=False)), order='year' )
<commit_before>""" Connected Scatterplot (Lines with Custom Paths) ----------------------------------------------- This example show how the order encoding can be used to draw a custom path. The dataset tracks miles driven per capita along with gas prices annually from 1956 to 2010. It is based on Hannah Fairfield's article 'Driving Shifts Into Reverse'. See https://archive.nytimes.com/www.nytimes.com/imagepages/2010/05/02/business/02metrics.html for the original. """ # category: scatter plots import altair as alt from vega_datasets import data driving = data.driving() alt.Chart(driving).mark_line(point=True).encode( alt.X('miles', scale=alt.Scale(zero=False)), alt.Y('gas', scale=alt.Scale(zero=False)), order='year' ) <commit_msg>DOC: Add line break to connect scatter plot doc string<commit_after>""" Connected Scatterplot (Lines with Custom Paths) ----------------------------------------------- This example show how the order encoding can be used to draw a custom path. The dataset tracks miles driven per capita along with gas prices annually from 1956 to 2010. It is based on Hannah Fairfield's article 'Driving Shifts Into Reverse'. See https://archive.nytimes.com/www.nytimes.com/imagepages/2010/05/02/business/02metrics.html for the original. """ # category: scatter plots import altair as alt from vega_datasets import data driving = data.driving() alt.Chart(driving).mark_line(point=True).encode( alt.X('miles', scale=alt.Scale(zero=False)), alt.Y('gas', scale=alt.Scale(zero=False)), order='year' )
51854e2f437c3abc2a89d4e31e10aa6b36eef2e1
pylons/__init__.py
pylons/__init__.py
"""Base objects to be exported for use in Controllers""" from paste.registry import StackedObjectProxy from pylons.configuration import config __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator")
"""Base objects to be exported for use in Controllers""" # Import pkg_resources first so namespace handling is properly done so the # paste imports work import pkg_resources from paste.registry import StackedObjectProxy from pylons.configuration import config __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator")
Handle namespacing properly so pylons imports without errors.
Handle namespacing properly so pylons imports without errors. --HG-- branch : trunk
Python
bsd-3-clause
Pylons/pylons,Pylons/pylons,moreati/pylons,Pylons/pylons,moreati/pylons,moreati/pylons
"""Base objects to be exported for use in Controllers""" from paste.registry import StackedObjectProxy from pylons.configuration import config __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator") Handle namespacing properly so pylons imports without errors. --HG-- branch : trunk
"""Base objects to be exported for use in Controllers""" # Import pkg_resources first so namespace handling is properly done so the # paste imports work import pkg_resources from paste.registry import StackedObjectProxy from pylons.configuration import config __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator")
<commit_before>"""Base objects to be exported for use in Controllers""" from paste.registry import StackedObjectProxy from pylons.configuration import config __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator") <commit_msg>Handle namespacing properly so pylons imports without errors. --HG-- branch : trunk<commit_after>
"""Base objects to be exported for use in Controllers""" # Import pkg_resources first so namespace handling is properly done so the # paste imports work import pkg_resources from paste.registry import StackedObjectProxy from pylons.configuration import config __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator")
"""Base objects to be exported for use in Controllers""" from paste.registry import StackedObjectProxy from pylons.configuration import config __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator") Handle namespacing properly so pylons imports without errors. --HG-- branch : trunk"""Base objects to be exported for use in Controllers""" # Import pkg_resources first so namespace handling is properly done so the # paste imports work import pkg_resources from paste.registry import StackedObjectProxy from pylons.configuration import config __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator")
<commit_before>"""Base objects to be exported for use in Controllers""" from paste.registry import StackedObjectProxy from pylons.configuration import config __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator") <commit_msg>Handle namespacing properly so pylons imports without errors. --HG-- branch : trunk<commit_after>"""Base objects to be exported for use in Controllers""" # Import pkg_resources first so namespace handling is properly done so the # paste imports work import pkg_resources from paste.registry import StackedObjectProxy from pylons.configuration import config __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator")
bc4486063325fc18bee00ba3ee8ba4e5e2323bee
doc/tools/make_cookbook.py
doc/tools/make_cookbook.py
""" Generate the rst files for the cookbook from the recipes. """ import sys import os body = r""" .. raw:: html [<a href="{code}">source code</a>] .. literalinclude:: {code} :language: python :linenos: """ def recipe_to_rst(recipe): """ Convert a .py recipe to a .rst entry for sphinx """ sys.stderr.write("Converting {} to rst ...".format(recipe)) recipe_file = os.path.split(recipe)[-1] recipe_name = os.path.splitext(recipe_file)[0] output = recipe_name + '.rst' # Get the title from the first lines of the recipe docstring title = '' with open(recipe) as f: for line in f.readlines()[1:]: line = line.strip() if line == '"""' or not line: break title = ' '.join([title, line]) with open(output, 'w') as f: f.write('.. _cookbook_{}:\n\n'.format(recipe_name)) f.write(title.strip() + '\n') f.write('='*len(title) + '\n') f.write(body.format( code='../_static/cookbook/{}'.format(recipe_file))) sys.stderr.write(" done\n") if __name__ == '__main__': for recipe in sys.argv[1:]: recipe_to_rst(recipe)
""" Generate the rst files for the cookbook from the recipes. """ import sys import os body = r""" **Download** source code: :download:`{recipe}<{code}>` .. literalinclude:: {code} :language: python """ def recipe_to_rst(recipe): """ Convert a .py recipe to a .rst entry for sphinx """ sys.stderr.write("Converting {} to rst ...".format(recipe)) recipe_file = os.path.split(recipe)[-1] recipe_name = os.path.splitext(recipe_file)[0] output = recipe_name + '.rst' # Get the title from the first lines of the recipe docstring title = '' with open(recipe) as f: for line in f.readlines()[1:]: line = line.strip() if line == '"""' or not line: break title = ' '.join([title, line]) with open(output, 'w') as f: f.write('.. _cookbook_{}:\n\n'.format(recipe_name)) f.write(title.strip() + '\n') f.write('='*len(title) + '\n') f.write(body.format( recipe=recipe_file, code='../_static/cookbook/{}'.format(recipe_file))) sys.stderr.write(" done\n") if __name__ == '__main__': for recipe in sys.argv[1:]: recipe_to_rst(recipe)
Remove line numbers from recipe code
Remove line numbers from recipe code The larger font made the numbers not match the code. Added better link text to download the recipe.
Python
bsd-3-clause
santis19/fatiando,rafaelmds/fatiando,drandykass/fatiando,eusoubrasileiro/fatiando,eusoubrasileiro/fatiando,eusoubrasileiro/fatiando_seismic,fatiando/fatiando,eusoubrasileiro/fatiando_seismic,victortxa/fatiando,fatiando/fatiando,rafaelmds/fatiando,victortxa/fatiando,cmeessen/fatiando,santis19/fatiando,mtb-za/fatiando,eusoubrasileiro/fatiando,cmeessen/fatiando,mtb-za/fatiando,drandykass/fatiando
""" Generate the rst files for the cookbook from the recipes. """ import sys import os body = r""" .. raw:: html [<a href="{code}">source code</a>] .. literalinclude:: {code} :language: python :linenos: """ def recipe_to_rst(recipe): """ Convert a .py recipe to a .rst entry for sphinx """ sys.stderr.write("Converting {} to rst ...".format(recipe)) recipe_file = os.path.split(recipe)[-1] recipe_name = os.path.splitext(recipe_file)[0] output = recipe_name + '.rst' # Get the title from the first lines of the recipe docstring title = '' with open(recipe) as f: for line in f.readlines()[1:]: line = line.strip() if line == '"""' or not line: break title = ' '.join([title, line]) with open(output, 'w') as f: f.write('.. _cookbook_{}:\n\n'.format(recipe_name)) f.write(title.strip() + '\n') f.write('='*len(title) + '\n') f.write(body.format( code='../_static/cookbook/{}'.format(recipe_file))) sys.stderr.write(" done\n") if __name__ == '__main__': for recipe in sys.argv[1:]: recipe_to_rst(recipe) Remove line numbers from recipe code The larger font made the numbers not match the code. Added better link text to download the recipe.
""" Generate the rst files for the cookbook from the recipes. """ import sys import os body = r""" **Download** source code: :download:`{recipe}<{code}>` .. literalinclude:: {code} :language: python """ def recipe_to_rst(recipe): """ Convert a .py recipe to a .rst entry for sphinx """ sys.stderr.write("Converting {} to rst ...".format(recipe)) recipe_file = os.path.split(recipe)[-1] recipe_name = os.path.splitext(recipe_file)[0] output = recipe_name + '.rst' # Get the title from the first lines of the recipe docstring title = '' with open(recipe) as f: for line in f.readlines()[1:]: line = line.strip() if line == '"""' or not line: break title = ' '.join([title, line]) with open(output, 'w') as f: f.write('.. _cookbook_{}:\n\n'.format(recipe_name)) f.write(title.strip() + '\n') f.write('='*len(title) + '\n') f.write(body.format( recipe=recipe_file, code='../_static/cookbook/{}'.format(recipe_file))) sys.stderr.write(" done\n") if __name__ == '__main__': for recipe in sys.argv[1:]: recipe_to_rst(recipe)
<commit_before>""" Generate the rst files for the cookbook from the recipes. """ import sys import os body = r""" .. raw:: html [<a href="{code}">source code</a>] .. literalinclude:: {code} :language: python :linenos: """ def recipe_to_rst(recipe): """ Convert a .py recipe to a .rst entry for sphinx """ sys.stderr.write("Converting {} to rst ...".format(recipe)) recipe_file = os.path.split(recipe)[-1] recipe_name = os.path.splitext(recipe_file)[0] output = recipe_name + '.rst' # Get the title from the first lines of the recipe docstring title = '' with open(recipe) as f: for line in f.readlines()[1:]: line = line.strip() if line == '"""' or not line: break title = ' '.join([title, line]) with open(output, 'w') as f: f.write('.. _cookbook_{}:\n\n'.format(recipe_name)) f.write(title.strip() + '\n') f.write('='*len(title) + '\n') f.write(body.format( code='../_static/cookbook/{}'.format(recipe_file))) sys.stderr.write(" done\n") if __name__ == '__main__': for recipe in sys.argv[1:]: recipe_to_rst(recipe) <commit_msg>Remove line numbers from recipe code The larger font made the numbers not match the code. Added better link text to download the recipe.<commit_after>
""" Generate the rst files for the cookbook from the recipes. """ import sys import os body = r""" **Download** source code: :download:`{recipe}<{code}>` .. literalinclude:: {code} :language: python """ def recipe_to_rst(recipe): """ Convert a .py recipe to a .rst entry for sphinx """ sys.stderr.write("Converting {} to rst ...".format(recipe)) recipe_file = os.path.split(recipe)[-1] recipe_name = os.path.splitext(recipe_file)[0] output = recipe_name + '.rst' # Get the title from the first lines of the recipe docstring title = '' with open(recipe) as f: for line in f.readlines()[1:]: line = line.strip() if line == '"""' or not line: break title = ' '.join([title, line]) with open(output, 'w') as f: f.write('.. _cookbook_{}:\n\n'.format(recipe_name)) f.write(title.strip() + '\n') f.write('='*len(title) + '\n') f.write(body.format( recipe=recipe_file, code='../_static/cookbook/{}'.format(recipe_file))) sys.stderr.write(" done\n") if __name__ == '__main__': for recipe in sys.argv[1:]: recipe_to_rst(recipe)
""" Generate the rst files for the cookbook from the recipes. """ import sys import os body = r""" .. raw:: html [<a href="{code}">source code</a>] .. literalinclude:: {code} :language: python :linenos: """ def recipe_to_rst(recipe): """ Convert a .py recipe to a .rst entry for sphinx """ sys.stderr.write("Converting {} to rst ...".format(recipe)) recipe_file = os.path.split(recipe)[-1] recipe_name = os.path.splitext(recipe_file)[0] output = recipe_name + '.rst' # Get the title from the first lines of the recipe docstring title = '' with open(recipe) as f: for line in f.readlines()[1:]: line = line.strip() if line == '"""' or not line: break title = ' '.join([title, line]) with open(output, 'w') as f: f.write('.. _cookbook_{}:\n\n'.format(recipe_name)) f.write(title.strip() + '\n') f.write('='*len(title) + '\n') f.write(body.format( code='../_static/cookbook/{}'.format(recipe_file))) sys.stderr.write(" done\n") if __name__ == '__main__': for recipe in sys.argv[1:]: recipe_to_rst(recipe) Remove line numbers from recipe code The larger font made the numbers not match the code. Added better link text to download the recipe.""" Generate the rst files for the cookbook from the recipes. """ import sys import os body = r""" **Download** source code: :download:`{recipe}<{code}>` .. literalinclude:: {code} :language: python """ def recipe_to_rst(recipe): """ Convert a .py recipe to a .rst entry for sphinx """ sys.stderr.write("Converting {} to rst ...".format(recipe)) recipe_file = os.path.split(recipe)[-1] recipe_name = os.path.splitext(recipe_file)[0] output = recipe_name + '.rst' # Get the title from the first lines of the recipe docstring title = '' with open(recipe) as f: for line in f.readlines()[1:]: line = line.strip() if line == '"""' or not line: break title = ' '.join([title, line]) with open(output, 'w') as f: f.write('.. _cookbook_{}:\n\n'.format(recipe_name)) f.write(title.strip() + '\n') f.write('='*len(title) + '\n') f.write(body.format( recipe=recipe_file, code='../_static/cookbook/{}'.format(recipe_file))) sys.stderr.write(" done\n") if __name__ == '__main__': for recipe in sys.argv[1:]: recipe_to_rst(recipe)
<commit_before>""" Generate the rst files for the cookbook from the recipes. """ import sys import os body = r""" .. raw:: html [<a href="{code}">source code</a>] .. literalinclude:: {code} :language: python :linenos: """ def recipe_to_rst(recipe): """ Convert a .py recipe to a .rst entry for sphinx """ sys.stderr.write("Converting {} to rst ...".format(recipe)) recipe_file = os.path.split(recipe)[-1] recipe_name = os.path.splitext(recipe_file)[0] output = recipe_name + '.rst' # Get the title from the first lines of the recipe docstring title = '' with open(recipe) as f: for line in f.readlines()[1:]: line = line.strip() if line == '"""' or not line: break title = ' '.join([title, line]) with open(output, 'w') as f: f.write('.. _cookbook_{}:\n\n'.format(recipe_name)) f.write(title.strip() + '\n') f.write('='*len(title) + '\n') f.write(body.format( code='../_static/cookbook/{}'.format(recipe_file))) sys.stderr.write(" done\n") if __name__ == '__main__': for recipe in sys.argv[1:]: recipe_to_rst(recipe) <commit_msg>Remove line numbers from recipe code The larger font made the numbers not match the code. Added better link text to download the recipe.<commit_after>""" Generate the rst files for the cookbook from the recipes. """ import sys import os body = r""" **Download** source code: :download:`{recipe}<{code}>` .. literalinclude:: {code} :language: python """ def recipe_to_rst(recipe): """ Convert a .py recipe to a .rst entry for sphinx """ sys.stderr.write("Converting {} to rst ...".format(recipe)) recipe_file = os.path.split(recipe)[-1] recipe_name = os.path.splitext(recipe_file)[0] output = recipe_name + '.rst' # Get the title from the first lines of the recipe docstring title = '' with open(recipe) as f: for line in f.readlines()[1:]: line = line.strip() if line == '"""' or not line: break title = ' '.join([title, line]) with open(output, 'w') as f: f.write('.. _cookbook_{}:\n\n'.format(recipe_name)) f.write(title.strip() + '\n') f.write('='*len(title) + '\n') f.write(body.format( recipe=recipe_file, code='../_static/cookbook/{}'.format(recipe_file))) sys.stderr.write(" done\n") if __name__ == '__main__': for recipe in sys.argv[1:]: recipe_to_rst(recipe)
bfbdf34e2efd1d22ee6f15f4655334764106725c
locksmith/lightauth/common.py
locksmith/lightauth/common.py
from locksmith.common import apicall try: from django.conf import settings SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY, API_NAME = settings.LOCKSMITH_API_NAME ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/' except: SIGNING_KEY = "" API_NAME = "" ENDPOINT = "" def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT): try: apicall(endpoint, signing_key, api=api, endpoint=endpoint, key=key ) except urllib2.HTTPError as e: if e.code == 404: return None else: raise
from locksmith.common import apicall import urllib2 try: from django.conf import settings SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY API_NAME = settings.LOCKSMITH_API_NAME ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/' except: SIGNING_KEY = "" API_NAME = "" ENDPOINT = "" def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT): try: apicall(endpoint, signing_key, api=api, key=key ) return True except urllib2.HTTPError as e: if e.code == 404: return None else: raise
Make client key checking actually work.
Make client key checking actually work.
Python
bsd-3-clause
sunlightlabs/django-locksmith,sunlightlabs/django-locksmith,sunlightlabs/django-locksmith
from locksmith.common import apicall try: from django.conf import settings SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY, API_NAME = settings.LOCKSMITH_API_NAME ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/' except: SIGNING_KEY = "" API_NAME = "" ENDPOINT = "" def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT): try: apicall(endpoint, signing_key, api=api, endpoint=endpoint, key=key ) except urllib2.HTTPError as e: if e.code == 404: return None else: raise Make client key checking actually work.
from locksmith.common import apicall import urllib2 try: from django.conf import settings SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY API_NAME = settings.LOCKSMITH_API_NAME ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/' except: SIGNING_KEY = "" API_NAME = "" ENDPOINT = "" def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT): try: apicall(endpoint, signing_key, api=api, key=key ) return True except urllib2.HTTPError as e: if e.code == 404: return None else: raise
<commit_before>from locksmith.common import apicall try: from django.conf import settings SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY, API_NAME = settings.LOCKSMITH_API_NAME ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/' except: SIGNING_KEY = "" API_NAME = "" ENDPOINT = "" def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT): try: apicall(endpoint, signing_key, api=api, endpoint=endpoint, key=key ) except urllib2.HTTPError as e: if e.code == 404: return None else: raise <commit_msg>Make client key checking actually work.<commit_after>
from locksmith.common import apicall import urllib2 try: from django.conf import settings SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY API_NAME = settings.LOCKSMITH_API_NAME ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/' except: SIGNING_KEY = "" API_NAME = "" ENDPOINT = "" def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT): try: apicall(endpoint, signing_key, api=api, key=key ) return True except urllib2.HTTPError as e: if e.code == 404: return None else: raise
from locksmith.common import apicall try: from django.conf import settings SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY, API_NAME = settings.LOCKSMITH_API_NAME ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/' except: SIGNING_KEY = "" API_NAME = "" ENDPOINT = "" def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT): try: apicall(endpoint, signing_key, api=api, endpoint=endpoint, key=key ) except urllib2.HTTPError as e: if e.code == 404: return None else: raise Make client key checking actually work.from locksmith.common import apicall import urllib2 try: from django.conf import settings SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY API_NAME = settings.LOCKSMITH_API_NAME ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/' except: SIGNING_KEY = "" API_NAME = "" ENDPOINT = "" def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT): try: apicall(endpoint, signing_key, api=api, key=key ) return True except urllib2.HTTPError as e: if e.code == 404: return None else: raise
<commit_before>from locksmith.common import apicall try: from django.conf import settings SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY, API_NAME = settings.LOCKSMITH_API_NAME ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/' except: SIGNING_KEY = "" API_NAME = "" ENDPOINT = "" def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT): try: apicall(endpoint, signing_key, api=api, endpoint=endpoint, key=key ) except urllib2.HTTPError as e: if e.code == 404: return None else: raise <commit_msg>Make client key checking actually work.<commit_after>from locksmith.common import apicall import urllib2 try: from django.conf import settings SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY API_NAME = settings.LOCKSMITH_API_NAME ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/' except: SIGNING_KEY = "" API_NAME = "" ENDPOINT = "" def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT): try: apicall(endpoint, signing_key, api=api, key=key ) return True except urllib2.HTTPError as e: if e.code == 404: return None else: raise
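Besides the missing urllib2 import, the diff above fixes two quiet bugs: a trailing comma that made the module-level SIGNING_KEY a one-element tuple, and endpoint being handed to apicall both positionally and as a keyword, which the fix drops. It also adds an explicit return True, since the old function fell off the end and returned None for valid keys, making them indistinguishable from missing ones. The surviving error-handling idiom is worth isolating; a hedged standalone sketch (URL handling simplified, Python 2 urllib2 as in the record):

import urllib2

def resource_exists(url):
    # Treat 404 as "absent", re-raise anything else, report True on success.
    try:
        urllib2.urlopen(url)
        return True
    except urllib2.HTTPError as e:
        if e.code == 404:
            return None
        raise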
eecb3468b581b4854f2162c2b62ac06ea744045e
malcolm/core/attributemeta.py
malcolm/core/attributemeta.py
from collections import OrderedDict from malcolm.core.serializable import Serializable class AttributeMeta(Serializable): """Abstract base class for Meta objects""" # Type constants SCALAR = "scalar" TABLE = "table" SCALARARRAY = "scalar_array" def __init__(self, name, description, *args): super(AttributeMeta, self).__init__(name, *args) self.description = description def validate(self, value): """ Abstract function to validate a given value Args: value(abstract): Value to validate """ raise NotImplementedError( "Abstract validate function must be implemented in child classes") def to_dict(self): """Convert object attributes into a dictionary""" d = OrderedDict() d["description"] = self.description d["typeid"] = self.typeid return d
from collections import OrderedDict from malcolm.core.serializable import Serializable class AttributeMeta(Serializable): """Abstract base class for Meta objects""" def __init__(self, name, description, *args): super(AttributeMeta, self).__init__(name, *args) self.description = description def validate(self, value): """ Abstract function to validate a given value Args: value(abstract): Value to validate """ raise NotImplementedError( "Abstract validate function must be implemented in child classes") def to_dict(self): """Convert object attributes into a dictionary""" d = OrderedDict() d["description"] = self.description d["typeid"] = self.typeid return d
Remove unused AttributeMeta type constants
Remove unused AttributeMeta type constants
Python
apache-2.0
dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm
from collections import OrderedDict from malcolm.core.serializable import Serializable class AttributeMeta(Serializable): """Abstract base class for Meta objects""" # Type constants SCALAR = "scalar" TABLE = "table" SCALARARRAY = "scalar_array" def __init__(self, name, description, *args): super(AttributeMeta, self).__init__(name, *args) self.description = description def validate(self, value): """ Abstract function to validate a given value Args: value(abstract): Value to validate """ raise NotImplementedError( "Abstract validate function must be implemented in child classes") def to_dict(self): """Convert object attributes into a dictionary""" d = OrderedDict() d["description"] = self.description d["typeid"] = self.typeid return d Remove unused AttributeMeta type constants
from collections import OrderedDict from malcolm.core.serializable import Serializable class AttributeMeta(Serializable): """Abstract base class for Meta objects""" def __init__(self, name, description, *args): super(AttributeMeta, self).__init__(name, *args) self.description = description def validate(self, value): """ Abstract function to validate a given value Args: value(abstract): Value to validate """ raise NotImplementedError( "Abstract validate function must be implemented in child classes") def to_dict(self): """Convert object attributes into a dictionary""" d = OrderedDict() d["description"] = self.description d["typeid"] = self.typeid return d
<commit_before>from collections import OrderedDict from malcolm.core.serializable import Serializable class AttributeMeta(Serializable): """Abstract base class for Meta objects""" # Type constants SCALAR = "scalar" TABLE = "table" SCALARARRAY = "scalar_array" def __init__(self, name, description, *args): super(AttributeMeta, self).__init__(name, *args) self.description = description def validate(self, value): """ Abstract function to validate a given value Args: value(abstract): Value to validate """ raise NotImplementedError( "Abstract validate function must be implemented in child classes") def to_dict(self): """Convert object attributes into a dictionary""" d = OrderedDict() d["description"] = self.description d["typeid"] = self.typeid return d <commit_msg>Remove unused AttributeMeta type constants<commit_after>
from collections import OrderedDict from malcolm.core.serializable import Serializable class AttributeMeta(Serializable): """Abstract base class for Meta objects""" def __init__(self, name, description, *args): super(AttributeMeta, self).__init__(name, *args) self.description = description def validate(self, value): """ Abstract function to validate a given value Args: value(abstract): Value to validate """ raise NotImplementedError( "Abstract validate function must be implemented in child classes") def to_dict(self): """Convert object attributes into a dictionary""" d = OrderedDict() d["description"] = self.description d["typeid"] = self.typeid return d
from collections import OrderedDict from malcolm.core.serializable import Serializable class AttributeMeta(Serializable): """Abstract base class for Meta objects""" # Type constants SCALAR = "scalar" TABLE = "table" SCALARARRAY = "scalar_array" def __init__(self, name, description, *args): super(AttributeMeta, self).__init__(name, *args) self.description = description def validate(self, value): """ Abstract function to validate a given value Args: value(abstract): Value to validate """ raise NotImplementedError( "Abstract validate function must be implemented in child classes") def to_dict(self): """Convert object attributes into a dictionary""" d = OrderedDict() d["description"] = self.description d["typeid"] = self.typeid return d Remove unused AttributeMeta type constantsfrom collections import OrderedDict from malcolm.core.serializable import Serializable class AttributeMeta(Serializable): """Abstract base class for Meta objects""" def __init__(self, name, description, *args): super(AttributeMeta, self).__init__(name, *args) self.description = description def validate(self, value): """ Abstract function to validate a given value Args: value(abstract): Value to validate """ raise NotImplementedError( "Abstract validate function must be implemented in child classes") def to_dict(self): """Convert object attributes into a dictionary""" d = OrderedDict() d["description"] = self.description d["typeid"] = self.typeid return d
<commit_before>from collections import OrderedDict from malcolm.core.serializable import Serializable class AttributeMeta(Serializable): """Abstract base class for Meta objects""" # Type constants SCALAR = "scalar" TABLE = "table" SCALARARRAY = "scalar_array" def __init__(self, name, description, *args): super(AttributeMeta, self).__init__(name, *args) self.description = description def validate(self, value): """ Abstract function to validate a given value Args: value(abstract): Value to validate """ raise NotImplementedError( "Abstract validate function must be implemented in child classes") def to_dict(self): """Convert object attributes into a dictionary""" d = OrderedDict() d["description"] = self.description d["typeid"] = self.typeid return d <commit_msg>Remove unused AttributeMeta type constants<commit_after>from collections import OrderedDict from malcolm.core.serializable import Serializable class AttributeMeta(Serializable): """Abstract base class for Meta objects""" def __init__(self, name, description, *args): super(AttributeMeta, self).__init__(name, *args) self.description = description def validate(self, value): """ Abstract function to validate a given value Args: value(abstract): Value to validate """ raise NotImplementedError( "Abstract validate function must be implemented in child classes") def to_dict(self): """Convert object attributes into a dictionary""" d = OrderedDict() d["description"] = self.description d["typeid"] = self.typeid return d
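AttributeMeta above only defines the contract: subclasses override validate and supply a typeid for to_dict. A short illustrative subclass follows; StringMeta and its typeid value are invented for this sketch and are not part of the malcolm codebase shown here.

class StringMeta(AttributeMeta):
    """AttributeMeta that accepts only string values"""

    typeid = "string_meta"  # placeholder identifier, read by to_dict()

    def validate(self, value):
        if not isinstance(value, str):
            raise TypeError("%r is not a string" % (value,))
        return value

meta = StringMeta("label", "A demo string attribute")
meta.validate("ok")    # returns "ok"; a non-string raises TypeError
print(meta.to_dict())  # OrderedDict holding the description and typeid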
cb72ad84b9fdb15d7311220eb1762c8c2fe94fab
wagtailmenus/settings.py
wagtailmenus/settings.py
# -*- coding: utf-8 -*- from django.conf import settings ACTIVE_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_CLASS', 'active') ACTIVE_ANCESTOR_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_ANCESTOR_CLASS', 'ancestor') MAINMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_MAINMENU_MENU_ICON', 'list-ol') FLATMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_MAINMENU_MENU_ICON', 'list-ol') DEFAULT_MAIN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_MAIN_MENU_TEMPLATE', 'menus/main_menu.html') DEFAULT_FLAT_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_FLAT_MENU_TEMPLATE', 'menus/flat_menu.html') DEFAULT_SECTION_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_SECTION_MENU_TEMPLATE', 'menus/section_menu.html') DEFAULT_CHILDREN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_CHILDREN_MENU_TEMPLATE', 'menus/children_menu.html')
# -*- coding: utf-8 -*- from django.conf import settings ACTIVE_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_CLASS', 'active') ACTIVE_ANCESTOR_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_ANCESTOR_CLASS', 'ancestor') MAINMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_MAINMENU_MENU_ICON', 'list-ol') FLATMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_FLATMENU_MENU_ICON', 'list-ol') DEFAULT_MAIN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_MAIN_MENU_TEMPLATE', 'menus/main_menu.html') DEFAULT_FLAT_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_FLAT_MENU_TEMPLATE', 'menus/flat_menu.html') DEFAULT_SECTION_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_SECTION_MENU_TEMPLATE', 'menus/section_menu.html') DEFAULT_CHILDREN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_CHILDREN_MENU_TEMPLATE', 'menus/children_menu.html')
Fix setting reference for FLATMENU_MENU_ICON
Fix setting reference for FLATMENU_MENU_ICON
Python
mit
ababic/wagtailmenus,ababic/wagtailmenus,frague59/wagtailmenus,frague59/wagtailmenus,frague59/wagtailmenus,rkhleics/wagtailmenus,ababic/wagtailmenus,rkhleics/wagtailmenus,rkhleics/wagtailmenus
# -*- coding: utf-8 -*- from django.conf import settings ACTIVE_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_CLASS', 'active') ACTIVE_ANCESTOR_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_ANCESTOR_CLASS', 'ancestor') MAINMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_MAINMENU_MENU_ICON', 'list-ol') FLATMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_MAINMENU_MENU_ICON', 'list-ol') DEFAULT_MAIN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_MAIN_MENU_TEMPLATE', 'menus/main_menu.html') DEFAULT_FLAT_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_FLAT_MENU_TEMPLATE', 'menus/flat_menu.html') DEFAULT_SECTION_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_SECTION_MENU_TEMPLATE', 'menus/section_menu.html') DEFAULT_CHILDREN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_CHILDREN_MENU_TEMPLATE', 'menus/children_menu.html') Fix setting reference for FLATMENU_MENU_ICON
# -*- coding: utf-8 -*- from django.conf import settings ACTIVE_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_CLASS', 'active') ACTIVE_ANCESTOR_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_ANCESTOR_CLASS', 'ancestor') MAINMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_MAINMENU_MENU_ICON', 'list-ol') FLATMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_FLATMENU_MENU_ICON', 'list-ol') DEFAULT_MAIN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_MAIN_MENU_TEMPLATE', 'menus/main_menu.html') DEFAULT_FLAT_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_FLAT_MENU_TEMPLATE', 'menus/flat_menu.html') DEFAULT_SECTION_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_SECTION_MENU_TEMPLATE', 'menus/section_menu.html') DEFAULT_CHILDREN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_CHILDREN_MENU_TEMPLATE', 'menus/children_menu.html')
<commit_before># -*- coding: utf-8 -*- from django.conf import settings ACTIVE_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_CLASS', 'active') ACTIVE_ANCESTOR_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_ANCESTOR_CLASS', 'ancestor') MAINMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_MAINMENU_MENU_ICON', 'list-ol') FLATMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_MAINMENU_MENU_ICON', 'list-ol') DEFAULT_MAIN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_MAIN_MENU_TEMPLATE', 'menus/main_menu.html') DEFAULT_FLAT_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_FLAT_MENU_TEMPLATE', 'menus/flat_menu.html') DEFAULT_SECTION_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_SECTION_MENU_TEMPLATE', 'menus/section_menu.html') DEFAULT_CHILDREN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_CHILDREN_MENU_TEMPLATE', 'menus/children_menu.html') <commit_msg>Fix setting reference for FLATMENU_MENU_ICON<commit_after>
# -*- coding: utf-8 -*- from django.conf import settings ACTIVE_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_CLASS', 'active') ACTIVE_ANCESTOR_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_ANCESTOR_CLASS', 'ancestor') MAINMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_MAINMENU_MENU_ICON', 'list-ol') FLATMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_FLATMENU_MENU_ICON', 'list-ol') DEFAULT_MAIN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_MAIN_MENU_TEMPLATE', 'menus/main_menu.html') DEFAULT_FLAT_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_FLAT_MENU_TEMPLATE', 'menus/flat_menu.html') DEFAULT_SECTION_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_SECTION_MENU_TEMPLATE', 'menus/section_menu.html') DEFAULT_CHILDREN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_CHILDREN_MENU_TEMPLATE', 'menus/children_menu.html')
# -*- coding: utf-8 -*- from django.conf import settings ACTIVE_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_CLASS', 'active') ACTIVE_ANCESTOR_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_ANCESTOR_CLASS', 'ancestor') MAINMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_MAINMENU_MENU_ICON', 'list-ol') FLATMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_MAINMENU_MENU_ICON', 'list-ol') DEFAULT_MAIN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_MAIN_MENU_TEMPLATE', 'menus/main_menu.html') DEFAULT_FLAT_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_FLAT_MENU_TEMPLATE', 'menus/flat_menu.html') DEFAULT_SECTION_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_SECTION_MENU_TEMPLATE', 'menus/section_menu.html') DEFAULT_CHILDREN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_CHILDREN_MENU_TEMPLATE', 'menus/children_menu.html') Fix setting reference for FLATMENU_MENU_ICON# -*- coding: utf-8 -*- from django.conf import settings ACTIVE_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_CLASS', 'active') ACTIVE_ANCESTOR_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_ANCESTOR_CLASS', 'ancestor') MAINMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_MAINMENU_MENU_ICON', 'list-ol') FLATMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_FLATMENU_MENU_ICON', 'list-ol') DEFAULT_MAIN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_MAIN_MENU_TEMPLATE', 'menus/main_menu.html') DEFAULT_FLAT_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_FLAT_MENU_TEMPLATE', 'menus/flat_menu.html') DEFAULT_SECTION_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_SECTION_MENU_TEMPLATE', 'menus/section_menu.html') DEFAULT_CHILDREN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_CHILDREN_MENU_TEMPLATE', 'menus/children_menu.html')
<commit_before># -*- coding: utf-8 -*- from django.conf import settings ACTIVE_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_CLASS', 'active') ACTIVE_ANCESTOR_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_ANCESTOR_CLASS', 'ancestor') MAINMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_MAINMENU_MENU_ICON', 'list-ol') FLATMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_MAINMENU_MENU_ICON', 'list-ol') DEFAULT_MAIN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_MAIN_MENU_TEMPLATE', 'menus/main_menu.html') DEFAULT_FLAT_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_FLAT_MENU_TEMPLATE', 'menus/flat_menu.html') DEFAULT_SECTION_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_SECTION_MENU_TEMPLATE', 'menus/section_menu.html') DEFAULT_CHILDREN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_CHILDREN_MENU_TEMPLATE', 'menus/children_menu.html') <commit_msg>Fix setting reference for FLATMENU_MENU_ICON<commit_after># -*- coding: utf-8 -*- from django.conf import settings ACTIVE_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_CLASS', 'active') ACTIVE_ANCESTOR_CLASS = getattr( settings, 'WAGTAILMENUS_ACTIVE_ANCESTOR_CLASS', 'ancestor') MAINMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_MAINMENU_MENU_ICON', 'list-ol') FLATMENU_MENU_ICON = getattr( settings, 'WAGTAILMENUS_FLATMENU_MENU_ICON', 'list-ol') DEFAULT_MAIN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_MAIN_MENU_TEMPLATE', 'menus/main_menu.html') DEFAULT_FLAT_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_FLAT_MENU_TEMPLATE', 'menus/flat_menu.html') DEFAULT_SECTION_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_SECTION_MENU_TEMPLATE', 'menus/section_menu.html') DEFAULT_CHILDREN_MENU_TEMPLATE = getattr( settings, 'WAGTAILMENUS_DEFAULT_CHILDREN_MENU_TEMPLATE', 'menus/children_menu.html')
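The one-word fix above matters for configuration: before it, the flat-menu icon re-read the main-menu setting name, so overriding it did nothing. A hedged sketch of the relevant lines in a project's Django settings; the icon names are placeholders.

# settings.py overrides, using the names read by the getattr calls above
WAGTAILMENUS_MAINMENU_MENU_ICON = 'list-ol'
WAGTAILMENUS_FLATMENU_MENU_ICON = 'list-ul'  # honoured only after this fix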
a49a3c133478c01774adfe8853b608e110a5a2e6
examples/test_double_click.py
examples/test_double_click.py
from seleniumbase import BaseCase class MyTestClass(BaseCase): def test_double_click(self): self.open("https://www.w3schools.com/jsref" "/tryit.asp?filename=tryjsref_ondblclick") self.switch_to_frame("iframeResult") self.double_click('[ondblclick="myFunction()"]') self.assert_text("Hello World", "#demo")
from seleniumbase import BaseCase class MyTestClass(BaseCase): def test_double_click_and_switch_to_frame(self): self.open("https://www.w3schools.com/jsref" "/tryit.asp?filename=tryjsref_ondblclick") self.ad_block() self.switch_to_frame("#iframeResult") self.double_click('[ondblclick="myFunction()"]') self.assert_text("Hello World", "#demo") def test_double_click_and_switch_to_frame_of_element(self): self.open("https://www.w3schools.com/jsref" "/tryit.asp?filename=tryjsref_ondblclick") self.ad_block() self.switch_to_frame_of_element('[ondblclick="myFunction()"]') self.double_click('[ondblclick="myFunction()"]') self.assert_text("Hello World", "#demo")
Update a test for entering iframes and double-clicking
Update a test for entering iframes and double-clicking
Python
mit
seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase
from seleniumbase import BaseCase class MyTestClass(BaseCase): def test_double_click(self): self.open("https://www.w3schools.com/jsref" "/tryit.asp?filename=tryjsref_ondblclick") self.switch_to_frame("iframeResult") self.double_click('[ondblclick="myFunction()"]') self.assert_text("Hello World", "#demo") Update a test for entering iframes and double-clicking
from seleniumbase import BaseCase class MyTestClass(BaseCase): def test_double_click_and_switch_to_frame(self): self.open("https://www.w3schools.com/jsref" "/tryit.asp?filename=tryjsref_ondblclick") self.ad_block() self.switch_to_frame("#iframeResult") self.double_click('[ondblclick="myFunction()"]') self.assert_text("Hello World", "#demo") def test_double_click_and_switch_to_frame_of_element(self): self.open("https://www.w3schools.com/jsref" "/tryit.asp?filename=tryjsref_ondblclick") self.ad_block() self.switch_to_frame_of_element('[ondblclick="myFunction()"]') self.double_click('[ondblclick="myFunction()"]') self.assert_text("Hello World", "#demo")
<commit_before>from seleniumbase import BaseCase class MyTestClass(BaseCase): def test_double_click(self): self.open("https://www.w3schools.com/jsref" "/tryit.asp?filename=tryjsref_ondblclick") self.switch_to_frame("iframeResult") self.double_click('[ondblclick="myFunction()"]') self.assert_text("Hello World", "#demo") <commit_msg>Update a test for entering iframes and double-clicking<commit_after>
from seleniumbase import BaseCase class MyTestClass(BaseCase): def test_double_click_and_switch_to_frame(self): self.open("https://www.w3schools.com/jsref" "/tryit.asp?filename=tryjsref_ondblclick") self.ad_block() self.switch_to_frame("#iframeResult") self.double_click('[ondblclick="myFunction()"]') self.assert_text("Hello World", "#demo") def test_double_click_and_switch_to_frame_of_element(self): self.open("https://www.w3schools.com/jsref" "/tryit.asp?filename=tryjsref_ondblclick") self.ad_block() self.switch_to_frame_of_element('[ondblclick="myFunction()"]') self.double_click('[ondblclick="myFunction()"]') self.assert_text("Hello World", "#demo")
from seleniumbase import BaseCase class MyTestClass(BaseCase): def test_double_click(self): self.open("https://www.w3schools.com/jsref" "/tryit.asp?filename=tryjsref_ondblclick") self.switch_to_frame("iframeResult") self.double_click('[ondblclick="myFunction()"]') self.assert_text("Hello World", "#demo") Update a test for entering iframes and double-clickingfrom seleniumbase import BaseCase class MyTestClass(BaseCase): def test_double_click_and_switch_to_frame(self): self.open("https://www.w3schools.com/jsref" "/tryit.asp?filename=tryjsref_ondblclick") self.ad_block() self.switch_to_frame("#iframeResult") self.double_click('[ondblclick="myFunction()"]') self.assert_text("Hello World", "#demo") def test_double_click_and_switch_to_frame_of_element(self): self.open("https://www.w3schools.com/jsref" "/tryit.asp?filename=tryjsref_ondblclick") self.ad_block() self.switch_to_frame_of_element('[ondblclick="myFunction()"]') self.double_click('[ondblclick="myFunction()"]') self.assert_text("Hello World", "#demo")
<commit_before>from seleniumbase import BaseCase class MyTestClass(BaseCase): def test_double_click(self): self.open("https://www.w3schools.com/jsref" "/tryit.asp?filename=tryjsref_ondblclick") self.switch_to_frame("iframeResult") self.double_click('[ondblclick="myFunction()"]') self.assert_text("Hello World", "#demo") <commit_msg>Update a test for entering iframes and double-clicking<commit_after>from seleniumbase import BaseCase class MyTestClass(BaseCase): def test_double_click_and_switch_to_frame(self): self.open("https://www.w3schools.com/jsref" "/tryit.asp?filename=tryjsref_ondblclick") self.ad_block() self.switch_to_frame("#iframeResult") self.double_click('[ondblclick="myFunction()"]') self.assert_text("Hello World", "#demo") def test_double_click_and_switch_to_frame_of_element(self): self.open("https://www.w3schools.com/jsref" "/tryit.asp?filename=tryjsref_ondblclick") self.ad_block() self.switch_to_frame_of_element('[ondblclick="myFunction()"]') self.double_click('[ondblclick="myFunction()"]') self.assert_text("Hello World", "#demo")
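The two tests above differ only in how they enter the iframe, which is easy to miss on a first read. A hedged sketch combining both entry styles in one run; it assumes SeleniumBase is installed and that switch_to_default_content is available to step back out of a frame (standard in BaseCase).

from seleniumbase import BaseCase

class FrameEntryDemo(BaseCase):
    def test_frame_entry_styles(self):
        self.open("https://www.w3schools.com/jsref"
                  "/tryit.asp?filename=tryjsref_ondblclick")
        self.ad_block()  # strip ads that can overlay the result iframe
        # Style 1: select the frame element itself by its own selector.
        self.switch_to_frame("#iframeResult")
        self.switch_to_default_content()  # leave the frame again
        # Style 2: select an element *inside* a frame; SeleniumBase finds
        # and enters whichever frame contains a match.
        self.switch_to_frame_of_element('[ondblclick="myFunction()"]')
        self.double_click('[ondblclick="myFunction()"]')
        self.assert_text("Hello World", "#demo")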
6d43df828cb34c8949c8f87c256bde2e6ccb7d3c
atamatracker/moviefile.py
atamatracker/moviefile.py
"""Movie module for atamaTracker """ import cv2 class Movie(object): """Movie file object. """ def __init__(self, file_path): self.__capture = cv2.VideoCapture(file_path) def __del__(self): self.__capture.release() def load_image(self, time_sec): """Load image at the desired time. Retruns None if no image could load. """ self.__capture.set(cv2.cv.CV_CAP_PROP_POS_MSEC, time_sec * 1000) f, image = self.__capture.read() return image
"""Movie module for atamaTracker """ import cv2 class Movie(object): """Movie file object. Public properties: fps (read-only) -- [float] frames per second width (read-only) -- [int] frame dimension height (read-only) -- [int] frame dimension """ def __init__(self, file_path): capture = cv2.VideoCapture(file_path) self.__capture = capture self.__fps = capture.get(cv2.cv.CV_CAP_PROP_FPS) self.__width = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH)) self.__height = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT)) def __del__(self): self.__capture.release() @property def fps(self): """frames per second """ return self.__fps @property def width(self): """frame dimension """ return self.__width @property def height(self): """frame dimension """ return self.__height def load_image(self, time_sec): """Load image at the desired time. Retruns None if no image could load. """ self.__capture.set(cv2.cv.CV_CAP_PROP_POS_MSEC, time_sec * 1000) f, image = self.__capture.read() return image
Add some useful read-only properties to Movie class
Add some useful read-only properties to Movie class
Python
mit
ptsg/AtamaTracker
"""Movie module for atamaTracker """ import cv2 class Movie(object): """Movie file object. """ def __init__(self, file_path): self.__capture = cv2.VideoCapture(file_path) def __del__(self): self.__capture.release() def load_image(self, time_sec): """Load image at the desired time. Retruns None if no image could load. """ self.__capture.set(cv2.cv.CV_CAP_PROP_POS_MSEC, time_sec * 1000) f, image = self.__capture.read() return image Add some useful read-only properties to Movie class
"""Movie module for atamaTracker """ import cv2 class Movie(object): """Movie file object. Public properties: fps (read-only) -- [float] frames per second width (read-only) -- [int] frame dimension height (read-only) -- [int] frame dimension """ def __init__(self, file_path): capture = cv2.VideoCapture(file_path) self.__capture = capture self.__fps = capture.get(cv2.cv.CV_CAP_PROP_FPS) self.__width = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH)) self.__height = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT)) def __del__(self): self.__capture.release() @property def fps(self): """frames per second """ return self.__fps @property def width(self): """frame dimension """ return self.__width @property def height(self): """frame dimension """ return self.__height def load_image(self, time_sec): """Load image at the desired time. Retruns None if no image could load. """ self.__capture.set(cv2.cv.CV_CAP_PROP_POS_MSEC, time_sec * 1000) f, image = self.__capture.read() return image
<commit_before>"""Movie module for atamaTracker """ import cv2 class Movie(object): """Movie file object. """ def __init__(self, file_path): self.__capture = cv2.VideoCapture(file_path) def __del__(self): self.__capture.release() def load_image(self, time_sec): """Load image at the desired time. Retruns None if no image could load. """ self.__capture.set(cv2.cv.CV_CAP_PROP_POS_MSEC, time_sec * 1000) f, image = self.__capture.read() return image <commit_msg>Add some useful read-only properties to Movie class<commit_after>
"""Movie module for atamaTracker """ import cv2 class Movie(object): """Movie file object. Public properties: fps (read-only) -- [float] frames per second width (read-only) -- [int] frame dimension height (read-only) -- [int] frame dimension """ def __init__(self, file_path): capture = cv2.VideoCapture(file_path) self.__capture = capture self.__fps = capture.get(cv2.cv.CV_CAP_PROP_FPS) self.__width = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH)) self.__height = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT)) def __del__(self): self.__capture.release() @property def fps(self): """frames per second """ return self.__fps @property def width(self): """frame dimension """ return self.__width @property def height(self): """frame dimension """ return self.__height def load_image(self, time_sec): """Load image at the desired time. Retruns None if no image could load. """ self.__capture.set(cv2.cv.CV_CAP_PROP_POS_MSEC, time_sec * 1000) f, image = self.__capture.read() return image
"""Movie module for atamaTracker """ import cv2 class Movie(object): """Movie file object. """ def __init__(self, file_path): self.__capture = cv2.VideoCapture(file_path) def __del__(self): self.__capture.release() def load_image(self, time_sec): """Load image at the desired time. Retruns None if no image could load. """ self.__capture.set(cv2.cv.CV_CAP_PROP_POS_MSEC, time_sec * 1000) f, image = self.__capture.read() return image Add some useful read-only properties to Movie class"""Movie module for atamaTracker """ import cv2 class Movie(object): """Movie file object. Public properties: fps (read-only) -- [float] frames per second width (read-only) -- [int] frame dimension height (read-only) -- [int] frame dimension """ def __init__(self, file_path): capture = cv2.VideoCapture(file_path) self.__capture = capture self.__fps = capture.get(cv2.cv.CV_CAP_PROP_FPS) self.__width = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH)) self.__height = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT)) def __del__(self): self.__capture.release() @property def fps(self): """frames per second """ return self.__fps @property def width(self): """frame dimension """ return self.__width @property def height(self): """frame dimension """ return self.__height def load_image(self, time_sec): """Load image at the desired time. Retruns None if no image could load. """ self.__capture.set(cv2.cv.CV_CAP_PROP_POS_MSEC, time_sec * 1000) f, image = self.__capture.read() return image
<commit_before>"""Movie module for atamaTracker """ import cv2 class Movie(object): """Movie file object. """ def __init__(self, file_path): self.__capture = cv2.VideoCapture(file_path) def __del__(self): self.__capture.release() def load_image(self, time_sec): """Load image at the desired time. Retruns None if no image could load. """ self.__capture.set(cv2.cv.CV_CAP_PROP_POS_MSEC, time_sec * 1000) f, image = self.__capture.read() return image <commit_msg>Add some useful read-only properties to Movie class<commit_after>"""Movie module for atamaTracker """ import cv2 class Movie(object): """Movie file object. Public properties: fps (read-only) -- [float] frames per second width (read-only) -- [int] frame dimension height (read-only) -- [int] frame dimension """ def __init__(self, file_path): capture = cv2.VideoCapture(file_path) self.__capture = capture self.__fps = capture.get(cv2.cv.CV_CAP_PROP_FPS) self.__width = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH)) self.__height = int(capture.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT)) def __del__(self): self.__capture.release() @property def fps(self): """frames per second """ return self.__fps @property def width(self): """frame dimension """ return self.__width @property def height(self): """frame dimension """ return self.__height def load_image(self, time_sec): """Load image at the desired time. Retruns None if no image could load. """ self.__capture.set(cv2.cv.CV_CAP_PROP_POS_MSEC, time_sec * 1000) f, image = self.__capture.read() return image
0a7dfca0e4783abc24a6ec9d0bd9b84219593a1f
common/djangoapps/util/json_request.py
common/djangoapps/util/json_request.py
from functools import wraps import copy import json def expect_json(view_function): @wraps(view_function) def expect_json_with_cloned_request(request, *args, **kwargs): if request.META['CONTENT_TYPE'] == "application/json": cloned_request = copy.copy(request) cloned_request.POST = cloned_request.POST.copy() cloned_request.POST.update(json.loads(request.body)) return view_function(cloned_request, *args, **kwargs) else: return view_function(request, *args, **kwargs) return expect_json_with_cloned_request
from functools import wraps import copy import json def expect_json(view_function): @wraps(view_function) def expect_json_with_cloned_request(request, *args, **kwargs): # cdodge: fix postback errors in CMS. The POST 'content-type' header can include additional information # e.g. 'charset', so we can't do a direct string compare if request.META['CONTENT_TYPE'].lower().startswith("application/json"): cloned_request = copy.copy(request) cloned_request.POST = cloned_request.POST.copy() cloned_request.POST.update(json.loads(request.body)) return view_function(cloned_request, *args, **kwargs) else: return view_function(request, *args, **kwargs) return expect_json_with_cloned_request
Fix JSON postback error where the content-type header line can contain more info than just the application/json descriptor. Now we just to a compare on the start of the header value.
Fix JSON postback error where the content-type header line can contain more info than just the application/json descriptor. Now we just to a compare on the start of the header value.
Python
agpl-3.0
deepsrijit1105/edx-platform,waheedahmed/edx-platform,jamesblunt/edx-platform,pku9104038/edx-platform,prarthitm/edxplatform,rismalrv/edx-platform,kamalx/edx-platform,LICEF/edx-platform,miptliot/edx-platform,abdoosh00/edraak,RPI-OPENEDX/edx-platform,shubhdev/edxOnBaadal,mtlchun/edx,Ayub-Khan/edx-platform,y12uc231/edx-platform,teltek/edx-platform,dcosentino/edx-platform,knehez/edx-platform,jolyonb/edx-platform,kmoocdev/edx-platform,eestay/edx-platform,shubhdev/edxOnBaadal,devs1991/test_edx_docmode,angelapper/edx-platform,vasyarv/edx-platform,motion2015/a3,itsjeyd/edx-platform,auferack08/edx-platform,xinjiguaike/edx-platform,jonathan-beard/edx-platform,eduNEXT/edunext-platform,OmarIthawi/edx-platform,don-github/edx-platform,cecep-edu/edx-platform,fintech-circle/edx-platform,zofuthan/edx-platform,doismellburning/edx-platform,edry/edx-platform,naresh21/synergetics-edx-platform,martynovp/edx-platform,jjmiranda/edx-platform,CourseTalk/edx-platform,TeachAtTUM/edx-platform,nttks/edx-platform,eemirtekin/edx-platform,louyihua/edx-platform,unicri/edx-platform,rhndg/openedx,morenopc/edx-platform,cognitiveclass/edx-platform,cyanna/edx-platform,solashirai/edx-platform,beacloudgenius/edx-platform,EDUlib/edx-platform,morenopc/edx-platform,appliedx/edx-platform,MakeHer/edx-platform,LearnEra/LearnEraPlaftform,cpennington/edx-platform,vikas1885/test1,chauhanhardik/populo,playm2mboy/edx-platform,jamiefolsom/edx-platform,Lektorium-LLC/edx-platform,itsjeyd/edx-platform,Edraak/edx-platform,shubhdev/openedx,eemirtekin/edx-platform,beacloudgenius/edx-platform,wwj718/edx-platform,nanolearningllc/edx-platform-cypress,Semi-global/edx-platform,nagyistoce/edx-platform,mcgachey/edx-platform,cselis86/edx-platform,IONISx/edx-platform,doismellburning/edx-platform,ferabra/edx-platform,MakeHer/edx-platform,hmcmooc/muddx-platform,chauhanhardik/populo_2,pomegranited/edx-platform,kmoocdev2/edx-platform,nanolearningllc/edx-platform-cypress,jzoldak/edx-platform,4eek/edx-platform,pomegranited/edx-platform,vikas1885/test1,hkawasaki/kawasaki-aio8-2,ak2703/edx-platform,mushtaqak/edx-platform,mitocw/edx-platform,franosincic/edx-platform,chauhanhardik/populo,nttks/jenkins-test,EduPepperPD/pepper2013,SravanthiSinha/edx-platform,rhndg/openedx,mtlchun/edx,dkarakats/edx-platform,JCBarahona/edX,eduNEXT/edunext-platform,bitifirefly/edx-platform,hkawasaki/kawasaki-aio8-0,kamalx/edx-platform,waheedahmed/edx-platform,procangroup/edx-platform,itsjeyd/edx-platform,zhenzhai/edx-platform,ahmadio/edx-platform,analyseuc3m/ANALYSE-v1,ak2703/edx-platform,nikolas/edx-platform,yokose-ks/edx-platform,ampax/edx-platform-backup,adoosii/edx-platform,shashank971/edx-platform,waheedahmed/edx-platform,pomegranited/edx-platform,J861449197/edx-platform,gsehub/edx-platform,benpatterson/edx-platform,EduPepperPDTesting/pepper2013-testing,playm2mboy/edx-platform,hkawasaki/kawasaki-aio8-0,mjg2203/edx-platform-seas,EduPepperPDTesting/pepper2013-testing,Ayub-Khan/edx-platform,kursitet/edx-platform,mahendra-r/edx-platform,nttks/jenkins-test,chudaol/edx-platform,shubhdev/edx-platform,gymnasium/edx-platform,J861449197/edx-platform,jbzdak/edx-platform,andyzsf/edx,nttks/edx-platform,PepperPD/edx-pepper-platform,EduPepperPDTesting/pepper2013-testing,tanmaykm/edx-platform,solashirai/edx-platform,syjeon/new_edx,EDUlib/edx-platform,edx-solutions/edx-platform,vikas1885/test1,pdehaye/theming-edx-platform,doganov/edx-platform,andyzsf/edx,halvertoluke/edx-platform,romain-li/edx-platform,hkawasaki/kawasaki-aio8-1,dkarakats/edx-platform,jbassen/edx-platform,LearnEra/LearnEraPlaftform,atsolakid/edx-platform,EduPepperPDTesting/pepper2013-testing,Edraak/edx-platform,shubhdev/openedx,AkA84/edx-platform,abdoosh00/edx-rtl-final,ahmadiga/min_edx,CredoReference/edx-platform,ak2703/edx-platform,msegado/edx-platform,motion2015/edx-platform,cpennington/edx-platform,morenopc/edx-platform,msegado/edx-platform,morenopc/edx-platform,TsinghuaX/edx-platform,alexthered/kienhoc-platform,IONISx/edx-platform,stvstnfrd/edx-platform,J861449197/edx-platform,LICEF/edx-platform,msegado/edx-platform,Lektorium-LLC/edx-platform,UXE/local-edx,pepeportela/edx-platform,atsolakid/edx-platform,xuxiao19910803/edx-platform,franosincic/edx-platform,jswope00/griffinx,valtech-mooc/edx-platform,Livit/Livit.Learn.EdX,pku9104038/edx-platform,fly19890211/edx-platform,dsajkl/123,motion2015/edx-platform,abdoosh00/edx-rtl-final,Livit/Livit.Learn.EdX,tiagochiavericosta/edx-platform,Kalyzee/edx-platform,simbs/edx-platform,jamesblunt/edx-platform,appliedx/edx-platform,vismartltd/edx-platform,jzoldak/edx-platform,solashirai/edx-platform,dcosentino/edx-platform,mcgachey/edx-platform,ZLLab-Mooc/edx-platform,shurihell/testasia,shubhdev/edx-platform,openfun/edx-platform,UXE/local-edx,shabab12/edx-platform,jbassen/edx-platform,cecep-edu/edx-platform,Kalyzee/edx-platform,kmoocdev/edx-platform,DefyVentures/edx-platform,cecep-edu/edx-platform,Stanford-Online/edx-platform,xuxiao19910803/edx-platform,praveen-pal/edx-platform,jelugbo/tundex,mcgachey/edx-platform,auferack08/edx-platform,SivilTaram/edx-platform,shubhdev/edxOnBaadal,ahmadio/edx-platform,beni55/edx-platform,antoviaque/edx-platform,xingyepei/edx-platform,adoosii/edx-platform,philanthropy-u/edx-platform,edry/edx-platform,Shrhawk/edx-platform,chudaol/edx-platform,zhenzhai/edx-platform,zhenzhai/edx-platform,chauhanhardik/populo_2,AkA84/edx-platform,IndonesiaX/edx-platform,shubhdev/edx-platform,edry/edx-platform,waheedahmed/edx-platform,xuxiao19910803/edx,nikolas/edx-platform,ferabra/edx-platform,shashank971/edx-platform,jbzdak/edx-platform,kmoocdev2/edx-platform,unicri/edx-platform,knehez/edx-platform,naresh21/synergetics-edx-platform,y12uc231/edx-platform,ahmadio/edx-platform,defance/edx-platform,pepeportela/edx-platform,mbareta/edx-platform-ft,ZLLab-Mooc/edx-platform,rue89-tech/edx-platform,dkarakats/edx-platform,zubair-arbi/edx-platform,ovnicraft/edx-platform,gsehub/edx-platform,angelapper/edx-platform,romain-li/edx-platform,benpatterson/edx-platform,OmarIthawi/edx-platform,nttks/edx-platform,peterm-itr/edx-platform,caesar2164/edx-platform,alexthered/kienhoc-platform,syjeon/new_edx,UOMx/edx-platform,ZLLab-Mooc/edx-platform,doganov/edx-platform,miptliot/edx-platform,polimediaupv/edx-platform,pdehaye/theming-edx-platform,dsajkl/123,msegado/edx-platform,nanolearningllc/edx-platform-cypress-2,pku9104038/edx-platform,Unow/edx-platform,knehez/edx-platform,halvertoluke/edx-platform,praveen-pal/edx-platform,JCBarahona/edX,miptliot/edx-platform,antonve/s4-project-mooc,nanolearningllc/edx-platform-cypress-2,CourseTalk/edx-platform,nagyistoce/edx-platform,PepperPD/edx-pepper-platform,eestay/edx-platform,PepperPD/edx-pepper-platform,caesar2164/edx-platform,motion2015/edx-platform,dsajkl/reqiop,beacloudgenius/edx-platform,atsolakid/edx-platform,hmcmooc/muddx-platform,nanolearningllc/edx-platform-cypress-2,rue89-tech/edx-platform,Edraak/edraak-platform,stvstnfrd/edx-platform,simbs/edx-platform,wwj718/ANALYSE,pelikanchik/edx-platform,Semi-global/edx-platform,ak2703/edx-platform,TsinghuaX/edx-platform,synergeticsedx/deployment-wipro,halvertoluke/edx-platform,xuxiao19910803/edx,SivilTaram/edx-platform,pomegranited/edx-platform,appsembler/edx-platform,simbs/edx-platform,kxliugang/edx-platform,MSOpenTech/edx-platform,defance/edx-platform,tiagochiavericosta/edx-platform,jazztpt/edx-platform,chudaol/edx-platform,kalebhartje/schoolboost,dsajkl/123,morpheby/levelup-by,nttks/edx-platform,ahmedaljazzar/edx-platform,hmcmooc/muddx-platform,edx-solutions/edx-platform,ZLLab-Mooc/edx-platform,jelugbo/tundex,raccoongang/edx-platform,romain-li/edx-platform,lduarte1991/edx-platform,UXE/local-edx,longmen21/edx-platform,jswope00/griffinx,peterm-itr/edx-platform,mcgachey/edx-platform,deepsrijit1105/edx-platform,marcore/edx-platform,Livit/Livit.Learn.EdX,RPI-OPENEDX/edx-platform,olexiim/edx-platform,motion2015/a3,eestay/edx-platform,IITBinterns13/edx-platform-dev,etzhou/edx-platform,tiagochiavericosta/edx-platform,openfun/edx-platform,DNFcode/edx-platform,MSOpenTech/edx-platform,cecep-edu/edx-platform,jazztpt/edx-platform,BehavioralInsightsTeam/edx-platform,shashank971/edx-platform,UOMx/edx-platform,IITBinterns13/edx-platform-dev,jamiefolsom/edx-platform,antonve/s4-project-mooc,vismartltd/edx-platform,doganov/edx-platform,IONISx/edx-platform,zadgroup/edx-platform,doismellburning/edx-platform,kxliugang/edx-platform,arifsetiawan/edx-platform,vismartltd/edx-platform,pabloborrego93/edx-platform,y12uc231/edx-platform,zubair-arbi/edx-platform,mcgachey/edx-platform,eduNEXT/edx-platform,analyseuc3m/ANALYSE-v1,jolyonb/edx-platform,arifsetiawan/edx-platform,edry/edx-platform,RPI-OPENEDX/edx-platform,bitifirefly/edx-platform,louyihua/edx-platform,EDUlib/edx-platform,solashirai/edx-platform,kalebhartje/schoolboost,pdehaye/theming-edx-platform,Softmotions/edx-platform,valtech-mooc/edx-platform,syjeon/new_edx,wwj718/edx-platform,torchingloom/edx-platform,xinjiguaike/edx-platform,kamalx/edx-platform,zerobatu/edx-platform,xinjiguaike/edx-platform,Semi-global/edx-platform,arbrandes/edx-platform,synergeticsedx/deployment-wipro,mushtaqak/edx-platform,nagyistoce/edx-platform,pku9104038/edx-platform,devs1991/test_edx_docmode,Edraak/edraak-platform,kmoocdev/edx-platform,fly19890211/edx-platform,leansoft/edx-platform,JioEducation/edx-platform,beni55/edx-platform,xingyepei/edx-platform,marcore/edx-platform,jbzdak/edx-platform,SivilTaram/edx-platform,jjmiranda/edx-platform,EduPepperPD/pepper2013,tanmaykm/edx-platform,Edraak/circleci-edx-platform,waheedahmed/edx-platform,fintech-circle/edx-platform,arbrandes/edx-platform,cselis86/edx-platform,cselis86/edx-platform,polimediaupv/edx-platform,jswope00/griffinx,deepsrijit1105/edx-platform,Ayub-Khan/edx-platform,xuxiao19910803/edx,nanolearningllc/edx-platform-cypress-2,hkawasaki/kawasaki-aio8-1,olexiim/edx-platform,LICEF/edx-platform,Endika/edx-platform,etzhou/edx-platform,hkawasaki/kawasaki-aio8-2,jruiperezv/ANALYSE,JCBarahona/edX,xingyepei/edx-platform,bigdatauniversity/edx-platform,OmarIthawi/edx-platform,nanolearningllc/edx-platform-cypress,Edraak/edx-platform,zerobatu/edx-platform,mjg2203/edx-platform-seas,wwj718/ANALYSE,pelikanchik/edx-platform,vikas1885/test1,openfun/edx-platform,motion2015/a3,unicri/edx-platform,4eek/edx-platform,raccoongang/edx-platform,Kalyzee/edx-platform,nanolearning/edx-platform,hastexo/edx-platform,Shrhawk/edx-platform,jazztpt/edx-platform,MSOpenTech/edx-platform,auferack08/edx-platform,ampax/edx-platform-backup,mbareta/edx-platform-ft,Endika/edx-platform,mjg2203/edx-platform-seas,franosincic/edx-platform,ovnicraft/edx-platform,Endika/edx-platform,iivic/BoiseStateX,don-github/edx-platform,ahmadio/edx-platform,BehavioralInsightsTeam/edx-platform,halvertoluke/edx-platform,beacloudgenius/edx-platform,jbzdak/edx-platform,sameetb-cuelogic/edx-platform-test,hamzehd/edx-platform,nttks/jenkins-test,CourseTalk/edx-platform,msegado/edx-platform,SravanthiSinha/edx-platform,Unow/edx-platform,stvstnfrd/edx-platform,DNFcode/edx-platform,motion2015/edx-platform,ampax/edx-platform,zofuthan/edx-platform,dcosentino/edx-platform,cognitiveclass/edx-platform,marcore/edx-platform,jamesblunt/edx-platform,CredoReference/edx-platform,arifsetiawan/edx-platform,arifsetiawan/edx-platform,rationalAgent/edx-platform-custom,Unow/edx-platform,sameetb-cuelogic/edx-platform-test,jswope00/griffinx,eduNEXT/edunext-platform,mahendra-r/edx-platform,rismalrv/edx-platform,DefyVentures/edx-platform,appsembler/edx-platform,zofuthan/edx-platform,edx-solutions/edx-platform,dsajkl/reqiop,mahendra-r/edx-platform,mahendra-r/edx-platform,iivic/BoiseStateX,adoosii/edx-platform,zerobatu/edx-platform,abdoosh00/edx-rtl-final,AkA84/edx-platform,zerobatu/edx-platform,apigee/edx-platform,jamesblunt/edx-platform,jonathan-beard/edx-platform,nttks/jenkins-test,y12uc231/edx-platform,procangroup/edx-platform,MakeHer/edx-platform,appliedx/edx-platform,arbrandes/edx-platform,jjmiranda/edx-platform,jazkarta/edx-platform-for-isc,jbassen/edx-platform,chand3040/cloud_that,kxliugang/edx-platform,ampax/edx-platform-backup,10clouds/edx-platform,valtech-mooc/edx-platform,jzoldak/edx-platform,gsehub/edx-platform,UOMx/edx-platform,a-parhom/edx-platform,TsinghuaX/edx-platform,jolyonb/edx-platform,MSOpenTech/edx-platform,sameetb-cuelogic/edx-platform-test,alu042/edx-platform,louyihua/edx-platform,mbareta/edx-platform-ft,cselis86/edx-platform,mahendra-r/edx-platform,mushtaqak/edx-platform,mitocw/edx-platform,LICEF/edx-platform,TeachAtTUM/edx-platform,B-MOOC/edx-platform,caesar2164/edx-platform,SravanthiSinha/edx-platform,playm2mboy/edx-platform,edx/edx-platform,inares/edx-platform,prarthitm/edxplatform,nanolearningllc/edx-platform-cypress-2,mbareta/edx-platform-ft,sudheerchintala/LearnEraPlatForm,longmen21/edx-platform,IONISx/edx-platform,chrisndodge/edx-platform,eemirtekin/edx-platform,10clouds/edx-platform,nanolearning/edx-platform,hamzehd/edx-platform,Edraak/circleci-edx-platform,praveen-pal/edx-platform,JioEducation/edx-platform,shashank971/edx-platform,abdoosh00/edraak,rhndg/openedx,kmoocdev/edx-platform,cecep-edu/edx-platform,jazkarta/edx-platform-for-isc,kalebhartje/schoolboost,chrisndodge/edx-platform,jonathan-beard/edx-platform,jelugbo/tundex,synergeticsedx/deployment-wipro,chauhanhardik/populo_2,zadgroup/edx-platform,alu042/edx-platform,vasyarv/edx-platform,Stanford-Online/edx-platform,Edraak/circleci-edx-platform,louyihua/edx-platform,hamzehd/edx-platform,motion2015/a3,chauhanhardik/populo,eduNEXT/edunext-platform,procangroup/edx-platform,pabloborrego93/edx-platform,jswope00/GAI,lduarte1991/edx-platform,martynovp/edx-platform,zubair-arbi/edx-platform,RPI-OPENEDX/edx-platform,proversity-org/edx-platform,halvertoluke/edx-platform,edx/edx-platform,nanolearning/edx-platform,MakeHer/edx-platform,martynovp/edx-platform,appsembler/edx-platform,wwj718/edx-platform,valtech-mooc/edx-platform,hamzehd/edx-platform,ESOedX/edx-platform,jruiperezv/ANALYSE,knehez/edx-platform,J861449197/edx-platform,longmen21/edx-platform,unicri/edx-platform,mtlchun/edx,rhndg/openedx,amir-qayyum-khan/edx-platform,chand3040/cloud_that,mjg2203/edx-platform-seas,ovnicraft/edx-platform,bitifirefly/edx-platform,iivic/BoiseStateX,10clouds/edx-platform,edx-solutions/edx-platform,etzhou/edx-platform,dsajkl/123,IITBinterns13/edx-platform-dev,utecuy/edx-platform,fly19890211/edx-platform,olexiim/edx-platform,JCBarahona/edX,EduPepperPD/pepper2013,mushtaqak/edx-platform,jonathan-beard/edx-platform,shurihell/testasia,doganov/edx-platform,eduNEXT/edx-platform,chauhanhardik/populo_2,nagyistoce/edx-platform,wwj718/edx-platform,ahmadiga/min_edx,doganov/edx-platform,lduarte1991/edx-platform,mtlchun/edx,Ayub-Khan/edx-platform,angelapper/edx-platform,chand3040/cloud_that,shabab12/edx-platform,rationalAgent/edx-platform-custom,atsolakid/edx-platform,jbzdak/edx-platform,dkarakats/edx-platform,polimediaupv/edx-platform,cognitiveclass/edx-platform,edx/edx-platform,kxliugang/edx-platform,alexthered/kienhoc-platform,IndonesiaX/edx-platform,nikolas/edx-platform,antonve/s4-project-mooc,franosincic/edx-platform,abdoosh00/edx-rtl-final,pdehaye/theming-edx-platform,DNFcode/edx-platform,morenopc/edx-platform,don-github/edx-platform,cognitiveclass/edx-platform,bitifirefly/edx-platform,ubc/edx-platform,motion2015/edx-platform,motion2015/a3,wwj718/edx-platform,sameetb-cuelogic/edx-platform-test,prarthitm/edxplatform,Shrhawk/edx-platform,jazkarta/edx-platform-for-isc,mjirayu/sit_academy,Endika/edx-platform,jazkarta/edx-platform-for-isc,teltek/edx-platform,bigdatauniversity/edx-platform,TeachAtTUM/edx-platform,cyanna/edx-platform,alexthered/kienhoc-platform,praveen-pal/edx-platform,fintech-circle/edx-platform,vismartltd/edx-platform,leansoft/edx-platform,olexiim/edx-platform,PepperPD/edx-pepper-platform,hkawasaki/kawasaki-aio8-0,alexthered/kienhoc-platform,yokose-ks/edx-platform,zofuthan/edx-platform,eduNEXT/edx-platform,yokose-ks/edx-platform,synergeticsedx/deployment-wipro,eestay/edx-platform,proversity-org/edx-platform,PepperPD/edx-pepper-platform,TsinghuaX/edx-platform,appliedx/edx-platform,xingyepei/edx-platform,alu042/edx-platform,zadgroup/edx-platform,Edraak/edraak-platform,kursitet/edx-platform,4eek/edx-platform,deepsrijit1105/edx-platform,pabloborrego93/edx-platform,shurihell/testasia,dcosentino/edx-platform,Stanford-Online/edx-platform,inares/edx-platform,amir-qayyum-khan/edx-platform,WatanabeYasumasa/edx-platform,ampax/edx-platform-backup,LearnEra/LearnEraPlaftform,hastexo/edx-platform,jelugbo/tundex,alu042/edx-platform,dsajkl/reqiop,appsembler/edx-platform,shubhdev/openedx,inares/edx-platform,don-github/edx-platform,devs1991/test_edx_docmode,teltek/edx-platform,cselis86/edx-platform,iivic/BoiseStateX,zhenzhai/edx-platform,ahmadio/edx-platform,mitocw/edx-platform,shubhdev/openedx,leansoft/edx-platform,hastexo/edx-platform,morpheby/levelup-by,jazztpt/edx-platform,Softmotions/edx-platform,jamiefolsom/edx-platform,wwj718/ANALYSE,playm2mboy/edx-platform,beni55/edx-platform,jazkarta/edx-platform,mjirayu/sit_academy,naresh21/synergetics-edx-platform,chauhanhardik/populo,rhndg/openedx,ahmadiga/min_edx,IITBinterns13/edx-platform-dev,xingyepei/edx-platform,longmen21/edx-platform,bigdatauniversity/edx-platform,jazkarta/edx-platform,doismellburning/edx-platform,morpheby/levelup-by,antonve/s4-project-mooc,kamalx/edx-platform,peterm-itr/edx-platform,benpatterson/edx-platform,utecuy/edx-platform,xuxiao19910803/edx-platform,hkawasaki/kawasaki-aio8-1,raccoongang/edx-platform,ampax/edx-platform-backup,jswope00/griffinx,shubhdev/edx-platform,knehez/edx-platform,arifsetiawan/edx-platform,a-parhom/edx-platform,kmoocdev2/edx-platform,antoviaque/edx-platform,a-parhom/edx-platform,devs1991/test_edx_docmode,EduPepperPDTesting/pepper2013-testing,teltek/edx-platform,shashank971/edx-platform,hkawasaki/kawasaki-aio8-1,bigdatauniversity/edx-platform,jelugbo/tundex,shabab12/edx-platform,Lektorium-LLC/edx-platform,JioEducation/edx-platform,eestay/edx-platform,xuxiao19910803/edx-platform,MSOpenTech/edx-platform,hkawasaki/kawasaki-aio8-2,shubhdev/edxOnBaadal,shubhdev/openedx,DNFcode/edx-platform,RPI-OPENEDX/edx-platform,chauhanhardik/populo_2,ZLLab-Mooc/edx-platform,atsolakid/edx-platform,pelikanchik/edx-platform,nanolearningllc/edx-platform-cypress,solashirai/edx-platform,caesar2164/edx-platform,bigdatauniversity/edx-platform,auferack08/edx-platform,bdero/edx-platform,gsehub/edx-platform,a-parhom/edx-platform,yokose-ks/edx-platform,AkA84/edx-platform,openfun/edx-platform,beacloudgenius/edx-platform,UOMx/edx-platform,zubair-arbi/edx-platform,devs1991/test_edx_docmode,jswope00/GAI,dkarakats/edx-platform,romain-li/edx-platform,jazztpt/edx-platform,B-MOOC/edx-platform,EduPepperPDTesting/pepper2013-testing,ampax/edx-platform,hkawasaki/kawasaki-aio8-0,zadgroup/edx-platform,rismalrv/edx-platform,defance/edx-platform,Edraak/circleci-edx-platform,etzhou/edx-platform,andyzsf/edx,polimediaupv/edx-platform,JCBarahona/edX,LICEF/edx-platform,J861449197/edx-platform,DefyVentures/edx-platform,IndonesiaX/edx-platform,kmoocdev2/edx-platform,nanolearningllc/edx-platform-cypress,DefyVentures/edx-platform,martynovp/edx-platform,mtlchun/edx,abdoosh00/edraak,philanthropy-u/edx-platform,chand3040/cloud_that,edry/edx-platform,rationalAgent/edx-platform-custom,eemirtekin/edx-platform,itsjeyd/edx-platform,ubc/edx-platform,B-MOOC/edx-platform,Edraak/edx-platform,Edraak/edraak-platform,pomegranited/edx-platform,y12uc231/edx-platform,nanolearning/edx-platform,ESOedX/edx-platform,carsongee/edx-platform,Livit/Livit.Learn.EdX,jamiefolsom/edx-platform,cpennington/edx-platform,prarthitm/edxplatform,DNFcode/edx-platform,IndonesiaX/edx-platform,playm2mboy/edx-platform,kalebhartje/schoolboost,antonve/s4-project-mooc,jonathan-beard/edx-platform,jswope00/GAI,beni55/edx-platform,ovnicraft/edx-platform,BehavioralInsightsTeam/edx-platform,cyanna/edx-platform,wwj718/ANALYSE,MakeHer/edx-platform,Softmotions/edx-platform,xinjiguaike/edx-platform,syjeon/new_edx,bdero/edx-platform,rue89-tech/edx-platform,sudheerchintala/LearnEraPlatForm,hmcmooc/muddx-platform,devs1991/test_edx_docmode,jruiperezv/ANALYSE,vasyarv/edx-platform,fly19890211/edx-platform,EduPepperPD/pepper2013,ESOedX/edx-platform,WatanabeYasumasa/edx-platform,kursitet/edx-platform,proversity-org/edx-platform,kursitet/edx-platform,appliedx/edx-platform,Semi-global/edx-platform,ampax/edx-platform,unicri/edx-platform,torchingloom/edx-platform,antoviaque/edx-platform,ubc/edx-platform,vismartltd/edx-platform,beni55/edx-platform,ahmadiga/min_edx,mjirayu/sit_academy,BehavioralInsightsTeam/edx-platform,chrisndodge/edx-platform,vikas1885/test1,eduNEXT/edx-platform,vasyarv/edx-platform,Semi-global/edx-platform,Shrhawk/edx-platform,fintech-circle/edx-platform,chauhanhardik/populo,carsongee/edx-platform,ahmedaljazzar/edx-platform,openfun/edx-platform,jamesblunt/edx-platform,cyanna/edx-platform,devs1991/test_edx_docmode,mitocw/edx-platform,torchingloom/edx-platform,mushtaqak/edx-platform,simbs/edx-platform,Lektorium-LLC/edx-platform,ahmedaljazzar/edx-platform,bitifirefly/edx-platform,zerobatu/edx-platform,wwj718/ANALYSE,nttks/jenkins-test,10clouds/edx-platform,Unow/edx-platform,nikolas/edx-platform,SravanthiSinha/edx-platform,B-MOOC/edx-platform,amir-qayyum-khan/edx-platform,shabab12/edx-platform,Stanford-Online/edx-platform,simbs/edx-platform,edx/edx-platform,EDUlib/edx-platform,ubc/edx-platform,antoviaque/edx-platform,sudheerchintala/LearnEraPlatForm,analyseuc3m/ANALYSE-v1,iivic/BoiseStateX,ubc/edx-platform,SivilTaram/edx-platform,CredoReference/edx-platform,zhenzhai/edx-platform,carsongee/edx-platform,Softmotions/edx-platform,jzoldak/edx-platform,morpheby/levelup-by,kursitet/edx-platform,nikolas/edx-platform,nttks/edx-platform,zubair-arbi/edx-platform,chand3040/cloud_that,sameetb-cuelogic/edx-platform-test
from functools import wraps import copy import json def expect_json(view_function): @wraps(view_function) def expect_json_with_cloned_request(request, *args, **kwargs): if request.META['CONTENT_TYPE'] == "application/json": cloned_request = copy.copy(request) cloned_request.POST = cloned_request.POST.copy() cloned_request.POST.update(json.loads(request.body)) return view_function(cloned_request, *args, **kwargs) else: return view_function(request, *args, **kwargs) return expect_json_with_cloned_request Fix JSON postback error where the content-type header line can contain more info than just the application/json descriptor. Now we just to a compare on the start of the header value.
from functools import wraps import copy import json def expect_json(view_function): @wraps(view_function) def expect_json_with_cloned_request(request, *args, **kwargs): # cdodge: fix postback errors in CMS. The POST 'content-type' header can include additional information # e.g. 'charset', so we can't do a direct string compare if request.META['CONTENT_TYPE'].lower().startswith("application/json"): cloned_request = copy.copy(request) cloned_request.POST = cloned_request.POST.copy() cloned_request.POST.update(json.loads(request.body)) return view_function(cloned_request, *args, **kwargs) else: return view_function(request, *args, **kwargs) return expect_json_with_cloned_request
<commit_before>from functools import wraps import copy import json def expect_json(view_function): @wraps(view_function) def expect_json_with_cloned_request(request, *args, **kwargs): if request.META['CONTENT_TYPE'] == "application/json": cloned_request = copy.copy(request) cloned_request.POST = cloned_request.POST.copy() cloned_request.POST.update(json.loads(request.body)) return view_function(cloned_request, *args, **kwargs) else: return view_function(request, *args, **kwargs) return expect_json_with_cloned_request <commit_msg>Fix JSON postback error where the content-type header line can contain more info than just the application/json descriptor. Now we just to a compare on the start of the header value.<commit_after>
from functools import wraps import copy import json def expect_json(view_function): @wraps(view_function) def expect_json_with_cloned_request(request, *args, **kwargs): # cdodge: fix postback errors in CMS. The POST 'content-type' header can include additional information # e.g. 'charset', so we can't do a direct string compare if request.META['CONTENT_TYPE'].lower().startswith("application/json"): cloned_request = copy.copy(request) cloned_request.POST = cloned_request.POST.copy() cloned_request.POST.update(json.loads(request.body)) return view_function(cloned_request, *args, **kwargs) else: return view_function(request, *args, **kwargs) return expect_json_with_cloned_request
from functools import wraps import copy import json def expect_json(view_function): @wraps(view_function) def expect_json_with_cloned_request(request, *args, **kwargs): if request.META['CONTENT_TYPE'] == "application/json": cloned_request = copy.copy(request) cloned_request.POST = cloned_request.POST.copy() cloned_request.POST.update(json.loads(request.body)) return view_function(cloned_request, *args, **kwargs) else: return view_function(request, *args, **kwargs) return expect_json_with_cloned_request Fix JSON postback error where the content-type header line can contain more info than just the application/json descriptor. Now we just do a compare on the start of the header value.from functools import wraps import copy import json def expect_json(view_function): @wraps(view_function) def expect_json_with_cloned_request(request, *args, **kwargs): # cdodge: fix postback errors in CMS. The POST 'content-type' header can include additional information # e.g. 'charset', so we can't do a direct string compare if request.META['CONTENT_TYPE'].lower().startswith("application/json"): cloned_request = copy.copy(request) cloned_request.POST = cloned_request.POST.copy() cloned_request.POST.update(json.loads(request.body)) return view_function(cloned_request, *args, **kwargs) else: return view_function(request, *args, **kwargs) return expect_json_with_cloned_request
<commit_before>from functools import wraps import copy import json def expect_json(view_function): @wraps(view_function) def expect_json_with_cloned_request(request, *args, **kwargs): if request.META['CONTENT_TYPE'] == "application/json": cloned_request = copy.copy(request) cloned_request.POST = cloned_request.POST.copy() cloned_request.POST.update(json.loads(request.body)) return view_function(cloned_request, *args, **kwargs) else: return view_function(request, *args, **kwargs) return expect_json_with_cloned_request <commit_msg>Fix JSON postback error where the content-type header line can contain more info than just the application/json descriptor. Now we just do a compare on the start of the header value.<commit_after>from functools import wraps import copy import json def expect_json(view_function): @wraps(view_function) def expect_json_with_cloned_request(request, *args, **kwargs): # cdodge: fix postback errors in CMS. The POST 'content-type' header can include additional information # e.g. 'charset', so we can't do a direct string compare if request.META['CONTENT_TYPE'].lower().startswith("application/json"): cloned_request = copy.copy(request) cloned_request.POST = cloned_request.POST.copy() cloned_request.POST.update(json.loads(request.body)) return view_function(cloned_request, *args, **kwargs) else: return view_function(request, *args, **kwargs) return expect_json_with_cloned_request
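A note on the record above: the fix works because an HTTP Content-Type header may carry parameters after the media type (for example "application/json; charset=utf-8"), so an exact equality test rejects valid JSON posts. Below is a minimal, runnable sketch of just that comparison; the helper name and sample header values are illustrative, not part of the commit:

def is_json_content_type(content_type):
    # Compare only the media-type prefix, case-insensitively, so
    # parameters such as charset cannot break the check.
    return content_type.lower().startswith("application/json")

assert is_json_content_type("application/json")
assert is_json_content_type("Application/JSON; charset=utf-8")
assert not is_json_content_type("text/html")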
7af8ee5ca8a036ae2339187b689507989d43aaa6
elmo/moon_tracker/utils.py
elmo/moon_tracker/utils.py
def user_can_view_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.can_view_scans', moon.planet.system) or user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation.region) ) def user_can_add_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.can_add_scans', moon.planet.system) or user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation.region) ) def user_can_delete_scans(user, moon): return ( user.has_perm('eve_sde.can_delete_scans', moon.planet.system) or user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation.region) )
def user_can_view_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.sys_can_view_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_view_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_view_scans', moon.planet.system.constellation.region) ) def user_can_add_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.sys_can_add_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_add_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_add_scans', moon.planet.system.constellation.region) ) def user_can_delete_scans(user, moon): return ( user.has_perm('eve_sde.sys_can_delete_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_delete_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_delete_scans', moon.planet.system.constellation.region) )
Update the permission helper functions.
Update the permission helper functions.
Python
mit
StephenSwat/eve_lunar_mining_organiser,StephenSwat/eve_lunar_mining_organiser
def user_can_view_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.can_view_scans', moon.planet.system) or user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation.region) ) def user_can_add_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.can_add_scans', moon.planet.system) or user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation.region) ) def user_can_delete_scans(user, moon): return ( user.has_perm('eve_sde.can_delete_scans', moon.planet.system) or user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation.region) ) Update the permission helper functions.
def user_can_view_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.sys_can_view_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_view_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_view_scans', moon.planet.system.constellation.region) ) def user_can_add_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.sys_can_add_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_add_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_add_scans', moon.planet.system.constellation.region) ) def user_can_delete_scans(user, moon): return ( user.has_perm('eve_sde.sys_can_delete_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_delete_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_delete_scans', moon.planet.system.constellation.region) )
<commit_before>def user_can_view_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.can_view_scans', moon.planet.system) or user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation.region) ) def user_can_add_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.can_add_scans', moon.planet.system) or user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation.region) ) def user_can_delete_scans(user, moon): return ( user.has_perm('eve_sde.can_delete_scans', moon.planet.system) or user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation.region) ) <commit_msg>Update the permission helper functions.<commit_after>
def user_can_view_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.sys_can_view_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_view_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_view_scans', moon.planet.system.constellation.region) ) def user_can_add_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.sys_can_add_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_add_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_add_scans', moon.planet.system.constellation.region) ) def user_can_delete_scans(user, moon): return ( user.has_perm('eve_sde.sys_can_delete_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_delete_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_delete_scans', moon.planet.system.constellation.region) )
def user_can_view_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.can_view_scans', moon.planet.system) or user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation.region) ) def user_can_add_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.can_add_scans', moon.planet.system) or user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation.region) ) def user_can_delete_scans(user, moon): return ( user.has_perm('eve_sde.can_delete_scans', moon.planet.system) or user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation.region) ) Update the permission helper functions.def user_can_view_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.sys_can_view_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_view_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_view_scans', moon.planet.system.constellation.region) ) def user_can_add_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.sys_can_add_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_add_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_add_scans', moon.planet.system.constellation.region) ) def user_can_delete_scans(user, moon): return ( user.has_perm('eve_sde.sys_can_delete_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_delete_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_delete_scans', moon.planet.system.constellation.region) )
<commit_before>def user_can_view_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.can_view_scans', moon.planet.system) or user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation.region) ) def user_can_add_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.can_add_scans', moon.planet.system) or user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation.region) ) def user_can_delete_scans(user, moon): return ( user.has_perm('eve_sde.can_delete_scans', moon.planet.system) or user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation.region) ) <commit_msg>Update the permission helper functions.<commit_after>def user_can_view_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.sys_can_view_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_view_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_view_scans', moon.planet.system.constellation.region) ) def user_can_add_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.sys_can_add_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_add_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_add_scans', moon.planet.system.constellation.region) ) def user_can_delete_scans(user, moon): return ( user.has_perm('eve_sde.sys_can_delete_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_delete_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_delete_scans', moon.planet.system.constellation.region) )
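The commit above gives each permission scope its own codename prefix (sys_ for systems, con_ for constellations, reg_ for regions). The three helpers share one fallthrough shape, which the sketch below factors into a single function; the function itself is ours, and it assumes an object-level permission backend such as django-guardian sits behind user.has_perm:

def user_has_scoped_perm(user, moon, action):
    # Try the permission at each scope, most specific first; any hit
    # grants access. Codenames follow the commit's sys_/con_/reg_ prefixes.
    system = moon.planet.system
    scoped_checks = (
        ('eve_sde.sys_can_%s_scans' % action, system),
        ('eve_sde.con_can_%s_scans' % action, system.constellation),
        ('eve_sde.reg_can_%s_scans' % action, system.constellation.region),
    )
    return any(user.has_perm(perm, obj) for perm, obj in scoped_checks)

For example, user_has_scoped_perm(user, moon, 'add') mirrors user_can_add_scans, minus the shortcut whereby delete permission implies add permission.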
184cc6448a7bed4c945b0c5cb1e3739c3fb9c7f8
examples/list_vmss_pips.py
examples/list_vmss_pips.py
import azurerm import json import sys # check for single command argument if len(sys.argv) == 3: rg = sys.argv[1] vmss = sys.argv[2] else: sys.exit('Expecting resource group name and vmss name as arguments.') # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting azurermconfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rg, vmss) print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': ')))
import argparse import azurerm import json import re import sys # validate command line arguments argParser = argparse.ArgumentParser() argParser.add_argument('--vmssname', '-n', required=True, action='store', help='VMSS Name') argParser.add_argument('--rgname', '-g', required=True, action='store', help='Resource Group Name') argParser.add_argument('--details', '-a', required=False, action='store', help='Print all details') args = argParser.parse_args() name = args.vmssname rgname = args.rgname details = args.details # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting azurermconfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rgname, name) if details is True: print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': '))) else: for ip in public_ips['value']: vm_id = re.search('Machines/(.*)/networkInt', ip['id']).group(1) ipaddr = ip['properties']['ipAddress'] print('VM id: ' + vm_id + ', IP: ' + ipaddr)
Improve list VMSS public IP example
Improve list VMSS public IP example
Python
mit
gbowerman/azurerm
import azurerm import json import sys # check for single command argument if len(sys.argv) == 3: rg = sys.argv[1] vmss = sys.argv[2] else: sys.exit('Expecting resource group name and vmss name as arguments.') # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting azurermconfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rg, vmss) print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': '))) Improve list VMSS public IP example
import argparse import azurerm import json import re import sys # validate command line arguments argParser = argparse.ArgumentParser() argParser.add_argument('--vmssname', '-n', required=True, action='store', help='VMSS Name') argParser.add_argument('--rgname', '-g', required=True, action='store', help='Resource Group Name') argParser.add_argument('--details', '-a', required=False, action='store', help='Print all details') args = argParser.parse_args() name = args.vmssname rgname = args.rgname details = args.details # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting azurermconfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rgname, name) if details is True: print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': '))) else: for ip in public_ips['value']: vm_id = re.search('Machines/(.*)/networkInt', ip['id']).group(1) ipaddr = ip['properties']['ipAddress'] print('VM id: ' + vm_id + ', IP: ' + ipaddr)
<commit_before>import azurerm import json import sys # check for single command argument if len(sys.argv) == 3: rg = sys.argv[1] vmss = sys.argv[2] else: sys.exit('Expecting resource group name and vmss name as arguments.') # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting azurermconfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rg, vmss) print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': '))) <commit_msg>Improve list VMSS public IP example<commit_after>
import argparse import azurerm import json import re import sys # validate command line arguments argParser = argparse.ArgumentParser() argParser.add_argument('--vmssname', '-n', required=True, action='store', help='VMSS Name') argParser.add_argument('--rgname', '-g', required=True, action='store', help='Resource Group Name') argParser.add_argument('--details', '-a', required=False, action='store', help='Print all details') args = argParser.parse_args() name = args.vmssname rgname = args.rgname details = args.details # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting azurermconfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rgname, name) if details is True: print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': '))) else: for ip in public_ips['value']: vm_id = re.search('Machines/(.*)/networkInt', ip['id']).group(1) ipaddr = ip['properties']['ipAddress'] print('VM id: ' + vm_id + ', IP: ' + ipaddr)
import azurerm import json import sys # check for single command argument if len(sys.argv) == 3: rg = sys.argv[1] vmss = sys.argv[2] else: sys.exit('Expecting resource group name and vmss name as arguments.') # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting azurermconfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rg, vmss) print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': '))) Improve list VMSS public IP exampleimport argparse import azurerm import json import re import sys # validate command line arguments argParser = argparse.ArgumentParser() argParser.add_argument('--vmssname', '-n', required=True, action='store', help='VMSS Name') argParser.add_argument('--rgname', '-g', required=True, action='store', help='Resource Group Name') argParser.add_argument('--details', '-a', required=False, action='store', help='Print all details') args = argParser.parse_args() name = args.vmssname rgname = args.rgname details = args.details # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting azurermconfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rgname, name) if details is True: print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': '))) else: for ip in public_ips['value']: vm_id = re.search('Machines/(.*)/networkInt', ip['id']).group(1) ipaddr = ip['properties']['ipAddress'] print('VM id: ' + vm_id + ', IP: ' + ipaddr)
<commit_before>import azurerm import json import sys # check for single command argument if len(sys.argv) == 3: rg = sys.argv[1] vmss = sys.argv[2] else: sys.exit('Expecting resource group name and vmss name as arguments.') # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting azurermconfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rg, vmss) print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': '))) <commit_msg>Improve list VMSS public IP example<commit_after>import argparse import azurerm import json import re import sys # validate command line arguments argParser = argparse.ArgumentParser() argParser.add_argument('--vmssname', '-n', required=True, action='store', help='VMSS Name') argParser.add_argument('--rgname', '-g', required=True, action='store', help='Resource Group Name') argParser.add_argument('--details', '-a', required=False, action='store', help='Print all details') args = argParser.parse_args() name = args.vmssname rgname = args.rgname details = args.details # Load Azure app defaults try: with open('azurermconfig.json') as configFile: configData = json.load(configFile) except FileNotFoundError: print("Error: Expecting azurermconfig.json in current folder") sys.exit() tenant_id = configData['tenantId'] app_id = configData['appId'] app_secret = configData['appSecret'] subscription_id = configData['subscriptionId'] access_token = azurerm.get_access_token(tenant_id, app_id, app_secret) public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id, rgname, name) if details is True: print(json.dumps(public_ips, sort_keys=False, indent=2, separators=(',', ': '))) else: for ip in public_ips['value']: vm_id = re.search('Machines/(.*)/networkInt', ip['id']).group(1) ipaddr = ip['properties']['ipAddress'] print('VM id: ' + vm_id + ', IP: ' + ipaddr)
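One caveat worth flagging on the improved example above: with action='store' the --details option parses to a string (or None when omitted), so the script's `details is True` branch can never fire. A small runnable sketch of the boolean-flag variant; this is a possible tightening on our part, not something the commit ships:

import argparse

parser = argparse.ArgumentParser()
# store_true yields a real boolean: False when omitted, True when given.
parser.add_argument('--details', '-a', required=False, action='store_true',
                    help='Print all details')

assert parser.parse_args(['--details']).details is True
assert parser.parse_args([]).details is False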
cc7b8f5dc95d09af619e588aea8042376be6edfc
secondhand/urls.py
secondhand/urls.py
from django.conf.urls import patterns, include, url from tastypie.api import Api from tracker.api import UserResource, TaskResource, WorkSessionResource, \ ApiTokenResource from tracker.views import SignupView # Uncomment the next two lines to enable the admin: # from django.contrib import admin # admin.autodiscover() # tracker API. v1_api = Api(api_name='v1') v1_api.register(ApiTokenResource()) v1_api.register(UserResource()) v1_api.register(TaskResource()) v1_api.register(WorkSessionResource()) urlpatterns = patterns('', # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: # url(r'^admin/', include(admin.site.urls)), url(r'^signup/', SignupView.as_view(), name='signup'), url(r'^api/', include(v1_api.urls)), )
from django.conf.urls import patterns, include, url from tastypie.api import Api from tracker.api import TaskResource, WorkSessionResource, \ ApiTokenResource, ProjectResource from tracker.views import SignupView # Uncomment the next two lines to enable the admin: # from django.contrib import admin # admin.autodiscover() # tracker API. v1_api = Api(api_name='v1') v1_api.register(ApiTokenResource()) v1_api.register(ProjectResource()) v1_api.register(TaskResource()) v1_api.register(WorkSessionResource()) urlpatterns = patterns('', # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: # url(r'^admin/', include(admin.site.urls)), url(r'^signup/', SignupView.as_view(), name='signup'), url(r'^api/', include(v1_api.urls)), )
Remove UserResource from the API and add ProjectResource.
Remove UserResource from the API and add ProjectResource.
Python
mit
GeneralMaximus/secondhand
from django.conf.urls import patterns, include, url from tastypie.api import Api from tracker.api import UserResource, TaskResource, WorkSessionResource, \ ApiTokenResource from tracker.views import SignupView # Uncomment the next two lines to enable the admin: # from django.contrib import admin # admin.autodiscover() # tracker API. v1_api = Api(api_name='v1') v1_api.register(ApiTokenResource()) v1_api.register(UserResource()) v1_api.register(TaskResource()) v1_api.register(WorkSessionResource()) urlpatterns = patterns('', # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: # url(r'^admin/', include(admin.site.urls)), url(r'^signup/', SignupView.as_view(), name='signup'), url(r'^api/', include(v1_api.urls)), ) Remove UserResource from the API and add ProjectResource.
from django.conf.urls import patterns, include, url from tastypie.api import Api from tracker.api import TaskResource, WorkSessionResource, \ ApiTokenResource, ProjectResource from tracker.views import SignupView # Uncomment the next two lines to enable the admin: # from django.contrib import admin # admin.autodiscover() # tracker API. v1_api = Api(api_name='v1') v1_api.register(ApiTokenResource()) v1_api.register(ProjectResource()) v1_api.register(TaskResource()) v1_api.register(WorkSessionResource()) urlpatterns = patterns('', # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: # url(r'^admin/', include(admin.site.urls)), url(r'^signup/', SignupView.as_view(), name='signup'), url(r'^api/', include(v1_api.urls)), )
<commit_before>from django.conf.urls import patterns, include, url from tastypie.api import Api from tracker.api import UserResource, TaskResource, WorkSessionResource, \ ApiTokenResource from tracker.views import SignupView # Uncomment the next two lines to enable the admin: # from django.contrib import admin # admin.autodiscover() # tracker API. v1_api = Api(api_name='v1') v1_api.register(ApiTokenResource()) v1_api.register(UserResource()) v1_api.register(TaskResource()) v1_api.register(WorkSessionResource()) urlpatterns = patterns('', # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: # url(r'^admin/', include(admin.site.urls)), url(r'^signup/', SignupView.as_view(), name='signup'), url(r'^api/', include(v1_api.urls)), ) <commit_msg>Remove UserResource from the API and add ProjectResource.<commit_after>
from django.conf.urls import patterns, include, url from tastypie.api import Api from tracker.api import TaskResource, WorkSessionResource, \ ApiTokenResource, ProjectResource from tracker.views import SignupView # Uncomment the next two lines to enable the admin: # from django.contrib import admin # admin.autodiscover() # tracker API. v1_api = Api(api_name='v1') v1_api.register(ApiTokenResource()) v1_api.register(ProjectResource()) v1_api.register(TaskResource()) v1_api.register(WorkSessionResource()) urlpatterns = patterns('', # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: # url(r'^admin/', include(admin.site.urls)), url(r'^signup/', SignupView.as_view(), name='signup'), url(r'^api/', include(v1_api.urls)), )
from django.conf.urls import patterns, include, url from tastypie.api import Api from tracker.api import UserResource, TaskResource, WorkSessionResource, \ ApiTokenResource from tracker.views import SignupView # Uncomment the next two lines to enable the admin: # from django.contrib import admin # admin.autodiscover() # tracker API. v1_api = Api(api_name='v1') v1_api.register(ApiTokenResource()) v1_api.register(UserResource()) v1_api.register(TaskResource()) v1_api.register(WorkSessionResource()) urlpatterns = patterns('', # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: # url(r'^admin/', include(admin.site.urls)), url(r'^signup/', SignupView.as_view(), name='signup'), url(r'^api/', include(v1_api.urls)), ) Remove UserResource from the API and add ProjectResource.from django.conf.urls import patterns, include, url from tastypie.api import Api from tracker.api import TaskResource, WorkSessionResource, \ ApiTokenResource, ProjectResource from tracker.views import SignupView # Uncomment the next two lines to enable the admin: # from django.contrib import admin # admin.autodiscover() # tracker API. v1_api = Api(api_name='v1') v1_api.register(ApiTokenResource()) v1_api.register(ProjectResource()) v1_api.register(TaskResource()) v1_api.register(WorkSessionResource()) urlpatterns = patterns('', # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: # url(r'^admin/', include(admin.site.urls)), url(r'^signup/', SignupView.as_view(), name='signup'), url(r'^api/', include(v1_api.urls)), )
<commit_before>from django.conf.urls import patterns, include, url from tastypie.api import Api from tracker.api import UserResource, TaskResource, WorkSessionResource, \ ApiTokenResource from tracker.views import SignupView # Uncomment the next two lines to enable the admin: # from django.contrib import admin # admin.autodiscover() # tracker API. v1_api = Api(api_name='v1') v1_api.register(ApiTokenResource()) v1_api.register(UserResource()) v1_api.register(TaskResource()) v1_api.register(WorkSessionResource()) urlpatterns = patterns('', # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: # url(r'^admin/', include(admin.site.urls)), url(r'^signup/', SignupView.as_view(), name='signup'), url(r'^api/', include(v1_api.urls)), ) <commit_msg>Remove UserResource from the API and add ProjectResource.<commit_after>from django.conf.urls import patterns, include, url from tastypie.api import Api from tracker.api import TaskResource, WorkSessionResource, \ ApiTokenResource, ProjectResource from tracker.views import SignupView # Uncomment the next two lines to enable the admin: # from django.contrib import admin # admin.autodiscover() # tracker API. v1_api = Api(api_name='v1') v1_api.register(ApiTokenResource()) v1_api.register(ProjectResource()) v1_api.register(TaskResource()) v1_api.register(WorkSessionResource()) urlpatterns = patterns('', # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: # url(r'^admin/', include(admin.site.urls)), url(r'^signup/', SignupView.as_view(), name='signup'), url(r'^api/', include(v1_api.urls)), )
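For context on the registration swap above: tastypie serves every registered resource beneath the Api's name, so with default resource names the endpoints become roughly /api/v1/project/, /api/v1/task/ and /api/v1/worksession/ (tastypie derives a default resource_name from the class name when Meta does not set one). A minimal sketch of the same pattern with a stand-in resource; PingResource is hypothetical and not part of this project:

from tastypie.api import Api
from tastypie.resources import Resource

class PingResource(Resource):
    # Stand-in only; the real resources in tracker.api are model-backed.
    class Meta:
        resource_name = 'ping'

v1_api = Api(api_name='v1')
v1_api.register(PingResource())
# include(v1_api.urls) in urls.py then routes /api/v1/ping/ here.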
f840af6621fd63dd9021fcb68a32ba14c925fcf7
wellknown/models.py
wellknown/models.py
from django.db import models from django.db.models.signals import post_save import mimetypes import wellknown # # create default host-meta handler # from wellknown.resources import HostMeta wellknown.register('host-meta', handler=HostMeta(), content_type='application/xrd+xml') # # resource model # class Resource(models.Model): path = models.CharField(max_length=128) content = models.TextField(blank=True) content_type = models.CharField(max_length=128, blank=True) class Meta: ordering = ('path',) def __unicode__(self): return self.path def save(self, **kwargs): self.path = self.path.strip('/') if not self.content_type: self.content_type = mimetypes.guess_type(self.path)[0] or 'text/plain' super(Resource, self).save(**kwargs) # # update resources when models are saved # def save_handler(sender, **kwargs): reg = kwargs['instance'] wellknown.register( reg.path, content=reg.content, content_type=reg.content_type, update=True ) post_save.connect(save_handler, sender=Resource)
from django.db import models from django.db.models.signals import post_save import mimetypes import wellknown # # create default host-meta handler # from wellknown.resources import HostMeta try: wellknown.register('host-meta', handler=HostMeta(), content_type='application/xrd+xml') except ValueError: pass # # resource model # class Resource(models.Model): path = models.CharField(max_length=128) content = models.TextField(blank=True) content_type = models.CharField(max_length=128, blank=True) class Meta: ordering = ('path',) def __unicode__(self): return self.path def save(self, **kwargs): self.path = self.path.strip('/') if not self.content_type: self.content_type = mimetypes.guess_type(self.path)[0] or 'text/plain' super(Resource, self).save(**kwargs) # # update resources when models are saved # def save_handler(sender, **kwargs): reg = kwargs['instance'] wellknown.register( reg.path, content=reg.content, content_type=reg.content_type, update=True ) post_save.connect(save_handler, sender=Resource)
Check for an existing handler before registering default host-meta handler.
Check for an existing handler before registering default host-meta handler.
Python
bsd-3-clause
jcarbaugh/django-wellknown
from django.db import models from django.db.models.signals import post_save import mimetypes import wellknown # # create default host-meta handler # from wellknown.resources import HostMeta wellknown.register('host-meta', handler=HostMeta(), content_type='application/xrd+xml') # # resource model # class Resource(models.Model): path = models.CharField(max_length=128) content = models.TextField(blank=True) content_type = models.CharField(max_length=128, blank=True) class Meta: ordering = ('path',) def __unicode__(self): return self.path def save(self, **kwargs): self.path = self.path.strip('/') if not self.content_type: self.content_type = mimetypes.guess_type(self.path)[0] or 'text/plain' super(Resource, self).save(**kwargs) # # update resources when models are saved # def save_handler(sender, **kwargs): reg = kwargs['instance'] wellknown.register( reg.path, content=reg.content, content_type=reg.content_type, update=True ) post_save.connect(save_handler, sender=Resource) Check for an existing handler before registering default host-meta handler.
from django.db import models from django.db.models.signals import post_save import mimetypes import wellknown # # create default host-meta handler # from wellknown.resources import HostMeta try: wellknown.register('host-meta', handler=HostMeta(), content_type='application/xrd+xml') except ValueError: pass # # resource model # class Resource(models.Model): path = models.CharField(max_length=128) content = models.TextField(blank=True) content_type = models.CharField(max_length=128, blank=True) class Meta: ordering = ('path',) def __unicode__(self): return self.path def save(self, **kwargs): self.path = self.path.strip('/') if not self.content_type: self.content_type = mimetypes.guess_type(self.path)[0] or 'text/plain' super(Resource, self).save(**kwargs) # # update resources when models are saved # def save_handler(sender, **kwargs): reg = kwargs['instance'] wellknown.register( reg.path, content=reg.content, content_type=reg.content_type, update=True ) post_save.connect(save_handler, sender=Resource)
<commit_before>from django.db import models from django.db.models.signals import post_save import mimetypes import wellknown # # create default host-meta handler # from wellknown.resources import HostMeta wellknown.register('host-meta', handler=HostMeta(), content_type='application/xrd+xml') # # resource model # class Resource(models.Model): path = models.CharField(max_length=128) content = models.TextField(blank=True) content_type = models.CharField(max_length=128, blank=True) class Meta: ordering = ('path',) def __unicode__(self): return self.path def save(self, **kwargs): self.path = self.path.strip('/') if not self.content_type: self.content_type = mimetypes.guess_type(self.path)[0] or 'text/plain' super(Resource, self).save(**kwargs) # # update resources when models are saved # def save_handler(sender, **kwargs): reg = kwargs['instance'] wellknown.register( reg.path, content=reg.content, content_type=reg.content_type, update=True ) post_save.connect(save_handler, sender=Resource) <commit_msg>Check for an existing handler before registering default host-meta handler.<commit_after>
from django.db import models from django.db.models.signals import post_save import mimetypes import wellknown # # create default host-meta handler # from wellknown.resources import HostMeta try: wellknown.register('host-meta', handler=HostMeta(), content_type='application/xrd+xml') except ValueError: pass # # resource model # class Resource(models.Model): path = models.CharField(max_length=128) content = models.TextField(blank=True) content_type = models.CharField(max_length=128, blank=True) class Meta: ordering = ('path',) def __unicode__(self): return self.path def save(self, **kwargs): self.path = self.path.strip('/') if not self.content_type: self.content_type = mimetypes.guess_type(self.path)[0] or 'text/plain' super(Resource, self).save(**kwargs) # # update resources when models are saved # def save_handler(sender, **kwargs): reg = kwargs['instance'] wellknown.register( reg.path, content=reg.content, content_type=reg.content_type, update=True ) post_save.connect(save_handler, sender=Resource)
from django.db import models from django.db.models.signals import post_save import mimetypes import wellknown # # create default host-meta handler # from wellknown.resources import HostMeta wellknown.register('host-meta', handler=HostMeta(), content_type='application/xrd+xml') # # resource model # class Resource(models.Model): path = models.CharField(max_length=128) content = models.TextField(blank=True) content_type = models.CharField(max_length=128, blank=True) class Meta: ordering = ('path',) def __unicode__(self): return self.path def save(self, **kwargs): self.path = self.path.strip('/') if not self.content_type: self.content_type = mimetypes.guess_type(self.path)[0] or 'text/plain' super(Resource, self).save(**kwargs) # # update resources when models are saved # def save_handler(sender, **kwargs): reg = kwargs['instance'] wellknown.register( reg.path, content=reg.content, content_type=reg.content_type, update=True ) post_save.connect(save_handler, sender=Resource) Check for an existing handler before registering default host-meta handler.from django.db import models from django.db.models.signals import post_save import mimetypes import wellknown # # create default host-meta handler # from wellknown.resources import HostMeta try: wellknown.register('host-meta', handler=HostMeta(), content_type='application/xrd+xml') except ValueError: pass # # resource model # class Resource(models.Model): path = models.CharField(max_length=128) content = models.TextField(blank=True) content_type = models.CharField(max_length=128, blank=True) class Meta: ordering = ('path',) def __unicode__(self): return self.path def save(self, **kwargs): self.path = self.path.strip('/') if not self.content_type: self.content_type = mimetypes.guess_type(self.path)[0] or 'text/plain' super(Resource, self).save(**kwargs) # # update resources when models are saved # def save_handler(sender, **kwargs): reg = kwargs['instance'] wellknown.register( reg.path, content=reg.content, content_type=reg.content_type, update=True ) post_save.connect(save_handler, sender=Resource)
<commit_before>from django.db import models from django.db.models.signals import post_save import mimetypes import wellknown # # create default host-meta handler # from wellknown.resources import HostMeta wellknown.register('host-meta', handler=HostMeta(), content_type='application/xrd+xml') # # resource model # class Resource(models.Model): path = models.CharField(max_length=128) content = models.TextField(blank=True) content_type = models.CharField(max_length=128, blank=True) class Meta: ordering = ('path',) def __unicode__(self): return self.path def save(self, **kwargs): self.path = self.path.strip('/') if not self.content_type: self.content_type = mimetypes.guess_type(self.path)[0] or 'text/plain' super(Resource, self).save(**kwargs) # # update resources when models are saved # def save_handler(sender, **kwargs): reg = kwargs['instance'] wellknown.register( reg.path, content=reg.content, content_type=reg.content_type, update=True ) post_save.connect(save_handler, sender=Resource) <commit_msg>Check for an existing handler before registering default host-meta handler.<commit_after>from django.db import models from django.db.models.signals import post_save import mimetypes import wellknown # # create default host-meta handler # from wellknown.resources import HostMeta try: wellknown.register('host-meta', handler=HostMeta(), content_type='application/xrd+xml') except ValueError: pass # # resource model # class Resource(models.Model): path = models.CharField(max_length=128) content = models.TextField(blank=True) content_type = models.CharField(max_length=128, blank=True) class Meta: ordering = ('path',) def __unicode__(self): return self.path def save(self, **kwargs): self.path = self.path.strip('/') if not self.content_type: self.content_type = mimetypes.guess_type(self.path)[0] or 'text/plain' super(Resource, self).save(**kwargs) # # update resources when models are saved # def save_handler(sender, **kwargs): reg = kwargs['instance'] wellknown.register( reg.path, content=reg.content, content_type=reg.content_type, update=True ) post_save.connect(save_handler, sender=Resource)
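The try/except in the record above makes the registration idempotent: Django may import models.py more than once, and wellknown.register evidently raises ValueError for a path that is already taken (hence the except clause). The same guard pattern in isolation, with a toy registry standing in for wellknown's internal one:

_handlers = {}

def register(path, handler):
    # A registry that refuses duplicate keys, like wellknown's.
    if path in _handlers:
        raise ValueError('%s is already registered' % path)
    _handlers[path] = handler

for _ in range(2):  # the second attempt, e.g. a re-import, is a no-op
    try:
        register('host-meta', object())
    except ValueError:
        pass

assert len(_handlers) == 1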
06ead54d0d4b93038df32cbbe622ea5f5fc8288a
boardinghouse/__init__.py
boardinghouse/__init__.py
""" """ __version__ = '0.1' __release__ = '0.1a3' def inject_app_defaults(): """ Automatically inject the default settings for this app. If settings has already been configured, then we need to add our defaults to that (if not overridden), and in all cases we also want to inject our settings into the global_settings object, so we can use diffsettings. Based on: http://passingcuriosity.com/2010/default-settings-for-django-applications/ but with improvements for importing/assignation failures. """ try: import settings as app_settings from django.conf import settings, global_settings from django.core.exceptions import ImproperlyConfigured except ImportError: return for key in dir(app_settings): if key.isupper(): value = getattr(app_settings, key) setattr(global_settings, key, value) if not hasattr(settings, key): # We can just ignore failures, as this means we are # not set up, so global_settings will suffice. try: setattr(settings, key, value) except (ImproperlyConfigured, ImportError): pass inject_app_defaults() default_app_config = 'boardinghouse.apps.BoardingHouseConfig'
""" """ __version__ = '0.2' __release__ = '0.2a1' def inject_app_defaults(): """ Automatically inject the default settings for this app. If settings has already been configured, then we need to add our defaults to that (if not overridden), and in all cases we also want to inject our settings into the global_settings object, so we can use diffsettings. Based on: http://passingcuriosity.com/2010/default-settings-for-django-applications/ but with improvements for importing/assignation failures. """ try: import settings as app_settings from django.conf import settings, global_settings from django.core.exceptions import ImproperlyConfigured except ImportError: return for key in dir(app_settings): if key.isupper(): value = getattr(app_settings, key) setattr(global_settings, key, value) if not hasattr(settings, key): # We can just ignore failures, as this means we are # not set up, so global_settings will suffice. try: setattr(settings, key, value) except (ImproperlyConfigured, ImportError): pass inject_app_defaults() default_app_config = 'boardinghouse.apps.BoardingHouseConfig'
Bump version number (so readthedocs picks it up).
Bump version number (so readthedocs picks it up).
Python
bsd-3-clause
luzfcb/django-boardinghouse,luzfcb/django-boardinghouse,luzfcb/django-boardinghouse
""" """ __version__ = '0.1' __release__ = '0.1a3' def inject_app_defaults(): """ Automatically inject the default settings for this app. If settings has already been configured, then we need to add our defaults to that (if not overridden), and in all cases we also want to inject our settings into the global_settings object, so we can use diffsettings. Based on: http://passingcuriosity.com/2010/default-settings-for-django-applications/ but with improvements for importing/assignation failures. """ try: import settings as app_settings from django.conf import settings, global_settings from django.core.exceptions import ImproperlyConfigured except ImportError: return for key in dir(app_settings): if key.isupper(): value = getattr(app_settings, key) setattr(global_settings, key, value) if not hasattr(settings, key): # We can just ignore failures, as this means we are # not set up, so global_settings will suffice. try: setattr(settings, key, value) except (ImproperlyConfigured, ImportError): pass inject_app_defaults() default_app_config = 'boardinghouse.apps.BoardingHouseConfig'Bump version number (so readthedocs picks it up).
""" """ __version__ = '0.2' __release__ = '0.2a1' def inject_app_defaults(): """ Automatically inject the default settings for this app. If settings has already been configured, then we need to add our defaults to that (if not overridden), and in all cases we also want to inject our settings into the global_settings object, so we can use diffsettings. Based on: http://passingcuriosity.com/2010/default-settings-for-django-applications/ but with improvements for importing/assignation failures. """ try: import settings as app_settings from django.conf import settings, global_settings from django.core.exceptions import ImproperlyConfigured except ImportError: return for key in dir(app_settings): if key.isupper(): value = getattr(app_settings, key) setattr(global_settings, key, value) if not hasattr(settings, key): # We can just ignore failures, as this means we are # not set up, so global_settings will suffice. try: setattr(settings, key, value) except (ImproperlyConfigured, ImportError): pass inject_app_defaults() default_app_config = 'boardinghouse.apps.BoardingHouseConfig'
<commit_before>""" """ __version__ = '0.1' __release__ = '0.1a3' def inject_app_defaults(): """ Automatically inject the default settings for this app. If settings has already been configured, then we need to add our defaults to that (if not overridden), and in all cases we also want to inject our settings into the global_settings object, so we can use diffsettings. Based on: http://passingcuriosity.com/2010/default-settings-for-django-applications/ but with improvements for importing/assignation failures. """ try: import settings as app_settings from django.conf import settings, global_settings from django.core.exceptions import ImproperlyConfigured except ImportError: return for key in dir(app_settings): if key.isupper(): value = getattr(app_settings, key) setattr(global_settings, key, value) if not hasattr(settings, key): # We can just ignore failures, as this means we are # not set up, so global_settings will suffice. try: setattr(settings, key, value) except (ImproperlyConfigured, ImportError): pass inject_app_defaults() default_app_config = 'boardinghouse.apps.BoardingHouseConfig'<commit_msg>Bump version number (so readthedocs picks it up).<commit_after>
""" """ __version__ = '0.2' __release__ = '0.2a1' def inject_app_defaults(): """ Automatically inject the default settings for this app. If settings has already been configured, then we need to add our defaults to that (if not overridden), and in all cases we also want to inject our settings into the global_settings object, so we can use diffsettings. Based on: http://passingcuriosity.com/2010/default-settings-for-django-applications/ but with improvements for importing/assignation failures. """ try: import settings as app_settings from django.conf import settings, global_settings from django.core.exceptions import ImproperlyConfigured except ImportError: return for key in dir(app_settings): if key.isupper(): value = getattr(app_settings, key) setattr(global_settings, key, value) if not hasattr(settings, key): # We can just ignore failures, as this means we are # not set up, so global_settings will suffice. try: setattr(settings, key, value) except (ImproperlyConfigured, ImportError): pass inject_app_defaults() default_app_config = 'boardinghouse.apps.BoardingHouseConfig'
""" """ __version__ = '0.1' __release__ = '0.1a3' def inject_app_defaults(): """ Automatically inject the default settings for this app. If settings has already been configured, then we need to add our defaults to that (if not overridden), and in all cases we also want to inject our settings into the global_settings object, so we can use diffsettings. Based on: http://passingcuriosity.com/2010/default-settings-for-django-applications/ but with improvements for importing/assignation failures. """ try: import settings as app_settings from django.conf import settings, global_settings from django.core.exceptions import ImproperlyConfigured except ImportError: return for key in dir(app_settings): if key.isupper(): value = getattr(app_settings, key) setattr(global_settings, key, value) if not hasattr(settings, key): # We can just ignore failures, as this means we are # not set up, so global_settings will suffice. try: setattr(settings, key, value) except (ImproperlyConfigured, ImportError): pass inject_app_defaults() default_app_config = 'boardinghouse.apps.BoardingHouseConfig'Bump version number (so readthedocs picks it up).""" """ __version__ = '0.2' __release__ = '0.2a1' def inject_app_defaults(): """ Automatically inject the default settings for this app. If settings has already been configured, then we need to add our defaults to that (if not overridden), and in all cases we also want to inject our settings into the global_settings object, so we can use diffsettings. Based on: http://passingcuriosity.com/2010/default-settings-for-django-applications/ but with improvements for importing/assignation failures. """ try: import settings as app_settings from django.conf import settings, global_settings from django.core.exceptions import ImproperlyConfigured except ImportError: return for key in dir(app_settings): if key.isupper(): value = getattr(app_settings, key) setattr(global_settings, key, value) if not hasattr(settings, key): # We can just ignore failures, as this means we are # not set up, so global_settings will suffice. try: setattr(settings, key, value) except (ImproperlyConfigured, ImportError): pass inject_app_defaults() default_app_config = 'boardinghouse.apps.BoardingHouseConfig'
<commit_before>""" """ __version__ = '0.1' __release__ = '0.1a3' def inject_app_defaults(): """ Automatically inject the default settings for this app. If settings has already been configured, then we need to add our defaults to that (if not overridden), and in all cases we also want to inject our settings into the global_settings object, so we can use diffsettings. Based on: http://passingcuriosity.com/2010/default-settings-for-django-applications/ but with improvements for importing/assignation failures. """ try: import settings as app_settings from django.conf import settings, global_settings from django.core.exceptions import ImproperlyConfigured except ImportError: return for key in dir(app_settings): if key.isupper(): value = getattr(app_settings, key) setattr(global_settings, key, value) if not hasattr(settings, key): # We can just ignore failures, as this means we are # not set up, so global_settings will suffice. try: setattr(settings, key, value) except (ImproperlyConfigured, ImportError): pass inject_app_defaults() default_app_config = 'boardinghouse.apps.BoardingHouseConfig'<commit_msg>Bump version number (so readthedocs picks it up).<commit_after>""" """ __version__ = '0.2' __release__ = '0.2a1' def inject_app_defaults(): """ Automatically inject the default settings for this app. If settings has already been configured, then we need to add our defaults to that (if not overridden), and in all cases we also want to inject our settings into the global_settings object, so we can use diffsettings. Based on: http://passingcuriosity.com/2010/default-settings-for-django-applications/ but with improvements for importing/assignation failures. """ try: import settings as app_settings from django.conf import settings, global_settings from django.core.exceptions import ImproperlyConfigured except ImportError: return for key in dir(app_settings): if key.isupper(): value = getattr(app_settings, key) setattr(global_settings, key, value) if not hasattr(settings, key): # We can just ignore failures, as this means we are # not set up, so global_settings will suffice. try: setattr(settings, key, value) except (ImproperlyConfigured, ImportError): pass inject_app_defaults() default_app_config = 'boardinghouse.apps.BoardingHouseConfig'
fadac685c4ba7bb094fea0e0b471cbef9ac0e91a
scheduler/schedule.py
scheduler/schedule.py
import sys import time import logging logging.basicConfig(level=logging.DEBUG) from redis import StrictRedis from rq import Queue from apscheduler.schedulers.blocking import BlockingScheduler from d1lod import jobs conn = StrictRedis(host='redis', port='6379') q = Queue(connection=conn) sched = BlockingScheduler() @sched.scheduled_job('interval', minutes=1) def queue_update_job(): q.enqueue(jobs.update_graph) @sched.scheduled_job('interval', minutes=1) def queue_stats_job(): q.enqueue(jobs.calculate_stats) @sched.scheduled_job('interval', minutes=1) def queue_export_job(): q.enqueue(jobs.export_graph) @sched.scheduled_job('interval', minutes=1) def print_jobs_job(): sched.print_jobs() # Wait a bit for Sesame to start time.sleep(10) # Queue the stats job first. This creates the repository before any other # jobs are run. q.enqueue(jobs.calculate_stats) # Start the scheduler sched.start()
import sys import time import logging logging.basicConfig(level=logging.DEBUG) from redis import StrictRedis from rq import Queue from apscheduler.schedulers.blocking import BlockingScheduler from d1lod import jobs conn = StrictRedis(host='redis', port='6379') q = Queue(connection=conn) sched = BlockingScheduler() @sched.scheduled_job('interval', minutes=1) def queue_update_job(): q.enqueue(jobs.update_graph, timeout=3600) # 1hr timeout @sched.scheduled_job('interval', minutes=1) def queue_stats_job(): q.enqueue(jobs.calculate_stats) @sched.scheduled_job('interval', minutes=1) def queue_export_job(): q.enqueue(jobs.export_graph) @sched.scheduled_job('interval', minutes=1) def print_jobs_job(): sched.print_jobs() # Wait a bit for Sesame to start time.sleep(10) # Queue the stats job first. This creates the repository before any other # jobs are run. q.enqueue(jobs.calculate_stats) # Start the scheduler sched.start()
Set a 1hr timeout for the update job
Set a 1hr timeout for the update job
Python
apache-2.0
ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod
693b904a9053fbddc6c93cfab1d6448c4b644d1c
scripts/travis_build_dependent_projects.py
scripts/travis_build_dependent_projects.py
# -*- coding: utf-8 -*-
import os
from click import echo
from travispy import travispy
from travispy import TravisPy


def main():
    restarted = []
    building = []

    for domain in [travispy.PUBLIC, travispy.PRIVATE]:
        echo("Enumerate repos on {!r}".format(domain))
        conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain)
        user = conn.user()
        repos = conn.repos(member=user.login)
        for repo in repos:
            if not repo.active:
                continue
            echo(u"Checking repo: {}\n{!r}".format(repo.slug, repo.description))
            try:
                build = conn.build(repo.last_build_id)
                if "config.json" in build.config.get("config", [""])[0]:
                    echo("Found drift project: {!r}".format(repo.slug))
                    if not build.running:
                        echo("Restarting...")
                        build.restart()
                        restarted.append(repo.slug)
                    else:
                        echo("Build is already running!")
                        building.append(repo.slug)
                else:
                    echo("Not a drift based project.")
            except Exception as e:
                echo("Can't build repo: {!r}".format(e))

    echo()
    if restarted:
        echo("Repos restarted:")
        for reponame in restarted:
            echo("\t{}".format(reponame))
    else:
        echo("No builds restarted.")

    if building:
        echo("Repos already building:")
        for reponame in building:
            echo("\t{}".format(reponame))


if __name__ == "__main__":
    main()
# -*- coding: utf-8 -*-
import os
from click import echo
from travispy import travispy
from travispy import TravisPy


def main():
    restarted = []
    building = []

    for domain in [travispy.PUBLIC, travispy.PRIVATE]:
        echo("Enumerate repos on {!r}".format(domain))
        conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain)
        user = conn.user()
        repos = conn.repos(member=user.login)
        for repo in repos:
            if not repo.active:
                continue
            echo(u"Checking repo: {}\n{!r}".format(repo.slug, repo.description))
            try:
                build = conn.build(repo.last_build_id)
                if 'drift' in build.config.get('drift_build_trigger', []):
                    echo("Found drift project: {!r}".format(repo.slug))
                    if not build.running:
                        echo("Restarting...")
                        build.restart()
                        restarted.append(repo.slug)
                    else:
                        echo("Build is already running!")
                        building.append(repo.slug)
                else:
                    echo("Not a drift based project.")
            except Exception as e:
                echo("Can't build repo: {!r}".format(e))

    echo()
    if restarted:
        echo("Repos restarted:")
        for reponame in restarted:
            echo("\t{}".format(reponame))
    else:
        echo("No builds restarted.")

    if building:
        echo("Repos already building:")
        for reponame in building:
            echo("\t{}".format(reponame))


if __name__ == "__main__":
    main()
Fix Travis dependent build trigger
Fix Travis dependent build trigger
Python
mit
dgnorth/drift,dgnorth/drift,dgnorth/drift
aed4ddb9cd50baf318822830ba49d5b994e4e518
youmap/views.py
youmap/views.py
from django.views.generic import TemplateView

from chickpea.models import Map


class Home(TemplateView):

    template_name = "youmap/home.html"
    list_template_name = "chickpea/map_list.html"

    def get_context_data(self, **kwargs):
        maps = Map.objects.all()[:100]
        return {
            "maps": maps
        }

    def get_template_names(self):
        """
        Dispatch template according to the kind of request: ajax or normal.
        """
        if self.request.is_ajax():
            return [self.list_template_name]
        else:
            return [self.template_name]


home = Home.as_view()
from django.views.generic import TemplateView

from chickpea.models import Map


class Home(TemplateView):

    template_name = "youmap/home.html"
    list_template_name = "chickpea/map_list.html"

    def get_context_data(self, **kwargs):
        maps = Map.objects.order_by('-modified_at')[:100]
        return {
            "maps": maps
        }

    def get_template_names(self):
        """
        Dispatch template according to the kind of request: ajax or normal.
        """
        if self.request.is_ajax():
            return [self.list_template_name]
        else:
            return [self.template_name]


home = Home.as_view()
Order map by modified_at desc in list
Order map by modified_at desc in list
Python
agpl-3.0
diraol/umap
42339932811493bdd398fda4f7a2322a94bdc2e9
saleor/shipping/migrations/0018_default_zones_countries.py
saleor/shipping/migrations/0018_default_zones_countries.py
# Generated by Django 3.0.6 on 2020-06-05 14:35
from django.db import migrations

from ..utils import get_countries_without_shipping_zone


def assign_countries_in_default_shipping_zone(apps, schema_editor):
    ShippingZone = apps.get_model("shipping", "ShippingZone")
    qs = ShippingZone.objects.filter(default=True)
    if qs.exists():
        default_zone = qs[0]
        if not default_zone.countries:
            default_zone.countries = get_countries_without_shipping_zone()
            default_zone.save(update_fields=["countries"])


class Migration(migrations.Migration):

    dependencies = [
        ("shipping", "0017_django_price_2"),
    ]

    operations = [
        migrations.RunPython(
            assign_countries_in_default_shipping_zone, migrations.RunPython.noop
        )
    ]
# Generated by Django 3.0.6 on 2020-06-05 14:35
from django.db import migrations

from django_countries import countries


def get_countries_without_shipping_zone(ShippingZone):
    """Return countries that are not assigned to any shipping zone."""
    covered_countries = set()
    for zone in ShippingZone.objects.all():
        covered_countries.update({c.code for c in zone.countries})
    return (country[0] for country in countries if country[0] not in covered_countries)


def assign_countries_in_default_shipping_zone(apps, schema_editor):
    ShippingZone = apps.get_model("shipping", "ShippingZone")
    qs = ShippingZone.objects.filter(default=True)
    if qs.exists():
        default_zone = qs[0]
        if not default_zone.countries:
            default_zone.countries = get_countries_without_shipping_zone(ShippingZone)
            default_zone.save(update_fields=["countries"])


class Migration(migrations.Migration):

    dependencies = [
        ("shipping", "0017_django_price_2"),
    ]

    operations = [
        migrations.RunPython(
            assign_countries_in_default_shipping_zone, migrations.RunPython.noop
        )
    ]
Move utility function to migration
Move utility function to migration
Python
bsd-3-clause
mociepka/saleor,mociepka/saleor,mociepka/saleor
c85d3cddcc55b427b1d0cd536fb45a28b5f05aff
pytest_blink1/__init__.py
pytest_blink1/__init__.py
from urllib.request import urlopen, URLError


def pytest_terminal_summary(terminalreporter, exitstatus):  # pylint: disable=unused-argument
    _add_patterns()
    if exitstatus == 0:
        _pattern('pytest-success')
    else:
        _pattern('pytest-failure')


def _add_patterns():
    _blink('pattern/add?pname=pytest-success&pattern=0.5,%2300ff00,0.5,%23000000,0.5')
    _blink('pattern/add?pname=pytest-failure&pattern=1,%23ff0000,0.3,1,%23ff0000,0.3,2,%23000000,0.1,0,%23ff0000,0.3,2,%23ff0000,0.3,1,%23000000,0.1,0')


def _pattern(name):
    _blink('pattern/play?pname={}'.format(name))


def _blink(command):
    try:
        urlopen('http://localhost:8934/blink1/{}'.format(command))
    except URLError as e:
        pass
from urllib.request import urlopen, URLError


def pytest_terminal_summary(terminalreporter, exitstatus=None):  # pylint: disable=unused-argument
    _add_patterns()
    if exitstatus == 0:
        _pattern('pytest-success')
    else:
        _pattern('pytest-failure')


def _add_patterns():
    _blink('pattern/add?pname=pytest-success&pattern=0.5,%2300ff00,0.5,%23000000,0.5')
    _blink('pattern/add?pname=pytest-failure&pattern=1,%23ff0000,0.3,1,%23ff0000,0.3,2,%23000000,0.1,0,%23ff0000,0.3,2,%23ff0000,0.3,1,%23000000,0.1,0')


def _pattern(name):
    _blink('pattern/play?pname={}'.format(name))


def _blink(command):
    try:
        urlopen('http://localhost:8934/blink1/{}'.format(command))
    except URLError as e:
        pass
Fix `exitstatus` sometimes not provided
Fix `exitstatus` sometimes not provided
Python
mit
vmalloc/pytest-blink1
208c17449b42dc7d87ac24a04675612e17a31984
sierra_adapter/s3_demultiplexer/src/s3_demultiplexer.py
sierra_adapter/s3_demultiplexer/src/s3_demultiplexer.py
# -*- encoding: utf-8 -*-
import json
import os

import boto3
from wellcome_aws_utils import s3_utils, sns_utils


def main(event, _):
    print(f'event = {event!r}')

    topic_arn = os.environ["TOPIC_ARN"]

    s3_events = s3_utils.parse_s3_record(event=event)
    assert len(s3_events) == 1
    s3_event = s3_events[0]

    s3_client = boto3.client('s3')
    resp = s3_client.get_object(
        Bucket=s3_event['bucket_name'],
        Key=s3_event['object_key']
    )
    body = resp['Body'].read()

    sns_client = boto3.client('sns')
    records = json.loads(body)
    for r in records:
        sns_utils.publish_sns_message(
            sns_client=sns_client,
            topic_arn=topic_arn,
            message=r
        )
# -*- encoding: utf-8 -*-
"""
We have a sierra_reader that reads records from Sierra, and uploads them
to files in S3.  Each file in S3 contains multiple records.

Our downstream applications want to process records one at a time, so
this demultiplexer receives the event stream of PUTs from S3, and splits
each file into individual messages on SNS.

                     S3                              SNS
+--------+   +-------------------+
| reader |-->| r1, r2, r3, r4    | --- demultiplexer ---+
+--------+   +-------------------+                      |
             | r5, r6, r7, r8    |                      |
             +-------------------+                      v
             | r9, r10, r11, r12 |                   [ r1 ]
             +-------------------+                   [ r2 ]
                                                     [ r3 ]
                                                      ....

"""
import json
import os

import boto3
from wellcome_aws_utils import s3_utils, sns_utils


def main(event, _):
    print(f'event = {event!r}')

    topic_arn = os.environ["TOPIC_ARN"]

    s3_events = s3_utils.parse_s3_record(event=event)
    assert len(s3_events) == 1
    s3_event = s3_events[0]

    s3_client = boto3.client('s3')
    resp = s3_client.get_object(
        Bucket=s3_event['bucket_name'],
        Key=s3_event['object_key']
    )
    body = resp['Body'].read()

    sns_client = boto3.client('sns')
    records = json.loads(body)
    for r in records:
        sns_utils.publish_sns_message(
            sns_client=sns_client,
            topic_arn=topic_arn,
            message=r
        )
Add a comment explaining the purpose of the demultiplexer
Add a comment explaining the purpose of the demultiplexer
Python
mit
wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api
8f93bad77371fbc0d7dc75548472c7715eb8a2ee
climlab/tests/test_rcm.py
climlab/tests/test_rcm.py
from __future__ import division

import numpy as np
import climlab
import pytest


@pytest.fixture()
def rcm():
    # initial state (temperatures)
    state = climlab.column_state(num_lev=num_lev, num_lat=1, water_depth=5.)
    ## Create individual physical process models:
    # fixed relative humidity
    h2o = climlab.radiation.ManabeWaterVapor(state=state, name='H2O')
    # Hard convective adjustment
    convadj = climlab.convection.ConvectiveAdjustment(state=state,
                                                      name='ConvectiveAdjustment',
                                                      adj_lapse_rate=6.5)
    # CAM3 radiation with default parameters and interactive water vapor
    rad = climlab.radiation.RRTMG(state=state,
                                  albedo=alb,
                                  specific_humidity=h2o.q,
                                  name='Radiation')
    # Couple the models
    rcm = climlab.couple([h2o,convadj,rad], name='RCM')
    return rcm


def test_convective_adjustment(rcm):
    rcm.step_forward()
    # test non-scalar critical lapse rate
    num_lev = rcm.lev.size
    rcm.subprocess['ConvectiveAdjustment'].adj_lapse_rate = np.linspace(5., 8., num_lev)
    rcm.step_forward()
    # test pseudoadiabatic critical lapse rate
    rcm.subprocess['ConvectiveAdjustment'].adj_lapse_rate = 'pseudoadiabat'
    rcm.step_forward()
from __future__ import division

import numpy as np
import climlab
import pytest


@pytest.fixture()
def rcm():
    # initial state (temperatures)
    state = climlab.column_state(num_lev=num_lev, num_lat=1, water_depth=5.)
    ## Create individual physical process models:
    # fixed relative humidity
    h2o = climlab.radiation.ManabeWaterVapor(state=state, name='H2O')
    # Hard convective adjustment
    convadj = climlab.convection.ConvectiveAdjustment(state=state,
                                                      name='ConvectiveAdjustment',
                                                      adj_lapse_rate=6.5)
    # CAM3 radiation with default parameters and interactive water vapor
    rad = climlab.radiation.RRTMG(state=state,
                                  albedo=alb,
                                  specific_humidity=h2o.q,
                                  name='Radiation')
    # Couple the models
    rcm = climlab.couple([h2o,convadj,rad], name='RCM')
    return rcm


@pytest.mark.fast
def test_convective_adjustment(rcm):
    rcm.step_forward()
    # test non-scalar critical lapse rate
    num_lev = rcm.lev.size
    rcm.subprocess['ConvectiveAdjustment'].adj_lapse_rate = np.linspace(5., 8., num_lev)
    rcm.step_forward()
    # test pseudoadiabatic critical lapse rate
    rcm.subprocess['ConvectiveAdjustment'].adj_lapse_rate = 'pseudoadiabat'
    rcm.step_forward()
Mark rcm test as fast so it executes during build and test
Mark rcm test as fast so it executes during build and test
Python
mit
cjcardinale/climlab,cjcardinale/climlab,brian-rose/climlab,cjcardinale/climlab,brian-rose/climlab
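Aside on the technique this record exercises (not part of the record itself): the change relies on pytest's custom-mark machinery. A minimal, assumed sketch of how the `fast` mark from the commit would typically be registered so it can be selected without warnings — the conftest placement is an assumption about the project's test setup:

# conftest.py -- register the mark so `pytest -m fast` selects it
# without raising PytestUnknownMarkWarning (assumed project layout):
def pytest_configure(config):
    config.addinivalue_line(
        "markers", "fast: quick tests that run during build and test"
    )

Running `pytest -m fast` then executes only tests carrying `@pytest.mark.fast`, such as the marked `test_convective_adjustment` above.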
54ec54d3a7c5f198bd4b9ca9d4cd613108ac4987
ichnaea/tests/test_util.py
ichnaea/tests/test_util.py
from datetime import datetime

import pytest
from pytz import UTC

from ichnaea.exceptions import GZIPDecodeError
from ichnaea import util


class TestUtil(object):
    gzip_foo = (
        b"\x1f\x8b\x08\x00\xed\x7f\x9aU\x02\xffK"
        b"\xcb\xcf\x07\x00!es\x8c\x03\x00\x00\x00"
    )

    def test_utcnow(self):
        now = util.utcnow()
        assert isinstance(now, datetime)
        assert now.tzinfo == UTC

    def test_encode_gzip(self):
        data = util.encode_gzip(b"foo")
        # Test around the 4-byte timestamp
        assert data[:4] == self.gzip_foo[:4]
        assert data[8:] == self.gzip_foo[8:]

    def test_decode_gzip(self):
        data = util.decode_gzip(self.gzip_foo)
        assert data == b"foo"

    def test_roundtrip_gzip(self):
        data = util.decode_gzip(util.encode_gzip(b"foo"))
        assert data == b"foo"

    def test_decode_gzip_error(self):
        with pytest.raises(GZIPDecodeError):
            util.decode_gzip(self.gzip_foo[:1])
        with pytest.raises(GZIPDecodeError):
            util.decode_gzip(self.gzip_foo[:5])
from datetime import datetime

import pytest
from pytz import UTC

from ichnaea.exceptions import GZIPDecodeError
from ichnaea import util


class TestUtil(object):
    gzip_foo = (
        b"\x1f\x8b\x08\x00\xed\x7f\x9aU\x00\xffK"
        b"\xcb\xcf\x07\x00!es\x8c\x03\x00\x00\x00"
    )

    def test_utcnow(self):
        now = util.utcnow()
        assert isinstance(now, datetime)
        assert now.tzinfo == UTC

    def test_encode_gzip(self):
        data = util.encode_gzip(b"foo")
        # Test around the 4-byte timestamp
        assert data[:4] == self.gzip_foo[:4]
        assert data[8:] == self.gzip_foo[8:]

    def test_decode_gzip(self):
        data = util.decode_gzip(self.gzip_foo)
        assert data == b"foo"

    def test_roundtrip_gzip(self):
        data = util.decode_gzip(util.encode_gzip(b"foo"))
        assert data == b"foo"

    def test_decode_gzip_error(self):
        with pytest.raises(GZIPDecodeError):
            util.decode_gzip(self.gzip_foo[:1])
        with pytest.raises(GZIPDecodeError):
            util.decode_gzip(self.gzip_foo[:5])
Update gzip output for Python 3.8.2
Update gzip output for Python 3.8.2

Python 3.8.2 includes a fix for https://bugs.python.org/issue39389. Previously, the extra flags byte was always set to 0x02, claiming maximum compression. It now reflects the actual compression setting. Since our default is neither fastest nor maximum, the expected test output is 0x00 for the extra flags byte.
Python
apache-2.0
mozilla/ichnaea,mozilla/ichnaea,mozilla/ichnaea,mozilla/ichnaea
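The behaviour this commit message describes can be checked directly with the standard library; the following sketch (an editorial illustration, not part of the record) inspects byte 8 of the stream — the XFL "extra flags" field from RFC 1952 — at a few compression levels:

import gzip

for level in (1, 6, 9):
    blob = gzip.compress(b"foo", compresslevel=level)
    print(level, hex(blob[8]))  # byte 8 of a gzip stream is the XFL field
# Expected on Python >= 3.8.2: 0x4 (fastest), 0x0 (neither), 0x2 (maximum);
# earlier interpreters printed 0x2 for every level.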
2fff4600d701d6f5ac9675d96916ca74cf3cfdbd
riker/worker/apps.py
riker/worker/apps.py
from __future__ import unicode_literals

from django.apps import AppConfig

from worker.utils import LircListener


class WorkerConfig(AppConfig):
    name = 'worker'

    def ready(self):
        lirc_name = getattr(settings, 'RIKER_LIRC_LISTENER_NAME', 'riker')
        LircListener(lirc_name=lirc_name).start()
from __future__ import unicode_literals

from django.apps import AppConfig

from worker.utils import LircListener


class WorkerConfig(AppConfig):
    name = 'worker'
Remove prematurely inserted 'ready' method
Remove prematurely inserted 'ready' method
Python
mit
haikuginger/riker
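Worth noting as an aside: the removed `ready` hook also referenced a `settings` name that was never imported, so it would have raised NameError at startup. If the hook is reinstated later, a working version would need the import; a sketch, assuming the listener should start once when Django's app registry is fully populated:

from django.conf import settings  # was missing in the removed version

class WorkerConfig(AppConfig):
    name = 'worker'

    def ready(self):
        # ready() runs once per process after all apps are loaded
        lirc_name = getattr(settings, 'RIKER_LIRC_LISTENER_NAME', 'riker')
        LircListener(lirc_name=lirc_name).start()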
93149023bb28319d05213a122c7f4e59a8589e38
pirx/base.py
pirx/base.py
import collections


class Settings(object):
    def __init__(self):
        self._settings = collections.OrderedDict()

    def __setattr__(self, name, value):
        if name.startswith('_'):
            super(Settings, self).__setattr__(name, value)
        else:
            self._settings[name] = value

    def __str__(self):
        lines = []
        for name, value in self._settings.iteritems():
            if name.startswith('_'):
                lines.append(value)
            else:
                lines.append('%s = %s' % (name.upper(), value.__repr__()))
        return '\n'.join(lines)

    def _set_raw_value(self, value):
        self._settings['_%d' % len(self._settings)] = value

    def imp(self, module_name):
        value = 'import %s' % module_name
        self._set_raw_value(value)
import collections
import datetime


class Settings(object):
    docstring = 'Settings built with Pirx on %(datetime)s'

    def __init__(self):
        self._settings = collections.OrderedDict()
        docstring = self.docstring % {
            'datetime': datetime.datetime.now()
        }
        self._set_raw_value('"""%s"""' % docstring)

    def __setattr__(self, name, value):
        if name.startswith('_'):
            super(Settings, self).__setattr__(name, value)
        else:
            self._settings[name] = value

    def __str__(self):
        lines = []
        for name, value in self._settings.iteritems():
            if name.startswith('_'):
                lines.append(value)
            else:
                lines.append('%s = %s' % (name.upper(), value.__repr__()))
        return '\n'.join(lines)

    def _set_raw_value(self, value):
        self._settings['_%d' % len(self._settings)] = value

    def imp(self, module_name):
        value = 'import %s' % module_name
        self._set_raw_value(value)
Insert customizable docstring at the beginning of settings file
Insert customizable docstring at the beginning of settings file
Python
mit
piotrekw/pirx
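A short usage sketch (editorial aside; Python 2 assumed, since the class uses `iteritems`) showing what the injected docstring buys when the built settings file is rendered:

s = Settings()
s.imp('os')
s.debug = True
print s
# """Settings built with Pirx on 2013-...-..."""   <- injected header
# import os
# DEBUG = True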
749219a1282a347133ba73127ed7cc8d8009897d
anchore_engine/clients/syft_wrapper.py
anchore_engine/clients/syft_wrapper.py
import json
import os
import shlex

from anchore_engine.utils import run_check


def run_syft(image):
    proc_env = os.environ.copy()

    syft_env = {
        "SYFT_CHECK_FOR_APP_UPDATE": "0",
        "SYFT_LOG_STRUCTURED": "1",
    }
    proc_env.update(syft_env)

    cmd = "syft -vv -o json oci-dir:{image}".format(image=image)

    stdout, _ = run_check(shlex.split(cmd), env=proc_env)

    return json.loads(stdout)
import json
import os
import shlex

from anchore_engine.utils import run_check


def run_syft(image):
    proc_env = os.environ.copy()

    syft_env = {
        "SYFT_CHECK_FOR_APP_UPDATE": "0",
        "SYFT_LOG_STRUCTURED": "1",
    }
    proc_env.update(syft_env)

    cmd = "syft -vv -o json oci-dir:{image}".format(image=image)

    stdout, _ = run_check(shlex.split(cmd), env=proc_env, log_level="spew")

    return json.loads(stdout)
Make the syft invocation only log the full output at spew level instead of debug.
Make the syft invocation only log the full output at spew level instead of debug.

The syft json output for very large images can be 100s of MB and cause the analyzer to be unusable due to the logging itself. This changes that call to only dump output at "spew" level logging.

Signed-off-by: Zach Hill <9de8c4480303b5335cd2a33eefe814615ba3612a@anchore.com>
Python
apache-2.0
anchore/anchore-engine,anchore/anchore-engine,anchore/anchore-engine
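`run_check` is internal to anchore-engine, so its `log_level` contract isn't visible here. As a generic illustration of the pattern the commit message describes — keeping very large output off the default debug channel — a lower-priority level can be defined with the standard library; every name below is an assumption for the sketch:

import logging

SPEW = 5  # an assumed custom level below logging.DEBUG (10)
logging.addLevelName(SPEW, "SPEW")
log = logging.getLogger("analyzer")

# stdout here stands for the (potentially huge) tool output captured earlier;
# the record is emitted only when the effective level is <= 5.
log.log(SPEW, "syft output: %s", stdout)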
a800bacf217ef903fd266e1fbf8103365ab64c94
source/segue/frontend/exporter.py
source/segue/frontend/exporter.py
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.

from PySide import QtGui

from .selector import SelectorWidget
from .options import OptionsWidget


class ExporterWidget(QtGui.QWidget):
    '''Manage exporting.'''

    def __init__(self, host, parent=None):
        '''Initialise with *host* application and *parent*.'''
        super(ExporterWidget, self).__init__(parent=parent)
        self.host = host
        self.build()
        self.post_build()

    def build(self):
        '''Build and layout the interface.'''
        self.setLayout(QtGui.QVBoxLayout())

        self.selector_widget = SelectorWidget(host=self.host)
        self.selector_widget.setFrameStyle(
            QtGui.QFrame.StyledPanel
        )
        self.layout().addWidget(self.selector_widget)

        self.options_widget = OptionsWidget(host=self.host)
        self.options_widget.setFrameStyle(
            QtGui.QFrame.StyledPanel
        )
        self.layout().addWidget(self.options_widget)

        self.export_button = QtGui.QPushButton('Export')
        self.layout().addWidget(self.export_button)

    def post_build(self):
        '''Perform post-build operations.'''
        self.setWindowTitle('Segue Exporter')
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.

from PySide import QtGui

from .selector import SelectorWidget
from .options import OptionsWidget


class ExporterWidget(QtGui.QWidget):
    '''Manage exporting.'''

    def __init__(self, host, parent=None):
        '''Initialise with *host* application and *parent*.'''
        super(ExporterWidget, self).__init__(parent=parent)
        self.host = host
        self.build()
        self.post_build()

    def build(self):
        '''Build and layout the interface.'''
        self.setLayout(QtGui.QVBoxLayout())

        self.selector_widget = SelectorWidget(host=self.host)
        self.selector_widget.setFrameStyle(
            QtGui.QFrame.StyledPanel
        )
        self.layout().addWidget(self.selector_widget)

        self.options_widget = OptionsWidget(host=self.host)
        self.options_widget.setFrameStyle(
            QtGui.QFrame.StyledPanel
        )
        self.layout().addWidget(self.options_widget)

        self.export_button = QtGui.QPushButton('Export')
        self.layout().addWidget(self.export_button)

    def post_build(self):
        '''Perform post-build operations.'''
        self.setWindowTitle('Segue Exporter')
        self.selector_widget.added.connect(self.on_selection_changed)
        self.selector_widget.removed.connect(self.on_selection_changed)
        self.validate()

    def on_selection_changed(self, items):
        '''Handle selection change.'''
        self.validate()

    def validate(self):
        '''Validate options and update UI state.'''
        self.export_button.setEnabled(False)
        if not self.selector_widget.items():
            return
        self.export_button.setEnabled(True)
Add basic validation of ui state.
Add basic validation of ui state.
Python
apache-2.0
4degrees/segue
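The new wiring assumes `SelectorWidget` exposes `added`/`removed` signals and an `items()` accessor; since that module isn't shown in the record, here is a hypothetical sketch of the minimum interface the validation code relies on — all of it an assumption, not the project's actual class:

from PySide import QtCore, QtGui

class SelectorWidget(QtGui.QFrame):
    '''Hypothetical interface assumed by ExporterWidget.validate.'''

    added = QtCore.Signal(list)    # emitted with the items just added
    removed = QtCore.Signal(list)  # emitted with the items just removed

    def items(self):
        '''Return the list of currently selected items.'''
        return self._items  # illustrative backing store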
461d364b3973be071d3d6ef891fbfa55e95dcfa9
megascops/settings/testing.py
megascops/settings/testing.py
from base import *  # noqa

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}

DEBUG = False
from base import *  # noqa

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}

INSTALLED_APPS += [
    'django_jenkins'
]

DEBUG = False
Add django_jenkins to installed apps
Add django_jenkins to installed apps
Python
agpl-3.0
strycore/megascops,strycore/megascops,strycore/megascops,strycore/megascops
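As an aside on why the `+=` works: the star import binds base's `INSTALLED_APPS` in the testing module, and the in-place extension happens before Django reads the settings module selected for the run. A minimal model of the layering (names and paths assumed):

# base.py
INSTALLED_APPS = ['django.contrib.contenttypes']  # canonical list

# testing.py
from base import *                    # binds base's list in this module
INSTALLED_APPS += ['django_jenkins']  # extended before Django reads settings

# django_jenkins then provides the `jenkins` management command for CI, e.g.
#   DJANGO_SETTINGS_MODULE=megascops.settings.testing python manage.py jenkins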
73e5fe29074f52e0b769fd2a6c40669040bef330
app/notify_client/invite_api_client.py
app/notify_client/invite_api_client.py
from notifications_python_client.base import BaseAPIClient
from app.notify_client.models import User


class InviteApiClient(BaseAPIClient):
    def __init__(self, base_url=None, client_id=None, secret=None):
        super(self.__class__, self).__init__(base_url=base_url or 'base_url',
                                             client_id=client_id or 'client_id',
                                             secret=secret or 'secret')

    def init_app(self, app):
        self.base_url = app.config['API_HOST_NAME']
        self.client_id = app.config['ADMIN_CLIENT_USER_NAME']
        self.secret = app.config['ADMIN_CLIENT_SECRET']

    def create_invite(self, invite_from_id, service_id, email_address, permissions):
        data = {
            'service': str(service_id),
            'email_address': email_address,
            'from_user': invite_from_id,
            'permissions': permissions
        }
        resp = self.post(url='/service/{}/invite'.format(service_id), data=data)
        return resp['data']

    def get_invites_for_service(self, service_id):
        endpoint = '/service/{}/invite'.format(service_id)
        resp = self.get(endpoint)
        return [User(data) for data in resp['data']]

    def cancel_invited_user(self, service_id, invited_user_id):
        data = {'status': 'cancelled'}
        resp = self.post(url='/service/{0}/invite/{1}'.format(service_id, invited_user_id), data=data)
        return resp['data']
from notifications_python_client.base import BaseAPIClient
from app.notify_client.models import User


class InviteApiClient(BaseAPIClient):
    def __init__(self, base_url=None, client_id=None, secret=None):
        super(self.__class__, self).__init__(base_url=base_url or 'base_url',
                                             client_id=client_id or 'client_id',
                                             secret=secret or 'secret')

    def init_app(self, app):
        self.base_url = app.config['API_HOST_NAME']
        self.client_id = app.config['ADMIN_CLIENT_USER_NAME']
        self.secret = app.config['ADMIN_CLIENT_SECRET']

    def create_invite(self, invite_from_id, service_id, email_address, permissions):
        data = {
            'service': str(service_id),
            'email_address': email_address,
            'from_user': invite_from_id,
            'permissions': permissions
        }
        resp = self.post(url='/service/{}/invite'.format(service_id), data=data)
        return resp['data']

    def get_invites_for_service(self, service_id):
        endpoint = '/service/{}/invite'.format(service_id)
        resp = self.get(endpoint)
        return [User(data) for data in resp['data']]

    def cancel_invited_user(self, service_id, invited_user_id):
        data = {'status': 'cancelled'}
        self.post(url='/service/{0}/invite/{1}'.format(service_id, invited_user_id), data=data)
Change cancel_invited_user client to not return anything.
Change cancel_invited_user client to not return anything.
Python
mit
alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin
from notifications_python_client.base import BaseAPIClient from app.notify_client.models import User class InviteApiClient(BaseAPIClient): def __init__(self, base_url=None, client_id=None, secret=None): super(self.__class__, self).__init__(base_url=base_url or 'base_url', client_id=client_id or 'client_id', secret=secret or 'secret') def init_app(self, app): self.base_url = app.config['API_HOST_NAME'] self.client_id = app.config['ADMIN_CLIENT_USER_NAME'] self.secret = app.config['ADMIN_CLIENT_SECRET'] def create_invite(self, invite_from_id, service_id, email_address, permissions): data = { 'service': str(service_id), 'email_address': email_address, 'from_user': invite_from_id, 'permissions': permissions } resp = self.post(url='/service/{}/invite'.format(service_id), data=data) return resp['data'] def get_invites_for_service(self, service_id): endpoint = '/service/{}/invite'.format(service_id) resp = self.get(endpoint) return [User(data) for data in resp['data']] def cancel_invited_user(self, service_id, invited_user_id): data = {'status': 'cancelled'} resp = self.post(url='/service/{0}/invite/{1}'.format(service_id, invited_user_id), data=data) return resp['data'] Change cancel_invited_user client to not return anything.
from notifications_python_client.base import BaseAPIClient from app.notify_client.models import User class InviteApiClient(BaseAPIClient): def __init__(self, base_url=None, client_id=None, secret=None): super(self.__class__, self).__init__(base_url=base_url or 'base_url', client_id=client_id or 'client_id', secret=secret or 'secret') def init_app(self, app): self.base_url = app.config['API_HOST_NAME'] self.client_id = app.config['ADMIN_CLIENT_USER_NAME'] self.secret = app.config['ADMIN_CLIENT_SECRET'] def create_invite(self, invite_from_id, service_id, email_address, permissions): data = { 'service': str(service_id), 'email_address': email_address, 'from_user': invite_from_id, 'permissions': permissions } resp = self.post(url='/service/{}/invite'.format(service_id), data=data) return resp['data'] def get_invites_for_service(self, service_id): endpoint = '/service/{}/invite'.format(service_id) resp = self.get(endpoint) return [User(data) for data in resp['data']] def cancel_invited_user(self, service_id, invited_user_id): data = {'status': 'cancelled'} self.post(url='/service/{0}/invite/{1}'.format(service_id, invited_user_id), data=data)
<commit_before> from notifications_python_client.base import BaseAPIClient from app.notify_client.models import User class InviteApiClient(BaseAPIClient): def __init__(self, base_url=None, client_id=None, secret=None): super(self.__class__, self).__init__(base_url=base_url or 'base_url', client_id=client_id or 'client_id', secret=secret or 'secret') def init_app(self, app): self.base_url = app.config['API_HOST_NAME'] self.client_id = app.config['ADMIN_CLIENT_USER_NAME'] self.secret = app.config['ADMIN_CLIENT_SECRET'] def create_invite(self, invite_from_id, service_id, email_address, permissions): data = { 'service': str(service_id), 'email_address': email_address, 'from_user': invite_from_id, 'permissions': permissions } resp = self.post(url='/service/{}/invite'.format(service_id), data=data) return resp['data'] def get_invites_for_service(self, service_id): endpoint = '/service/{}/invite'.format(service_id) resp = self.get(endpoint) return [User(data) for data in resp['data']] def cancel_invited_user(self, service_id, invited_user_id): data = {'status': 'cancelled'} resp = self.post(url='/service/{0}/invite/{1}'.format(service_id, invited_user_id), data=data) return resp['data'] <commit_msg>Change cancel_invited_user client to not return anything.<commit_after>
from notifications_python_client.base import BaseAPIClient from app.notify_client.models import User class InviteApiClient(BaseAPIClient): def __init__(self, base_url=None, client_id=None, secret=None): super(self.__class__, self).__init__(base_url=base_url or 'base_url', client_id=client_id or 'client_id', secret=secret or 'secret') def init_app(self, app): self.base_url = app.config['API_HOST_NAME'] self.client_id = app.config['ADMIN_CLIENT_USER_NAME'] self.secret = app.config['ADMIN_CLIENT_SECRET'] def create_invite(self, invite_from_id, service_id, email_address, permissions): data = { 'service': str(service_id), 'email_address': email_address, 'from_user': invite_from_id, 'permissions': permissions } resp = self.post(url='/service/{}/invite'.format(service_id), data=data) return resp['data'] def get_invites_for_service(self, service_id): endpoint = '/service/{}/invite'.format(service_id) resp = self.get(endpoint) return [User(data) for data in resp['data']] def cancel_invited_user(self, service_id, invited_user_id): data = {'status': 'cancelled'} self.post(url='/service/{0}/invite/{1}'.format(service_id, invited_user_id), data=data)
from notifications_python_client.base import BaseAPIClient from app.notify_client.models import User class InviteApiClient(BaseAPIClient): def __init__(self, base_url=None, client_id=None, secret=None): super(self.__class__, self).__init__(base_url=base_url or 'base_url', client_id=client_id or 'client_id', secret=secret or 'secret') def init_app(self, app): self.base_url = app.config['API_HOST_NAME'] self.client_id = app.config['ADMIN_CLIENT_USER_NAME'] self.secret = app.config['ADMIN_CLIENT_SECRET'] def create_invite(self, invite_from_id, service_id, email_address, permissions): data = { 'service': str(service_id), 'email_address': email_address, 'from_user': invite_from_id, 'permissions': permissions } resp = self.post(url='/service/{}/invite'.format(service_id), data=data) return resp['data'] def get_invites_for_service(self, service_id): endpoint = '/service/{}/invite'.format(service_id) resp = self.get(endpoint) return [User(data) for data in resp['data']] def cancel_invited_user(self, service_id, invited_user_id): data = {'status': 'cancelled'} resp = self.post(url='/service/{0}/invite/{1}'.format(service_id, invited_user_id), data=data) return resp['data'] Change cancel_invited_user client to not return anything. from notifications_python_client.base import BaseAPIClient from app.notify_client.models import User class InviteApiClient(BaseAPIClient): def __init__(self, base_url=None, client_id=None, secret=None): super(self.__class__, self).__init__(base_url=base_url or 'base_url', client_id=client_id or 'client_id', secret=secret or 'secret') def init_app(self, app): self.base_url = app.config['API_HOST_NAME'] self.client_id = app.config['ADMIN_CLIENT_USER_NAME'] self.secret = app.config['ADMIN_CLIENT_SECRET'] def create_invite(self, invite_from_id, service_id, email_address, permissions): data = { 'service': str(service_id), 'email_address': email_address, 'from_user': invite_from_id, 'permissions': permissions } resp = self.post(url='/service/{}/invite'.format(service_id), data=data) return resp['data'] def get_invites_for_service(self, service_id): endpoint = '/service/{}/invite'.format(service_id) resp = self.get(endpoint) return [User(data) for data in resp['data']] def cancel_invited_user(self, service_id, invited_user_id): data = {'status': 'cancelled'} self.post(url='/service/{0}/invite/{1}'.format(service_id, invited_user_id), data=data)
<commit_before>from notifications_python_client.base import BaseAPIClient
from app.notify_client.models import User


class InviteApiClient(BaseAPIClient):
    def __init__(self, base_url=None, client_id=None, secret=None):
        super(self.__class__, self).__init__(base_url=base_url or 'base_url',
                                             client_id=client_id or 'client_id',
                                             secret=secret or 'secret')

    def init_app(self, app):
        self.base_url = app.config['API_HOST_NAME']
        self.client_id = app.config['ADMIN_CLIENT_USER_NAME']
        self.secret = app.config['ADMIN_CLIENT_SECRET']

    def create_invite(self, invite_from_id, service_id, email_address, permissions):
        data = {
            'service': str(service_id),
            'email_address': email_address,
            'from_user': invite_from_id,
            'permissions': permissions
        }
        resp = self.post(url='/service/{}/invite'.format(service_id), data=data)
        return resp['data']

    def get_invites_for_service(self, service_id):
        endpoint = '/service/{}/invite'.format(service_id)
        resp = self.get(endpoint)
        return [User(data) for data in resp['data']]

    def cancel_invited_user(self, service_id, invited_user_id):
        data = {'status': 'cancelled'}
        resp = self.post(url='/service/{0}/invite/{1}'.format(service_id, invited_user_id),
                         data=data)
        return resp['data']
<commit_msg>Change cancel_invited_user client to not return anything.<commit_after>
from notifications_python_client.base import BaseAPIClient
from app.notify_client.models import User


class InviteApiClient(BaseAPIClient):
    def __init__(self, base_url=None, client_id=None, secret=None):
        super(self.__class__, self).__init__(base_url=base_url or 'base_url',
                                             client_id=client_id or 'client_id',
                                             secret=secret or 'secret')

    def init_app(self, app):
        self.base_url = app.config['API_HOST_NAME']
        self.client_id = app.config['ADMIN_CLIENT_USER_NAME']
        self.secret = app.config['ADMIN_CLIENT_SECRET']

    def create_invite(self, invite_from_id, service_id, email_address, permissions):
        data = {
            'service': str(service_id),
            'email_address': email_address,
            'from_user': invite_from_id,
            'permissions': permissions
        }
        resp = self.post(url='/service/{}/invite'.format(service_id), data=data)
        return resp['data']

    def get_invites_for_service(self, service_id):
        endpoint = '/service/{}/invite'.format(service_id)
        resp = self.get(endpoint)
        return [User(data) for data in resp['data']]

    def cancel_invited_user(self, service_id, invited_user_id):
        data = {'status': 'cancelled'}
        self.post(url='/service/{0}/invite/{1}'.format(service_id, invited_user_id),
                  data=data)

6fc304f21d762d692188057fcded195fa6c5675a
test/test_amqp_message.py
test/test_amqp_message.py
import unittest

from amqp import Message
from mock import patch

from sir.amqp.message import (InvalidMessageContentException,
                              Message as PMessage, MESSAGE_TYPES)


class AmqpMessageTest(unittest.TestCase):
    @staticmethod
    def _parsed_message(body="artist 123 456", channel="search.index"):
        msg = Message(body=body)
        parsed_message = PMessage.from_amqp_message(channel, msg)
        return parsed_message

    def test_message_parses(self):
        parsed_message = self._parsed_message()
        self.assertEqual(parsed_message.entity_type, "artist")
        self.assertEqual(parsed_message.message_type, MESSAGE_TYPES.index)

    def test_invalid_channel_raises(self):
        self.assertRaises(ValueError, self._parsed_message, channel="foo.bar")

    @patch("sir.amqp.message.SCHEMA", new={'entity': None})
    def test_invalid_entity_raises(self):
        self.assertRaises(ValueError, self._parsed_message)

    def test_message_too_short_raises(self):
        self.assertRaises(InvalidMessageContentException,
                          self._parsed_message, body="foo")

    def test_non_delete_converts_to_int(self):
        for c in ("search.index", "search.update"):
            parsed_message = self._parsed_message(channel=c)
            map(lambda id_: self.assertTrue(isinstance(id_, int)),
                parsed_message.ids)

import unittest

from amqp import Message
from mock import patch

from sir.amqp.message import (InvalidMessageContentException,
                              Message as PMessage, MESSAGE_TYPES)


class AmqpMessageTest(unittest.TestCase):
    @staticmethod
    def _parsed_message(body="artist 123 456", channel="search.index"):
        msg = Message(body=body)
        parsed_message = PMessage.from_amqp_message(channel, msg)
        return parsed_message

    def test_message_parses(self):
        parsed_message = self._parsed_message()
        self.assertEqual(parsed_message.entity_type, "artist")
        self.assertEqual(parsed_message.message_type, MESSAGE_TYPES.index)

    def test_invalid_channel_raises(self):
        self.assertRaises(ValueError, self._parsed_message, channel="foo.bar")

    @patch("sir.amqp.message.SCHEMA", new={'entity': None})
    def test_invalid_entity_raises(self):
        self.assertRaises(ValueError, self._parsed_message)

    def test_message_too_short_raises(self):
        self.assertRaises(InvalidMessageContentException,
                          self._parsed_message, body="foo")

    def test_non_delete_converts_to_int(self):
        parsed_message = self._parsed_message(channel="search.index")
        map(lambda id_: self.assertTrue(isinstance(id_, int)),
            parsed_message.ids)

Remove a reference to search.update
Remove a reference to search.update
Python
mit
jeffweeksio/sir
2e361627ca94b3a3b1cdd9583d22ca8ff81a0591
rpn/util.py
rpn/util.py
from functools import wraps

import subprocess

_SELECTIONS = {
    '+': 'clipboard',
    '*': 'primary',
}


def _store_selection(data, selection):
    with subprocess.Popen(['xclip', '-selection', selection],
                          stdin=subprocess.PIPE) as xclip:
        xclip.stdin.write(str(data).encode())


def _load_selection(selection):
    with subprocess.Popen(['xclip', '-selection', selection, '-o'],
                          stdout=PIPE) as xclip:
        return xclip.stdout.read().decode()


class RPNError(Exception):
    pass


def wrap_user_errors(fmt):
    '''
    Ugly hack decorator that converts exceptions to warnings.

    Passes through RPNErrors.
    '''
    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            try:
                return f(*args, **kwargs)
            except RPNError:
                raise
            except Exception as e:
                raise RPNError(fmt.format(*args, **kwargs), e)
        return wrapper
    return decorator

from functools import wraps

import subprocess

_SELECTIONS = {
    '+': 'clipboard',
    '*': 'primary',
}


def _store_selection(data, selection):
    with subprocess.Popen(['xclip', '-selection', selection],
                          stdin=subprocess.PIPE) as xclip:
        xclip.stdin.write(str(data).encode())


def _load_selection(selection):
    with subprocess.Popen(['xclip', '-selection', selection, '-o'],
                          stdout=subprocess.PIPE) as xclip:
        return xclip.stdout.read().decode()


class RPNError(Exception):
    pass


def wrap_user_errors(fmt):
    '''
    Ugly hack decorator that converts exceptions to warnings.

    Passes through RPNErrors.
    '''
    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            try:
                return f(*args, **kwargs)
            except RPNError:
                raise
            except Exception as e:
                raise RPNError(fmt.format(*args, **kwargs), e)
        return wrapper
    return decorator

Fix typo resulting in NameError
Fix typo resulting in NameError
Python
isc
pilona/RPN,pilona/RPN
de7e36f6b83c6d41c46b222ea94eff3b6f1909e6
ser2file/ser2file.py
ser2file/ser2file.py
#!/usr/bin/env python3

import sys
import serial
import datetime

if __name__=='__main__':
    try:
        ser = serial.Serial('COM1', 115200, timeout=1)
        print("opened " + ser.portstr)
    except serial.SerialException:
        print('could not open port')
        sys.exit(1)
        pass

    f = open(datetime.datetime.now().strftime("%y-%m-%d_%H-%M-%S") + '.csv', 'w+')
    f.write('10ms;x;y;z;delta\n')

    i = 0
    line = ser.readline().decode('utf-8')  # Truncate first read line
    try:
        while True:
            line = ser.readline().decode('utf-8')
            if line:
                f.write(str(i) + ';' + line)
                i = i + 1
                print(line)
    except KeyboardInterrupt:
        pass

    f.close()
    ser.close()

#!/usr/bin/env python3

import sys
import serial
import datetime

if __name__=='__main__':
    try:
        ser = serial.Serial('COM1', 115200, timeout=1)
        print("opened " + ser.portstr)
    except serial.SerialException:
        print('could not open port')
        sys.exit(1)
        pass

    f = open(datetime.datetime.now().strftime("%y-%m-%d_%H-%M-%S") + '.txt', 'w+')
    f.write('10ms\tx\ty\tz\tdelta\n')

    i = 0
    line = ser.readline().decode('utf-8')  # Truncate first read line
    try:
        while True:
            line = ser.readline().decode('utf-8')
            if line:
                f.write(str(i) + '\t' + line.replace(';', '\t'))
                i = i + 1
                print(line)
    except KeyboardInterrupt:
        pass

    f.close()
    ser.close()

Use TAB instead of ;
Use TAB instead of ;
Python
mit
Make-O-Matic/experiments
1a5e589597b101ff4d6c7aa14b747f7143ced8a3
test_hpack_integration.py
test_hpack_integration.py
# -*- coding: utf-8 -*-
"""
This module defines substantial HPACK integration tests. These can take a
very long time to run, so they're outside the main test suite, but they
need to be run before every change to HPACK.
"""
from hyper.http20.hpack import Decoder
from binascii import unhexlify


class TestHPACKDecoderIntegration(object):
    def test_can_decode_a_story(self, story):
        d = Decoder()
        for case in story['cases']:
            d.header_table_size = case['header_table_size']
            decoded_headers = d.decode(unhexlify(case['wire']))

            # The correct headers are a list of dicts, which is annoying.
            correct_headers = {item[0]: item[1]
                               for header in case['headers']
                               for item in header.items()}
            assert correct_headers == decoded_headers

# -*- coding: utf-8 -*-
"""
This module defines substantial HPACK integration tests. These can take a
very long time to run, so they're outside the main test suite, but they
need to be run before every change to HPACK.
"""
from hyper.http20.hpack import Decoder
from binascii import unhexlify


class TestHPACKDecoderIntegration(object):
    def test_can_decode_a_story(self, story):
        d = Decoder()
        for case in story['cases']:
            d.header_table_size = case['header_table_size']
            decoded_headers = d.decode(unhexlify(case['wire']))

            # The correct headers are a list of dicts, which is annoying.
            correct_headers = {(item[0], item[1])
                               for header in case['headers']
                               for item in header.items()}
            assert correct_headers == decoded_headers

Test for sets, not dicts.
Test for sets, not dicts.
Python
mit
lawnmowerlatte/hyper,jdecuyper/hyper,fredthomsen/hyper,irvind/hyper,lawnmowerlatte/hyper,masaori335/hyper,fredthomsen/hyper,Lukasa/hyper,Lukasa/hyper,masaori335/hyper,plucury/hyper,irvind/hyper,plucury/hyper,jdecuyper/hyper
75ad8c1167bae3d63f2b50da8a7cc8d8d64cca4d
tests/test_application.py
tests/test_application.py
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals

import os.path

import pytest

from warehouse.application import Warehouse


def test_basic_instantiation():
    Warehouse({
        "debug": False,
        "database": {
            "url": "postgres:///test_warehouse",
        }
    })


def test_yaml_instantiation():
    Warehouse.from_yaml(
        os.path.abspath(os.path.join(
            os.path.dirname(__file__),
            "test_config.yml",
        )),
    )


def test_cli_instantiation():
    with pytest.raises(SystemExit):
        Warehouse.from_cli(["-h"])


def test_tables_attribute():
    app = Warehouse({
        "debug": False,
        "database": {
            "url": "postgres:///test_warehouse",
        }
    })

    assert app.tables is app.metadata.tables

# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals

import os.path

import pytest

from warehouse.application import Warehouse


def test_basic_instantiation():
    Warehouse({
        "debug": False,
        "database": {
            "url": "postgres:///test_warehouse",
        }
    })


def test_yaml_instantiation():
    Warehouse.from_yaml(
        os.path.abspath(os.path.join(
            os.path.dirname(__file__),
            "test_config.yml",
        )),
    )


def test_cli_instantiation():
    with pytest.raises(SystemExit):
        Warehouse.from_cli(["-h"])

Remove a test for removed features
Remove a test for removed features
Python
apache-2.0
mattrobenolt/warehouse,techtonik/warehouse,techtonik/warehouse,robhudson/warehouse,mattrobenolt/warehouse,robhudson/warehouse,mattrobenolt/warehouse
a15bbbd22d8fa32abd7b10179a3289f1ec396c3a
tests/test_ultrametric.py
tests/test_ultrametric.py
from viridis import tree

from six.moves import range
import pytest


@pytest.fixture
def base_tree():
    t = tree.Ultrametric(list(range(6)))
    t.merge(0, 1, 0.1)
    t.merge(6, 2, 0.2)
    t.merge(3, 4, 0.3)
    t.merge(8, 5, 0.4)
    t.merge(7, 8, 0.5)
    return t


def test_split(base_tree):
    t = base_tree
    t.split(0, 2)
    assert t.node[9]['num_leaves'] == 3
    t.split(0, 4)  # nothing to do
    assert t.node[9]['num_leaves'] == 3


def test_children(base_tree):
    t = base_tree
    assert t.children(6) == [0, 1]

from viridis import tree

from six.moves import range
import pytest


@pytest.fixture
def base_tree():
    t = tree.Ultrametric(list(range(6)))
    t.merge(0, 1, 0.1)
    t.merge(6, 2, 0.2)
    t.merge(3, 4, 0.3)
    t.merge(8, 5, 0.4)
    t.merge(7, 8, 0.5)
    return t


def test_split(base_tree):
    t = base_tree
    t.split(0, 2)
    assert t.node[9]['num_leaves'] == 3
    t.split(0, 4)  # nothing to do
    assert tree.num_leaves(t, 9) == 3


def test_children(base_tree):
    t = base_tree
    assert t.children(6) == [0, 1]

Use num_leaves function in tests
Use num_leaves function in tests
Python
mit
jni/viridis
024b862bdd4ae3bf4c3058ef32b6016b280a4cf6
tests/web/test_request.py
tests/web/test_request.py
import unittest

from performance.web import Request, RequestTypeError, RequestTimeError


class RequestTestCase(unittest.TestCase):
    def setUp(self):
        self.url = 'http://www.google.com'

    def test_constants(self):
        self.assertEqual('get', Request.GET)
        self.assertEqual('post', Request.POST)

    def test_init(self):
        request = Request(url=self.url)
        self.assertEqual(Request.GET, request.type)
        self.assertEqual(self.url, request.url)
        request = Request(url=self.url, type=Request.POST)
        self.assertEqual(Request.POST, request.type)

    def test_do(self):
        request = Request(url=self.url, type=Request.GET)
        request.do()
        self.assertTrue(hasattr(request, 'status_code'))
        request.type = Request.POST
        request.do()
        self.assertTrue(hasattr(request, 'status_code'))

    def test_invalid_type(self):
        type = 'foo_bar'
        request = Request(url=self.url, type=type)
        with self.assertRaises(RequestTypeError) as error:
            request.do()
        self.assertEqual('Invalid request type "%s"' % type,
                         error.exception.__str__())

    def test_response_time(self):
        request = Request(url=self.url, type=Request.GET)
        request.do()
        self.assertEqual(request.finished - request.started,
                         request.get_response_time())

    def test_time_error(self):
        request = Request(url=self.url, type=Request.GET)
        with self.assertRaises(RequestTimeError):
            request.get_response_time()

import unittest

from performance.web import Request, RequestTypeError, RequestTimeError


class RequestTestCase(unittest.TestCase):
    def setUp(self):
        self.host = 'http://www.google.com'

    def test_constants(self):
        self.assertEqual('get', Request.GET)
        self.assertEqual('post', Request.POST)

    def test_init(self):
        request = Request(url=self.host)
        self.assertEqual(Request.GET, request.type)
        self.assertEqual(self.host, request.url)
        request = Request(url=self.host, type=Request.POST)
        self.assertEqual(Request.POST, request.type)

    def test_do(self):
        request = Request(url=self.host, type=Request.GET)
        request.do()

    def test_invalid_type(self):
        type = 'foo_bar'
        request = Request(url=self.host, type=type)
        with self.assertRaises(RequestTypeError) as error:
            request.do()
        self.assertEqual('Invalid request type "%s"' % type,
                         error.exception.__str__())

Remove tests for response_time, update variable names
Remove tests for response_time, update variable names
Python
mit
BakeCode/performance-testing,BakeCode/performance-testing
<commit_before>import unittest from performance.web import Request, RequestTypeError, RequestTimeError class RequestTestCase(unittest.TestCase): def setUp(self): self.url = 'http://www.google.com' def test_constants(self): self.assertEqual('get', Request.GET) self.assertEqual('post', Request.POST) def test_init(self): request = Request(url=self.url) self.assertEqual(Request.GET, request.type) self.assertEqual(self.url, request.url) request = Request(url=self.url, type=Request.POST) self.assertEqual(Request.POST, request.type) def test_do(self): request = Request(url=self.url, type=Request.GET) request.do() self.assertTrue(hasattr(request, 'status_code')) request.type = Request.POST request.do() self.assertTrue(hasattr(request, 'status_code')) def test_invalid_type(self): type = 'foo_bar' request = Request(url=self.url, type=type) with self.assertRaises(RequestTypeError) as error: request.do() self.assertEqual('Invalid request type "%s"' % type, error.exception.__str__()) def test_response_time(self): request = Request(url=self.url, type=Request.GET) request.do() self.assertEqual(request.finished - request.started, request.get_response_time()) def test_time_error(self): request = Request(url=self.url, type=Request.GET) with self.assertRaises(RequestTimeError): request.get_response_time() <commit_msg>Remove tests for response_time, update variable names<commit_after>import unittest from performance.web import Request, RequestTypeError, RequestTimeError class RequestTestCase(unittest.TestCase): def setUp(self): self.host = 'http://www.google.com' def test_constants(self): self.assertEqual('get', Request.GET) self.assertEqual('post', Request.POST) def test_init(self): request = Request(url=self.host) self.assertEqual(Request.GET, request.type) self.assertEqual(self.host, request.url) request = Request(url=self.host, type=Request.POST) self.assertEqual(Request.POST, request.type) def test_do(self): request = Request(url=self.host, type=Request.GET) request.do() def test_invalid_type(self): type = 'foo_bar' request = Request(url=self.host, type=type) with self.assertRaises(RequestTypeError) as error: request.do() self.assertEqual('Invalid request type "%s"' % type, error.exception.__str__())
8b351036f6431bd760565b23d9e887e7d8a73840
mysql_statsd/thread_manager.py
mysql_statsd/thread_manager.py
import Queue import signal import threading import time class ThreadManager(): """Knows how to manage dem threads""" quit = False quitting = False threads = [] def __init__(self, queue=Queue.Queue(), threads=[], config={}): """Program entry point""" # Set up queue self.queue = Queue.Queue() self.config = config self.threads = threads self.register_signal_handlers() def register_signal_handlers(self): # Register signal handler signal.signal(signal.SIGINT, self.signal_handler) signal.signal(signal.SIGTERM, self.signal_handler) def run(self): # Main loop self.start_threads() while not self.quit: time.sleep(1) def start_threads(self): for t in self.threads: t.start() def signal_handler(self, signal, frame): """ Handle signals """ print("Caught CTRL+C / SIGKILL") if not self.quitting: self.quitting = True self.stop_threads() self.quit = True else: print("BE PATIENT!@#~!#!@#$~!`1111") def stop_threads(self): """Stops all threads and waits for them to quit""" print("Stopping threads") for thread in self.threads: thread.stop() while threading.activeCount() > 1: print("Waiting for %s threads" % threading.activeCount()) time.sleep(1) print("All threads stopped")
import Queue import signal import threading import time class ThreadManager(): """Knows how to manage dem threads""" quit = False quitting = False threads = [] def __init__(self, threads=[]): """Program entry point""" self.threads = threads self.register_signal_handlers() def register_signal_handlers(self): # Register signal handler signal.signal(signal.SIGINT, self.signal_handler) signal.signal(signal.SIGTERM, self.signal_handler) def run(self): # Main loop self.start_threads() while not self.quit: time.sleep(1) def start_threads(self): for t in self.threads: t.start() def signal_handler(self, signal, frame): """ Handle signals """ print("Caught CTRL+C / SIGKILL") if not self.quitting: self.quitting = True self.stop_threads() self.quit = True else: print("BE PATIENT!@#~!#!@#$~!`1111") def stop_threads(self): """Stops all threads and waits for them to quit""" print("Stopping threads") for thread in self.threads: thread.stop() while threading.activeCount() > 1: print("Waiting for %s threads" % threading.activeCount()) time.sleep(1) print("All threads stopped")
Remove config handling from threadmanager (was unused)
Remove config handling from threadmanager (was unused)
Python
bsd-3-clause
spilgames/mysql-statsd,medvedik/mysql-statsd,art-spilgames/mysql-statsd,db-art/mysql-statsd,medvedik/mysql-statsd,bnkr/mysql-statsd
import Queue import signal import threading import time class ThreadManager(): """Knows how to manage dem threads""" quit = False quitting = False threads = [] def __init__(self, queue=Queue.Queue(), threads=[], config={}): """Program entry point""" # Set up queue self.queue = Queue.Queue() self.config = config self.threads = threads self.register_signal_handlers() def register_signal_handlers(self): # Register signal handler signal.signal(signal.SIGINT, self.signal_handler) signal.signal(signal.SIGTERM, self.signal_handler) def run(self): # Main loop self.start_threads() while not self.quit: time.sleep(1) def start_threads(self): for t in self.threads: t.start() def signal_handler(self, signal, frame): """ Handle signals """ print("Caught CTRL+C / SIGKILL") if not self.quitting: self.quitting = True self.stop_threads() self.quit = True else: print("BE PATIENT!@#~!#!@#$~!`1111") def stop_threads(self): """Stops all threads and waits for them to quit""" print("Stopping threads") for thread in self.threads: thread.stop() while threading.activeCount() > 1: print("Waiting for %s threads" % threading.activeCount()) time.sleep(1) print("All threads stopped") Remove config handling from threadmanager (was unused)
import Queue import signal import threading import time class ThreadManager(): """Knows how to manage dem threads""" quit = False quitting = False threads = [] def __init__(self, threads=[]): """Program entry point""" self.threads = threads self.register_signal_handlers() def register_signal_handlers(self): # Register signal handler signal.signal(signal.SIGINT, self.signal_handler) signal.signal(signal.SIGTERM, self.signal_handler) def run(self): # Main loop self.start_threads() while not self.quit: time.sleep(1) def start_threads(self): for t in self.threads: t.start() def signal_handler(self, signal, frame): """ Handle signals """ print("Caught CTRL+C / SIGKILL") if not self.quitting: self.quitting = True self.stop_threads() self.quit = True else: print("BE PATIENT!@#~!#!@#$~!`1111") def stop_threads(self): """Stops all threads and waits for them to quit""" print("Stopping threads") for thread in self.threads: thread.stop() while threading.activeCount() > 1: print("Waiting for %s threads" % threading.activeCount()) time.sleep(1) print("All threads stopped")
<commit_before>import Queue import signal import threading import time class ThreadManager(): """Knows how to manage dem threads""" quit = False quitting = False threads = [] def __init__(self, queue=Queue.Queue(), threads=[], config={}): """Program entry point""" # Set up queue self.queue = Queue.Queue() self.config = config self.threads = threads self.register_signal_handlers() def register_signal_handlers(self): # Register signal handler signal.signal(signal.SIGINT, self.signal_handler) signal.signal(signal.SIGTERM, self.signal_handler) def run(self): # Main loop self.start_threads() while not self.quit: time.sleep(1) def start_threads(self): for t in self.threads: t.start() def signal_handler(self, signal, frame): """ Handle signals """ print("Caught CTRL+C / SIGKILL") if not self.quitting: self.quitting = True self.stop_threads() self.quit = True else: print("BE PATIENT!@#~!#!@#$~!`1111") def stop_threads(self): """Stops all threads and waits for them to quit""" print("Stopping threads") for thread in self.threads: thread.stop() while threading.activeCount() > 1: print("Waiting for %s threads" % threading.activeCount()) time.sleep(1) print("All threads stopped") <commit_msg>Remove config handling from threadmanager (was unused)<commit_after>
import Queue import signal import threading import time class ThreadManager(): """Knows how to manage dem threads""" quit = False quitting = False threads = [] def __init__(self, threads=[]): """Program entry point""" self.threads = threads self.register_signal_handlers() def register_signal_handlers(self): # Register signal handler signal.signal(signal.SIGINT, self.signal_handler) signal.signal(signal.SIGTERM, self.signal_handler) def run(self): # Main loop self.start_threads() while not self.quit: time.sleep(1) def start_threads(self): for t in self.threads: t.start() def signal_handler(self, signal, frame): """ Handle signals """ print("Caught CTRL+C / SIGKILL") if not self.quitting: self.quitting = True self.stop_threads() self.quit = True else: print("BE PATIENT!@#~!#!@#$~!`1111") def stop_threads(self): """Stops all threads and waits for them to quit""" print("Stopping threads") for thread in self.threads: thread.stop() while threading.activeCount() > 1: print("Waiting for %s threads" % threading.activeCount()) time.sleep(1) print("All threads stopped")
import Queue import signal import threading import time class ThreadManager(): """Knows how to manage dem threads""" quit = False quitting = False threads = [] def __init__(self, queue=Queue.Queue(), threads=[], config={}): """Program entry point""" # Set up queue self.queue = Queue.Queue() self.config = config self.threads = threads self.register_signal_handlers() def register_signal_handlers(self): # Register signal handler signal.signal(signal.SIGINT, self.signal_handler) signal.signal(signal.SIGTERM, self.signal_handler) def run(self): # Main loop self.start_threads() while not self.quit: time.sleep(1) def start_threads(self): for t in self.threads: t.start() def signal_handler(self, signal, frame): """ Handle signals """ print("Caught CTRL+C / SIGKILL") if not self.quitting: self.quitting = True self.stop_threads() self.quit = True else: print("BE PATIENT!@#~!#!@#$~!`1111") def stop_threads(self): """Stops all threads and waits for them to quit""" print("Stopping threads") for thread in self.threads: thread.stop() while threading.activeCount() > 1: print("Waiting for %s threads" % threading.activeCount()) time.sleep(1) print("All threads stopped") Remove config handling from threadmanager (was unused)import Queue import signal import threading import time class ThreadManager(): """Knows how to manage dem threads""" quit = False quitting = False threads = [] def __init__(self, threads=[]): """Program entry point""" self.threads = threads self.register_signal_handlers() def register_signal_handlers(self): # Register signal handler signal.signal(signal.SIGINT, self.signal_handler) signal.signal(signal.SIGTERM, self.signal_handler) def run(self): # Main loop self.start_threads() while not self.quit: time.sleep(1) def start_threads(self): for t in self.threads: t.start() def signal_handler(self, signal, frame): """ Handle signals """ print("Caught CTRL+C / SIGKILL") if not self.quitting: self.quitting = True self.stop_threads() self.quit = True else: print("BE PATIENT!@#~!#!@#$~!`1111") def stop_threads(self): """Stops all threads and waits for them to quit""" print("Stopping threads") for thread in self.threads: thread.stop() while threading.activeCount() > 1: print("Waiting for %s threads" % threading.activeCount()) time.sleep(1) print("All threads stopped")
<commit_before>import Queue import signal import threading import time class ThreadManager(): """Knows how to manage dem threads""" quit = False quitting = False threads = [] def __init__(self, queue=Queue.Queue(), threads=[], config={}): """Program entry point""" # Set up queue self.queue = Queue.Queue() self.config = config self.threads = threads self.register_signal_handlers() def register_signal_handlers(self): # Register signal handler signal.signal(signal.SIGINT, self.signal_handler) signal.signal(signal.SIGTERM, self.signal_handler) def run(self): # Main loop self.start_threads() while not self.quit: time.sleep(1) def start_threads(self): for t in self.threads: t.start() def signal_handler(self, signal, frame): """ Handle signals """ print("Caught CTRL+C / SIGKILL") if not self.quitting: self.quitting = True self.stop_threads() self.quit = True else: print("BE PATIENT!@#~!#!@#$~!`1111") def stop_threads(self): """Stops all threads and waits for them to quit""" print("Stopping threads") for thread in self.threads: thread.stop() while threading.activeCount() > 1: print("Waiting for %s threads" % threading.activeCount()) time.sleep(1) print("All threads stopped") <commit_msg>Remove config handling from threadmanager (was unused)<commit_after>import Queue import signal import threading import time class ThreadManager(): """Knows how to manage dem threads""" quit = False quitting = False threads = [] def __init__(self, threads=[]): """Program entry point""" self.threads = threads self.register_signal_handlers() def register_signal_handlers(self): # Register signal handler signal.signal(signal.SIGINT, self.signal_handler) signal.signal(signal.SIGTERM, self.signal_handler) def run(self): # Main loop self.start_threads() while not self.quit: time.sleep(1) def start_threads(self): for t in self.threads: t.start() def signal_handler(self, signal, frame): """ Handle signals """ print("Caught CTRL+C / SIGKILL") if not self.quitting: self.quitting = True self.stop_threads() self.quit = True else: print("BE PATIENT!@#~!#!@#$~!`1111") def stop_threads(self): """Stops all threads and waits for them to quit""" print("Stopping threads") for thread in self.threads: thread.stop() while threading.activeCount() > 1: print("Waiting for %s threads" % threading.activeCount()) time.sleep(1) print("All threads stopped")
d37555f71d61aa2f40b6d959833d7dd08bc269d4
tmserver/jtui/__init__.py
tmserver/jtui/__init__.py
import logging from flask import Blueprint, current_app, jsonify from tmserver.error import register_http_error_classes jtui = Blueprint('jtui', __name__) logger = logging.getLogger(__name__) def register_error(cls): """Decorator to register exception classes as errors that can be serialized to JSON""" @jtui.errorhandler(cls) def handle_invalid_usage(error): current_app.logger.error(error) response = jsonify(error=error) response.status_code = error.status_code return response return cls register_http_error_classes(jtui.errorhandler) import tmserver.jtui.api
import logging from flask import Blueprint jtui = Blueprint('jtui', __name__) logger = logging.getLogger(__name__) import tmserver.jtui.api
Remove jtui blueprint specific error handler
Remove jtui blueprint specific error handler
Python
agpl-3.0
TissueMAPS/TmServer
import logging from flask import Blueprint, current_app, jsonify from tmserver.error import register_http_error_classes jtui = Blueprint('jtui', __name__) logger = logging.getLogger(__name__) def register_error(cls): """Decorator to register exception classes as errors that can be serialized to JSON""" @jtui.errorhandler(cls) def handle_invalid_usage(error): current_app.logger.error(error) response = jsonify(error=error) response.status_code = error.status_code return response return cls register_http_error_classes(jtui.errorhandler) import tmserver.jtui.api Remove jtui blueprint specific error handler
import logging from flask import Blueprint jtui = Blueprint('jtui', __name__) logger = logging.getLogger(__name__) import tmserver.jtui.api
<commit_before>import logging from flask import Blueprint, current_app, jsonify from tmserver.error import register_http_error_classes jtui = Blueprint('jtui', __name__) logger = logging.getLogger(__name__) def register_error(cls): """Decorator to register exception classes as errors that can be serialized to JSON""" @jtui.errorhandler(cls) def handle_invalid_usage(error): current_app.logger.error(error) response = jsonify(error=error) response.status_code = error.status_code return response return cls register_http_error_classes(jtui.errorhandler) import tmserver.jtui.api <commit_msg>Remove jtui blueprint specific error handler<commit_after>
import logging from flask import Blueprint jtui = Blueprint('jtui', __name__) logger = logging.getLogger(__name__) import tmserver.jtui.api
import logging from flask import Blueprint, current_app, jsonify from tmserver.error import register_http_error_classes jtui = Blueprint('jtui', __name__) logger = logging.getLogger(__name__) def register_error(cls): """Decorator to register exception classes as errors that can be serialized to JSON""" @jtui.errorhandler(cls) def handle_invalid_usage(error): current_app.logger.error(error) response = jsonify(error=error) response.status_code = error.status_code return response return cls register_http_error_classes(jtui.errorhandler) import tmserver.jtui.api Remove jtui blueprint specific error handlerimport logging from flask import Blueprint jtui = Blueprint('jtui', __name__) logger = logging.getLogger(__name__) import tmserver.jtui.api
<commit_before>import logging from flask import Blueprint, current_app, jsonify from tmserver.error import register_http_error_classes jtui = Blueprint('jtui', __name__) logger = logging.getLogger(__name__) def register_error(cls): """Decorator to register exception classes as errors that can be serialized to JSON""" @jtui.errorhandler(cls) def handle_invalid_usage(error): current_app.logger.error(error) response = jsonify(error=error) response.status_code = error.status_code return response return cls register_http_error_classes(jtui.errorhandler) import tmserver.jtui.api <commit_msg>Remove jtui blueprint specific error handler<commit_after>import logging from flask import Blueprint jtui = Blueprint('jtui', __name__) logger = logging.getLogger(__name__) import tmserver.jtui.api
ee8dc8b97cf70c06504f0572d9ee7e18e273b470
scheduler.py
scheduler.py
# -*- coding: utf-8 -*- # # dp for Tornado # YoungYong Park (youngyongpark@gmail.com) # 2015.03.09 # import os import sys import inspect import importlib path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) sys.path.append(path) if __name__ == '__main__': if len(sys.argv) <= 1: exit(0) module = sys.argv[1] timeout = sys.argv[2] if len(sys.argv) >= 3 else None try: module = importlib.import_module(module) except ImportError as e: print 'The specified scheduler module is invalid. (%s)' % module exit(1) try: runner = getattr(module, 'Scheduler')(timeout) except AttributeError: print 'The specified scheduler module is not implemented. (%s)' % module exit(1) try: runner.run() except Exception as e: import traceback traceback.print_exc()
# -*- coding: utf-8 -*- # # dp for Tornado # YoungYong Park (youngyongpark@gmail.com) # 2015.03.09 # import os import sys import inspect import importlib path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) sys.path.append(path) if __name__ == '__main__': if len(sys.argv) <= 1: exit(0) module = sys.argv[1] timeout = sys.argv[2] if len(sys.argv) >= 3 else None try: module = importlib.import_module(module) except ImportError as e: print('The specified scheduler module is invalid. (%s)' % module) exit(1) try: runner = getattr(module, 'Scheduler')(timeout) except AttributeError: print('The specified scheduler module is not implemented. (%s)' % module) exit(1) try: runner.run() except Exception as e: import traceback traceback.print_exc()
Support python3 and PEP8 rules.
Support python3 and PEP8 rules.
Python
mit
why2pac/dp-tornado,why2pac/dp-tornado,why2pac/dp-tornado,why2pac/dp-tornado
# -*- coding: utf-8 -*- # # dp for Tornado # YoungYong Park (youngyongpark@gmail.com) # 2015.03.09 # import os import sys import inspect import importlib path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) sys.path.append(path) if __name__ == '__main__': if len(sys.argv) <= 1: exit(0) module = sys.argv[1] timeout = sys.argv[2] if len(sys.argv) >= 3 else None try: module = importlib.import_module(module) except ImportError as e: print 'The specified scheduler module is invalid. (%s)' % module exit(1) try: runner = getattr(module, 'Scheduler')(timeout) except AttributeError: print 'The specified scheduler module is not implemented. (%s)' % module exit(1) try: runner.run() except Exception as e: import traceback traceback.print_exc()Support python3 and PEP8 rules.
# -*- coding: utf-8 -*- # # dp for Tornado # YoungYong Park (youngyongpark@gmail.com) # 2015.03.09 # import os import sys import inspect import importlib path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) sys.path.append(path) if __name__ == '__main__': if len(sys.argv) <= 1: exit(0) module = sys.argv[1] timeout = sys.argv[2] if len(sys.argv) >= 3 else None try: module = importlib.import_module(module) except ImportError as e: print('The specified scheduler module is invalid. (%s)' % module) exit(1) try: runner = getattr(module, 'Scheduler')(timeout) except AttributeError: print('The specified scheduler module is not implemented. (%s)' % module) exit(1) try: runner.run() except Exception as e: import traceback traceback.print_exc()
<commit_before># -*- coding: utf-8 -*- # # dp for Tornado # YoungYong Park (youngyongpark@gmail.com) # 2015.03.09 # import os import sys import inspect import importlib path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) sys.path.append(path) if __name__ == '__main__': if len(sys.argv) <= 1: exit(0) module = sys.argv[1] timeout = sys.argv[2] if len(sys.argv) >= 3 else None try: module = importlib.import_module(module) except ImportError as e: print 'The specified scheduler module is invalid. (%s)' % module exit(1) try: runner = getattr(module, 'Scheduler')(timeout) except AttributeError: print 'The specified scheduler module is not implemented. (%s)' % module exit(1) try: runner.run() except Exception as e: import traceback traceback.print_exc()<commit_msg>Support python3 and PEP8 rules.<commit_after>
# -*- coding: utf-8 -*- # # dp for Tornado # YoungYong Park (youngyongpark@gmail.com) # 2015.03.09 # import os import sys import inspect import importlib path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) sys.path.append(path) if __name__ == '__main__': if len(sys.argv) <= 1: exit(0) module = sys.argv[1] timeout = sys.argv[2] if len(sys.argv) >= 3 else None try: module = importlib.import_module(module) except ImportError as e: print('The specified scheduler module is invalid. (%s)' % module) exit(1) try: runner = getattr(module, 'Scheduler')(timeout) except AttributeError: print('The specified scheduler module is not implemented. (%s)' % module) exit(1) try: runner.run() except Exception as e: import traceback traceback.print_exc()
# -*- coding: utf-8 -*- # # dp for Tornado # YoungYong Park (youngyongpark@gmail.com) # 2015.03.09 # import os import sys import inspect import importlib path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) sys.path.append(path) if __name__ == '__main__': if len(sys.argv) <= 1: exit(0) module = sys.argv[1] timeout = sys.argv[2] if len(sys.argv) >= 3 else None try: module = importlib.import_module(module) except ImportError as e: print 'The specified scheduler module is invalid. (%s)' % module exit(1) try: runner = getattr(module, 'Scheduler')(timeout) except AttributeError: print 'The specified scheduler module is not implemented. (%s)' % module exit(1) try: runner.run() except Exception as e: import traceback traceback.print_exc()Support python3 and PEP8 rules.# -*- coding: utf-8 -*- # # dp for Tornado # YoungYong Park (youngyongpark@gmail.com) # 2015.03.09 # import os import sys import inspect import importlib path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) sys.path.append(path) if __name__ == '__main__': if len(sys.argv) <= 1: exit(0) module = sys.argv[1] timeout = sys.argv[2] if len(sys.argv) >= 3 else None try: module = importlib.import_module(module) except ImportError as e: print('The specified scheduler module is invalid. (%s)' % module) exit(1) try: runner = getattr(module, 'Scheduler')(timeout) except AttributeError: print('The specified scheduler module is not implemented. (%s)' % module) exit(1) try: runner.run() except Exception as e: import traceback traceback.print_exc()
<commit_before># -*- coding: utf-8 -*- # # dp for Tornado # YoungYong Park (youngyongpark@gmail.com) # 2015.03.09 # import os import sys import inspect import importlib path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) sys.path.append(path) if __name__ == '__main__': if len(sys.argv) <= 1: exit(0) module = sys.argv[1] timeout = sys.argv[2] if len(sys.argv) >= 3 else None try: module = importlib.import_module(module) except ImportError as e: print 'The specified scheduler module is invalid. (%s)' % module exit(1) try: runner = getattr(module, 'Scheduler')(timeout) except AttributeError: print 'The specified scheduler module is not implemented. (%s)' % module exit(1) try: runner.run() except Exception as e: import traceback traceback.print_exc()<commit_msg>Support python3 and PEP8 rules.<commit_after># -*- coding: utf-8 -*- # # dp for Tornado # YoungYong Park (youngyongpark@gmail.com) # 2015.03.09 # import os import sys import inspect import importlib path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) sys.path.append(path) if __name__ == '__main__': if len(sys.argv) <= 1: exit(0) module = sys.argv[1] timeout = sys.argv[2] if len(sys.argv) >= 3 else None try: module = importlib.import_module(module) except ImportError as e: print('The specified scheduler module is invalid. (%s)' % module) exit(1) try: runner = getattr(module, 'Scheduler')(timeout) except AttributeError: print('The specified scheduler module is not implemented. (%s)' % module) exit(1) try: runner.run() except Exception as e: import traceback traceback.print_exc()
d17dc6285d2eab6662230313ed4ff8fa63ab2994
demetsiiify/blueprints/__init__.py
demetsiiify/blueprints/__init__.py
from .api import api from .auth import auth from .iiif import iiif from .view import view __all__ = [api, auth, iiif, view]
from .api import api from .iiif import iiif from .view import view __all__ = [api, iiif, view]
Fix accidental import from other branch
Fix accidental import from other branch
Python
agpl-3.0
jbaiter/demetsiiify,jbaiter/demetsiiify,jbaiter/demetsiiify
from .api import api from .auth import auth from .iiif import iiif from .view import view __all__ = [api, auth, iiif, view] Fix accidental import from other branch
from .api import api from .iiif import iiif from .view import view __all__ = [api, iiif, view]
<commit_before>from .api import api from .auth import auth from .iiif import iiif from .view import view __all__ = [api, auth, iiif, view] <commit_msg>Fix accidental import from other branch<commit_after>
from .api import api from .iiif import iiif from .view import view __all__ = [api, iiif, view]
from .api import api from .auth import auth from .iiif import iiif from .view import view __all__ = [api, auth, iiif, view] Fix accidental import from other branchfrom .api import api from .iiif import iiif from .view import view __all__ = [api, iiif, view]
<commit_before>from .api import api from .auth import auth from .iiif import iiif from .view import view __all__ = [api, auth, iiif, view] <commit_msg>Fix accidental import from other branch<commit_after>from .api import api from .iiif import iiif from .view import view __all__ = [api, iiif, view]
b167b1d9ff4278d142c1eeffc5ef443b11459cd9
lamson-server/config/settings.py
lamson-server/config/settings.py
# This file contains python variables that configure Lamson for email processing. import logging import pymongo hostnames = ['kasm.clayadavis.net', 'openkasm.com', 'remixmail.com'] # You may add additional parameters such as `username' and `password' if your # relay server requires authentication, `starttls' (boolean) or `ssl' (boolean) # for secure connections. relay_config = {'host': 'localhost', 'port': 8825} #receiver_config = {'host': 'localhost', 'port': 8823} #receiver_config = {'host': 'localhost', 'port': 25} receiver_config = {'host': '0.0.0.0', 'port': 25} handlers = ['app.handlers.kasm'] #router_defaults = {'host': '.+'} hosts = ['localhost', '127.0.0.1'] + hostnames router_defaults = {'host': '|'.join(['(%s)' % x for x in hosts])} template_config = {'dir': 'app', 'module': 'templates'} # the config/boot.py will turn these values into variables set in settings db_client = pymongo.MongoClient() db = db_client.kasm
# This file contains python variables that configure Lamson for email processing. import logging import pymongo hostnames = ['kasm.clayadavis.net', 'openkasm.com', #'remixmail.com', ] # You may add additional parameters such as `username' and `password' if your # relay server requires authentication, `starttls' (boolean) or `ssl' (boolean) # for secure connections. relay_config = {'host': 'localhost', 'port': 8825} #receiver_config = {'host': 'localhost', 'port': 8823} #receiver_config = {'host': 'localhost', 'port': 25} receiver_config = {'host': '0.0.0.0', 'port': 25} handlers = ['app.handlers.kasm'] #router_defaults = {'host': '.+'} hosts = ['localhost', '127.0.0.1'] + hostnames router_defaults = {'host': '|'.join(['(%s)' % x for x in hosts])} template_config = {'dir': 'app', 'module': 'templates'} # the config/boot.py will turn these values into variables set in settings db_client = pymongo.MongoClient() db = db_client.kasm
Remove remixmail from hosts for now
Remove remixmail from hosts for now
Python
mit
clayadavis/OpenKasm,clayadavis/OpenKasm
# This file contains python variables that configure Lamson for email processing. import logging import pymongo hostnames = ['kasm.clayadavis.net', 'openkasm.com', 'remixmail.com'] # You may add additional parameters such as `username' and `password' if your # relay server requires authentication, `starttls' (boolean) or `ssl' (boolean) # for secure connections. relay_config = {'host': 'localhost', 'port': 8825} #receiver_config = {'host': 'localhost', 'port': 8823} #receiver_config = {'host': 'localhost', 'port': 25} receiver_config = {'host': '0.0.0.0', 'port': 25} handlers = ['app.handlers.kasm'] #router_defaults = {'host': '.+'} hosts = ['localhost', '127.0.0.1'] + hostnames router_defaults = {'host': '|'.join(['(%s)' % x for x in hosts])} template_config = {'dir': 'app', 'module': 'templates'} # the config/boot.py will turn these values into variables set in settings db_client = pymongo.MongoClient() db = db_client.kasm Remove remixmail from hosts for now
# This file contains python variables that configure Lamson for email processing. import logging import pymongo hostnames = ['kasm.clayadavis.net', 'openkasm.com', #'remixmail.com', ] # You may add additional parameters such as `username' and `password' if your # relay server requires authentication, `starttls' (boolean) or `ssl' (boolean) # for secure connections. relay_config = {'host': 'localhost', 'port': 8825} #receiver_config = {'host': 'localhost', 'port': 8823} #receiver_config = {'host': 'localhost', 'port': 25} receiver_config = {'host': '0.0.0.0', 'port': 25} handlers = ['app.handlers.kasm'] #router_defaults = {'host': '.+'} hosts = ['localhost', '127.0.0.1'] + hostnames router_defaults = {'host': '|'.join(['(%s)' % x for x in hosts])} template_config = {'dir': 'app', 'module': 'templates'} # the config/boot.py will turn these values into variables set in settings db_client = pymongo.MongoClient() db = db_client.kasm
<commit_before># This file contains python variables that configure Lamson for email processing. import logging import pymongo hostnames = ['kasm.clayadavis.net', 'openkasm.com', 'remixmail.com'] # You may add additional parameters such as `username' and `password' if your # relay server requires authentication, `starttls' (boolean) or `ssl' (boolean) # for secure connections. relay_config = {'host': 'localhost', 'port': 8825} #receiver_config = {'host': 'localhost', 'port': 8823} #receiver_config = {'host': 'localhost', 'port': 25} receiver_config = {'host': '0.0.0.0', 'port': 25} handlers = ['app.handlers.kasm'] #router_defaults = {'host': '.+'} hosts = ['localhost', '127.0.0.1'] + hostnames router_defaults = {'host': '|'.join(['(%s)' % x for x in hosts])} template_config = {'dir': 'app', 'module': 'templates'} # the config/boot.py will turn these values into variables set in settings db_client = pymongo.MongoClient() db = db_client.kasm <commit_msg>Remove remixmail from hosts for now<commit_after>
# This file contains python variables that configure Lamson for email processing. import logging import pymongo hostnames = ['kasm.clayadavis.net', 'openkasm.com', #'remixmail.com', ] # You may add additional parameters such as `username' and `password' if your # relay server requires authentication, `starttls' (boolean) or `ssl' (boolean) # for secure connections. relay_config = {'host': 'localhost', 'port': 8825} #receiver_config = {'host': 'localhost', 'port': 8823} #receiver_config = {'host': 'localhost', 'port': 25} receiver_config = {'host': '0.0.0.0', 'port': 25} handlers = ['app.handlers.kasm'] #router_defaults = {'host': '.+'} hosts = ['localhost', '127.0.0.1'] + hostnames router_defaults = {'host': '|'.join(['(%s)' % x for x in hosts])} template_config = {'dir': 'app', 'module': 'templates'} # the config/boot.py will turn these values into variables set in settings db_client = pymongo.MongoClient() db = db_client.kasm
# This file contains python variables that configure Lamson for email processing. import logging import pymongo hostnames = ['kasm.clayadavis.net', 'openkasm.com', 'remixmail.com'] # You may add additional parameters such as `username' and `password' if your # relay server requires authentication, `starttls' (boolean) or `ssl' (boolean) # for secure connections. relay_config = {'host': 'localhost', 'port': 8825} #receiver_config = {'host': 'localhost', 'port': 8823} #receiver_config = {'host': 'localhost', 'port': 25} receiver_config = {'host': '0.0.0.0', 'port': 25} handlers = ['app.handlers.kasm'] #router_defaults = {'host': '.+'} hosts = ['localhost', '127.0.0.1'] + hostnames router_defaults = {'host': '|'.join(['(%s)' % x for x in hosts])} template_config = {'dir': 'app', 'module': 'templates'} # the config/boot.py will turn these values into variables set in settings db_client = pymongo.MongoClient() db = db_client.kasm Remove remixmail from hosts for now# This file contains python variables that configure Lamson for email processing. import logging import pymongo hostnames = ['kasm.clayadavis.net', 'openkasm.com', #'remixmail.com', ] # You may add additional parameters such as `username' and `password' if your # relay server requires authentication, `starttls' (boolean) or `ssl' (boolean) # for secure connections. relay_config = {'host': 'localhost', 'port': 8825} #receiver_config = {'host': 'localhost', 'port': 8823} #receiver_config = {'host': 'localhost', 'port': 25} receiver_config = {'host': '0.0.0.0', 'port': 25} handlers = ['app.handlers.kasm'] #router_defaults = {'host': '.+'} hosts = ['localhost', '127.0.0.1'] + hostnames router_defaults = {'host': '|'.join(['(%s)' % x for x in hosts])} template_config = {'dir': 'app', 'module': 'templates'} # the config/boot.py will turn these values into variables set in settings db_client = pymongo.MongoClient() db = db_client.kasm
<commit_before># This file contains python variables that configure Lamson for email processing. import logging import pymongo hostnames = ['kasm.clayadavis.net', 'openkasm.com', 'remixmail.com'] # You may add additional parameters such as `username' and `password' if your # relay server requires authentication, `starttls' (boolean) or `ssl' (boolean) # for secure connections. relay_config = {'host': 'localhost', 'port': 8825} #receiver_config = {'host': 'localhost', 'port': 8823} #receiver_config = {'host': 'localhost', 'port': 25} receiver_config = {'host': '0.0.0.0', 'port': 25} handlers = ['app.handlers.kasm'] #router_defaults = {'host': '.+'} hosts = ['localhost', '127.0.0.1'] + hostnames router_defaults = {'host': '|'.join(['(%s)' % x for x in hosts])} template_config = {'dir': 'app', 'module': 'templates'} # the config/boot.py will turn these values into variables set in settings db_client = pymongo.MongoClient() db = db_client.kasm <commit_msg>Remove remixmail from hosts for now<commit_after># This file contains python variables that configure Lamson for email processing. import logging import pymongo hostnames = ['kasm.clayadavis.net', 'openkasm.com', #'remixmail.com', ] # You may add additional parameters such as `username' and `password' if your # relay server requires authentication, `starttls' (boolean) or `ssl' (boolean) # for secure connections. relay_config = {'host': 'localhost', 'port': 8825} #receiver_config = {'host': 'localhost', 'port': 8823} #receiver_config = {'host': 'localhost', 'port': 25} receiver_config = {'host': '0.0.0.0', 'port': 25} handlers = ['app.handlers.kasm'] #router_defaults = {'host': '.+'} hosts = ['localhost', '127.0.0.1'] + hostnames router_defaults = {'host': '|'.join(['(%s)' % x for x in hosts])} template_config = {'dir': 'app', 'module': 'templates'} # the config/boot.py will turn these values into variables set in settings db_client = pymongo.MongoClient() db = db_client.kasm
d866dc0f6a33925e2a8cd910a8b6226f8b7ed50d
pytablereader/__init__.py
pytablereader/__init__.py
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import from tabledata import ( DataError, EmptyDataError, InvalidDataError, InvalidHeaderNameError, InvalidTableNameError, ) from .__version__ import __author__, __copyright__, __email__, __license__, __version__ from ._constant import PatternMatch from ._logger import logger, set_log_level, set_logger from .csv.core import CsvTableFileLoader, CsvTableTextLoader from .error import ( APIError, HTTPError, InvalidFilePathError, LoaderNotFoundError, OpenError, PathError, ProxyError, PypandocImportError, UrlError, ValidationError, ) from .html.core import HtmlTableFileLoader, HtmlTableTextLoader from .json.core import JsonTableDictLoader, JsonTableFileLoader, JsonTableTextLoader from .jsonlines.core import JsonLinesTableFileLoader, JsonLinesTableTextLoader from .loadermanager import TableFileLoader, TableUrlLoader from .ltsv.core import LtsvTableFileLoader, LtsvTableTextLoader from .markdown.core import MarkdownTableFileLoader, MarkdownTableTextLoader from .mediawiki.core import MediaWikiTableFileLoader, MediaWikiTableTextLoader from .spreadsheet.excelloader import ExcelTableFileLoader from .spreadsheet.gsloader import GoogleSheetsTableLoader from .sqlite.core import SqliteFileLoader from .tsv.core import TsvTableFileLoader, TsvTableTextLoader
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import from tabledata import DataError, InvalidHeaderNameError, InvalidTableNameError from .__version__ import __author__, __copyright__, __email__, __license__, __version__ from ._constant import PatternMatch from ._logger import logger, set_log_level, set_logger from .csv.core import CsvTableFileLoader, CsvTableTextLoader from .error import ( APIError, HTTPError, InvalidFilePathError, LoaderNotFoundError, OpenError, PathError, ProxyError, PypandocImportError, UrlError, ValidationError, ) from .html.core import HtmlTableFileLoader, HtmlTableTextLoader from .json.core import JsonTableDictLoader, JsonTableFileLoader, JsonTableTextLoader from .jsonlines.core import JsonLinesTableFileLoader, JsonLinesTableTextLoader from .loadermanager import TableFileLoader, TableUrlLoader from .ltsv.core import LtsvTableFileLoader, LtsvTableTextLoader from .markdown.core import MarkdownTableFileLoader, MarkdownTableTextLoader from .mediawiki.core import MediaWikiTableFileLoader, MediaWikiTableTextLoader from .spreadsheet.excelloader import ExcelTableFileLoader from .spreadsheet.gsloader import GoogleSheetsTableLoader from .sqlite.core import SqliteFileLoader from .tsv.core import TsvTableFileLoader, TsvTableTextLoader
Remove an import that deprecated and unused
Remove an import that deprecated and unused
Python
mit
thombashi/pytablereader,thombashi/pytablereader,thombashi/pytablereader
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import from tabledata import ( DataError, EmptyDataError, InvalidDataError, InvalidHeaderNameError, InvalidTableNameError, ) from .__version__ import __author__, __copyright__, __email__, __license__, __version__ from ._constant import PatternMatch from ._logger import logger, set_log_level, set_logger from .csv.core import CsvTableFileLoader, CsvTableTextLoader from .error import ( APIError, HTTPError, InvalidFilePathError, LoaderNotFoundError, OpenError, PathError, ProxyError, PypandocImportError, UrlError, ValidationError, ) from .html.core import HtmlTableFileLoader, HtmlTableTextLoader from .json.core import JsonTableDictLoader, JsonTableFileLoader, JsonTableTextLoader from .jsonlines.core import JsonLinesTableFileLoader, JsonLinesTableTextLoader from .loadermanager import TableFileLoader, TableUrlLoader from .ltsv.core import LtsvTableFileLoader, LtsvTableTextLoader from .markdown.core import MarkdownTableFileLoader, MarkdownTableTextLoader from .mediawiki.core import MediaWikiTableFileLoader, MediaWikiTableTextLoader from .spreadsheet.excelloader import ExcelTableFileLoader from .spreadsheet.gsloader import GoogleSheetsTableLoader from .sqlite.core import SqliteFileLoader from .tsv.core import TsvTableFileLoader, TsvTableTextLoader Remove an import that deprecated and unused
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import from tabledata import DataError, InvalidHeaderNameError, InvalidTableNameError from .__version__ import __author__, __copyright__, __email__, __license__, __version__ from ._constant import PatternMatch from ._logger import logger, set_log_level, set_logger from .csv.core import CsvTableFileLoader, CsvTableTextLoader from .error import ( APIError, HTTPError, InvalidFilePathError, LoaderNotFoundError, OpenError, PathError, ProxyError, PypandocImportError, UrlError, ValidationError, ) from .html.core import HtmlTableFileLoader, HtmlTableTextLoader from .json.core import JsonTableDictLoader, JsonTableFileLoader, JsonTableTextLoader from .jsonlines.core import JsonLinesTableFileLoader, JsonLinesTableTextLoader from .loadermanager import TableFileLoader, TableUrlLoader from .ltsv.core import LtsvTableFileLoader, LtsvTableTextLoader from .markdown.core import MarkdownTableFileLoader, MarkdownTableTextLoader from .mediawiki.core import MediaWikiTableFileLoader, MediaWikiTableTextLoader from .spreadsheet.excelloader import ExcelTableFileLoader from .spreadsheet.gsloader import GoogleSheetsTableLoader from .sqlite.core import SqliteFileLoader from .tsv.core import TsvTableFileLoader, TsvTableTextLoader
<commit_before># encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import from tabledata import ( DataError, EmptyDataError, InvalidDataError, InvalidHeaderNameError, InvalidTableNameError, ) from .__version__ import __author__, __copyright__, __email__, __license__, __version__ from ._constant import PatternMatch from ._logger import logger, set_log_level, set_logger from .csv.core import CsvTableFileLoader, CsvTableTextLoader from .error import ( APIError, HTTPError, InvalidFilePathError, LoaderNotFoundError, OpenError, PathError, ProxyError, PypandocImportError, UrlError, ValidationError, ) from .html.core import HtmlTableFileLoader, HtmlTableTextLoader from .json.core import JsonTableDictLoader, JsonTableFileLoader, JsonTableTextLoader from .jsonlines.core import JsonLinesTableFileLoader, JsonLinesTableTextLoader from .loadermanager import TableFileLoader, TableUrlLoader from .ltsv.core import LtsvTableFileLoader, LtsvTableTextLoader from .markdown.core import MarkdownTableFileLoader, MarkdownTableTextLoader from .mediawiki.core import MediaWikiTableFileLoader, MediaWikiTableTextLoader from .spreadsheet.excelloader import ExcelTableFileLoader from .spreadsheet.gsloader import GoogleSheetsTableLoader from .sqlite.core import SqliteFileLoader from .tsv.core import TsvTableFileLoader, TsvTableTextLoader <commit_msg>Remove an import that deprecated and unused<commit_after>
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import from tabledata import DataError, InvalidHeaderNameError, InvalidTableNameError from .__version__ import __author__, __copyright__, __email__, __license__, __version__ from ._constant import PatternMatch from ._logger import logger, set_log_level, set_logger from .csv.core import CsvTableFileLoader, CsvTableTextLoader from .error import ( APIError, HTTPError, InvalidFilePathError, LoaderNotFoundError, OpenError, PathError, ProxyError, PypandocImportError, UrlError, ValidationError, ) from .html.core import HtmlTableFileLoader, HtmlTableTextLoader from .json.core import JsonTableDictLoader, JsonTableFileLoader, JsonTableTextLoader from .jsonlines.core import JsonLinesTableFileLoader, JsonLinesTableTextLoader from .loadermanager import TableFileLoader, TableUrlLoader from .ltsv.core import LtsvTableFileLoader, LtsvTableTextLoader from .markdown.core import MarkdownTableFileLoader, MarkdownTableTextLoader from .mediawiki.core import MediaWikiTableFileLoader, MediaWikiTableTextLoader from .spreadsheet.excelloader import ExcelTableFileLoader from .spreadsheet.gsloader import GoogleSheetsTableLoader from .sqlite.core import SqliteFileLoader from .tsv.core import TsvTableFileLoader, TsvTableTextLoader
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import from tabledata import ( DataError, EmptyDataError, InvalidDataError, InvalidHeaderNameError, InvalidTableNameError, ) from .__version__ import __author__, __copyright__, __email__, __license__, __version__ from ._constant import PatternMatch from ._logger import logger, set_log_level, set_logger from .csv.core import CsvTableFileLoader, CsvTableTextLoader from .error import ( APIError, HTTPError, InvalidFilePathError, LoaderNotFoundError, OpenError, PathError, ProxyError, PypandocImportError, UrlError, ValidationError, ) from .html.core import HtmlTableFileLoader, HtmlTableTextLoader from .json.core import JsonTableDictLoader, JsonTableFileLoader, JsonTableTextLoader from .jsonlines.core import JsonLinesTableFileLoader, JsonLinesTableTextLoader from .loadermanager import TableFileLoader, TableUrlLoader from .ltsv.core import LtsvTableFileLoader, LtsvTableTextLoader from .markdown.core import MarkdownTableFileLoader, MarkdownTableTextLoader from .mediawiki.core import MediaWikiTableFileLoader, MediaWikiTableTextLoader from .spreadsheet.excelloader import ExcelTableFileLoader from .spreadsheet.gsloader import GoogleSheetsTableLoader from .sqlite.core import SqliteFileLoader from .tsv.core import TsvTableFileLoader, TsvTableTextLoader Remove an import that deprecated and unused# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import from tabledata import DataError, InvalidHeaderNameError, InvalidTableNameError from .__version__ import __author__, __copyright__, __email__, __license__, __version__ from ._constant import PatternMatch from ._logger import logger, set_log_level, set_logger from .csv.core import CsvTableFileLoader, CsvTableTextLoader from .error import ( APIError, HTTPError, InvalidFilePathError, LoaderNotFoundError, OpenError, PathError, ProxyError, PypandocImportError, UrlError, ValidationError, ) from .html.core import HtmlTableFileLoader, HtmlTableTextLoader from .json.core import JsonTableDictLoader, JsonTableFileLoader, JsonTableTextLoader from .jsonlines.core import JsonLinesTableFileLoader, JsonLinesTableTextLoader from .loadermanager import TableFileLoader, TableUrlLoader from .ltsv.core import LtsvTableFileLoader, LtsvTableTextLoader from .markdown.core import MarkdownTableFileLoader, MarkdownTableTextLoader from .mediawiki.core import MediaWikiTableFileLoader, MediaWikiTableTextLoader from .spreadsheet.excelloader import ExcelTableFileLoader from .spreadsheet.gsloader import GoogleSheetsTableLoader from .sqlite.core import SqliteFileLoader from .tsv.core import TsvTableFileLoader, TsvTableTextLoader
<commit_before># encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import from tabledata import ( DataError, EmptyDataError, InvalidDataError, InvalidHeaderNameError, InvalidTableNameError, ) from .__version__ import __author__, __copyright__, __email__, __license__, __version__ from ._constant import PatternMatch from ._logger import logger, set_log_level, set_logger from .csv.core import CsvTableFileLoader, CsvTableTextLoader from .error import ( APIError, HTTPError, InvalidFilePathError, LoaderNotFoundError, OpenError, PathError, ProxyError, PypandocImportError, UrlError, ValidationError, ) from .html.core import HtmlTableFileLoader, HtmlTableTextLoader from .json.core import JsonTableDictLoader, JsonTableFileLoader, JsonTableTextLoader from .jsonlines.core import JsonLinesTableFileLoader, JsonLinesTableTextLoader from .loadermanager import TableFileLoader, TableUrlLoader from .ltsv.core import LtsvTableFileLoader, LtsvTableTextLoader from .markdown.core import MarkdownTableFileLoader, MarkdownTableTextLoader from .mediawiki.core import MediaWikiTableFileLoader, MediaWikiTableTextLoader from .spreadsheet.excelloader import ExcelTableFileLoader from .spreadsheet.gsloader import GoogleSheetsTableLoader from .sqlite.core import SqliteFileLoader from .tsv.core import TsvTableFileLoader, TsvTableTextLoader <commit_msg>Remove an import that deprecated and unused<commit_after># encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import from tabledata import DataError, InvalidHeaderNameError, InvalidTableNameError from .__version__ import __author__, __copyright__, __email__, __license__, __version__ from ._constant import PatternMatch from ._logger import logger, set_log_level, set_logger from .csv.core import CsvTableFileLoader, CsvTableTextLoader from .error import ( APIError, HTTPError, InvalidFilePathError, LoaderNotFoundError, OpenError, PathError, ProxyError, PypandocImportError, UrlError, ValidationError, ) from .html.core import HtmlTableFileLoader, HtmlTableTextLoader from .json.core import JsonTableDictLoader, JsonTableFileLoader, JsonTableTextLoader from .jsonlines.core import JsonLinesTableFileLoader, JsonLinesTableTextLoader from .loadermanager import TableFileLoader, TableUrlLoader from .ltsv.core import LtsvTableFileLoader, LtsvTableTextLoader from .markdown.core import MarkdownTableFileLoader, MarkdownTableTextLoader from .mediawiki.core import MediaWikiTableFileLoader, MediaWikiTableTextLoader from .spreadsheet.excelloader import ExcelTableFileLoader from .spreadsheet.gsloader import GoogleSheetsTableLoader from .sqlite.core import SqliteFileLoader from .tsv.core import TsvTableFileLoader, TsvTableTextLoader
5eb7558873a62739edcc5c633561c45e9781384e
migrations/versions/1d91199c02c5_.py
migrations/versions/1d91199c02c5_.py
"""empty message Revision ID: 1d91199c02c5 Revises: Create Date: 2017-05-01 23:02:26.034481 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '1d91199c02c5' down_revision = None branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('banned', sa.Boolean(), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'banned') # ### end Alembic commands ###
"""Add user banned column Revision ID: 1d91199c02c5 Revises: Create Date: 2017-05-01 23:02:26.034481 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '1d91199c02c5' down_revision = None branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('banned', sa.Boolean(), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'banned') # ### end Alembic commands ###
Add message to user ban migration
Add message to user ban migration
Python
agpl-3.0
CMU-Senate/tcc-room-reservation,CMU-Senate/tcc-room-reservation,CMU-Senate/tcc-room-reservation
"""empty message Revision ID: 1d91199c02c5 Revises: Create Date: 2017-05-01 23:02:26.034481 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '1d91199c02c5' down_revision = None branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('banned', sa.Boolean(), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'banned') # ### end Alembic commands ### Add message to user ban migration
"""Add user banned column Revision ID: 1d91199c02c5 Revises: Create Date: 2017-05-01 23:02:26.034481 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '1d91199c02c5' down_revision = None branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('banned', sa.Boolean(), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'banned') # ### end Alembic commands ###
<commit_before>"""empty message Revision ID: 1d91199c02c5 Revises: Create Date: 2017-05-01 23:02:26.034481 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '1d91199c02c5' down_revision = None branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('banned', sa.Boolean(), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'banned') # ### end Alembic commands ### <commit_msg>Add message to user ban migration<commit_after>
"""Add user banned column Revision ID: 1d91199c02c5 Revises: Create Date: 2017-05-01 23:02:26.034481 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '1d91199c02c5' down_revision = None branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('banned', sa.Boolean(), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'banned') # ### end Alembic commands ###
"""empty message Revision ID: 1d91199c02c5 Revises: Create Date: 2017-05-01 23:02:26.034481 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '1d91199c02c5' down_revision = None branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('banned', sa.Boolean(), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'banned') # ### end Alembic commands ### Add message to user ban migration"""Add user banned column Revision ID: 1d91199c02c5 Revises: Create Date: 2017-05-01 23:02:26.034481 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '1d91199c02c5' down_revision = None branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('banned', sa.Boolean(), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'banned') # ### end Alembic commands ###
<commit_before>"""empty message Revision ID: 1d91199c02c5 Revises: Create Date: 2017-05-01 23:02:26.034481 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '1d91199c02c5' down_revision = None branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('banned', sa.Boolean(), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'banned') # ### end Alembic commands ### <commit_msg>Add message to user ban migration<commit_after>"""Add user banned column Revision ID: 1d91199c02c5 Revises: Create Date: 2017-05-01 23:02:26.034481 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '1d91199c02c5' down_revision = None branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('banned', sa.Boolean(), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'banned') # ### end Alembic commands ###
374feca1d714a09b23f152308bf3a1878c79a3f9
mysite/profile/management/commands/profile_daily_tasks.py
mysite/profile/management/commands/profile_daily_tasks.py
# This file is part of OpenHatch. # Copyright (C) 2010 OpenHatch, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.core.management.base import BaseCommand import logging import mysite.profile.tasks import mysite.profile.management.commands.send_weekly_emails class Command(BaseCommand): help = "Run this once daily for the OpenHatch profile app." def handle(self, *args, **options): # Garbage collect forwarders root_logger = logging.getLogger('') root_logger.setLevel(logging.WARN) mysite.profile.tasks.GarbageCollectForwarders().run() # Try to send the emails. The command will only actually send emails at # most once per week. command = mysite.profile.management.commands.send_weekly_emails.Command() command.run()
# This file is part of OpenHatch. # Copyright (C) 2010 OpenHatch, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.core.management.base import BaseCommand import logging import mysite.profile.tasks import mysite.profile.management.commands.send_weekly_emails class Command(BaseCommand): help = "Run this once daily for the OpenHatch profile app." def handle(self, *args, **options): # Garbage collect forwarders root_logger = logging.getLogger('') root_logger.setLevel(logging.WARN) mysite.profile.tasks.GarbageCollectForwarders().run() # Try to send the emails. The command will only actually send emails at # most once per week. command = mysite.profile.management.commands.send_weekly_emails.Command() command.handle()
Call the right method on the send_weekly_emails Command.
Call the right method on the send_weekly_emails Command.
Python
agpl-3.0
heeraj123/oh-mainline,nirmeshk/oh-mainline,mzdaniel/oh-mainline,mzdaniel/oh-mainline,ojengwa/oh-mainline,waseem18/oh-mainline,nirmeshk/oh-mainline,SnappleCap/oh-mainline,ehashman/oh-mainline,nirmeshk/oh-mainline,openhatch/oh-mainline,jledbetter/openhatch,SnappleCap/oh-mainline,mzdaniel/oh-mainline,ojengwa/oh-mainline,heeraj123/oh-mainline,mzdaniel/oh-mainline,openhatch/oh-mainline,sudheesh001/oh-mainline,Changaco/oh-mainline,eeshangarg/oh-mainline,mzdaniel/oh-mainline,vipul-sharma20/oh-mainline,onceuponatimeforever/oh-mainline,waseem18/oh-mainline,eeshangarg/oh-mainline,eeshangarg/oh-mainline,campbe13/openhatch,vipul-sharma20/oh-mainline,jledbetter/openhatch,heeraj123/oh-mainline,campbe13/openhatch,eeshangarg/oh-mainline,Changaco/oh-mainline,openhatch/oh-mainline,willingc/oh-mainline,moijes12/oh-mainline,waseem18/oh-mainline,willingc/oh-mainline,ehashman/oh-mainline,onceuponatimeforever/oh-mainline,jledbetter/openhatch,moijes12/oh-mainline,campbe13/openhatch,jledbetter/openhatch,openhatch/oh-mainline,ojengwa/oh-mainline,vipul-sharma20/oh-mainline,campbe13/openhatch,moijes12/oh-mainline,ehashman/oh-mainline,openhatch/oh-mainline,nirmeshk/oh-mainline,onceuponatimeforever/oh-mainline,sudheesh001/oh-mainline,SnappleCap/oh-mainline,waseem18/oh-mainline,moijes12/oh-mainline,Changaco/oh-mainline,nirmeshk/oh-mainline,sudheesh001/oh-mainline,Changaco/oh-mainline,onceuponatimeforever/oh-mainline,SnappleCap/oh-mainline,moijes12/oh-mainline,campbe13/openhatch,SnappleCap/oh-mainline,mzdaniel/oh-mainline,mzdaniel/oh-mainline,ehashman/oh-mainline,heeraj123/oh-mainline,Changaco/oh-mainline,eeshangarg/oh-mainline,ojengwa/oh-mainline,willingc/oh-mainline,ojengwa/oh-mainline,vipul-sharma20/oh-mainline,sudheesh001/oh-mainline,onceuponatimeforever/oh-mainline,willingc/oh-mainline,jledbetter/openhatch,willingc/oh-mainline,sudheesh001/oh-mainline,ehashman/oh-mainline,vipul-sharma20/oh-mainline,waseem18/oh-mainline,heeraj123/oh-mainline
# This file is part of OpenHatch. # Copyright (C) 2010 OpenHatch, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.core.management.base import BaseCommand import logging import mysite.profile.tasks import mysite.profile.management.commands.send_weekly_emails class Command(BaseCommand): help = "Run this once daily for the OpenHatch profile app." def handle(self, *args, **options): # Garbage collect forwarders root_logger = logging.getLogger('') root_logger.setLevel(logging.WARN) mysite.profile.tasks.GarbageCollectForwarders().run() # Try to send the emails. The command will only actually send emails at # most once per week. command = mysite.profile.management.commands.send_weekly_emails.Command() command.run() Call the right method on the send_weekly_emails Command.
# This file is part of OpenHatch. # Copyright (C) 2010 OpenHatch, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.core.management.base import BaseCommand import logging import mysite.profile.tasks import mysite.profile.management.commands.send_weekly_emails class Command(BaseCommand): help = "Run this once daily for the OpenHatch profile app." def handle(self, *args, **options): # Garbage collect forwarders root_logger = logging.getLogger('') root_logger.setLevel(logging.WARN) mysite.profile.tasks.GarbageCollectForwarders().run() # Try to send the emails. The command will only actually send emails at # most once per week. command = mysite.profile.management.commands.send_weekly_emails.Command() command.handle()
<commit_before># This file is part of OpenHatch. # Copyright (C) 2010 OpenHatch, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.core.management.base import BaseCommand import logging import mysite.profile.tasks import mysite.profile.management.commands.send_weekly_emails class Command(BaseCommand): help = "Run this once daily for the OpenHatch profile app." def handle(self, *args, **options): # Garbage collect forwarders root_logger = logging.getLogger('') root_logger.setLevel(logging.WARN) mysite.profile.tasks.GarbageCollectForwarders().run() # Try to send the emails. The command will only actually send emails at # most once per week. command = mysite.profile.management.commands.send_weekly_emails.Command() command.run() <commit_msg>Call the right method on the send_weekly_emails Command.<commit_after>
# This file is part of OpenHatch. # Copyright (C) 2010 OpenHatch, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.core.management.base import BaseCommand import logging import mysite.profile.tasks import mysite.profile.management.commands.send_weekly_emails class Command(BaseCommand): help = "Run this once daily for the OpenHatch profile app." def handle(self, *args, **options): # Garbage collect forwarders root_logger = logging.getLogger('') root_logger.setLevel(logging.WARN) mysite.profile.tasks.GarbageCollectForwarders().run() # Try to send the emails. The command will only actually send emails at # most once per week. command = mysite.profile.management.commands.send_weekly_emails.Command() command.handle()
# This file is part of OpenHatch. # Copyright (C) 2010 OpenHatch, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.core.management.base import BaseCommand import logging import mysite.profile.tasks import mysite.profile.management.commands.send_weekly_emails class Command(BaseCommand): help = "Run this once daily for the OpenHatch profile app." def handle(self, *args, **options): # Garbage collect forwarders root_logger = logging.getLogger('') root_logger.setLevel(logging.WARN) mysite.profile.tasks.GarbageCollectForwarders().run() # Try to send the emails. The command will only actually send emails at # most once per week. command = mysite.profile.management.commands.send_weekly_emails.Command() command.run() Call the right method on the send_weekly_emails Command.# This file is part of OpenHatch. # Copyright (C) 2010 OpenHatch, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.core.management.base import BaseCommand import logging import mysite.profile.tasks import mysite.profile.management.commands.send_weekly_emails class Command(BaseCommand): help = "Run this once daily for the OpenHatch profile app." def handle(self, *args, **options): # Garbage collect forwarders root_logger = logging.getLogger('') root_logger.setLevel(logging.WARN) mysite.profile.tasks.GarbageCollectForwarders().run() # Try to send the emails. The command will only actually send emails at # most once per week. command = mysite.profile.management.commands.send_weekly_emails.Command() command.handle()
<commit_before># This file is part of OpenHatch. # Copyright (C) 2010 OpenHatch, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.core.management.base import BaseCommand import logging import mysite.profile.tasks import mysite.profile.management.commands.send_weekly_emails class Command(BaseCommand): help = "Run this once daily for the OpenHatch profile app." def handle(self, *args, **options): # Garbage collect forwarders root_logger = logging.getLogger('') root_logger.setLevel(logging.WARN) mysite.profile.tasks.GarbageCollectForwarders().run() # Try to send the emails. The command will only actually send emails at # most once per week. command = mysite.profile.management.commands.send_weekly_emails.Command() command.run() <commit_msg>Call the right method on the send_weekly_emails Command.<commit_after># This file is part of OpenHatch. # Copyright (C) 2010 OpenHatch, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.core.management.base import BaseCommand import logging import mysite.profile.tasks import mysite.profile.management.commands.send_weekly_emails class Command(BaseCommand): help = "Run this once daily for the OpenHatch profile app." def handle(self, *args, **options): # Garbage collect forwarders root_logger = logging.getLogger('') root_logger.setLevel(logging.WARN) mysite.profile.tasks.GarbageCollectForwarders().run() # Try to send the emails. The command will only actually send emails at # most once per week. command = mysite.profile.management.commands.send_weekly_emails.Command() command.handle()
699a2d8d97d8c526f9fb269245d5fb593d47d3ca
rasa/nlu/tokenizers/__init__.py
rasa/nlu/tokenizers/__init__.py
class Tokenizer: pass class Token: def __init__(self, text, offset, data=None): self.offset = offset self.text = text self.end = offset + len(text) self.data = data if data else {} def set(self, prop, info): self.data[prop] = info def get(self, prop, default=None): return self.data.get(prop, default)
import functools class Tokenizer: pass @functools.total_ordering class Token: def __init__(self, text, offset, data=None): self.offset = offset self.text = text self.end = offset + len(text) self.data = data if data else {} def set(self, prop, info): self.data[prop] = info def get(self, prop, default=None): return self.data.get(prop, default) def __eq__(self, other): if not isinstance(other, Token): return NotImplemented return self.text == other.text def __lt__(self, other): if not isinstance(other, Token): return NotImplemented return self.text < other.text
Fix to make sanitize_examples() be able to sort tokens
Fix to make sanitize_examples() be able to sort tokens
Python
apache-2.0
RasaHQ/rasa_nlu,RasaHQ/rasa_nlu,RasaHQ/rasa_nlu
class Tokenizer: pass class Token: def __init__(self, text, offset, data=None): self.offset = offset self.text = text self.end = offset + len(text) self.data = data if data else {} def set(self, prop, info): self.data[prop] = info def get(self, prop, default=None): return self.data.get(prop, default) Fix to make sanitize_examples() be able to sort tokens
import functools class Tokenizer: pass @functools.total_ordering class Token: def __init__(self, text, offset, data=None): self.offset = offset self.text = text self.end = offset + len(text) self.data = data if data else {} def set(self, prop, info): self.data[prop] = info def get(self, prop, default=None): return self.data.get(prop, default) def __eq__(self, other): if not isinstance(other, Token): return NotImplemented return self.text == other.text def __lt__(self, other): if not isinstance(other, Token): return NotImplemented return self.text < other.text
<commit_before>class Tokenizer: pass class Token: def __init__(self, text, offset, data=None): self.offset = offset self.text = text self.end = offset + len(text) self.data = data if data else {} def set(self, prop, info): self.data[prop] = info def get(self, prop, default=None): return self.data.get(prop, default) <commit_msg>Fix to make sanitize_examples() be able to sort tokens<commit_after>
import functools class Tokenizer: pass @functools.total_ordering class Token: def __init__(self, text, offset, data=None): self.offset = offset self.text = text self.end = offset + len(text) self.data = data if data else {} def set(self, prop, info): self.data[prop] = info def get(self, prop, default=None): return self.data.get(prop, default) def __eq__(self, other): if not isinstance(other, Token): return NotImplemented return self.text == other.text def __lt__(self, other): if not isinstance(other, Token): return NotImplemented return self.text < other.text
class Tokenizer: pass class Token: def __init__(self, text, offset, data=None): self.offset = offset self.text = text self.end = offset + len(text) self.data = data if data else {} def set(self, prop, info): self.data[prop] = info def get(self, prop, default=None): return self.data.get(prop, default) Fix to make sanitize_examples() be able to sort tokensimport functools class Tokenizer: pass @functools.total_ordering class Token: def __init__(self, text, offset, data=None): self.offset = offset self.text = text self.end = offset + len(text) self.data = data if data else {} def set(self, prop, info): self.data[prop] = info def get(self, prop, default=None): return self.data.get(prop, default) def __eq__(self, other): if not isinstance(other, Token): return NotImplemented return self.text == other.text def __lt__(self, other): if not isinstance(other, Token): return NotImplemented return self.text < other.text
<commit_before>class Tokenizer: pass class Token: def __init__(self, text, offset, data=None): self.offset = offset self.text = text self.end = offset + len(text) self.data = data if data else {} def set(self, prop, info): self.data[prop] = info def get(self, prop, default=None): return self.data.get(prop, default) <commit_msg>Fix to make sanitize_examples() be able to sort tokens<commit_after>import functools class Tokenizer: pass @functools.total_ordering class Token: def __init__(self, text, offset, data=None): self.offset = offset self.text = text self.end = offset + len(text) self.data = data if data else {} def set(self, prop, info): self.data[prop] = info def get(self, prop, default=None): return self.data.get(prop, default) def __eq__(self, other): if not isinstance(other, Token): return NotImplemented return self.text == other.text def __lt__(self, other): if not isinstance(other, Token): return NotImplemented return self.text < other.text
75402ed564a0e37732bceb2b44261630e69bc250
src/smsfly/util.py
src/smsfly/util.py
from functools import wraps from bs4 import BeautifulSoup as bs from errors import XMLError, PhoneError, StartTimeError, EndTimeError, LifetimeError, SpeedError, AlphanameError, TextError, InsufficientFundsError ERROR_MAP = { 'XMLERROR': XMLError, 'ERRPHONES': PhoneError, 'ERRSTARTTIME': StartTimeError, 'ERRENDTIME': EndTimeError, 'ERRLIFETIME': LifetimeError, 'ERRSPEED': SpeedError, 'ERRALFANAME': AlphanameError, 'ERRTEXT': TextError, 'INSUFFICIENTFUNDS': InsufficientFundsError } def parse_xml_response(f): @wraps(f) def wrapper(*args, **kwargs): res_xml = bs(f(*args, **kwargs), features='lxml-xml') state_code = res_xml.message.state['code'] try: raise ERROR_MAP[state_code] except KeyError: return res_xml return wrapper
from functools import wraps from bs4 import BeautifulSoup as bs from .errors import ( XMLError, PhoneError, StartTimeError, EndTimeError, LifetimeError, SpeedError, AlphanameError, TextError, InsufficientFundsError ) ERROR_MAP = { 'XMLERROR': XMLError, 'ERRPHONES': PhoneError, 'ERRSTARTTIME': StartTimeError, 'ERRENDTIME': EndTimeError, 'ERRLIFETIME': LifetimeError, 'ERRSPEED': SpeedError, 'ERRALFANAME': AlphanameError, 'ERRTEXT': TextError, 'INSUFFICIENTFUNDS': InsufficientFundsError } def parse_xml_response(f): @wraps(f) def wrapper(*args, **kwargs): res_xml = bs(f(*args, **kwargs), features='lxml-xml') state_code = res_xml.message.state['code'] try: raise ERROR_MAP[state_code] except KeyError: return res_xml return wrapper
Fix import of error classes
Fix import of error classes
Python
mit
wk-tech/python-smsfly
from functools import wraps from bs4 import BeautifulSoup as bs from errors import XMLError, PhoneError, StartTimeError, EndTimeError, LifetimeError, SpeedError, AlphanameError, TextError, InsufficientFundsError ERROR_MAP = { 'XMLERROR': XMLError, 'ERRPHONES': PhoneError, 'ERRSTARTTIME': StartTimeError, 'ERRENDTIME': EndTimeError, 'ERRLIFETIME': LifetimeError, 'ERRSPEED': SpeedError, 'ERRALFANAME': AlphanameError, 'ERRTEXT': TextError, 'INSUFFICIENTFUNDS': InsufficientFundsError } def parse_xml_response(f): @wraps(f) def wrapper(*args, **kwargs): res_xml = bs(f(*args, **kwargs), features='lxml-xml') state_code = res_xml.message.state['code'] try: raise ERROR_MAP[state_code] except KeyError: return res_xml return wrapper Fix import of error classes
from functools import wraps from bs4 import BeautifulSoup as bs from .errors import ( XMLError, PhoneError, StartTimeError, EndTimeError, LifetimeError, SpeedError, AlphanameError, TextError, InsufficientFundsError ) ERROR_MAP = { 'XMLERROR': XMLError, 'ERRPHONES': PhoneError, 'ERRSTARTTIME': StartTimeError, 'ERRENDTIME': EndTimeError, 'ERRLIFETIME': LifetimeError, 'ERRSPEED': SpeedError, 'ERRALFANAME': AlphanameError, 'ERRTEXT': TextError, 'INSUFFICIENTFUNDS': InsufficientFundsError } def parse_xml_response(f): @wraps(f) def wrapper(*args, **kwargs): res_xml = bs(f(*args, **kwargs), features='lxml-xml') state_code = res_xml.message.state['code'] try: raise ERROR_MAP[state_code] except KeyError: return res_xml return wrapper
<commit_before>from functools import wraps from bs4 import BeautifulSoup as bs from errors import XMLError, PhoneError, StartTimeError, EndTimeError, LifetimeError, SpeedError, AlphanameError, TextError, InsufficientFundsError ERROR_MAP = { 'XMLERROR': XMLError, 'ERRPHONES': PhoneError, 'ERRSTARTTIME': StartTimeError, 'ERRENDTIME': EndTimeError, 'ERRLIFETIME': LifetimeError, 'ERRSPEED': SpeedError, 'ERRALFANAME': AlphanameError, 'ERRTEXT': TextError, 'INSUFFICIENTFUNDS': InsufficientFundsError } def parse_xml_response(f): @wraps(f) def wrapper(*args, **kwargs): res_xml = bs(f(*args, **kwargs), features='lxml-xml') state_code = res_xml.message.state['code'] try: raise ERROR_MAP[state_code] except KeyError: return res_xml return wrapper <commit_msg>Fix import of error classes<commit_after>
from functools import wraps from bs4 import BeautifulSoup as bs from .errors import ( XMLError, PhoneError, StartTimeError, EndTimeError, LifetimeError, SpeedError, AlphanameError, TextError, InsufficientFundsError ) ERROR_MAP = { 'XMLERROR': XMLError, 'ERRPHONES': PhoneError, 'ERRSTARTTIME': StartTimeError, 'ERRENDTIME': EndTimeError, 'ERRLIFETIME': LifetimeError, 'ERRSPEED': SpeedError, 'ERRALFANAME': AlphanameError, 'ERRTEXT': TextError, 'INSUFFICIENTFUNDS': InsufficientFundsError } def parse_xml_response(f): @wraps(f) def wrapper(*args, **kwargs): res_xml = bs(f(*args, **kwargs), features='lxml-xml') state_code = res_xml.message.state['code'] try: raise ERROR_MAP[state_code] except KeyError: return res_xml return wrapper
from functools import wraps from bs4 import BeautifulSoup as bs from errors import XMLError, PhoneError, StartTimeError, EndTimeError, LifetimeError, SpeedError, AlphanameError, TextError, InsufficientFundsError ERROR_MAP = { 'XMLERROR': XMLError, 'ERRPHONES': PhoneError, 'ERRSTARTTIME': StartTimeError, 'ERRENDTIME': EndTimeError, 'ERRLIFETIME': LifetimeError, 'ERRSPEED': SpeedError, 'ERRALFANAME': AlphanameError, 'ERRTEXT': TextError, 'INSUFFICIENTFUNDS': InsufficientFundsError } def parse_xml_response(f): @wraps(f) def wrapper(*args, **kwargs): res_xml = bs(f(*args, **kwargs), features='lxml-xml') state_code = res_xml.message.state['code'] try: raise ERROR_MAP[state_code] except KeyError: return res_xml return wrapper Fix import of error classesfrom functools import wraps from bs4 import BeautifulSoup as bs from .errors import ( XMLError, PhoneError, StartTimeError, EndTimeError, LifetimeError, SpeedError, AlphanameError, TextError, InsufficientFundsError ) ERROR_MAP = { 'XMLERROR': XMLError, 'ERRPHONES': PhoneError, 'ERRSTARTTIME': StartTimeError, 'ERRENDTIME': EndTimeError, 'ERRLIFETIME': LifetimeError, 'ERRSPEED': SpeedError, 'ERRALFANAME': AlphanameError, 'ERRTEXT': TextError, 'INSUFFICIENTFUNDS': InsufficientFundsError } def parse_xml_response(f): @wraps(f) def wrapper(*args, **kwargs): res_xml = bs(f(*args, **kwargs), features='lxml-xml') state_code = res_xml.message.state['code'] try: raise ERROR_MAP[state_code] except KeyError: return res_xml return wrapper
<commit_before>from functools import wraps from bs4 import BeautifulSoup as bs from errors import XMLError, PhoneError, StartTimeError, EndTimeError, LifetimeError, SpeedError, AlphanameError, TextError, InsufficientFundsError ERROR_MAP = { 'XMLERROR': XMLError, 'ERRPHONES': PhoneError, 'ERRSTARTTIME': StartTimeError, 'ERRENDTIME': EndTimeError, 'ERRLIFETIME': LifetimeError, 'ERRSPEED': SpeedError, 'ERRALFANAME': AlphanameError, 'ERRTEXT': TextError, 'INSUFFICIENTFUNDS': InsufficientFundsError } def parse_xml_response(f): @wraps(f) def wrapper(*args, **kwargs): res_xml = bs(f(*args, **kwargs), features='lxml-xml') state_code = res_xml.message.state['code'] try: raise ERROR_MAP[state_code] except KeyError: return res_xml return wrapper <commit_msg>Fix import of error classes<commit_after>from functools import wraps from bs4 import BeautifulSoup as bs from .errors import ( XMLError, PhoneError, StartTimeError, EndTimeError, LifetimeError, SpeedError, AlphanameError, TextError, InsufficientFundsError ) ERROR_MAP = { 'XMLERROR': XMLError, 'ERRPHONES': PhoneError, 'ERRSTARTTIME': StartTimeError, 'ERRENDTIME': EndTimeError, 'ERRLIFETIME': LifetimeError, 'ERRSPEED': SpeedError, 'ERRALFANAME': AlphanameError, 'ERRTEXT': TextError, 'INSUFFICIENTFUNDS': InsufficientFundsError } def parse_xml_response(f): @wraps(f) def wrapper(*args, **kwargs): res_xml = bs(f(*args, **kwargs), features='lxml-xml') state_code = res_xml.message.state['code'] try: raise ERROR_MAP[state_code] except KeyError: return res_xml return wrapper
4eade25270273d4a779034fef3818f15066ee647
src/streaming-programs/car-average-speeds.py
src/streaming-programs/car-average-speeds.py
#!/usr/bin/python import sys import json # Count average speeds for links def main(locationdata_dictionary_file): locationdata = {} with open(locationdata_dictionary_file, "r") as dictionary_file: locationdata = json.load(dictionary_file) for input_line in sys.stdin: data = json.loads(input_line) for recognition in data['recognitions']: try: link_data = (item for item in locationdata['links'] if item['id'] == recognition['id']).next() average_speed = (link_data['dist'] / recognition['tt']) * 3.6 print "CountAverage: " + str(recognition['id']) + "\t" + str(int(average_speed)) except: pass if __name__ == "__main__": main(sys.argv[1])
#!/usr/bin/python import sys import json # Count average speeds for links def main(locationdata_dictionary_file): locationdata = {} with open(locationdata_dictionary_file, "r") as dictionary_file: locationdata = json.load(dictionary_file) for input_line in sys.stdin: data = json.loads(input_line) for recognition in data['recognitions']: try: link_data = (item for item in locationdata['links'] if item['id'] == recognition['id']).next() average_speed = (link_data['dist'] / recognition['tt']) * 3.6 print "LongValueSum:" + str(recognition['id']) + "_speedsum\t" + str(int(average_speed)) print "LongValueSum:" + str(recognition['id']) + "_speedcount\t1" except: pass if __name__ == "__main__": main(sys.argv[1])
Test average speed calculation by tracking sum and count separately
Test average speed calculation by tracking sum and count separately
Python
mit
gofore/aws-emr,gofore/aws-emr,gofore/aws-emr,gofore/aws-emr
#!/usr/bin/python import sys import json # Count average speeds for links def main(locationdata_dictionary_file): locationdata = {} with open(locationdata_dictionary_file, "r") as dictionary_file: locationdata = json.load(dictionary_file) for input_line in sys.stdin: data = json.loads(input_line) for recognition in data['recognitions']: try: link_data = (item for item in locationdata['links'] if item['id'] == recognition['id']).next() average_speed = (link_data['dist'] / recognition['tt']) * 3.6 print "CountAverage: " + str(recognition['id']) + "\t" + str(int(average_speed)) except: pass if __name__ == "__main__": main(sys.argv[1]) Test average speed calculation by tracking sum and count separately
#!/usr/bin/python import sys import json # Count average speeds for links def main(locationdata_dictionary_file): locationdata = {} with open(locationdata_dictionary_file, "r") as dictionary_file: locationdata = json.load(dictionary_file) for input_line in sys.stdin: data = json.loads(input_line) for recognition in data['recognitions']: try: link_data = (item for item in locationdata['links'] if item['id'] == recognition['id']).next() average_speed = (link_data['dist'] / recognition['tt']) * 3.6 print "LongValueSum:" + str(recognition['id']) + "_speedsum\t" + str(int(average_speed)) print "LongValueSum:" + str(recognition['id']) + "_speedcount\t1" except: pass if __name__ == "__main__": main(sys.argv[1])
<commit_before>#!/usr/bin/python import sys import json # Count average speeds for links def main(locationdata_dictionary_file): locationdata = {} with open(locationdata_dictionary_file, "r") as dictionary_file: locationdata = json.load(dictionary_file) for input_line in sys.stdin: data = json.loads(input_line) for recognition in data['recognitions']: try: link_data = (item for item in locationdata['links'] if item['id'] == recognition['id']).next() average_speed = (link_data['dist'] / recognition['tt']) * 3.6 print "CountAverage: " + str(recognition['id']) + "\t" + str(int(average_speed)) except: pass if __name__ == "__main__": main(sys.argv[1]) <commit_msg>Test average speed calculation by tracking sum and count separately<commit_after>
#!/usr/bin/python import sys import json # Count average speeds for links def main(locationdata_dictionary_file): locationdata = {} with open(locationdata_dictionary_file, "r") as dictionary_file: locationdata = json.load(dictionary_file) for input_line in sys.stdin: data = json.loads(input_line) for recognition in data['recognitions']: try: link_data = (item for item in locationdata['links'] if item['id'] == recognition['id']).next() average_speed = (link_data['dist'] / recognition['tt']) * 3.6 print "LongValueSum:" + str(recognition['id']) + "_speedsum\t" + str(int(average_speed)) print "LongValueSum:" + str(recognition['id']) + "_speedcount\t1" except: pass if __name__ == "__main__": main(sys.argv[1])
#!/usr/bin/python import sys import json # Count average speeds for links def main(locationdata_dictionary_file): locationdata = {} with open(locationdata_dictionary_file, "r") as dictionary_file: locationdata = json.load(dictionary_file) for input_line in sys.stdin: data = json.loads(input_line) for recognition in data['recognitions']: try: link_data = (item for item in locationdata['links'] if item['id'] == recognition['id']).next() average_speed = (link_data['dist'] / recognition['tt']) * 3.6 print "CountAverage: " + str(recognition['id']) + "\t" + str(int(average_speed)) except: pass if __name__ == "__main__": main(sys.argv[1]) Test average speed calculation by tracking sum and count separately#!/usr/bin/python import sys import json # Count average speeds for links def main(locationdata_dictionary_file): locationdata = {} with open(locationdata_dictionary_file, "r") as dictionary_file: locationdata = json.load(dictionary_file) for input_line in sys.stdin: data = json.loads(input_line) for recognition in data['recognitions']: try: link_data = (item for item in locationdata['links'] if item['id'] == recognition['id']).next() average_speed = (link_data['dist'] / recognition['tt']) * 3.6 print "LongValueSum:" + str(recognition['id']) + "_speedsum\t" + str(int(average_speed)) print "LongValueSum:" + str(recognition['id']) + "_speedcount\t1" except: pass if __name__ == "__main__": main(sys.argv[1])
<commit_before>#!/usr/bin/python import sys import json # Count average speeds for links def main(locationdata_dictionary_file): locationdata = {} with open(locationdata_dictionary_file, "r") as dictionary_file: locationdata = json.load(dictionary_file) for input_line in sys.stdin: data = json.loads(input_line) for recognition in data['recognitions']: try: link_data = (item for item in locationdata['links'] if item['id'] == recognition['id']).next() average_speed = (link_data['dist'] / recognition['tt']) * 3.6 print "CountAverage: " + str(recognition['id']) + "\t" + str(int(average_speed)) except: pass if __name__ == "__main__": main(sys.argv[1]) <commit_msg>Test average speed calculation by tracking sum and count separately<commit_after>#!/usr/bin/python import sys import json # Count average speeds for links def main(locationdata_dictionary_file): locationdata = {} with open(locationdata_dictionary_file, "r") as dictionary_file: locationdata = json.load(dictionary_file) for input_line in sys.stdin: data = json.loads(input_line) for recognition in data['recognitions']: try: link_data = (item for item in locationdata['links'] if item['id'] == recognition['id']).next() average_speed = (link_data['dist'] / recognition['tt']) * 3.6 print "LongValueSum:" + str(recognition['id']) + "_speedsum\t" + str(int(average_speed)) print "LongValueSum:" + str(recognition['id']) + "_speedcount\t1" except: pass if __name__ == "__main__": main(sys.argv[1])
fa3605047619495be3ddc3de8a3c3579d57deca4
djedi/tests/test_admin.py
djedi/tests/test_admin.py
from django.core.urlresolvers import reverse from djedi.tests.base import ClientTest class PanelTest(ClientTest): def test_admin_panel(self): url = reverse('index') response = self.client.get(url) self.assertIn(u'Djedi Test', response.content) self.assertIn(u'window.DJEDI_NODES', response.content)
from django.core.urlresolvers import reverse from django.utils.encoding import smart_unicode from djedi.tests.base import ClientTest class PanelTest(ClientTest): def test_embed(self): url = reverse('index') response = self.client.get(url) self.assertIn(u'Djedi Test', response.content) self.assertIn(u'window.DJEDI_NODES', response.content) def test_cms(self): url = reverse('admin:djedi:cms') response = self.client.get(url) self.assertIn(u'<title>djedi cms</title>', response.content) def test_django_admin(self): # Patch django admin index from django.contrib.admin.templatetags.log import AdminLogNode _render = AdminLogNode.render AdminLogNode.render = lambda x, y: None url = reverse('admin:index') response = self.client.get(url) cms_url = reverse('admin:djedi:cms') self.assertIn(u'<a href="%s">CMS</a>' % cms_url, smart_unicode(response.content)) # Rollback patch AdminLogNode.render = _render
Add tests for rendering cms admin
Add tests for rendering cms admin
Python
bsd-3-clause
andreif/djedi-cms,andreif/djedi-cms,5monkeys/djedi-cms,andreif/djedi-cms,joar/djedi-cms,joar/djedi-cms,5monkeys/djedi-cms,5monkeys/djedi-cms,joar/djedi-cms
from django.core.urlresolvers import reverse from djedi.tests.base import ClientTest class PanelTest(ClientTest): def test_admin_panel(self): url = reverse('index') response = self.client.get(url) self.assertIn(u'Djedi Test', response.content) self.assertIn(u'window.DJEDI_NODES', response.content) Add tests for rendering cms admin
from django.core.urlresolvers import reverse from django.utils.encoding import smart_unicode from djedi.tests.base import ClientTest class PanelTest(ClientTest): def test_embed(self): url = reverse('index') response = self.client.get(url) self.assertIn(u'Djedi Test', response.content) self.assertIn(u'window.DJEDI_NODES', response.content) def test_cms(self): url = reverse('admin:djedi:cms') response = self.client.get(url) self.assertIn(u'<title>djedi cms</title>', response.content) def test_django_admin(self): # Patch django admin index from django.contrib.admin.templatetags.log import AdminLogNode _render = AdminLogNode.render AdminLogNode.render = lambda x, y: None url = reverse('admin:index') response = self.client.get(url) cms_url = reverse('admin:djedi:cms') self.assertIn(u'<a href="%s">CMS</a>' % cms_url, smart_unicode(response.content)) # Rollback patch AdminLogNode.render = _render
<commit_before>from django.core.urlresolvers import reverse from djedi.tests.base import ClientTest class PanelTest(ClientTest): def test_admin_panel(self): url = reverse('index') response = self.client.get(url) self.assertIn(u'Djedi Test', response.content) self.assertIn(u'window.DJEDI_NODES', response.content) <commit_msg>Add tests for rendering cms admin<commit_after>
from django.core.urlresolvers import reverse from django.utils.encoding import smart_unicode from djedi.tests.base import ClientTest class PanelTest(ClientTest): def test_embed(self): url = reverse('index') response = self.client.get(url) self.assertIn(u'Djedi Test', response.content) self.assertIn(u'window.DJEDI_NODES', response.content) def test_cms(self): url = reverse('admin:djedi:cms') response = self.client.get(url) self.assertIn(u'<title>djedi cms</title>', response.content) def test_django_admin(self): # Patch django admin index from django.contrib.admin.templatetags.log import AdminLogNode _render = AdminLogNode.render AdminLogNode.render = lambda x, y: None url = reverse('admin:index') response = self.client.get(url) cms_url = reverse('admin:djedi:cms') self.assertIn(u'<a href="%s">CMS</a>' % cms_url, smart_unicode(response.content)) # Rollback patch AdminLogNode.render = _render
from django.core.urlresolvers import reverse from djedi.tests.base import ClientTest class PanelTest(ClientTest): def test_admin_panel(self): url = reverse('index') response = self.client.get(url) self.assertIn(u'Djedi Test', response.content) self.assertIn(u'window.DJEDI_NODES', response.content) Add tests for rendering cms adminfrom django.core.urlresolvers import reverse from django.utils.encoding import smart_unicode from djedi.tests.base import ClientTest class PanelTest(ClientTest): def test_embed(self): url = reverse('index') response = self.client.get(url) self.assertIn(u'Djedi Test', response.content) self.assertIn(u'window.DJEDI_NODES', response.content) def test_cms(self): url = reverse('admin:djedi:cms') response = self.client.get(url) self.assertIn(u'<title>djedi cms</title>', response.content) def test_django_admin(self): # Patch django admin index from django.contrib.admin.templatetags.log import AdminLogNode _render = AdminLogNode.render AdminLogNode.render = lambda x, y: None url = reverse('admin:index') response = self.client.get(url) cms_url = reverse('admin:djedi:cms') self.assertIn(u'<a href="%s">CMS</a>' % cms_url, smart_unicode(response.content)) # Rollback patch AdminLogNode.render = _render
<commit_before>from django.core.urlresolvers import reverse from djedi.tests.base import ClientTest class PanelTest(ClientTest): def test_admin_panel(self): url = reverse('index') response = self.client.get(url) self.assertIn(u'Djedi Test', response.content) self.assertIn(u'window.DJEDI_NODES', response.content) <commit_msg>Add tests for rendering cms admin<commit_after>from django.core.urlresolvers import reverse from django.utils.encoding import smart_unicode from djedi.tests.base import ClientTest class PanelTest(ClientTest): def test_embed(self): url = reverse('index') response = self.client.get(url) self.assertIn(u'Djedi Test', response.content) self.assertIn(u'window.DJEDI_NODES', response.content) def test_cms(self): url = reverse('admin:djedi:cms') response = self.client.get(url) self.assertIn(u'<title>djedi cms</title>', response.content) def test_django_admin(self): # Patch django admin index from django.contrib.admin.templatetags.log import AdminLogNode _render = AdminLogNode.render AdminLogNode.render = lambda x, y: None url = reverse('admin:index') response = self.client.get(url) cms_url = reverse('admin:djedi:cms') self.assertIn(u'<a href="%s">CMS</a>' % cms_url, smart_unicode(response.content)) # Rollback patch AdminLogNode.render = _render
583e3bc4ba82191e34b715485650248398afc2b6
src/endpoints/base.py
src/endpoints/base.py
class Base: def __init__(self, client): self.client = client
class Base: def __init__(self, client): self.client = client def build_query(self, query): if query is None: query_string = '' else: query_string = '?' for key, value in query.items(): if not query_string.endswith('?'): query_string = query_string + '&' query_string = query_string + key + '=' + value return query_string
Add method to build a query string to every class
Add method to build a query string to every class
Python
mit
Vaelor/python-mattermost-driver
class Base: def __init__(self, client): self.client = client Add method to build a query string to every class
class Base: def __init__(self, client): self.client = client def build_query(self, query): if query is None: query_string = '' else: query_string = '?' for key, value in query.items(): if not query_string.endswith('?'): query_string = query_string + '&' query_string = query_string + key + '=' + value return query_string
<commit_before>class Base: def __init__(self, client): self.client = client <commit_msg>Add method to build a query string to every class<commit_after>
class Base: def __init__(self, client): self.client = client def build_query(self, query): if query is None: query_string = '' else: query_string = '?' for key, value in query.items(): if not query_string.endswith('?'): query_string = query_string + '&' query_string = query_string + key + '=' + value return query_string
class Base: def __init__(self, client): self.client = client Add method to build a query string to every classclass Base: def __init__(self, client): self.client = client def build_query(self, query): if query is None: query_string = '' else: query_string = '?' for key, value in query.items(): if not query_string.endswith('?'): query_string = query_string + '&' query_string = query_string + key + '=' + value return query_string
<commit_before>class Base: def __init__(self, client): self.client = client <commit_msg>Add method to build a query string to every class<commit_after>class Base: def __init__(self, client): self.client = client def build_query(self, query): if query is None: query_string = '' else: query_string = '?' for key, value in query.items(): if not query_string.endswith('?'): query_string = query_string + '&' query_string = query_string + key + '=' + value return query_string
140e75fb3d96de3784c4ccc7272bbfa0e6b67d39
pinax/invitations/__init__.py
pinax/invitations/__init__.py
import pkg_resources __version__ = pkg_resources.get_distribution("pinax-invitations").version
import pkg_resources __version__ = pkg_resources.get_distribution("pinax-invitations").version default_app_config = "pinax.invitations.apps.AppConfig"
Set default_app_config to point to the correct AppConfig
Set default_app_config to point to the correct AppConfig
Python
unknown
pinax/pinax-invitations,jacobwegner/pinax-invitations,eldarion/kaleo,rizumu/pinax-invitations
import pkg_resources __version__ = pkg_resources.get_distribution("pinax-invitations").version Set default_app_config to point to the correct AppConfig
import pkg_resources __version__ = pkg_resources.get_distribution("pinax-invitations").version default_app_config = "pinax.invitations.apps.AppConfig"
<commit_before>import pkg_resources __version__ = pkg_resources.get_distribution("pinax-invitations").version <commit_msg>Set default_app_config to point to the correct AppConfig<commit_after>
import pkg_resources __version__ = pkg_resources.get_distribution("pinax-invitations").version default_app_config = "pinax.invitations.apps.AppConfig"
import pkg_resources __version__ = pkg_resources.get_distribution("pinax-invitations").version Set default_app_config to point to the correct AppConfigimport pkg_resources __version__ = pkg_resources.get_distribution("pinax-invitations").version default_app_config = "pinax.invitations.apps.AppConfig"
<commit_before>import pkg_resources __version__ = pkg_resources.get_distribution("pinax-invitations").version <commit_msg>Set default_app_config to point to the correct AppConfig<commit_after>import pkg_resources __version__ = pkg_resources.get_distribution("pinax-invitations").version default_app_config = "pinax.invitations.apps.AppConfig"
9fe573614e2f3ca9a6e738afb7f1af84b541092c
invertedindex.py
invertedindex.py
#!/usr/bin/env python3 # -*- coding: utf8 -*- # import class InvertedIndex: def __init__(self): self.index = dict() def add_mail(self, mail): for key in ["simple_terms_body", "complexe_terms_body"]: for terms in mail[key]: if terms in self.index.keys(): self.index[terms].append((mail["name"], mail[key][terms])) else: self.index[terms] = list() self.index[terms].append((mail["name"], mail[key][terms]))
#!/usr/bin/env python3 # -*- coding: utf8 -*- # import class InvertedIndex: def __init__(self): self.index = dict() def add_mail(self, mail): for key in ["simple_terms_body", "complexe_terms_body"]: for terms in mail[key]: if terms in self.index.keys(): self.index[terms].append((mail["name"], mail[key][terms])) else: self.index[terms] = list() self.index[terms].append((mail["name"], mail[key][terms])) def terms(self): for terms in self.index.keys(): yield terms def get_terms(self): return self.index.keys() def file_counter(self, terms): for val in self.index[terms]: yield val def get_file_counter(self, terms): return self.index[terms] def file(self, terms): for val in self.file_counter(terms): yield val[0] def counter(self, terms): for val in self.file_counter(terms): yield val[1]
Add some access functions to the inverted index
Add some access functions to the inverted index
Python
mit
Nedgang/adt_project
#!/usr/bin/env python3 # -*- coding: utf8 -*- # import class InvertedIndex: def __init__(self): self.index = dict() def add_mail(self, mail): for key in ["simple_terms_body", "complexe_terms_body"]: for terms in mail[key]: if terms in self.index.keys(): self.index[terms].append((mail["name"], mail[key][terms])) else: self.index[terms] = list() self.index[terms].append((mail["name"], mail[key][terms])) Add some access functions to the inverted index
#!/usr/bin/env python3 # -*- coding: utf8 -*- # import class InvertedIndex: def __init__(self): self.index = dict() def add_mail(self, mail): for key in ["simple_terms_body", "complexe_terms_body"]: for terms in mail[key]: if terms in self.index.keys(): self.index[terms].append((mail["name"], mail[key][terms])) else: self.index[terms] = list() self.index[terms].append((mail["name"], mail[key][terms])) def terms(self): for terms in self.index.keys(): yield terms def get_terms(self): return self.index.keys() def file_counter(self, terms): for val in self.index[terms]: yield val def get_file_counter(self, terms): return self.index[terms] def file(self, terms): for val in self.file_counter(terms): yield val[0] def counter(self, terms): for val in self.file_counter(terms): yield val[1]
<commit_before>#!/usr/bin/env python3 # -*- coding: utf8 -*- # import class InvertedIndex: def __init__(self): self.index = dict() def add_mail(self, mail): for key in ["simple_terms_body", "complexe_terms_body"]: for terms in mail[key]: if terms in self.index.keys(): self.index[terms].append((mail["name"], mail[key][terms])) else: self.index[terms] = list() self.index[terms].append((mail["name"], mail[key][terms])) <commit_msg>Add some access functions to the inverted index<commit_after>
#!/usr/bin/env python3 # -*- coding: utf8 -*- # import class InvertedIndex: def __init__(self): self.index = dict() def add_mail(self, mail): for key in ["simple_terms_body", "complexe_terms_body"]: for terms in mail[key]: if terms in self.index.keys(): self.index[terms].append((mail["name"], mail[key][terms])) else: self.index[terms] = list() self.index[terms].append((mail["name"], mail[key][terms])) def terms(self): for terms in self.index.keys(): yield terms def get_terms(self): return self.index.keys() def file_counter(self, terms): for val in self.index[terms]: yield val def get_file_counter(self, terms): return self.index[terms] def file(self, terms): for val in self.file_counter(terms): yield val[0] def counter(self, terms): for val in self.file_counter(terms): yield val[1]
#!/usr/bin/env python3 # -*- coding: utf8 -*- # import class InvertedIndex: def __init__(self): self.index = dict() def add_mail(self, mail): for key in ["simple_terms_body", "complexe_terms_body"]: for terms in mail[key]: if terms in self.index.keys(): self.index[terms].append((mail["name"], mail[key][terms])) else: self.index[terms] = list() self.index[terms].append((mail["name"], mail[key][terms])) Add some access function to inverted index#!/usr/bin/env python3 # -*- coding: utf8 -*- # import class InvertedIndex: def __init__(self): self.index = dict() def add_mail(self, mail): for key in ["simple_terms_body", "complexe_terms_body"]: for terms in mail[key]: if terms in self.index.keys(): self.index[terms].append((mail["name"], mail[key][terms])) else: self.index[terms] = list() self.index[terms].append((mail["name"], mail[key][terms])) def terms(self): for terms in self.index.keys(): yield terms def get_terms(self): return self.index.keys() def file_counter(self, terms): for val in self.index[terms]: yield val def get_file_counter(self, terms): return self.index.values() def file(self, terms): for val in file_counter(terms): yield val[0] def counter(self, terms): for val in file_counter(terms): yield val[1]
<commit_before>#!/usr/bin/env python3 # -*- coding: utf8 -*- # import class InvertedIndex: def __init__(self): self.index = dict() def add_mail(self, mail): for key in ["simple_terms_body", "complexe_terms_body"]: for terms in mail[key]: if terms in self.index.keys(): self.index[terms].append((mail["name"], mail[key][terms])) else: self.index[terms] = list() self.index[terms].append((mail["name"], mail[key][terms])) <commit_msg>Add some access function to inverted index<commit_after>#!/usr/bin/env python3 # -*- coding: utf8 -*- # import class InvertedIndex: def __init__(self): self.index = dict() def add_mail(self, mail): for key in ["simple_terms_body", "complexe_terms_body"]: for terms in mail[key]: if terms in self.index.keys(): self.index[terms].append((mail["name"], mail[key][terms])) else: self.index[terms] = list() self.index[terms].append((mail["name"], mail[key][terms])) def terms(self): for terms in self.index.keys(): yield terms def get_terms(self): return self.index.keys() def file_counter(self, terms): for val in self.index[terms]: yield val def get_file_counter(self, terms): return self.index.values() def file(self, terms): for val in file_counter(terms): yield val[0] def counter(self, terms): for val in file_counter(terms): yield val[1]
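A minimal usage sketch of the accessors in the record above — the mail dict shape is inferred from add_mail, and the file name and counts are invented for illustration:
index = InvertedIndex()
index.add_mail({
    "name": "mail_001",
    "simple_terms_body": {"invoice": 3, "urgent": 1},
    "complexe_terms_body": {"invoice due": 1},
})
print(sorted(index.terms()))                # ['invoice', 'invoice due', 'urgent']
print(list(index.file_counter("invoice")))  # [('mail_001', 3)]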
d86fc7edd64eba36bf91fc9ad718bb77c7e2b862
yubico_client/__init__.py
yubico_client/__init__.py
__version__ = (1, 6, 3)
__version__ = (1, 6, 3) __all__ = [ 'Yubico' ] from yubico_client.yubico import Yubico
Allow users to directly do from yubico_client import Yubico.
Allow users to directly do from yubico_client import Yubico.
Python
bsd-3-clause
Kami/python-yubico-client
__version__ = (1, 6, 3) Allow users to directly do from yubico_client import Yubico.
__version__ = (1, 6, 3) __all__ = [ 'Yubico' ] from yubico_client.yubico import Yubico
<commit_before>__version__ = (1, 6, 3) <commit_msg>Allow users to directly do from yubico_client import Yubico.<commit_after>
__version__ = (1, 6, 3) __all__ = [ 'Yubico' ] from yubico_client.yubico import Yubico
__version__ = (1, 6, 3) Allow users to directly do from yubico_client import Yubico.__version__ = (1, 6, 3) __all__ = [ 'Yubico' ] from yubico_client.yubico import Yubico
<commit_before>__version__ = (1, 6, 3) <commit_msg>Allow users to directly do from yubico_client import Yubico.<commit_after>__version__ = (1, 6, 3) __all__ = [ 'Yubico' ] from yubico_client.yubico import Yubico
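The effect of the re-export, sketched; the constructor arguments are placeholders based on the library's documented usage, not taken from this record:
from yubico_client import Yubico               # new, shorter spelling
from yubico_client.yubico import Yubico as _Y  # original location
assert Yubico is _Y
client = Yubico('client_id', 'secret_key')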
bf41f23d71491050dc79a2975b26ffe210b45505
examples/test_contains_selector.py
examples/test_contains_selector.py
from seleniumbase import BaseCase class ContainsSelectorTests(BaseCase): def test_contains_selector(self): self.open("https://xkcd.com/2207/") self.assert_text("Math Work", "#ctitle") self.click('a:contains("Next")') self.assert_text("Drone Fishing", "#ctitle")
from seleniumbase import BaseCase class ContainsSelectorTests(BaseCase): def test_contains_selector(self): self.open("https://xkcd.com/2207/") self.assert_element('div.box div:contains("Math Work")') self.click('a:contains("Next")') self.assert_element('div div:contains("Drone Fishing")')
Update an example that uses the ":contains()" selector
Update an example that uses the ":contains()" selector
Python
mit
seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase
from seleniumbase import BaseCase class ContainsSelectorTests(BaseCase): def test_contains_selector(self): self.open("https://xkcd.com/2207/") self.assert_text("Math Work", "#ctitle") self.click('a:contains("Next")') self.assert_text("Drone Fishing", "#ctitle") Update an example that uses the ":contains()" selector
from seleniumbase import BaseCase class ContainsSelectorTests(BaseCase): def test_contains_selector(self): self.open("https://xkcd.com/2207/") self.assert_element('div.box div:contains("Math Work")') self.click('a:contains("Next")') self.assert_element('div div:contains("Drone Fishing")')
<commit_before>from seleniumbase import BaseCase class ContainsSelectorTests(BaseCase): def test_contains_selector(self): self.open("https://xkcd.com/2207/") self.assert_text("Math Work", "#ctitle") self.click('a:contains("Next")') self.assert_text("Drone Fishing", "#ctitle") <commit_msg>Update an example that uses the ":contains()" selector<commit_after>
from seleniumbase import BaseCase class ContainsSelectorTests(BaseCase): def test_contains_selector(self): self.open("https://xkcd.com/2207/") self.assert_element('div.box div:contains("Math Work")') self.click('a:contains("Next")') self.assert_element('div div:contains("Drone Fishing")')
from seleniumbase import BaseCase class ContainsSelectorTests(BaseCase): def test_contains_selector(self): self.open("https://xkcd.com/2207/") self.assert_text("Math Work", "#ctitle") self.click('a:contains("Next")') self.assert_text("Drone Fishing", "#ctitle") Update an example that uses the ":contains()" selectorfrom seleniumbase import BaseCase class ContainsSelectorTests(BaseCase): def test_contains_selector(self): self.open("https://xkcd.com/2207/") self.assert_element('div.box div:contains("Math Work")') self.click('a:contains("Next")') self.assert_element('div div:contains("Drone Fishing")')
<commit_before>from seleniumbase import BaseCase class ContainsSelectorTests(BaseCase): def test_contains_selector(self): self.open("https://xkcd.com/2207/") self.assert_text("Math Work", "#ctitle") self.click('a:contains("Next")') self.assert_text("Drone Fishing", "#ctitle") <commit_msg>Update an example that uses the ":contains()" selector<commit_after>from seleniumbase import BaseCase class ContainsSelectorTests(BaseCase): def test_contains_selector(self): self.open("https://xkcd.com/2207/") self.assert_element('div.box div:contains("Math Work")') self.click('a:contains("Next")') self.assert_element('div div:contains("Drone Fishing")')
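:contains("...") is jQuery-style sugar rather than standard CSS, so it only works where the framework translates it; a rough XPath equivalent of the first assertion (illustrative only, not SeleniumBase's internal translation) would be:
self.assert_element('//div[contains(@class, "box")]//div[contains(., "Math Work")]')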
517c8978c33d7e9f0251985f2ca39b6f2514ae9e
hack/boxee/skin/boxee/720p/scripts/boxeehack_clear_cache.py
hack/boxee/skin/boxee/720p/scripts/boxeehack_clear_cache.py
import os,mc import xbmc, xbmcgui def fanart_function(): if mc.ShowDialogConfirm("Clear fanart cache", "Are you sure you want to clear the fanart cache?", "Cancel", "OK"): pass def thumbnail_function(): if mc.ShowDialogConfirm("Clear thumbnail cache", "Are you sure you want to clear the thumbnail cache?", "Cancel", "OK"): os.system("rm /data/etc/.fanart") os.system("find /data/.boxee/UserData/profiles/*/Thumbnails/ -name \*.tbn | xargs rm") mc.ShowDialogNotification("Clearing thumbnail cache") if (__name__ == "__main__"): section = sys.argv[1] if section == "fanart": fanart_function() if section == "thumbnail": thumbnail_function()
import os,mc import sys import xbmc, xbmcgui def fanart_function(): if mc.ShowDialogConfirm("Clear fanart cache", "Are you sure you want to clear the fanart cache?", "Cancel", "OK"): pass def thumbnail_function(): if mc.ShowDialogConfirm("Clear thumbnail cache", "Are you sure you want to clear the thumbnail cache?", "Cancel", "OK"): os.system("rm %s" % xbmc.translatePath('special://profile/.fanart')) os.system("find /data/.boxee/UserData/profiles/*/Thumbnails/ -name \*.tbn | xargs rm") mc.ShowDialogNotification("Clearing thumbnail cache") if (__name__ == "__main__"): section = sys.argv[1] if section == "fanart": fanart_function() if section == "thumbnail": thumbnail_function()
Correct clearing of fanart cache
Correct clearing of fanart cache
Python
mit
cigamit/boxeehack,cigamit/boxeehack,vLBrian/boxeehack-cigamit,vLBrian/boxeehack-cigamit
import os,mc import xbmc, xbmcgui def fanart_function(): if mc.ShowDialogConfirm("Clear fanart cache", "Are you sure you want to clear the fanart cache?", "Cancel", "OK"): pass def thumbnail_function(): if mc.ShowDialogConfirm("Clear thumbnail cache", "Are you sure you want to clear the thumbnail cache?", "Cancel", "OK"): os.system("rm /data/etc/.fanart") os.system("find /data/.boxee/UserData/profiles/*/Thumbnails/ -name \*.tbn | xargs rm") mc.ShowDialogNotification("Clearing thumbnail cache") if (__name__ == "__main__"): section = sys.argv[1] if section == "fanart": fanart_function() if section == "thumbnail": thumbnail_function() Correct clearing of fanart cache
import os,mc import sys import xbmc, xbmcgui def fanart_function(): if mc.ShowDialogConfirm("Clear fanart cache", "Are you sure you want to clear the fanart cache?", "Cancel", "OK"): pass def thumbnail_function(): if mc.ShowDialogConfirm("Clear thumbnail cache", "Are you sure you want to clear the thumbnail cache?", "Cancel", "OK"): os.system("rm %s" % xbmc.translatePath('special://profile/.fanart')) os.system("find /data/.boxee/UserData/profiles/*/Thumbnails/ -name \*.tbn | xargs rm") mc.ShowDialogNotification("Clearing thumbnail cache") if (__name__ == "__main__"): section = sys.argv[1] if section == "fanart": fanart_function() if section == "thumbnail": thumbnail_function()
<commit_before>import os,mc import xbmc, xbmcgui def fanart_function(): if mc.ShowDialogConfirm("Clear fanart cache", "Are you sure you want to clear the fanart cache?", "Cancel", "OK"): pass def thumbnail_function(): if mc.ShowDialogConfirm("Clear thumbnail cache", "Are you sure you want to clear the thumbnail cache?", "Cancel", "OK"): os.system("rm /data/etc/.fanart") os.system("find /data/.boxee/UserData/profiles/*/Thumbnails/ -name \*.tbn | xargs rm") mc.ShowDialogNotification("Clearing thumbnail cache") if (__name__ == "__main__"): section = sys.argv[1] if section == "fanart": fanart_function() if section == "thumbnail": thumbnail_function() <commit_msg>Correct clearing of fanart cache<commit_after>
import os,mc import sys import xbmc, xbmcgui def fanart_function(): if mc.ShowDialogConfirm("Clear fanart cache", "Are you sure you want to clear the fanart cache?", "Cancel", "OK"): pass def thumbnail_function(): if mc.ShowDialogConfirm("Clear thumbnail cache", "Are you sure you want to clear the thumbnail cache?", "Cancel", "OK"): os.system("rm %s" % xbmc.translatePath('special://profile/.fanart')) os.system("find /data/.boxee/UserData/profiles/*/Thumbnails/ -name \*.tbn | xargs rm") mc.ShowDialogNotification("Clearing thumbnail cache") if (__name__ == "__main__"): section = sys.argv[1] if section == "fanart": fanart_function() if section == "thumbnail": thumbnail_function()
import os,mc import xbmc, xbmcgui def fanart_function(): if mc.ShowDialogConfirm("Clear fanart cache", "Are you sure you want to clear the fanart cache?", "Cancel", "OK"): pass def thumbnail_function(): if mc.ShowDialogConfirm("Clear thumbnail cache", "Are you sure you want to clear the thumbnail cache?", "Cancel", "OK"): os.system("rm /data/etc/.fanart") os.system("find /data/.boxee/UserData/profiles/*/Thumbnails/ -name \*.tbn | xargs rm") mc.ShowDialogNotification("Clearing thumbnail cache") if (__name__ == "__main__"): section = sys.argv[1] if section == "fanart": fanart_function() if section == "thumbnail": thumbnail_function() Correct clearing of fanart cacheimport os,mc import sys import xbmc, xbmcgui def fanart_function(): if mc.ShowDialogConfirm("Clear fanart cache", "Are you sure you want to clear the fanart cache?", "Cancel", "OK"): pass def thumbnail_function(): if mc.ShowDialogConfirm("Clear thumbnail cache", "Are you sure you want to clear the thumbnail cache?", "Cancel", "OK"): os.system("rm %s" % xbmc.translatePath('special://profile/.fanart')) os.system("find /data/.boxee/UserData/profiles/*/Thumbnails/ -name \*.tbn | xargs rm") mc.ShowDialogNotification("Clearing thumbnail cache") if (__name__ == "__main__"): section = sys.argv[1] if section == "fanart": fanart_function() if section == "thumbnail": thumbnail_function()
<commit_before>import os,mc import xbmc, xbmcgui def fanart_function(): if mc.ShowDialogConfirm("Clear fanart cache", "Are you sure you want to clear the fanart cache?", "Cancel", "OK"): pass def thumbnail_function(): if mc.ShowDialogConfirm("Clear thumbnail cache", "Are you sure you want to clear the thumbnail cache?", "Cancel", "OK"): os.system("rm /data/etc/.fanart") os.system("find /data/.boxee/UserData/profiles/*/Thumbnails/ -name \*.tbn | xargs rm") mc.ShowDialogNotification("Clearing thumbnail cache") if (__name__ == "__main__"): section = sys.argv[1] if section == "fanart": fanart_function() if section == "thumbnail": thumbnail_function() <commit_msg>Correct clearing of fanart cache<commit_after>import os,mc import sys import xbmc, xbmcgui def fanart_function(): if mc.ShowDialogConfirm("Clear fanart cache", "Are you sure you want to clear the fanart cache?", "Cancel", "OK"): pass def thumbnail_function(): if mc.ShowDialogConfirm("Clear thumbnail cache", "Are you sure you want to clear the thumbnail cache?", "Cancel", "OK"): os.system("rm %s" % xbmc.translatePath('special://profile/.fanart')) os.system("find /data/.boxee/UserData/profiles/*/Thumbnails/ -name \*.tbn | xargs rm") mc.ShowDialogNotification("Clearing thumbnail cache") if (__name__ == "__main__"): section = sys.argv[1] if section == "fanart": fanart_function() if section == "thumbnail": thumbnail_function()
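xbmc.translatePath is what maps the special:// scheme onto the active profile's directory, which is why it beats the hard-coded /data/etc path; a sketch that only runs inside the XBMC/Boxee runtime (the expanded result is a guess at the Boxee layout):
import os
import xbmc  # available only inside the XBMC/Boxee runtime

path = xbmc.translatePath('special://profile/.fanart')
# e.g. '/data/.boxee/UserData/profiles/<name>/.fanart' (guessed layout)
os.system("rm %s" % path)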
fa5d6537b94ed06853cddce9afd5e5dfc009384a
statsd/__init__.py
statsd/__init__.py
import socket import os try: from django.conf import settings except ImportError: settings = None from client import StatsClient __all__ = ['StatsClient', 'statsd'] VERSION = (0, 4, 0) __version__ = '.'.join(map(str, VERSION)) if settings: try: host = getattr(settings, 'STATSD_HOST', 'localhost') port = getattr(settings, 'STATSD_PORT', 8125) prefix = getattr(settings, 'STATSD_PREFIX', None) statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, ImportError): try: host = os.environ['STATSD_HOST'] port = os.environ['STATSD_PORT'] prefix = os.environ.get('STATSD_PREFIX') statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, KeyError): statsd = None
import socket import os try: from django.conf import settings except ImportError: settings = None from client import StatsClient __all__ = ['StatsClient', 'statsd'] VERSION = (0, 4, 0) __version__ = '.'.join(map(str, VERSION)) if settings: try: host = getattr(settings, 'STATSD_HOST', 'localhost') port = getattr(settings, 'STATSD_PORT', 8125) prefix = getattr(settings, 'STATSD_PREFIX', None) statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, ImportError): try: host = os.environ['STATSD_HOST'] port = int(os.environ['STATSD_PORT']) prefix = os.environ.get('STATSD_PREFIX') statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, KeyError): statsd = None
Make sure port is an int
Make sure port is an int
Python
mit
smarkets/pystatsd,lyft/pystatsd,jsocol/pystatsd,wujuguang/pystatsd,Khan/pystatsd,lyft/pystatsd,deathowl/pystatsd,Khan/pystatsd
import socket import os try: from django.conf import settings except ImportError: settings = None from client import StatsClient __all__ = ['StatsClient', 'statsd'] VERSION = (0, 4, 0) __version__ = '.'.join(map(str, VERSION)) if settings: try: host = getattr(settings, 'STATSD_HOST', 'localhost') port = getattr(settings, 'STATSD_PORT', 8125) prefix = getattr(settings, 'STATSD_PREFIX', None) statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, ImportError): try: host = os.environ['STATSD_HOST'] port = os.environ['STATSD_PORT'] prefix = os.environ.get('STATSD_PREFIX') statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, KeyError): statsd = None Make sure port is an int
import socket import os try: from django.conf import settings except ImportError: settings = None from client import StatsClient __all__ = ['StatsClient', 'statsd'] VERSION = (0, 4, 0) __version__ = '.'.join(map(str, VERSION)) if settings: try: host = getattr(settings, 'STATSD_HOST', 'localhost') port = getattr(settings, 'STATSD_PORT', 8125) prefix = getattr(settings, 'STATSD_PREFIX', None) statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, ImportError): try: host = os.environ['STATSD_HOST'] port = int(os.environ['STATSD_PORT']) prefix = os.environ.get('STATSD_PREFIX') statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, KeyError): statsd = None
<commit_before>import socket import os try: from django.conf import settings except ImportError: settings = None from client import StatsClient __all__ = ['StatsClient', 'statsd'] VERSION = (0, 4, 0) __version__ = '.'.join(map(str, VERSION)) if settings: try: host = getattr(settings, 'STATSD_HOST', 'localhost') port = getattr(settings, 'STATSD_PORT', 8125) prefix = getattr(settings, 'STATSD_PREFIX', None) statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, ImportError): try: host = os.environ['STATSD_HOST'] port = os.environ['STATSD_PORT'] prefix = os.environ.get('STATSD_PREFIX') statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, KeyError): statsd = None <commit_msg>Make sure port is an int<commit_after>
import socket import os try: from django.conf import settings except ImportError: settings = None from client import StatsClient __all__ = ['StatsClient', 'statsd'] VERSION = (0, 4, 0) __version__ = '.'.join(map(str, VERSION)) if settings: try: host = getattr(settings, 'STATSD_HOST', 'localhost') port = getattr(settings, 'STATSD_PORT', 8125) prefix = getattr(settings, 'STATSD_PREFIX', None) statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, ImportError): try: host = os.environ['STATSD_HOST'] port = int(os.environ['STATSD_PORT']) prefix = os.environ.get('STATSD_PREFIX') statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, KeyError): statsd = None
import socket import os try: from django.conf import settings except ImportError: settings = None from client import StatsClient __all__ = ['StatsClient', 'statsd'] VERSION = (0, 4, 0) __version__ = '.'.join(map(str, VERSION)) if settings: try: host = getattr(settings, 'STATSD_HOST', 'localhost') port = getattr(settings, 'STATSD_PORT', 8125) prefix = getattr(settings, 'STATSD_PREFIX', None) statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, ImportError): try: host = os.environ['STATSD_HOST'] port = os.environ['STATSD_PORT'] prefix = os.environ.get('STATSD_PREFIX') statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, KeyError): statsd = None Make sure port is an intimport socket import os try: from django.conf import settings except ImportError: settings = None from client import StatsClient __all__ = ['StatsClient', 'statsd'] VERSION = (0, 4, 0) __version__ = '.'.join(map(str, VERSION)) if settings: try: host = getattr(settings, 'STATSD_HOST', 'localhost') port = getattr(settings, 'STATSD_PORT', 8125) prefix = getattr(settings, 'STATSD_PREFIX', None) statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, ImportError): try: host = os.environ['STATSD_HOST'] port = int(os.environ['STATSD_PORT']) prefix = os.environ.get('STATSD_PREFIX') statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, KeyError): statsd = None
<commit_before>import socket import os try: from django.conf import settings except ImportError: settings = None from client import StatsClient __all__ = ['StatsClient', 'statsd'] VERSION = (0, 4, 0) __version__ = '.'.join(map(str, VERSION)) if settings: try: host = getattr(settings, 'STATSD_HOST', 'localhost') port = getattr(settings, 'STATSD_PORT', 8125) prefix = getattr(settings, 'STATSD_PREFIX', None) statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, ImportError): try: host = os.environ['STATSD_HOST'] port = os.environ['STATSD_PORT'] prefix = os.environ.get('STATSD_PREFIX') statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, KeyError): statsd = None <commit_msg>Make sure port is an int<commit_after>import socket import os try: from django.conf import settings except ImportError: settings = None from client import StatsClient __all__ = ['StatsClient', 'statsd'] VERSION = (0, 4, 0) __version__ = '.'.join(map(str, VERSION)) if settings: try: host = getattr(settings, 'STATSD_HOST', 'localhost') port = getattr(settings, 'STATSD_PORT', 8125) prefix = getattr(settings, 'STATSD_PREFIX', None) statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, ImportError): try: host = os.environ['STATSD_HOST'] port = int(os.environ['STATSD_PORT']) prefix = os.environ.get('STATSD_PREFIX') statsd = StatsClient(host, port, prefix) except (socket.error, socket.gaierror, KeyError): statsd = None
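Why the cast matters: environment variables are always strings, so without int() the client would be handed '8125' where socket code expects a number. A self-contained check:
import os

os.environ['STATSD_PORT'] = '8125'     # env values are str
port = int(os.environ['STATSD_PORT'])
assert port == 8125 and isinstance(port, int)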
a15813399992fb8bbf951854a218e30e4cddd717
prime-factors/prime_factors.py
prime-factors/prime_factors.py
# File: prime_factors.py # Purpose: Compute the prime factors of a given natural number. # Programmer: Amal Shehu # Course: Exercism # Date: Monday 26 September 2016, 12:05 AM def prime_factors(number): factors = [] if number > 1: for num in range(2, number): if (number % num) == 0: factors.append(num) return factors break else: return True else: return factors
# File: prime_factors.py # Purpose: Compute the prime factors of a given natural number. # Programmer: Amal Shehu # Course: Exercism # Date: Monday 26 September 2016, 12:05 AM def prime_factors(number, n=2, factors=None): if factors is None: factors = [] for num in range(n, number): if (number % num) == 0: factors.append(num) return prime_factors(number // num, num, factors) if number > 1: factors.append(number) return factors
Add two more arguments to the function
Add two more arguments to the function
Python
mit
amalshehu/exercism-python
# File: prime_factors.py # Purpose: Compute the prime factors of a given natural number. # Programmer: Amal Shehu # Course: Exercism # Date: Monday 26 September 2016, 12:05 AM def prime_factors(number): factors = [] if number > 1: for num in range(2, number): if (number % num) == 0: factors.append(num) return factors break else: return True else: return factors Add two more arguments to the function
# File: prime_factors.py # Purpose: Compute the prime factors of a given natural number. # Programmer: Amal Shehu # Course: Exercism # Date: Monday 26 September 2016, 12:05 AM def prime_factors(number, n=2, factors=None): if factors is None: factors = [] for num in range(n, number): if (number % num) == 0: factors.append(num) return prime_factors(number // num, num, factors) if number > 1: factors.append(number) return factors
<commit_before># File: prime_factors.py # Purpose: Compute the prime factors of a given natural number. # Programmer: Amal Shehu # Course: Exercism # Date: Monday 26 September 2016, 12:05 AM def prime_factors(number): factors = [] if number > 1: for num in range(2, number): if (number % num) == 0: factors.append(num) return factors break else: return True else: return factors <commit_msg>Add two more arguments to the function<commit_after>
# File: prime_factors.py # Purpose: Compute the prime factors of a given natural number. # Programmer: Amal Shehu # Course: Exercism # Date: Monday 26 September 2016, 12:05 AM def prime_factors(number, n=2, factors=None): if factors is None: factors = [] for num in range(n, number): if (number % num) == 0: factors.append(num) return prime_factors(number // num, num, factors) if number > 1: factors.append(number) return factors
# File: prime_factors.py # Purpose: Compute the prime factors of a given natural number. # Programmer: Amal Shehu # Course: Exercism # Date: Monday 26 September 2016, 12:05 AM def prime_factors(number): factors = [] if number > 1: for num in range(2, number): if (number % num) == 0: factors.append(num) return factors break else: return True else: return factors Add two more arguments to the function# File: prime_factors.py # Purpose: Compute the prime factors of a given natural number. # Programmer: Amal Shehu # Course: Exercism # Date: Monday 26 September 2016, 12:05 AM def prime_factors(number, n=2, factors=None): if factors is None: factors = [] for num in range(n, number): if (number % num) == 0: factors.append(num) return prime_factors(number // num, num, factors) if number > 1: factors.append(number) return factors
<commit_before># File: prime_factors.py # Purpose: Compute the prime factors of a given natural number. # Programmer: Amal Shehu # Course: Exercism # Date: Monday 26 September 2016, 12:05 AM def prime_factors(number): factors = [] if number > 1: for num in range(2, number): if (number % num) == 0: factors.append(num) return factors break else: return True else: return factors <commit_msg>Add two more arguments to the function<commit_after># File: prime_factors.py # Purpose: Compute the prime factors of a given natural number. # Programmer: Amal Shehu # Course: Exercism # Date: Monday 26 September 2016, 12:05 AM def prime_factors(number, n=2, factors=None): if factors is None: factors = [] for num in range(n, number): if (number % num) == 0: factors.append(num) return prime_factors(number // num, num, factors) if number > 1: factors.append(number) return factors
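A worked trace of the recursive version (assumes the prime_factors above is in scope; pure illustration):
# prime_factors(12): 12 % 2 == 0 -> prime_factors(6, 2, [2])
# prime_factors(6):   6 % 2 == 0 -> prime_factors(3, 2, [2, 2])
# prime_factors(3):  no divisor in range(2, 3), and 3 itself is prime -> [2, 2, 3]
assert prime_factors(12) == [2, 2, 3]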
2d2c368f9ece2235a4c972197e6c0406031fbcb3
versions.py
versions.py
#!/usr/bin/env python import os import warnings warnings.filterwarnings('ignore', category=DeprecationWarning) def test_for_version(filename): stdin, stdout = os.popen4('%s --version' % filename, 'r') response = stdout.read() return '.'.join(response.strip().split(' ')[1].split('.')[:-1]) versions = ['python', 'python2.4', 'python2.5', 'python2.6'] valid = {} for filename in versions: version = test_for_version(filename) if version not in valid: valid[version] = filename # Prefer the latest version of python output = [] if '2.6' in valid: output.append(valid['2.6']) for version in valid.keys(): if valid[version] not in output: output.append(valid[version]) print ' '.join(output)
#!/usr/bin/env python import os import warnings warnings.filterwarnings('ignore', category=DeprecationWarning) def test_for_version(filename): stdin, stdout = os.popen4('%s -V' % filename, 'r') response = stdout.read() return '.'.join(response.strip().split(' ')[1].split('.')[:-1]) versions = ['python', 'python2.4', 'python2.5', 'python2.6'] valid = {} for filename in versions: version = test_for_version(filename) if version not in valid: valid[version] = filename # Prefer the latest version of python output = [] if '2.6' in valid: output.append(valid['2.6']) for version in valid.keys(): if valid[version] not in output: output.append(valid[version]) print ' '.join(output)
Change to use short flag for 2.4
Change to use short flag for 2.4
Python
mpl-2.0
fkarb/pika-python3,vrtsystems/pika,renshawbay/pika-python3,benjamin9999/pika,shinji-s/pika,zixiliuyue/pika,skftn/pika,knowsis/pika,jstnlef/pika,hugoxia/pika,Zephor5/pika,pika/pika,vitaly-krugl/pika,reddec/pika,Tarsbot/pika
#!/usr/bin/env python import os import warnings warnings.filterwarnings('ignore', category=DeprecationWarning) def test_for_version(filename): stdin, stdout = os.popen4('%s --version' % filename, 'r') response = stdout.read() return '.'.join(response.strip().split(' ')[1].split('.')[:-1]) versions = ['python', 'python2.4', 'python2.5', 'python2.6'] valid = {} for filename in versions: version = test_for_version(filename) if version not in valid: valid[version] = filename # Prefer the latest version of python output = [] if '2.6' in valid: output.append(valid['2.6']) for version in valid.keys(): if valid[version] not in output: output.append(valid[version]) print ' '.join(output) Change to use short flag for 2.4
#!/usr/bin/env python import os import warnings warnings.filterwarnings('ignore', category=DeprecationWarning) def test_for_version(filename): stdin, stdout = os.popen4('%s -V' % filename, 'r') response = stdout.read() return '.'.join(response.strip().split(' ')[1].split('.')[:-1]) versions = ['python', 'python2.4', 'python2.5', 'python2.6'] valid = {} for filename in versions: version = test_for_version(filename) if version not in valid: valid[version] = filename # Prefer the latest version of python output = [] if '2.6' in valid: output.append(valid['2.6']) for version in valid.keys(): if valid[version] not in output: output.append(valid[version]) print ' '.join(output)
<commit_before>#!/usr/bin/env python import os import warnings warnings.filterwarnings('ignore', category=DeprecationWarning) def test_for_version(filename): stdin, stdout = os.popen4('%s --version' % filename, 'r') response = stdout.read() return '.'.join(response.strip().split(' ')[1].split('.')[:-1]) versions = ['python', 'python2.4', 'python2.5', 'python2.6'] valid = {} for filename in versions: version = test_for_version(filename) if version not in valid: valid[version] = filename # Prefer the latest version of python output = [] if '2.6' in valid: output.append(valid['2.6']) for version in valid.keys(): if valid[version] not in output: output.append(valid[version]) print ' '.join(output) <commit_msg>Change to use short flag for 2.4<commit_after>
#!/usr/bin/env python import os import warnings warnings.filterwarnings('ignore', category=DeprecationWarning) def test_for_version(filename): stdin, stdout = os.popen4('%s -V' % filename, 'r') response = stdout.read() return '.'.join(response.strip().split(' ')[1].split('.')[:-1]) versions = ['python', 'python2.4', 'python2.5', 'python2.6'] valid = {} for filename in versions: version = test_for_version(filename) if version not in valid: valid[version] = filename # Prefer the latest version of python output = [] if '2.6' in valid: output.append(valid['2.6']) for version in valid.keys(): if valid[version] not in output: output.append(valid[version]) print ' '.join(output)
#!/usr/bin/env python import os import warnings warnings.filterwarnings('ignore', category=DeprecationWarning) def test_for_version(filename): stdin, stdout = os.popen4('%s --version' % filename, 'r') response = stdout.read() return '.'.join(response.strip().split(' ')[1].split('.')[:-1]) versions = ['python', 'python2.4', 'python2.5', 'python2.6'] valid = {} for filename in versions: version = test_for_version(filename) if version not in valid: valid[version] = filename # Prefer the latest version of python output = [] if '2.6' in valid: output.append(valid['2.6']) for version in valid.keys(): if valid[version] not in output: output.append(valid[version]) print ' '.join(output) Change to use short flag for 2.4#!/usr/bin/env python import os import warnings warnings.filterwarnings('ignore', category=DeprecationWarning) def test_for_version(filename): stdin, stdout = os.popen4('%s -V' % filename, 'r') response = stdout.read() return '.'.join(response.strip().split(' ')[1].split('.')[:-1]) versions = ['python', 'python2.4', 'python2.5', 'python2.6'] valid = {} for filename in versions: version = test_for_version(filename) if version not in valid: valid[version] = filename # Prefer the latest version of python output = [] if '2.6' in valid: output.append(valid['2.6']) for version in valid.keys(): if valid[version] not in output: output.append(valid[version]) print ' '.join(output)
<commit_before>#!/usr/bin/env python import os import warnings warnings.filterwarnings('ignore', category=DeprecationWarning) def test_for_version(filename): stdin, stdout = os.popen4('%s --version' % filename, 'r') response = stdout.read() return '.'.join(response.strip().split(' ')[1].split('.')[:-1]) versions = ['python', 'python2.4', 'python2.5', 'python2.6'] valid = {} for filename in versions: version = test_for_version(filename) if version not in valid: valid[version] = filename # Prefer the latest version of python output = [] if '2.6' in valid: output.append(valid['2.6']) for version in valid.keys(): if valid[version] not in output: output.append(valid[version]) print ' '.join(output) <commit_msg>Change to use short flag for 2.4<commit_after>#!/usr/bin/env python import os import warnings warnings.filterwarnings('ignore', category=DeprecationWarning) def test_for_version(filename): stdin, stdout = os.popen4('%s -V' % filename, 'r') response = stdout.read() return '.'.join(response.strip().split(' ')[1].split('.')[:-1]) versions = ['python', 'python2.4', 'python2.5', 'python2.6'] valid = {} for filename in versions: version = test_for_version(filename) if version not in valid: valid[version] = filename # Prefer the latest version of python output = [] if '2.6' in valid: output.append(valid['2.6']) for version in valid.keys(): if valid[version] not in output: output.append(valid[version]) print ' '.join(output)
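Why '-V' plus popen4 works here: very old CPython releases only understood the short flag, and the interpreter historically printed its version banner to stderr, which os.popen4 merges into the single stream being read. Parsing that banner, stand-alone:
response = 'Python 2.4.6'  # example of the merged popen4 output
version = '.'.join(response.strip().split(' ')[1].split('.')[:-1])
assert version == '2.4'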
339622ea314656c1055d08d863ceb1bb0a82faf1
mne/beamformer/__init__.py
mne/beamformer/__init__.py
"""Beamformers for source localization """ from ._lcmv import lcmv, lcmv_epochs, lcmv_raw from ._dics import dics_epochs
"""Beamformers for source localization """ from ._lcmv import lcmv, lcmv_epochs, lcmv_raw from ._dics import dics, dics_epochs
Add dics to init for beamformer module
Add dics to init for beamformer module
Python
bsd-3-clause
effigies/mne-python,jmontoyam/mne-python,ARudiuk/mne-python,bloyl/mne-python,leggitta/mne-python,wmvanvliet/mne-python,mne-tools/mne-python,drammock/mne-python,mne-tools/mne-python,Teekuningas/mne-python,andyh616/mne-python,effigies/mne-python,wronk/mne-python,drammock/mne-python,teonlamont/mne-python,yousrabk/mne-python,jniediek/mne-python,ARudiuk/mne-python,aestrivex/mne-python,pravsripad/mne-python,pravsripad/mne-python,drammock/mne-python,dgwakeman/mne-python,agramfort/mne-python,wmvanvliet/mne-python,rkmaddox/mne-python,alexandrebarachant/mne-python,matthew-tucker/mne-python,olafhauk/mne-python,wronk/mne-python,leggitta/mne-python,rkmaddox/mne-python,pravsripad/mne-python,Odingod/mne-python,matthew-tucker/mne-python,trachelr/mne-python,antiface/mne-python,trachelr/mne-python,kambysese/mne-python,alexandrebarachant/mne-python,Odingod/mne-python,jniediek/mne-python,nicproulx/mne-python,dgwakeman/mne-python,larsoner/mne-python,jaeilepp/mne-python,antiface/mne-python,yousrabk/mne-python,cmoutard/mne-python,cjayb/mne-python,andyh616/mne-python,adykstra/mne-python,wmvanvliet/mne-python,cmoutard/mne-python,kingjr/mne-python,kambysese/mne-python,bloyl/mne-python,teonlamont/mne-python,aestrivex/mne-python,Teekuningas/mne-python,larsoner/mne-python,lorenzo-desantis/mne-python,adykstra/mne-python,mne-tools/mne-python,Eric89GXL/mne-python,kingjr/mne-python,agramfort/mne-python,Eric89GXL/mne-python,nicproulx/mne-python,olafhauk/mne-python,kingjr/mne-python,dimkal/mne-python,larsoner/mne-python,lorenzo-desantis/mne-python,dimkal/mne-python,jaeilepp/mne-python,Teekuningas/mne-python,jmontoyam/mne-python,olafhauk/mne-python,cjayb/mne-python
"""Beamformers for source localization """ from ._lcmv import lcmv, lcmv_epochs, lcmv_raw from ._dics import dics_epochs Add dics to init for beamformer module
"""Beamformers for source localization """ from ._lcmv import lcmv, lcmv_epochs, lcmv_raw from ._dics import dics, dics_epochs
<commit_before>"""Beamformers for source localization """ from ._lcmv import lcmv, lcmv_epochs, lcmv_raw from ._dics import dics_epochs <commit_msg>Add dics to init for beamformer module<commit_after>
"""Beamformers for source localization """ from ._lcmv import lcmv, lcmv_epochs, lcmv_raw from ._dics import dics, dics_epochs
"""Beamformers for source localization """ from ._lcmv import lcmv, lcmv_epochs, lcmv_raw from ._dics import dics_epochs Add dics to init for beamformer module"""Beamformers for source localization """ from ._lcmv import lcmv, lcmv_epochs, lcmv_raw from ._dics import dics, dics_epochs
<commit_before>"""Beamformers for source localization """ from ._lcmv import lcmv, lcmv_epochs, lcmv_raw from ._dics import dics_epochs <commit_msg>Add dics to init for beamformer module<commit_after>"""Beamformers for source localization """ from ._lcmv import lcmv, lcmv_epochs, lcmv_raw from ._dics import dics, dics_epochs
9311cbe8ed7a434adb46340640895b48e8cc4027
examples/pi-montecarlo/pi_distarray.py
examples/pi-montecarlo/pi_distarray.py
""" Estimate pi using a Monte Carlo method with distarray. Usage: $ python pi_distarray.py <number of points> """ import sys from distarray.client import RandomModule, Context from util import timer context = Context() random = RandomModule(context) @timer def calc_pi(n): """Estimate pi using distributed NumPy arrays.""" x = random.rand((n,)) y = random.rand((n,)) r = context.hypot(x, y) return 4 * float((r < 1.).sum())/n if __name__ == '__main__': N = int(sys.argv[1]) result, time = calc_pi(N) print('time : %3.4g\nresult: %.7f' % (time, result)) context.view.client.purge_everything()
""" Estimate pi using a Monte Carlo method with distarray. Usage: $ python pi_distarray.py <number of points> """ import sys from distarray.random import Random from distarray.client import Context from util import timer context = Context() random = Random(context) @timer def calc_pi(n): """Estimate pi using distributed NumPy arrays.""" x = random.rand((n,)) y = random.rand((n,)) r = context.hypot(x, y) return 4 * float((r < 1.).sum())/n if __name__ == '__main__': N = int(sys.argv[1]) result, time = calc_pi(N) print('time : %3.4g\nresult: %.7f' % (time, result)) context.view.client.purge_everything()
Change to reflect recent API changes.
Change to reflect recent API changes.
Python
bsd-3-clause
RaoUmer/distarray,enthought/distarray,RaoUmer/distarray,enthought/distarray
""" Estimate pi using a Monte Carlo method with distarray. Usage: $ python pi_distarray.py <number of points> """ import sys from distarray.client import RandomModule, Context from util import timer context = Context() random = RandomModule(context) @timer def calc_pi(n): """Estimate pi using distributed NumPy arrays.""" x = random.rand((n,)) y = random.rand((n,)) r = context.hypot(x, y) return 4 * float((r < 1.).sum())/n if __name__ == '__main__': N = int(sys.argv[1]) result, time = calc_pi(N) print('time : %3.4g\nresult: %.7f' % (time, result)) context.view.client.purge_everything() Change to reflect recent API changes.
""" Estimate pi using a Monte Carlo method with distarray. Usage: $ python pi_distarray.py <number of points> """ import sys from distarray.random import Random from distarray.client import Context from util import timer context = Context() random = Random(context) @timer def calc_pi(n): """Estimate pi using distributed NumPy arrays.""" x = random.rand((n,)) y = random.rand((n,)) r = context.hypot(x, y) return 4 * float((r < 1.).sum())/n if __name__ == '__main__': N = int(sys.argv[1]) result, time = calc_pi(N) print('time : %3.4g\nresult: %.7f' % (time, result)) context.view.client.purge_everything()
<commit_before>""" Estimate pi using a Monte Carlo method with distarray. Usage: $ python pi_distarray.py <number of points> """ import sys from distarray.client import RandomModule, Context from util import timer context = Context() random = RandomModule(context) @timer def calc_pi(n): """Estimate pi using distributed NumPy arrays.""" x = random.rand((n,)) y = random.rand((n,)) r = context.hypot(x, y) return 4 * float((r < 1.).sum())/n if __name__ == '__main__': N = int(sys.argv[1]) result, time = calc_pi(N) print('time : %3.4g\nresult: %.7f' % (time, result)) context.view.client.purge_everything() <commit_msg>Change to reflect recent API changes.<commit_after>
""" Estimate pi using a Monte Carlo method with distarray. Usage: $ python pi_distarray.py <number of points> """ import sys from distarray.random import Random from distarray.client import Context from util import timer context = Context() random = Random(context) @timer def calc_pi(n): """Estimate pi using distributed NumPy arrays.""" x = random.rand((n,)) y = random.rand((n,)) r = context.hypot(x, y) return 4 * float((r < 1.).sum())/n if __name__ == '__main__': N = int(sys.argv[1]) result, time = calc_pi(N) print('time : %3.4g\nresult: %.7f' % (time, result)) context.view.client.purge_everything()
""" Estimate pi using a Monte Carlo method with distarray. Usage: $ python pi_distarray.py <number of points> """ import sys from distarray.client import RandomModule, Context from util import timer context = Context() random = RandomModule(context) @timer def calc_pi(n): """Estimate pi using distributed NumPy arrays.""" x = random.rand((n,)) y = random.rand((n,)) r = context.hypot(x, y) return 4 * float((r < 1.).sum())/n if __name__ == '__main__': N = int(sys.argv[1]) result, time = calc_pi(N) print('time : %3.4g\nresult: %.7f' % (time, result)) context.view.client.purge_everything() Change to reflect recent API changes.""" Estimate pi using a Monte Carlo method with distarray. Usage: $ python pi_distarray.py <number of points> """ import sys from distarray.random import Random from distarray.client import Context from util import timer context = Context() random = Random(context) @timer def calc_pi(n): """Estimate pi using distributed NumPy arrays.""" x = random.rand((n,)) y = random.rand((n,)) r = context.hypot(x, y) return 4 * float((r < 1.).sum())/n if __name__ == '__main__': N = int(sys.argv[1]) result, time = calc_pi(N) print('time : %3.4g\nresult: %.7f' % (time, result)) context.view.client.purge_everything()
<commit_before>""" Estimate pi using a Monte Carlo method with distarray. Usage: $ python pi_distarray.py <number of points> """ import sys from distarray.client import RandomModule, Context from util import timer context = Context() random = RandomModule(context) @timer def calc_pi(n): """Estimate pi using distributed NumPy arrays.""" x = random.rand((n,)) y = random.rand((n,)) r = context.hypot(x, y) return 4 * float((r < 1.).sum())/n if __name__ == '__main__': N = int(sys.argv[1]) result, time = calc_pi(N) print('time : %3.4g\nresult: %.7f' % (time, result)) context.view.client.purge_everything() <commit_msg>Change to reflect recent API changes.<commit_after>""" Estimate pi using a Monte Carlo method with distarray. Usage: $ python pi_distarray.py <number of points> """ import sys from distarray.random import Random from distarray.client import Context from util import timer context = Context() random = Random(context) @timer def calc_pi(n): """Estimate pi using distributed NumPy arrays.""" x = random.rand((n,)) y = random.rand((n,)) r = context.hypot(x, y) return 4 * float((r < 1.).sum())/n if __name__ == '__main__': N = int(sys.argv[1]) result, time = calc_pi(N) print('time : %3.4g\nresult: %.7f' % (time, result)) context.view.client.purge_everything()
108768166c660b0ba38da07b21f687d5813734f2
jsonconfigparser/__init__.py
jsonconfigparser/__init__.py
from .configparser import JSONConfigParser from .utils import convert_input, dict_, list_, fieldtypes from .commands import view, add_file, add_field, delete, edit version = '0.0.1'
from .configparser import JSONConfigParser from .utils import dict_, list_, fieldtypes, command, call from .commands import add_file, add_field, view, edit, delete, append version = '0.0.1'
Make sure correct functions are imported
Make sure correct functions are imported
Python
mit
justanr/JSONConfigParser
from .configparser import JSONConfigParser from .utils import convert_input, dict_, list_, fieldtypes from .commands import view, add_file, add_field, delete, edit version = '0.0.1' Make sure correct functions are imported
from .configparser import JSONConfigParser from .utils import dict_, list_, fieldtypes, command, call from .commands import add_file, add_field, view, edit, delete, append version = '0.0.1'
<commit_before>from .configparser import JSONConfigParser from .utils import convert_input, dict_, list_, fieldtypes from .commands import view, add_file, add_field, delete, edit version = '0.0.1' <commit_msg>Make sure correct functions are imported<commit_after>
from .configparser import JSONConfigParser from .utils import dict_, list_, fieldtypes, command, call from .commands import add_file, add_field, view, edit, delete, append version = '0.0.1'
from .configparser import JSONConfigParser from .utils import convert_input, dict_, list_, fieldtypes from .commands import view, add_file, add_field, delete, edit version = '0.0.1' Make sure correct functions are importedfrom .configparser import JSONConfigParser from .utils import dict_, list_, fieldtypes, command, call from .commands import add_file, add_field, view, edit, delete, append version = '0.0.1'
<commit_before>from .configparser import JSONConfigParser from .utils import convert_input, dict_, list_, fieldtypes from .commands import view, add_file, add_field, delete, edit version = '0.0.1' <commit_msg>Make sure correct functions are imported<commit_after>from .configparser import JSONConfigParser from .utils import dict_, list_, fieldtypes, command, call from .commands import add_file, add_field, view, edit, delete, append version = '0.0.1'
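What the widened __init__ buys — a flat package namespace, assuming the helpers exist where the import lines above say they do:
from jsonconfigparser import JSONConfigParser, view, edit, append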
9d37d1a08f3d99786db1ccd81f08c8ab24011d7d
runtests.py
runtests.py
#!/usr/bin/env python import sys from os.path import dirname, abspath from django.conf import settings if not settings.configured: settings.configure( DATABASE_ENGINE='django.db.backends.postgresql_psycopg2', DATABASE_NAME='bitfield_test', INSTALLED_APPS=[ 'bitfield', 'bitfield.tests', ], ROOT_URLCONF='', DEBUG=False, ) from django.test.simple import run_tests def runtests(*test_args): if not test_args: test_args = ['bitfield'] parent = dirname(abspath(__file__)) sys.path.insert(0, parent) failures = run_tests(test_args, verbosity=1, interactive=True) sys.exit(failures) if __name__ == '__main__': runtests(*sys.argv[1:])
#!/usr/bin/env python import sys from os.path import dirname, abspath from django.conf import settings if not settings.configured: settings.configure( DATABASE_ENGINE='django.db.backends.postgresql_psycopg2', DATABASE_NAME='bitfield_test', INSTALLED_APPS=[ 'django.contrib.contenttypes', 'bitfield', 'bitfield.tests', ], ROOT_URLCONF='', DEBUG=False, ) from django.test.simple import run_tests def runtests(*test_args): if not test_args: test_args = ['bitfield'] parent = dirname(abspath(__file__)) sys.path.insert(0, parent) failures = run_tests(test_args, verbosity=1, interactive=True) sys.exit(failures) if __name__ == '__main__': runtests(*sys.argv[1:])
Test suite seems to require ContentType to exist
Test suite seems to require ContentType to exist
Python
apache-2.0
disqus/django-bitfield,budlight/django-bitfield,moggers87/django-bitfield,joshowen/django-bitfield,Elec/django-bitfield,mattcaldwell/django-bitfield
#!/usr/bin/env python import sys from os.path import dirname, abspath from django.conf import settings if not settings.configured: settings.configure( DATABASE_ENGINE='django.db.backends.postgresql_psycopg2', DATABASE_NAME='bitfield_test', INSTALLED_APPS=[ 'bitfield', 'bitfield.tests', ], ROOT_URLCONF='', DEBUG=False, ) from django.test.simple import run_tests def runtests(*test_args): if not test_args: test_args = ['bitfield'] parent = dirname(abspath(__file__)) sys.path.insert(0, parent) failures = run_tests(test_args, verbosity=1, interactive=True) sys.exit(failures) if __name__ == '__main__': runtests(*sys.argv[1:])Test suite seems to require ContentType to exist
#!/usr/bin/env python import sys from os.path import dirname, abspath from django.conf import settings if not settings.configured: settings.configure( DATABASE_ENGINE='django.db.backends.postgresql_psycopg2', DATABASE_NAME='bitfield_test', INSTALLED_APPS=[ 'django.contrib.contenttypes', 'bitfield', 'bitfield.tests', ], ROOT_URLCONF='', DEBUG=False, ) from django.test.simple import run_tests def runtests(*test_args): if not test_args: test_args = ['bitfield'] parent = dirname(abspath(__file__)) sys.path.insert(0, parent) failures = run_tests(test_args, verbosity=1, interactive=True) sys.exit(failures) if __name__ == '__main__': runtests(*sys.argv[1:])
<commit_before>#!/usr/bin/env python import sys from os.path import dirname, abspath from django.conf import settings if not settings.configured: settings.configure( DATABASE_ENGINE='django.db.backends.postgresql_psycopg2', DATABASE_NAME='bitfield_test', INSTALLED_APPS=[ 'bitfield', 'bitfield.tests', ], ROOT_URLCONF='', DEBUG=False, ) from django.test.simple import run_tests def runtests(*test_args): if not test_args: test_args = ['bitfield'] parent = dirname(abspath(__file__)) sys.path.insert(0, parent) failures = run_tests(test_args, verbosity=1, interactive=True) sys.exit(failures) if __name__ == '__main__': runtests(*sys.argv[1:])<commit_msg>Test suite seems to require ContentType to exist<commit_after>
#!/usr/bin/env python import sys from os.path import dirname, abspath from django.conf import settings if not settings.configured: settings.configure( DATABASE_ENGINE='django.db.backends.postgresql_psycopg2', DATABASE_NAME='bitfield_test', INSTALLED_APPS=[ 'django.contrib.contenttypes', 'bitfield', 'bitfield.tests', ], ROOT_URLCONF='', DEBUG=False, ) from django.test.simple import run_tests def runtests(*test_args): if not test_args: test_args = ['bitfield'] parent = dirname(abspath(__file__)) sys.path.insert(0, parent) failures = run_tests(test_args, verbosity=1, interactive=True) sys.exit(failures) if __name__ == '__main__': runtests(*sys.argv[1:])
#!/usr/bin/env python import sys from os.path import dirname, abspath from django.conf import settings if not settings.configured: settings.configure( DATABASE_ENGINE='django.db.backends.postgresql_psycopg2', DATABASE_NAME='bitfield_test', INSTALLED_APPS=[ 'bitfield', 'bitfield.tests', ], ROOT_URLCONF='', DEBUG=False, ) from django.test.simple import run_tests def runtests(*test_args): if not test_args: test_args = ['bitfield'] parent = dirname(abspath(__file__)) sys.path.insert(0, parent) failures = run_tests(test_args, verbosity=1, interactive=True) sys.exit(failures) if __name__ == '__main__': runtests(*sys.argv[1:])Test suite seems to require ContentType to exist#!/usr/bin/env python import sys from os.path import dirname, abspath from django.conf import settings if not settings.configured: settings.configure( DATABASE_ENGINE='django.db.backends.postgresql_psycopg2', DATABASE_NAME='bitfield_test', INSTALLED_APPS=[ 'django.contrib.contenttypes', 'bitfield', 'bitfield.tests', ], ROOT_URLCONF='', DEBUG=False, ) from django.test.simple import run_tests def runtests(*test_args): if not test_args: test_args = ['bitfield'] parent = dirname(abspath(__file__)) sys.path.insert(0, parent) failures = run_tests(test_args, verbosity=1, interactive=True) sys.exit(failures) if __name__ == '__main__': runtests(*sys.argv[1:])
<commit_before>#!/usr/bin/env python import sys from os.path import dirname, abspath from django.conf import settings if not settings.configured: settings.configure( DATABASE_ENGINE='django.db.backends.postgresql_psycopg2', DATABASE_NAME='bitfield_test', INSTALLED_APPS=[ 'bitfield', 'bitfield.tests', ], ROOT_URLCONF='', DEBUG=False, ) from django.test.simple import run_tests def runtests(*test_args): if not test_args: test_args = ['bitfield'] parent = dirname(abspath(__file__)) sys.path.insert(0, parent) failures = run_tests(test_args, verbosity=1, interactive=True) sys.exit(failures) if __name__ == '__main__': runtests(*sys.argv[1:])<commit_msg>Test suite seems to require ContentType to exist<commit_after>#!/usr/bin/env python import sys from os.path import dirname, abspath from django.conf import settings if not settings.configured: settings.configure( DATABASE_ENGINE='django.db.backends.postgresql_psycopg2', DATABASE_NAME='bitfield_test', INSTALLED_APPS=[ 'django.contrib.contenttypes', 'bitfield', 'bitfield.tests', ], ROOT_URLCONF='', DEBUG=False, ) from django.test.simple import run_tests def runtests(*test_args): if not test_args: test_args = ['bitfield'] parent = dirname(abspath(__file__)) sys.path.insert(0, parent) failures = run_tests(test_args, verbosity=1, interactive=True) sys.exit(failures) if __name__ == '__main__': runtests(*sys.argv[1:])
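Entries in INSTALLED_APPS must be exact importable module paths; 'django.contrib.contenttypes' is what provides the ContentType model the test models depend on. A minimal configure sketch (settings trimmed to the relevant part):
from django.conf import settings

settings.configure(
    INSTALLED_APPS=[
        'django.contrib.contenttypes',  # supplies the ContentType model
        'bitfield',
        'bitfield.tests',
    ],
)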
67186bff0a37d941e5cfb3420dbd1c0ac010c7b6
gconvert.py
gconvert.py
#!/usr/bin/env python import json import re from urllib import urlopen api = 'http://www.google.com/ig/calculator?hl=en&q={}{}=?{}' def convert(value, src_units, dst_units): url = api.format(value, src_units, dst_units) data = urlopen(url).read().decode('utf-8', 'ignore') # Convert to valid JSON: {foo: "1"} -> {"foo" : "1"} data = re.sub('([a-z]+):', '"\\1" :', data) data = json.loads(data) if len(data['error']) > 0: raise Exception(data['error']) value = data['rhs'].split(' ')[0] value = float(value) if '.' in value else int(value) return value, data['rhs'] if __name__ == '__main__': from argparse import ArgumentParser parser = ArgumentParser() parser.add_argument('value', type=float) parser.add_argument('source_units') parser.add_argument('dest_units') args = parser.parse_args() print convert(args.value, args.source_units, args.dest_units)
#!/usr/bin/env python import json import re from urllib import urlopen api = 'http://www.google.com/ig/calculator?hl=en&q={}{}=?{}' def convert(value, src_units, dst_units): url = api.format(value, src_units, dst_units) # read and preprocess the response resp = urlopen(url).read() resp = resp.replace(r'\x', r'\u00') resp = re.sub('([a-z]+):', '"\\1" :', resp) data = json.loads(resp) if len(data['error']) > 0: raise Exception(data['error']) # postprocess the answer from Google to deal with HTML-formatted scientific # notation rhs = data['rhs'] rhs = re.sub(r'\s*&#215;\s*10\s*<sup>-(\d+)</sup>', 'e-\\1', rhs) rhs = re.sub(r'\s*&#215;\s*10\s*<sup>(\d+)</sup>', 'e+\\1', rhs) data['rhs'] = rhs value = data['rhs'].split(' ')[0] value = float(value) if '.' in value else int(value) return value, data['rhs'] if __name__ == '__main__': from argparse import ArgumentParser parser = ArgumentParser() parser.add_argument('value', type=float) parser.add_argument('source_units') parser.add_argument('dest_units') args = parser.parse_args() print convert(args.value, args.source_units, args.dest_units)
Handle scientific notation in response
Handle scientific notation in response
Python
mit
jason0x43/jc-units
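A standalone sanity check of the two substitutions this commit adds (the sample string below is illustrative, not captured from a live Google response):

import re

rhs = '1.2345 &#215; 10<sup>-5</sup> kilometers'
# strip the HTML-formatted scientific notation down to a plain exponent
rhs = re.sub(r'\s*&#215;\s*10\s*<sup>-(\d+)</sup>', 'e-\\1', rhs)
rhs = re.sub(r'\s*&#215;\s*10\s*<sup>(\d+)</sup>', 'e+\\1', rhs)
print(rhs)  # -> '1.2345e-5 kilometers'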
ce3371edbd852ba226eb57817b3a04d164d64f7c
h5py/_stub.py
h5py/_stub.py
# Cython has limits on what you can declare inside control structures. This
# native-Python module is a shim to allow things like dynamic class
# definitions and functional closures.

def generate_class(cls1, cls2):
    """ Create a new class from two bases.

    The new name is the concatenation of cls2.__name__ with "H5";
    e.g. KeyError -> KeyErrorH5.
    """
    class HybridClass(cls1, cls2):
        pass
    HybridClass.__name__ = cls2.__name__+"H5"
    return HybridClass

# Cython has limits on what you can declare inside control structures. This
# native-Python module is a shim to allow things like dynamic class
# definitions and functional closures.

def generate_class(cls1, cls2):
    """ Create a new class from two bases.

    The new name is the concatenation of cls2.__name__ with "H5";
    e.g. KeyError -> KeyErrorH5.
    """
    class HybridClass(cls1, cls2):
        pass
    HybridClass.__name__ = cls2.__name__
    return HybridClass
Drop H5 suffix for exceptions
Drop H5 suffix for exceptions
Python
bsd-3-clause
h5py/h5py,h5py/h5py,h5py/h5py
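A minimal standalone demonstration of the hybrid-class pattern after this change (generate_class restated here so the snippet runs on its own):

def generate_class(cls1, cls2):
    class HybridClass(cls1, cls2):
        pass
    HybridClass.__name__ = cls2.__name__  # post-commit: no "H5" suffix
    return HybridClass

HybridKeyError = generate_class(RuntimeError, KeyError)
print(HybridKeyError.__name__)                   # -> 'KeyError'
print(issubclass(HybridKeyError, RuntimeError))  # -> True
print(issubclass(HybridKeyError, KeyError))      # -> True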
e8c9762cbfac6dbb4dab252bd9cdf0a4e01f3a36
scipy/ndimage/tests/test_regression.py
scipy/ndimage/tests/test_regression.py
import numpy as np
from numpy.testing import *
import scipy.ndimage as ndimage

def test_byte_order_median():
    """Regression test for #413: median_filter does not handle bytes orders."""
    a = np.arange(9, dtype='<f4').reshape(3, 3)
    ref = ndimage.filters.median_filter(a,(3, 3))

    b = np.arange(9, dtype='>f4').reshape(3, 3)
    t = ndimage.filters.median_filter(b, (3, 3))
    assert_array_almost_equal(ref, t)

def test_zoom_output_shape():
    """Ticket #643"""
    x = np.arange(12).reshape((3,4))
    ndimage.zoom(x, 2, output=np.zeros((6,8)))

if __name__ == "__main__":
    NumpyTest().run()

import numpy as np
from numpy.testing import *
import scipy.ndimage as ndimage

def test_byte_order_median():
    """Regression test for #413: median_filter does not handle bytes orders."""
    a = np.arange(9, dtype='<f4').reshape(3, 3)
    ref = ndimage.filters.median_filter(a,(3, 3))

    b = np.arange(9, dtype='>f4').reshape(3, 3)
    t = ndimage.filters.median_filter(b, (3, 3))
    assert_array_almost_equal(ref, t)

def test_zoom_output_shape():
    """Ticket #643"""
    x = np.arange(12).reshape((3,4))
    ndimage.zoom(x, 2, output=np.zeros((6,8)))

if __name__ == "__main__":
    run_module_suite()
Use run_module_suite instead of deprecated NumpyTest.
Use run_module_suite instead of deprecated NumpyTest. git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@5310 d6536bca-fef9-0310-8506-e4c0a848fbcf
Python
bsd-3-clause
jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,scipy/scipy-svn,lesserwhirls/scipy-cwt,scipy/scipy-svn,scipy/scipy-svn,jasonmccampbell/scipy-refactor,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,scipy/scipy-svn
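For context, a minimal module using the replacement runner (run_module_suite was numpy's nose-based entry point at the time; it has since been deprecated and removed, so treat this as historical usage, not current API):

import numpy as np
from numpy.testing import assert_array_almost_equal

def test_identity():
    assert_array_almost_equal(np.eye(2), np.array([[1., 0.], [0., 1.]]))

if __name__ == "__main__":
    from numpy.testing import run_module_suite
    run_module_suite()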
bdc8ac1db8681dd45d4498ccd9735be5b1cdb1b7
hash_table.py
hash_table.py
#!/usr/bin/env python
'''Implementation of a simple hash table.

The table has `hash`, `get` and `set` methods.
The hash function uses a very basic hash algorithm to insert
the value into the table.
'''


class HashItem(object):
    def __init__(self, key, value):
        self.key = key
        self.value = value


class Hash(object):
    def __init__(self, size=1024):
        self.table = []
        for i in range(size):
            self.table.append(list())

    def hash(self, key):
        hash_value = 0
        for i in key:
            hash_value += ord(key)
        return hash_value % len(self.table)

    def get(self, key):
        hashed_key = self.hash(key)
        for k in self.table[hashed_key]:
            if k[0] == key:
                return k[1]
        else:
            raise KeyError('Value not found')

    def set(self, key, val):
        hashed_key = self.hash(key)
        self.table[hashed_key].append((key, val))

#!/usr/bin/env python
'''Implementation of a simple hash table.

The table has `hash`, `get` and `set` methods.
The hash function uses a very basic hash algorithm to insert
the value into the table.
'''


class HashItem(object):
    def __init__(self, key, value):
        self.key = key
        self.value = value


class HashTable(object):
    def __init__(self, size=1024):
        self.table = []
        for i in range(size):
            self.table.append(list())

    def hash(self, key):
        hash_value = 0
        for i in key:
            hash_value += ord(i)
        return hash_value % len(self.table)

    def get(self, key):
        hashed_key = self.hash(key)
        for k in self.table[hashed_key]:
            if k[0] == key:
                return k[1]
        else:
            raise KeyError('Value not found')

    def set(self, key, val):
        hashed_key = self.hash(key)
        for k in self.table[hashed_key]:
            if k[0] == key:
                self.table[hashed_key][k] = (key, val)
        else:
            self.table[hashed_key].append((key, val))
Add handling for duplicate keys in set; fix bug in hash function (was calling ord on key instead of character); change name of class to HashTable from Hash
Add handling for duplicate keys in set; fix bug in hash function (was calling ord on key instead of character); change name of class to HashTable from Hash
Python
mit
jwarren116/data-structures-deux
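To see why the hash fix matters, a standalone restatement of the corrected function (the old ord(key) raised TypeError for any key longer than one character; ord(i) sums per-character codes):

def simple_hash(key, table_size=1024):
    hash_value = 0
    for i in key:
        hash_value += ord(i)  # per-character, as in the fixed version
    return hash_value % table_size

print(simple_hash('a'))   # 97
print(simple_hash('ab'))  # 97 + 98 = 195
print(simple_hash('ba'))  # 195 -- anagrams collide into the same bucket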
7dec24bb55b7c33133f62f8f124fde0948d417a8
snippet/example/python/utils.py
snippet/example/python/utils.py
#!/usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import, unicode_literals, print_function, division

import sys

if sys.version_info[0] < 3:
    PY3, Unicode, Bytes = False, unicode, str
else:
    PY3, Unicode, Bytes = True, str, bytes

to_bytes = lambda v, e="utf-8": v.encode(e) if isinstance(v, Unicode) else v
to_unicode = lambda v, e="utf-8": v.decode(e) if isinstance(v, Bytes) else v
to_str = to_unicode if PY3 else to_bytes

#!/usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import, unicode_literals, print_function, division

import sys

if sys.version_info[0] < 3:
    PY3, Unicode, Bytes = False, unicode, str
else:
    PY3, Unicode, Bytes = True, str, bytes

to_bytes = lambda v, e="utf-8": v.encode(e) if isinstance(v, Unicode) else v
to_unicode = lambda v, e="utf-8": v.decode(e) if isinstance(v, Bytes) else v
to_str = to_unicode if PY3 else to_bytes


class ObjectDict(dict):
    def __setattr__(self, name, value):
        self[name] = value

    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError("'%s' object has no attribute '%s'" % (
                self.__class__.__name__, name))
Add the python example ObjectDict
Add the python example ObjectDict
Python
mit
xgfone/snippet,xgfone/snippet,xgfone/snippet,xgfone/snippet,xgfone/snippet,xgfone/snippet,xgfone/snippet
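Usage sketch for the ObjectDict added here (class restated so the example runs alone):

class ObjectDict(dict):
    def __setattr__(self, name, value):
        self[name] = value

    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError("'%s' object has no attribute '%s'" % (
                self.__class__.__name__, name))

d = ObjectDict(host='localhost')
d.port = 8080      # attribute assignment writes a dict key
print(d['port'])   # -> 8080
print(d.host)      # -> localhost
# d.missing        # would raise AttributeError, not KeyError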
c5c77ba407e195e3cc98bb75a961fe112736fca6
homebrew/command_line.py
homebrew/command_line.py
# -*- coding: utf-8 -*-
from .homebrew import HomeBrew


def main():
    HomeBrew().log_info()

# -*- coding: utf-8 -*-
import argparse

from .homebrew import HomeBrew


def main():
    argparse.ArgumentParser(description='Get homebrew info').parse_args()
    HomeBrew().log_info()
Add argparse for info on hb command
Add argparse for info on hb command
Python
isc
igroen/homebrew
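Even with no arguments declared, the one-line parser gives the CLI standard behaviour; a quick illustration (the `hb` invocations in the comments are hypothetical):

import argparse

argparse.ArgumentParser(description='Get homebrew info').parse_args([])
# `hb --help` now prints usage plus the description and exits;
# `hb stray-arg` exits with an "unrecognized arguments" error.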
207a1a8fad79ccfa0c244aa0a1d0d25fee87c438
testfixtures/tests/test_docs.py
testfixtures/tests/test_docs.py
# Copyright (c) 2009-2012 Simplistix Ltd
#
# See license.txt for more details.

from doctest import REPORT_NDIFF,ELLIPSIS
from glob import glob
from manuel import doctest, capture
from manuel.testing import TestSuite
from nose.plugins.skip import SkipTest
from os.path import dirname, join, pardir
import os

from . import compat

path = os.environ.get('DOCPATH', join(dirname(__file__),pardir,pardir,'docs'))

tests = glob(join(path,'*.txt'))

if not tests:
    raise SkipTest('No docs found to test')  # pragma: no cover

def test_suite():
    m = doctest.Manuel(optionflags=REPORT_NDIFF|ELLIPSIS)
    m += compat.Manuel()
    m += capture.Manuel()
    return TestSuite(m, *tests)

# Copyright (c) 2009-2012 Simplistix Ltd
#
# See license.txt for more details.

from doctest import REPORT_NDIFF,ELLIPSIS
from glob import glob
from manuel import doctest, capture
from manuel.testing import TestSuite
from nose.plugins.skip import SkipTest
from os.path import dirname, join, pardir
import os

from . import compat

workspace = os.environ.get('WORKSPACE', join(dirname(__file__), pardir, pardir))

tests = glob(join(workspace,'docs', '*.txt'))

if not tests:
    raise SkipTest('No docs found to test')  # pragma: no cover

def test_suite():
    m = doctest.Manuel(optionflags=REPORT_NDIFF|ELLIPSIS)
    m += compat.Manuel()
    m += capture.Manuel()
    return TestSuite(m, *tests)
Use a WORKSPACE to make life easier in Jenkins.
Use a WORKSPACE to make life easier in Jenkins.
Python
mit
nebulans/testfixtures,Simplistix/testfixtures
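The change boils down to one configurable root directory; the underlying pattern is os.environ.get with a computed default, as in this standalone sketch:

import os
from os.path import dirname, join, pardir

# fall back to the package's grandparent when WORKSPACE is not set
workspace = os.environ.get('WORKSPACE', join(dirname(__file__), pardir, pardir))
print(join(workspace, 'docs', '*.txt'))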
6a9fb4f8ad3c8fda2b12688be5058e95d5e995e7
tests/test_main.py
tests/test_main.py
import os


def test_qt_api():
    """
    If QT_API is specified, we check that the correct Qt wrapper was used
    """

    from qtpy import QtCore, QtGui, QtWidgets, QtWebEngineWidgets

    QT_API = os.environ.get('QT_API', None)

    if QT_API == 'pyside':
        import PySide
        assert QtCore.QEvent is PySide.QtCore.QEvent
        assert QtGui.QPainter is PySide.QtGui.QPainter
        assert QtWidgets.QWidget is PySide.QtGui.QWidget
        assert QtWebEngineWidgets.QWebEnginePage is PySide.QtWebKit.QWebPage
    elif QT_API in ('pyqt', 'pyqt4'):
        import PyQt4
        assert QtCore.QEvent is PyQt4.QtCore.QEvent
        assert QtGui.QPainter is PyQt4.QtGui.QPainter
        assert QtWidgets.QWidget is PyQt4.QtGui.QWidget
        assert QtWebEngineWidgets.QWebEnginePage is PyQt4.QtWebKit.QWebPage
    elif QT_API == 'pyqt5':
        import PyQt5
        assert QtCore.QEvent is PyQt5.QtCore.QEvent
        assert QtGui.QPainter is PyQt5.QtGui.QPainter
        assert QtWidgets.QWidget is PyQt5.QtWidgets.QWidget
        assert QtWebEngineWidgets.QWebEnginePage is PyQt5.QtWebEngineWidgets.QWebEnginePage
    else:
        pass

import os

from qtpy import QtCore, QtGui, QtWidgets, QtWebEngineWidgets


def assert_pyside():
    import PySide
    assert QtCore.QEvent is PySide.QtCore.QEvent
    assert QtGui.QPainter is PySide.QtGui.QPainter
    assert QtWidgets.QWidget is PySide.QtGui.QWidget
    assert QtWebEngineWidgets.QWebEnginePage is PySide.QtWebKit.QWebPage


def assert_pyqt4():
    import PyQt4
    assert QtCore.QEvent is PyQt4.QtCore.QEvent
    assert QtGui.QPainter is PyQt4.QtGui.QPainter
    assert QtWidgets.QWidget is PyQt4.QtGui.QWidget
    assert QtWebEngineWidgets.QWebEnginePage is PyQt4.QtWebKit.QWebPage


def assert_pyqt5():
    import PyQt5
    assert QtCore.QEvent is PyQt5.QtCore.QEvent
    assert QtGui.QPainter is PyQt5.QtGui.QPainter
    assert QtWidgets.QWidget is PyQt5.QtWidgets.QWidget
    assert QtWebEngineWidgets.QWebEnginePage is PyQt5.QtWebEngineWidgets.QWebEnginePage


def test_qt_api():
    """
    If QT_API is specified, we check that the correct Qt wrapper was used
    """

    QT_API = os.environ.get('QT_API', None)

    if QT_API == 'pyside':
        assert_pyside()
    elif QT_API in ('pyqt', 'pyqt4'):
        assert_pyqt4()
    elif QT_API == 'pyqt5':
        assert_pyqt5()
    else:
        # We should then be loading, in order of decreasing preference, PyQt5,
        # PyQt4, and PySide.
        try:
            import PyQt5
        except ImportError:
            try:
                import PyQt4
            except ImportError:
                import PySide
                assert_pyside()
            else:
                assert_pyqt4()
        else:
            assert_pyqt5()
Check that the priority order is respected if QT_API or USE_QT_API are not specified.
Check that the priority order is respected if QT_API or USE_QT_API are not specified.
Python
mit
goanpeca/qtpy,davvid/qtpy,davvid/qtpy,goanpeca/qtpy,spyder-ide/qtpy
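The new else-branch encodes a preference order via a try/except cascade; a dependency-free sketch of the same control flow (the stdlib modules below merely stand in for the Qt bindings, and `not_installed_module` is a hypothetical name assumed to be absent):

try:
    import not_installed_module as mod  # stands in for PyQt5
except ImportError:
    try:
        import json as mod              # stands in for PyQt4
    except ImportError:
        import io as mod                # stands in for PySide
print(mod.__name__)  # -> 'json': the first importable candidate wins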
66420e5b72f58652aca1cf0353897bd43ee57aef
nipype/testing/__init__.py
nipype/testing/__init__.py
"""Simple utility to pull in all the testing functions we're likely to use. """ import numpy as np from distutils.version import LooseVersion from nose.tools import (assert_true, assert_false, assert_not_equal, assert_raises) from nose import SkipTest if LooseVersion(np.__version__) >= '1.2': from numpy.testing import * from numpy.testing.decorators import * else: from numpytesting import * from numpytesting.decorators import * from utils import * from enthought.traits.api import TraitError
"""Simple utility to pull in all the testing functions we're likely to use. """ import numpy as np from distutils.version import LooseVersion from nose.tools import (assert_true, assert_false, assert_not_equal, assert_raises) from nose import SkipTest, with_setup if LooseVersion(np.__version__) >= '1.2': from numpy.testing import * from numpy.testing.decorators import * else: from numpytesting import * from numpytesting.decorators import * from utils import * from enthought.traits.api import TraitError
Add import of with_setup that got lost in the numpy testing merge.
Add import of with_setup that got lost in the numpy testing merge. git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1236 ead46cd0-7350-4e37-8683-fc4c6f79bf00
Python
bsd-3-clause
carolFrohlich/nipype,FCP-INDI/nipype,gerddie/nipype,JohnGriffiths/nipype,FCP-INDI/nipype,Leoniela/nipype,mick-d/nipype_source,fprados/nipype,FredLoney/nipype,christianbrodbeck/nipype,carolFrohlich/nipype,carolFrohlich/nipype,wanderine/nipype,wanderine/nipype,gerddie/nipype,wanderine/nipype,mick-d/nipype,sgiavasis/nipype,Leoniela/nipype,FCP-INDI/nipype,pearsonlab/nipype,arokem/nipype,rameshvs/nipype,JohnGriffiths/nipype,mick-d/nipype_source,blakedewey/nipype,carolFrohlich/nipype,arokem/nipype,blakedewey/nipype,satra/NiPypeold,JohnGriffiths/nipype,iglpdc/nipype,blakedewey/nipype,dgellis90/nipype,mick-d/nipype,glatard/nipype,carlohamalainen/nipype,FredLoney/nipype,sgiavasis/nipype,pearsonlab/nipype,FCP-INDI/nipype,JohnGriffiths/nipype,grlee77/nipype,dmordom/nipype,dgellis90/nipype,dgellis90/nipype,fprados/nipype,grlee77/nipype,glatard/nipype,glatard/nipype,pearsonlab/nipype,dmordom/nipype,mick-d/nipype,satra/NiPypeold,wanderine/nipype,iglpdc/nipype,pearsonlab/nipype,grlee77/nipype,sgiavasis/nipype,Leoniela/nipype,rameshvs/nipype,sgiavasis/nipype,iglpdc/nipype,blakedewey/nipype,rameshvs/nipype,mick-d/nipype_source,fprados/nipype,gerddie/nipype,iglpdc/nipype,glatard/nipype,arokem/nipype,carlohamalainen/nipype,rameshvs/nipype,grlee77/nipype,dmordom/nipype,dgellis90/nipype,arokem/nipype,FredLoney/nipype,gerddie/nipype,mick-d/nipype,carlohamalainen/nipype,christianbrodbeck/nipype
"""Simple utility to pull in all the testing functions we're likely to use. """ import numpy as np from distutils.version import LooseVersion from nose.tools import (assert_true, assert_false, assert_not_equal, assert_raises) from nose import SkipTest if LooseVersion(np.__version__) >= '1.2': from numpy.testing import * from numpy.testing.decorators import * else: from numpytesting import * from numpytesting.decorators import * from utils import * from enthought.traits.api import TraitError Add import of with_setup that got lost in the numpy testing merge. git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1236 ead46cd0-7350-4e37-8683-fc4c6f79bf00
"""Simple utility to pull in all the testing functions we're likely to use. """ import numpy as np from distutils.version import LooseVersion from nose.tools import (assert_true, assert_false, assert_not_equal, assert_raises) from nose import SkipTest, with_setup if LooseVersion(np.__version__) >= '1.2': from numpy.testing import * from numpy.testing.decorators import * else: from numpytesting import * from numpytesting.decorators import * from utils import * from enthought.traits.api import TraitError
<commit_before>"""Simple utility to pull in all the testing functions we're likely to use. """ import numpy as np from distutils.version import LooseVersion from nose.tools import (assert_true, assert_false, assert_not_equal, assert_raises) from nose import SkipTest if LooseVersion(np.__version__) >= '1.2': from numpy.testing import * from numpy.testing.decorators import * else: from numpytesting import * from numpytesting.decorators import * from utils import * from enthought.traits.api import TraitError <commit_msg>Add import of with_setup that got lost in the numpy testing merge. git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1236 ead46cd0-7350-4e37-8683-fc4c6f79bf00<commit_after>
"""Simple utility to pull in all the testing functions we're likely to use. """ import numpy as np from distutils.version import LooseVersion from nose.tools import (assert_true, assert_false, assert_not_equal, assert_raises) from nose import SkipTest, with_setup if LooseVersion(np.__version__) >= '1.2': from numpy.testing import * from numpy.testing.decorators import * else: from numpytesting import * from numpytesting.decorators import * from utils import * from enthought.traits.api import TraitError
"""Simple utility to pull in all the testing functions we're likely to use. """ import numpy as np from distutils.version import LooseVersion from nose.tools import (assert_true, assert_false, assert_not_equal, assert_raises) from nose import SkipTest if LooseVersion(np.__version__) >= '1.2': from numpy.testing import * from numpy.testing.decorators import * else: from numpytesting import * from numpytesting.decorators import * from utils import * from enthought.traits.api import TraitError Add import of with_setup that got lost in the numpy testing merge. git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1236 ead46cd0-7350-4e37-8683-fc4c6f79bf00"""Simple utility to pull in all the testing functions we're likely to use. """ import numpy as np from distutils.version import LooseVersion from nose.tools import (assert_true, assert_false, assert_not_equal, assert_raises) from nose import SkipTest, with_setup if LooseVersion(np.__version__) >= '1.2': from numpy.testing import * from numpy.testing.decorators import * else: from numpytesting import * from numpytesting.decorators import * from utils import * from enthought.traits.api import TraitError
<commit_before>"""Simple utility to pull in all the testing functions we're likely to use. """ import numpy as np from distutils.version import LooseVersion from nose.tools import (assert_true, assert_false, assert_not_equal, assert_raises) from nose import SkipTest if LooseVersion(np.__version__) >= '1.2': from numpy.testing import * from numpy.testing.decorators import * else: from numpytesting import * from numpytesting.decorators import * from utils import * from enthought.traits.api import TraitError <commit_msg>Add import of with_setup that got lost in the numpy testing merge. git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1236 ead46cd0-7350-4e37-8683-fc4c6f79bf00<commit_after>"""Simple utility to pull in all the testing functions we're likely to use. """ import numpy as np from distutils.version import LooseVersion from nose.tools import (assert_true, assert_false, assert_not_equal, assert_raises) from nose import SkipTest, with_setup if LooseVersion(np.__version__) >= '1.2': from numpy.testing import * from numpy.testing.decorators import * else: from numpytesting import * from numpytesting.decorators import * from utils import * from enthought.traits.api import TraitError
45325a43cf4525ef39afec86c03451525f907e92
hiro/utils.py
hiro/utils.py
""" random utility functions """ import datetime import functools import time from .errors import InvalidTypeError def timedelta_to_seconds(delta): """ converts a timedelta object to seconds """ seconds = delta.microseconds seconds += (delta.seconds + delta.days * 24 * 3600) * 10 ** 6 return float(seconds) / 10 ** 6 def time_in_seconds(value): """ normalized either a datetime.date, datetime.datetime or float to a float """ if isinstance(value, (float, int)): return value elif isinstance(value, (datetime.date, datetime.datetime)): return time.mktime(value.timetuple()) else: raise InvalidTypeError(value) #adopted from: http://www.snip2code.com/Snippet/2535/Fluent-interface-decorators def chained(method): """ Method decorator to allow chaining. """ @functools.wraps(method) def wrapper(self, *args, **kwargs): """ fluent wrapper """ result = method(self, *args, **kwargs) return self if result is None else result return wrapper
""" random utility functions """ import calendar import datetime import functools from .errors import InvalidTypeError def timedelta_to_seconds(delta): """ converts a timedelta object to seconds """ seconds = delta.microseconds seconds += (delta.seconds + delta.days * 24 * 3600) * 10 ** 6 return float(seconds) / 10 ** 6 def time_in_seconds(value): """ normalized either a datetime.date, datetime.datetime or float to a float """ if isinstance(value, (float, int)): return value elif isinstance(value, (datetime.date, datetime.datetime)): return calendar.timegm(value.timetuple()) else: raise InvalidTypeError(value) #adopted from: http://www.snip2code.com/Snippet/2535/Fluent-interface-decorators def chained(method): """ Method decorator to allow chaining. """ @functools.wraps(method) def wrapper(self, *args, **kwargs): """ fluent wrapper """ result = method(self, *args, **kwargs) return self if result is None else result return wrapper
Fix TZ-dependent return values from time_in_seconds()
Fix TZ-dependent return values from time_in_seconds()

time.mktime assumes that the time tuple is in local time, rather than UTC. Use calendar.timegm instead for consistency.
Python
mit
alisaifee/hiro,alisaifee/hiro
""" random utility functions """ import datetime import functools import time from .errors import InvalidTypeError def timedelta_to_seconds(delta): """ converts a timedelta object to seconds """ seconds = delta.microseconds seconds += (delta.seconds + delta.days * 24 * 3600) * 10 ** 6 return float(seconds) / 10 ** 6 def time_in_seconds(value): """ normalized either a datetime.date, datetime.datetime or float to a float """ if isinstance(value, (float, int)): return value elif isinstance(value, (datetime.date, datetime.datetime)): return time.mktime(value.timetuple()) else: raise InvalidTypeError(value) #adopted from: http://www.snip2code.com/Snippet/2535/Fluent-interface-decorators def chained(method): """ Method decorator to allow chaining. """ @functools.wraps(method) def wrapper(self, *args, **kwargs): """ fluent wrapper """ result = method(self, *args, **kwargs) return self if result is None else result return wrapper Fix TZ-dependent return values from time_in_seconds() time.mktime assumes that the time tuple is in local time, rather than UTC. Use calendar.timegm instead for consistency.
""" random utility functions """ import calendar import datetime import functools from .errors import InvalidTypeError def timedelta_to_seconds(delta): """ converts a timedelta object to seconds """ seconds = delta.microseconds seconds += (delta.seconds + delta.days * 24 * 3600) * 10 ** 6 return float(seconds) / 10 ** 6 def time_in_seconds(value): """ normalized either a datetime.date, datetime.datetime or float to a float """ if isinstance(value, (float, int)): return value elif isinstance(value, (datetime.date, datetime.datetime)): return calendar.timegm(value.timetuple()) else: raise InvalidTypeError(value) #adopted from: http://www.snip2code.com/Snippet/2535/Fluent-interface-decorators def chained(method): """ Method decorator to allow chaining. """ @functools.wraps(method) def wrapper(self, *args, **kwargs): """ fluent wrapper """ result = method(self, *args, **kwargs) return self if result is None else result return wrapper
<commit_before>""" random utility functions """ import datetime import functools import time from .errors import InvalidTypeError def timedelta_to_seconds(delta): """ converts a timedelta object to seconds """ seconds = delta.microseconds seconds += (delta.seconds + delta.days * 24 * 3600) * 10 ** 6 return float(seconds) / 10 ** 6 def time_in_seconds(value): """ normalized either a datetime.date, datetime.datetime or float to a float """ if isinstance(value, (float, int)): return value elif isinstance(value, (datetime.date, datetime.datetime)): return time.mktime(value.timetuple()) else: raise InvalidTypeError(value) #adopted from: http://www.snip2code.com/Snippet/2535/Fluent-interface-decorators def chained(method): """ Method decorator to allow chaining. """ @functools.wraps(method) def wrapper(self, *args, **kwargs): """ fluent wrapper """ result = method(self, *args, **kwargs) return self if result is None else result return wrapper <commit_msg>Fix TZ-dependent return values from time_in_seconds() time.mktime assumes that the time tuple is in local time, rather than UTC. Use calendar.timegm instead for consistency.<commit_after>
""" random utility functions """ import calendar import datetime import functools from .errors import InvalidTypeError def timedelta_to_seconds(delta): """ converts a timedelta object to seconds """ seconds = delta.microseconds seconds += (delta.seconds + delta.days * 24 * 3600) * 10 ** 6 return float(seconds) / 10 ** 6 def time_in_seconds(value): """ normalized either a datetime.date, datetime.datetime or float to a float """ if isinstance(value, (float, int)): return value elif isinstance(value, (datetime.date, datetime.datetime)): return calendar.timegm(value.timetuple()) else: raise InvalidTypeError(value) #adopted from: http://www.snip2code.com/Snippet/2535/Fluent-interface-decorators def chained(method): """ Method decorator to allow chaining. """ @functools.wraps(method) def wrapper(self, *args, **kwargs): """ fluent wrapper """ result = method(self, *args, **kwargs) return self if result is None else result return wrapper
""" random utility functions """ import datetime import functools import time from .errors import InvalidTypeError def timedelta_to_seconds(delta): """ converts a timedelta object to seconds """ seconds = delta.microseconds seconds += (delta.seconds + delta.days * 24 * 3600) * 10 ** 6 return float(seconds) / 10 ** 6 def time_in_seconds(value): """ normalized either a datetime.date, datetime.datetime or float to a float """ if isinstance(value, (float, int)): return value elif isinstance(value, (datetime.date, datetime.datetime)): return time.mktime(value.timetuple()) else: raise InvalidTypeError(value) #adopted from: http://www.snip2code.com/Snippet/2535/Fluent-interface-decorators def chained(method): """ Method decorator to allow chaining. """ @functools.wraps(method) def wrapper(self, *args, **kwargs): """ fluent wrapper """ result = method(self, *args, **kwargs) return self if result is None else result return wrapper Fix TZ-dependent return values from time_in_seconds() time.mktime assumes that the time tuple is in local time, rather than UTC. Use calendar.timegm instead for consistency.""" random utility functions """ import calendar import datetime import functools from .errors import InvalidTypeError def timedelta_to_seconds(delta): """ converts a timedelta object to seconds """ seconds = delta.microseconds seconds += (delta.seconds + delta.days * 24 * 3600) * 10 ** 6 return float(seconds) / 10 ** 6 def time_in_seconds(value): """ normalized either a datetime.date, datetime.datetime or float to a float """ if isinstance(value, (float, int)): return value elif isinstance(value, (datetime.date, datetime.datetime)): return calendar.timegm(value.timetuple()) else: raise InvalidTypeError(value) #adopted from: http://www.snip2code.com/Snippet/2535/Fluent-interface-decorators def chained(method): """ Method decorator to allow chaining. """ @functools.wraps(method) def wrapper(self, *args, **kwargs): """ fluent wrapper """ result = method(self, *args, **kwargs) return self if result is None else result return wrapper
<commit_before>""" random utility functions """ import datetime import functools import time from .errors import InvalidTypeError def timedelta_to_seconds(delta): """ converts a timedelta object to seconds """ seconds = delta.microseconds seconds += (delta.seconds + delta.days * 24 * 3600) * 10 ** 6 return float(seconds) / 10 ** 6 def time_in_seconds(value): """ normalized either a datetime.date, datetime.datetime or float to a float """ if isinstance(value, (float, int)): return value elif isinstance(value, (datetime.date, datetime.datetime)): return time.mktime(value.timetuple()) else: raise InvalidTypeError(value) #adopted from: http://www.snip2code.com/Snippet/2535/Fluent-interface-decorators def chained(method): """ Method decorator to allow chaining. """ @functools.wraps(method) def wrapper(self, *args, **kwargs): """ fluent wrapper """ result = method(self, *args, **kwargs) return self if result is None else result return wrapper <commit_msg>Fix TZ-dependent return values from time_in_seconds() time.mktime assumes that the time tuple is in local time, rather than UTC. Use calendar.timegm instead for consistency.<commit_after>""" random utility functions """ import calendar import datetime import functools from .errors import InvalidTypeError def timedelta_to_seconds(delta): """ converts a timedelta object to seconds """ seconds = delta.microseconds seconds += (delta.seconds + delta.days * 24 * 3600) * 10 ** 6 return float(seconds) / 10 ** 6 def time_in_seconds(value): """ normalized either a datetime.date, datetime.datetime or float to a float """ if isinstance(value, (float, int)): return value elif isinstance(value, (datetime.date, datetime.datetime)): return calendar.timegm(value.timetuple()) else: raise InvalidTypeError(value) #adopted from: http://www.snip2code.com/Snippet/2535/Fluent-interface-decorators def chained(method): """ Method decorator to allow chaining. """ @functools.wraps(method) def wrapper(self, *args, **kwargs): """ fluent wrapper """ result = method(self, *args, **kwargs) return self if result is None else result return wrapper
4ca420de76b2c385b07f46681a779b160f2af62f
mpl_style_gallery/__main__.py
mpl_style_gallery/__main__.py
from argparse import ArgumentParser

from . import app
from . import build

parser = ArgumentParser()
parser.add_argument('action', nargs='?', default='build',
                    choices=['build', 'display'])
args = parser.parse_args()

if args.action == 'build':
    build.save_all_plots()

if args.action in ('build', 'display'):
    app.main()

#!/usr/bin/env python
"""
Create Matplotlib style gallery for all Matplotlib stylesheets and display in
the browser.

By default, all plots are rebuilt, but this can be avoided using the
`--skip-build` (`-s`) flag.
"""
import argparse

from . import app
from . import build


def main():
    formatter = argparse.ArgumentDefaultsHelpFormatter
    parser = argparse.ArgumentParser(description=__doc__,
                                     formatter_class=formatter)
    parser.add_argument('-s', '--skip-build', action='store_true',
                        help="If set, skip plot-generation step.")
    args = parser.parse_args()

    if not args.skip_build:
        build.save_all_plots()
    app.main()


if __name__ == '__main__':
    main()
Clean up API for main module
Clean up API for main module
Python
bsd-3-clause
tonysyu/matplotlib-style-gallery,tonysyu/matplotlib-style-gallery,tonysyu/matplotlib-style-gallery
from argparse import ArgumentParser

from . import app
from . import build

parser = ArgumentParser()
parser.add_argument('action', nargs='?', default='build',
                    choices=['build', 'display'])
args = parser.parse_args()

if args.action == 'build':
    build.save_all_plots()

if args.action in ('build', 'display'):
    app.main()

Clean up API for main module

#!/usr/bin/env python
"""
Create Matplotlib style gallery for all Matplotlib stylesheets and display in
the browser.

By default, all plots are rebuilt, but this can be avoided using the
`--skip-build` (`-s`) flag.
"""
import argparse

from . import app
from . import build


def main():
    formatter = argparse.ArgumentDefaultsHelpFormatter
    parser = argparse.ArgumentParser(description=__doc__,
                                     formatter_class=formatter)
    parser.add_argument('-s', '--skip-build', action='store_true',
                        help="If set, skip plot-generation step.")
    args = parser.parse_args()

    if not args.skip_build:
        build.save_all_plots()
    app.main()


if __name__ == '__main__':
    main()

<commit_before>from argparse import ArgumentParser

from . import app
from . import build

parser = ArgumentParser()
parser.add_argument('action', nargs='?', default='build',
                    choices=['build', 'display'])
args = parser.parse_args()

if args.action == 'build':
    build.save_all_plots()

if args.action in ('build', 'display'):
    app.main()
<commit_msg>Clean up API for main module<commit_after>

#!/usr/bin/env python
"""
Create Matplotlib style gallery for all Matplotlib stylesheets and display in
the browser.

By default, all plots are rebuilt, but this can be avoided using the
`--skip-build` (`-s`) flag.
"""
import argparse

from . import app
from . import build


def main():
    formatter = argparse.ArgumentDefaultsHelpFormatter
    parser = argparse.ArgumentParser(description=__doc__,
                                     formatter_class=formatter)
    parser.add_argument('-s', '--skip-build', action='store_true',
                        help="If set, skip plot-generation step.")
    args = parser.parse_args()

    if not args.skip_build:
        build.save_all_plots()
    app.main()


if __name__ == '__main__':
    main()

from argparse import ArgumentParser

from . import app
from . import build

parser = ArgumentParser()
parser.add_argument('action', nargs='?', default='build',
                    choices=['build', 'display'])
args = parser.parse_args()

if args.action == 'build':
    build.save_all_plots()

if args.action in ('build', 'display'):
    app.main()

Clean up API for main module

#!/usr/bin/env python
"""
Create Matplotlib style gallery for all Matplotlib stylesheets and display in
the browser.

By default, all plots are rebuilt, but this can be avoided using the
`--skip-build` (`-s`) flag.
"""
import argparse

from . import app
from . import build


def main():
    formatter = argparse.ArgumentDefaultsHelpFormatter
    parser = argparse.ArgumentParser(description=__doc__,
                                     formatter_class=formatter)
    parser.add_argument('-s', '--skip-build', action='store_true',
                        help="If set, skip plot-generation step.")
    args = parser.parse_args()

    if not args.skip_build:
        build.save_all_plots()
    app.main()


if __name__ == '__main__':
    main()

<commit_before>from argparse import ArgumentParser

from . import app
from . import build

parser = ArgumentParser()
parser.add_argument('action', nargs='?', default='build',
                    choices=['build', 'display'])
args = parser.parse_args()

if args.action == 'build':
    build.save_all_plots()

if args.action in ('build', 'display'):
    app.main()
<commit_msg>Clean up API for main module<commit_after>#!/usr/bin/env python
"""
Create Matplotlib style gallery for all Matplotlib stylesheets and display in
the browser.

By default, all plots are rebuilt, but this can be avoided using the
`--skip-build` (`-s`) flag.
"""
import argparse

from . import app
from . import build


def main():
    formatter = argparse.ArgumentDefaultsHelpFormatter
    parser = argparse.ArgumentParser(description=__doc__,
                                     formatter_class=formatter)
    parser.add_argument('-s', '--skip-build', action='store_true',
                        help="If set, skip plot-generation step.")
    args = parser.parse_args()

    if not args.skip_build:
        build.save_all_plots()
    app.main()


if __name__ == '__main__':
    main()
f0a6a091e4b2d3943cdd582d3183602ad50b9729
httpDissec.py
httpDissec.py
# sudo apt-get install python-scapy
from scapy.all import *
# sudo pip install scapy_http
from scapy.layers import http
from scapy.layers.http import HTTPResponse
import sys

packets = rdpcap("task07_f1.pcap")
requests = []
answers = []


def has_http_header(packet):
    return packet.haslayer(HTTPResponse)


def extractNextFile(packets, file_name):
    if not has_http_header(packets[0]):
        return False
    first = packets.pop(0)
    f = open(file_name, 'w+')
    f.write(first['Raw'])
    while not has_http_header(packets[0]):
        pkt = packets.pop(0)
        f.write(pkt['Raw'])
    f.close()
    return True


for pkt in packets:
    tcp = pkt['TCP']
    # destination port must be 80
    if tcp.dport == 80 and pkt.haslayer('HTTP'):
        requests.append(pkt)

for pkt in packets:
    tcp = pkt['TCP']
    # source port must be 80
    if tcp.sport == 80 and pkt.haslayer('HTTP'):
        answers.append(pkt)

print '=============== REQUESTS =================='
i = 0
for req in requests:
    file_name = get_name(req)
    printGET(file_name)
    extractNextFile(answer, fileName)

# sudo apt-get install python-scapy
from scapy.all import *
# sudo pip install scapy_http
from scapy.layers import http
from scapy.layers.http import HTTPResponse
import sys

packets = rdpcap("task07_f1.pcap")
requests = []
answers = []


def has_http_header(packet):
    return packet.haslayer(HTTPResponse)


def extract_next_file(packets, file_name):
    if not has_http_header(packets[0]):
        return False
    first = packets.pop(0)
    f = open(file_name, 'w+')
    f.write(first['Raw'])
    while not has_http_header(packets[0]):
        pkt = packets.pop(0)
        f.write(pkt['Raw'])
    f.close()
    return True


for pkt in packets:
    tcp = pkt['TCP']
    # destination port must be 80
    if tcp.dport == 80 and pkt.haslayer('HTTP'):
        requests.append(pkt)

for pkt in packets:
    tcp = pkt['TCP']
    # source port must be 80
    if tcp.sport == 80 and pkt.haslayer('HTTP'):
        answers.append(pkt)

print '=============== REQUESTS =================='
i = 0
for req in requests:
    file_name = "file_" + str(i)
    printGET(file_name)
    extract_next_file(answer, file_name)
    i += 1
Set name of stored file
Set name of stored file
Python
mit
alexst07/http_dissector
# sudo apt-get install python-scapy
from scapy.all import *
# sudo pip install scapy_http
from scapy.layers import http
from scapy.layers.http import HTTPResponse
import sys

packets = rdpcap("task07_f1.pcap")
requests = []
answers = []


def has_http_header(packet):
    return packet.haslayer(HTTPResponse)


def extractNextFile(packets, file_name):
    if not has_http_header(packets[0]):
        return False
    first = packets.pop(0)
    f = open(file_name, 'w+')
    f.write(first['Raw'])
    while not has_http_header(packets[0]):
        pkt = packets.pop(0)
        f.write(pkt['Raw'])
    f.close()
    return True


for pkt in packets:
    tcp = pkt['TCP']
    # destination port must be 80
    if tcp.dport == 80 and pkt.haslayer('HTTP'):
        requests.append(pkt)

for pkt in packets:
    tcp = pkt['TCP']
    # source port must be 80
    if tcp.sport == 80 and pkt.haslayer('HTTP'):
        answers.append(pkt)

print '=============== REQUESTS =================='
i = 0
for req in requests:
    file_name = get_name(req)
    printGET(file_name)
    extractNextFile(answer, fileName)

Set name of stored file

# sudo apt-get install python-scapy
from scapy.all import *
# sudo pip install scapy_http
from scapy.layers import http
from scapy.layers.http import HTTPResponse
import sys

packets = rdpcap("task07_f1.pcap")
requests = []
answers = []


def has_http_header(packet):
    return packet.haslayer(HTTPResponse)


def extract_next_file(packets, file_name):
    if not has_http_header(packets[0]):
        return False
    first = packets.pop(0)
    f = open(file_name, 'w+')
    f.write(first['Raw'])
    while not has_http_header(packets[0]):
        pkt = packets.pop(0)
        f.write(pkt['Raw'])
    f.close()
    return True


for pkt in packets:
    tcp = pkt['TCP']
    # destination port must be 80
    if tcp.dport == 80 and pkt.haslayer('HTTP'):
        requests.append(pkt)

for pkt in packets:
    tcp = pkt['TCP']
    # source port must be 80
    if tcp.sport == 80 and pkt.haslayer('HTTP'):
        answers.append(pkt)

print '=============== REQUESTS =================='
i = 0
for req in requests:
    file_name = "file_" + str(i)
    printGET(file_name)
    extract_next_file(answer, file_name)
    i += 1

<commit_before># sudo apt-get install python-scapy
from scapy.all import *
# sudo pip install scapy_http
from scapy.layers import http
from scapy.layers.http import HTTPResponse
import sys

packets = rdpcap("task07_f1.pcap")
requests = []
answers = []


def has_http_header(packet):
    return packet.haslayer(HTTPResponse)


def extractNextFile(packets, file_name):
    if not has_http_header(packets[0]):
        return False
    first = packets.pop(0)
    f = open(file_name, 'w+')
    f.write(first['Raw'])
    while not has_http_header(packets[0]):
        pkt = packets.pop(0)
        f.write(pkt['Raw'])
    f.close()
    return True


for pkt in packets:
    tcp = pkt['TCP']
    # destination port must be 80
    if tcp.dport == 80 and pkt.haslayer('HTTP'):
        requests.append(pkt)

for pkt in packets:
    tcp = pkt['TCP']
    # source port must be 80
    if tcp.sport == 80 and pkt.haslayer('HTTP'):
        answers.append(pkt)

print '=============== REQUESTS =================='
i = 0
for req in requests:
    file_name = get_name(req)
    printGET(file_name)
    extractNextFile(answer, fileName)
<commit_msg>Set name of stored file<commit_after>

# sudo apt-get install python-scapy
from scapy.all import *
# sudo pip install scapy_http
from scapy.layers import http
from scapy.layers.http import HTTPResponse
import sys

packets = rdpcap("task07_f1.pcap")
requests = []
answers = []


def has_http_header(packet):
    return packet.haslayer(HTTPResponse)


def extract_next_file(packets, file_name):
    if not has_http_header(packets[0]):
        return False
    first = packets.pop(0)
    f = open(file_name, 'w+')
    f.write(first['Raw'])
    while not has_http_header(packets[0]):
        pkt = packets.pop(0)
        f.write(pkt['Raw'])
    f.close()
    return True


for pkt in packets:
    tcp = pkt['TCP']
    # destination port must be 80
    if tcp.dport == 80 and pkt.haslayer('HTTP'):
        requests.append(pkt)

for pkt in packets:
    tcp = pkt['TCP']
    # source port must be 80
    if tcp.sport == 80 and pkt.haslayer('HTTP'):
        answers.append(pkt)

print '=============== REQUESTS =================='
i = 0
for req in requests:
    file_name = "file_" + str(i)
    printGET(file_name)
    extract_next_file(answer, file_name)
    i += 1

# sudo apt-get install python-scapy
from scapy.all import *
# sudo pip install scapy_http
from scapy.layers import http
from scapy.layers.http import HTTPResponse
import sys

packets = rdpcap("task07_f1.pcap")
requests = []
answers = []


def has_http_header(packet):
    return packet.haslayer(HTTPResponse)


def extractNextFile(packets, file_name):
    if not has_http_header(packets[0]):
        return False
    first = packets.pop(0)
    f = open(file_name, 'w+')
    f.write(first['Raw'])
    while not has_http_header(packets[0]):
        pkt = packets.pop(0)
        f.write(pkt['Raw'])
    f.close()
    return True


for pkt in packets:
    tcp = pkt['TCP']
    # destination port must be 80
    if tcp.dport == 80 and pkt.haslayer('HTTP'):
        requests.append(pkt)

for pkt in packets:
    tcp = pkt['TCP']
    # source port must be 80
    if tcp.sport == 80 and pkt.haslayer('HTTP'):
        answers.append(pkt)

print '=============== REQUESTS =================='
i = 0
for req in requests:
    file_name = get_name(req)
    printGET(file_name)
    extractNextFile(answer, fileName)

Set name of stored file

# sudo apt-get install python-scapy
from scapy.all import *
# sudo pip install scapy_http
from scapy.layers import http
from scapy.layers.http import HTTPResponse
import sys

packets = rdpcap("task07_f1.pcap")
requests = []
answers = []


def has_http_header(packet):
    return packet.haslayer(HTTPResponse)


def extract_next_file(packets, file_name):
    if not has_http_header(packets[0]):
        return False
    first = packets.pop(0)
    f = open(file_name, 'w+')
    f.write(first['Raw'])
    while not has_http_header(packets[0]):
        pkt = packets.pop(0)
        f.write(pkt['Raw'])
    f.close()
    return True


for pkt in packets:
    tcp = pkt['TCP']
    # destination port must be 80
    if tcp.dport == 80 and pkt.haslayer('HTTP'):
        requests.append(pkt)

for pkt in packets:
    tcp = pkt['TCP']
    # source port must be 80
    if tcp.sport == 80 and pkt.haslayer('HTTP'):
        answers.append(pkt)

print '=============== REQUESTS =================='
i = 0
for req in requests:
    file_name = "file_" + str(i)
    printGET(file_name)
    extract_next_file(answer, file_name)
    i += 1

<commit_before># sudo apt-get install python-scapy
from scapy.all import *
# sudo pip install scapy_http
from scapy.layers import http
from scapy.layers.http import HTTPResponse
import sys

packets = rdpcap("task07_f1.pcap")
requests = []
answers = []


def has_http_header(packet):
    return packet.haslayer(HTTPResponse)


def extractNextFile(packets, file_name):
    if not has_http_header(packets[0]):
        return False
    first = packets.pop(0)
    f = open(file_name, 'w+')
    f.write(first['Raw'])
    while not has_http_header(packets[0]):
        pkt = packets.pop(0)
        f.write(pkt['Raw'])
    f.close()
    return True


for pkt in packets:
    tcp = pkt['TCP']
    # destination port must be 80
    if tcp.dport == 80 and pkt.haslayer('HTTP'):
        requests.append(pkt)

for pkt in packets:
    tcp = pkt['TCP']
    # source port must be 80
    if tcp.sport == 80 and pkt.haslayer('HTTP'):
        answers.append(pkt)

print '=============== REQUESTS =================='
i = 0
for req in requests:
    file_name = get_name(req)
    printGET(file_name)
    extractNextFile(answer, fileName)
<commit_msg>Set name of stored file<commit_after># sudo apt-get install python-scapy
from scapy.all import *
# sudo pip install scapy_http
from scapy.layers import http
from scapy.layers.http import HTTPResponse
import sys

packets = rdpcap("task07_f1.pcap")
requests = []
answers = []


def has_http_header(packet):
    return packet.haslayer(HTTPResponse)


def extract_next_file(packets, file_name):
    if not has_http_header(packets[0]):
        return False
    first = packets.pop(0)
    f = open(file_name, 'w+')
    f.write(first['Raw'])
    while not has_http_header(packets[0]):
        pkt = packets.pop(0)
        f.write(pkt['Raw'])
    f.close()
    return True


for pkt in packets:
    tcp = pkt['TCP']
    # destination port must be 80
    if tcp.dport == 80 and pkt.haslayer('HTTP'):
        requests.append(pkt)

for pkt in packets:
    tcp = pkt['TCP']
    # source port must be 80
    if tcp.sport == 80 and pkt.haslayer('HTTP'):
        answers.append(pkt)

print '=============== REQUESTS =================='
i = 0
for req in requests:
    file_name = "file_" + str(i)
    printGET(file_name)
    extract_next_file(answer, file_name)
    i += 1
b8f948b58b06648c94fb746ae519a44a7e96ae15
tools/perf/perf_tools/kraken.py
tools/perf/perf_tools/kraken.py
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry import multi_page_benchmark
from telemetry import util


def _Mean(l):
  return float(sum(l)) / len(l) if len(l) > 0 else 0.0


class Kraken(multi_page_benchmark.MultiPageBenchmark):
  def MeasurePage(self, _, tab, results):
    js_is_done = """
        document.title.indexOf("Results") != -1 &&
        document.readyState == "complete"
        """
    def _IsDone():
      return bool(tab.runtime.Evaluate(js_is_done))
    util.WaitFor(_IsDone, 300)

    js_get_results = """
        var formElement = document.getElementsByTagName("input")[0];
        decodeURIComponent(formElement.value.split("?")[1]);
        """
    result_dict = eval(tab.runtime.Evaluate(js_get_results))
    total = 0
    for key in result_dict:
      if key == 'v':
        continue
      results.Add(key, 'ms', result_dict[key], data_type='unimportant')
      total += _Mean(result_dict[key])
    results.Add('Total', 'ms', total)

# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry import multi_page_benchmark
from telemetry import util


def _Mean(l):
  return float(sum(l)) / len(l) if len(l) > 0 else 0.0


class Kraken(multi_page_benchmark.MultiPageBenchmark):
  def MeasurePage(self, _, tab, results):
    js_is_done = """
        document.title.indexOf("Results") != -1 &&
        document.readyState == "complete"
        """
    def _IsDone():
      return bool(tab.runtime.Evaluate(js_is_done))
    util.WaitFor(_IsDone, 500, poll_interval=5)

    js_get_results = """
        var formElement = document.getElementsByTagName("input")[0];
        decodeURIComponent(formElement.value.split("?")[1]);
        """
    result_dict = eval(tab.runtime.Evaluate(js_get_results))
    total = 0
    for key in result_dict:
      if key == 'v':
        continue
      results.Add(key, 'ms', result_dict[key], data_type='unimportant')
      total += _Mean(result_dict[key])
    results.Add('Total', 'ms', total)
Increase Kraken timeout to allow it to pass on Android.
[Telemetry] Increase Kraken timeout to allow it to pass on Android.

BUG=163680
TEST=tools/perf/run_multipage_benchmarks --browser=android-content-shell kraken tools/perf/page_sets/kraken.json

Review URL: https://chromiumcodereview.appspot.com/11519015

git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@172374 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,bright-sparks/chromium-spacewalk,Chilledheart/chromium,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,M4sse/chromium.src,mogoweb/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,nacl-webkit/chrome_deps,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,patrickm/chromium.src,chuan9/chromium-crosswalk,littlstar/chromium.src,pozdnyakov/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,mogoweb/chromium-crosswalk,ltilve/chromium,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,jaruba/chromium.src,hujiajie/pa-chromium,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,pozdnyakov/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,markYoungH/chromium.src,ltilve/chromium,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,axinging/chromium-crosswalk,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,dushu1203/chromium.src,littlstar/chromium.src,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,timopulkkinen/BubbleFish,PeterWangIntel/chromium-crosswalk,nacl-webkit/chrome_deps,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,zcbenz/cefode-chromium,TheTypoMaster/chromium-crosswalk,mogoweb/chromium-crosswalk,anirudhSK/chromium,pozdnyakov/chromium-crosswalk,Jonekee/chromium.src,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,pozdnyakov/chromium-crosswalk,jaruba/chromium.src,dednal/chromium.src,chuan9/chromium-crosswalk,nacl-webkit/chrome_deps,mohamed--abdel-maksoud/chromium.src,timopulkkinen/BubbleFish,PeterWangIntel/chromium-crosswalk,mogoweb/chromium-crosswalk,littlstar/chromium.src,Just-D/chromium-1,hujiajie/pa-chromium,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,jaruba/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk,Chilledheart/chromium,zcbenz/cefode-chromium,jaruba/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,zcbenz/cefode-chromium,Fireblend/chromium-crosswalk,anirudhSK/chromium,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,Chilledheart/chromium,hgl888/chromium-crosswalk,M4sse/chromium.src,markYoungH/chromium.src,Chilledheart/chromium,ondra-novak/chromium.src,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,zcbenz/cefode-chromium,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,nacl-webkit/chrome_deps,dushu1203/chromium.src,ChromiumWebApps/chromium,zcbenz/cefode-chromium,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,hgl888/chromium-crosswalk,patrickm/chromium.src,dednal/chromium.src,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,anirudhSK/chromium,bright-sparks/chromium-spacewalk,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,littlstar/chromium.src,ondra-novak/chromium.src,M4sse/chromium.src,bright-sparks/chromium-spacewalk,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,dednal/chromium.src,Jonekee/chromium.src,dednal/chromium.src,ltilve/chromium,axinging/chromium-crosswalk,anirudhSK/chromium,zcbenz/cefode-chromium,Chilledheart/chromium,ChromiumWebApps/chromium,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,hujiajie/pa-chromium,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,dednal/chromium.src,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,littlstar/chromium.src,ltilve/chromium,jaruba/chromium.src,Just-D/chromium-1,fujunwei/chromium-crosswalk,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,ChromiumWebApps/chromium,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,markYoungH/chromium.src,littlstar/chromium.src,ChromiumWebApps/chromium,anirudhSK/chromium,hujiajie/pa-chromium,hujiajie/pa-chromium,M4sse/chromium.src,Fireblend/chromium-crosswalk,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,axinging/chromium-crosswalk,nacl-webkit/chrome_deps,dednal/chromium.src,M4sse/chromium.src,krieger-od/nwjs_chromium.src,pozdnyakov/chromium-crosswalk,hujiajie/pa-chromium,zcbenz/cefode-chromium,markYoungH/chromium.src,dednal/chromium.src,hujiajie/pa-chromium,hujiajie/pa-chromium,markYoungH/chromium.src,dushu1203/chromium.src,nacl-webkit/chrome_deps,markYoungH/chromium.src,zcbenz/cefode-chromium,jaruba/chromium.src,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,ltilve/chromium,bright-sparks/chromium-spacewalk,M4sse/chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,timopulkkinen/BubbleFish,hujiajie/pa-chromium,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,timopulkkinen/BubbleFish,timopulkkinen/BubbleFish,dednal/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,mogoweb/chromium-crosswalk,Just-D/chromium-1,fujunwei/chromium-crosswalk,jaruba/chromium.src,ChromiumWebApps/chromium,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,hujiajie/pa-chromium,M4sse/chromium.src,ltilve/chromium,anirudhSK/chromium,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,ondra-novak/chromium.src,pozdnyakov/chromium-crosswalk,nacl-webkit/chrome_deps,ondra-novak/chromium.src,patrickm/chromium.src,timopulkkinen/BubbleFish,littlstar/chromium.src,hgl888/chromium-crosswalk,littlstar/chromium.src,Pluto-tv/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk-efl,nacl-webkit/chrome_deps,dushu1203/chromium.src,jaruba/chromium.src,timopulkkinen/BubbleFish,nacl-webkit/chrome_deps,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,pozdnyakov/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,mogoweb/chromium-crosswalk,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,zcbenz/cefode-chromium,axinging/chromium-crosswalk,markYoungH/chromium.src,Just-D/chromium-1,axinging/chromium-crosswalk,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,anirudhSK/chromium,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,anirudhSK/chromium,Jonekee/chromium.src,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry import multi_page_benchmark
from telemetry import util


def _Mean(l):
  return float(sum(l)) / len(l) if len(l) > 0 else 0.0


class Kraken(multi_page_benchmark.MultiPageBenchmark):
  def MeasurePage(self, _, tab, results):
    js_is_done = """
        document.title.indexOf("Results") != -1 &&
        document.readyState == "complete"
        """
    def _IsDone():
      return bool(tab.runtime.Evaluate(js_is_done))
    util.WaitFor(_IsDone, 300)

    js_get_results = """
        var formElement = document.getElementsByTagName("input")[0];
        decodeURIComponent(formElement.value.split("?")[1]);
        """
    result_dict = eval(tab.runtime.Evaluate(js_get_results))
    total = 0
    for key in result_dict:
      if key == 'v':
        continue
      results.Add(key, 'ms', result_dict[key], data_type='unimportant')
      total += _Mean(result_dict[key])
    results.Add('Total', 'ms', total)

[Telemetry] Increase Kraken timeout to allow it to pass on Android.

BUG=163680
TEST=tools/perf/run_multipage_benchmarks --browser=android-content-shell kraken tools/perf/page_sets/kraken.json

Review URL: https://chromiumcodereview.appspot.com/11519015

git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@172374 0039d316-1c4b-4281-b951-d872f2087c98

# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry import multi_page_benchmark
from telemetry import util


def _Mean(l):
  return float(sum(l)) / len(l) if len(l) > 0 else 0.0


class Kraken(multi_page_benchmark.MultiPageBenchmark):
  def MeasurePage(self, _, tab, results):
    js_is_done = """
        document.title.indexOf("Results") != -1 &&
        document.readyState == "complete"
        """
    def _IsDone():
      return bool(tab.runtime.Evaluate(js_is_done))
    util.WaitFor(_IsDone, 500, poll_interval=5)

    js_get_results = """
        var formElement = document.getElementsByTagName("input")[0];
        decodeURIComponent(formElement.value.split("?")[1]);
        """
    result_dict = eval(tab.runtime.Evaluate(js_get_results))
    total = 0
    for key in result_dict:
      if key == 'v':
        continue
      results.Add(key, 'ms', result_dict[key], data_type='unimportant')
      total += _Mean(result_dict[key])
    results.Add('Total', 'ms', total)

<commit_before># Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry import multi_page_benchmark
from telemetry import util


def _Mean(l):
  return float(sum(l)) / len(l) if len(l) > 0 else 0.0


class Kraken(multi_page_benchmark.MultiPageBenchmark):
  def MeasurePage(self, _, tab, results):
    js_is_done = """
        document.title.indexOf("Results") != -1 &&
        document.readyState == "complete"
        """
    def _IsDone():
      return bool(tab.runtime.Evaluate(js_is_done))
    util.WaitFor(_IsDone, 300)

    js_get_results = """
        var formElement = document.getElementsByTagName("input")[0];
        decodeURIComponent(formElement.value.split("?")[1]);
        """
    result_dict = eval(tab.runtime.Evaluate(js_get_results))
    total = 0
    for key in result_dict:
      if key == 'v':
        continue
      results.Add(key, 'ms', result_dict[key], data_type='unimportant')
      total += _Mean(result_dict[key])
    results.Add('Total', 'ms', total)
<commit_msg>[Telemetry] Increase Kraken timeout to allow it to pass on Android.

BUG=163680
TEST=tools/perf/run_multipage_benchmarks --browser=android-content-shell kraken tools/perf/page_sets/kraken.json

Review URL: https://chromiumcodereview.appspot.com/11519015

git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@172374 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>

# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry import multi_page_benchmark
from telemetry import util


def _Mean(l):
  return float(sum(l)) / len(l) if len(l) > 0 else 0.0


class Kraken(multi_page_benchmark.MultiPageBenchmark):
  def MeasurePage(self, _, tab, results):
    js_is_done = """
        document.title.indexOf("Results") != -1 &&
        document.readyState == "complete"
        """
    def _IsDone():
      return bool(tab.runtime.Evaluate(js_is_done))
    util.WaitFor(_IsDone, 500, poll_interval=5)

    js_get_results = """
        var formElement = document.getElementsByTagName("input")[0];
        decodeURIComponent(formElement.value.split("?")[1]);
        """
    result_dict = eval(tab.runtime.Evaluate(js_get_results))
    total = 0
    for key in result_dict:
      if key == 'v':
        continue
      results.Add(key, 'ms', result_dict[key], data_type='unimportant')
      total += _Mean(result_dict[key])
    results.Add('Total', 'ms', total)

# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry import multi_page_benchmark
from telemetry import util


def _Mean(l):
  return float(sum(l)) / len(l) if len(l) > 0 else 0.0


class Kraken(multi_page_benchmark.MultiPageBenchmark):
  def MeasurePage(self, _, tab, results):
    js_is_done = """
        document.title.indexOf("Results") != -1 &&
        document.readyState == "complete"
        """
    def _IsDone():
      return bool(tab.runtime.Evaluate(js_is_done))
    util.WaitFor(_IsDone, 300)

    js_get_results = """
        var formElement = document.getElementsByTagName("input")[0];
        decodeURIComponent(formElement.value.split("?")[1]);
        """
    result_dict = eval(tab.runtime.Evaluate(js_get_results))
    total = 0
    for key in result_dict:
      if key == 'v':
        continue
      results.Add(key, 'ms', result_dict[key], data_type='unimportant')
      total += _Mean(result_dict[key])
    results.Add('Total', 'ms', total)

[Telemetry] Increase Kraken timeout to allow it to pass on Android.

BUG=163680
TEST=tools/perf/run_multipage_benchmarks --browser=android-content-shell kraken tools/perf/page_sets/kraken.json

Review URL: https://chromiumcodereview.appspot.com/11519015

git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@172374 0039d316-1c4b-4281-b951-d872f2087c98

# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry import multi_page_benchmark
from telemetry import util


def _Mean(l):
  return float(sum(l)) / len(l) if len(l) > 0 else 0.0


class Kraken(multi_page_benchmark.MultiPageBenchmark):
  def MeasurePage(self, _, tab, results):
    js_is_done = """
        document.title.indexOf("Results") != -1 &&
        document.readyState == "complete"
        """
    def _IsDone():
      return bool(tab.runtime.Evaluate(js_is_done))
    util.WaitFor(_IsDone, 500, poll_interval=5)

    js_get_results = """
        var formElement = document.getElementsByTagName("input")[0];
        decodeURIComponent(formElement.value.split("?")[1]);
        """
    result_dict = eval(tab.runtime.Evaluate(js_get_results))
    total = 0
    for key in result_dict:
      if key == 'v':
        continue
      results.Add(key, 'ms', result_dict[key], data_type='unimportant')
      total += _Mean(result_dict[key])
    results.Add('Total', 'ms', total)

<commit_before># Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry import multi_page_benchmark
from telemetry import util


def _Mean(l):
  return float(sum(l)) / len(l) if len(l) > 0 else 0.0


class Kraken(multi_page_benchmark.MultiPageBenchmark):
  def MeasurePage(self, _, tab, results):
    js_is_done = """
        document.title.indexOf("Results") != -1 &&
        document.readyState == "complete"
        """
    def _IsDone():
      return bool(tab.runtime.Evaluate(js_is_done))
    util.WaitFor(_IsDone, 300)

    js_get_results = """
        var formElement = document.getElementsByTagName("input")[0];
        decodeURIComponent(formElement.value.split("?")[1]);
        """
    result_dict = eval(tab.runtime.Evaluate(js_get_results))
    total = 0
    for key in result_dict:
      if key == 'v':
        continue
      results.Add(key, 'ms', result_dict[key], data_type='unimportant')
      total += _Mean(result_dict[key])
    results.Add('Total', 'ms', total)
<commit_msg>[Telemetry] Increase Kraken timeout to allow it to pass on Android.

BUG=163680
TEST=tools/perf/run_multipage_benchmarks --browser=android-content-shell kraken tools/perf/page_sets/kraken.json

Review URL: https://chromiumcodereview.appspot.com/11519015

git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@172374 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry import multi_page_benchmark
from telemetry import util


def _Mean(l):
  return float(sum(l)) / len(l) if len(l) > 0 else 0.0


class Kraken(multi_page_benchmark.MultiPageBenchmark):
  def MeasurePage(self, _, tab, results):
    js_is_done = """
        document.title.indexOf("Results") != -1 &&
        document.readyState == "complete"
        """
    def _IsDone():
      return bool(tab.runtime.Evaluate(js_is_done))
    util.WaitFor(_IsDone, 500, poll_interval=5)

    js_get_results = """
        var formElement = document.getElementsByTagName("input")[0];
        decodeURIComponent(formElement.value.split("?")[1]);
        """
    result_dict = eval(tab.runtime.Evaluate(js_get_results))
    total = 0
    for key in result_dict:
      if key == 'v':
        continue
      results.Add(key, 'ms', result_dict[key], data_type='unimportant')
      total += _Mean(result_dict[key])
    results.Add('Total', 'ms', total)
1819f9cb080f847ea5d669571853b28d8fc1ce1c
Script/test_screenshot.py
Script/test_screenshot.py
import unittest
import os
import time
import shutil
import filecmp
import base64
import glob
import json


class ScreenShotTest(unittest.TestCase):
    def test_screenshots(self):
        generated_file_paths = glob.glob('build/Dev/Cpp/Test/Release/*.png')

        for path in generated_file_paths:
            name = os.path.basename(path)
            self.assertTrue(filecmp.cmp('TestData/Tests/Windows/' + name, path), name + ' is not equal')


if __name__ == '__main__':
    unittest.main()

import sys
import unittest
import os
import time
import shutil
import filecmp
import base64
import glob
import json


class ScreenShotTest(unittest.TestCase):
    def test_screenshots(self):
        generated_file_paths = glob.glob('build/Dev/Cpp/Test/Release/*.png')

        success = True

        for path in generated_file_paths:
            name = os.path.basename(path)
            test_data_path = 'TestData/Tests/Windows/' + name
            if os.path.exists(test_data_path):
                is_same = filecmp.cmp(test_data_path, path)
                if not is_same:
                    print(f'{name} is not equal.')
                    success = False
            else:
                print(f'{test_data_path} is not found.')
                success = False

        self.assertTrue(success)


if __name__ == '__main__':
    unittest.main()
Improve a script to test
Improve a script to test
Python
mit
effekseer/Effekseer,effekseer/Effekseer,effekseer/Effekseer,effekseer/Effekseer,effekseer/Effekseer,effekseer/Effekseer,effekseer/Effekseer
import unittest
import os
import time
import shutil
import filecmp
import base64
import glob
import json


class ScreenShotTest(unittest.TestCase):
    def test_screenshots(self):
        generated_file_paths = glob.glob('build/Dev/Cpp/Test/Release/*.png')

        for path in generated_file_paths:
            name = os.path.basename(path)
            self.assertTrue(filecmp.cmp('TestData/Tests/Windows/' + name, path), name + ' is not equal')


if __name__ == '__main__':
    unittest.main()

Improve a script to test

import sys
import unittest
import os
import time
import shutil
import filecmp
import base64
import glob
import json


class ScreenShotTest(unittest.TestCase):
    def test_screenshots(self):
        generated_file_paths = glob.glob('build/Dev/Cpp/Test/Release/*.png')

        success = True

        for path in generated_file_paths:
            name = os.path.basename(path)
            test_data_path = 'TestData/Tests/Windows/' + name
            if os.path.exists(test_data_path):
                is_same = filecmp.cmp(test_data_path, path)
                if not is_same:
                    print(f'{name} is not equal.')
                    success = False
            else:
                print(f'{test_data_path} is not found.')
                success = False

        self.assertTrue(success)


if __name__ == '__main__':
    unittest.main()

<commit_before>import unittest
import os
import time
import shutil
import filecmp
import base64
import glob
import json


class ScreenShotTest(unittest.TestCase):
    def test_screenshots(self):
        generated_file_paths = glob.glob('build/Dev/Cpp/Test/Release/*.png')

        for path in generated_file_paths:
            name = os.path.basename(path)
            self.assertTrue(filecmp.cmp('TestData/Tests/Windows/' + name, path), name + ' is not equal')


if __name__ == '__main__':
    unittest.main()
<commit_msg>Improve a script to test<commit_after>

import sys
import unittest
import os
import time
import shutil
import filecmp
import base64
import glob
import json


class ScreenShotTest(unittest.TestCase):
    def test_screenshots(self):
        generated_file_paths = glob.glob('build/Dev/Cpp/Test/Release/*.png')

        success = True

        for path in generated_file_paths:
            name = os.path.basename(path)
            test_data_path = 'TestData/Tests/Windows/' + name
            if os.path.exists(test_data_path):
                is_same = filecmp.cmp(test_data_path, path)
                if not is_same:
                    print(f'{name} is not equal.')
                    success = False
            else:
                print(f'{test_data_path} is not found.')
                success = False

        self.assertTrue(success)


if __name__ == '__main__':
    unittest.main()

import unittest
import os
import time
import shutil
import filecmp
import base64
import glob
import json


class ScreenShotTest(unittest.TestCase):
    def test_screenshots(self):
        generated_file_paths = glob.glob('build/Dev/Cpp/Test/Release/*.png')

        for path in generated_file_paths:
            name = os.path.basename(path)
            self.assertTrue(filecmp.cmp('TestData/Tests/Windows/' + name, path), name + ' is not equal')


if __name__ == '__main__':
    unittest.main()

Improve a script to test

import sys
import unittest
import os
import time
import shutil
import filecmp
import base64
import glob
import json


class ScreenShotTest(unittest.TestCase):
    def test_screenshots(self):
        generated_file_paths = glob.glob('build/Dev/Cpp/Test/Release/*.png')

        success = True

        for path in generated_file_paths:
            name = os.path.basename(path)
            test_data_path = 'TestData/Tests/Windows/' + name
            if os.path.exists(test_data_path):
                is_same = filecmp.cmp(test_data_path, path)
                if not is_same:
                    print(f'{name} is not equal.')
                    success = False
            else:
                print(f'{test_data_path} is not found.')
                success = False

        self.assertTrue(success)


if __name__ == '__main__':
    unittest.main()

<commit_before>import unittest
import os
import time
import shutil
import filecmp
import base64
import glob
import json


class ScreenShotTest(unittest.TestCase):
    def test_screenshots(self):
        generated_file_paths = glob.glob('build/Dev/Cpp/Test/Release/*.png')

        for path in generated_file_paths:
            name = os.path.basename(path)
            self.assertTrue(filecmp.cmp('TestData/Tests/Windows/' + name, path), name + ' is not equal')


if __name__ == '__main__':
    unittest.main()
<commit_msg>Improve a script to test<commit_after>import sys
import unittest
import os
import time
import shutil
import filecmp
import base64
import glob
import json


class ScreenShotTest(unittest.TestCase):
    def test_screenshots(self):
        generated_file_paths = glob.glob('build/Dev/Cpp/Test/Release/*.png')

        success = True

        for path in generated_file_paths:
            name = os.path.basename(path)
            test_data_path = 'TestData/Tests/Windows/' + name
            if os.path.exists(test_data_path):
                is_same = filecmp.cmp(test_data_path, path)
                if not is_same:
                    print(f'{name} is not equal.')
                    success = False
            else:
                print(f'{test_data_path} is not found.')
                success = False

        self.assertTrue(success)


if __name__ == '__main__':
    unittest.main()
01d9496eb74e5536bf953b0710673ec590061622
shuup/admin/modules/service_providers/views/_list.py
shuup/admin/modules/service_providers/views/_list.py
# -*- coding: utf-8 -*- # This file is part of Shuup. # # Copyright (c) 2012-2016, Shoop Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. from __future__ import unicode_literals from django.utils.translation import ugettext_lazy as _ from shuup.admin.utils.picotable import Column, TextFilter from shuup.admin.utils.views import PicotableListView from shuup.core.models import ServiceProvider class ServiceProviderListView(PicotableListView): model = ServiceProvider columns = [ Column( "name", _("Name"), sort_field="base_translations__name", filter_config=TextFilter( filter_field="base_translations__name", placeholder=_("Filter by name..."))), Column("type", _(u"Type"), display="get_type_display", sortable=False), ] def get_type_display(self, instance): return instance._meta.verbose_name.capitalize() def get_object_abstract(self, instance, item): return [ {"text": "%s" % instance, "class": "header"}, {"text": self.get_type_display(instance)}, ]
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals

from django.utils.translation import ugettext_lazy as _

from shuup.admin.utils.picotable import Column, TextFilter
from shuup.admin.utils.views import PicotableListView
from shuup.core.models import ServiceProvider


class ServiceProviderListView(PicotableListView):
    model = ServiceProvider
    default_columns = [
        Column(
            "name", _("Name"), sort_field="base_translations__name",
            filter_config=TextFilter(
                filter_field="base_translations__name",
                placeholder=_("Filter by name..."))),
        Column("type", _(u"Type"), display="get_type_display", sortable=False),
    ]

    def get_type_display(self, instance):
        return instance._meta.verbose_name.capitalize()

    def get_object_abstract(self, instance, item):
        return [
            {"text": "%s" % instance, "class": "header"},
            {"text": self.get_type_display(instance)},
        ]
Modify service_providers for dynamic columns
Modify service_providers for dynamic columns

Refs SH-64
Python
agpl-3.0
suutari-ai/shoop,suutari-ai/shoop,shoopio/shoop,suutari/shoop,shoopio/shoop,shawnadelic/shuup,shawnadelic/shuup,suutari-ai/shoop,shoopio/shoop,suutari/shoop,shawnadelic/shuup,suutari/shoop
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals

from django.utils.translation import ugettext_lazy as _

from shuup.admin.utils.picotable import Column, TextFilter
from shuup.admin.utils.views import PicotableListView
from shuup.core.models import ServiceProvider


class ServiceProviderListView(PicotableListView):
    model = ServiceProvider
    columns = [
        Column(
            "name", _("Name"), sort_field="base_translations__name",
            filter_config=TextFilter(
                filter_field="base_translations__name",
                placeholder=_("Filter by name..."))),
        Column("type", _(u"Type"), display="get_type_display", sortable=False),
    ]

    def get_type_display(self, instance):
        return instance._meta.verbose_name.capitalize()

    def get_object_abstract(self, instance, item):
        return [
            {"text": "%s" % instance, "class": "header"},
            {"text": self.get_type_display(instance)},
        ]

Modify service_providers for dynamic columns

Refs SH-64
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals

from django.utils.translation import ugettext_lazy as _

from shuup.admin.utils.picotable import Column, TextFilter
from shuup.admin.utils.views import PicotableListView
from shuup.core.models import ServiceProvider


class ServiceProviderListView(PicotableListView):
    model = ServiceProvider
    default_columns = [
        Column(
            "name", _("Name"), sort_field="base_translations__name",
            filter_config=TextFilter(
                filter_field="base_translations__name",
                placeholder=_("Filter by name..."))),
        Column("type", _(u"Type"), display="get_type_display", sortable=False),
    ]

    def get_type_display(self, instance):
        return instance._meta.verbose_name.capitalize()

    def get_object_abstract(self, instance, item):
        return [
            {"text": "%s" % instance, "class": "header"},
            {"text": self.get_type_display(instance)},
        ]
<commit_before># -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals

from django.utils.translation import ugettext_lazy as _

from shuup.admin.utils.picotable import Column, TextFilter
from shuup.admin.utils.views import PicotableListView
from shuup.core.models import ServiceProvider


class ServiceProviderListView(PicotableListView):
    model = ServiceProvider
    columns = [
        Column(
            "name", _("Name"), sort_field="base_translations__name",
            filter_config=TextFilter(
                filter_field="base_translations__name",
                placeholder=_("Filter by name..."))),
        Column("type", _(u"Type"), display="get_type_display", sortable=False),
    ]

    def get_type_display(self, instance):
        return instance._meta.verbose_name.capitalize()

    def get_object_abstract(self, instance, item):
        return [
            {"text": "%s" % instance, "class": "header"},
            {"text": self.get_type_display(instance)},
        ]
<commit_msg>Modify service_providers for dynamic columns

Refs SH-64<commit_after>
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals

from django.utils.translation import ugettext_lazy as _

from shuup.admin.utils.picotable import Column, TextFilter
from shuup.admin.utils.views import PicotableListView
from shuup.core.models import ServiceProvider


class ServiceProviderListView(PicotableListView):
    model = ServiceProvider
    default_columns = [
        Column(
            "name", _("Name"), sort_field="base_translations__name",
            filter_config=TextFilter(
                filter_field="base_translations__name",
                placeholder=_("Filter by name..."))),
        Column("type", _(u"Type"), display="get_type_display", sortable=False),
    ]

    def get_type_display(self, instance):
        return instance._meta.verbose_name.capitalize()

    def get_object_abstract(self, instance, item):
        return [
            {"text": "%s" % instance, "class": "header"},
            {"text": self.get_type_display(instance)},
        ]
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals

from django.utils.translation import ugettext_lazy as _

from shuup.admin.utils.picotable import Column, TextFilter
from shuup.admin.utils.views import PicotableListView
from shuup.core.models import ServiceProvider


class ServiceProviderListView(PicotableListView):
    model = ServiceProvider
    columns = [
        Column(
            "name", _("Name"), sort_field="base_translations__name",
            filter_config=TextFilter(
                filter_field="base_translations__name",
                placeholder=_("Filter by name..."))),
        Column("type", _(u"Type"), display="get_type_display", sortable=False),
    ]

    def get_type_display(self, instance):
        return instance._meta.verbose_name.capitalize()

    def get_object_abstract(self, instance, item):
        return [
            {"text": "%s" % instance, "class": "header"},
            {"text": self.get_type_display(instance)},
        ]

Modify service_providers for dynamic columns

Refs SH-64

# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals

from django.utils.translation import ugettext_lazy as _

from shuup.admin.utils.picotable import Column, TextFilter
from shuup.admin.utils.views import PicotableListView
from shuup.core.models import ServiceProvider


class ServiceProviderListView(PicotableListView):
    model = ServiceProvider
    default_columns = [
        Column(
            "name", _("Name"), sort_field="base_translations__name",
            filter_config=TextFilter(
                filter_field="base_translations__name",
                placeholder=_("Filter by name..."))),
        Column("type", _(u"Type"), display="get_type_display", sortable=False),
    ]

    def get_type_display(self, instance):
        return instance._meta.verbose_name.capitalize()

    def get_object_abstract(self, instance, item):
        return [
            {"text": "%s" % instance, "class": "header"},
            {"text": self.get_type_display(instance)},
        ]
<commit_before># -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals

from django.utils.translation import ugettext_lazy as _

from shuup.admin.utils.picotable import Column, TextFilter
from shuup.admin.utils.views import PicotableListView
from shuup.core.models import ServiceProvider


class ServiceProviderListView(PicotableListView):
    model = ServiceProvider
    columns = [
        Column(
            "name", _("Name"), sort_field="base_translations__name",
            filter_config=TextFilter(
                filter_field="base_translations__name",
                placeholder=_("Filter by name..."))),
        Column("type", _(u"Type"), display="get_type_display", sortable=False),
    ]

    def get_type_display(self, instance):
        return instance._meta.verbose_name.capitalize()

    def get_object_abstract(self, instance, item):
        return [
            {"text": "%s" % instance, "class": "header"},
            {"text": self.get_type_display(instance)},
        ]
<commit_msg>Modify service_providers for dynamic columns

Refs SH-64<commit_after># -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals

from django.utils.translation import ugettext_lazy as _

from shuup.admin.utils.picotable import Column, TextFilter
from shuup.admin.utils.views import PicotableListView
from shuup.core.models import ServiceProvider


class ServiceProviderListView(PicotableListView):
    model = ServiceProvider
    default_columns = [
        Column(
            "name", _("Name"), sort_field="base_translations__name",
            filter_config=TextFilter(
                filter_field="base_translations__name",
                placeholder=_("Filter by name..."))),
        Column("type", _(u"Type"), display="get_type_display", sortable=False),
    ]

    def get_type_display(self, instance):
        return instance._meta.verbose_name.capitalize()

    def get_object_abstract(self, instance, item):
        return [
            {"text": "%s" % instance, "class": "header"},
            {"text": self.get_type_display(instance)},
        ]