commit
stringlengths
40
40
old_file
stringlengths
4
118
new_file
stringlengths
4
118
old_contents
stringlengths
0
2.94k
new_contents
stringlengths
1
4.43k
subject
stringlengths
15
444
message
stringlengths
16
3.45k
lang
stringclasses
1 value
license
stringclasses
13 values
repos
stringlengths
5
43.2k
prompt
stringlengths
17
4.58k
response
stringlengths
1
4.43k
prompt_tagged
stringlengths
58
4.62k
response_tagged
stringlengths
1
4.43k
text
stringlengths
132
7.29k
text_tagged
stringlengths
173
7.33k
64d838897a38398074433ce1e6a50393fc414a03
test_project/urls.py
test_project/urls.py
from django.conf.urls.defaults import patterns, include, url import settings import os from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', url(r'^$', 'django.views.generic.simple.direct_to_template', {'template': 'index.html'} ), url(r'^test/', 'test_app.views.test_index'), url(r'^admin/', include(admin.site.urls)), ) if settings.DEBUG: urlpatterns += patterns( '', (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': os.path.join( os.path.dirname(settings.__file__), 'static')}))
try: from django.conf.urls.defaults import patterns, include, url except ImportError: from django.conf.urls import patterns, url, include import settings import os from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', url(r'^$', 'django.views.generic.simple.direct_to_template', {'template': 'index.html'} ), url(r'^test/', 'test_app.views.test_index'), url(r'^admin/', include(admin.site.urls)), ) if settings.DEBUG: urlpatterns += patterns( '', (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': os.path.join( os.path.dirname(settings.__file__), 'static')}))
Fix for 1.6 & 1.7
Fix for 1.6 & 1.7
Python
mit
nikolas/django-interval-field,mpasternak/django-interval-field,mpasternak/django-interval-field,nikolas/django-interval-field
from django.conf.urls.defaults import patterns, include, url import settings import os from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', url(r'^$', 'django.views.generic.simple.direct_to_template', {'template': 'index.html'} ), url(r'^test/', 'test_app.views.test_index'), url(r'^admin/', include(admin.site.urls)), ) if settings.DEBUG: urlpatterns += patterns( '', (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': os.path.join( os.path.dirname(settings.__file__), 'static')})) Fix for 1.6 & 1.7
try: from django.conf.urls.defaults import patterns, include, url except ImportError: from django.conf.urls import patterns, url, include import settings import os from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', url(r'^$', 'django.views.generic.simple.direct_to_template', {'template': 'index.html'} ), url(r'^test/', 'test_app.views.test_index'), url(r'^admin/', include(admin.site.urls)), ) if settings.DEBUG: urlpatterns += patterns( '', (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': os.path.join( os.path.dirname(settings.__file__), 'static')}))
<commit_before>from django.conf.urls.defaults import patterns, include, url import settings import os from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', url(r'^$', 'django.views.generic.simple.direct_to_template', {'template': 'index.html'} ), url(r'^test/', 'test_app.views.test_index'), url(r'^admin/', include(admin.site.urls)), ) if settings.DEBUG: urlpatterns += patterns( '', (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': os.path.join( os.path.dirname(settings.__file__), 'static')})) <commit_msg>Fix for 1.6 & 1.7<commit_after>
try: from django.conf.urls.defaults import patterns, include, url except ImportError: from django.conf.urls import patterns, url, include import settings import os from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', url(r'^$', 'django.views.generic.simple.direct_to_template', {'template': 'index.html'} ), url(r'^test/', 'test_app.views.test_index'), url(r'^admin/', include(admin.site.urls)), ) if settings.DEBUG: urlpatterns += patterns( '', (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': os.path.join( os.path.dirname(settings.__file__), 'static')}))
from django.conf.urls.defaults import patterns, include, url import settings import os from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', url(r'^$', 'django.views.generic.simple.direct_to_template', {'template': 'index.html'} ), url(r'^test/', 'test_app.views.test_index'), url(r'^admin/', include(admin.site.urls)), ) if settings.DEBUG: urlpatterns += patterns( '', (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': os.path.join( os.path.dirname(settings.__file__), 'static')})) Fix for 1.6 & 1.7try: from django.conf.urls.defaults import patterns, include, url except ImportError: from django.conf.urls import patterns, url, include import settings import os from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', url(r'^$', 'django.views.generic.simple.direct_to_template', {'template': 'index.html'} ), url(r'^test/', 'test_app.views.test_index'), url(r'^admin/', include(admin.site.urls)), ) if settings.DEBUG: urlpatterns += patterns( '', (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': os.path.join( os.path.dirname(settings.__file__), 'static')}))
<commit_before>from django.conf.urls.defaults import patterns, include, url import settings import os from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', url(r'^$', 'django.views.generic.simple.direct_to_template', {'template': 'index.html'} ), url(r'^test/', 'test_app.views.test_index'), url(r'^admin/', include(admin.site.urls)), ) if settings.DEBUG: urlpatterns += patterns( '', (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': os.path.join( os.path.dirname(settings.__file__), 'static')})) <commit_msg>Fix for 1.6 & 1.7<commit_after>try: from django.conf.urls.defaults import patterns, include, url except ImportError: from django.conf.urls import patterns, url, include import settings import os from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', url(r'^$', 'django.views.generic.simple.direct_to_template', {'template': 'index.html'} ), url(r'^test/', 'test_app.views.test_index'), url(r'^admin/', include(admin.site.urls)), ) if settings.DEBUG: urlpatterns += patterns( '', (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': os.path.join( os.path.dirname(settings.__file__), 'static')}))
935552df10dc3a17cf3edb897e83861bbeaae803
tests/test_thread.py
tests/test_thread.py
import os import unittest from common import gobject, gtk, testhelper # Enable PyGILState API os.environ['PYGTK_USE_GIL_STATE_API'] = '' gobject.threads_init() class TestThread(unittest.TestCase): def from_thread_cb(self, test, enum): assert test == self.obj assert int(enum) == 0 assert type(enum) != int def idle_cb(self): self.obj = testhelper.get_test_thread() self.obj.connect('from-thread', self.from_thread_cb) self.obj.emit('emit-signal') def testExtensionModule(self): gtk.idle_add(self.idle_cb) gtk.timeout_add(50, self.timeout_cb) gtk.main() def timeout_cb(self): gtk.main_quit()
import os import unittest from common import gobject, gtk, testhelper # Enable PyGILState API os.environ['PYGTK_USE_GIL_STATE_API'] = '' gobject.threads_init() class TestThread(unittest.TestCase): def from_thread_cb(self, test, enum): assert test == self.obj assert int(enum) == 0 assert type(enum) != int def idle_cb(self): self.obj = testhelper.get_test_thread() self.obj.connect('from-thread', self.from_thread_cb) self.obj.emit('emit-signal') def testExtensionModule(self): gobject.idle_add(self.idle_cb) gobject.timeout_add(50, self.timeout_cb) gtk.main() def timeout_cb(self): gtk.main_quit()
Add pygtk_postinstall.py Updated Deprecate gtk.idle_add and friends. Merge
Add pygtk_postinstall.py Updated Deprecate gtk.idle_add and friends. Merge * Makefile.am: Add pygtk_postinstall.py * docs/random/missing-symbols: Updated * gtk/__init__.py: Deprecate gtk.idle_add and friends. * gtk/gtk.defs: Merge in 2.6 api, for GtkLabel functions, thanks to Gian Mario Tagliaretti, fixes bug #163296 * tests/test_thread.py: Don't use gtk.idle_add
Python
lgpl-2.1
choeger/pygobject-cmake,GNOME/pygobject,nzjrs/pygobject,atizo/pygobject,pexip/pygobject,jdahlin/pygobject,davibe/pygobject,Distrotech/pygobject,sfeltman/pygobject,pexip/pygobject,choeger/pygobject-cmake,MathieuDuponchelle/pygobject,alexef/pygobject,alexef/pygobject,alexef/pygobject,sfeltman/pygobject,davidmalcolm/pygobject,MathieuDuponchelle/pygobject,davibe/pygobject,Distrotech/pygobject,atizo/pygobject,MathieuDuponchelle/pygobject,atizo/pygobject,Distrotech/pygobject,sfeltman/pygobject,thiblahute/pygobject,thiblahute/pygobject,davidmalcolm/pygobject,choeger/pygobject-cmake,davidmalcolm/pygobject,pexip/pygobject,davibe/pygobject,nzjrs/pygobject,thiblahute/pygobject,jdahlin/pygobject,nzjrs/pygobject,davibe/pygobject,GNOME/pygobject,GNOME/pygobject,Distrotech/pygobject,jdahlin/pygobject
import os import unittest from common import gobject, gtk, testhelper # Enable PyGILState API os.environ['PYGTK_USE_GIL_STATE_API'] = '' gobject.threads_init() class TestThread(unittest.TestCase): def from_thread_cb(self, test, enum): assert test == self.obj assert int(enum) == 0 assert type(enum) != int def idle_cb(self): self.obj = testhelper.get_test_thread() self.obj.connect('from-thread', self.from_thread_cb) self.obj.emit('emit-signal') def testExtensionModule(self): gtk.idle_add(self.idle_cb) gtk.timeout_add(50, self.timeout_cb) gtk.main() def timeout_cb(self): gtk.main_quit() Add pygtk_postinstall.py Updated Deprecate gtk.idle_add and friends. Merge * Makefile.am: Add pygtk_postinstall.py * docs/random/missing-symbols: Updated * gtk/__init__.py: Deprecate gtk.idle_add and friends. * gtk/gtk.defs: Merge in 2.6 api, for GtkLabel functions, thanks to Gian Mario Tagliaretti, fixes bug #163296 * tests/test_thread.py: Don't use gtk.idle_add
import os import unittest from common import gobject, gtk, testhelper # Enable PyGILState API os.environ['PYGTK_USE_GIL_STATE_API'] = '' gobject.threads_init() class TestThread(unittest.TestCase): def from_thread_cb(self, test, enum): assert test == self.obj assert int(enum) == 0 assert type(enum) != int def idle_cb(self): self.obj = testhelper.get_test_thread() self.obj.connect('from-thread', self.from_thread_cb) self.obj.emit('emit-signal') def testExtensionModule(self): gobject.idle_add(self.idle_cb) gobject.timeout_add(50, self.timeout_cb) gtk.main() def timeout_cb(self): gtk.main_quit()
<commit_before>import os import unittest from common import gobject, gtk, testhelper # Enable PyGILState API os.environ['PYGTK_USE_GIL_STATE_API'] = '' gobject.threads_init() class TestThread(unittest.TestCase): def from_thread_cb(self, test, enum): assert test == self.obj assert int(enum) == 0 assert type(enum) != int def idle_cb(self): self.obj = testhelper.get_test_thread() self.obj.connect('from-thread', self.from_thread_cb) self.obj.emit('emit-signal') def testExtensionModule(self): gtk.idle_add(self.idle_cb) gtk.timeout_add(50, self.timeout_cb) gtk.main() def timeout_cb(self): gtk.main_quit() <commit_msg>Add pygtk_postinstall.py Updated Deprecate gtk.idle_add and friends. Merge * Makefile.am: Add pygtk_postinstall.py * docs/random/missing-symbols: Updated * gtk/__init__.py: Deprecate gtk.idle_add and friends. * gtk/gtk.defs: Merge in 2.6 api, for GtkLabel functions, thanks to Gian Mario Tagliaretti, fixes bug #163296 * tests/test_thread.py: Don't use gtk.idle_add<commit_after>
import os import unittest from common import gobject, gtk, testhelper # Enable PyGILState API os.environ['PYGTK_USE_GIL_STATE_API'] = '' gobject.threads_init() class TestThread(unittest.TestCase): def from_thread_cb(self, test, enum): assert test == self.obj assert int(enum) == 0 assert type(enum) != int def idle_cb(self): self.obj = testhelper.get_test_thread() self.obj.connect('from-thread', self.from_thread_cb) self.obj.emit('emit-signal') def testExtensionModule(self): gobject.idle_add(self.idle_cb) gobject.timeout_add(50, self.timeout_cb) gtk.main() def timeout_cb(self): gtk.main_quit()
import os import unittest from common import gobject, gtk, testhelper # Enable PyGILState API os.environ['PYGTK_USE_GIL_STATE_API'] = '' gobject.threads_init() class TestThread(unittest.TestCase): def from_thread_cb(self, test, enum): assert test == self.obj assert int(enum) == 0 assert type(enum) != int def idle_cb(self): self.obj = testhelper.get_test_thread() self.obj.connect('from-thread', self.from_thread_cb) self.obj.emit('emit-signal') def testExtensionModule(self): gtk.idle_add(self.idle_cb) gtk.timeout_add(50, self.timeout_cb) gtk.main() def timeout_cb(self): gtk.main_quit() Add pygtk_postinstall.py Updated Deprecate gtk.idle_add and friends. Merge * Makefile.am: Add pygtk_postinstall.py * docs/random/missing-symbols: Updated * gtk/__init__.py: Deprecate gtk.idle_add and friends. * gtk/gtk.defs: Merge in 2.6 api, for GtkLabel functions, thanks to Gian Mario Tagliaretti, fixes bug #163296 * tests/test_thread.py: Don't use gtk.idle_addimport os import unittest from common import gobject, gtk, testhelper # Enable PyGILState API os.environ['PYGTK_USE_GIL_STATE_API'] = '' gobject.threads_init() class TestThread(unittest.TestCase): def from_thread_cb(self, test, enum): assert test == self.obj assert int(enum) == 0 assert type(enum) != int def idle_cb(self): self.obj = testhelper.get_test_thread() self.obj.connect('from-thread', self.from_thread_cb) self.obj.emit('emit-signal') def testExtensionModule(self): gobject.idle_add(self.idle_cb) gobject.timeout_add(50, self.timeout_cb) gtk.main() def timeout_cb(self): gtk.main_quit()
<commit_before>import os import unittest from common import gobject, gtk, testhelper # Enable PyGILState API os.environ['PYGTK_USE_GIL_STATE_API'] = '' gobject.threads_init() class TestThread(unittest.TestCase): def from_thread_cb(self, test, enum): assert test == self.obj assert int(enum) == 0 assert type(enum) != int def idle_cb(self): self.obj = testhelper.get_test_thread() self.obj.connect('from-thread', self.from_thread_cb) self.obj.emit('emit-signal') def testExtensionModule(self): gtk.idle_add(self.idle_cb) gtk.timeout_add(50, self.timeout_cb) gtk.main() def timeout_cb(self): gtk.main_quit() <commit_msg>Add pygtk_postinstall.py Updated Deprecate gtk.idle_add and friends. Merge * Makefile.am: Add pygtk_postinstall.py * docs/random/missing-symbols: Updated * gtk/__init__.py: Deprecate gtk.idle_add and friends. * gtk/gtk.defs: Merge in 2.6 api, for GtkLabel functions, thanks to Gian Mario Tagliaretti, fixes bug #163296 * tests/test_thread.py: Don't use gtk.idle_add<commit_after>import os import unittest from common import gobject, gtk, testhelper # Enable PyGILState API os.environ['PYGTK_USE_GIL_STATE_API'] = '' gobject.threads_init() class TestThread(unittest.TestCase): def from_thread_cb(self, test, enum): assert test == self.obj assert int(enum) == 0 assert type(enum) != int def idle_cb(self): self.obj = testhelper.get_test_thread() self.obj.connect('from-thread', self.from_thread_cb) self.obj.emit('emit-signal') def testExtensionModule(self): gobject.idle_add(self.idle_cb) gobject.timeout_add(50, self.timeout_cb) gtk.main() def timeout_cb(self): gtk.main_quit()
6621bef05b2d4cb3fc138622194fe39765ebcb7c
tests/unit/helper.py
tests/unit/helper.py
import mock import github3 import unittest MockedSession = mock.create_autospec(github3.session.GitHubSession) def build_url(self, *args, **kwargs): # We want to assert what is happening with the actual calls to the # Internet. We can proxy this. return github3.session.GitHubSession().build_url(*args, **kwargs) class UnitHelper(unittest.TestCase): # Sub-classes must assign the class to this during definition described_class = None # Sub-classes must also assign a dictionary to this during definition example_data = {} def create_session_mock(self, *args): session = MockedSession() base_attrs = ['headers', 'auth'] attrs = dict( (key, mock.Mock()) for key in set(args).union(base_attrs) ) session.configure_mock(**attrs) session.delete.return_value = None session.get.return_value = None session.patch.return_value = None session.post.return_value = None session.put.return_value = None return session def setUp(self): self.session = self.create_session_mock() self.instance = self.described_class(self.example_data, self.session) # Proxy the build_url method to the class so it can build the URL and # we can assert things about the call that will be attempted to the # internet self.described_class._build_url = build_url
import mock import github3 import unittest def build_url(self, *args, **kwargs): # We want to assert what is happening with the actual calls to the # Internet. We can proxy this. return github3.session.GitHubSession().build_url(*args, **kwargs) class UnitHelper(unittest.TestCase): # Sub-classes must assign the class to this during definition described_class = None # Sub-classes must also assign a dictionary to this during definition example_data = {} def create_mocked_session(self): MockedSession = mock.create_autospec(github3.session.GitHubSession) return MockedSession() def create_session_mock(self, *args): session = self.create_mocked_session() base_attrs = ['headers', 'auth'] attrs = dict( (key, mock.Mock()) for key in set(args).union(base_attrs) ) session.configure_mock(**attrs) session.delete.return_value = None session.get.return_value = None session.patch.return_value = None session.post.return_value = None session.put.return_value = None return session def setUp(self): self.session = self.create_session_mock() self.instance = self.described_class(self.example_data, self.session) # Proxy the build_url method to the class so it can build the URL and # we can assert things about the call that will be attempted to the # internet self.described_class._build_url = build_url
Fix the issue where the mock is persisting calls
Fix the issue where the mock is persisting calls
Python
bsd-3-clause
krxsky/github3.py,jim-minter/github3.py,agamdua/github3.py,degustaf/github3.py,christophelec/github3.py,wbrefvem/github3.py,h4ck3rm1k3/github3.py,sigmavirus24/github3.py,ueg1990/github3.py,icio/github3.py,balloob/github3.py,itsmemattchung/github3.py
import mock import github3 import unittest MockedSession = mock.create_autospec(github3.session.GitHubSession) def build_url(self, *args, **kwargs): # We want to assert what is happening with the actual calls to the # Internet. We can proxy this. return github3.session.GitHubSession().build_url(*args, **kwargs) class UnitHelper(unittest.TestCase): # Sub-classes must assign the class to this during definition described_class = None # Sub-classes must also assign a dictionary to this during definition example_data = {} def create_session_mock(self, *args): session = MockedSession() base_attrs = ['headers', 'auth'] attrs = dict( (key, mock.Mock()) for key in set(args).union(base_attrs) ) session.configure_mock(**attrs) session.delete.return_value = None session.get.return_value = None session.patch.return_value = None session.post.return_value = None session.put.return_value = None return session def setUp(self): self.session = self.create_session_mock() self.instance = self.described_class(self.example_data, self.session) # Proxy the build_url method to the class so it can build the URL and # we can assert things about the call that will be attempted to the # internet self.described_class._build_url = build_url Fix the issue where the mock is persisting calls
import mock import github3 import unittest def build_url(self, *args, **kwargs): # We want to assert what is happening with the actual calls to the # Internet. We can proxy this. return github3.session.GitHubSession().build_url(*args, **kwargs) class UnitHelper(unittest.TestCase): # Sub-classes must assign the class to this during definition described_class = None # Sub-classes must also assign a dictionary to this during definition example_data = {} def create_mocked_session(self): MockedSession = mock.create_autospec(github3.session.GitHubSession) return MockedSession() def create_session_mock(self, *args): session = self.create_mocked_session() base_attrs = ['headers', 'auth'] attrs = dict( (key, mock.Mock()) for key in set(args).union(base_attrs) ) session.configure_mock(**attrs) session.delete.return_value = None session.get.return_value = None session.patch.return_value = None session.post.return_value = None session.put.return_value = None return session def setUp(self): self.session = self.create_session_mock() self.instance = self.described_class(self.example_data, self.session) # Proxy the build_url method to the class so it can build the URL and # we can assert things about the call that will be attempted to the # internet self.described_class._build_url = build_url
<commit_before>import mock import github3 import unittest MockedSession = mock.create_autospec(github3.session.GitHubSession) def build_url(self, *args, **kwargs): # We want to assert what is happening with the actual calls to the # Internet. We can proxy this. return github3.session.GitHubSession().build_url(*args, **kwargs) class UnitHelper(unittest.TestCase): # Sub-classes must assign the class to this during definition described_class = None # Sub-classes must also assign a dictionary to this during definition example_data = {} def create_session_mock(self, *args): session = MockedSession() base_attrs = ['headers', 'auth'] attrs = dict( (key, mock.Mock()) for key in set(args).union(base_attrs) ) session.configure_mock(**attrs) session.delete.return_value = None session.get.return_value = None session.patch.return_value = None session.post.return_value = None session.put.return_value = None return session def setUp(self): self.session = self.create_session_mock() self.instance = self.described_class(self.example_data, self.session) # Proxy the build_url method to the class so it can build the URL and # we can assert things about the call that will be attempted to the # internet self.described_class._build_url = build_url <commit_msg>Fix the issue where the mock is persisting calls<commit_after>
import mock import github3 import unittest def build_url(self, *args, **kwargs): # We want to assert what is happening with the actual calls to the # Internet. We can proxy this. return github3.session.GitHubSession().build_url(*args, **kwargs) class UnitHelper(unittest.TestCase): # Sub-classes must assign the class to this during definition described_class = None # Sub-classes must also assign a dictionary to this during definition example_data = {} def create_mocked_session(self): MockedSession = mock.create_autospec(github3.session.GitHubSession) return MockedSession() def create_session_mock(self, *args): session = self.create_mocked_session() base_attrs = ['headers', 'auth'] attrs = dict( (key, mock.Mock()) for key in set(args).union(base_attrs) ) session.configure_mock(**attrs) session.delete.return_value = None session.get.return_value = None session.patch.return_value = None session.post.return_value = None session.put.return_value = None return session def setUp(self): self.session = self.create_session_mock() self.instance = self.described_class(self.example_data, self.session) # Proxy the build_url method to the class so it can build the URL and # we can assert things about the call that will be attempted to the # internet self.described_class._build_url = build_url
import mock import github3 import unittest MockedSession = mock.create_autospec(github3.session.GitHubSession) def build_url(self, *args, **kwargs): # We want to assert what is happening with the actual calls to the # Internet. We can proxy this. return github3.session.GitHubSession().build_url(*args, **kwargs) class UnitHelper(unittest.TestCase): # Sub-classes must assign the class to this during definition described_class = None # Sub-classes must also assign a dictionary to this during definition example_data = {} def create_session_mock(self, *args): session = MockedSession() base_attrs = ['headers', 'auth'] attrs = dict( (key, mock.Mock()) for key in set(args).union(base_attrs) ) session.configure_mock(**attrs) session.delete.return_value = None session.get.return_value = None session.patch.return_value = None session.post.return_value = None session.put.return_value = None return session def setUp(self): self.session = self.create_session_mock() self.instance = self.described_class(self.example_data, self.session) # Proxy the build_url method to the class so it can build the URL and # we can assert things about the call that will be attempted to the # internet self.described_class._build_url = build_url Fix the issue where the mock is persisting callsimport mock import github3 import unittest def build_url(self, *args, **kwargs): # We want to assert what is happening with the actual calls to the # Internet. We can proxy this. return github3.session.GitHubSession().build_url(*args, **kwargs) class UnitHelper(unittest.TestCase): # Sub-classes must assign the class to this during definition described_class = None # Sub-classes must also assign a dictionary to this during definition example_data = {} def create_mocked_session(self): MockedSession = mock.create_autospec(github3.session.GitHubSession) return MockedSession() def create_session_mock(self, *args): session = self.create_mocked_session() base_attrs = ['headers', 'auth'] attrs = dict( (key, mock.Mock()) for key in set(args).union(base_attrs) ) session.configure_mock(**attrs) session.delete.return_value = None session.get.return_value = None session.patch.return_value = None session.post.return_value = None session.put.return_value = None return session def setUp(self): self.session = self.create_session_mock() self.instance = self.described_class(self.example_data, self.session) # Proxy the build_url method to the class so it can build the URL and # we can assert things about the call that will be attempted to the # internet self.described_class._build_url = build_url
<commit_before>import mock import github3 import unittest MockedSession = mock.create_autospec(github3.session.GitHubSession) def build_url(self, *args, **kwargs): # We want to assert what is happening with the actual calls to the # Internet. We can proxy this. return github3.session.GitHubSession().build_url(*args, **kwargs) class UnitHelper(unittest.TestCase): # Sub-classes must assign the class to this during definition described_class = None # Sub-classes must also assign a dictionary to this during definition example_data = {} def create_session_mock(self, *args): session = MockedSession() base_attrs = ['headers', 'auth'] attrs = dict( (key, mock.Mock()) for key in set(args).union(base_attrs) ) session.configure_mock(**attrs) session.delete.return_value = None session.get.return_value = None session.patch.return_value = None session.post.return_value = None session.put.return_value = None return session def setUp(self): self.session = self.create_session_mock() self.instance = self.described_class(self.example_data, self.session) # Proxy the build_url method to the class so it can build the URL and # we can assert things about the call that will be attempted to the # internet self.described_class._build_url = build_url <commit_msg>Fix the issue where the mock is persisting calls<commit_after>import mock import github3 import unittest def build_url(self, *args, **kwargs): # We want to assert what is happening with the actual calls to the # Internet. We can proxy this. return github3.session.GitHubSession().build_url(*args, **kwargs) class UnitHelper(unittest.TestCase): # Sub-classes must assign the class to this during definition described_class = None # Sub-classes must also assign a dictionary to this during definition example_data = {} def create_mocked_session(self): MockedSession = mock.create_autospec(github3.session.GitHubSession) return MockedSession() def create_session_mock(self, *args): session = self.create_mocked_session() base_attrs = ['headers', 'auth'] attrs = dict( (key, mock.Mock()) for key in set(args).union(base_attrs) ) session.configure_mock(**attrs) session.delete.return_value = None session.get.return_value = None session.patch.return_value = None session.post.return_value = None session.put.return_value = None return session def setUp(self): self.session = self.create_session_mock() self.instance = self.described_class(self.example_data, self.session) # Proxy the build_url method to the class so it can build the URL and # we can assert things about the call that will be attempted to the # internet self.described_class._build_url = build_url
ededa7c9c616ac97dd6ce8638c6b959a0c51663c
examples/oauth/jupyterhub_config.py
examples/oauth/jupyterhub_config.py
# Configuration file for Jupyter Hub c = get_config() # spawn with Docker c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner' # The docker instances need access to the Hub, so the default loopback port doesn't work: from IPython.utils.localinterfaces import public_ips c.JupyterHub.hub_ip = public_ips()[0] # OAuth with GitHub c.JupyterHub.authenticator_class = 'oauthenticator.GitHubOAuthenticator' c.Authenticator.whitelist = whitelist = set() c.Authenticator.admin_users = admin = set() import os join = os.path.join here = os.path.dirname(__file__) with open(join(here, 'userlist')) as f: for line in f: if not line: continue parts = line.split() name = parts[0] whitelist.add(name) if len(parts) > 1 and parts[1] == 'admin': admin.add(name) c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL'] # ssl config ssl = join(here, 'ssl') keyfile = join(ssl, 'ssl.key') certfile = join(ssl, 'ssl.cert') if os.path.exists(keyfile): c.JupyterHub.ssl_key = keyfile if os.path.exists(certfile): c.JupyterHub.ssl_cert = certfile
# Configuration file for Jupyter Hub c = get_config() # spawn with Docker c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner' # The docker instances need access to the Hub, so the default loopback port doesn't work: from jupyter_client.localinterfaces import public_ips c.JupyterHub.hub_ip = public_ips()[0] # OAuth with GitHub c.JupyterHub.authenticator_class = 'oauthenticator.GitHubOAuthenticator' c.Authenticator.whitelist = whitelist = set() c.Authenticator.admin_users = admin = set() import os join = os.path.join here = os.path.dirname(__file__) with open(join(here, 'userlist')) as f: for line in f: if not line: continue parts = line.split() name = parts[0] whitelist.add(name) if len(parts) > 1 and parts[1] == 'admin': admin.add(name) c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL'] # ssl config ssl = join(here, 'ssl') keyfile = join(ssl, 'ssl.key') certfile = join(ssl, 'ssl.cert') if os.path.exists(keyfile): c.JupyterHub.ssl_key = keyfile if os.path.exists(certfile): c.JupyterHub.ssl_cert = certfile
Replace legacy ipython import with jupyter_client
Replace legacy ipython import with jupyter_client
Python
bsd-3-clause
jhamrick/dockerspawner,jupyter/dockerspawner,quantopian/dockerspawner,Fokko/dockerspawner,Fokko/dockerspawner,minrk/dockerspawner,quantopian/dockerspawner,minrk/dockerspawner,jupyter/dockerspawner,jhamrick/dockerspawner
# Configuration file for Jupyter Hub c = get_config() # spawn with Docker c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner' # The docker instances need access to the Hub, so the default loopback port doesn't work: from IPython.utils.localinterfaces import public_ips c.JupyterHub.hub_ip = public_ips()[0] # OAuth with GitHub c.JupyterHub.authenticator_class = 'oauthenticator.GitHubOAuthenticator' c.Authenticator.whitelist = whitelist = set() c.Authenticator.admin_users = admin = set() import os join = os.path.join here = os.path.dirname(__file__) with open(join(here, 'userlist')) as f: for line in f: if not line: continue parts = line.split() name = parts[0] whitelist.add(name) if len(parts) > 1 and parts[1] == 'admin': admin.add(name) c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL'] # ssl config ssl = join(here, 'ssl') keyfile = join(ssl, 'ssl.key') certfile = join(ssl, 'ssl.cert') if os.path.exists(keyfile): c.JupyterHub.ssl_key = keyfile if os.path.exists(certfile): c.JupyterHub.ssl_cert = certfile Replace legacy ipython import with jupyter_client
# Configuration file for Jupyter Hub c = get_config() # spawn with Docker c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner' # The docker instances need access to the Hub, so the default loopback port doesn't work: from jupyter_client.localinterfaces import public_ips c.JupyterHub.hub_ip = public_ips()[0] # OAuth with GitHub c.JupyterHub.authenticator_class = 'oauthenticator.GitHubOAuthenticator' c.Authenticator.whitelist = whitelist = set() c.Authenticator.admin_users = admin = set() import os join = os.path.join here = os.path.dirname(__file__) with open(join(here, 'userlist')) as f: for line in f: if not line: continue parts = line.split() name = parts[0] whitelist.add(name) if len(parts) > 1 and parts[1] == 'admin': admin.add(name) c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL'] # ssl config ssl = join(here, 'ssl') keyfile = join(ssl, 'ssl.key') certfile = join(ssl, 'ssl.cert') if os.path.exists(keyfile): c.JupyterHub.ssl_key = keyfile if os.path.exists(certfile): c.JupyterHub.ssl_cert = certfile
<commit_before># Configuration file for Jupyter Hub c = get_config() # spawn with Docker c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner' # The docker instances need access to the Hub, so the default loopback port doesn't work: from IPython.utils.localinterfaces import public_ips c.JupyterHub.hub_ip = public_ips()[0] # OAuth with GitHub c.JupyterHub.authenticator_class = 'oauthenticator.GitHubOAuthenticator' c.Authenticator.whitelist = whitelist = set() c.Authenticator.admin_users = admin = set() import os join = os.path.join here = os.path.dirname(__file__) with open(join(here, 'userlist')) as f: for line in f: if not line: continue parts = line.split() name = parts[0] whitelist.add(name) if len(parts) > 1 and parts[1] == 'admin': admin.add(name) c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL'] # ssl config ssl = join(here, 'ssl') keyfile = join(ssl, 'ssl.key') certfile = join(ssl, 'ssl.cert') if os.path.exists(keyfile): c.JupyterHub.ssl_key = keyfile if os.path.exists(certfile): c.JupyterHub.ssl_cert = certfile <commit_msg>Replace legacy ipython import with jupyter_client<commit_after>
# Configuration file for Jupyter Hub c = get_config() # spawn with Docker c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner' # The docker instances need access to the Hub, so the default loopback port doesn't work: from jupyter_client.localinterfaces import public_ips c.JupyterHub.hub_ip = public_ips()[0] # OAuth with GitHub c.JupyterHub.authenticator_class = 'oauthenticator.GitHubOAuthenticator' c.Authenticator.whitelist = whitelist = set() c.Authenticator.admin_users = admin = set() import os join = os.path.join here = os.path.dirname(__file__) with open(join(here, 'userlist')) as f: for line in f: if not line: continue parts = line.split() name = parts[0] whitelist.add(name) if len(parts) > 1 and parts[1] == 'admin': admin.add(name) c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL'] # ssl config ssl = join(here, 'ssl') keyfile = join(ssl, 'ssl.key') certfile = join(ssl, 'ssl.cert') if os.path.exists(keyfile): c.JupyterHub.ssl_key = keyfile if os.path.exists(certfile): c.JupyterHub.ssl_cert = certfile
# Configuration file for Jupyter Hub c = get_config() # spawn with Docker c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner' # The docker instances need access to the Hub, so the default loopback port doesn't work: from IPython.utils.localinterfaces import public_ips c.JupyterHub.hub_ip = public_ips()[0] # OAuth with GitHub c.JupyterHub.authenticator_class = 'oauthenticator.GitHubOAuthenticator' c.Authenticator.whitelist = whitelist = set() c.Authenticator.admin_users = admin = set() import os join = os.path.join here = os.path.dirname(__file__) with open(join(here, 'userlist')) as f: for line in f: if not line: continue parts = line.split() name = parts[0] whitelist.add(name) if len(parts) > 1 and parts[1] == 'admin': admin.add(name) c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL'] # ssl config ssl = join(here, 'ssl') keyfile = join(ssl, 'ssl.key') certfile = join(ssl, 'ssl.cert') if os.path.exists(keyfile): c.JupyterHub.ssl_key = keyfile if os.path.exists(certfile): c.JupyterHub.ssl_cert = certfile Replace legacy ipython import with jupyter_client# Configuration file for Jupyter Hub c = get_config() # spawn with Docker c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner' # The docker instances need access to the Hub, so the default loopback port doesn't work: from jupyter_client.localinterfaces import public_ips c.JupyterHub.hub_ip = public_ips()[0] # OAuth with GitHub c.JupyterHub.authenticator_class = 'oauthenticator.GitHubOAuthenticator' c.Authenticator.whitelist = whitelist = set() c.Authenticator.admin_users = admin = set() import os join = os.path.join here = os.path.dirname(__file__) with open(join(here, 'userlist')) as f: for line in f: if not line: continue parts = line.split() name = parts[0] whitelist.add(name) if len(parts) > 1 and parts[1] == 'admin': admin.add(name) c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL'] # ssl config ssl = join(here, 'ssl') keyfile = join(ssl, 'ssl.key') certfile = join(ssl, 'ssl.cert') if os.path.exists(keyfile): c.JupyterHub.ssl_key = keyfile if os.path.exists(certfile): c.JupyterHub.ssl_cert = certfile
<commit_before># Configuration file for Jupyter Hub c = get_config() # spawn with Docker c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner' # The docker instances need access to the Hub, so the default loopback port doesn't work: from IPython.utils.localinterfaces import public_ips c.JupyterHub.hub_ip = public_ips()[0] # OAuth with GitHub c.JupyterHub.authenticator_class = 'oauthenticator.GitHubOAuthenticator' c.Authenticator.whitelist = whitelist = set() c.Authenticator.admin_users = admin = set() import os join = os.path.join here = os.path.dirname(__file__) with open(join(here, 'userlist')) as f: for line in f: if not line: continue parts = line.split() name = parts[0] whitelist.add(name) if len(parts) > 1 and parts[1] == 'admin': admin.add(name) c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL'] # ssl config ssl = join(here, 'ssl') keyfile = join(ssl, 'ssl.key') certfile = join(ssl, 'ssl.cert') if os.path.exists(keyfile): c.JupyterHub.ssl_key = keyfile if os.path.exists(certfile): c.JupyterHub.ssl_cert = certfile <commit_msg>Replace legacy ipython import with jupyter_client<commit_after># Configuration file for Jupyter Hub c = get_config() # spawn with Docker c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner' # The docker instances need access to the Hub, so the default loopback port doesn't work: from jupyter_client.localinterfaces import public_ips c.JupyterHub.hub_ip = public_ips()[0] # OAuth with GitHub c.JupyterHub.authenticator_class = 'oauthenticator.GitHubOAuthenticator' c.Authenticator.whitelist = whitelist = set() c.Authenticator.admin_users = admin = set() import os join = os.path.join here = os.path.dirname(__file__) with open(join(here, 'userlist')) as f: for line in f: if not line: continue parts = line.split() name = parts[0] whitelist.add(name) if len(parts) > 1 and parts[1] == 'admin': admin.add(name) c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL'] # ssl config ssl = join(here, 'ssl') keyfile = join(ssl, 'ssl.key') certfile = join(ssl, 'ssl.cert') if os.path.exists(keyfile): c.JupyterHub.ssl_key = keyfile if os.path.exists(certfile): c.JupyterHub.ssl_cert = certfile
5831ce15a94d1941e0521bae328f0ede48bfbe8b
juliet_importer.py
juliet_importer.py
import os import imp modules = {} def load_modules(path="./modules/"): # Consider adding recursive searching at some point in the future modules['juliet_module'] = imp.load_source('juliet_module', path + "juliet_module.py") names = os.listdir(path) for name in names: if not name.endswith(".py"): continue print("Importing module {0}".format(name)) try: modules[name.split('.')[0]] = imp.load_source(name.split('.')[0], path + name) except ImportError as e: print("Error importing module {0} from directory {1}".format(name,os.getcwd())) print(e) continue print("Success") load_modules()
import os import imp modules = {} def load_modules(path="./modules/"): try: modules['juliet_module'] = imp.load_source('juliet_module', path + "juliet_module.py") except ImportError as e: print("Error importing module {0} from directory {1}".format(name,os.getcwd())) print(e) for root, dirs, files in os.walk(path): for name in files: if not name.endswith(".py"): continue print("Importing module {0}".format(name)) try: modules[name.split('.')[0]] = imp.load_source(name.split('.')[0], path + name) except ImportError as e: print("Error importing module {0} from directory {1}".format(name,os.getcwd())) print(e) continue print("Success") load_modules()
Add recursive search to import function
Add recursive search to import function
Python
bsd-2-clause
halfbro/juliet
import os import imp modules = {} def load_modules(path="./modules/"): # Consider adding recursive searching at some point in the future modules['juliet_module'] = imp.load_source('juliet_module', path + "juliet_module.py") names = os.listdir(path) for name in names: if not name.endswith(".py"): continue print("Importing module {0}".format(name)) try: modules[name.split('.')[0]] = imp.load_source(name.split('.')[0], path + name) except ImportError as e: print("Error importing module {0} from directory {1}".format(name,os.getcwd())) print(e) continue print("Success") load_modules() Add recursive search to import function
import os import imp modules = {} def load_modules(path="./modules/"): try: modules['juliet_module'] = imp.load_source('juliet_module', path + "juliet_module.py") except ImportError as e: print("Error importing module {0} from directory {1}".format(name,os.getcwd())) print(e) for root, dirs, files in os.walk(path): for name in files: if not name.endswith(".py"): continue print("Importing module {0}".format(name)) try: modules[name.split('.')[0]] = imp.load_source(name.split('.')[0], path + name) except ImportError as e: print("Error importing module {0} from directory {1}".format(name,os.getcwd())) print(e) continue print("Success") load_modules()
<commit_before>import os import imp modules = {} def load_modules(path="./modules/"): # Consider adding recursive searching at some point in the future modules['juliet_module'] = imp.load_source('juliet_module', path + "juliet_module.py") names = os.listdir(path) for name in names: if not name.endswith(".py"): continue print("Importing module {0}".format(name)) try: modules[name.split('.')[0]] = imp.load_source(name.split('.')[0], path + name) except ImportError as e: print("Error importing module {0} from directory {1}".format(name,os.getcwd())) print(e) continue print("Success") load_modules() <commit_msg>Add recursive search to import function<commit_after>
import os import imp modules = {} def load_modules(path="./modules/"): try: modules['juliet_module'] = imp.load_source('juliet_module', path + "juliet_module.py") except ImportError as e: print("Error importing module {0} from directory {1}".format(name,os.getcwd())) print(e) for root, dirs, files in os.walk(path): for name in files: if not name.endswith(".py"): continue print("Importing module {0}".format(name)) try: modules[name.split('.')[0]] = imp.load_source(name.split('.')[0], path + name) except ImportError as e: print("Error importing module {0} from directory {1}".format(name,os.getcwd())) print(e) continue print("Success") load_modules()
import os import imp modules = {} def load_modules(path="./modules/"): # Consider adding recursive searching at some point in the future modules['juliet_module'] = imp.load_source('juliet_module', path + "juliet_module.py") names = os.listdir(path) for name in names: if not name.endswith(".py"): continue print("Importing module {0}".format(name)) try: modules[name.split('.')[0]] = imp.load_source(name.split('.')[0], path + name) except ImportError as e: print("Error importing module {0} from directory {1}".format(name,os.getcwd())) print(e) continue print("Success") load_modules() Add recursive search to import functionimport os import imp modules = {} def load_modules(path="./modules/"): try: modules['juliet_module'] = imp.load_source('juliet_module', path + "juliet_module.py") except ImportError as e: print("Error importing module {0} from directory {1}".format(name,os.getcwd())) print(e) for root, dirs, files in os.walk(path): for name in files: if not name.endswith(".py"): continue print("Importing module {0}".format(name)) try: modules[name.split('.')[0]] = imp.load_source(name.split('.')[0], path + name) except ImportError as e: print("Error importing module {0} from directory {1}".format(name,os.getcwd())) print(e) continue print("Success") load_modules()
<commit_before>import os import imp modules = {} def load_modules(path="./modules/"): # Consider adding recursive searching at some point in the future modules['juliet_module'] = imp.load_source('juliet_module', path + "juliet_module.py") names = os.listdir(path) for name in names: if not name.endswith(".py"): continue print("Importing module {0}".format(name)) try: modules[name.split('.')[0]] = imp.load_source(name.split('.')[0], path + name) except ImportError as e: print("Error importing module {0} from directory {1}".format(name,os.getcwd())) print(e) continue print("Success") load_modules() <commit_msg>Add recursive search to import function<commit_after>import os import imp modules = {} def load_modules(path="./modules/"): try: modules['juliet_module'] = imp.load_source('juliet_module', path + "juliet_module.py") except ImportError as e: print("Error importing module {0} from directory {1}".format(name,os.getcwd())) print(e) for root, dirs, files in os.walk(path): for name in files: if not name.endswith(".py"): continue print("Importing module {0}".format(name)) try: modules[name.split('.')[0]] = imp.load_source(name.split('.')[0], path + name) except ImportError as e: print("Error importing module {0} from directory {1}".format(name,os.getcwd())) print(e) continue print("Success") load_modules()
381e89972bf4d12daae7aa399f1348a215fa85d9
jira/exceptions.py
jira/exceptions.py
import json class JIRAError(Exception): """General error raised for all problems in operation of the client.""" def __init__(self, status_code=None, text=None, url=None): self.status_code = status_code self.text = text self.url = url def __str__(self): if self.text: return 'HTTP {0}: "{1}"\n{2}'.format(self.status_code, self.text, self.url) else: return 'HTTP {0}: {1}'.format(self.status_code, self.url) def raise_on_error(r): if r.status_code >= 400: error = '' if r.text: try: response = json.loads(r.text) if 'message' in response: # JIRA 5.1 errors error = response['message'] elif 'errorMessages' in response: # JIRA 5.0.x error messages sometimes come wrapped in this array errorMessages = response['errorMessages'] if isinstance(errorMessages, (list, tuple)) and len(errorMessages) > 0: error = errorMessages[0] else: error = errorMessages else: error = r.text except ValueError: error = r.text raise JIRAError(r.status_code, error, r.url)
import json class JIRAError(Exception): """General error raised for all problems in operation of the client.""" def __init__(self, status_code=None, text=None, url=None): self.status_code = status_code self.text = text self.url = url def __str__(self): if self.text: return 'HTTP {0}: "{1}"\n{2}'.format(self.status_code, self.text, self.url) else: return 'HTTP {0}: {1}'.format(self.status_code, self.url) def raise_on_error(r): if r.status_code >= 400: error = '' if r.text: try: response = json.loads(r.text) if 'message' in response: # JIRA 5.1 errors error = response['message'] elif 'errorMessages' in response and len(response['errorMessages']) > 0: # JIRA 5.0.x error messages sometimes come wrapped in this array # Sometimes this is present but empty errorMessages = response['errorMessages'] if isinstance(errorMessages, (list, tuple)): error = errorMessages[0] else: error = errorMessages elif 'errors' in response and len(response['errors']) > 0: # JIRA 6.x error messages are found in this array. error = response['errors'] else: error = r.text except ValueError: error = r.text raise JIRAError(r.status_code, error, r.url)
Fix for empty errorMessages, moved length check to main logic for deciding which error message to use and added check for 'errors' in the response.
Fix for empty errorMessages, moved length check to main logic for deciding which error message to use and added check for 'errors' in the response.
Python
bsd-2-clause
pycontribs/jira,jameskeane/jira-python,rayyen/jira,pycontribs/jira,dbaxa/jira,coddingtonbear/jira,milo-minderbinder/jira,systemadev/jira-python,tsarnowski/jira-python,kinow/jira,jameskeane/jira-python,awurster/jira,stevencarey/jira,VikingDen/jira,awurster/jira,kinow/jira,m42e/jira,VikingDen/jira,tsarnowski/jira-python,dbaxa/jira,rayyen/jira,dwmarshall/pycontribs-jira,milo-minderbinder/jira,dwmarshall/pycontribs-jira,m42e/jira,akosiaris/jira,systemadev/jira-python,akosiaris/jira,stevencarey/jira,coddingtonbear/jira
import json class JIRAError(Exception): """General error raised for all problems in operation of the client.""" def __init__(self, status_code=None, text=None, url=None): self.status_code = status_code self.text = text self.url = url def __str__(self): if self.text: return 'HTTP {0}: "{1}"\n{2}'.format(self.status_code, self.text, self.url) else: return 'HTTP {0}: {1}'.format(self.status_code, self.url) def raise_on_error(r): if r.status_code >= 400: error = '' if r.text: try: response = json.loads(r.text) if 'message' in response: # JIRA 5.1 errors error = response['message'] elif 'errorMessages' in response: # JIRA 5.0.x error messages sometimes come wrapped in this array errorMessages = response['errorMessages'] if isinstance(errorMessages, (list, tuple)) and len(errorMessages) > 0: error = errorMessages[0] else: error = errorMessages else: error = r.text except ValueError: error = r.text raise JIRAError(r.status_code, error, r.url) Fix for empty errorMessages, moved length check to main logic for deciding which error message to use and added check for 'errors' in the response.
import json class JIRAError(Exception): """General error raised for all problems in operation of the client.""" def __init__(self, status_code=None, text=None, url=None): self.status_code = status_code self.text = text self.url = url def __str__(self): if self.text: return 'HTTP {0}: "{1}"\n{2}'.format(self.status_code, self.text, self.url) else: return 'HTTP {0}: {1}'.format(self.status_code, self.url) def raise_on_error(r): if r.status_code >= 400: error = '' if r.text: try: response = json.loads(r.text) if 'message' in response: # JIRA 5.1 errors error = response['message'] elif 'errorMessages' in response and len(response['errorMessages']) > 0: # JIRA 5.0.x error messages sometimes come wrapped in this array # Sometimes this is present but empty errorMessages = response['errorMessages'] if isinstance(errorMessages, (list, tuple)): error = errorMessages[0] else: error = errorMessages elif 'errors' in response and len(response['errors']) > 0: # JIRA 6.x error messages are found in this array. error = response['errors'] else: error = r.text except ValueError: error = r.text raise JIRAError(r.status_code, error, r.url)
<commit_before>import json class JIRAError(Exception): """General error raised for all problems in operation of the client.""" def __init__(self, status_code=None, text=None, url=None): self.status_code = status_code self.text = text self.url = url def __str__(self): if self.text: return 'HTTP {0}: "{1}"\n{2}'.format(self.status_code, self.text, self.url) else: return 'HTTP {0}: {1}'.format(self.status_code, self.url) def raise_on_error(r): if r.status_code >= 400: error = '' if r.text: try: response = json.loads(r.text) if 'message' in response: # JIRA 5.1 errors error = response['message'] elif 'errorMessages' in response: # JIRA 5.0.x error messages sometimes come wrapped in this array errorMessages = response['errorMessages'] if isinstance(errorMessages, (list, tuple)) and len(errorMessages) > 0: error = errorMessages[0] else: error = errorMessages else: error = r.text except ValueError: error = r.text raise JIRAError(r.status_code, error, r.url) <commit_msg>Fix for empty errorMessages, moved length check to main logic for deciding which error message to use and added check for 'errors' in the response.<commit_after>
import json class JIRAError(Exception): """General error raised for all problems in operation of the client.""" def __init__(self, status_code=None, text=None, url=None): self.status_code = status_code self.text = text self.url = url def __str__(self): if self.text: return 'HTTP {0}: "{1}"\n{2}'.format(self.status_code, self.text, self.url) else: return 'HTTP {0}: {1}'.format(self.status_code, self.url) def raise_on_error(r): if r.status_code >= 400: error = '' if r.text: try: response = json.loads(r.text) if 'message' in response: # JIRA 5.1 errors error = response['message'] elif 'errorMessages' in response and len(response['errorMessages']) > 0: # JIRA 5.0.x error messages sometimes come wrapped in this array # Sometimes this is present but empty errorMessages = response['errorMessages'] if isinstance(errorMessages, (list, tuple)): error = errorMessages[0] else: error = errorMessages elif 'errors' in response and len(response['errors']) > 0: # JIRA 6.x error messages are found in this array. error = response['errors'] else: error = r.text except ValueError: error = r.text raise JIRAError(r.status_code, error, r.url)
import json class JIRAError(Exception): """General error raised for all problems in operation of the client.""" def __init__(self, status_code=None, text=None, url=None): self.status_code = status_code self.text = text self.url = url def __str__(self): if self.text: return 'HTTP {0}: "{1}"\n{2}'.format(self.status_code, self.text, self.url) else: return 'HTTP {0}: {1}'.format(self.status_code, self.url) def raise_on_error(r): if r.status_code >= 400: error = '' if r.text: try: response = json.loads(r.text) if 'message' in response: # JIRA 5.1 errors error = response['message'] elif 'errorMessages' in response: # JIRA 5.0.x error messages sometimes come wrapped in this array errorMessages = response['errorMessages'] if isinstance(errorMessages, (list, tuple)) and len(errorMessages) > 0: error = errorMessages[0] else: error = errorMessages else: error = r.text except ValueError: error = r.text raise JIRAError(r.status_code, error, r.url) Fix for empty errorMessages, moved length check to main logic for deciding which error message to use and added check for 'errors' in the response.import json class JIRAError(Exception): """General error raised for all problems in operation of the client.""" def __init__(self, status_code=None, text=None, url=None): self.status_code = status_code self.text = text self.url = url def __str__(self): if self.text: return 'HTTP {0}: "{1}"\n{2}'.format(self.status_code, self.text, self.url) else: return 'HTTP {0}: {1}'.format(self.status_code, self.url) def raise_on_error(r): if r.status_code >= 400: error = '' if r.text: try: response = json.loads(r.text) if 'message' in response: # JIRA 5.1 errors error = response['message'] elif 'errorMessages' in response and len(response['errorMessages']) > 0: # JIRA 5.0.x error messages sometimes come wrapped in this array # Sometimes this is present but empty errorMessages = response['errorMessages'] if isinstance(errorMessages, (list, tuple)): error = errorMessages[0] else: error = errorMessages elif 'errors' in response and len(response['errors']) > 0: # JIRA 6.x error messages are found in this array. error = response['errors'] else: error = r.text except ValueError: error = r.text raise JIRAError(r.status_code, error, r.url)
<commit_before>import json class JIRAError(Exception): """General error raised for all problems in operation of the client.""" def __init__(self, status_code=None, text=None, url=None): self.status_code = status_code self.text = text self.url = url def __str__(self): if self.text: return 'HTTP {0}: "{1}"\n{2}'.format(self.status_code, self.text, self.url) else: return 'HTTP {0}: {1}'.format(self.status_code, self.url) def raise_on_error(r): if r.status_code >= 400: error = '' if r.text: try: response = json.loads(r.text) if 'message' in response: # JIRA 5.1 errors error = response['message'] elif 'errorMessages' in response: # JIRA 5.0.x error messages sometimes come wrapped in this array errorMessages = response['errorMessages'] if isinstance(errorMessages, (list, tuple)) and len(errorMessages) > 0: error = errorMessages[0] else: error = errorMessages else: error = r.text except ValueError: error = r.text raise JIRAError(r.status_code, error, r.url) <commit_msg>Fix for empty errorMessages, moved length check to main logic for deciding which error message to use and added check for 'errors' in the response.<commit_after>import json class JIRAError(Exception): """General error raised for all problems in operation of the client.""" def __init__(self, status_code=None, text=None, url=None): self.status_code = status_code self.text = text self.url = url def __str__(self): if self.text: return 'HTTP {0}: "{1}"\n{2}'.format(self.status_code, self.text, self.url) else: return 'HTTP {0}: {1}'.format(self.status_code, self.url) def raise_on_error(r): if r.status_code >= 400: error = '' if r.text: try: response = json.loads(r.text) if 'message' in response: # JIRA 5.1 errors error = response['message'] elif 'errorMessages' in response and len(response['errorMessages']) > 0: # JIRA 5.0.x error messages sometimes come wrapped in this array # Sometimes this is present but empty errorMessages = response['errorMessages'] if isinstance(errorMessages, (list, tuple)): error = errorMessages[0] else: error = errorMessages elif 'errors' in response and len(response['errors']) > 0: # JIRA 6.x error messages are found in this array. error = response['errors'] else: error = r.text except ValueError: error = r.text raise JIRAError(r.status_code, error, r.url)
9bdd2cbb545c56c660be9933a06f7eea2f9ad059
shallow_appify/_version.py
shallow_appify/_version.py
# -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals __version_info__ = (0, 4, 1) __version__ = '.'.join(map(str, __version_info__))
# -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals __version_info__ = (0, 4, 2) __version__ = '.'.join(map(str, __version_info__))
Increase the version number to `0.4.2`
Increase the version number to `0.4.2`
Python
mit
IngoHeimbach/shallow-appify
# -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals __version_info__ = (0, 4, 1) __version__ = '.'.join(map(str, __version_info__)) Increase the version number to `0.4.2`
# -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals __version_info__ = (0, 4, 2) __version__ = '.'.join(map(str, __version_info__))
<commit_before># -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals __version_info__ = (0, 4, 1) __version__ = '.'.join(map(str, __version_info__)) <commit_msg>Increase the version number to `0.4.2`<commit_after>
# -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals __version_info__ = (0, 4, 2) __version__ = '.'.join(map(str, __version_info__))
# -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals __version_info__ = (0, 4, 1) __version__ = '.'.join(map(str, __version_info__)) Increase the version number to `0.4.2`# -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals __version_info__ = (0, 4, 2) __version__ = '.'.join(map(str, __version_info__))
<commit_before># -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals __version_info__ = (0, 4, 1) __version__ = '.'.join(map(str, __version_info__)) <commit_msg>Increase the version number to `0.4.2`<commit_after># -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals __version_info__ = (0, 4, 2) __version__ = '.'.join(map(str, __version_info__))
2050385a5f5fdcffe333ae17463d6469af0b5cd8
mopidy/__init__.py
mopidy/__init__.py
from __future__ import unicode_literals import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'Mopidy requires Python >= 2.7, < 3, but found %s' % '.'.join(map(str, sys.version_info[:3]))) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'Mopidy requires Pykka >= 1.1, < 2, but found %s' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
from __future__ import unicode_literals import platform import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'ERROR: Mopidy requires Python 2.7, but found %s.' % platform.python_version()) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'ERROR: Mopidy requires Pykka >= 1.1, < 2, but found %s.' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
Update Python and Pykka version check error messages
Update Python and Pykka version check error messages
Python
apache-2.0
jmarsik/mopidy,adamcik/mopidy,priestd09/mopidy,woutervanwijk/mopidy,glogiotatidis/mopidy,tkem/mopidy,bencevans/mopidy,hkariti/mopidy,jcass77/mopidy,pacificIT/mopidy,vrs01/mopidy,ali/mopidy,bencevans/mopidy,mokieyue/mopidy,rawdlite/mopidy,swak/mopidy,tkem/mopidy,rawdlite/mopidy,jcass77/mopidy,woutervanwijk/mopidy,swak/mopidy,swak/mopidy,SuperStarPL/mopidy,SuperStarPL/mopidy,dbrgn/mopidy,mopidy/mopidy,bencevans/mopidy,jcass77/mopidy,glogiotatidis/mopidy,ZenithDK/mopidy,mokieyue/mopidy,SuperStarPL/mopidy,jodal/mopidy,mopidy/mopidy,ali/mopidy,tkem/mopidy,pacificIT/mopidy,quartz55/mopidy,dbrgn/mopidy,ali/mopidy,rawdlite/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,rawdlite/mopidy,priestd09/mopidy,jodal/mopidy,priestd09/mopidy,dbrgn/mopidy,hkariti/mopidy,jmarsik/mopidy,mopidy/mopidy,ZenithDK/mopidy,jmarsik/mopidy,dbrgn/mopidy,quartz55/mopidy,SuperStarPL/mopidy,kingosticks/mopidy,bacontext/mopidy,hkariti/mopidy,kingosticks/mopidy,tkem/mopidy,jodal/mopidy,jmarsik/mopidy,diandiankan/mopidy,diandiankan/mopidy,ZenithDK/mopidy,vrs01/mopidy,ZenithDK/mopidy,vrs01/mopidy,bacontext/mopidy,ali/mopidy,bencevans/mopidy,hkariti/mopidy,bacontext/mopidy,swak/mopidy,quartz55/mopidy,mokieyue/mopidy,diandiankan/mopidy,adamcik/mopidy,glogiotatidis/mopidy,kingosticks/mopidy,adamcik/mopidy,quartz55/mopidy,bacontext/mopidy,vrs01/mopidy,mokieyue/mopidy,diandiankan/mopidy
from __future__ import unicode_literals import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'Mopidy requires Python >= 2.7, < 3, but found %s' % '.'.join(map(str, sys.version_info[:3]))) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'Mopidy requires Pykka >= 1.1, < 2, but found %s' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4' Update Python and Pykka version check error messages
from __future__ import unicode_literals import platform import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'ERROR: Mopidy requires Python 2.7, but found %s.' % platform.python_version()) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'ERROR: Mopidy requires Pykka >= 1.1, < 2, but found %s.' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
<commit_before>from __future__ import unicode_literals import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'Mopidy requires Python >= 2.7, < 3, but found %s' % '.'.join(map(str, sys.version_info[:3]))) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'Mopidy requires Pykka >= 1.1, < 2, but found %s' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4' <commit_msg>Update Python and Pykka version check error messages<commit_after>
from __future__ import unicode_literals import platform import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'ERROR: Mopidy requires Python 2.7, but found %s.' % platform.python_version()) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'ERROR: Mopidy requires Pykka >= 1.1, < 2, but found %s.' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
from __future__ import unicode_literals import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'Mopidy requires Python >= 2.7, < 3, but found %s' % '.'.join(map(str, sys.version_info[:3]))) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'Mopidy requires Pykka >= 1.1, < 2, but found %s' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4' Update Python and Pykka version check error messagesfrom __future__ import unicode_literals import platform import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'ERROR: Mopidy requires Python 2.7, but found %s.' % platform.python_version()) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'ERROR: Mopidy requires Pykka >= 1.1, < 2, but found %s.' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
<commit_before>from __future__ import unicode_literals import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'Mopidy requires Python >= 2.7, < 3, but found %s' % '.'.join(map(str, sys.version_info[:3]))) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'Mopidy requires Pykka >= 1.1, < 2, but found %s' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4' <commit_msg>Update Python and Pykka version check error messages<commit_after>from __future__ import unicode_literals import platform import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'ERROR: Mopidy requires Python 2.7, but found %s.' % platform.python_version()) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'ERROR: Mopidy requires Pykka >= 1.1, < 2, but found %s.' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
780f7f2b9546bc7f1c87ad3744559de3287fee21
src/streamlink/plugins/europaplus.py
src/streamlink/plugins/europaplus.py
from __future__ import print_function import re from streamlink.plugin import Plugin from streamlink.plugin.api import http from streamlink.plugin.api.utils import itertags from streamlink.stream import HLSStream class EuropaPlusTV(Plugin): url_re = re.compile(r"https?://(?:www\.)?europaplus\.ru/europaplustv") src_re = re.compile(r"""['"]file['"]\s*:\s*(?P<quote>['"])(?P<url>.*?)(?P=quote)""") @classmethod def can_handle_url(cls, url): return cls.url_re.match(url) is not None def _get_streams(self): res = http.get(self.url) for iframe in itertags(res.text, "iframe"): self.logger.debug("Found iframe: {0}".format(iframe)) iframe_res = http.get(iframe.attributes['src'], headers={"Referer": self.url}) m = self.src_re.search(iframe_res.text) surl = m and m.group("url") if surl: self.logger.debug("Found stream URL: {0}".format(surl)) return HLSStream.parse_variant_playlist(self.session, surl) __plugin__ = EuropaPlusTV
from __future__ import print_function import re from streamlink.plugin import Plugin from streamlink.plugin.api import http from streamlink.plugin.api.utils import itertags from streamlink.stream import HLSStream from streamlink.utils import update_scheme class EuropaPlusTV(Plugin): url_re = re.compile(r"https?://(?:www\.)?europaplus\.ru/europaplustv") src_re = re.compile(r"""['"]file['"]\s*:\s*(?P<quote>['"])(?P<url>.*?)(?P=quote)""") @classmethod def can_handle_url(cls, url): return cls.url_re.match(url) is not None def _get_streams(self): res = http.get(self.url) for iframe in itertags(res.text, "iframe"): self.logger.debug("Found iframe: {0}".format(iframe)) iframe_res = http.get(iframe.attributes['src'], headers={"Referer": self.url}) m = self.src_re.search(iframe_res.text) surl = m and m.group("url") if surl: surl = update_scheme(self.url, surl) self.logger.debug("Found stream URL: {0}".format(surl)) return HLSStream.parse_variant_playlist(self.session, surl) __plugin__ = EuropaPlusTV
Fix for "No connection adapters were found"
plugins.EuropaPlusTV: Fix for "No connection adapters were found"
Python
bsd-2-clause
back-to/streamlink,streamlink/streamlink,gravyboat/streamlink,melmorabity/streamlink,back-to/streamlink,chhe/streamlink,bastimeyer/streamlink,beardypig/streamlink,wlerin/streamlink,wlerin/streamlink,streamlink/streamlink,chhe/streamlink,beardypig/streamlink,bastimeyer/streamlink,melmorabity/streamlink,gravyboat/streamlink
from __future__ import print_function import re from streamlink.plugin import Plugin from streamlink.plugin.api import http from streamlink.plugin.api.utils import itertags from streamlink.stream import HLSStream class EuropaPlusTV(Plugin): url_re = re.compile(r"https?://(?:www\.)?europaplus\.ru/europaplustv") src_re = re.compile(r"""['"]file['"]\s*:\s*(?P<quote>['"])(?P<url>.*?)(?P=quote)""") @classmethod def can_handle_url(cls, url): return cls.url_re.match(url) is not None def _get_streams(self): res = http.get(self.url) for iframe in itertags(res.text, "iframe"): self.logger.debug("Found iframe: {0}".format(iframe)) iframe_res = http.get(iframe.attributes['src'], headers={"Referer": self.url}) m = self.src_re.search(iframe_res.text) surl = m and m.group("url") if surl: self.logger.debug("Found stream URL: {0}".format(surl)) return HLSStream.parse_variant_playlist(self.session, surl) __plugin__ = EuropaPlusTV plugins.EuropaPlusTV: Fix for "No connection adapters were found"
from __future__ import print_function import re from streamlink.plugin import Plugin from streamlink.plugin.api import http from streamlink.plugin.api.utils import itertags from streamlink.stream import HLSStream from streamlink.utils import update_scheme class EuropaPlusTV(Plugin): url_re = re.compile(r"https?://(?:www\.)?europaplus\.ru/europaplustv") src_re = re.compile(r"""['"]file['"]\s*:\s*(?P<quote>['"])(?P<url>.*?)(?P=quote)""") @classmethod def can_handle_url(cls, url): return cls.url_re.match(url) is not None def _get_streams(self): res = http.get(self.url) for iframe in itertags(res.text, "iframe"): self.logger.debug("Found iframe: {0}".format(iframe)) iframe_res = http.get(iframe.attributes['src'], headers={"Referer": self.url}) m = self.src_re.search(iframe_res.text) surl = m and m.group("url") if surl: surl = update_scheme(self.url, surl) self.logger.debug("Found stream URL: {0}".format(surl)) return HLSStream.parse_variant_playlist(self.session, surl) __plugin__ = EuropaPlusTV
<commit_before>from __future__ import print_function import re from streamlink.plugin import Plugin from streamlink.plugin.api import http from streamlink.plugin.api.utils import itertags from streamlink.stream import HLSStream class EuropaPlusTV(Plugin): url_re = re.compile(r"https?://(?:www\.)?europaplus\.ru/europaplustv") src_re = re.compile(r"""['"]file['"]\s*:\s*(?P<quote>['"])(?P<url>.*?)(?P=quote)""") @classmethod def can_handle_url(cls, url): return cls.url_re.match(url) is not None def _get_streams(self): res = http.get(self.url) for iframe in itertags(res.text, "iframe"): self.logger.debug("Found iframe: {0}".format(iframe)) iframe_res = http.get(iframe.attributes['src'], headers={"Referer": self.url}) m = self.src_re.search(iframe_res.text) surl = m and m.group("url") if surl: self.logger.debug("Found stream URL: {0}".format(surl)) return HLSStream.parse_variant_playlist(self.session, surl) __plugin__ = EuropaPlusTV <commit_msg>plugins.EuropaPlusTV: Fix for "No connection adapters were found"<commit_after>
from __future__ import print_function import re from streamlink.plugin import Plugin from streamlink.plugin.api import http from streamlink.plugin.api.utils import itertags from streamlink.stream import HLSStream from streamlink.utils import update_scheme class EuropaPlusTV(Plugin): url_re = re.compile(r"https?://(?:www\.)?europaplus\.ru/europaplustv") src_re = re.compile(r"""['"]file['"]\s*:\s*(?P<quote>['"])(?P<url>.*?)(?P=quote)""") @classmethod def can_handle_url(cls, url): return cls.url_re.match(url) is not None def _get_streams(self): res = http.get(self.url) for iframe in itertags(res.text, "iframe"): self.logger.debug("Found iframe: {0}".format(iframe)) iframe_res = http.get(iframe.attributes['src'], headers={"Referer": self.url}) m = self.src_re.search(iframe_res.text) surl = m and m.group("url") if surl: surl = update_scheme(self.url, surl) self.logger.debug("Found stream URL: {0}".format(surl)) return HLSStream.parse_variant_playlist(self.session, surl) __plugin__ = EuropaPlusTV
from __future__ import print_function import re from streamlink.plugin import Plugin from streamlink.plugin.api import http from streamlink.plugin.api.utils import itertags from streamlink.stream import HLSStream class EuropaPlusTV(Plugin): url_re = re.compile(r"https?://(?:www\.)?europaplus\.ru/europaplustv") src_re = re.compile(r"""['"]file['"]\s*:\s*(?P<quote>['"])(?P<url>.*?)(?P=quote)""") @classmethod def can_handle_url(cls, url): return cls.url_re.match(url) is not None def _get_streams(self): res = http.get(self.url) for iframe in itertags(res.text, "iframe"): self.logger.debug("Found iframe: {0}".format(iframe)) iframe_res = http.get(iframe.attributes['src'], headers={"Referer": self.url}) m = self.src_re.search(iframe_res.text) surl = m and m.group("url") if surl: self.logger.debug("Found stream URL: {0}".format(surl)) return HLSStream.parse_variant_playlist(self.session, surl) __plugin__ = EuropaPlusTV plugins.EuropaPlusTV: Fix for "No connection adapters were found"from __future__ import print_function import re from streamlink.plugin import Plugin from streamlink.plugin.api import http from streamlink.plugin.api.utils import itertags from streamlink.stream import HLSStream from streamlink.utils import update_scheme class EuropaPlusTV(Plugin): url_re = re.compile(r"https?://(?:www\.)?europaplus\.ru/europaplustv") src_re = re.compile(r"""['"]file['"]\s*:\s*(?P<quote>['"])(?P<url>.*?)(?P=quote)""") @classmethod def can_handle_url(cls, url): return cls.url_re.match(url) is not None def _get_streams(self): res = http.get(self.url) for iframe in itertags(res.text, "iframe"): self.logger.debug("Found iframe: {0}".format(iframe)) iframe_res = http.get(iframe.attributes['src'], headers={"Referer": self.url}) m = self.src_re.search(iframe_res.text) surl = m and m.group("url") if surl: surl = update_scheme(self.url, surl) self.logger.debug("Found stream URL: {0}".format(surl)) return HLSStream.parse_variant_playlist(self.session, surl) __plugin__ = EuropaPlusTV
<commit_before>from __future__ import print_function import re from streamlink.plugin import Plugin from streamlink.plugin.api import http from streamlink.plugin.api.utils import itertags from streamlink.stream import HLSStream class EuropaPlusTV(Plugin): url_re = re.compile(r"https?://(?:www\.)?europaplus\.ru/europaplustv") src_re = re.compile(r"""['"]file['"]\s*:\s*(?P<quote>['"])(?P<url>.*?)(?P=quote)""") @classmethod def can_handle_url(cls, url): return cls.url_re.match(url) is not None def _get_streams(self): res = http.get(self.url) for iframe in itertags(res.text, "iframe"): self.logger.debug("Found iframe: {0}".format(iframe)) iframe_res = http.get(iframe.attributes['src'], headers={"Referer": self.url}) m = self.src_re.search(iframe_res.text) surl = m and m.group("url") if surl: self.logger.debug("Found stream URL: {0}".format(surl)) return HLSStream.parse_variant_playlist(self.session, surl) __plugin__ = EuropaPlusTV <commit_msg>plugins.EuropaPlusTV: Fix for "No connection adapters were found"<commit_after>from __future__ import print_function import re from streamlink.plugin import Plugin from streamlink.plugin.api import http from streamlink.plugin.api.utils import itertags from streamlink.stream import HLSStream from streamlink.utils import update_scheme class EuropaPlusTV(Plugin): url_re = re.compile(r"https?://(?:www\.)?europaplus\.ru/europaplustv") src_re = re.compile(r"""['"]file['"]\s*:\s*(?P<quote>['"])(?P<url>.*?)(?P=quote)""") @classmethod def can_handle_url(cls, url): return cls.url_re.match(url) is not None def _get_streams(self): res = http.get(self.url) for iframe in itertags(res.text, "iframe"): self.logger.debug("Found iframe: {0}".format(iframe)) iframe_res = http.get(iframe.attributes['src'], headers={"Referer": self.url}) m = self.src_re.search(iframe_res.text) surl = m and m.group("url") if surl: surl = update_scheme(self.url, surl) self.logger.debug("Found stream URL: {0}".format(surl)) return HLSStream.parse_variant_playlist(self.session, surl) __plugin__ = EuropaPlusTV
a10c02e6bac0ff87576e359316901d576bed8d9d
rest_framework_simplejwt/settings.py
rest_framework_simplejwt/settings.py
from __future__ import unicode_literals from datetime import timedelta from django.conf import settings from rest_framework.settings import APISettings USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None) DEFAULTS = { 'AUTH_HEADER_TYPE': 'Bearer', 'USER_ID_FIELD': 'id', 'USER_ID_CLAIM': 'user_id', 'TOKEN_TYPE_CLAIM': 'token_type', 'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp', 'SLIDING_TOKEN_LIFETIME': timedelta(days=1), 'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=3), 'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5), 'REFRESH_TOKEN_LIFETIME': timedelta(days=1), 'SECRET_KEY': settings.SECRET_KEY, # Undocumented settings. Changing these may lead to unexpected behavior. # Make sure you know what you're doing. These might become part of the # public API eventually but that would require some adjustments. 'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.SlidingToken', 'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend', 'ALGORITHM': 'HS256', } IMPORT_STRING_SETTINGS = ( 'AUTH_TOKEN_CLASS', 'TOKEN_BACKEND_CLASS', ) api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS)
from __future__ import unicode_literals from datetime import timedelta from django.conf import settings from rest_framework.settings import APISettings USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None) DEFAULTS = { 'AUTH_HEADER_TYPE': 'Bearer', 'USER_ID_FIELD': 'id', 'USER_ID_CLAIM': 'user_id', 'TOKEN_TYPE_CLAIM': 'token_type', 'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp', 'SLIDING_TOKEN_LIFETIME': timedelta(days=1), 'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=3), 'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5), 'REFRESH_TOKEN_LIFETIME': timedelta(days=1), 'SECRET_KEY': settings.SECRET_KEY, # Undocumented settings. Changing these may lead to unexpected behavior. # Make sure you know what you're doing. These might become part of the # public API eventually but that would require some adjustments. 'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.AccessToken', 'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend', 'ALGORITHM': 'HS256', } IMPORT_STRING_SETTINGS = ( 'AUTH_TOKEN_CLASS', 'TOKEN_BACKEND_CLASS', ) api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS)
Update AUTH_TOKEN_CLASS setting default value
Update AUTH_TOKEN_CLASS setting default value
Python
mit
davesque/django-rest-framework-simplejwt,davesque/django-rest-framework-simplejwt
from __future__ import unicode_literals from datetime import timedelta from django.conf import settings from rest_framework.settings import APISettings USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None) DEFAULTS = { 'AUTH_HEADER_TYPE': 'Bearer', 'USER_ID_FIELD': 'id', 'USER_ID_CLAIM': 'user_id', 'TOKEN_TYPE_CLAIM': 'token_type', 'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp', 'SLIDING_TOKEN_LIFETIME': timedelta(days=1), 'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=3), 'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5), 'REFRESH_TOKEN_LIFETIME': timedelta(days=1), 'SECRET_KEY': settings.SECRET_KEY, # Undocumented settings. Changing these may lead to unexpected behavior. # Make sure you know what you're doing. These might become part of the # public API eventually but that would require some adjustments. 'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.SlidingToken', 'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend', 'ALGORITHM': 'HS256', } IMPORT_STRING_SETTINGS = ( 'AUTH_TOKEN_CLASS', 'TOKEN_BACKEND_CLASS', ) api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS) Update AUTH_TOKEN_CLASS setting default value
from __future__ import unicode_literals from datetime import timedelta from django.conf import settings from rest_framework.settings import APISettings USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None) DEFAULTS = { 'AUTH_HEADER_TYPE': 'Bearer', 'USER_ID_FIELD': 'id', 'USER_ID_CLAIM': 'user_id', 'TOKEN_TYPE_CLAIM': 'token_type', 'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp', 'SLIDING_TOKEN_LIFETIME': timedelta(days=1), 'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=3), 'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5), 'REFRESH_TOKEN_LIFETIME': timedelta(days=1), 'SECRET_KEY': settings.SECRET_KEY, # Undocumented settings. Changing these may lead to unexpected behavior. # Make sure you know what you're doing. These might become part of the # public API eventually but that would require some adjustments. 'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.AccessToken', 'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend', 'ALGORITHM': 'HS256', } IMPORT_STRING_SETTINGS = ( 'AUTH_TOKEN_CLASS', 'TOKEN_BACKEND_CLASS', ) api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS)
<commit_before>from __future__ import unicode_literals from datetime import timedelta from django.conf import settings from rest_framework.settings import APISettings USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None) DEFAULTS = { 'AUTH_HEADER_TYPE': 'Bearer', 'USER_ID_FIELD': 'id', 'USER_ID_CLAIM': 'user_id', 'TOKEN_TYPE_CLAIM': 'token_type', 'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp', 'SLIDING_TOKEN_LIFETIME': timedelta(days=1), 'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=3), 'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5), 'REFRESH_TOKEN_LIFETIME': timedelta(days=1), 'SECRET_KEY': settings.SECRET_KEY, # Undocumented settings. Changing these may lead to unexpected behavior. # Make sure you know what you're doing. These might become part of the # public API eventually but that would require some adjustments. 'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.SlidingToken', 'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend', 'ALGORITHM': 'HS256', } IMPORT_STRING_SETTINGS = ( 'AUTH_TOKEN_CLASS', 'TOKEN_BACKEND_CLASS', ) api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS) <commit_msg>Update AUTH_TOKEN_CLASS setting default value<commit_after>
from __future__ import unicode_literals from datetime import timedelta from django.conf import settings from rest_framework.settings import APISettings USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None) DEFAULTS = { 'AUTH_HEADER_TYPE': 'Bearer', 'USER_ID_FIELD': 'id', 'USER_ID_CLAIM': 'user_id', 'TOKEN_TYPE_CLAIM': 'token_type', 'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp', 'SLIDING_TOKEN_LIFETIME': timedelta(days=1), 'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=3), 'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5), 'REFRESH_TOKEN_LIFETIME': timedelta(days=1), 'SECRET_KEY': settings.SECRET_KEY, # Undocumented settings. Changing these may lead to unexpected behavior. # Make sure you know what you're doing. These might become part of the # public API eventually but that would require some adjustments. 'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.AccessToken', 'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend', 'ALGORITHM': 'HS256', } IMPORT_STRING_SETTINGS = ( 'AUTH_TOKEN_CLASS', 'TOKEN_BACKEND_CLASS', ) api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS)
from __future__ import unicode_literals from datetime import timedelta from django.conf import settings from rest_framework.settings import APISettings USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None) DEFAULTS = { 'AUTH_HEADER_TYPE': 'Bearer', 'USER_ID_FIELD': 'id', 'USER_ID_CLAIM': 'user_id', 'TOKEN_TYPE_CLAIM': 'token_type', 'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp', 'SLIDING_TOKEN_LIFETIME': timedelta(days=1), 'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=3), 'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5), 'REFRESH_TOKEN_LIFETIME': timedelta(days=1), 'SECRET_KEY': settings.SECRET_KEY, # Undocumented settings. Changing these may lead to unexpected behavior. # Make sure you know what you're doing. These might become part of the # public API eventually but that would require some adjustments. 'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.SlidingToken', 'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend', 'ALGORITHM': 'HS256', } IMPORT_STRING_SETTINGS = ( 'AUTH_TOKEN_CLASS', 'TOKEN_BACKEND_CLASS', ) api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS) Update AUTH_TOKEN_CLASS setting default valuefrom __future__ import unicode_literals from datetime import timedelta from django.conf import settings from rest_framework.settings import APISettings USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None) DEFAULTS = { 'AUTH_HEADER_TYPE': 'Bearer', 'USER_ID_FIELD': 'id', 'USER_ID_CLAIM': 'user_id', 'TOKEN_TYPE_CLAIM': 'token_type', 'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp', 'SLIDING_TOKEN_LIFETIME': timedelta(days=1), 'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=3), 'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5), 'REFRESH_TOKEN_LIFETIME': timedelta(days=1), 'SECRET_KEY': settings.SECRET_KEY, # Undocumented settings. Changing these may lead to unexpected behavior. # Make sure you know what you're doing. These might become part of the # public API eventually but that would require some adjustments. 'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.AccessToken', 'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend', 'ALGORITHM': 'HS256', } IMPORT_STRING_SETTINGS = ( 'AUTH_TOKEN_CLASS', 'TOKEN_BACKEND_CLASS', ) api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS)
<commit_before>from __future__ import unicode_literals from datetime import timedelta from django.conf import settings from rest_framework.settings import APISettings USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None) DEFAULTS = { 'AUTH_HEADER_TYPE': 'Bearer', 'USER_ID_FIELD': 'id', 'USER_ID_CLAIM': 'user_id', 'TOKEN_TYPE_CLAIM': 'token_type', 'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp', 'SLIDING_TOKEN_LIFETIME': timedelta(days=1), 'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=3), 'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5), 'REFRESH_TOKEN_LIFETIME': timedelta(days=1), 'SECRET_KEY': settings.SECRET_KEY, # Undocumented settings. Changing these may lead to unexpected behavior. # Make sure you know what you're doing. These might become part of the # public API eventually but that would require some adjustments. 'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.SlidingToken', 'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend', 'ALGORITHM': 'HS256', } IMPORT_STRING_SETTINGS = ( 'AUTH_TOKEN_CLASS', 'TOKEN_BACKEND_CLASS', ) api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS) <commit_msg>Update AUTH_TOKEN_CLASS setting default value<commit_after>from __future__ import unicode_literals from datetime import timedelta from django.conf import settings from rest_framework.settings import APISettings USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None) DEFAULTS = { 'AUTH_HEADER_TYPE': 'Bearer', 'USER_ID_FIELD': 'id', 'USER_ID_CLAIM': 'user_id', 'TOKEN_TYPE_CLAIM': 'token_type', 'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp', 'SLIDING_TOKEN_LIFETIME': timedelta(days=1), 'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=3), 'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5), 'REFRESH_TOKEN_LIFETIME': timedelta(days=1), 'SECRET_KEY': settings.SECRET_KEY, # Undocumented settings. Changing these may lead to unexpected behavior. # Make sure you know what you're doing. These might become part of the # public API eventually but that would require some adjustments. 'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.AccessToken', 'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend', 'ALGORITHM': 'HS256', } IMPORT_STRING_SETTINGS = ( 'AUTH_TOKEN_CLASS', 'TOKEN_BACKEND_CLASS', ) api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS)
c953a1435b6f20b337ee1d7410d29868d17bc6d9
pdf_generator/pdf_generator.py
pdf_generator/pdf_generator.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import collections from reportlab.platypus import ( PageBreak, FrameBreak, NextPageTemplate, ) class Story(collections.MutableSequence): def __init__(self, template): self._template = template self._story = list() self._index = 0 def next_page(self): self._story.append(PageBreak()) def next_frame(self): self._story.append(FrameBreak()) def next_template(self): self._index += 1 self._story.append(NextPageTemplate(self._index)) def __len__(self): return len(self._story) def __iter__(self): return iter(self._story) def __getitem__(self, index): return self._story[index] def insert(self, index, value): return self._story.insert(index, value) def __setitem__(self, index, value): self._story[index] = value def __delitem__(self, index): del self._story[index] def build(self, out, title, author, debug=False, **kw): doc = self._template(out, title, author, debug) doc.build(self._story, **kw) return out
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import collections from reportlab.platypus import ( PageBreak, FrameBreak, NextPageTemplate, ) class Story(collections.MutableSequence): def __init__(self, template): self._template = template self._story = list() self._index = 0 @property def template(self): return self._template def next_page(self): self._story.append(PageBreak()) def next_frame(self): self._story.append(FrameBreak()) def next_template(self): self._index += 1 self._story.append(NextPageTemplate(self._index)) def __len__(self): return len(self._story) def __iter__(self): return iter(self._story) def __getitem__(self, index): return self._story[index] def insert(self, index, value): return self._story.insert(index, value) def __setitem__(self, index, value): self._story[index] = value def __delitem__(self, index): del self._story[index] def build(self, out, title, author, debug=False, **kw): doc = self._template(out, title, author, debug) doc.build(self._story, **kw) return out
Add a property to access to the template
Add a property to access to the template
Python
mit
cecedille1/PDF_generator
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import collections from reportlab.platypus import ( PageBreak, FrameBreak, NextPageTemplate, ) class Story(collections.MutableSequence): def __init__(self, template): self._template = template self._story = list() self._index = 0 def next_page(self): self._story.append(PageBreak()) def next_frame(self): self._story.append(FrameBreak()) def next_template(self): self._index += 1 self._story.append(NextPageTemplate(self._index)) def __len__(self): return len(self._story) def __iter__(self): return iter(self._story) def __getitem__(self, index): return self._story[index] def insert(self, index, value): return self._story.insert(index, value) def __setitem__(self, index, value): self._story[index] = value def __delitem__(self, index): del self._story[index] def build(self, out, title, author, debug=False, **kw): doc = self._template(out, title, author, debug) doc.build(self._story, **kw) return out Add a property to access to the template
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import collections from reportlab.platypus import ( PageBreak, FrameBreak, NextPageTemplate, ) class Story(collections.MutableSequence): def __init__(self, template): self._template = template self._story = list() self._index = 0 @property def template(self): return self._template def next_page(self): self._story.append(PageBreak()) def next_frame(self): self._story.append(FrameBreak()) def next_template(self): self._index += 1 self._story.append(NextPageTemplate(self._index)) def __len__(self): return len(self._story) def __iter__(self): return iter(self._story) def __getitem__(self, index): return self._story[index] def insert(self, index, value): return self._story.insert(index, value) def __setitem__(self, index, value): self._story[index] = value def __delitem__(self, index): del self._story[index] def build(self, out, title, author, debug=False, **kw): doc = self._template(out, title, author, debug) doc.build(self._story, **kw) return out
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import collections from reportlab.platypus import ( PageBreak, FrameBreak, NextPageTemplate, ) class Story(collections.MutableSequence): def __init__(self, template): self._template = template self._story = list() self._index = 0 def next_page(self): self._story.append(PageBreak()) def next_frame(self): self._story.append(FrameBreak()) def next_template(self): self._index += 1 self._story.append(NextPageTemplate(self._index)) def __len__(self): return len(self._story) def __iter__(self): return iter(self._story) def __getitem__(self, index): return self._story[index] def insert(self, index, value): return self._story.insert(index, value) def __setitem__(self, index, value): self._story[index] = value def __delitem__(self, index): del self._story[index] def build(self, out, title, author, debug=False, **kw): doc = self._template(out, title, author, debug) doc.build(self._story, **kw) return out <commit_msg>Add a property to access to the template<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import collections from reportlab.platypus import ( PageBreak, FrameBreak, NextPageTemplate, ) class Story(collections.MutableSequence): def __init__(self, template): self._template = template self._story = list() self._index = 0 @property def template(self): return self._template def next_page(self): self._story.append(PageBreak()) def next_frame(self): self._story.append(FrameBreak()) def next_template(self): self._index += 1 self._story.append(NextPageTemplate(self._index)) def __len__(self): return len(self._story) def __iter__(self): return iter(self._story) def __getitem__(self, index): return self._story[index] def insert(self, index, value): return self._story.insert(index, value) def __setitem__(self, index, value): self._story[index] = value def __delitem__(self, index): del self._story[index] def build(self, out, title, author, debug=False, **kw): doc = self._template(out, title, author, debug) doc.build(self._story, **kw) return out
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import collections from reportlab.platypus import ( PageBreak, FrameBreak, NextPageTemplate, ) class Story(collections.MutableSequence): def __init__(self, template): self._template = template self._story = list() self._index = 0 def next_page(self): self._story.append(PageBreak()) def next_frame(self): self._story.append(FrameBreak()) def next_template(self): self._index += 1 self._story.append(NextPageTemplate(self._index)) def __len__(self): return len(self._story) def __iter__(self): return iter(self._story) def __getitem__(self, index): return self._story[index] def insert(self, index, value): return self._story.insert(index, value) def __setitem__(self, index, value): self._story[index] = value def __delitem__(self, index): del self._story[index] def build(self, out, title, author, debug=False, **kw): doc = self._template(out, title, author, debug) doc.build(self._story, **kw) return out Add a property to access to the template#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import collections from reportlab.platypus import ( PageBreak, FrameBreak, NextPageTemplate, ) class Story(collections.MutableSequence): def __init__(self, template): self._template = template self._story = list() self._index = 0 @property def template(self): return self._template def next_page(self): self._story.append(PageBreak()) def next_frame(self): self._story.append(FrameBreak()) def next_template(self): self._index += 1 self._story.append(NextPageTemplate(self._index)) def __len__(self): return len(self._story) def __iter__(self): return iter(self._story) def __getitem__(self, index): return self._story[index] def insert(self, index, value): return self._story.insert(index, value) def __setitem__(self, index, value): self._story[index] = value def __delitem__(self, index): del self._story[index] def build(self, out, title, author, debug=False, **kw): doc = self._template(out, title, author, debug) doc.build(self._story, **kw) return out
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import collections from reportlab.platypus import ( PageBreak, FrameBreak, NextPageTemplate, ) class Story(collections.MutableSequence): def __init__(self, template): self._template = template self._story = list() self._index = 0 def next_page(self): self._story.append(PageBreak()) def next_frame(self): self._story.append(FrameBreak()) def next_template(self): self._index += 1 self._story.append(NextPageTemplate(self._index)) def __len__(self): return len(self._story) def __iter__(self): return iter(self._story) def __getitem__(self, index): return self._story[index] def insert(self, index, value): return self._story.insert(index, value) def __setitem__(self, index, value): self._story[index] = value def __delitem__(self, index): del self._story[index] def build(self, out, title, author, debug=False, **kw): doc = self._template(out, title, author, debug) doc.build(self._story, **kw) return out <commit_msg>Add a property to access to the template<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import collections from reportlab.platypus import ( PageBreak, FrameBreak, NextPageTemplate, ) class Story(collections.MutableSequence): def __init__(self, template): self._template = template self._story = list() self._index = 0 @property def template(self): return self._template def next_page(self): self._story.append(PageBreak()) def next_frame(self): self._story.append(FrameBreak()) def next_template(self): self._index += 1 self._story.append(NextPageTemplate(self._index)) def __len__(self): return len(self._story) def __iter__(self): return iter(self._story) def __getitem__(self, index): return self._story[index] def insert(self, index, value): return self._story.insert(index, value) def __setitem__(self, index, value): self._story[index] = value def __delitem__(self, index): del self._story[index] def build(self, out, title, author, debug=False, **kw): doc = self._template(out, title, author, debug) doc.build(self._story, **kw) return out
fb042cd3ff15f35672e543f040053859c18cff24
timedelta/templatetags/timedelta.py
timedelta/templatetags/timedelta.py
from django import template register = template.Library() # Don't really like using relative imports, but no choice here! from ..helpers import nice_repr, iso8601_repr, total_seconds as _total_seconds @register.filter(name='timedelta') def timedelta(value, display="long"): return nice_repr(value, display) @register.filter(name='iso8601') def iso8601(value): return iso8601_repr(value) @register.filter(name='total_seconds') def total_seconds(value): return _total_seconds(value) @register.filter(name='total_seconds_sort') def total_seconds(value, places=10): return ("%0" + str(places) + "i") % _total_seconds(value)
from django import template register = template.Library() # Don't really like using relative imports, but no choice here! from ..helpers import nice_repr, iso8601_repr, total_seconds as _total_seconds @register.filter(name='timedelta') def timedelta(value, display="long"): if value is None: return value return nice_repr(value, display) @register.filter(name='iso8601') def iso8601(value): if value is None: return value return iso8601_repr(value) @register.filter(name='total_seconds') def total_seconds(value): if value is None: return value return _total_seconds(value) @register.filter(name='total_seconds_sort') def total_seconds(value, places=10): if value is None: return value return ("%0" + str(places) + "i") % _total_seconds(value)
Allow for calling our filters on objects that are None
Allow for calling our filters on objects that are None
Python
bsd-3-clause
sookasa/django-timedelta-field
from django import template register = template.Library() # Don't really like using relative imports, but no choice here! from ..helpers import nice_repr, iso8601_repr, total_seconds as _total_seconds @register.filter(name='timedelta') def timedelta(value, display="long"): return nice_repr(value, display) @register.filter(name='iso8601') def iso8601(value): return iso8601_repr(value) @register.filter(name='total_seconds') def total_seconds(value): return _total_seconds(value) @register.filter(name='total_seconds_sort') def total_seconds(value, places=10): return ("%0" + str(places) + "i") % _total_seconds(value) Allow for calling our filters on objects that are None
from django import template register = template.Library() # Don't really like using relative imports, but no choice here! from ..helpers import nice_repr, iso8601_repr, total_seconds as _total_seconds @register.filter(name='timedelta') def timedelta(value, display="long"): if value is None: return value return nice_repr(value, display) @register.filter(name='iso8601') def iso8601(value): if value is None: return value return iso8601_repr(value) @register.filter(name='total_seconds') def total_seconds(value): if value is None: return value return _total_seconds(value) @register.filter(name='total_seconds_sort') def total_seconds(value, places=10): if value is None: return value return ("%0" + str(places) + "i") % _total_seconds(value)
<commit_before>from django import template register = template.Library() # Don't really like using relative imports, but no choice here! from ..helpers import nice_repr, iso8601_repr, total_seconds as _total_seconds @register.filter(name='timedelta') def timedelta(value, display="long"): return nice_repr(value, display) @register.filter(name='iso8601') def iso8601(value): return iso8601_repr(value) @register.filter(name='total_seconds') def total_seconds(value): return _total_seconds(value) @register.filter(name='total_seconds_sort') def total_seconds(value, places=10): return ("%0" + str(places) + "i") % _total_seconds(value) <commit_msg>Allow for calling our filters on objects that are None<commit_after>
from django import template register = template.Library() # Don't really like using relative imports, but no choice here! from ..helpers import nice_repr, iso8601_repr, total_seconds as _total_seconds @register.filter(name='timedelta') def timedelta(value, display="long"): if value is None: return value return nice_repr(value, display) @register.filter(name='iso8601') def iso8601(value): if value is None: return value return iso8601_repr(value) @register.filter(name='total_seconds') def total_seconds(value): if value is None: return value return _total_seconds(value) @register.filter(name='total_seconds_sort') def total_seconds(value, places=10): if value is None: return value return ("%0" + str(places) + "i") % _total_seconds(value)
from django import template register = template.Library() # Don't really like using relative imports, but no choice here! from ..helpers import nice_repr, iso8601_repr, total_seconds as _total_seconds @register.filter(name='timedelta') def timedelta(value, display="long"): return nice_repr(value, display) @register.filter(name='iso8601') def iso8601(value): return iso8601_repr(value) @register.filter(name='total_seconds') def total_seconds(value): return _total_seconds(value) @register.filter(name='total_seconds_sort') def total_seconds(value, places=10): return ("%0" + str(places) + "i") % _total_seconds(value) Allow for calling our filters on objects that are Nonefrom django import template register = template.Library() # Don't really like using relative imports, but no choice here! from ..helpers import nice_repr, iso8601_repr, total_seconds as _total_seconds @register.filter(name='timedelta') def timedelta(value, display="long"): if value is None: return value return nice_repr(value, display) @register.filter(name='iso8601') def iso8601(value): if value is None: return value return iso8601_repr(value) @register.filter(name='total_seconds') def total_seconds(value): if value is None: return value return _total_seconds(value) @register.filter(name='total_seconds_sort') def total_seconds(value, places=10): if value is None: return value return ("%0" + str(places) + "i") % _total_seconds(value)
<commit_before>from django import template register = template.Library() # Don't really like using relative imports, but no choice here! from ..helpers import nice_repr, iso8601_repr, total_seconds as _total_seconds @register.filter(name='timedelta') def timedelta(value, display="long"): return nice_repr(value, display) @register.filter(name='iso8601') def iso8601(value): return iso8601_repr(value) @register.filter(name='total_seconds') def total_seconds(value): return _total_seconds(value) @register.filter(name='total_seconds_sort') def total_seconds(value, places=10): return ("%0" + str(places) + "i") % _total_seconds(value) <commit_msg>Allow for calling our filters on objects that are None<commit_after>from django import template register = template.Library() # Don't really like using relative imports, but no choice here! from ..helpers import nice_repr, iso8601_repr, total_seconds as _total_seconds @register.filter(name='timedelta') def timedelta(value, display="long"): if value is None: return value return nice_repr(value, display) @register.filter(name='iso8601') def iso8601(value): if value is None: return value return iso8601_repr(value) @register.filter(name='total_seconds') def total_seconds(value): if value is None: return value return _total_seconds(value) @register.filter(name='total_seconds_sort') def total_seconds(value, places=10): if value is None: return value return ("%0" + str(places) + "i") % _total_seconds(value)
e000f5db7bf8aee6b3ae267824491d03b20fbb36
saau/sections/transportation/data.py
saau/sections/transportation/data.py
from operator import attrgetter, itemgetter from itertools import chain from ...utils.py3_hook import with_hook with with_hook(): from arcrest import Catalog import numpy as np def get_layers(service): layers = service.layers return { layer.name: layer for layer in layers } def mend_extent(extent): extent.wkid = extent.spatialReference.wkid return extent def get_data(requested_layers): catalog = Catalog('http://services.ga.gov.au/site_7/rest/services') service = catalog['NM_Transport_Infrastructure'] layers = get_layers(service) return chain.from_iterable( layers[layer].QueryLayer(Geometry=mend_extent(layers[layer].extent)) for layer in requested_layers ) def get_paths(request_layers): paths = get_data(request_layers) paths = map(itemgetter('geometry'), paths) paths = filter(lambda path: hasattr(path, 'paths'), paths) paths = map(attrgetter('paths'), paths) paths = chain.from_iterable(paths) return np.array([ tuple( (part.x, part.y) for part in path ) for path in paths ])
from operator import itemgetter from itertools import chain from ...utils.py3_hook import with_hook with with_hook(): from arcrest import Catalog import numpy as np def get_layers(service): layers = service.layers return { layer.name: layer for layer in layers } def mend_extent(extent): extent.wkid = extent.spatialReference.wkid return extent def get_data(requested_layers): catalog = Catalog('http://services.ga.gov.au/site_7/rest/services') service = catalog['NM_Transport_Infrastructure'] layers = get_layers(service) return chain.from_iterable( layers[layer].QueryLayer(Geometry=mend_extent(layers[layer].extent)) for layer in requested_layers ) def get_paths(request_layers): paths = get_data(request_layers) paths = map(itemgetter('geometry'), paths) paths = chain.from_iterable( geometry.paths for geometry in paths if hasattr(geometry, 'paths') ) return np.array([ tuple( (part.x, part.y) for part in path ) for path in paths ])
Remove map and filter use
Remove map and filter use
Python
mit
Mause/statistical_atlas_of_au
from operator import attrgetter, itemgetter from itertools import chain from ...utils.py3_hook import with_hook with with_hook(): from arcrest import Catalog import numpy as np def get_layers(service): layers = service.layers return { layer.name: layer for layer in layers } def mend_extent(extent): extent.wkid = extent.spatialReference.wkid return extent def get_data(requested_layers): catalog = Catalog('http://services.ga.gov.au/site_7/rest/services') service = catalog['NM_Transport_Infrastructure'] layers = get_layers(service) return chain.from_iterable( layers[layer].QueryLayer(Geometry=mend_extent(layers[layer].extent)) for layer in requested_layers ) def get_paths(request_layers): paths = get_data(request_layers) paths = map(itemgetter('geometry'), paths) paths = filter(lambda path: hasattr(path, 'paths'), paths) paths = map(attrgetter('paths'), paths) paths = chain.from_iterable(paths) return np.array([ tuple( (part.x, part.y) for part in path ) for path in paths ]) Remove map and filter use
from operator import itemgetter from itertools import chain from ...utils.py3_hook import with_hook with with_hook(): from arcrest import Catalog import numpy as np def get_layers(service): layers = service.layers return { layer.name: layer for layer in layers } def mend_extent(extent): extent.wkid = extent.spatialReference.wkid return extent def get_data(requested_layers): catalog = Catalog('http://services.ga.gov.au/site_7/rest/services') service = catalog['NM_Transport_Infrastructure'] layers = get_layers(service) return chain.from_iterable( layers[layer].QueryLayer(Geometry=mend_extent(layers[layer].extent)) for layer in requested_layers ) def get_paths(request_layers): paths = get_data(request_layers) paths = map(itemgetter('geometry'), paths) paths = chain.from_iterable( geometry.paths for geometry in paths if hasattr(geometry, 'paths') ) return np.array([ tuple( (part.x, part.y) for part in path ) for path in paths ])
<commit_before>from operator import attrgetter, itemgetter from itertools import chain from ...utils.py3_hook import with_hook with with_hook(): from arcrest import Catalog import numpy as np def get_layers(service): layers = service.layers return { layer.name: layer for layer in layers } def mend_extent(extent): extent.wkid = extent.spatialReference.wkid return extent def get_data(requested_layers): catalog = Catalog('http://services.ga.gov.au/site_7/rest/services') service = catalog['NM_Transport_Infrastructure'] layers = get_layers(service) return chain.from_iterable( layers[layer].QueryLayer(Geometry=mend_extent(layers[layer].extent)) for layer in requested_layers ) def get_paths(request_layers): paths = get_data(request_layers) paths = map(itemgetter('geometry'), paths) paths = filter(lambda path: hasattr(path, 'paths'), paths) paths = map(attrgetter('paths'), paths) paths = chain.from_iterable(paths) return np.array([ tuple( (part.x, part.y) for part in path ) for path in paths ]) <commit_msg>Remove map and filter use<commit_after>
from operator import itemgetter from itertools import chain from ...utils.py3_hook import with_hook with with_hook(): from arcrest import Catalog import numpy as np def get_layers(service): layers = service.layers return { layer.name: layer for layer in layers } def mend_extent(extent): extent.wkid = extent.spatialReference.wkid return extent def get_data(requested_layers): catalog = Catalog('http://services.ga.gov.au/site_7/rest/services') service = catalog['NM_Transport_Infrastructure'] layers = get_layers(service) return chain.from_iterable( layers[layer].QueryLayer(Geometry=mend_extent(layers[layer].extent)) for layer in requested_layers ) def get_paths(request_layers): paths = get_data(request_layers) paths = map(itemgetter('geometry'), paths) paths = chain.from_iterable( geometry.paths for geometry in paths if hasattr(geometry, 'paths') ) return np.array([ tuple( (part.x, part.y) for part in path ) for path in paths ])
from operator import attrgetter, itemgetter from itertools import chain from ...utils.py3_hook import with_hook with with_hook(): from arcrest import Catalog import numpy as np def get_layers(service): layers = service.layers return { layer.name: layer for layer in layers } def mend_extent(extent): extent.wkid = extent.spatialReference.wkid return extent def get_data(requested_layers): catalog = Catalog('http://services.ga.gov.au/site_7/rest/services') service = catalog['NM_Transport_Infrastructure'] layers = get_layers(service) return chain.from_iterable( layers[layer].QueryLayer(Geometry=mend_extent(layers[layer].extent)) for layer in requested_layers ) def get_paths(request_layers): paths = get_data(request_layers) paths = map(itemgetter('geometry'), paths) paths = filter(lambda path: hasattr(path, 'paths'), paths) paths = map(attrgetter('paths'), paths) paths = chain.from_iterable(paths) return np.array([ tuple( (part.x, part.y) for part in path ) for path in paths ]) Remove map and filter usefrom operator import itemgetter from itertools import chain from ...utils.py3_hook import with_hook with with_hook(): from arcrest import Catalog import numpy as np def get_layers(service): layers = service.layers return { layer.name: layer for layer in layers } def mend_extent(extent): extent.wkid = extent.spatialReference.wkid return extent def get_data(requested_layers): catalog = Catalog('http://services.ga.gov.au/site_7/rest/services') service = catalog['NM_Transport_Infrastructure'] layers = get_layers(service) return chain.from_iterable( layers[layer].QueryLayer(Geometry=mend_extent(layers[layer].extent)) for layer in requested_layers ) def get_paths(request_layers): paths = get_data(request_layers) paths = map(itemgetter('geometry'), paths) paths = chain.from_iterable( geometry.paths for geometry in paths if hasattr(geometry, 'paths') ) return np.array([ tuple( (part.x, part.y) for part in path ) for path in paths ])
<commit_before>from operator import attrgetter, itemgetter from itertools import chain from ...utils.py3_hook import with_hook with with_hook(): from arcrest import Catalog import numpy as np def get_layers(service): layers = service.layers return { layer.name: layer for layer in layers } def mend_extent(extent): extent.wkid = extent.spatialReference.wkid return extent def get_data(requested_layers): catalog = Catalog('http://services.ga.gov.au/site_7/rest/services') service = catalog['NM_Transport_Infrastructure'] layers = get_layers(service) return chain.from_iterable( layers[layer].QueryLayer(Geometry=mend_extent(layers[layer].extent)) for layer in requested_layers ) def get_paths(request_layers): paths = get_data(request_layers) paths = map(itemgetter('geometry'), paths) paths = filter(lambda path: hasattr(path, 'paths'), paths) paths = map(attrgetter('paths'), paths) paths = chain.from_iterable(paths) return np.array([ tuple( (part.x, part.y) for part in path ) for path in paths ]) <commit_msg>Remove map and filter use<commit_after>from operator import itemgetter from itertools import chain from ...utils.py3_hook import with_hook with with_hook(): from arcrest import Catalog import numpy as np def get_layers(service): layers = service.layers return { layer.name: layer for layer in layers } def mend_extent(extent): extent.wkid = extent.spatialReference.wkid return extent def get_data(requested_layers): catalog = Catalog('http://services.ga.gov.au/site_7/rest/services') service = catalog['NM_Transport_Infrastructure'] layers = get_layers(service) return chain.from_iterable( layers[layer].QueryLayer(Geometry=mend_extent(layers[layer].extent)) for layer in requested_layers ) def get_paths(request_layers): paths = get_data(request_layers) paths = map(itemgetter('geometry'), paths) paths = chain.from_iterable( geometry.paths for geometry in paths if hasattr(geometry, 'paths') ) return np.array([ tuple( (part.x, part.y) for part in path ) for path in paths ])
fc6806608c5e407882248185bca57afa712e065a
byceps/blueprints/news_admin/forms.py
byceps/blueprints/news_admin/forms.py
""" byceps.blueprints.news_admin.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2017 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import re from wtforms import StringField, TextAreaField from wtforms.validators import InputRequired, Length, Optional, Regexp from ...util.l10n import LocalizedForm SLUG_REGEX = re.compile('^[a-z0-9-]+$') class ItemCreateForm(LocalizedForm): slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')]) title = StringField('Titel', [InputRequired(), Length(max=80)]) body = TextAreaField('Text', [InputRequired(), Length(max=80)]) image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)]) class ItemUpdateForm(ItemCreateForm): pass
""" byceps.blueprints.news_admin.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2017 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import re from wtforms import StringField, TextAreaField from wtforms.validators import InputRequired, Length, Optional, Regexp from ...util.l10n import LocalizedForm SLUG_REGEX = re.compile('^[a-z0-9-]+$') class ItemCreateForm(LocalizedForm): slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')]) title = StringField('Titel', [InputRequired(), Length(max=80)]) body = TextAreaField('Text', [InputRequired()]) image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)]) class ItemUpdateForm(ItemCreateForm): pass
Fix validation of news creation form
Fix validation of news creation form
Python
bsd-3-clause
m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
""" byceps.blueprints.news_admin.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2017 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import re from wtforms import StringField, TextAreaField from wtforms.validators import InputRequired, Length, Optional, Regexp from ...util.l10n import LocalizedForm SLUG_REGEX = re.compile('^[a-z0-9-]+$') class ItemCreateForm(LocalizedForm): slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')]) title = StringField('Titel', [InputRequired(), Length(max=80)]) body = TextAreaField('Text', [InputRequired(), Length(max=80)]) image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)]) class ItemUpdateForm(ItemCreateForm): pass Fix validation of news creation form
""" byceps.blueprints.news_admin.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2017 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import re from wtforms import StringField, TextAreaField from wtforms.validators import InputRequired, Length, Optional, Regexp from ...util.l10n import LocalizedForm SLUG_REGEX = re.compile('^[a-z0-9-]+$') class ItemCreateForm(LocalizedForm): slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')]) title = StringField('Titel', [InputRequired(), Length(max=80)]) body = TextAreaField('Text', [InputRequired()]) image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)]) class ItemUpdateForm(ItemCreateForm): pass
<commit_before>""" byceps.blueprints.news_admin.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2017 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import re from wtforms import StringField, TextAreaField from wtforms.validators import InputRequired, Length, Optional, Regexp from ...util.l10n import LocalizedForm SLUG_REGEX = re.compile('^[a-z0-9-]+$') class ItemCreateForm(LocalizedForm): slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')]) title = StringField('Titel', [InputRequired(), Length(max=80)]) body = TextAreaField('Text', [InputRequired(), Length(max=80)]) image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)]) class ItemUpdateForm(ItemCreateForm): pass <commit_msg>Fix validation of news creation form<commit_after>
""" byceps.blueprints.news_admin.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2017 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import re from wtforms import StringField, TextAreaField from wtforms.validators import InputRequired, Length, Optional, Regexp from ...util.l10n import LocalizedForm SLUG_REGEX = re.compile('^[a-z0-9-]+$') class ItemCreateForm(LocalizedForm): slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')]) title = StringField('Titel', [InputRequired(), Length(max=80)]) body = TextAreaField('Text', [InputRequired()]) image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)]) class ItemUpdateForm(ItemCreateForm): pass
""" byceps.blueprints.news_admin.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2017 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import re from wtforms import StringField, TextAreaField from wtforms.validators import InputRequired, Length, Optional, Regexp from ...util.l10n import LocalizedForm SLUG_REGEX = re.compile('^[a-z0-9-]+$') class ItemCreateForm(LocalizedForm): slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')]) title = StringField('Titel', [InputRequired(), Length(max=80)]) body = TextAreaField('Text', [InputRequired(), Length(max=80)]) image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)]) class ItemUpdateForm(ItemCreateForm): pass Fix validation of news creation form""" byceps.blueprints.news_admin.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2017 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import re from wtforms import StringField, TextAreaField from wtforms.validators import InputRequired, Length, Optional, Regexp from ...util.l10n import LocalizedForm SLUG_REGEX = re.compile('^[a-z0-9-]+$') class ItemCreateForm(LocalizedForm): slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')]) title = StringField('Titel', [InputRequired(), Length(max=80)]) body = TextAreaField('Text', [InputRequired()]) image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)]) class ItemUpdateForm(ItemCreateForm): pass
<commit_before>""" byceps.blueprints.news_admin.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2017 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import re from wtforms import StringField, TextAreaField from wtforms.validators import InputRequired, Length, Optional, Regexp from ...util.l10n import LocalizedForm SLUG_REGEX = re.compile('^[a-z0-9-]+$') class ItemCreateForm(LocalizedForm): slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')]) title = StringField('Titel', [InputRequired(), Length(max=80)]) body = TextAreaField('Text', [InputRequired(), Length(max=80)]) image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)]) class ItemUpdateForm(ItemCreateForm): pass <commit_msg>Fix validation of news creation form<commit_after>""" byceps.blueprints.news_admin.forms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2017 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import re from wtforms import StringField, TextAreaField from wtforms.validators import InputRequired, Length, Optional, Regexp from ...util.l10n import LocalizedForm SLUG_REGEX = re.compile('^[a-z0-9-]+$') class ItemCreateForm(LocalizedForm): slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')]) title = StringField('Titel', [InputRequired(), Length(max=80)]) body = TextAreaField('Text', [InputRequired()]) image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)]) class ItemUpdateForm(ItemCreateForm): pass
4bd41e0e9381ef1c29b1a912a5d8d6ac99b03f4c
capstone/rl/learners/qlearning.py
capstone/rl/learners/qlearning.py
from ..learner import Learner from ..policies import RandomPolicy from ..util import max_action_value from ..value_functions import TabularF from ...utils import check_random_state class QLearning(Learner): def __init__(self, env, policy=None, qf=None, alpha=0.1, gamma=0.99, n_episodes=1000, random_state=None, verbose=None): super(QLearning, self).__init__(env, n_episodes=n_episodes, verbose=verbose) self.alpha = alpha self.gamma = gamma self.random_state = check_random_state(random_state) self.policy = policy or RandomPolicy(env.actions, self.random_state) self.qf = qf or TabularF(self.random_state) def best_qvalue(self, state, actions): return max_action_value(self.qf, state, actions) ########### # Learner # ########### def episode(self): while not self.env.is_terminal(): state = self.env.cur_state() action = self.policy.action(state) reward, next_state = self.env.do_action(action) best_qvalue = self.best_qvalue(next_state, next_actions) target = reward + (self.gamma * best_qvalue) td_error = target - self.qf[state, action] self.qf[state, action] += self.alpha * td_error
from ..learner import Learner from ..policies import RandomPolicy from ..util import max_action_value from ..value_functions import TabularF from ...utils import check_random_state class QLearning(Learner): def __init__(self, env, policy=None, qf=None, learning_rate=0.1, discount_factor=0.99, n_episodes=1000, random_state=None, verbose=None): super(QLearning, self).__init__(env, n_episodes=n_episodes, verbose=verbose) self.learning_rate = learning_rate self.discount_factor = discount_factor self.random_state = check_random_state(random_state) self.policy = policy or RandomPolicy(env.actions, self.random_state) self.qf = qf or TabularF(self.random_state) def best_qvalue(self, state, actions): return max_action_value(self.qf, state, actions) ########### # Learner # ########### def episode(self): while not self.env.is_terminal(): state = self.env.cur_state() action = self.policy.action(state) reward, next_state = self.env.do_action(action) best_qvalue = self.best_qvalue(next_state, next_actions) target = reward + (self.discount_factor * best_qvalue) td_error = target - self.qf[state, action] self.qf[state, action] += self.learning_rate * td_error
Rename alpha -> learning_rate and gamma -> discount_factor
Rename alpha -> learning_rate and gamma -> discount_factor
Python
mit
davidrobles/mlnd-capstone-code
from ..learner import Learner from ..policies import RandomPolicy from ..util import max_action_value from ..value_functions import TabularF from ...utils import check_random_state class QLearning(Learner): def __init__(self, env, policy=None, qf=None, alpha=0.1, gamma=0.99, n_episodes=1000, random_state=None, verbose=None): super(QLearning, self).__init__(env, n_episodes=n_episodes, verbose=verbose) self.alpha = alpha self.gamma = gamma self.random_state = check_random_state(random_state) self.policy = policy or RandomPolicy(env.actions, self.random_state) self.qf = qf or TabularF(self.random_state) def best_qvalue(self, state, actions): return max_action_value(self.qf, state, actions) ########### # Learner # ########### def episode(self): while not self.env.is_terminal(): state = self.env.cur_state() action = self.policy.action(state) reward, next_state = self.env.do_action(action) best_qvalue = self.best_qvalue(next_state, next_actions) target = reward + (self.gamma * best_qvalue) td_error = target - self.qf[state, action] self.qf[state, action] += self.alpha * td_error Rename alpha -> learning_rate and gamma -> discount_factor
from ..learner import Learner from ..policies import RandomPolicy from ..util import max_action_value from ..value_functions import TabularF from ...utils import check_random_state class QLearning(Learner): def __init__(self, env, policy=None, qf=None, learning_rate=0.1, discount_factor=0.99, n_episodes=1000, random_state=None, verbose=None): super(QLearning, self).__init__(env, n_episodes=n_episodes, verbose=verbose) self.learning_rate = learning_rate self.discount_factor = discount_factor self.random_state = check_random_state(random_state) self.policy = policy or RandomPolicy(env.actions, self.random_state) self.qf = qf or TabularF(self.random_state) def best_qvalue(self, state, actions): return max_action_value(self.qf, state, actions) ########### # Learner # ########### def episode(self): while not self.env.is_terminal(): state = self.env.cur_state() action = self.policy.action(state) reward, next_state = self.env.do_action(action) best_qvalue = self.best_qvalue(next_state, next_actions) target = reward + (self.discount_factor * best_qvalue) td_error = target - self.qf[state, action] self.qf[state, action] += self.learning_rate * td_error
<commit_before>from ..learner import Learner from ..policies import RandomPolicy from ..util import max_action_value from ..value_functions import TabularF from ...utils import check_random_state class QLearning(Learner): def __init__(self, env, policy=None, qf=None, alpha=0.1, gamma=0.99, n_episodes=1000, random_state=None, verbose=None): super(QLearning, self).__init__(env, n_episodes=n_episodes, verbose=verbose) self.alpha = alpha self.gamma = gamma self.random_state = check_random_state(random_state) self.policy = policy or RandomPolicy(env.actions, self.random_state) self.qf = qf or TabularF(self.random_state) def best_qvalue(self, state, actions): return max_action_value(self.qf, state, actions) ########### # Learner # ########### def episode(self): while not self.env.is_terminal(): state = self.env.cur_state() action = self.policy.action(state) reward, next_state = self.env.do_action(action) best_qvalue = self.best_qvalue(next_state, next_actions) target = reward + (self.gamma * best_qvalue) td_error = target - self.qf[state, action] self.qf[state, action] += self.alpha * td_error <commit_msg>Rename alpha -> learning_rate and gamma -> discount_factor<commit_after>
from ..learner import Learner from ..policies import RandomPolicy from ..util import max_action_value from ..value_functions import TabularF from ...utils import check_random_state class QLearning(Learner): def __init__(self, env, policy=None, qf=None, learning_rate=0.1, discount_factor=0.99, n_episodes=1000, random_state=None, verbose=None): super(QLearning, self).__init__(env, n_episodes=n_episodes, verbose=verbose) self.learning_rate = learning_rate self.discount_factor = discount_factor self.random_state = check_random_state(random_state) self.policy = policy or RandomPolicy(env.actions, self.random_state) self.qf = qf or TabularF(self.random_state) def best_qvalue(self, state, actions): return max_action_value(self.qf, state, actions) ########### # Learner # ########### def episode(self): while not self.env.is_terminal(): state = self.env.cur_state() action = self.policy.action(state) reward, next_state = self.env.do_action(action) best_qvalue = self.best_qvalue(next_state, next_actions) target = reward + (self.discount_factor * best_qvalue) td_error = target - self.qf[state, action] self.qf[state, action] += self.learning_rate * td_error
from ..learner import Learner from ..policies import RandomPolicy from ..util import max_action_value from ..value_functions import TabularF from ...utils import check_random_state class QLearning(Learner): def __init__(self, env, policy=None, qf=None, alpha=0.1, gamma=0.99, n_episodes=1000, random_state=None, verbose=None): super(QLearning, self).__init__(env, n_episodes=n_episodes, verbose=verbose) self.alpha = alpha self.gamma = gamma self.random_state = check_random_state(random_state) self.policy = policy or RandomPolicy(env.actions, self.random_state) self.qf = qf or TabularF(self.random_state) def best_qvalue(self, state, actions): return max_action_value(self.qf, state, actions) ########### # Learner # ########### def episode(self): while not self.env.is_terminal(): state = self.env.cur_state() action = self.policy.action(state) reward, next_state = self.env.do_action(action) best_qvalue = self.best_qvalue(next_state, next_actions) target = reward + (self.gamma * best_qvalue) td_error = target - self.qf[state, action] self.qf[state, action] += self.alpha * td_error Rename alpha -> learning_rate and gamma -> discount_factorfrom ..learner import Learner from ..policies import RandomPolicy from ..util import max_action_value from ..value_functions import TabularF from ...utils import check_random_state class QLearning(Learner): def __init__(self, env, policy=None, qf=None, learning_rate=0.1, discount_factor=0.99, n_episodes=1000, random_state=None, verbose=None): super(QLearning, self).__init__(env, n_episodes=n_episodes, verbose=verbose) self.learning_rate = learning_rate self.discount_factor = discount_factor self.random_state = check_random_state(random_state) self.policy = policy or RandomPolicy(env.actions, self.random_state) self.qf = qf or TabularF(self.random_state) def best_qvalue(self, state, actions): return max_action_value(self.qf, state, actions) ########### # Learner # ########### def episode(self): while not self.env.is_terminal(): state = self.env.cur_state() action = self.policy.action(state) reward, next_state = self.env.do_action(action) best_qvalue = self.best_qvalue(next_state, next_actions) target = reward + (self.discount_factor * best_qvalue) td_error = target - self.qf[state, action] self.qf[state, action] += self.learning_rate * td_error
<commit_before>from ..learner import Learner from ..policies import RandomPolicy from ..util import max_action_value from ..value_functions import TabularF from ...utils import check_random_state class QLearning(Learner): def __init__(self, env, policy=None, qf=None, alpha=0.1, gamma=0.99, n_episodes=1000, random_state=None, verbose=None): super(QLearning, self).__init__(env, n_episodes=n_episodes, verbose=verbose) self.alpha = alpha self.gamma = gamma self.random_state = check_random_state(random_state) self.policy = policy or RandomPolicy(env.actions, self.random_state) self.qf = qf or TabularF(self.random_state) def best_qvalue(self, state, actions): return max_action_value(self.qf, state, actions) ########### # Learner # ########### def episode(self): while not self.env.is_terminal(): state = self.env.cur_state() action = self.policy.action(state) reward, next_state = self.env.do_action(action) best_qvalue = self.best_qvalue(next_state, next_actions) target = reward + (self.gamma * best_qvalue) td_error = target - self.qf[state, action] self.qf[state, action] += self.alpha * td_error <commit_msg>Rename alpha -> learning_rate and gamma -> discount_factor<commit_after>from ..learner import Learner from ..policies import RandomPolicy from ..util import max_action_value from ..value_functions import TabularF from ...utils import check_random_state class QLearning(Learner): def __init__(self, env, policy=None, qf=None, learning_rate=0.1, discount_factor=0.99, n_episodes=1000, random_state=None, verbose=None): super(QLearning, self).__init__(env, n_episodes=n_episodes, verbose=verbose) self.learning_rate = learning_rate self.discount_factor = discount_factor self.random_state = check_random_state(random_state) self.policy = policy or RandomPolicy(env.actions, self.random_state) self.qf = qf or TabularF(self.random_state) def best_qvalue(self, state, actions): return max_action_value(self.qf, state, actions) ########### # Learner # ########### def episode(self): while not self.env.is_terminal(): state = self.env.cur_state() action = self.policy.action(state) reward, next_state = self.env.do_action(action) best_qvalue = self.best_qvalue(next_state, next_actions) target = reward + (self.discount_factor * best_qvalue) td_error = target - self.qf[state, action] self.qf[state, action] += self.learning_rate * td_error
0167e246b74789cc0181b603520ec7f58ef7b5fe
pandas/core/api.py
pandas/core/api.py
# pylint: disable=W0614,W0401,W0611 import numpy as np from pandas.core.algorithms import factorize, match, unique, value_counts from pandas.core.common import isnull, notnull, save, load from pandas.core.categorical import Categorical, Factor from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.core.index import Index, Int64Index, MultiIndex from pandas.core.series import Series, TimeSeries from pandas.core.frame import DataFrame from pandas.core.panel import Panel from pandas.core.groupby import groupby from pandas.core.reshape import (pivot_simple as pivot, get_dummies, lreshape) WidePanel = Panel from pandas.tseries.offsets import DateOffset from pandas.tseries.tools import to_datetime from pandas.tseries.index import (DatetimeIndex, Timestamp, date_range, bdate_range) from pandas.tseries.period import Period, PeriodIndex # legacy from pandas.core.daterange import DateRange # deprecated import pandas.core.datetools as datetools
# pylint: disable=W0614,W0401,W0611 import numpy as np from pandas.core.algorithms import factorize, match, unique, value_counts from pandas.core.common import isnull, notnull, save, load from pandas.core.categorical import Categorical, Factor from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.core.index import Index, Int64Index, MultiIndex from pandas.core.series import Series, TimeSeries from pandas.core.frame import DataFrame from pandas.core.panel import Panel from pandas.core.groupby import groupby from pandas.core.reshape import (pivot_simple as pivot, get_dummies, lreshape) WidePanel = Panel from pandas.tseries.offsets import DateOffset from pandas.tseries.tools import to_datetime from pandas.tseries.index import (DatetimeIndex, Timestamp, date_range, bdate_range) from pandas.tseries.period import Period, PeriodIndex # legacy from pandas.core.daterange import DateRange # deprecated import pandas.core.datetools as datetools from pandas.core.config import get_option,set_option,reset_option,\ reset_options,describe_options
Add new core.config API functions to the pandas top level module
ENH: Add new core.config API functions to the pandas top level module
Python
bsd-3-clause
pandas-dev/pandas,GuessWhoSamFoo/pandas,TomAugspurger/pandas,toobaz/pandas,MJuddBooth/pandas,cython-testbed/pandas,TomAugspurger/pandas,nmartensen/pandas,cython-testbed/pandas,DGrady/pandas,DGrady/pandas,datapythonista/pandas,kdebrab/pandas,dsm054/pandas,Winand/pandas,linebp/pandas,dsm054/pandas,toobaz/pandas,jmmease/pandas,zfrenchee/pandas,jorisvandenbossche/pandas,cbertinato/pandas,linebp/pandas,harisbal/pandas,rs2/pandas,linebp/pandas,nmartensen/pandas,jmmease/pandas,jreback/pandas,linebp/pandas,cbertinato/pandas,zfrenchee/pandas,nmartensen/pandas,MJuddBooth/pandas,cython-testbed/pandas,amolkahat/pandas,jmmease/pandas,cython-testbed/pandas,GuessWhoSamFoo/pandas,harisbal/pandas,zfrenchee/pandas,jmmease/pandas,jorisvandenbossche/pandas,GuessWhoSamFoo/pandas,gfyoung/pandas,amolkahat/pandas,pandas-dev/pandas,jreback/pandas,kdebrab/pandas,MJuddBooth/pandas,datapythonista/pandas,pratapvardhan/pandas,amolkahat/pandas,Winand/pandas,cbertinato/pandas,jreback/pandas,gfyoung/pandas,pandas-dev/pandas,jreback/pandas,louispotok/pandas,linebp/pandas,toobaz/pandas,gfyoung/pandas,Winand/pandas,jorisvandenbossche/pandas,rs2/pandas,DGrady/pandas,dsm054/pandas,winklerand/pandas,kdebrab/pandas,winklerand/pandas,TomAugspurger/pandas,datapythonista/pandas,winklerand/pandas,kdebrab/pandas,zfrenchee/pandas,pratapvardhan/pandas,Winand/pandas,TomAugspurger/pandas,datapythonista/pandas,toobaz/pandas,DGrady/pandas,cbertinato/pandas,rs2/pandas,rs2/pandas,DGrady/pandas,toobaz/pandas,gfyoung/pandas,harisbal/pandas,jorisvandenbossche/pandas,nmartensen/pandas,louispotok/pandas,harisbal/pandas,amolkahat/pandas,linebp/pandas,cbertinato/pandas,Winand/pandas,louispotok/pandas,Winand/pandas,pratapvardhan/pandas,nmartensen/pandas,winklerand/pandas,DGrady/pandas,gfyoung/pandas,cython-testbed/pandas,pratapvardhan/pandas,louispotok/pandas,zfrenchee/pandas,MJuddBooth/pandas,GuessWhoSamFoo/pandas,pratapvardhan/pandas,winklerand/pandas,amolkahat/pandas,kdebrab/pandas,pandas-dev/pandas,harisbal/pandas,jreback/pandas,dsm054/pandas,GuessWhoSamFoo/pandas,MJuddBooth/pandas,jmmease/pandas,winklerand/pandas,dsm054/pandas,louispotok/pandas,jmmease/pandas,nmartensen/pandas
# pylint: disable=W0614,W0401,W0611 import numpy as np from pandas.core.algorithms import factorize, match, unique, value_counts from pandas.core.common import isnull, notnull, save, load from pandas.core.categorical import Categorical, Factor from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.core.index import Index, Int64Index, MultiIndex from pandas.core.series import Series, TimeSeries from pandas.core.frame import DataFrame from pandas.core.panel import Panel from pandas.core.groupby import groupby from pandas.core.reshape import (pivot_simple as pivot, get_dummies, lreshape) WidePanel = Panel from pandas.tseries.offsets import DateOffset from pandas.tseries.tools import to_datetime from pandas.tseries.index import (DatetimeIndex, Timestamp, date_range, bdate_range) from pandas.tseries.period import Period, PeriodIndex # legacy from pandas.core.daterange import DateRange # deprecated import pandas.core.datetools as datetools ENH: Add new core.config API functions to the pandas top level module
# pylint: disable=W0614,W0401,W0611 import numpy as np from pandas.core.algorithms import factorize, match, unique, value_counts from pandas.core.common import isnull, notnull, save, load from pandas.core.categorical import Categorical, Factor from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.core.index import Index, Int64Index, MultiIndex from pandas.core.series import Series, TimeSeries from pandas.core.frame import DataFrame from pandas.core.panel import Panel from pandas.core.groupby import groupby from pandas.core.reshape import (pivot_simple as pivot, get_dummies, lreshape) WidePanel = Panel from pandas.tseries.offsets import DateOffset from pandas.tseries.tools import to_datetime from pandas.tseries.index import (DatetimeIndex, Timestamp, date_range, bdate_range) from pandas.tseries.period import Period, PeriodIndex # legacy from pandas.core.daterange import DateRange # deprecated import pandas.core.datetools as datetools from pandas.core.config import get_option,set_option,reset_option,\ reset_options,describe_options
<commit_before> # pylint: disable=W0614,W0401,W0611 import numpy as np from pandas.core.algorithms import factorize, match, unique, value_counts from pandas.core.common import isnull, notnull, save, load from pandas.core.categorical import Categorical, Factor from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.core.index import Index, Int64Index, MultiIndex from pandas.core.series import Series, TimeSeries from pandas.core.frame import DataFrame from pandas.core.panel import Panel from pandas.core.groupby import groupby from pandas.core.reshape import (pivot_simple as pivot, get_dummies, lreshape) WidePanel = Panel from pandas.tseries.offsets import DateOffset from pandas.tseries.tools import to_datetime from pandas.tseries.index import (DatetimeIndex, Timestamp, date_range, bdate_range) from pandas.tseries.period import Period, PeriodIndex # legacy from pandas.core.daterange import DateRange # deprecated import pandas.core.datetools as datetools <commit_msg>ENH: Add new core.config API functions to the pandas top level module<commit_after>
# pylint: disable=W0614,W0401,W0611 import numpy as np from pandas.core.algorithms import factorize, match, unique, value_counts from pandas.core.common import isnull, notnull, save, load from pandas.core.categorical import Categorical, Factor from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.core.index import Index, Int64Index, MultiIndex from pandas.core.series import Series, TimeSeries from pandas.core.frame import DataFrame from pandas.core.panel import Panel from pandas.core.groupby import groupby from pandas.core.reshape import (pivot_simple as pivot, get_dummies, lreshape) WidePanel = Panel from pandas.tseries.offsets import DateOffset from pandas.tseries.tools import to_datetime from pandas.tseries.index import (DatetimeIndex, Timestamp, date_range, bdate_range) from pandas.tseries.period import Period, PeriodIndex # legacy from pandas.core.daterange import DateRange # deprecated import pandas.core.datetools as datetools from pandas.core.config import get_option,set_option,reset_option,\ reset_options,describe_options
# pylint: disable=W0614,W0401,W0611 import numpy as np from pandas.core.algorithms import factorize, match, unique, value_counts from pandas.core.common import isnull, notnull, save, load from pandas.core.categorical import Categorical, Factor from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.core.index import Index, Int64Index, MultiIndex from pandas.core.series import Series, TimeSeries from pandas.core.frame import DataFrame from pandas.core.panel import Panel from pandas.core.groupby import groupby from pandas.core.reshape import (pivot_simple as pivot, get_dummies, lreshape) WidePanel = Panel from pandas.tseries.offsets import DateOffset from pandas.tseries.tools import to_datetime from pandas.tseries.index import (DatetimeIndex, Timestamp, date_range, bdate_range) from pandas.tseries.period import Period, PeriodIndex # legacy from pandas.core.daterange import DateRange # deprecated import pandas.core.datetools as datetools ENH: Add new core.config API functions to the pandas top level module # pylint: disable=W0614,W0401,W0611 import numpy as np from pandas.core.algorithms import factorize, match, unique, value_counts from pandas.core.common import isnull, notnull, save, load from pandas.core.categorical import Categorical, Factor from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.core.index import Index, Int64Index, MultiIndex from pandas.core.series import Series, TimeSeries from pandas.core.frame import DataFrame from pandas.core.panel import Panel from pandas.core.groupby import groupby from pandas.core.reshape import (pivot_simple as pivot, get_dummies, lreshape) WidePanel = Panel from pandas.tseries.offsets import DateOffset from pandas.tseries.tools import to_datetime from pandas.tseries.index import (DatetimeIndex, Timestamp, date_range, bdate_range) from pandas.tseries.period import Period, PeriodIndex # legacy from pandas.core.daterange import DateRange # deprecated import pandas.core.datetools as datetools from pandas.core.config import get_option,set_option,reset_option,\ reset_options,describe_options
<commit_before> # pylint: disable=W0614,W0401,W0611 import numpy as np from pandas.core.algorithms import factorize, match, unique, value_counts from pandas.core.common import isnull, notnull, save, load from pandas.core.categorical import Categorical, Factor from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.core.index import Index, Int64Index, MultiIndex from pandas.core.series import Series, TimeSeries from pandas.core.frame import DataFrame from pandas.core.panel import Panel from pandas.core.groupby import groupby from pandas.core.reshape import (pivot_simple as pivot, get_dummies, lreshape) WidePanel = Panel from pandas.tseries.offsets import DateOffset from pandas.tseries.tools import to_datetime from pandas.tseries.index import (DatetimeIndex, Timestamp, date_range, bdate_range) from pandas.tseries.period import Period, PeriodIndex # legacy from pandas.core.daterange import DateRange # deprecated import pandas.core.datetools as datetools <commit_msg>ENH: Add new core.config API functions to the pandas top level module<commit_after> # pylint: disable=W0614,W0401,W0611 import numpy as np from pandas.core.algorithms import factorize, match, unique, value_counts from pandas.core.common import isnull, notnull, save, load from pandas.core.categorical import Categorical, Factor from pandas.core.format import (set_printoptions, reset_printoptions, set_eng_float_format) from pandas.core.index import Index, Int64Index, MultiIndex from pandas.core.series import Series, TimeSeries from pandas.core.frame import DataFrame from pandas.core.panel import Panel from pandas.core.groupby import groupby from pandas.core.reshape import (pivot_simple as pivot, get_dummies, lreshape) WidePanel = Panel from pandas.tseries.offsets import DateOffset from pandas.tseries.tools import to_datetime from pandas.tseries.index import (DatetimeIndex, Timestamp, date_range, bdate_range) from pandas.tseries.period import Period, PeriodIndex # legacy from pandas.core.daterange import DateRange # deprecated import pandas.core.datetools as datetools from pandas.core.config import get_option,set_option,reset_option,\ reset_options,describe_options
a12a027ba027ebb23e18da38e444dc51e57a91bc
aero/adapters/brew.py
aero/adapters/brew.py
# -*- coding: utf-8 -*- __author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ adapter_command = 'brew' def search(self, query): response = self._execute_command(self.adapter_command, ['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.adapter_command + ':' + line, '\n'.join(map( lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1], self.info(line) )) ) for line in response.splitlines() if line]) return {} def info(self, query): if '/' in query: self._execute_command(self.adapter_command, ['tap', '/'.join(query.split('/')[:-1])]) response = self._execute_command(self.adapter_command, ['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return 'No info available' def install(self, query): print self._execute_shell(self.adapter_command, ['install', query]) return {}
# -*- coding: utf-8 -*- __author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ adapter_command = 'brew' def search(self, query): response = self._execute_command(self.adapter_command, ['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.adapter_command + ':' + line, '\n'.join(map( lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1], self.info(line) )) ) for line in response.splitlines() if line]) return {} def info(self, query): if '/' in query: self._execute_command(self.adapter_command, ['tap', '/'.join(query.split('/')[:-1])]) response = self._execute_command(self.adapter_command, ['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return [['No info available']] def install(self, query): print self._execute_shell(self.adapter_command, ['install', query]) return {}
Fix bew 'No info available'
Fix bew 'No info available'
Python
bsd-3-clause
Aeronautics/aero
# -*- coding: utf-8 -*- __author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ adapter_command = 'brew' def search(self, query): response = self._execute_command(self.adapter_command, ['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.adapter_command + ':' + line, '\n'.join(map( lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1], self.info(line) )) ) for line in response.splitlines() if line]) return {} def info(self, query): if '/' in query: self._execute_command(self.adapter_command, ['tap', '/'.join(query.split('/')[:-1])]) response = self._execute_command(self.adapter_command, ['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return 'No info available' def install(self, query): print self._execute_shell(self.adapter_command, ['install', query]) return {} Fix bew 'No info available'
# -*- coding: utf-8 -*- __author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ adapter_command = 'brew' def search(self, query): response = self._execute_command(self.adapter_command, ['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.adapter_command + ':' + line, '\n'.join(map( lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1], self.info(line) )) ) for line in response.splitlines() if line]) return {} def info(self, query): if '/' in query: self._execute_command(self.adapter_command, ['tap', '/'.join(query.split('/')[:-1])]) response = self._execute_command(self.adapter_command, ['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return [['No info available']] def install(self, query): print self._execute_shell(self.adapter_command, ['install', query]) return {}
<commit_before># -*- coding: utf-8 -*- __author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ adapter_command = 'brew' def search(self, query): response = self._execute_command(self.adapter_command, ['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.adapter_command + ':' + line, '\n'.join(map( lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1], self.info(line) )) ) for line in response.splitlines() if line]) return {} def info(self, query): if '/' in query: self._execute_command(self.adapter_command, ['tap', '/'.join(query.split('/')[:-1])]) response = self._execute_command(self.adapter_command, ['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return 'No info available' def install(self, query): print self._execute_shell(self.adapter_command, ['install', query]) return {} <commit_msg>Fix bew 'No info available'<commit_after>
# -*- coding: utf-8 -*- __author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ adapter_command = 'brew' def search(self, query): response = self._execute_command(self.adapter_command, ['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.adapter_command + ':' + line, '\n'.join(map( lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1], self.info(line) )) ) for line in response.splitlines() if line]) return {} def info(self, query): if '/' in query: self._execute_command(self.adapter_command, ['tap', '/'.join(query.split('/')[:-1])]) response = self._execute_command(self.adapter_command, ['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return [['No info available']] def install(self, query): print self._execute_shell(self.adapter_command, ['install', query]) return {}
# -*- coding: utf-8 -*- __author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ adapter_command = 'brew' def search(self, query): response = self._execute_command(self.adapter_command, ['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.adapter_command + ':' + line, '\n'.join(map( lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1], self.info(line) )) ) for line in response.splitlines() if line]) return {} def info(self, query): if '/' in query: self._execute_command(self.adapter_command, ['tap', '/'.join(query.split('/')[:-1])]) response = self._execute_command(self.adapter_command, ['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return 'No info available' def install(self, query): print self._execute_shell(self.adapter_command, ['install', query]) return {} Fix bew 'No info available'# -*- coding: utf-8 -*- __author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ adapter_command = 'brew' def search(self, query): response = self._execute_command(self.adapter_command, ['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.adapter_command + ':' + line, '\n'.join(map( lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1], self.info(line) )) ) for line in response.splitlines() if line]) return {} def info(self, query): if '/' in query: self._execute_command(self.adapter_command, ['tap', '/'.join(query.split('/')[:-1])]) response = self._execute_command(self.adapter_command, ['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return [['No info available']] def install(self, query): print self._execute_shell(self.adapter_command, ['install', query]) return {}
<commit_before># -*- coding: utf-8 -*- __author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ adapter_command = 'brew' def search(self, query): response = self._execute_command(self.adapter_command, ['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.adapter_command + ':' + line, '\n'.join(map( lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1], self.info(line) )) ) for line in response.splitlines() if line]) return {} def info(self, query): if '/' in query: self._execute_command(self.adapter_command, ['tap', '/'.join(query.split('/')[:-1])]) response = self._execute_command(self.adapter_command, ['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return 'No info available' def install(self, query): print self._execute_shell(self.adapter_command, ['install', query]) return {} <commit_msg>Fix bew 'No info available'<commit_after># -*- coding: utf-8 -*- __author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ adapter_command = 'brew' def search(self, query): response = self._execute_command(self.adapter_command, ['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.adapter_command + ':' + line, '\n'.join(map( lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1], self.info(line) )) ) for line in response.splitlines() if line]) return {} def info(self, query): if '/' in query: self._execute_command(self.adapter_command, ['tap', '/'.join(query.split('/')[:-1])]) response = self._execute_command(self.adapter_command, ['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return [['No info available']] def install(self, query): print self._execute_shell(self.adapter_command, ['install', query]) return {}
ebcdf90a44d3ae87be8032f89bec26697e22cbf3
alexandra/__init__.py
alexandra/__init__.py
""" Python support for Alexa applications. Because like everything Amazon it involves a ton of tedious boilerplate. """ import logging logging.getLogger(__name__).addHandler(logging.NullHandler()) from alexandra.app import Application from alexandra.session import Session from alexandra.util import respond, reprompt
# flake8: noqa """ Python support for Alexa applications. Because like everything Amazon it involves a ton of tedious boilerplate. """ import logging logging.getLogger(__name__).addHandler(logging.NullHandler()) from alexandra.app import Application from alexandra.session import Session from alexandra.util import respond, reprompt
Add a noqa to init
Add a noqa to init
Python
isc
erik/alexandra
""" Python support for Alexa applications. Because like everything Amazon it involves a ton of tedious boilerplate. """ import logging logging.getLogger(__name__).addHandler(logging.NullHandler()) from alexandra.app import Application from alexandra.session import Session from alexandra.util import respond, reprompt Add a noqa to init
# flake8: noqa """ Python support for Alexa applications. Because like everything Amazon it involves a ton of tedious boilerplate. """ import logging logging.getLogger(__name__).addHandler(logging.NullHandler()) from alexandra.app import Application from alexandra.session import Session from alexandra.util import respond, reprompt
<commit_before>""" Python support for Alexa applications. Because like everything Amazon it involves a ton of tedious boilerplate. """ import logging logging.getLogger(__name__).addHandler(logging.NullHandler()) from alexandra.app import Application from alexandra.session import Session from alexandra.util import respond, reprompt <commit_msg>Add a noqa to init<commit_after>
# flake8: noqa """ Python support for Alexa applications. Because like everything Amazon it involves a ton of tedious boilerplate. """ import logging logging.getLogger(__name__).addHandler(logging.NullHandler()) from alexandra.app import Application from alexandra.session import Session from alexandra.util import respond, reprompt
""" Python support for Alexa applications. Because like everything Amazon it involves a ton of tedious boilerplate. """ import logging logging.getLogger(__name__).addHandler(logging.NullHandler()) from alexandra.app import Application from alexandra.session import Session from alexandra.util import respond, reprompt Add a noqa to init# flake8: noqa """ Python support for Alexa applications. Because like everything Amazon it involves a ton of tedious boilerplate. """ import logging logging.getLogger(__name__).addHandler(logging.NullHandler()) from alexandra.app import Application from alexandra.session import Session from alexandra.util import respond, reprompt
<commit_before>""" Python support for Alexa applications. Because like everything Amazon it involves a ton of tedious boilerplate. """ import logging logging.getLogger(__name__).addHandler(logging.NullHandler()) from alexandra.app import Application from alexandra.session import Session from alexandra.util import respond, reprompt <commit_msg>Add a noqa to init<commit_after># flake8: noqa """ Python support for Alexa applications. Because like everything Amazon it involves a ton of tedious boilerplate. """ import logging logging.getLogger(__name__).addHandler(logging.NullHandler()) from alexandra.app import Application from alexandra.session import Session from alexandra.util import respond, reprompt
2cb7c80bc4358631b897e3ea91d3c7eff684f69b
pmxbot/__init__.py
pmxbot/__init__.py
# -*- coding: utf-8 -*- # vim:ts=4:sw=4:noexpandtab from __future__ import absolute_import import socket import logging from .dictlib import ConfigDict config = ConfigDict( bot_nickname = 'pmxbot', database = 'sqlite:pmxbot.sqlite', server_host = 'localhost', server_port = 6667, use_ssl = False, password = None, silent_bot = False, log_channels = [], other_channels = [], places = ['London', 'Tokyo', 'New York'], feed_interval = 15, # minutes feeds = [dict( name = 'pmxbot bitbucket', channel = '#inane', linkurl = 'http://bitbucket.org/yougov/pmxbot', url = 'http://bitbucket.org/yougov/pmxbot', ), ], librarypaste = 'http://paste.jaraco.com', ) config['logs URL'] = 'http://' + socket.getfqdn() config['log level'] = logging.INFO "The config object"
# -*- coding: utf-8 -*- # vim:ts=4:sw=4:noexpandtab from __future__ import absolute_import import socket import logging as _logging from .dictlib import ConfigDict config = ConfigDict( bot_nickname = 'pmxbot', database = 'sqlite:pmxbot.sqlite', server_host = 'localhost', server_port = 6667, use_ssl = False, password = None, silent_bot = False, log_channels = [], other_channels = [], places = ['London', 'Tokyo', 'New York'], feed_interval = 15, # minutes feeds = [dict( name = 'pmxbot bitbucket', channel = '#inane', linkurl = 'http://bitbucket.org/yougov/pmxbot', url = 'http://bitbucket.org/yougov/pmxbot', ), ], librarypaste = 'http://paste.jaraco.com', ) config['logs URL'] = 'http://' + socket.getfqdn() config['log level'] = _logging.INFO "The config object"
Fix issue with conflated pmxbot.logging
Fix issue with conflated pmxbot.logging
Python
bsd-3-clause
jawilson/pmxbot,jawilson/pmxbot
# -*- coding: utf-8 -*- # vim:ts=4:sw=4:noexpandtab from __future__ import absolute_import import socket import logging from .dictlib import ConfigDict config = ConfigDict( bot_nickname = 'pmxbot', database = 'sqlite:pmxbot.sqlite', server_host = 'localhost', server_port = 6667, use_ssl = False, password = None, silent_bot = False, log_channels = [], other_channels = [], places = ['London', 'Tokyo', 'New York'], feed_interval = 15, # minutes feeds = [dict( name = 'pmxbot bitbucket', channel = '#inane', linkurl = 'http://bitbucket.org/yougov/pmxbot', url = 'http://bitbucket.org/yougov/pmxbot', ), ], librarypaste = 'http://paste.jaraco.com', ) config['logs URL'] = 'http://' + socket.getfqdn() config['log level'] = logging.INFO "The config object" Fix issue with conflated pmxbot.logging
# -*- coding: utf-8 -*- # vim:ts=4:sw=4:noexpandtab from __future__ import absolute_import import socket import logging as _logging from .dictlib import ConfigDict config = ConfigDict( bot_nickname = 'pmxbot', database = 'sqlite:pmxbot.sqlite', server_host = 'localhost', server_port = 6667, use_ssl = False, password = None, silent_bot = False, log_channels = [], other_channels = [], places = ['London', 'Tokyo', 'New York'], feed_interval = 15, # minutes feeds = [dict( name = 'pmxbot bitbucket', channel = '#inane', linkurl = 'http://bitbucket.org/yougov/pmxbot', url = 'http://bitbucket.org/yougov/pmxbot', ), ], librarypaste = 'http://paste.jaraco.com', ) config['logs URL'] = 'http://' + socket.getfqdn() config['log level'] = _logging.INFO "The config object"
<commit_before># -*- coding: utf-8 -*- # vim:ts=4:sw=4:noexpandtab from __future__ import absolute_import import socket import logging from .dictlib import ConfigDict config = ConfigDict( bot_nickname = 'pmxbot', database = 'sqlite:pmxbot.sqlite', server_host = 'localhost', server_port = 6667, use_ssl = False, password = None, silent_bot = False, log_channels = [], other_channels = [], places = ['London', 'Tokyo', 'New York'], feed_interval = 15, # minutes feeds = [dict( name = 'pmxbot bitbucket', channel = '#inane', linkurl = 'http://bitbucket.org/yougov/pmxbot', url = 'http://bitbucket.org/yougov/pmxbot', ), ], librarypaste = 'http://paste.jaraco.com', ) config['logs URL'] = 'http://' + socket.getfqdn() config['log level'] = logging.INFO "The config object" <commit_msg>Fix issue with conflated pmxbot.logging<commit_after>
# -*- coding: utf-8 -*- # vim:ts=4:sw=4:noexpandtab from __future__ import absolute_import import socket import logging as _logging from .dictlib import ConfigDict config = ConfigDict( bot_nickname = 'pmxbot', database = 'sqlite:pmxbot.sqlite', server_host = 'localhost', server_port = 6667, use_ssl = False, password = None, silent_bot = False, log_channels = [], other_channels = [], places = ['London', 'Tokyo', 'New York'], feed_interval = 15, # minutes feeds = [dict( name = 'pmxbot bitbucket', channel = '#inane', linkurl = 'http://bitbucket.org/yougov/pmxbot', url = 'http://bitbucket.org/yougov/pmxbot', ), ], librarypaste = 'http://paste.jaraco.com', ) config['logs URL'] = 'http://' + socket.getfqdn() config['log level'] = _logging.INFO "The config object"
# -*- coding: utf-8 -*- # vim:ts=4:sw=4:noexpandtab from __future__ import absolute_import import socket import logging from .dictlib import ConfigDict config = ConfigDict( bot_nickname = 'pmxbot', database = 'sqlite:pmxbot.sqlite', server_host = 'localhost', server_port = 6667, use_ssl = False, password = None, silent_bot = False, log_channels = [], other_channels = [], places = ['London', 'Tokyo', 'New York'], feed_interval = 15, # minutes feeds = [dict( name = 'pmxbot bitbucket', channel = '#inane', linkurl = 'http://bitbucket.org/yougov/pmxbot', url = 'http://bitbucket.org/yougov/pmxbot', ), ], librarypaste = 'http://paste.jaraco.com', ) config['logs URL'] = 'http://' + socket.getfqdn() config['log level'] = logging.INFO "The config object" Fix issue with conflated pmxbot.logging# -*- coding: utf-8 -*- # vim:ts=4:sw=4:noexpandtab from __future__ import absolute_import import socket import logging as _logging from .dictlib import ConfigDict config = ConfigDict( bot_nickname = 'pmxbot', database = 'sqlite:pmxbot.sqlite', server_host = 'localhost', server_port = 6667, use_ssl = False, password = None, silent_bot = False, log_channels = [], other_channels = [], places = ['London', 'Tokyo', 'New York'], feed_interval = 15, # minutes feeds = [dict( name = 'pmxbot bitbucket', channel = '#inane', linkurl = 'http://bitbucket.org/yougov/pmxbot', url = 'http://bitbucket.org/yougov/pmxbot', ), ], librarypaste = 'http://paste.jaraco.com', ) config['logs URL'] = 'http://' + socket.getfqdn() config['log level'] = _logging.INFO "The config object"
<commit_before># -*- coding: utf-8 -*- # vim:ts=4:sw=4:noexpandtab from __future__ import absolute_import import socket import logging from .dictlib import ConfigDict config = ConfigDict( bot_nickname = 'pmxbot', database = 'sqlite:pmxbot.sqlite', server_host = 'localhost', server_port = 6667, use_ssl = False, password = None, silent_bot = False, log_channels = [], other_channels = [], places = ['London', 'Tokyo', 'New York'], feed_interval = 15, # minutes feeds = [dict( name = 'pmxbot bitbucket', channel = '#inane', linkurl = 'http://bitbucket.org/yougov/pmxbot', url = 'http://bitbucket.org/yougov/pmxbot', ), ], librarypaste = 'http://paste.jaraco.com', ) config['logs URL'] = 'http://' + socket.getfqdn() config['log level'] = logging.INFO "The config object" <commit_msg>Fix issue with conflated pmxbot.logging<commit_after># -*- coding: utf-8 -*- # vim:ts=4:sw=4:noexpandtab from __future__ import absolute_import import socket import logging as _logging from .dictlib import ConfigDict config = ConfigDict( bot_nickname = 'pmxbot', database = 'sqlite:pmxbot.sqlite', server_host = 'localhost', server_port = 6667, use_ssl = False, password = None, silent_bot = False, log_channels = [], other_channels = [], places = ['London', 'Tokyo', 'New York'], feed_interval = 15, # minutes feeds = [dict( name = 'pmxbot bitbucket', channel = '#inane', linkurl = 'http://bitbucket.org/yougov/pmxbot', url = 'http://bitbucket.org/yougov/pmxbot', ), ], librarypaste = 'http://paste.jaraco.com', ) config['logs URL'] = 'http://' + socket.getfqdn() config['log level'] = _logging.INFO "The config object"
513fa57c8062310a1e852316f51a4382acf6f9b0
retdec/__init__.py
retdec/__init__.py
# # Project: retdec-python # Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors # License: MIT, see the LICENSE file for more details # """The main package of a Python library and tools providing easy access to the `retdec.com <https://retdec.com>`_ decompilation service through their public `REST API <https://retdec.com/api/>`_. """ __version__ = '0.2' #: Default API URL. DEFAULT_API_URL = 'https://retdec.com/service/api'
# # Project: retdec-python # Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors # License: MIT, see the LICENSE file for more details # """The main package of a Python library and tools providing easy access to the `retdec.com <https://retdec.com>`_ decompilation service through their public `REST API <https://retdec.com/api/>`_. """ __version__ = '0.3-dev' #: Default API URL. DEFAULT_API_URL = 'https://retdec.com/service/api'
Bump the version number to 0.3-dev.
Bump the version number to 0.3-dev.
Python
mit
s3rvac/retdec-python
# # Project: retdec-python # Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors # License: MIT, see the LICENSE file for more details # """The main package of a Python library and tools providing easy access to the `retdec.com <https://retdec.com>`_ decompilation service through their public `REST API <https://retdec.com/api/>`_. """ __version__ = '0.2' #: Default API URL. DEFAULT_API_URL = 'https://retdec.com/service/api' Bump the version number to 0.3-dev.
# # Project: retdec-python # Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors # License: MIT, see the LICENSE file for more details # """The main package of a Python library and tools providing easy access to the `retdec.com <https://retdec.com>`_ decompilation service through their public `REST API <https://retdec.com/api/>`_. """ __version__ = '0.3-dev' #: Default API URL. DEFAULT_API_URL = 'https://retdec.com/service/api'
<commit_before># # Project: retdec-python # Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors # License: MIT, see the LICENSE file for more details # """The main package of a Python library and tools providing easy access to the `retdec.com <https://retdec.com>`_ decompilation service through their public `REST API <https://retdec.com/api/>`_. """ __version__ = '0.2' #: Default API URL. DEFAULT_API_URL = 'https://retdec.com/service/api' <commit_msg>Bump the version number to 0.3-dev.<commit_after>
# # Project: retdec-python # Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors # License: MIT, see the LICENSE file for more details # """The main package of a Python library and tools providing easy access to the `retdec.com <https://retdec.com>`_ decompilation service through their public `REST API <https://retdec.com/api/>`_. """ __version__ = '0.3-dev' #: Default API URL. DEFAULT_API_URL = 'https://retdec.com/service/api'
# # Project: retdec-python # Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors # License: MIT, see the LICENSE file for more details # """The main package of a Python library and tools providing easy access to the `retdec.com <https://retdec.com>`_ decompilation service through their public `REST API <https://retdec.com/api/>`_. """ __version__ = '0.2' #: Default API URL. DEFAULT_API_URL = 'https://retdec.com/service/api' Bump the version number to 0.3-dev.# # Project: retdec-python # Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors # License: MIT, see the LICENSE file for more details # """The main package of a Python library and tools providing easy access to the `retdec.com <https://retdec.com>`_ decompilation service through their public `REST API <https://retdec.com/api/>`_. """ __version__ = '0.3-dev' #: Default API URL. DEFAULT_API_URL = 'https://retdec.com/service/api'
<commit_before># # Project: retdec-python # Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors # License: MIT, see the LICENSE file for more details # """The main package of a Python library and tools providing easy access to the `retdec.com <https://retdec.com>`_ decompilation service through their public `REST API <https://retdec.com/api/>`_. """ __version__ = '0.2' #: Default API URL. DEFAULT_API_URL = 'https://retdec.com/service/api' <commit_msg>Bump the version number to 0.3-dev.<commit_after># # Project: retdec-python # Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors # License: MIT, see the LICENSE file for more details # """The main package of a Python library and tools providing easy access to the `retdec.com <https://retdec.com>`_ decompilation service through their public `REST API <https://retdec.com/api/>`_. """ __version__ = '0.3-dev' #: Default API URL. DEFAULT_API_URL = 'https://retdec.com/service/api'
c5c2d3c411ba38a7b110044e04657ae6584be861
scripts/helpers.py
scripts/helpers.py
def printSnapshot(doc): print(u'Created {} => {}'.format(doc.id, doc.to_dict())) def queryUsers(db): users_ref = db.collection(u'users') docs = users_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def queryRequests(db): requests_ref = db.collection(u'requests') docs = requests_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def getUser(userId, users): for user in users: if user.id == userId: return user return None
def printSnapshot(doc): print(u'Created {} => {}'.format(doc.id, doc.to_dict())) def queryUsers(db): users_ref = db.collection(u'users') docs = users_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def queryRequests(db): requests_ref = db.collection(u'requests') docs = requests_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def queryMessages(db): messages_ref = db.collection(u'messages') docs = messages_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def getUser(userId, users): for user in users: if user.id == userId: return user return None
Add script to clean the message table
Add script to clean the message table
Python
mit
frinder/frinder-app,frinder/frinder-app,frinder/frinder-app
def printSnapshot(doc): print(u'Created {} => {}'.format(doc.id, doc.to_dict())) def queryUsers(db): users_ref = db.collection(u'users') docs = users_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def queryRequests(db): requests_ref = db.collection(u'requests') docs = requests_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def getUser(userId, users): for user in users: if user.id == userId: return user return NoneAdd script to clean the message table
def printSnapshot(doc): print(u'Created {} => {}'.format(doc.id, doc.to_dict())) def queryUsers(db): users_ref = db.collection(u'users') docs = users_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def queryRequests(db): requests_ref = db.collection(u'requests') docs = requests_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def queryMessages(db): messages_ref = db.collection(u'messages') docs = messages_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def getUser(userId, users): for user in users: if user.id == userId: return user return None
<commit_before> def printSnapshot(doc): print(u'Created {} => {}'.format(doc.id, doc.to_dict())) def queryUsers(db): users_ref = db.collection(u'users') docs = users_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def queryRequests(db): requests_ref = db.collection(u'requests') docs = requests_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def getUser(userId, users): for user in users: if user.id == userId: return user return None<commit_msg>Add script to clean the message table<commit_after>
def printSnapshot(doc): print(u'Created {} => {}'.format(doc.id, doc.to_dict())) def queryUsers(db): users_ref = db.collection(u'users') docs = users_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def queryRequests(db): requests_ref = db.collection(u'requests') docs = requests_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def queryMessages(db): messages_ref = db.collection(u'messages') docs = messages_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def getUser(userId, users): for user in users: if user.id == userId: return user return None
def printSnapshot(doc): print(u'Created {} => {}'.format(doc.id, doc.to_dict())) def queryUsers(db): users_ref = db.collection(u'users') docs = users_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def queryRequests(db): requests_ref = db.collection(u'requests') docs = requests_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def getUser(userId, users): for user in users: if user.id == userId: return user return NoneAdd script to clean the message table def printSnapshot(doc): print(u'Created {} => {}'.format(doc.id, doc.to_dict())) def queryUsers(db): users_ref = db.collection(u'users') docs = users_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def queryRequests(db): requests_ref = db.collection(u'requests') docs = requests_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def queryMessages(db): messages_ref = db.collection(u'messages') docs = messages_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def getUser(userId, users): for user in users: if user.id == userId: return user return None
<commit_before> def printSnapshot(doc): print(u'Created {} => {}'.format(doc.id, doc.to_dict())) def queryUsers(db): users_ref = db.collection(u'users') docs = users_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def queryRequests(db): requests_ref = db.collection(u'requests') docs = requests_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def getUser(userId, users): for user in users: if user.id == userId: return user return None<commit_msg>Add script to clean the message table<commit_after> def printSnapshot(doc): print(u'Created {} => {}'.format(doc.id, doc.to_dict())) def queryUsers(db): users_ref = db.collection(u'users') docs = users_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def queryRequests(db): requests_ref = db.collection(u'requests') docs = requests_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def queryMessages(db): messages_ref = db.collection(u'messages') docs = messages_ref.get() docList = list() for doc in docs: docList.append(doc) return docList def getUser(userId, users): for user in users: if user.id == userId: return user return None
2f6c82d74592c80b5042c0b808a658650896cbec
rebulk/__init__.py
rebulk/__init__.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Define simple search patterns in bulk to perform advanced matching on any string """ from .rebulk import Rebulk from .match import Match from .rules import Rule from .pattern import REGEX_AVAILABLE
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Define simple search patterns in bulk to perform advanced matching on any string """ from .rebulk import Rebulk from .match import Match from .rules import Rule, AppendMatchRule, RemoveMatchRule from .pattern import REGEX_AVAILABLE
Add global imports for rules classes
Add global imports for rules classes
Python
mit
Toilal/rebulk
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Define simple search patterns in bulk to perform advanced matching on any string """ from .rebulk import Rebulk from .match import Match from .rules import Rule from .pattern import REGEX_AVAILABLE Add global imports for rules classes
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Define simple search patterns in bulk to perform advanced matching on any string """ from .rebulk import Rebulk from .match import Match from .rules import Rule, AppendMatchRule, RemoveMatchRule from .pattern import REGEX_AVAILABLE
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- """ Define simple search patterns in bulk to perform advanced matching on any string """ from .rebulk import Rebulk from .match import Match from .rules import Rule from .pattern import REGEX_AVAILABLE <commit_msg>Add global imports for rules classes<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Define simple search patterns in bulk to perform advanced matching on any string """ from .rebulk import Rebulk from .match import Match from .rules import Rule, AppendMatchRule, RemoveMatchRule from .pattern import REGEX_AVAILABLE
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Define simple search patterns in bulk to perform advanced matching on any string """ from .rebulk import Rebulk from .match import Match from .rules import Rule from .pattern import REGEX_AVAILABLE Add global imports for rules classes#!/usr/bin/env python # -*- coding: utf-8 -*- """ Define simple search patterns in bulk to perform advanced matching on any string """ from .rebulk import Rebulk from .match import Match from .rules import Rule, AppendMatchRule, RemoveMatchRule from .pattern import REGEX_AVAILABLE
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- """ Define simple search patterns in bulk to perform advanced matching on any string """ from .rebulk import Rebulk from .match import Match from .rules import Rule from .pattern import REGEX_AVAILABLE <commit_msg>Add global imports for rules classes<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- """ Define simple search patterns in bulk to perform advanced matching on any string """ from .rebulk import Rebulk from .match import Match from .rules import Rule, AppendMatchRule, RemoveMatchRule from .pattern import REGEX_AVAILABLE
75092d41fc93306ddc640463886e80620cbcbf46
pemi/transforms.py
pemi/transforms.py
def isblank(value): return ( value is not False and value != 0 and value != float(0) and not bool(value) ) def concatenate(delimiter=''): def _concatenate(row): return delimiter.join(row) return _concatenate def nvl(default=''): def _nvl(row): return next((v for v in row if not isblank(v)), default) return _nvl
import pandas as pd def isblank(value): return ( value is not False and value != 0 and value != float(0) and (value is None or pd.isnull(value) or not value) ) def concatenate(delimiter=''): def _concatenate(row): return delimiter.join(row) return _concatenate def nvl(default=''): def _nvl(row): return next((v for v in row if not isblank(v)), default) return _nvl
Revert "DE-1903 - fix isblank() error"
Revert "DE-1903 - fix isblank() error" This reverts commit 27fd096d9641971f34cb0811fc2240ebc4f3450b.
Python
mit
inside-track/pemi
def isblank(value): return ( value is not False and value != 0 and value != float(0) and not bool(value) ) def concatenate(delimiter=''): def _concatenate(row): return delimiter.join(row) return _concatenate def nvl(default=''): def _nvl(row): return next((v for v in row if not isblank(v)), default) return _nvl Revert "DE-1903 - fix isblank() error" This reverts commit 27fd096d9641971f34cb0811fc2240ebc4f3450b.
import pandas as pd def isblank(value): return ( value is not False and value != 0 and value != float(0) and (value is None or pd.isnull(value) or not value) ) def concatenate(delimiter=''): def _concatenate(row): return delimiter.join(row) return _concatenate def nvl(default=''): def _nvl(row): return next((v for v in row if not isblank(v)), default) return _nvl
<commit_before>def isblank(value): return ( value is not False and value != 0 and value != float(0) and not bool(value) ) def concatenate(delimiter=''): def _concatenate(row): return delimiter.join(row) return _concatenate def nvl(default=''): def _nvl(row): return next((v for v in row if not isblank(v)), default) return _nvl <commit_msg>Revert "DE-1903 - fix isblank() error" This reverts commit 27fd096d9641971f34cb0811fc2240ebc4f3450b.<commit_after>
import pandas as pd def isblank(value): return ( value is not False and value != 0 and value != float(0) and (value is None or pd.isnull(value) or not value) ) def concatenate(delimiter=''): def _concatenate(row): return delimiter.join(row) return _concatenate def nvl(default=''): def _nvl(row): return next((v for v in row if not isblank(v)), default) return _nvl
def isblank(value): return ( value is not False and value != 0 and value != float(0) and not bool(value) ) def concatenate(delimiter=''): def _concatenate(row): return delimiter.join(row) return _concatenate def nvl(default=''): def _nvl(row): return next((v for v in row if not isblank(v)), default) return _nvl Revert "DE-1903 - fix isblank() error" This reverts commit 27fd096d9641971f34cb0811fc2240ebc4f3450b.import pandas as pd def isblank(value): return ( value is not False and value != 0 and value != float(0) and (value is None or pd.isnull(value) or not value) ) def concatenate(delimiter=''): def _concatenate(row): return delimiter.join(row) return _concatenate def nvl(default=''): def _nvl(row): return next((v for v in row if not isblank(v)), default) return _nvl
<commit_before>def isblank(value): return ( value is not False and value != 0 and value != float(0) and not bool(value) ) def concatenate(delimiter=''): def _concatenate(row): return delimiter.join(row) return _concatenate def nvl(default=''): def _nvl(row): return next((v for v in row if not isblank(v)), default) return _nvl <commit_msg>Revert "DE-1903 - fix isblank() error" This reverts commit 27fd096d9641971f34cb0811fc2240ebc4f3450b.<commit_after>import pandas as pd def isblank(value): return ( value is not False and value != 0 and value != float(0) and (value is None or pd.isnull(value) or not value) ) def concatenate(delimiter=''): def _concatenate(row): return delimiter.join(row) return _concatenate def nvl(default=''): def _nvl(row): return next((v for v in row if not isblank(v)), default) return _nvl
fd7fb7ade0fc879e24543f13c39b00de073004bc
setuptools/tests/py26compat.py
setuptools/tests/py26compat.py
import sys import tarfile import contextlib def _tarfile_open_ex(*args, **kwargs): """ Extend result as a context manager. """ return contextlib.closing(tarfile.open(*args, **kwargs)) tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
import sys import tarfile import contextlib def _tarfile_open_ex(*args, **kwargs): """ Extend result as a context manager. """ return contextlib.closing(tarfile.open(*args, **kwargs)) if sys.version_info[:2] < (2, 7) or (3, 0) <= sys.version_info[:2] < (3, 2): tarfile_open = _tarfile_open_ex else: tarfile_open = tarfile.open
Fix "AttributeError: 'TarFile' object has no attribute '__exit__'" with Python 3.1.
Fix "AttributeError: 'TarFile' object has no attribute '__exit__'" with Python 3.1.
Python
mit
pypa/setuptools,pypa/setuptools,pypa/setuptools
import sys import tarfile import contextlib def _tarfile_open_ex(*args, **kwargs): """ Extend result as a context manager. """ return contextlib.closing(tarfile.open(*args, **kwargs)) tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open Fix "AttributeError: 'TarFile' object has no attribute '__exit__'" with Python 3.1.
import sys import tarfile import contextlib def _tarfile_open_ex(*args, **kwargs): """ Extend result as a context manager. """ return contextlib.closing(tarfile.open(*args, **kwargs)) if sys.version_info[:2] < (2, 7) or (3, 0) <= sys.version_info[:2] < (3, 2): tarfile_open = _tarfile_open_ex else: tarfile_open = tarfile.open
<commit_before>import sys import tarfile import contextlib def _tarfile_open_ex(*args, **kwargs): """ Extend result as a context manager. """ return contextlib.closing(tarfile.open(*args, **kwargs)) tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open <commit_msg>Fix "AttributeError: 'TarFile' object has no attribute '__exit__'" with Python 3.1.<commit_after>
import sys import tarfile import contextlib def _tarfile_open_ex(*args, **kwargs): """ Extend result as a context manager. """ return contextlib.closing(tarfile.open(*args, **kwargs)) if sys.version_info[:2] < (2, 7) or (3, 0) <= sys.version_info[:2] < (3, 2): tarfile_open = _tarfile_open_ex else: tarfile_open = tarfile.open
import sys import tarfile import contextlib def _tarfile_open_ex(*args, **kwargs): """ Extend result as a context manager. """ return contextlib.closing(tarfile.open(*args, **kwargs)) tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open Fix "AttributeError: 'TarFile' object has no attribute '__exit__'" with Python 3.1.import sys import tarfile import contextlib def _tarfile_open_ex(*args, **kwargs): """ Extend result as a context manager. """ return contextlib.closing(tarfile.open(*args, **kwargs)) if sys.version_info[:2] < (2, 7) or (3, 0) <= sys.version_info[:2] < (3, 2): tarfile_open = _tarfile_open_ex else: tarfile_open = tarfile.open
<commit_before>import sys import tarfile import contextlib def _tarfile_open_ex(*args, **kwargs): """ Extend result as a context manager. """ return contextlib.closing(tarfile.open(*args, **kwargs)) tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open <commit_msg>Fix "AttributeError: 'TarFile' object has no attribute '__exit__'" with Python 3.1.<commit_after>import sys import tarfile import contextlib def _tarfile_open_ex(*args, **kwargs): """ Extend result as a context manager. """ return contextlib.closing(tarfile.open(*args, **kwargs)) if sys.version_info[:2] < (2, 7) or (3, 0) <= sys.version_info[:2] < (3, 2): tarfile_open = _tarfile_open_ex else: tarfile_open = tarfile.open
b19707479410c04a19e6cf224e048260edbf0155
cc/settings/default.py
cc/settings/default.py
# Case Conductor is a Test Case Management system. # Copyright (C) 2011-2012 Mozilla # # This file is part of Case Conductor. # # Case Conductor is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Case Conductor is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Case Conductor. If not, see <http://www.gnu.org/licenses/>. from .base import * try: from .local import * except ImportError: pass CACHES["default"]["VERSION"] = 1 if DEBUG: MIDDLEWARE_CLASSES.insert( 0, "cc.debug.middleware.AjaxTracebackMiddleware") try: HMAC_KEYS except NameError: HMAC_KEYS = {"default": SECRET_KEY}
# Case Conductor is a Test Case Management system. # Copyright (C) 2011-2012 Mozilla # # This file is part of Case Conductor. # # Case Conductor is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Case Conductor is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Case Conductor. If not, see <http://www.gnu.org/licenses/>. from .base import * try: from .local import * except ImportError: pass CACHES["default"]["VERSION"] = 1 if DEBUG: MIDDLEWARE_CLASSES.insert( 0, "cc.debug.middleware.AjaxTracebackMiddleware") LOGGING["handlers"]["console"] = { "level": "DEBUG", "class": "logging.StreamHandler", } LOGGING["root"] = {"handlers": ["console"]} try: HMAC_KEYS except NameError: HMAC_KEYS = {"default": SECRET_KEY}
Add debug console root logging handler.
Add debug console root logging handler.
Python
bsd-2-clause
mozilla/moztrap,shinglyu/moztrap,shinglyu/moztrap,mozilla/moztrap,mccarrmb/moztrap,bobsilverberg/moztrap,mozilla/moztrap,shinglyu/moztrap,bobsilverberg/moztrap,mozilla/moztrap,bobsilverberg/moztrap,shinglyu/moztrap,mccarrmb/moztrap,shinglyu/moztrap,mccarrmb/moztrap,bobsilverberg/moztrap,mccarrmb/moztrap,mozilla/moztrap,mccarrmb/moztrap
# Case Conductor is a Test Case Management system. # Copyright (C) 2011-2012 Mozilla # # This file is part of Case Conductor. # # Case Conductor is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Case Conductor is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Case Conductor. If not, see <http://www.gnu.org/licenses/>. from .base import * try: from .local import * except ImportError: pass CACHES["default"]["VERSION"] = 1 if DEBUG: MIDDLEWARE_CLASSES.insert( 0, "cc.debug.middleware.AjaxTracebackMiddleware") try: HMAC_KEYS except NameError: HMAC_KEYS = {"default": SECRET_KEY} Add debug console root logging handler.
# Case Conductor is a Test Case Management system. # Copyright (C) 2011-2012 Mozilla # # This file is part of Case Conductor. # # Case Conductor is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Case Conductor is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Case Conductor. If not, see <http://www.gnu.org/licenses/>. from .base import * try: from .local import * except ImportError: pass CACHES["default"]["VERSION"] = 1 if DEBUG: MIDDLEWARE_CLASSES.insert( 0, "cc.debug.middleware.AjaxTracebackMiddleware") LOGGING["handlers"]["console"] = { "level": "DEBUG", "class": "logging.StreamHandler", } LOGGING["root"] = {"handlers": ["console"]} try: HMAC_KEYS except NameError: HMAC_KEYS = {"default": SECRET_KEY}
<commit_before># Case Conductor is a Test Case Management system. # Copyright (C) 2011-2012 Mozilla # # This file is part of Case Conductor. # # Case Conductor is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Case Conductor is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Case Conductor. If not, see <http://www.gnu.org/licenses/>. from .base import * try: from .local import * except ImportError: pass CACHES["default"]["VERSION"] = 1 if DEBUG: MIDDLEWARE_CLASSES.insert( 0, "cc.debug.middleware.AjaxTracebackMiddleware") try: HMAC_KEYS except NameError: HMAC_KEYS = {"default": SECRET_KEY} <commit_msg>Add debug console root logging handler.<commit_after>
# Case Conductor is a Test Case Management system. # Copyright (C) 2011-2012 Mozilla # # This file is part of Case Conductor. # # Case Conductor is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Case Conductor is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Case Conductor. If not, see <http://www.gnu.org/licenses/>. from .base import * try: from .local import * except ImportError: pass CACHES["default"]["VERSION"] = 1 if DEBUG: MIDDLEWARE_CLASSES.insert( 0, "cc.debug.middleware.AjaxTracebackMiddleware") LOGGING["handlers"]["console"] = { "level": "DEBUG", "class": "logging.StreamHandler", } LOGGING["root"] = {"handlers": ["console"]} try: HMAC_KEYS except NameError: HMAC_KEYS = {"default": SECRET_KEY}
# Case Conductor is a Test Case Management system. # Copyright (C) 2011-2012 Mozilla # # This file is part of Case Conductor. # # Case Conductor is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Case Conductor is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Case Conductor. If not, see <http://www.gnu.org/licenses/>. from .base import * try: from .local import * except ImportError: pass CACHES["default"]["VERSION"] = 1 if DEBUG: MIDDLEWARE_CLASSES.insert( 0, "cc.debug.middleware.AjaxTracebackMiddleware") try: HMAC_KEYS except NameError: HMAC_KEYS = {"default": SECRET_KEY} Add debug console root logging handler.# Case Conductor is a Test Case Management system. # Copyright (C) 2011-2012 Mozilla # # This file is part of Case Conductor. # # Case Conductor is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Case Conductor is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Case Conductor. If not, see <http://www.gnu.org/licenses/>. from .base import * try: from .local import * except ImportError: pass CACHES["default"]["VERSION"] = 1 if DEBUG: MIDDLEWARE_CLASSES.insert( 0, "cc.debug.middleware.AjaxTracebackMiddleware") LOGGING["handlers"]["console"] = { "level": "DEBUG", "class": "logging.StreamHandler", } LOGGING["root"] = {"handlers": ["console"]} try: HMAC_KEYS except NameError: HMAC_KEYS = {"default": SECRET_KEY}
<commit_before># Case Conductor is a Test Case Management system. # Copyright (C) 2011-2012 Mozilla # # This file is part of Case Conductor. # # Case Conductor is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Case Conductor is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Case Conductor. If not, see <http://www.gnu.org/licenses/>. from .base import * try: from .local import * except ImportError: pass CACHES["default"]["VERSION"] = 1 if DEBUG: MIDDLEWARE_CLASSES.insert( 0, "cc.debug.middleware.AjaxTracebackMiddleware") try: HMAC_KEYS except NameError: HMAC_KEYS = {"default": SECRET_KEY} <commit_msg>Add debug console root logging handler.<commit_after># Case Conductor is a Test Case Management system. # Copyright (C) 2011-2012 Mozilla # # This file is part of Case Conductor. # # Case Conductor is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Case Conductor is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Case Conductor. If not, see <http://www.gnu.org/licenses/>. from .base import * try: from .local import * except ImportError: pass CACHES["default"]["VERSION"] = 1 if DEBUG: MIDDLEWARE_CLASSES.insert( 0, "cc.debug.middleware.AjaxTracebackMiddleware") LOGGING["handlers"]["console"] = { "level": "DEBUG", "class": "logging.StreamHandler", } LOGGING["root"] = {"handlers": ["console"]} try: HMAC_KEYS except NameError: HMAC_KEYS = {"default": SECRET_KEY}
4f1f5b22a92876f9eb61058fc1911dda73a2acf7
installer/terraform/jazz-terraform-unix-noinstances/scripts/configure-gitlab.py
installer/terraform/jazz-terraform-unix-noinstances/scripts/configure-gitlab.py
from gitlab_personalaccesstoken import generate_personal_access_token from terraform_external_data import terraform_external_data import gitlab @terraform_external_data def get_gitlab_group(query): token = generate_personal_access_token('mytoken', query['passwd'], query['gitlab-ip']) gl = gitlab.Gitlab('http://'.format(query['gitlab_ip']), api_version=4, private_token=token) group = gl.groups.create({'name': 'SLF', 'path': 'slf', 'description': 'Jazz framework, templates and services'}) return { 'gitlab_slfid': str(group.id), 'gitlab_token': str(token) } if __name__ == '__main__': get_gitlab_group()
from gitlab_personalaccesstoken import generate_personal_access_token from terraform_external_data import terraform_external_data import gitlab @terraform_external_data def get_gitlab_group(query): token = generate_personal_access_token('mytoken', query['passwd'], query['gitlab_ip']) gl = gitlab.Gitlab('http://'.format(query['gitlab_ip']), api_version=4, private_token=token) group = gl.groups.create({'name': 'SLF', 'path': 'slf', 'description': 'Jazz framework, templates and services'}) return { 'gitlab_slfid': str(group.id), 'gitlab_token': str(token) } if __name__ == '__main__': get_gitlab_group()
Fix key error due to lack of underscore
Fix key error due to lack of underscore
Python
apache-2.0
tmobile/jazz-installer,tmobile/jazz-installer,tmobile/jazz-installer,tmobile/jazz-installer
from gitlab_personalaccesstoken import generate_personal_access_token from terraform_external_data import terraform_external_data import gitlab @terraform_external_data def get_gitlab_group(query): token = generate_personal_access_token('mytoken', query['passwd'], query['gitlab-ip']) gl = gitlab.Gitlab('http://'.format(query['gitlab_ip']), api_version=4, private_token=token) group = gl.groups.create({'name': 'SLF', 'path': 'slf', 'description': 'Jazz framework, templates and services'}) return { 'gitlab_slfid': str(group.id), 'gitlab_token': str(token) } if __name__ == '__main__': get_gitlab_group() Fix key error due to lack of underscore
from gitlab_personalaccesstoken import generate_personal_access_token from terraform_external_data import terraform_external_data import gitlab @terraform_external_data def get_gitlab_group(query): token = generate_personal_access_token('mytoken', query['passwd'], query['gitlab_ip']) gl = gitlab.Gitlab('http://'.format(query['gitlab_ip']), api_version=4, private_token=token) group = gl.groups.create({'name': 'SLF', 'path': 'slf', 'description': 'Jazz framework, templates and services'}) return { 'gitlab_slfid': str(group.id), 'gitlab_token': str(token) } if __name__ == '__main__': get_gitlab_group()
<commit_before>from gitlab_personalaccesstoken import generate_personal_access_token from terraform_external_data import terraform_external_data import gitlab @terraform_external_data def get_gitlab_group(query): token = generate_personal_access_token('mytoken', query['passwd'], query['gitlab-ip']) gl = gitlab.Gitlab('http://'.format(query['gitlab_ip']), api_version=4, private_token=token) group = gl.groups.create({'name': 'SLF', 'path': 'slf', 'description': 'Jazz framework, templates and services'}) return { 'gitlab_slfid': str(group.id), 'gitlab_token': str(token) } if __name__ == '__main__': get_gitlab_group() <commit_msg>Fix key error due to lack of underscore<commit_after>
from gitlab_personalaccesstoken import generate_personal_access_token from terraform_external_data import terraform_external_data import gitlab @terraform_external_data def get_gitlab_group(query): token = generate_personal_access_token('mytoken', query['passwd'], query['gitlab_ip']) gl = gitlab.Gitlab('http://'.format(query['gitlab_ip']), api_version=4, private_token=token) group = gl.groups.create({'name': 'SLF', 'path': 'slf', 'description': 'Jazz framework, templates and services'}) return { 'gitlab_slfid': str(group.id), 'gitlab_token': str(token) } if __name__ == '__main__': get_gitlab_group()
from gitlab_personalaccesstoken import generate_personal_access_token from terraform_external_data import terraform_external_data import gitlab @terraform_external_data def get_gitlab_group(query): token = generate_personal_access_token('mytoken', query['passwd'], query['gitlab-ip']) gl = gitlab.Gitlab('http://'.format(query['gitlab_ip']), api_version=4, private_token=token) group = gl.groups.create({'name': 'SLF', 'path': 'slf', 'description': 'Jazz framework, templates and services'}) return { 'gitlab_slfid': str(group.id), 'gitlab_token': str(token) } if __name__ == '__main__': get_gitlab_group() Fix key error due to lack of underscorefrom gitlab_personalaccesstoken import generate_personal_access_token from terraform_external_data import terraform_external_data import gitlab @terraform_external_data def get_gitlab_group(query): token = generate_personal_access_token('mytoken', query['passwd'], query['gitlab_ip']) gl = gitlab.Gitlab('http://'.format(query['gitlab_ip']), api_version=4, private_token=token) group = gl.groups.create({'name': 'SLF', 'path': 'slf', 'description': 'Jazz framework, templates and services'}) return { 'gitlab_slfid': str(group.id), 'gitlab_token': str(token) } if __name__ == '__main__': get_gitlab_group()
<commit_before>from gitlab_personalaccesstoken import generate_personal_access_token from terraform_external_data import terraform_external_data import gitlab @terraform_external_data def get_gitlab_group(query): token = generate_personal_access_token('mytoken', query['passwd'], query['gitlab-ip']) gl = gitlab.Gitlab('http://'.format(query['gitlab_ip']), api_version=4, private_token=token) group = gl.groups.create({'name': 'SLF', 'path': 'slf', 'description': 'Jazz framework, templates and services'}) return { 'gitlab_slfid': str(group.id), 'gitlab_token': str(token) } if __name__ == '__main__': get_gitlab_group() <commit_msg>Fix key error due to lack of underscore<commit_after>from gitlab_personalaccesstoken import generate_personal_access_token from terraform_external_data import terraform_external_data import gitlab @terraform_external_data def get_gitlab_group(query): token = generate_personal_access_token('mytoken', query['passwd'], query['gitlab_ip']) gl = gitlab.Gitlab('http://'.format(query['gitlab_ip']), api_version=4, private_token=token) group = gl.groups.create({'name': 'SLF', 'path': 'slf', 'description': 'Jazz framework, templates and services'}) return { 'gitlab_slfid': str(group.id), 'gitlab_token': str(token) } if __name__ == '__main__': get_gitlab_group()
916affcb04fe24f353da918aa707685f7768ea62
pyleus/__init__.py
pyleus/__init__.py
import os import sys __version__ = '0.1.3' BASE_JAR = "pyleus-base.jar" BASE_JAR_INSTALL_DIR = "share/pyleus" BASE_JAR_PATH = os.path.join(sys.prefix, BASE_JAR_INSTALL_DIR, BASE_JAR)
import os import sys __version__ = '0.1.4' BASE_JAR = "pyleus-base.jar" BASE_JAR_INSTALL_DIR = "share/pyleus" BASE_JAR_PATH = os.path.join(sys.prefix, BASE_JAR_INSTALL_DIR, BASE_JAR)
Bump pyleus version to 0.1.4
Bump pyleus version to 0.1.4
Python
apache-2.0
ecanzonieri/pyleus,imcom/pyleus,imcom/pyleus,patricklucas/pyleus,patricklucas/pyleus,jirafe/pyleus,dapuck/pyleus,mzbyszynski/pyleus,jirafe/pyleus,dapuck/pyleus,imcom/pyleus,stallman-cui/pyleus,mzbyszynski/pyleus,poros/pyleus,stallman-cui/pyleus,Yelp/pyleus,Yelp/pyleus,poros/pyleus,ecanzonieri/pyleus
import os import sys __version__ = '0.1.3' BASE_JAR = "pyleus-base.jar" BASE_JAR_INSTALL_DIR = "share/pyleus" BASE_JAR_PATH = os.path.join(sys.prefix, BASE_JAR_INSTALL_DIR, BASE_JAR) Bump pyleus version to 0.1.4
import os import sys __version__ = '0.1.4' BASE_JAR = "pyleus-base.jar" BASE_JAR_INSTALL_DIR = "share/pyleus" BASE_JAR_PATH = os.path.join(sys.prefix, BASE_JAR_INSTALL_DIR, BASE_JAR)
<commit_before>import os import sys __version__ = '0.1.3' BASE_JAR = "pyleus-base.jar" BASE_JAR_INSTALL_DIR = "share/pyleus" BASE_JAR_PATH = os.path.join(sys.prefix, BASE_JAR_INSTALL_DIR, BASE_JAR) <commit_msg>Bump pyleus version to 0.1.4<commit_after>
import os import sys __version__ = '0.1.4' BASE_JAR = "pyleus-base.jar" BASE_JAR_INSTALL_DIR = "share/pyleus" BASE_JAR_PATH = os.path.join(sys.prefix, BASE_JAR_INSTALL_DIR, BASE_JAR)
import os import sys __version__ = '0.1.3' BASE_JAR = "pyleus-base.jar" BASE_JAR_INSTALL_DIR = "share/pyleus" BASE_JAR_PATH = os.path.join(sys.prefix, BASE_JAR_INSTALL_DIR, BASE_JAR) Bump pyleus version to 0.1.4import os import sys __version__ = '0.1.4' BASE_JAR = "pyleus-base.jar" BASE_JAR_INSTALL_DIR = "share/pyleus" BASE_JAR_PATH = os.path.join(sys.prefix, BASE_JAR_INSTALL_DIR, BASE_JAR)
<commit_before>import os import sys __version__ = '0.1.3' BASE_JAR = "pyleus-base.jar" BASE_JAR_INSTALL_DIR = "share/pyleus" BASE_JAR_PATH = os.path.join(sys.prefix, BASE_JAR_INSTALL_DIR, BASE_JAR) <commit_msg>Bump pyleus version to 0.1.4<commit_after>import os import sys __version__ = '0.1.4' BASE_JAR = "pyleus-base.jar" BASE_JAR_INSTALL_DIR = "share/pyleus" BASE_JAR_PATH = os.path.join(sys.prefix, BASE_JAR_INSTALL_DIR, BASE_JAR)
6830f29022746838677ecca420aeff190943c5ed
random/__init__.py
random/__init__.py
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Nomisma Quantitative Finance random number samplers.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from nomisma_quant_finance.random.random_ops import multivariate_normal from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle __all__ = [ 'multivariate_normal', 'stateless_random_shuffle' ]
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Random number samplers.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from nomisma_quant_finance.random.random_ops import multivariate_normal from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle __all__ = [ 'multivariate_normal', 'stateless_random_shuffle' ]
Remove remnants of internal project naming in one docstring.
Remove remnants of internal project naming in one docstring. PiperOrigin-RevId: 263530441
Python
apache-2.0
google/tf-quant-finance,google/tf-quant-finance
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Nomisma Quantitative Finance random number samplers.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from nomisma_quant_finance.random.random_ops import multivariate_normal from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle __all__ = [ 'multivariate_normal', 'stateless_random_shuffle' ] Remove remnants of internal project naming in one docstring. PiperOrigin-RevId: 263530441
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Random number samplers.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from nomisma_quant_finance.random.random_ops import multivariate_normal from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle __all__ = [ 'multivariate_normal', 'stateless_random_shuffle' ]
<commit_before># Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Nomisma Quantitative Finance random number samplers.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from nomisma_quant_finance.random.random_ops import multivariate_normal from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle __all__ = [ 'multivariate_normal', 'stateless_random_shuffle' ] <commit_msg>Remove remnants of internal project naming in one docstring. PiperOrigin-RevId: 263530441<commit_after>
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Random number samplers.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from nomisma_quant_finance.random.random_ops import multivariate_normal from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle __all__ = [ 'multivariate_normal', 'stateless_random_shuffle' ]
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Nomisma Quantitative Finance random number samplers.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from nomisma_quant_finance.random.random_ops import multivariate_normal from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle __all__ = [ 'multivariate_normal', 'stateless_random_shuffle' ] Remove remnants of internal project naming in one docstring. PiperOrigin-RevId: 263530441# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Random number samplers.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from nomisma_quant_finance.random.random_ops import multivariate_normal from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle __all__ = [ 'multivariate_normal', 'stateless_random_shuffle' ]
<commit_before># Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Nomisma Quantitative Finance random number samplers.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from nomisma_quant_finance.random.random_ops import multivariate_normal from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle __all__ = [ 'multivariate_normal', 'stateless_random_shuffle' ] <commit_msg>Remove remnants of internal project naming in one docstring. PiperOrigin-RevId: 263530441<commit_after># Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Random number samplers.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from nomisma_quant_finance.random.random_ops import multivariate_normal from nomisma_quant_finance.random.stateless_random_ops import stateless_random_shuffle __all__ = [ 'multivariate_normal', 'stateless_random_shuffle' ]
0eb7ddce9f425c30c70bc1442618deb72c530911
networks/models.py
networks/models.py
from django.db import models from helpers import models as helpermodels # Create your models here. class Networks(models.Model): POLICIES = ( ('reject', 'Reject'), ('drop', 'Ignore'), ('accept', 'Accept'), ) name = models.CharField(max_length=30) interface = models.CharField(max_length=10) ip_range = helpermodels.IPNetworkField() policy = models.CharField("default policy", choices=POLICIES, max_length=6)
from django.db import models from helpers.models import IPNetworkField # Create your models here. class Network(models.Model): POLICIES = ( ('reject', 'Reject'), ('drop', 'Ignore'), ('accept', 'Accept'), ) name = models.CharField(max_length=30) interface = models.CharField(max_length=10) ip_range = IPNetworkField() policy = models.CharField("default policy", choices=POLICIES, max_length=6)
Fix Network model name; better import
Fix Network model name; better import
Python
mit
Kromey/piroute,Kromey/piroute,Kromey/piroute
from django.db import models from helpers import models as helpermodels # Create your models here. class Networks(models.Model): POLICIES = ( ('reject', 'Reject'), ('drop', 'Ignore'), ('accept', 'Accept'), ) name = models.CharField(max_length=30) interface = models.CharField(max_length=10) ip_range = helpermodels.IPNetworkField() policy = models.CharField("default policy", choices=POLICIES, max_length=6) Fix Network model name; better import
from django.db import models from helpers.models import IPNetworkField # Create your models here. class Network(models.Model): POLICIES = ( ('reject', 'Reject'), ('drop', 'Ignore'), ('accept', 'Accept'), ) name = models.CharField(max_length=30) interface = models.CharField(max_length=10) ip_range = IPNetworkField() policy = models.CharField("default policy", choices=POLICIES, max_length=6)
<commit_before>from django.db import models from helpers import models as helpermodels # Create your models here. class Networks(models.Model): POLICIES = ( ('reject', 'Reject'), ('drop', 'Ignore'), ('accept', 'Accept'), ) name = models.CharField(max_length=30) interface = models.CharField(max_length=10) ip_range = helpermodels.IPNetworkField() policy = models.CharField("default policy", choices=POLICIES, max_length=6) <commit_msg>Fix Network model name; better import<commit_after>
from django.db import models from helpers.models import IPNetworkField # Create your models here. class Network(models.Model): POLICIES = ( ('reject', 'Reject'), ('drop', 'Ignore'), ('accept', 'Accept'), ) name = models.CharField(max_length=30) interface = models.CharField(max_length=10) ip_range = IPNetworkField() policy = models.CharField("default policy", choices=POLICIES, max_length=6)
from django.db import models from helpers import models as helpermodels # Create your models here. class Networks(models.Model): POLICIES = ( ('reject', 'Reject'), ('drop', 'Ignore'), ('accept', 'Accept'), ) name = models.CharField(max_length=30) interface = models.CharField(max_length=10) ip_range = helpermodels.IPNetworkField() policy = models.CharField("default policy", choices=POLICIES, max_length=6) Fix Network model name; better importfrom django.db import models from helpers.models import IPNetworkField # Create your models here. class Network(models.Model): POLICIES = ( ('reject', 'Reject'), ('drop', 'Ignore'), ('accept', 'Accept'), ) name = models.CharField(max_length=30) interface = models.CharField(max_length=10) ip_range = IPNetworkField() policy = models.CharField("default policy", choices=POLICIES, max_length=6)
<commit_before>from django.db import models from helpers import models as helpermodels # Create your models here. class Networks(models.Model): POLICIES = ( ('reject', 'Reject'), ('drop', 'Ignore'), ('accept', 'Accept'), ) name = models.CharField(max_length=30) interface = models.CharField(max_length=10) ip_range = helpermodels.IPNetworkField() policy = models.CharField("default policy", choices=POLICIES, max_length=6) <commit_msg>Fix Network model name; better import<commit_after>from django.db import models from helpers.models import IPNetworkField # Create your models here. class Network(models.Model): POLICIES = ( ('reject', 'Reject'), ('drop', 'Ignore'), ('accept', 'Accept'), ) name = models.CharField(max_length=30) interface = models.CharField(max_length=10) ip_range = IPNetworkField() policy = models.CharField("default policy", choices=POLICIES, max_length=6)
563e7d5bc2fadd35b0fc71d45c949aa0b2e872a9
example/example/tasksapp/run_tasks.py
example/example/tasksapp/run_tasks.py
import os import time from dj_experiment.tasks.tasks import longtime_add, netcdf_save from example.settings import (DJ_EXPERIMENT_BASE_DATA_DIR, DJ_EXPERIMENT_DATA_DIR) if __name__ == '__main__': result = longtime_add.delay(1, 2) # at this time, our task is not finished, so it will return False print 'Task finished? ', result.ready() print 'Task result: ', result.result # sleep 10 seconds to ensure the task has been finished time.sleep(10) # now the task should be finished and ready method will return True print 'Task finished? ', result.ready() print 'Task result: ', result.result rcmdatadir = os.path.join(DJ_EXPERIMENT_BASE_DATA_DIR, DJ_EXPERIMENT_DATA_DIR) result1 = netcdf_save.delay(14, rcmdatadir) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result time.sleep(10) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result
import os import time from dj_experiment.tasks.tasks import longtime_add, netcdf_save from example.settings import DJ_EXPERIMENT_BASE_DATA_DIR if __name__ == '__main__': result = longtime_add.delay(1, 2) # at this time, our task is not finished, so it will return False print 'Task finished? ', result.ready() print 'Task result: ', result.result # sleep 10 seconds to ensure the task has been finished time.sleep(10) # now the task should be finished and ready method will return True print 'Task finished? ', result.ready() print 'Task result: ', result.result rcmdatadir = DJ_EXPERIMENT_BASE_DATA_DIR result1 = netcdf_save.delay(14, rcmdatadir) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result time.sleep(10) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result
Fix wrong path composition for data directory
Fix wrong path composition for data directory
Python
mit
francbartoli/dj-experiment,francbartoli/dj-experiment
import os import time from dj_experiment.tasks.tasks import longtime_add, netcdf_save from example.settings import (DJ_EXPERIMENT_BASE_DATA_DIR, DJ_EXPERIMENT_DATA_DIR) if __name__ == '__main__': result = longtime_add.delay(1, 2) # at this time, our task is not finished, so it will return False print 'Task finished? ', result.ready() print 'Task result: ', result.result # sleep 10 seconds to ensure the task has been finished time.sleep(10) # now the task should be finished and ready method will return True print 'Task finished? ', result.ready() print 'Task result: ', result.result rcmdatadir = os.path.join(DJ_EXPERIMENT_BASE_DATA_DIR, DJ_EXPERIMENT_DATA_DIR) result1 = netcdf_save.delay(14, rcmdatadir) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result time.sleep(10) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result Fix wrong path composition for data directory
import os import time from dj_experiment.tasks.tasks import longtime_add, netcdf_save from example.settings import DJ_EXPERIMENT_BASE_DATA_DIR if __name__ == '__main__': result = longtime_add.delay(1, 2) # at this time, our task is not finished, so it will return False print 'Task finished? ', result.ready() print 'Task result: ', result.result # sleep 10 seconds to ensure the task has been finished time.sleep(10) # now the task should be finished and ready method will return True print 'Task finished? ', result.ready() print 'Task result: ', result.result rcmdatadir = DJ_EXPERIMENT_BASE_DATA_DIR result1 = netcdf_save.delay(14, rcmdatadir) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result time.sleep(10) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result
<commit_before>import os import time from dj_experiment.tasks.tasks import longtime_add, netcdf_save from example.settings import (DJ_EXPERIMENT_BASE_DATA_DIR, DJ_EXPERIMENT_DATA_DIR) if __name__ == '__main__': result = longtime_add.delay(1, 2) # at this time, our task is not finished, so it will return False print 'Task finished? ', result.ready() print 'Task result: ', result.result # sleep 10 seconds to ensure the task has been finished time.sleep(10) # now the task should be finished and ready method will return True print 'Task finished? ', result.ready() print 'Task result: ', result.result rcmdatadir = os.path.join(DJ_EXPERIMENT_BASE_DATA_DIR, DJ_EXPERIMENT_DATA_DIR) result1 = netcdf_save.delay(14, rcmdatadir) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result time.sleep(10) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result <commit_msg>Fix wrong path composition for data directory<commit_after>
import os import time from dj_experiment.tasks.tasks import longtime_add, netcdf_save from example.settings import DJ_EXPERIMENT_BASE_DATA_DIR if __name__ == '__main__': result = longtime_add.delay(1, 2) # at this time, our task is not finished, so it will return False print 'Task finished? ', result.ready() print 'Task result: ', result.result # sleep 10 seconds to ensure the task has been finished time.sleep(10) # now the task should be finished and ready method will return True print 'Task finished? ', result.ready() print 'Task result: ', result.result rcmdatadir = DJ_EXPERIMENT_BASE_DATA_DIR result1 = netcdf_save.delay(14, rcmdatadir) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result time.sleep(10) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result
import os import time from dj_experiment.tasks.tasks import longtime_add, netcdf_save from example.settings import (DJ_EXPERIMENT_BASE_DATA_DIR, DJ_EXPERIMENT_DATA_DIR) if __name__ == '__main__': result = longtime_add.delay(1, 2) # at this time, our task is not finished, so it will return False print 'Task finished? ', result.ready() print 'Task result: ', result.result # sleep 10 seconds to ensure the task has been finished time.sleep(10) # now the task should be finished and ready method will return True print 'Task finished? ', result.ready() print 'Task result: ', result.result rcmdatadir = os.path.join(DJ_EXPERIMENT_BASE_DATA_DIR, DJ_EXPERIMENT_DATA_DIR) result1 = netcdf_save.delay(14, rcmdatadir) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result time.sleep(10) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result Fix wrong path composition for data directoryimport os import time from dj_experiment.tasks.tasks import longtime_add, netcdf_save from example.settings import DJ_EXPERIMENT_BASE_DATA_DIR if __name__ == '__main__': result = longtime_add.delay(1, 2) # at this time, our task is not finished, so it will return False print 'Task finished? ', result.ready() print 'Task result: ', result.result # sleep 10 seconds to ensure the task has been finished time.sleep(10) # now the task should be finished and ready method will return True print 'Task finished? ', result.ready() print 'Task result: ', result.result rcmdatadir = DJ_EXPERIMENT_BASE_DATA_DIR result1 = netcdf_save.delay(14, rcmdatadir) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result time.sleep(10) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result
<commit_before>import os import time from dj_experiment.tasks.tasks import longtime_add, netcdf_save from example.settings import (DJ_EXPERIMENT_BASE_DATA_DIR, DJ_EXPERIMENT_DATA_DIR) if __name__ == '__main__': result = longtime_add.delay(1, 2) # at this time, our task is not finished, so it will return False print 'Task finished? ', result.ready() print 'Task result: ', result.result # sleep 10 seconds to ensure the task has been finished time.sleep(10) # now the task should be finished and ready method will return True print 'Task finished? ', result.ready() print 'Task result: ', result.result rcmdatadir = os.path.join(DJ_EXPERIMENT_BASE_DATA_DIR, DJ_EXPERIMENT_DATA_DIR) result1 = netcdf_save.delay(14, rcmdatadir) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result time.sleep(10) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result <commit_msg>Fix wrong path composition for data directory<commit_after>import os import time from dj_experiment.tasks.tasks import longtime_add, netcdf_save from example.settings import DJ_EXPERIMENT_BASE_DATA_DIR if __name__ == '__main__': result = longtime_add.delay(1, 2) # at this time, our task is not finished, so it will return False print 'Task finished? ', result.ready() print 'Task result: ', result.result # sleep 10 seconds to ensure the task has been finished time.sleep(10) # now the task should be finished and ready method will return True print 'Task finished? ', result.ready() print 'Task result: ', result.result rcmdatadir = DJ_EXPERIMENT_BASE_DATA_DIR result1 = netcdf_save.delay(14, rcmdatadir) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result time.sleep(10) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result
64750693969bda63dae28db0b43eaca09c549ab4
scripts/lib/logger.py
scripts/lib/logger.py
# logger module from datetime import datetime import os logfile = None logbuf = [] def init(analysis_path): global logfile if not os.path.isdir(analysis_path): log("logger: analysis_path missing:", analysis_path) use_log_dir = False if use_log_dir: logdir = os.path.join(analysis_path, "log") if not os.path.isdir(logdir): log("logger: creating log directory:", logdir) os.makedirs(logdir) logfile = os.path.join(logdir, "messages") else: logfile = os.path.join(analysis_path, "messages") # log a message to messages files (and to stdout by default) def log(*args, quiet=False): global logbuf # timestamp now = datetime.now() timestamp = str(now) + ": " # assemble message line msg = [] for a in args: msg.append(str(a)) logbuf.append(timestamp + " ".join(msg)) if logfile: # flush log buffer f = open(logfile, "a") for line in logbuf: f.write(line) f.write("\n") f.close() logbuf = [] if not quiet: print(*msg) # log quietly (log to file, but not to stdout) def qlog(*args): log(*args, quiet=True)
# logger module from datetime import datetime import os logfile = None logbuf = [] def init(analysis_path): global logfile if not os.path.isdir(analysis_path): log("logger: analysis_path missing:", analysis_path) use_log_dir = False if use_log_dir: logdir = os.path.join(analysis_path, "log") if not os.path.isdir(logdir): log("logger: creating log directory:", logdir) os.makedirs(logdir) logfile = os.path.join(logdir, "messages") else: logfile = os.path.join(analysis_path, "messages") # log a message to messages files (and to stdout by default) def log(*args, quiet=False, fancy=False): global logbuf # timestamp now = datetime.now() timestamp = str(now) + ": " # assemble message line msg = [] for a in args: msg.append(str(a)) if not fancy: logbuf.append(timestamp + " ".join(msg)) else: logbuf.append("") logbuf.append("############################################################################") logbuf.append("### " + timestamp + " ".join(msg)) logbuf.append("############################################################################") logbuf.append("") if logfile: # flush log buffer f = open(logfile, "a") for line in logbuf: f.write(line) f.write("\n") f.close() logbuf = [] if not quiet: print(*msg) # log quietly (log to file, but not to stdout) def qlog(*args): log(*args, quiet=True)
Add a fancy log mode.
Add a fancy log mode.
Python
mit
UASLab/ImageAnalysis
# logger module from datetime import datetime import os logfile = None logbuf = [] def init(analysis_path): global logfile if not os.path.isdir(analysis_path): log("logger: analysis_path missing:", analysis_path) use_log_dir = False if use_log_dir: logdir = os.path.join(analysis_path, "log") if not os.path.isdir(logdir): log("logger: creating log directory:", logdir) os.makedirs(logdir) logfile = os.path.join(logdir, "messages") else: logfile = os.path.join(analysis_path, "messages") # log a message to messages files (and to stdout by default) def log(*args, quiet=False): global logbuf # timestamp now = datetime.now() timestamp = str(now) + ": " # assemble message line msg = [] for a in args: msg.append(str(a)) logbuf.append(timestamp + " ".join(msg)) if logfile: # flush log buffer f = open(logfile, "a") for line in logbuf: f.write(line) f.write("\n") f.close() logbuf = [] if not quiet: print(*msg) # log quietly (log to file, but not to stdout) def qlog(*args): log(*args, quiet=True) Add a fancy log mode.
# logger module from datetime import datetime import os logfile = None logbuf = [] def init(analysis_path): global logfile if not os.path.isdir(analysis_path): log("logger: analysis_path missing:", analysis_path) use_log_dir = False if use_log_dir: logdir = os.path.join(analysis_path, "log") if not os.path.isdir(logdir): log("logger: creating log directory:", logdir) os.makedirs(logdir) logfile = os.path.join(logdir, "messages") else: logfile = os.path.join(analysis_path, "messages") # log a message to messages files (and to stdout by default) def log(*args, quiet=False, fancy=False): global logbuf # timestamp now = datetime.now() timestamp = str(now) + ": " # assemble message line msg = [] for a in args: msg.append(str(a)) if not fancy: logbuf.append(timestamp + " ".join(msg)) else: logbuf.append("") logbuf.append("############################################################################") logbuf.append("### " + timestamp + " ".join(msg)) logbuf.append("############################################################################") logbuf.append("") if logfile: # flush log buffer f = open(logfile, "a") for line in logbuf: f.write(line) f.write("\n") f.close() logbuf = [] if not quiet: print(*msg) # log quietly (log to file, but not to stdout) def qlog(*args): log(*args, quiet=True)
<commit_before># logger module from datetime import datetime import os logfile = None logbuf = [] def init(analysis_path): global logfile if not os.path.isdir(analysis_path): log("logger: analysis_path missing:", analysis_path) use_log_dir = False if use_log_dir: logdir = os.path.join(analysis_path, "log") if not os.path.isdir(logdir): log("logger: creating log directory:", logdir) os.makedirs(logdir) logfile = os.path.join(logdir, "messages") else: logfile = os.path.join(analysis_path, "messages") # log a message to messages files (and to stdout by default) def log(*args, quiet=False): global logbuf # timestamp now = datetime.now() timestamp = str(now) + ": " # assemble message line msg = [] for a in args: msg.append(str(a)) logbuf.append(timestamp + " ".join(msg)) if logfile: # flush log buffer f = open(logfile, "a") for line in logbuf: f.write(line) f.write("\n") f.close() logbuf = [] if not quiet: print(*msg) # log quietly (log to file, but not to stdout) def qlog(*args): log(*args, quiet=True) <commit_msg>Add a fancy log mode.<commit_after>
# logger module from datetime import datetime import os logfile = None logbuf = [] def init(analysis_path): global logfile if not os.path.isdir(analysis_path): log("logger: analysis_path missing:", analysis_path) use_log_dir = False if use_log_dir: logdir = os.path.join(analysis_path, "log") if not os.path.isdir(logdir): log("logger: creating log directory:", logdir) os.makedirs(logdir) logfile = os.path.join(logdir, "messages") else: logfile = os.path.join(analysis_path, "messages") # log a message to messages files (and to stdout by default) def log(*args, quiet=False, fancy=False): global logbuf # timestamp now = datetime.now() timestamp = str(now) + ": " # assemble message line msg = [] for a in args: msg.append(str(a)) if not fancy: logbuf.append(timestamp + " ".join(msg)) else: logbuf.append("") logbuf.append("############################################################################") logbuf.append("### " + timestamp + " ".join(msg)) logbuf.append("############################################################################") logbuf.append("") if logfile: # flush log buffer f = open(logfile, "a") for line in logbuf: f.write(line) f.write("\n") f.close() logbuf = [] if not quiet: print(*msg) # log quietly (log to file, but not to stdout) def qlog(*args): log(*args, quiet=True)
# logger module from datetime import datetime import os logfile = None logbuf = [] def init(analysis_path): global logfile if not os.path.isdir(analysis_path): log("logger: analysis_path missing:", analysis_path) use_log_dir = False if use_log_dir: logdir = os.path.join(analysis_path, "log") if not os.path.isdir(logdir): log("logger: creating log directory:", logdir) os.makedirs(logdir) logfile = os.path.join(logdir, "messages") else: logfile = os.path.join(analysis_path, "messages") # log a message to messages files (and to stdout by default) def log(*args, quiet=False): global logbuf # timestamp now = datetime.now() timestamp = str(now) + ": " # assemble message line msg = [] for a in args: msg.append(str(a)) logbuf.append(timestamp + " ".join(msg)) if logfile: # flush log buffer f = open(logfile, "a") for line in logbuf: f.write(line) f.write("\n") f.close() logbuf = [] if not quiet: print(*msg) # log quietly (log to file, but not to stdout) def qlog(*args): log(*args, quiet=True) Add a fancy log mode.# logger module from datetime import datetime import os logfile = None logbuf = [] def init(analysis_path): global logfile if not os.path.isdir(analysis_path): log("logger: analysis_path missing:", analysis_path) use_log_dir = False if use_log_dir: logdir = os.path.join(analysis_path, "log") if not os.path.isdir(logdir): log("logger: creating log directory:", logdir) os.makedirs(logdir) logfile = os.path.join(logdir, "messages") else: logfile = os.path.join(analysis_path, "messages") # log a message to messages files (and to stdout by default) def log(*args, quiet=False, fancy=False): global logbuf # timestamp now = datetime.now() timestamp = str(now) + ": " # assemble message line msg = [] for a in args: msg.append(str(a)) if not fancy: logbuf.append(timestamp + " ".join(msg)) else: logbuf.append("") logbuf.append("############################################################################") logbuf.append("### " + timestamp + " ".join(msg)) logbuf.append("############################################################################") logbuf.append("") if logfile: # flush log buffer f = open(logfile, "a") for line in logbuf: f.write(line) f.write("\n") f.close() logbuf = [] if not quiet: print(*msg) # log quietly (log to file, but not to stdout) def qlog(*args): log(*args, quiet=True)
<commit_before># logger module from datetime import datetime import os logfile = None logbuf = [] def init(analysis_path): global logfile if not os.path.isdir(analysis_path): log("logger: analysis_path missing:", analysis_path) use_log_dir = False if use_log_dir: logdir = os.path.join(analysis_path, "log") if not os.path.isdir(logdir): log("logger: creating log directory:", logdir) os.makedirs(logdir) logfile = os.path.join(logdir, "messages") else: logfile = os.path.join(analysis_path, "messages") # log a message to messages files (and to stdout by default) def log(*args, quiet=False): global logbuf # timestamp now = datetime.now() timestamp = str(now) + ": " # assemble message line msg = [] for a in args: msg.append(str(a)) logbuf.append(timestamp + " ".join(msg)) if logfile: # flush log buffer f = open(logfile, "a") for line in logbuf: f.write(line) f.write("\n") f.close() logbuf = [] if not quiet: print(*msg) # log quietly (log to file, but not to stdout) def qlog(*args): log(*args, quiet=True) <commit_msg>Add a fancy log mode.<commit_after># logger module from datetime import datetime import os logfile = None logbuf = [] def init(analysis_path): global logfile if not os.path.isdir(analysis_path): log("logger: analysis_path missing:", analysis_path) use_log_dir = False if use_log_dir: logdir = os.path.join(analysis_path, "log") if not os.path.isdir(logdir): log("logger: creating log directory:", logdir) os.makedirs(logdir) logfile = os.path.join(logdir, "messages") else: logfile = os.path.join(analysis_path, "messages") # log a message to messages files (and to stdout by default) def log(*args, quiet=False, fancy=False): global logbuf # timestamp now = datetime.now() timestamp = str(now) + ": " # assemble message line msg = [] for a in args: msg.append(str(a)) if not fancy: logbuf.append(timestamp + " ".join(msg)) else: logbuf.append("") logbuf.append("############################################################################") logbuf.append("### " + timestamp + " ".join(msg)) logbuf.append("############################################################################") logbuf.append("") if logfile: # flush log buffer f = open(logfile, "a") for line in logbuf: f.write(line) f.write("\n") f.close() logbuf = [] if not quiet: print(*msg) # log quietly (log to file, but not to stdout) def qlog(*args): log(*args, quiet=True)
28d933b351f58fabad464deedb57af55b499b7c8
tag_release.py
tag_release.py
#!/usr/bin/env python import os import sys def main(): if len(sys.argv) != 2: print('Usage: %s version' % sys.argv[0]) os.system('git tag | sort -n | tail -n 1') sys.exit() version = sys.argv[1] with open('floo/version.py', 'r') as fd: version_py = fd.read().split('\n') version_py[0] = "PLUGIN_VERSION = '%s'" % version with open('floo/version.py', 'w') as fd: fd.write('\n'.join(version_py)) os.system('git add packages.json floo/version.py') os.system('git commit -m "Tag new release: %s"' % version) os.system('git tag %s' % version) os.system('git push --tags') os.system('git push') if __name__ == "__main__": main()
#!/usr/bin/env python import os import re import sys from distutils.version import StrictVersion def main(): if len(sys.argv) != 2: print('Usage: %s version' % sys.argv[0]) versions = os.popen('git tag').read().split('\n') versions = [v for v in versions if re.match("\\d\\.\\d\\.\\d", v)] versions.sort(key=StrictVersion) print(versions[-1]) sys.exit() version = sys.argv[1] with open('floo/version.py', 'r') as fd: version_py = fd.read().split('\n') version_py[0] = "PLUGIN_VERSION = '%s'" % version with open('floo/version.py', 'w') as fd: fd.write('\n'.join(version_py)) os.system('git add packages.json floo/version.py') os.system('git commit -m "Tag new release: %s"' % version) os.system('git tag %s' % version) os.system('git push --tags') os.system('git push') if __name__ == "__main__": main()
Tag release script now works with semvers.
Tag release script now works with semvers.
Python
apache-2.0
Floobits/floobits-sublime,Floobits/floobits-sublime
#!/usr/bin/env python import os import sys def main(): if len(sys.argv) != 2: print('Usage: %s version' % sys.argv[0]) os.system('git tag | sort -n | tail -n 1') sys.exit() version = sys.argv[1] with open('floo/version.py', 'r') as fd: version_py = fd.read().split('\n') version_py[0] = "PLUGIN_VERSION = '%s'" % version with open('floo/version.py', 'w') as fd: fd.write('\n'.join(version_py)) os.system('git add packages.json floo/version.py') os.system('git commit -m "Tag new release: %s"' % version) os.system('git tag %s' % version) os.system('git push --tags') os.system('git push') if __name__ == "__main__": main() Tag release script now works with semvers.
#!/usr/bin/env python import os import re import sys from distutils.version import StrictVersion def main(): if len(sys.argv) != 2: print('Usage: %s version' % sys.argv[0]) versions = os.popen('git tag').read().split('\n') versions = [v for v in versions if re.match("\\d\\.\\d\\.\\d", v)] versions.sort(key=StrictVersion) print(versions[-1]) sys.exit() version = sys.argv[1] with open('floo/version.py', 'r') as fd: version_py = fd.read().split('\n') version_py[0] = "PLUGIN_VERSION = '%s'" % version with open('floo/version.py', 'w') as fd: fd.write('\n'.join(version_py)) os.system('git add packages.json floo/version.py') os.system('git commit -m "Tag new release: %s"' % version) os.system('git tag %s' % version) os.system('git push --tags') os.system('git push') if __name__ == "__main__": main()
<commit_before>#!/usr/bin/env python import os import sys def main(): if len(sys.argv) != 2: print('Usage: %s version' % sys.argv[0]) os.system('git tag | sort -n | tail -n 1') sys.exit() version = sys.argv[1] with open('floo/version.py', 'r') as fd: version_py = fd.read().split('\n') version_py[0] = "PLUGIN_VERSION = '%s'" % version with open('floo/version.py', 'w') as fd: fd.write('\n'.join(version_py)) os.system('git add packages.json floo/version.py') os.system('git commit -m "Tag new release: %s"' % version) os.system('git tag %s' % version) os.system('git push --tags') os.system('git push') if __name__ == "__main__": main() <commit_msg>Tag release script now works with semvers.<commit_after>
#!/usr/bin/env python import os import re import sys from distutils.version import StrictVersion def main(): if len(sys.argv) != 2: print('Usage: %s version' % sys.argv[0]) versions = os.popen('git tag').read().split('\n') versions = [v for v in versions if re.match("\\d\\.\\d\\.\\d", v)] versions.sort(key=StrictVersion) print(versions[-1]) sys.exit() version = sys.argv[1] with open('floo/version.py', 'r') as fd: version_py = fd.read().split('\n') version_py[0] = "PLUGIN_VERSION = '%s'" % version with open('floo/version.py', 'w') as fd: fd.write('\n'.join(version_py)) os.system('git add packages.json floo/version.py') os.system('git commit -m "Tag new release: %s"' % version) os.system('git tag %s' % version) os.system('git push --tags') os.system('git push') if __name__ == "__main__": main()
#!/usr/bin/env python import os import sys def main(): if len(sys.argv) != 2: print('Usage: %s version' % sys.argv[0]) os.system('git tag | sort -n | tail -n 1') sys.exit() version = sys.argv[1] with open('floo/version.py', 'r') as fd: version_py = fd.read().split('\n') version_py[0] = "PLUGIN_VERSION = '%s'" % version with open('floo/version.py', 'w') as fd: fd.write('\n'.join(version_py)) os.system('git add packages.json floo/version.py') os.system('git commit -m "Tag new release: %s"' % version) os.system('git tag %s' % version) os.system('git push --tags') os.system('git push') if __name__ == "__main__": main() Tag release script now works with semvers.#!/usr/bin/env python import os import re import sys from distutils.version import StrictVersion def main(): if len(sys.argv) != 2: print('Usage: %s version' % sys.argv[0]) versions = os.popen('git tag').read().split('\n') versions = [v for v in versions if re.match("\\d\\.\\d\\.\\d", v)] versions.sort(key=StrictVersion) print(versions[-1]) sys.exit() version = sys.argv[1] with open('floo/version.py', 'r') as fd: version_py = fd.read().split('\n') version_py[0] = "PLUGIN_VERSION = '%s'" % version with open('floo/version.py', 'w') as fd: fd.write('\n'.join(version_py)) os.system('git add packages.json floo/version.py') os.system('git commit -m "Tag new release: %s"' % version) os.system('git tag %s' % version) os.system('git push --tags') os.system('git push') if __name__ == "__main__": main()
<commit_before>#!/usr/bin/env python import os import sys def main(): if len(sys.argv) != 2: print('Usage: %s version' % sys.argv[0]) os.system('git tag | sort -n | tail -n 1') sys.exit() version = sys.argv[1] with open('floo/version.py', 'r') as fd: version_py = fd.read().split('\n') version_py[0] = "PLUGIN_VERSION = '%s'" % version with open('floo/version.py', 'w') as fd: fd.write('\n'.join(version_py)) os.system('git add packages.json floo/version.py') os.system('git commit -m "Tag new release: %s"' % version) os.system('git tag %s' % version) os.system('git push --tags') os.system('git push') if __name__ == "__main__": main() <commit_msg>Tag release script now works with semvers.<commit_after>#!/usr/bin/env python import os import re import sys from distutils.version import StrictVersion def main(): if len(sys.argv) != 2: print('Usage: %s version' % sys.argv[0]) versions = os.popen('git tag').read().split('\n') versions = [v for v in versions if re.match("\\d\\.\\d\\.\\d", v)] versions.sort(key=StrictVersion) print(versions[-1]) sys.exit() version = sys.argv[1] with open('floo/version.py', 'r') as fd: version_py = fd.read().split('\n') version_py[0] = "PLUGIN_VERSION = '%s'" % version with open('floo/version.py', 'w') as fd: fd.write('\n'.join(version_py)) os.system('git add packages.json floo/version.py') os.system('git commit -m "Tag new release: %s"' % version) os.system('git tag %s' % version) os.system('git push --tags') os.system('git push') if __name__ == "__main__": main()
348b79cfd68afa91a71009a6481f2d45495909cf
test/server.py
test/server.py
#!/usr/bin/env python import sys import logging from os.path import abspath, dirname basepath = abspath(dirname(abspath(__file__)) + '/..') sys.path.insert(0, basepath) from server import Server from deflate_frame import DeflateFrame class EchoServer(Server): def onmessage(self, client, message): Server.onmessage(self, client, message) client.send(message) if __name__ == '__main__': EchoServer(('localhost', 8000), #extensions=[DeflateFrame(no_context_takeover=True)], extensions=[DeflateFrame()], #ssl_args=dict(keyfile='cert.pem', certfile='cert.pem'), loglevel=logging.DEBUG).run()
#!/usr/bin/env python import sys import logging from os.path import abspath, dirname basepath = abspath(dirname(abspath(__file__)) + '/..') sys.path.insert(0, basepath) from async import AsyncServer from deflate_message import DeflateMessage from deflate_frame import DeflateFrame class EchoServer(AsyncServer): def onmessage(self, client, message): client.send(message) if __name__ == '__main__': EchoServer(('localhost', 8000), extensions=[DeflateMessage(), DeflateFrame()], #ssl_args=dict(keyfile='cert.pem', certfile='cert.pem'), loglevel=logging.DEBUG).run()
Test EchoServer now uses AsyncServer and deflate extensions
Test EchoServer now uses AsyncServer and deflate extensions
Python
bsd-3-clause
taddeus/wspy,taddeus/wspy
#!/usr/bin/env python import sys import logging from os.path import abspath, dirname basepath = abspath(dirname(abspath(__file__)) + '/..') sys.path.insert(0, basepath) from server import Server from deflate_frame import DeflateFrame class EchoServer(Server): def onmessage(self, client, message): Server.onmessage(self, client, message) client.send(message) if __name__ == '__main__': EchoServer(('localhost', 8000), #extensions=[DeflateFrame(no_context_takeover=True)], extensions=[DeflateFrame()], #ssl_args=dict(keyfile='cert.pem', certfile='cert.pem'), loglevel=logging.DEBUG).run() Test EchoServer now uses AsyncServer and deflate extensions
#!/usr/bin/env python import sys import logging from os.path import abspath, dirname basepath = abspath(dirname(abspath(__file__)) + '/..') sys.path.insert(0, basepath) from async import AsyncServer from deflate_message import DeflateMessage from deflate_frame import DeflateFrame class EchoServer(AsyncServer): def onmessage(self, client, message): client.send(message) if __name__ == '__main__': EchoServer(('localhost', 8000), extensions=[DeflateMessage(), DeflateFrame()], #ssl_args=dict(keyfile='cert.pem', certfile='cert.pem'), loglevel=logging.DEBUG).run()
<commit_before>#!/usr/bin/env python import sys import logging from os.path import abspath, dirname basepath = abspath(dirname(abspath(__file__)) + '/..') sys.path.insert(0, basepath) from server import Server from deflate_frame import DeflateFrame class EchoServer(Server): def onmessage(self, client, message): Server.onmessage(self, client, message) client.send(message) if __name__ == '__main__': EchoServer(('localhost', 8000), #extensions=[DeflateFrame(no_context_takeover=True)], extensions=[DeflateFrame()], #ssl_args=dict(keyfile='cert.pem', certfile='cert.pem'), loglevel=logging.DEBUG).run() <commit_msg>Test EchoServer now uses AsyncServer and deflate extensions<commit_after>
#!/usr/bin/env python import sys import logging from os.path import abspath, dirname basepath = abspath(dirname(abspath(__file__)) + '/..') sys.path.insert(0, basepath) from async import AsyncServer from deflate_message import DeflateMessage from deflate_frame import DeflateFrame class EchoServer(AsyncServer): def onmessage(self, client, message): client.send(message) if __name__ == '__main__': EchoServer(('localhost', 8000), extensions=[DeflateMessage(), DeflateFrame()], #ssl_args=dict(keyfile='cert.pem', certfile='cert.pem'), loglevel=logging.DEBUG).run()
#!/usr/bin/env python import sys import logging from os.path import abspath, dirname basepath = abspath(dirname(abspath(__file__)) + '/..') sys.path.insert(0, basepath) from server import Server from deflate_frame import DeflateFrame class EchoServer(Server): def onmessage(self, client, message): Server.onmessage(self, client, message) client.send(message) if __name__ == '__main__': EchoServer(('localhost', 8000), #extensions=[DeflateFrame(no_context_takeover=True)], extensions=[DeflateFrame()], #ssl_args=dict(keyfile='cert.pem', certfile='cert.pem'), loglevel=logging.DEBUG).run() Test EchoServer now uses AsyncServer and deflate extensions#!/usr/bin/env python import sys import logging from os.path import abspath, dirname basepath = abspath(dirname(abspath(__file__)) + '/..') sys.path.insert(0, basepath) from async import AsyncServer from deflate_message import DeflateMessage from deflate_frame import DeflateFrame class EchoServer(AsyncServer): def onmessage(self, client, message): client.send(message) if __name__ == '__main__': EchoServer(('localhost', 8000), extensions=[DeflateMessage(), DeflateFrame()], #ssl_args=dict(keyfile='cert.pem', certfile='cert.pem'), loglevel=logging.DEBUG).run()
<commit_before>#!/usr/bin/env python import sys import logging from os.path import abspath, dirname basepath = abspath(dirname(abspath(__file__)) + '/..') sys.path.insert(0, basepath) from server import Server from deflate_frame import DeflateFrame class EchoServer(Server): def onmessage(self, client, message): Server.onmessage(self, client, message) client.send(message) if __name__ == '__main__': EchoServer(('localhost', 8000), #extensions=[DeflateFrame(no_context_takeover=True)], extensions=[DeflateFrame()], #ssl_args=dict(keyfile='cert.pem', certfile='cert.pem'), loglevel=logging.DEBUG).run() <commit_msg>Test EchoServer now uses AsyncServer and deflate extensions<commit_after>#!/usr/bin/env python import sys import logging from os.path import abspath, dirname basepath = abspath(dirname(abspath(__file__)) + '/..') sys.path.insert(0, basepath) from async import AsyncServer from deflate_message import DeflateMessage from deflate_frame import DeflateFrame class EchoServer(AsyncServer): def onmessage(self, client, message): client.send(message) if __name__ == '__main__': EchoServer(('localhost', 8000), extensions=[DeflateMessage(), DeflateFrame()], #ssl_args=dict(keyfile='cert.pem', certfile='cert.pem'), loglevel=logging.DEBUG).run()
e50b95143ce4a807c434eaa0e6ef38d36f91a77a
pylons/__init__.py
pylons/__init__.py
"""Base objects to be exported for use in Controllers""" # Import pkg_resources first so namespace handling is properly done so the # paste imports work import pkg_resources from paste.registry import StackedObjectProxy from pylons.configuration import config __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator")
"""Base objects to be exported for use in Controllers""" # Import pkg_resources first so namespace handling is properly done so the # paste imports work import pkg_resources from paste.registry import StackedObjectProxy from pylons.configuration import config from pylons.controllers.util import Request from pylons.controllers.util import Response __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url', 'Request', 'Response'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator")
Add Request/Response import points for pylons.
Add Request/Response import points for pylons. --HG-- branch : trunk
Python
bsd-3-clause
Pylons/pylons,Pylons/pylons,moreati/pylons,moreati/pylons,Pylons/pylons,moreati/pylons
"""Base objects to be exported for use in Controllers""" # Import pkg_resources first so namespace handling is properly done so the # paste imports work import pkg_resources from paste.registry import StackedObjectProxy from pylons.configuration import config __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator") Add Request/Response import points for pylons. --HG-- branch : trunk
"""Base objects to be exported for use in Controllers""" # Import pkg_resources first so namespace handling is properly done so the # paste imports work import pkg_resources from paste.registry import StackedObjectProxy from pylons.configuration import config from pylons.controllers.util import Request from pylons.controllers.util import Response __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url', 'Request', 'Response'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator")
<commit_before>"""Base objects to be exported for use in Controllers""" # Import pkg_resources first so namespace handling is properly done so the # paste imports work import pkg_resources from paste.registry import StackedObjectProxy from pylons.configuration import config __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator") <commit_msg>Add Request/Response import points for pylons. --HG-- branch : trunk<commit_after>
"""Base objects to be exported for use in Controllers""" # Import pkg_resources first so namespace handling is properly done so the # paste imports work import pkg_resources from paste.registry import StackedObjectProxy from pylons.configuration import config from pylons.controllers.util import Request from pylons.controllers.util import Response __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url', 'Request', 'Response'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator")
"""Base objects to be exported for use in Controllers""" # Import pkg_resources first so namespace handling is properly done so the # paste imports work import pkg_resources from paste.registry import StackedObjectProxy from pylons.configuration import config __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator") Add Request/Response import points for pylons. --HG-- branch : trunk"""Base objects to be exported for use in Controllers""" # Import pkg_resources first so namespace handling is properly done so the # paste imports work import pkg_resources from paste.registry import StackedObjectProxy from pylons.configuration import config from pylons.controllers.util import Request from pylons.controllers.util import Response __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url', 'Request', 'Response'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator")
<commit_before>"""Base objects to be exported for use in Controllers""" # Import pkg_resources first so namespace handling is properly done so the # paste imports work import pkg_resources from paste.registry import StackedObjectProxy from pylons.configuration import config __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator") <commit_msg>Add Request/Response import points for pylons. --HG-- branch : trunk<commit_after>"""Base objects to be exported for use in Controllers""" # Import pkg_resources first so namespace handling is properly done so the # paste imports work import pkg_resources from paste.registry import StackedObjectProxy from pylons.configuration import config from pylons.controllers.util import Request from pylons.controllers.util import Response __all__ = ['app_globals', 'cache', 'config', 'request', 'response', 'session', 'tmpl_context', 'url', 'Request', 'Response'] def __figure_version(): try: from pkg_resources import require import os # NOTE: this only works when the package is either installed, # or has an .egg-info directory present (i.e. wont work with raw # SVN checkout) info = require('pylons')[0] if os.path.dirname(os.path.dirname(__file__)) == info.location: return info.version else: return '(not installed)' except: return '(not installed)' __version__ = __figure_version() app_globals = StackedObjectProxy(name="app_globals") cache = StackedObjectProxy(name="cache") request = StackedObjectProxy(name="request") response = StackedObjectProxy(name="response") session = StackedObjectProxy(name="session") tmpl_context = StackedObjectProxy(name="tmpl_context or C") url = StackedObjectProxy(name="url") translator = StackedObjectProxy(name="translator")
9f0b77ba9d98c6f78e5320d2b4515f3152a6f38c
db-integrity-tests/src/cliargs.py
db-integrity-tests/src/cliargs.py
"""Module with specification of all supported command line arguments.""" import argparse cli_parser = argparse.ArgumentParser() cli_parser.add_argument('--log-level', help='log level as defined in ' + 'https://docs.python.org/3.5/library/logging.html#logging-level', type=int, default=20)
"""Module with specification of all supported command line arguments.""" import argparse cli_parser = argparse.ArgumentParser() cli_parser.add_argument('--log-level', help='log level as defined in ' + 'https://docs.python.org/3/library/logging.html#logging-levels', type=int, default=20)
Fix wrong URL to Python logging lib documentation
Fix wrong URL to Python logging lib documentation
Python
apache-2.0
jpopelka/fabric8-analytics-common,tisnik/fabric8-analytics-common,tisnik/fabric8-analytics-common,jpopelka/fabric8-analytics-common,jpopelka/fabric8-analytics-common,tisnik/fabric8-analytics-common
"""Module with specification of all supported command line arguments.""" import argparse cli_parser = argparse.ArgumentParser() cli_parser.add_argument('--log-level', help='log level as defined in ' + 'https://docs.python.org/3.5/library/logging.html#logging-level', type=int, default=20) Fix wrong URL to Python logging lib documentation
"""Module with specification of all supported command line arguments.""" import argparse cli_parser = argparse.ArgumentParser() cli_parser.add_argument('--log-level', help='log level as defined in ' + 'https://docs.python.org/3/library/logging.html#logging-levels', type=int, default=20)
<commit_before>"""Module with specification of all supported command line arguments.""" import argparse cli_parser = argparse.ArgumentParser() cli_parser.add_argument('--log-level', help='log level as defined in ' + 'https://docs.python.org/3.5/library/logging.html#logging-level', type=int, default=20) <commit_msg>Fix wrong URL to Python logging lib documentation<commit_after>
"""Module with specification of all supported command line arguments.""" import argparse cli_parser = argparse.ArgumentParser() cli_parser.add_argument('--log-level', help='log level as defined in ' + 'https://docs.python.org/3/library/logging.html#logging-levels', type=int, default=20)
"""Module with specification of all supported command line arguments.""" import argparse cli_parser = argparse.ArgumentParser() cli_parser.add_argument('--log-level', help='log level as defined in ' + 'https://docs.python.org/3.5/library/logging.html#logging-level', type=int, default=20) Fix wrong URL to Python logging lib documentation"""Module with specification of all supported command line arguments.""" import argparse cli_parser = argparse.ArgumentParser() cli_parser.add_argument('--log-level', help='log level as defined in ' + 'https://docs.python.org/3/library/logging.html#logging-levels', type=int, default=20)
<commit_before>"""Module with specification of all supported command line arguments.""" import argparse cli_parser = argparse.ArgumentParser() cli_parser.add_argument('--log-level', help='log level as defined in ' + 'https://docs.python.org/3.5/library/logging.html#logging-level', type=int, default=20) <commit_msg>Fix wrong URL to Python logging lib documentation<commit_after>"""Module with specification of all supported command line arguments.""" import argparse cli_parser = argparse.ArgumentParser() cli_parser.add_argument('--log-level', help='log level as defined in ' + 'https://docs.python.org/3/library/logging.html#logging-levels', type=int, default=20)
0519824c537a96474e0501e1ac45f7a626391a31
tests/test_model_object.py
tests/test_model_object.py
# encoding: utf-8 from marathon.models.base import MarathonObject import unittest class MarathonObjectTest(unittest.TestCase): def test_hashable(self): """ Regression test for issue #203 MarathonObject defined __eq__ but not __hash__, meaning that in in Python2.7 MarathonObjects are hashable, but in Python3 they're not, This test ensures that we are hashable in all versions of python """ obj = MarathonObject() collection = {} collection[obj] = True assert collection[obj]
# encoding: utf-8 from marathon.models.base import MarathonObject from marathon.models.base import MarathonResource import unittest class MarathonObjectTest(unittest.TestCase): def test_hashable(self): """ Regression test for issue #203 MarathonObject defined __eq__ but not __hash__, meaning that in in Python2.7 MarathonObjects are hashable, but in Python3 they're not, This test ensures that we are hashable in all versions of python """ obj = MarathonObject() collection = {} collection[obj] = True assert collection[obj] class MarathonResourceHashable(unittest.TestCase): def test_hashable(self): """ Regression test for issue #203 MarathonResource defined __eq__ but not __hash__, meaning that in in Python2.7 MarathonResources are hashable, but in Python3 they're not This test ensures that we are hashable in all versions of python """ obj = MarathonResource() collection = {} collection[obj] = True assert collection[obj]
Add regression test for MarathonResource
Add regression test for MarathonResource
Python
mit
thefactory/marathon-python,thefactory/marathon-python
# encoding: utf-8 from marathon.models.base import MarathonObject import unittest class MarathonObjectTest(unittest.TestCase): def test_hashable(self): """ Regression test for issue #203 MarathonObject defined __eq__ but not __hash__, meaning that in in Python2.7 MarathonObjects are hashable, but in Python3 they're not, This test ensures that we are hashable in all versions of python """ obj = MarathonObject() collection = {} collection[obj] = True assert collection[obj] Add regression test for MarathonResource
# encoding: utf-8 from marathon.models.base import MarathonObject from marathon.models.base import MarathonResource import unittest class MarathonObjectTest(unittest.TestCase): def test_hashable(self): """ Regression test for issue #203 MarathonObject defined __eq__ but not __hash__, meaning that in in Python2.7 MarathonObjects are hashable, but in Python3 they're not, This test ensures that we are hashable in all versions of python """ obj = MarathonObject() collection = {} collection[obj] = True assert collection[obj] class MarathonResourceHashable(unittest.TestCase): def test_hashable(self): """ Regression test for issue #203 MarathonResource defined __eq__ but not __hash__, meaning that in in Python2.7 MarathonResources are hashable, but in Python3 they're not This test ensures that we are hashable in all versions of python """ obj = MarathonResource() collection = {} collection[obj] = True assert collection[obj]
<commit_before># encoding: utf-8 from marathon.models.base import MarathonObject import unittest class MarathonObjectTest(unittest.TestCase): def test_hashable(self): """ Regression test for issue #203 MarathonObject defined __eq__ but not __hash__, meaning that in in Python2.7 MarathonObjects are hashable, but in Python3 they're not, This test ensures that we are hashable in all versions of python """ obj = MarathonObject() collection = {} collection[obj] = True assert collection[obj] <commit_msg>Add regression test for MarathonResource<commit_after>
# encoding: utf-8 from marathon.models.base import MarathonObject from marathon.models.base import MarathonResource import unittest class MarathonObjectTest(unittest.TestCase): def test_hashable(self): """ Regression test for issue #203 MarathonObject defined __eq__ but not __hash__, meaning that in in Python2.7 MarathonObjects are hashable, but in Python3 they're not, This test ensures that we are hashable in all versions of python """ obj = MarathonObject() collection = {} collection[obj] = True assert collection[obj] class MarathonResourceHashable(unittest.TestCase): def test_hashable(self): """ Regression test for issue #203 MarathonResource defined __eq__ but not __hash__, meaning that in in Python2.7 MarathonResources are hashable, but in Python3 they're not This test ensures that we are hashable in all versions of python """ obj = MarathonResource() collection = {} collection[obj] = True assert collection[obj]
# encoding: utf-8 from marathon.models.base import MarathonObject import unittest class MarathonObjectTest(unittest.TestCase): def test_hashable(self): """ Regression test for issue #203 MarathonObject defined __eq__ but not __hash__, meaning that in in Python2.7 MarathonObjects are hashable, but in Python3 they're not, This test ensures that we are hashable in all versions of python """ obj = MarathonObject() collection = {} collection[obj] = True assert collection[obj] Add regression test for MarathonResource# encoding: utf-8 from marathon.models.base import MarathonObject from marathon.models.base import MarathonResource import unittest class MarathonObjectTest(unittest.TestCase): def test_hashable(self): """ Regression test for issue #203 MarathonObject defined __eq__ but not __hash__, meaning that in in Python2.7 MarathonObjects are hashable, but in Python3 they're not, This test ensures that we are hashable in all versions of python """ obj = MarathonObject() collection = {} collection[obj] = True assert collection[obj] class MarathonResourceHashable(unittest.TestCase): def test_hashable(self): """ Regression test for issue #203 MarathonResource defined __eq__ but not __hash__, meaning that in in Python2.7 MarathonResources are hashable, but in Python3 they're not This test ensures that we are hashable in all versions of python """ obj = MarathonResource() collection = {} collection[obj] = True assert collection[obj]
<commit_before># encoding: utf-8 from marathon.models.base import MarathonObject import unittest class MarathonObjectTest(unittest.TestCase): def test_hashable(self): """ Regression test for issue #203 MarathonObject defined __eq__ but not __hash__, meaning that in in Python2.7 MarathonObjects are hashable, but in Python3 they're not, This test ensures that we are hashable in all versions of python """ obj = MarathonObject() collection = {} collection[obj] = True assert collection[obj] <commit_msg>Add regression test for MarathonResource<commit_after># encoding: utf-8 from marathon.models.base import MarathonObject from marathon.models.base import MarathonResource import unittest class MarathonObjectTest(unittest.TestCase): def test_hashable(self): """ Regression test for issue #203 MarathonObject defined __eq__ but not __hash__, meaning that in in Python2.7 MarathonObjects are hashable, but in Python3 they're not, This test ensures that we are hashable in all versions of python """ obj = MarathonObject() collection = {} collection[obj] = True assert collection[obj] class MarathonResourceHashable(unittest.TestCase): def test_hashable(self): """ Regression test for issue #203 MarathonResource defined __eq__ but not __hash__, meaning that in in Python2.7 MarathonResources are hashable, but in Python3 they're not This test ensures that we are hashable in all versions of python """ obj = MarathonResource() collection = {} collection[obj] = True assert collection[obj]
1e19a78652e8fed32eb6e315fca3346b3bc31044
bamova/bamov2npy.py
bamova/bamov2npy.py
import sys import numpy as np def read_phi(flname, n_steps, n_loci): sampled_phis = np.zeros((n_steps, n_loci)) fl = open(flname) current_iter_idx = 0 # index used for storage last_iter_idx = 0 # index used to identify when we finish a step for ln in fl: cols = ln.strip().split(",") iter_idx = int(cols[0]) locus_idx = int(cols[1]) phi = float(cols[2]) if last_iter_idx != iter_idx: last_iter_idx = iter_idx current_iter_idx += 1 sampled_phis[current_iter_idx, locus_idx] = phi fl.close() return sampled_phis if __name__ == "__main__": bamova_phi_output_flname = sys.argv[1] n_steps = int(sys.argv[2]) n_loci = int(sys.argv[3]) npy_flname = sys.argv[4] matrix = read_phi(bamova_phi_output_flname, n_steps, n_loci) np.save(matrix, npy_flname)
import sys import numpy as np def read_phi(flname, n_steps, n_loci): sampled_phis = np.zeros((n_steps, n_loci)) fl = open(flname) current_iter_idx = 0 # index used for storage last_iter_idx = 0 # index used to identify when we finish a step for ln in fl: cols = ln.strip().split(",") iter_idx = int(cols[0]) locus_idx = int(cols[1]) phi = float(cols[2]) if last_iter_idx != iter_idx: last_iter_idx = iter_idx current_iter_idx += 1 sampled_phis[current_iter_idx, locus_idx] = phi fl.close() return sampled_phis if __name__ == "__main__": bamova_phi_output_flname = sys.argv[1] n_steps = int(sys.argv[2]) n_loci = int(sys.argv[3]) npy_flname = sys.argv[4] matrix = read_phi(bamova_phi_output_flname, n_steps, n_loci) np.save(npy_flname, matrix)
Fix ordering of parameters to save
Fix ordering of parameters to save
Python
apache-2.0
rnowling/pop-gen-models
import sys import numpy as np def read_phi(flname, n_steps, n_loci): sampled_phis = np.zeros((n_steps, n_loci)) fl = open(flname) current_iter_idx = 0 # index used for storage last_iter_idx = 0 # index used to identify when we finish a step for ln in fl: cols = ln.strip().split(",") iter_idx = int(cols[0]) locus_idx = int(cols[1]) phi = float(cols[2]) if last_iter_idx != iter_idx: last_iter_idx = iter_idx current_iter_idx += 1 sampled_phis[current_iter_idx, locus_idx] = phi fl.close() return sampled_phis if __name__ == "__main__": bamova_phi_output_flname = sys.argv[1] n_steps = int(sys.argv[2]) n_loci = int(sys.argv[3]) npy_flname = sys.argv[4] matrix = read_phi(bamova_phi_output_flname, n_steps, n_loci) np.save(matrix, npy_flname)Fix ordering of parameters to save
import sys import numpy as np def read_phi(flname, n_steps, n_loci): sampled_phis = np.zeros((n_steps, n_loci)) fl = open(flname) current_iter_idx = 0 # index used for storage last_iter_idx = 0 # index used to identify when we finish a step for ln in fl: cols = ln.strip().split(",") iter_idx = int(cols[0]) locus_idx = int(cols[1]) phi = float(cols[2]) if last_iter_idx != iter_idx: last_iter_idx = iter_idx current_iter_idx += 1 sampled_phis[current_iter_idx, locus_idx] = phi fl.close() return sampled_phis if __name__ == "__main__": bamova_phi_output_flname = sys.argv[1] n_steps = int(sys.argv[2]) n_loci = int(sys.argv[3]) npy_flname = sys.argv[4] matrix = read_phi(bamova_phi_output_flname, n_steps, n_loci) np.save(npy_flname, matrix)
<commit_before>import sys import numpy as np def read_phi(flname, n_steps, n_loci): sampled_phis = np.zeros((n_steps, n_loci)) fl = open(flname) current_iter_idx = 0 # index used for storage last_iter_idx = 0 # index used to identify when we finish a step for ln in fl: cols = ln.strip().split(",") iter_idx = int(cols[0]) locus_idx = int(cols[1]) phi = float(cols[2]) if last_iter_idx != iter_idx: last_iter_idx = iter_idx current_iter_idx += 1 sampled_phis[current_iter_idx, locus_idx] = phi fl.close() return sampled_phis if __name__ == "__main__": bamova_phi_output_flname = sys.argv[1] n_steps = int(sys.argv[2]) n_loci = int(sys.argv[3]) npy_flname = sys.argv[4] matrix = read_phi(bamova_phi_output_flname, n_steps, n_loci) np.save(matrix, npy_flname)<commit_msg>Fix ordering of parameters to save<commit_after>
import sys import numpy as np def read_phi(flname, n_steps, n_loci): sampled_phis = np.zeros((n_steps, n_loci)) fl = open(flname) current_iter_idx = 0 # index used for storage last_iter_idx = 0 # index used to identify when we finish a step for ln in fl: cols = ln.strip().split(",") iter_idx = int(cols[0]) locus_idx = int(cols[1]) phi = float(cols[2]) if last_iter_idx != iter_idx: last_iter_idx = iter_idx current_iter_idx += 1 sampled_phis[current_iter_idx, locus_idx] = phi fl.close() return sampled_phis if __name__ == "__main__": bamova_phi_output_flname = sys.argv[1] n_steps = int(sys.argv[2]) n_loci = int(sys.argv[3]) npy_flname = sys.argv[4] matrix = read_phi(bamova_phi_output_flname, n_steps, n_loci) np.save(npy_flname, matrix)
import sys import numpy as np def read_phi(flname, n_steps, n_loci): sampled_phis = np.zeros((n_steps, n_loci)) fl = open(flname) current_iter_idx = 0 # index used for storage last_iter_idx = 0 # index used to identify when we finish a step for ln in fl: cols = ln.strip().split(",") iter_idx = int(cols[0]) locus_idx = int(cols[1]) phi = float(cols[2]) if last_iter_idx != iter_idx: last_iter_idx = iter_idx current_iter_idx += 1 sampled_phis[current_iter_idx, locus_idx] = phi fl.close() return sampled_phis if __name__ == "__main__": bamova_phi_output_flname = sys.argv[1] n_steps = int(sys.argv[2]) n_loci = int(sys.argv[3]) npy_flname = sys.argv[4] matrix = read_phi(bamova_phi_output_flname, n_steps, n_loci) np.save(matrix, npy_flname)Fix ordering of parameters to saveimport sys import numpy as np def read_phi(flname, n_steps, n_loci): sampled_phis = np.zeros((n_steps, n_loci)) fl = open(flname) current_iter_idx = 0 # index used for storage last_iter_idx = 0 # index used to identify when we finish a step for ln in fl: cols = ln.strip().split(",") iter_idx = int(cols[0]) locus_idx = int(cols[1]) phi = float(cols[2]) if last_iter_idx != iter_idx: last_iter_idx = iter_idx current_iter_idx += 1 sampled_phis[current_iter_idx, locus_idx] = phi fl.close() return sampled_phis if __name__ == "__main__": bamova_phi_output_flname = sys.argv[1] n_steps = int(sys.argv[2]) n_loci = int(sys.argv[3]) npy_flname = sys.argv[4] matrix = read_phi(bamova_phi_output_flname, n_steps, n_loci) np.save(npy_flname, matrix)
<commit_before>import sys import numpy as np def read_phi(flname, n_steps, n_loci): sampled_phis = np.zeros((n_steps, n_loci)) fl = open(flname) current_iter_idx = 0 # index used for storage last_iter_idx = 0 # index used to identify when we finish a step for ln in fl: cols = ln.strip().split(",") iter_idx = int(cols[0]) locus_idx = int(cols[1]) phi = float(cols[2]) if last_iter_idx != iter_idx: last_iter_idx = iter_idx current_iter_idx += 1 sampled_phis[current_iter_idx, locus_idx] = phi fl.close() return sampled_phis if __name__ == "__main__": bamova_phi_output_flname = sys.argv[1] n_steps = int(sys.argv[2]) n_loci = int(sys.argv[3]) npy_flname = sys.argv[4] matrix = read_phi(bamova_phi_output_flname, n_steps, n_loci) np.save(matrix, npy_flname)<commit_msg>Fix ordering of parameters to save<commit_after>import sys import numpy as np def read_phi(flname, n_steps, n_loci): sampled_phis = np.zeros((n_steps, n_loci)) fl = open(flname) current_iter_idx = 0 # index used for storage last_iter_idx = 0 # index used to identify when we finish a step for ln in fl: cols = ln.strip().split(",") iter_idx = int(cols[0]) locus_idx = int(cols[1]) phi = float(cols[2]) if last_iter_idx != iter_idx: last_iter_idx = iter_idx current_iter_idx += 1 sampled_phis[current_iter_idx, locus_idx] = phi fl.close() return sampled_phis if __name__ == "__main__": bamova_phi_output_flname = sys.argv[1] n_steps = int(sys.argv[2]) n_loci = int(sys.argv[3]) npy_flname = sys.argv[4] matrix = read_phi(bamova_phi_output_flname, n_steps, n_loci) np.save(npy_flname, matrix)
3747158af790a38ccfce217426ee5261877e9f0e
project/api/management/commands/seed_database.py
project/api/management/commands/seed_database.py
# Django from django.core.management.base import BaseCommand from api.factories import ( InternationalFactory, ) class Command(BaseCommand): help = "Command to seed database." def handle(self, *args, **options): InternationalFactory()
# Django from django.core.management.base import BaseCommand class Command(BaseCommand): help = "Command to seed database." from api.factories import ( InternationalFactory, ) def handle(self, *args, **options): self.InternationalFactory()
Fix seeding in management command
Fix seeding in management command
Python
bsd-2-clause
barberscore/barberscore-api,barberscore/barberscore-api,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore,dbinetti/barberscore-django,dbinetti/barberscore
# Django from django.core.management.base import BaseCommand from api.factories import ( InternationalFactory, ) class Command(BaseCommand): help = "Command to seed database." def handle(self, *args, **options): InternationalFactory() Fix seeding in management command
# Django from django.core.management.base import BaseCommand class Command(BaseCommand): help = "Command to seed database." from api.factories import ( InternationalFactory, ) def handle(self, *args, **options): self.InternationalFactory()
<commit_before># Django from django.core.management.base import BaseCommand from api.factories import ( InternationalFactory, ) class Command(BaseCommand): help = "Command to seed database." def handle(self, *args, **options): InternationalFactory() <commit_msg>Fix seeding in management command<commit_after>
# Django from django.core.management.base import BaseCommand class Command(BaseCommand): help = "Command to seed database." from api.factories import ( InternationalFactory, ) def handle(self, *args, **options): self.InternationalFactory()
# Django from django.core.management.base import BaseCommand from api.factories import ( InternationalFactory, ) class Command(BaseCommand): help = "Command to seed database." def handle(self, *args, **options): InternationalFactory() Fix seeding in management command# Django from django.core.management.base import BaseCommand class Command(BaseCommand): help = "Command to seed database." from api.factories import ( InternationalFactory, ) def handle(self, *args, **options): self.InternationalFactory()
<commit_before># Django from django.core.management.base import BaseCommand from api.factories import ( InternationalFactory, ) class Command(BaseCommand): help = "Command to seed database." def handle(self, *args, **options): InternationalFactory() <commit_msg>Fix seeding in management command<commit_after># Django from django.core.management.base import BaseCommand class Command(BaseCommand): help = "Command to seed database." from api.factories import ( InternationalFactory, ) def handle(self, *args, **options): self.InternationalFactory()
13f78350b42e48bc8195d6ec05c8b4342866d8e3
unit_tests/test_analyse_idynomics.py
unit_tests/test_analyse_idynomics.py
from nose.tools import * from analyse_idynomics import * class TestAnalyseiDynomics: def setUp(self): self.directory = 'test_data' self.analysis = AnalyseiDynomics(self.directory) def test_init(self): assert_is(self.directory, self.analysis.directory)
from nose.tools import * from analyse_idynomics import * from os.path import join, dirname, realpath class TestAnalyseiDynomics: expected_solutes = ['MyAtmos', 'pressure'] expected_species = ['MyBact'] def setUp(self): self.directory = join(dirname(realpath(__file__)), 'test_data') self.analysis = AnalyseiDynomics(self.directory) def test_init(self): assert_is(self.directory, self.analysis.directory) def test_solute_names(self): actual_solutes = self.analysis.solute_names assert_list_equal(self.expected_solutes, actual_solutes) def test_species_names(self): actual_species = self.analysis.species_names assert_list_equal(self.expected_species, actual_species)
Add unit tests for solute and species names
Add unit tests for solute and species names
Python
mit
fophillips/pyDynoMiCS
from nose.tools import * from analyse_idynomics import * class TestAnalyseiDynomics: def setUp(self): self.directory = 'test_data' self.analysis = AnalyseiDynomics(self.directory) def test_init(self): assert_is(self.directory, self.analysis.directory) Add unit tests for solute and species names
from nose.tools import * from analyse_idynomics import * from os.path import join, dirname, realpath class TestAnalyseiDynomics: expected_solutes = ['MyAtmos', 'pressure'] expected_species = ['MyBact'] def setUp(self): self.directory = join(dirname(realpath(__file__)), 'test_data') self.analysis = AnalyseiDynomics(self.directory) def test_init(self): assert_is(self.directory, self.analysis.directory) def test_solute_names(self): actual_solutes = self.analysis.solute_names assert_list_equal(self.expected_solutes, actual_solutes) def test_species_names(self): actual_species = self.analysis.species_names assert_list_equal(self.expected_species, actual_species)
<commit_before>from nose.tools import * from analyse_idynomics import * class TestAnalyseiDynomics: def setUp(self): self.directory = 'test_data' self.analysis = AnalyseiDynomics(self.directory) def test_init(self): assert_is(self.directory, self.analysis.directory) <commit_msg>Add unit tests for solute and species names<commit_after>
from nose.tools import * from analyse_idynomics import * from os.path import join, dirname, realpath class TestAnalyseiDynomics: expected_solutes = ['MyAtmos', 'pressure'] expected_species = ['MyBact'] def setUp(self): self.directory = join(dirname(realpath(__file__)), 'test_data') self.analysis = AnalyseiDynomics(self.directory) def test_init(self): assert_is(self.directory, self.analysis.directory) def test_solute_names(self): actual_solutes = self.analysis.solute_names assert_list_equal(self.expected_solutes, actual_solutes) def test_species_names(self): actual_species = self.analysis.species_names assert_list_equal(self.expected_species, actual_species)
from nose.tools import * from analyse_idynomics import * class TestAnalyseiDynomics: def setUp(self): self.directory = 'test_data' self.analysis = AnalyseiDynomics(self.directory) def test_init(self): assert_is(self.directory, self.analysis.directory) Add unit tests for solute and species namesfrom nose.tools import * from analyse_idynomics import * from os.path import join, dirname, realpath class TestAnalyseiDynomics: expected_solutes = ['MyAtmos', 'pressure'] expected_species = ['MyBact'] def setUp(self): self.directory = join(dirname(realpath(__file__)), 'test_data') self.analysis = AnalyseiDynomics(self.directory) def test_init(self): assert_is(self.directory, self.analysis.directory) def test_solute_names(self): actual_solutes = self.analysis.solute_names assert_list_equal(self.expected_solutes, actual_solutes) def test_species_names(self): actual_species = self.analysis.species_names assert_list_equal(self.expected_species, actual_species)
<commit_before>from nose.tools import * from analyse_idynomics import * class TestAnalyseiDynomics: def setUp(self): self.directory = 'test_data' self.analysis = AnalyseiDynomics(self.directory) def test_init(self): assert_is(self.directory, self.analysis.directory) <commit_msg>Add unit tests for solute and species names<commit_after>from nose.tools import * from analyse_idynomics import * from os.path import join, dirname, realpath class TestAnalyseiDynomics: expected_solutes = ['MyAtmos', 'pressure'] expected_species = ['MyBact'] def setUp(self): self.directory = join(dirname(realpath(__file__)), 'test_data') self.analysis = AnalyseiDynomics(self.directory) def test_init(self): assert_is(self.directory, self.analysis.directory) def test_solute_names(self): actual_solutes = self.analysis.solute_names assert_list_equal(self.expected_solutes, actual_solutes) def test_species_names(self): actual_species = self.analysis.species_names assert_list_equal(self.expected_species, actual_species)
aea5b07357ba06811e085d79d1ea4a62726ce8e4
sgext/drivers/devices/loadbalancers/amazonelb.py
sgext/drivers/devices/loadbalancers/amazonelb.py
from clusto.drivers.devices.appliance.basicappliance import BasicAppliance import boto.ec2.elb class AmazonELB(BasicAppliance): _driver_name = 'amazonelb' def __init__(self, name, elbname, **kwargs): BasicAppliance.__init__(self, name, **kwargs) self.set_attr(key='elb', subkey='name', value='elbname') def get_boto_connection(self): region = self.attr_value(key='ec2', subkey='region', merge_container_attrs=True) return boto.ec2.elb.connect_to_region(region) def enable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.enable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def disable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.disable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def get_state(self): conn = self.get_boto_connection() return conn.describe_instance_health(self.attr_value(key='elb', subkey='name'))
from clusto.drivers.devices.appliance.basicappliance import BasicAppliance import boto.ec2.elb class AmazonELB(BasicAppliance): _driver_name = 'amazonelb' def __init__(self, name, elbname, **kwargs): BasicAppliance.__init__(self, name, **kwargs) self.set_attr(key='elb', subkey='name', value=elbname) def get_boto_connection(self): region = self.attr_value(key='ec2', subkey='region', merge_container_attrs=True) return boto.ec2.elb.connect_to_region(region) def enable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.enable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def disable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.disable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def get_state(self): conn = self.get_boto_connection() return conn.describe_instance_health(self.attr_value(key='elb', subkey='name'))
Fix bug setting the key='elb', subkey='name' attribute.
Fix bug setting the key='elb', subkey='name' attribute.
Python
bsd-2-clause
simplegeo/clusto-sgext
from clusto.drivers.devices.appliance.basicappliance import BasicAppliance import boto.ec2.elb class AmazonELB(BasicAppliance): _driver_name = 'amazonelb' def __init__(self, name, elbname, **kwargs): BasicAppliance.__init__(self, name, **kwargs) self.set_attr(key='elb', subkey='name', value='elbname') def get_boto_connection(self): region = self.attr_value(key='ec2', subkey='region', merge_container_attrs=True) return boto.ec2.elb.connect_to_region(region) def enable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.enable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def disable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.disable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def get_state(self): conn = self.get_boto_connection() return conn.describe_instance_health(self.attr_value(key='elb', subkey='name')) Fix bug setting the key='elb', subkey='name' attribute.
from clusto.drivers.devices.appliance.basicappliance import BasicAppliance import boto.ec2.elb class AmazonELB(BasicAppliance): _driver_name = 'amazonelb' def __init__(self, name, elbname, **kwargs): BasicAppliance.__init__(self, name, **kwargs) self.set_attr(key='elb', subkey='name', value=elbname) def get_boto_connection(self): region = self.attr_value(key='ec2', subkey='region', merge_container_attrs=True) return boto.ec2.elb.connect_to_region(region) def enable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.enable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def disable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.disable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def get_state(self): conn = self.get_boto_connection() return conn.describe_instance_health(self.attr_value(key='elb', subkey='name'))
<commit_before>from clusto.drivers.devices.appliance.basicappliance import BasicAppliance import boto.ec2.elb class AmazonELB(BasicAppliance): _driver_name = 'amazonelb' def __init__(self, name, elbname, **kwargs): BasicAppliance.__init__(self, name, **kwargs) self.set_attr(key='elb', subkey='name', value='elbname') def get_boto_connection(self): region = self.attr_value(key='ec2', subkey='region', merge_container_attrs=True) return boto.ec2.elb.connect_to_region(region) def enable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.enable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def disable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.disable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def get_state(self): conn = self.get_boto_connection() return conn.describe_instance_health(self.attr_value(key='elb', subkey='name')) <commit_msg>Fix bug setting the key='elb', subkey='name' attribute.<commit_after>
from clusto.drivers.devices.appliance.basicappliance import BasicAppliance import boto.ec2.elb class AmazonELB(BasicAppliance): _driver_name = 'amazonelb' def __init__(self, name, elbname, **kwargs): BasicAppliance.__init__(self, name, **kwargs) self.set_attr(key='elb', subkey='name', value=elbname) def get_boto_connection(self): region = self.attr_value(key='ec2', subkey='region', merge_container_attrs=True) return boto.ec2.elb.connect_to_region(region) def enable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.enable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def disable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.disable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def get_state(self): conn = self.get_boto_connection() return conn.describe_instance_health(self.attr_value(key='elb', subkey='name'))
from clusto.drivers.devices.appliance.basicappliance import BasicAppliance import boto.ec2.elb class AmazonELB(BasicAppliance): _driver_name = 'amazonelb' def __init__(self, name, elbname, **kwargs): BasicAppliance.__init__(self, name, **kwargs) self.set_attr(key='elb', subkey='name', value='elbname') def get_boto_connection(self): region = self.attr_value(key='ec2', subkey='region', merge_container_attrs=True) return boto.ec2.elb.connect_to_region(region) def enable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.enable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def disable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.disable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def get_state(self): conn = self.get_boto_connection() return conn.describe_instance_health(self.attr_value(key='elb', subkey='name')) Fix bug setting the key='elb', subkey='name' attribute.from clusto.drivers.devices.appliance.basicappliance import BasicAppliance import boto.ec2.elb class AmazonELB(BasicAppliance): _driver_name = 'amazonelb' def __init__(self, name, elbname, **kwargs): BasicAppliance.__init__(self, name, **kwargs) self.set_attr(key='elb', subkey='name', value=elbname) def get_boto_connection(self): region = self.attr_value(key='ec2', subkey='region', merge_container_attrs=True) return boto.ec2.elb.connect_to_region(region) def enable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.enable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def disable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.disable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def get_state(self): conn = self.get_boto_connection() return conn.describe_instance_health(self.attr_value(key='elb', subkey='name'))
<commit_before>from clusto.drivers.devices.appliance.basicappliance import BasicAppliance import boto.ec2.elb class AmazonELB(BasicAppliance): _driver_name = 'amazonelb' def __init__(self, name, elbname, **kwargs): BasicAppliance.__init__(self, name, **kwargs) self.set_attr(key='elb', subkey='name', value='elbname') def get_boto_connection(self): region = self.attr_value(key='ec2', subkey='region', merge_container_attrs=True) return boto.ec2.elb.connect_to_region(region) def enable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.enable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def disable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.disable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def get_state(self): conn = self.get_boto_connection() return conn.describe_instance_health(self.attr_value(key='elb', subkey='name')) <commit_msg>Fix bug setting the key='elb', subkey='name' attribute.<commit_after>from clusto.drivers.devices.appliance.basicappliance import BasicAppliance import boto.ec2.elb class AmazonELB(BasicAppliance): _driver_name = 'amazonelb' def __init__(self, name, elbname, **kwargs): BasicAppliance.__init__(self, name, **kwargs) self.set_attr(key='elb', subkey='name', value=elbname) def get_boto_connection(self): region = self.attr_value(key='ec2', subkey='region', merge_container_attrs=True) return boto.ec2.elb.connect_to_region(region) def enable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.enable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def disable_zone(self, name_or_entity): conn = self.get_boto_connection() if isinstance(name_or_entity, str): name = name_or_entity else: name = name_or_entity.name conn.disable_availability_zones(self.attr_value(key='elb', subkey='name'), [name]) def get_state(self): conn = self.get_boto_connection() return conn.describe_instance_health(self.attr_value(key='elb', subkey='name'))
3fb41919ebfd73fe1199e95f7ee9b8fa7557ea18
tests/test_vane.py
tests/test_vane.py
import os import unittest import vane class TestFetch(unittest.TestCase): def test_owm_good_fetch(self): loc = 'New York, NY' w = vane.fetch_weather(loc) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_owm_bad_fetch(self): with self.assertRaises(Exception): loc = 'Somewhere, On, Mars' w = vane.fetch_weather(loc) def test_wund_good_fetch(self): api_key = os.environ['WUND_API'] loc = 'New York, NY' w = vane.fetch_weather(location=loc, provider='wund', api_key=api_key) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_wund_bad_fetch(self): api_key = os.environ['WUND_API'] with self.assertRaises(Exception): loc = '0' w = vane.fetch_weather( location=loc, provider='wund', api_key=api_key) def test_geolocation(self): w = vane.fetch_weather() self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) if __name__ == '__main__': unittest.main()
import os import unittest import vane class TestFetch(unittest.TestCase): def test_owm_good_fetch(self): loc = 'New York, NY' w = vane.fetch_weather(loc) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_owm_bad_fetch(self): with self.assertRaises(Exception): loc = 'Somewhere, On, Mars' w = vane.fetch_weather(loc) def test_wund_good_fetch(self): api_key = os.environ['WUND_API'] loc = 'New York, NY' w = vane.fetch_weather(location=loc, provider='wund', api_key=api_key) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_wund_bad_fetch(self): api_key = os.environ['WUND_API'] with self.assertRaises(Exception): loc = '0' w = vane.fetch_weather( location=loc, provider='wund', api_key=api_key) if __name__ == '__main__': unittest.main()
Remove geolocation test for now
Remove geolocation test for now Geolocation doesn't always work, and since location is the one absolutely required parameter to get anything useful out of vane, there's no way to gracefully fall back. I'm going to leave the test out until I have time to write a proper one.
Python
bsd-3-clause
trevorparker/vane
import os import unittest import vane class TestFetch(unittest.TestCase): def test_owm_good_fetch(self): loc = 'New York, NY' w = vane.fetch_weather(loc) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_owm_bad_fetch(self): with self.assertRaises(Exception): loc = 'Somewhere, On, Mars' w = vane.fetch_weather(loc) def test_wund_good_fetch(self): api_key = os.environ['WUND_API'] loc = 'New York, NY' w = vane.fetch_weather(location=loc, provider='wund', api_key=api_key) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_wund_bad_fetch(self): api_key = os.environ['WUND_API'] with self.assertRaises(Exception): loc = '0' w = vane.fetch_weather( location=loc, provider='wund', api_key=api_key) def test_geolocation(self): w = vane.fetch_weather() self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) if __name__ == '__main__': unittest.main() Remove geolocation test for now Geolocation doesn't always work, and since location is the one absolutely required parameter to get anything useful out of vane, there's no way to gracefully fall back. I'm going to leave the test out until I have time to write a proper one.
import os import unittest import vane class TestFetch(unittest.TestCase): def test_owm_good_fetch(self): loc = 'New York, NY' w = vane.fetch_weather(loc) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_owm_bad_fetch(self): with self.assertRaises(Exception): loc = 'Somewhere, On, Mars' w = vane.fetch_weather(loc) def test_wund_good_fetch(self): api_key = os.environ['WUND_API'] loc = 'New York, NY' w = vane.fetch_weather(location=loc, provider='wund', api_key=api_key) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_wund_bad_fetch(self): api_key = os.environ['WUND_API'] with self.assertRaises(Exception): loc = '0' w = vane.fetch_weather( location=loc, provider='wund', api_key=api_key) if __name__ == '__main__': unittest.main()
<commit_before>import os import unittest import vane class TestFetch(unittest.TestCase): def test_owm_good_fetch(self): loc = 'New York, NY' w = vane.fetch_weather(loc) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_owm_bad_fetch(self): with self.assertRaises(Exception): loc = 'Somewhere, On, Mars' w = vane.fetch_weather(loc) def test_wund_good_fetch(self): api_key = os.environ['WUND_API'] loc = 'New York, NY' w = vane.fetch_weather(location=loc, provider='wund', api_key=api_key) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_wund_bad_fetch(self): api_key = os.environ['WUND_API'] with self.assertRaises(Exception): loc = '0' w = vane.fetch_weather( location=loc, provider='wund', api_key=api_key) def test_geolocation(self): w = vane.fetch_weather() self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) if __name__ == '__main__': unittest.main() <commit_msg>Remove geolocation test for now Geolocation doesn't always work, and since location is the one absolutely required parameter to get anything useful out of vane, there's no way to gracefully fall back. I'm going to leave the test out until I have time to write a proper one.<commit_after>
import os import unittest import vane class TestFetch(unittest.TestCase): def test_owm_good_fetch(self): loc = 'New York, NY' w = vane.fetch_weather(loc) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_owm_bad_fetch(self): with self.assertRaises(Exception): loc = 'Somewhere, On, Mars' w = vane.fetch_weather(loc) def test_wund_good_fetch(self): api_key = os.environ['WUND_API'] loc = 'New York, NY' w = vane.fetch_weather(location=loc, provider='wund', api_key=api_key) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_wund_bad_fetch(self): api_key = os.environ['WUND_API'] with self.assertRaises(Exception): loc = '0' w = vane.fetch_weather( location=loc, provider='wund', api_key=api_key) if __name__ == '__main__': unittest.main()
import os import unittest import vane class TestFetch(unittest.TestCase): def test_owm_good_fetch(self): loc = 'New York, NY' w = vane.fetch_weather(loc) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_owm_bad_fetch(self): with self.assertRaises(Exception): loc = 'Somewhere, On, Mars' w = vane.fetch_weather(loc) def test_wund_good_fetch(self): api_key = os.environ['WUND_API'] loc = 'New York, NY' w = vane.fetch_weather(location=loc, provider='wund', api_key=api_key) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_wund_bad_fetch(self): api_key = os.environ['WUND_API'] with self.assertRaises(Exception): loc = '0' w = vane.fetch_weather( location=loc, provider='wund', api_key=api_key) def test_geolocation(self): w = vane.fetch_weather() self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) if __name__ == '__main__': unittest.main() Remove geolocation test for now Geolocation doesn't always work, and since location is the one absolutely required parameter to get anything useful out of vane, there's no way to gracefully fall back. I'm going to leave the test out until I have time to write a proper one.import os import unittest import vane class TestFetch(unittest.TestCase): def test_owm_good_fetch(self): loc = 'New York, NY' w = vane.fetch_weather(loc) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_owm_bad_fetch(self): with self.assertRaises(Exception): loc = 'Somewhere, On, Mars' w = vane.fetch_weather(loc) def test_wund_good_fetch(self): api_key = os.environ['WUND_API'] loc = 'New York, NY' w = vane.fetch_weather(location=loc, provider='wund', api_key=api_key) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_wund_bad_fetch(self): api_key = os.environ['WUND_API'] with self.assertRaises(Exception): loc = '0' w = vane.fetch_weather( location=loc, provider='wund', api_key=api_key) if __name__ == '__main__': unittest.main()
<commit_before>import os import unittest import vane class TestFetch(unittest.TestCase): def test_owm_good_fetch(self): loc = 'New York, NY' w = vane.fetch_weather(loc) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_owm_bad_fetch(self): with self.assertRaises(Exception): loc = 'Somewhere, On, Mars' w = vane.fetch_weather(loc) def test_wund_good_fetch(self): api_key = os.environ['WUND_API'] loc = 'New York, NY' w = vane.fetch_weather(location=loc, provider='wund', api_key=api_key) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_wund_bad_fetch(self): api_key = os.environ['WUND_API'] with self.assertRaises(Exception): loc = '0' w = vane.fetch_weather( location=loc, provider='wund', api_key=api_key) def test_geolocation(self): w = vane.fetch_weather() self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) if __name__ == '__main__': unittest.main() <commit_msg>Remove geolocation test for now Geolocation doesn't always work, and since location is the one absolutely required parameter to get anything useful out of vane, there's no way to gracefully fall back. I'm going to leave the test out until I have time to write a proper one.<commit_after>import os import unittest import vane class TestFetch(unittest.TestCase): def test_owm_good_fetch(self): loc = 'New York, NY' w = vane.fetch_weather(loc) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_owm_bad_fetch(self): with self.assertRaises(Exception): loc = 'Somewhere, On, Mars' w = vane.fetch_weather(loc) def test_wund_good_fetch(self): api_key = os.environ['WUND_API'] loc = 'New York, NY' w = vane.fetch_weather(location=loc, provider='wund', api_key=api_key) self.assertTrue('temperature' in w['current']) self.assertTrue('summary' in w['current']) def test_wund_bad_fetch(self): api_key = os.environ['WUND_API'] with self.assertRaises(Exception): loc = '0' w = vane.fetch_weather( location=loc, provider='wund', api_key=api_key) if __name__ == '__main__': unittest.main()
a53df8062680b017d7cd0cf61cfef0e53b2364b1
src/loaders/npzpck.py
src/loaders/npzpck.py
# -*- coding: utf-8 -*- # Copyright (c) 2019 shmilee ''' Contains Npz pickled file loader class. ''' import numpy import zipfile from ..glogger import getGLogger from .base import BasePckLoader __all__ = ['NpzPckLoader'] log = getGLogger('L') class NpzPckLoader(BasePckLoader): ''' Load pickled data from ``.npz`` file. Return a dictionary-like object. Notes ----- Q: How to read data from .npz file? A: npzfile[datakey] >>> npzfile = numpy.load('/tmp/test.npz') >>> datakey = 'group/key' >>> npzfile[datakey] ''' __slots__ = [] loader_type = '.npz' def _special_check_path(self): if zipfile.is_zipfile(self.path): return True else: log.error("'%s' is not a ZIP file!" % self.path) return False def _special_open(self): return numpy.load(self.path) def _special_close(self, tmpobj): tmpobj.close() def _special_getkeys(self, tmpobj): return sorted(dict.fromkeys(tmpobj.files)) def _special_get(self, tmpobj, key): value = tmpobj[key] # if value.size == 1: # value = value.item() return value
# -*- coding: utf-8 -*- # Copyright (c) 2019 shmilee ''' Contains Npz pickled file loader class. ''' import numpy import zipfile from ..glogger import getGLogger from .base import BasePckLoader __all__ = ['NpzPckLoader'] log = getGLogger('L') class NpzPckLoader(BasePckLoader): ''' Load pickled data from ``.npz`` file. Return a dictionary-like object. Notes ----- Q: How to read data from .npz file? A: npzfile[datakey] >>> npzfile = numpy.load('/tmp/test.npz') >>> datakey = 'group/key' >>> npzfile[datakey] ''' __slots__ = [] loader_type = '.npz' def _special_check_path(self): if zipfile.is_zipfile(self.path): return True else: log.error("'%s' is not a ZIP file!" % self.path) return False def _special_open(self): return numpy.load(self.path) def _special_close(self, tmpobj): tmpobj.close() def _special_getkeys(self, tmpobj): return sorted(dict.fromkeys(tmpobj.files)) def _special_get(self, tmpobj, key): value = tmpobj[key] if value.size == 1: value = value.item() return value
Revert "fix: NpzPckLoader meddle one-element array"
Revert "fix: NpzPckLoader meddle one-element array" This reverts commit b3bf178ed07d789282a05fa4efac799c9e0c5910. NpzPckLoader: * 1 vs array(1) * 'a' vs array('a', dtype='<U1') NpzPckSaver np.asanyarray: * 1 vs np.void(pickle.dumps('1')) * 'a' vs np.void(pickle.dumps('a')) Note: * type(np.asanyarray(np.void(pickle.dumps('a')))[()]) is np.void * type(np.asanyarray(np.void(pickle.dumps('a'))).item()) is not np.void * pickle.loads(np.asanyarray(np.void(pickle.dumps('a')))[()].tostring()) is 'a'
Python
mit
shmilee/gdpy3,shmilee/gdpy3,shmilee/gdpy3,shmilee/gdpy3
# -*- coding: utf-8 -*- # Copyright (c) 2019 shmilee ''' Contains Npz pickled file loader class. ''' import numpy import zipfile from ..glogger import getGLogger from .base import BasePckLoader __all__ = ['NpzPckLoader'] log = getGLogger('L') class NpzPckLoader(BasePckLoader): ''' Load pickled data from ``.npz`` file. Return a dictionary-like object. Notes ----- Q: How to read data from .npz file? A: npzfile[datakey] >>> npzfile = numpy.load('/tmp/test.npz') >>> datakey = 'group/key' >>> npzfile[datakey] ''' __slots__ = [] loader_type = '.npz' def _special_check_path(self): if zipfile.is_zipfile(self.path): return True else: log.error("'%s' is not a ZIP file!" % self.path) return False def _special_open(self): return numpy.load(self.path) def _special_close(self, tmpobj): tmpobj.close() def _special_getkeys(self, tmpobj): return sorted(dict.fromkeys(tmpobj.files)) def _special_get(self, tmpobj, key): value = tmpobj[key] # if value.size == 1: # value = value.item() return value Revert "fix: NpzPckLoader meddle one-element array" This reverts commit b3bf178ed07d789282a05fa4efac799c9e0c5910. NpzPckLoader: * 1 vs array(1) * 'a' vs array('a', dtype='<U1') NpzPckSaver np.asanyarray: * 1 vs np.void(pickle.dumps('1')) * 'a' vs np.void(pickle.dumps('a')) Note: * type(np.asanyarray(np.void(pickle.dumps('a')))[()]) is np.void * type(np.asanyarray(np.void(pickle.dumps('a'))).item()) is not np.void * pickle.loads(np.asanyarray(np.void(pickle.dumps('a')))[()].tostring()) is 'a'
# -*- coding: utf-8 -*- # Copyright (c) 2019 shmilee ''' Contains Npz pickled file loader class. ''' import numpy import zipfile from ..glogger import getGLogger from .base import BasePckLoader __all__ = ['NpzPckLoader'] log = getGLogger('L') class NpzPckLoader(BasePckLoader): ''' Load pickled data from ``.npz`` file. Return a dictionary-like object. Notes ----- Q: How to read data from .npz file? A: npzfile[datakey] >>> npzfile = numpy.load('/tmp/test.npz') >>> datakey = 'group/key' >>> npzfile[datakey] ''' __slots__ = [] loader_type = '.npz' def _special_check_path(self): if zipfile.is_zipfile(self.path): return True else: log.error("'%s' is not a ZIP file!" % self.path) return False def _special_open(self): return numpy.load(self.path) def _special_close(self, tmpobj): tmpobj.close() def _special_getkeys(self, tmpobj): return sorted(dict.fromkeys(tmpobj.files)) def _special_get(self, tmpobj, key): value = tmpobj[key] if value.size == 1: value = value.item() return value
<commit_before># -*- coding: utf-8 -*- # Copyright (c) 2019 shmilee ''' Contains Npz pickled file loader class. ''' import numpy import zipfile from ..glogger import getGLogger from .base import BasePckLoader __all__ = ['NpzPckLoader'] log = getGLogger('L') class NpzPckLoader(BasePckLoader): ''' Load pickled data from ``.npz`` file. Return a dictionary-like object. Notes ----- Q: How to read data from .npz file? A: npzfile[datakey] >>> npzfile = numpy.load('/tmp/test.npz') >>> datakey = 'group/key' >>> npzfile[datakey] ''' __slots__ = [] loader_type = '.npz' def _special_check_path(self): if zipfile.is_zipfile(self.path): return True else: log.error("'%s' is not a ZIP file!" % self.path) return False def _special_open(self): return numpy.load(self.path) def _special_close(self, tmpobj): tmpobj.close() def _special_getkeys(self, tmpobj): return sorted(dict.fromkeys(tmpobj.files)) def _special_get(self, tmpobj, key): value = tmpobj[key] # if value.size == 1: # value = value.item() return value <commit_msg>Revert "fix: NpzPckLoader meddle one-element array" This reverts commit b3bf178ed07d789282a05fa4efac799c9e0c5910. NpzPckLoader: * 1 vs array(1) * 'a' vs array('a', dtype='<U1') NpzPckSaver np.asanyarray: * 1 vs np.void(pickle.dumps('1')) * 'a' vs np.void(pickle.dumps('a')) Note: * type(np.asanyarray(np.void(pickle.dumps('a')))[()]) is np.void * type(np.asanyarray(np.void(pickle.dumps('a'))).item()) is not np.void * pickle.loads(np.asanyarray(np.void(pickle.dumps('a')))[()].tostring()) is 'a'<commit_after>
# -*- coding: utf-8 -*- # Copyright (c) 2019 shmilee ''' Contains Npz pickled file loader class. ''' import numpy import zipfile from ..glogger import getGLogger from .base import BasePckLoader __all__ = ['NpzPckLoader'] log = getGLogger('L') class NpzPckLoader(BasePckLoader): ''' Load pickled data from ``.npz`` file. Return a dictionary-like object. Notes ----- Q: How to read data from .npz file? A: npzfile[datakey] >>> npzfile = numpy.load('/tmp/test.npz') >>> datakey = 'group/key' >>> npzfile[datakey] ''' __slots__ = [] loader_type = '.npz' def _special_check_path(self): if zipfile.is_zipfile(self.path): return True else: log.error("'%s' is not a ZIP file!" % self.path) return False def _special_open(self): return numpy.load(self.path) def _special_close(self, tmpobj): tmpobj.close() def _special_getkeys(self, tmpobj): return sorted(dict.fromkeys(tmpobj.files)) def _special_get(self, tmpobj, key): value = tmpobj[key] if value.size == 1: value = value.item() return value
# -*- coding: utf-8 -*- # Copyright (c) 2019 shmilee ''' Contains Npz pickled file loader class. ''' import numpy import zipfile from ..glogger import getGLogger from .base import BasePckLoader __all__ = ['NpzPckLoader'] log = getGLogger('L') class NpzPckLoader(BasePckLoader): ''' Load pickled data from ``.npz`` file. Return a dictionary-like object. Notes ----- Q: How to read data from .npz file? A: npzfile[datakey] >>> npzfile = numpy.load('/tmp/test.npz') >>> datakey = 'group/key' >>> npzfile[datakey] ''' __slots__ = [] loader_type = '.npz' def _special_check_path(self): if zipfile.is_zipfile(self.path): return True else: log.error("'%s' is not a ZIP file!" % self.path) return False def _special_open(self): return numpy.load(self.path) def _special_close(self, tmpobj): tmpobj.close() def _special_getkeys(self, tmpobj): return sorted(dict.fromkeys(tmpobj.files)) def _special_get(self, tmpobj, key): value = tmpobj[key] # if value.size == 1: # value = value.item() return value Revert "fix: NpzPckLoader meddle one-element array" This reverts commit b3bf178ed07d789282a05fa4efac799c9e0c5910. NpzPckLoader: * 1 vs array(1) * 'a' vs array('a', dtype='<U1') NpzPckSaver np.asanyarray: * 1 vs np.void(pickle.dumps('1')) * 'a' vs np.void(pickle.dumps('a')) Note: * type(np.asanyarray(np.void(pickle.dumps('a')))[()]) is np.void * type(np.asanyarray(np.void(pickle.dumps('a'))).item()) is not np.void * pickle.loads(np.asanyarray(np.void(pickle.dumps('a')))[()].tostring()) is 'a'# -*- coding: utf-8 -*- # Copyright (c) 2019 shmilee ''' Contains Npz pickled file loader class. ''' import numpy import zipfile from ..glogger import getGLogger from .base import BasePckLoader __all__ = ['NpzPckLoader'] log = getGLogger('L') class NpzPckLoader(BasePckLoader): ''' Load pickled data from ``.npz`` file. Return a dictionary-like object. Notes ----- Q: How to read data from .npz file? A: npzfile[datakey] >>> npzfile = numpy.load('/tmp/test.npz') >>> datakey = 'group/key' >>> npzfile[datakey] ''' __slots__ = [] loader_type = '.npz' def _special_check_path(self): if zipfile.is_zipfile(self.path): return True else: log.error("'%s' is not a ZIP file!" % self.path) return False def _special_open(self): return numpy.load(self.path) def _special_close(self, tmpobj): tmpobj.close() def _special_getkeys(self, tmpobj): return sorted(dict.fromkeys(tmpobj.files)) def _special_get(self, tmpobj, key): value = tmpobj[key] if value.size == 1: value = value.item() return value
<commit_before># -*- coding: utf-8 -*- # Copyright (c) 2019 shmilee ''' Contains Npz pickled file loader class. ''' import numpy import zipfile from ..glogger import getGLogger from .base import BasePckLoader __all__ = ['NpzPckLoader'] log = getGLogger('L') class NpzPckLoader(BasePckLoader): ''' Load pickled data from ``.npz`` file. Return a dictionary-like object. Notes ----- Q: How to read data from .npz file? A: npzfile[datakey] >>> npzfile = numpy.load('/tmp/test.npz') >>> datakey = 'group/key' >>> npzfile[datakey] ''' __slots__ = [] loader_type = '.npz' def _special_check_path(self): if zipfile.is_zipfile(self.path): return True else: log.error("'%s' is not a ZIP file!" % self.path) return False def _special_open(self): return numpy.load(self.path) def _special_close(self, tmpobj): tmpobj.close() def _special_getkeys(self, tmpobj): return sorted(dict.fromkeys(tmpobj.files)) def _special_get(self, tmpobj, key): value = tmpobj[key] # if value.size == 1: # value = value.item() return value <commit_msg>Revert "fix: NpzPckLoader meddle one-element array" This reverts commit b3bf178ed07d789282a05fa4efac799c9e0c5910. NpzPckLoader: * 1 vs array(1) * 'a' vs array('a', dtype='<U1') NpzPckSaver np.asanyarray: * 1 vs np.void(pickle.dumps('1')) * 'a' vs np.void(pickle.dumps('a')) Note: * type(np.asanyarray(np.void(pickle.dumps('a')))[()]) is np.void * type(np.asanyarray(np.void(pickle.dumps('a'))).item()) is not np.void * pickle.loads(np.asanyarray(np.void(pickle.dumps('a')))[()].tostring()) is 'a'<commit_after># -*- coding: utf-8 -*- # Copyright (c) 2019 shmilee ''' Contains Npz pickled file loader class. ''' import numpy import zipfile from ..glogger import getGLogger from .base import BasePckLoader __all__ = ['NpzPckLoader'] log = getGLogger('L') class NpzPckLoader(BasePckLoader): ''' Load pickled data from ``.npz`` file. Return a dictionary-like object. Notes ----- Q: How to read data from .npz file? A: npzfile[datakey] >>> npzfile = numpy.load('/tmp/test.npz') >>> datakey = 'group/key' >>> npzfile[datakey] ''' __slots__ = [] loader_type = '.npz' def _special_check_path(self): if zipfile.is_zipfile(self.path): return True else: log.error("'%s' is not a ZIP file!" % self.path) return False def _special_open(self): return numpy.load(self.path) def _special_close(self, tmpobj): tmpobj.close() def _special_getkeys(self, tmpobj): return sorted(dict.fromkeys(tmpobj.files)) def _special_get(self, tmpobj, key): value = tmpobj[key] if value.size == 1: value = value.item() return value
ea43ab87dbb66e0f3da3b9c5345a134b7c001d50
app/__init__.py
app/__init__.py
from flask import Flask from flask.ext.mongoengine import MongoEngine import logging # Start a flask application context. app = Flask(__name__) # Load configuration from file. app.config.from_object('config') # Setup Db object. db = MongoEngine(app) # Setup logging. logging.basicConfig(level = logging.INFO) logger = logging.getLogger(__name__) # Import auth module and register blueprint. from app.mod_auth.controller import auth app.register_blueprint(auth, url_prefix='/auth') # Import budget module and register blueprint. from app.mod_budget.controller import budget app.register_blueprint(budget, url_prefix='/budget')
from flask import Flask from flask.ext.mongoengine import MongoEngine import logging # Start a flask application context. app = Flask(__name__) # Load configuration from file. app.config.from_object('config') # Setup Db object. db = MongoEngine(app) # Setup logging. logging.basicConfig(level = logging.DEBUG) logger = logging.getLogger(__name__) # Import auth module and register blueprint. from app.mod_auth.controller import auth app.register_blueprint(auth, url_prefix='/auth') # Import budget module and register blueprint. from app.mod_budget.controller import budget app.register_blueprint(budget, url_prefix='/budget')
Set logging level to DEBUG
Set logging level to DEBUG
Python
mit
Zillolo/mana-vault,Zillolo/mana-vault,Zillolo/mana-vault
from flask import Flask from flask.ext.mongoengine import MongoEngine import logging # Start a flask application context. app = Flask(__name__) # Load configuration from file. app.config.from_object('config') # Setup Db object. db = MongoEngine(app) # Setup logging. logging.basicConfig(level = logging.INFO) logger = logging.getLogger(__name__) # Import auth module and register blueprint. from app.mod_auth.controller import auth app.register_blueprint(auth, url_prefix='/auth') # Import budget module and register blueprint. from app.mod_budget.controller import budget app.register_blueprint(budget, url_prefix='/budget') Set logging level to DEBUG
from flask import Flask from flask.ext.mongoengine import MongoEngine import logging # Start a flask application context. app = Flask(__name__) # Load configuration from file. app.config.from_object('config') # Setup Db object. db = MongoEngine(app) # Setup logging. logging.basicConfig(level = logging.DEBUG) logger = logging.getLogger(__name__) # Import auth module and register blueprint. from app.mod_auth.controller import auth app.register_blueprint(auth, url_prefix='/auth') # Import budget module and register blueprint. from app.mod_budget.controller import budget app.register_blueprint(budget, url_prefix='/budget')
<commit_before>from flask import Flask from flask.ext.mongoengine import MongoEngine import logging # Start a flask application context. app = Flask(__name__) # Load configuration from file. app.config.from_object('config') # Setup Db object. db = MongoEngine(app) # Setup logging. logging.basicConfig(level = logging.INFO) logger = logging.getLogger(__name__) # Import auth module and register blueprint. from app.mod_auth.controller import auth app.register_blueprint(auth, url_prefix='/auth') # Import budget module and register blueprint. from app.mod_budget.controller import budget app.register_blueprint(budget, url_prefix='/budget') <commit_msg>Set logging level to DEBUG<commit_after>
from flask import Flask from flask.ext.mongoengine import MongoEngine import logging # Start a flask application context. app = Flask(__name__) # Load configuration from file. app.config.from_object('config') # Setup Db object. db = MongoEngine(app) # Setup logging. logging.basicConfig(level = logging.DEBUG) logger = logging.getLogger(__name__) # Import auth module and register blueprint. from app.mod_auth.controller import auth app.register_blueprint(auth, url_prefix='/auth') # Import budget module and register blueprint. from app.mod_budget.controller import budget app.register_blueprint(budget, url_prefix='/budget')
from flask import Flask from flask.ext.mongoengine import MongoEngine import logging # Start a flask application context. app = Flask(__name__) # Load configuration from file. app.config.from_object('config') # Setup Db object. db = MongoEngine(app) # Setup logging. logging.basicConfig(level = logging.INFO) logger = logging.getLogger(__name__) # Import auth module and register blueprint. from app.mod_auth.controller import auth app.register_blueprint(auth, url_prefix='/auth') # Import budget module and register blueprint. from app.mod_budget.controller import budget app.register_blueprint(budget, url_prefix='/budget') Set logging level to DEBUGfrom flask import Flask from flask.ext.mongoengine import MongoEngine import logging # Start a flask application context. app = Flask(__name__) # Load configuration from file. app.config.from_object('config') # Setup Db object. db = MongoEngine(app) # Setup logging. logging.basicConfig(level = logging.DEBUG) logger = logging.getLogger(__name__) # Import auth module and register blueprint. from app.mod_auth.controller import auth app.register_blueprint(auth, url_prefix='/auth') # Import budget module and register blueprint. from app.mod_budget.controller import budget app.register_blueprint(budget, url_prefix='/budget')
<commit_before>from flask import Flask from flask.ext.mongoengine import MongoEngine import logging # Start a flask application context. app = Flask(__name__) # Load configuration from file. app.config.from_object('config') # Setup Db object. db = MongoEngine(app) # Setup logging. logging.basicConfig(level = logging.INFO) logger = logging.getLogger(__name__) # Import auth module and register blueprint. from app.mod_auth.controller import auth app.register_blueprint(auth, url_prefix='/auth') # Import budget module and register blueprint. from app.mod_budget.controller import budget app.register_blueprint(budget, url_prefix='/budget') <commit_msg>Set logging level to DEBUG<commit_after>from flask import Flask from flask.ext.mongoengine import MongoEngine import logging # Start a flask application context. app = Flask(__name__) # Load configuration from file. app.config.from_object('config') # Setup Db object. db = MongoEngine(app) # Setup logging. logging.basicConfig(level = logging.DEBUG) logger = logging.getLogger(__name__) # Import auth module and register blueprint. from app.mod_auth.controller import auth app.register_blueprint(auth, url_prefix='/auth') # Import budget module and register blueprint. from app.mod_budget.controller import budget app.register_blueprint(budget, url_prefix='/budget')
78e6cd5fc57c338ac9c61b6e50a5ac4355a5d8b7
json-templates/create-template.py
json-templates/create-template.py
#!/bin/env python import blank_template import json import os import subprocess import sys import tarfile if __name__ == '__main__': # Load template fname = sys.argv[1] template = blank_template.load_template(fname) # Generate ova.xml version = {'hostname': 'golm-2', 'date': '2016-04-29', 'product_version': '7.0.0', 'product_brand': 'XenServer', 'build_number': '125122c', 'xapi_major': '1', 'xapi_minor': '9', 'export_vsn': '2'} xml = template.toXML(version) ova_xml = open("ova.xml", "w") ova_xml.write(xml) ova_xml.close() # Generate tarball containing ova.xml template_name = os.path.splitext(fname)[0] tar = tarfile.open("%s.tar" % template_name, "w") tar.add("ova.xml") tar.close() os.remove("ova.xml") # Import XS template uuid = subprocess.check_output(["xe", "vm-import", "filename=%s.tar" % template_name, "preserve=true"]) # Set default_template = true out = subprocess.check_output(["xe", "template-param-set", "other-config:default_template=true", "uuid=%s" % uuid.strip()])
#!/bin/env python import blank_template import json import os import subprocess import sys import tarfile if __name__ == '__main__': # Load template fname = sys.argv[1] template = blank_template.load_template(fname) # Generate ova.xml version = {'hostname': 'localhost', 'date': '1970-01-01', 'product_version': '7.0.0', 'product_brand': 'XenServer', 'build_number': '0x', 'xapi_major': '1', 'xapi_minor': '9', 'export_vsn': '2'} xml = template.toXML(version) ova_xml = open("ova.xml", "w") ova_xml.write(xml) ova_xml.close() # Generate tarball containing ova.xml template_name = os.path.splitext(fname)[0] tar = tarfile.open("%s.tar" % template_name, "w") tar.add("ova.xml") tar.close() os.remove("ova.xml") # Import XS template uuid = subprocess.check_output(["xe", "vm-import", "filename=%s.tar" % template_name, "preserve=true"]) # Set default_template = true out = subprocess.check_output(["xe", "template-param-set", "other-config:default_template=true", "uuid=%s" % uuid.strip()])
Use more generic values for version
Use more generic values for version
Python
bsd-2-clause
xenserver/guest-templates,xenserver/guest-templates
#!/bin/env python import blank_template import json import os import subprocess import sys import tarfile if __name__ == '__main__': # Load template fname = sys.argv[1] template = blank_template.load_template(fname) # Generate ova.xml version = {'hostname': 'golm-2', 'date': '2016-04-29', 'product_version': '7.0.0', 'product_brand': 'XenServer', 'build_number': '125122c', 'xapi_major': '1', 'xapi_minor': '9', 'export_vsn': '2'} xml = template.toXML(version) ova_xml = open("ova.xml", "w") ova_xml.write(xml) ova_xml.close() # Generate tarball containing ova.xml template_name = os.path.splitext(fname)[0] tar = tarfile.open("%s.tar" % template_name, "w") tar.add("ova.xml") tar.close() os.remove("ova.xml") # Import XS template uuid = subprocess.check_output(["xe", "vm-import", "filename=%s.tar" % template_name, "preserve=true"]) # Set default_template = true out = subprocess.check_output(["xe", "template-param-set", "other-config:default_template=true", "uuid=%s" % uuid.strip()]) Use more generic values for version
#!/bin/env python import blank_template import json import os import subprocess import sys import tarfile if __name__ == '__main__': # Load template fname = sys.argv[1] template = blank_template.load_template(fname) # Generate ova.xml version = {'hostname': 'localhost', 'date': '1970-01-01', 'product_version': '7.0.0', 'product_brand': 'XenServer', 'build_number': '0x', 'xapi_major': '1', 'xapi_minor': '9', 'export_vsn': '2'} xml = template.toXML(version) ova_xml = open("ova.xml", "w") ova_xml.write(xml) ova_xml.close() # Generate tarball containing ova.xml template_name = os.path.splitext(fname)[0] tar = tarfile.open("%s.tar" % template_name, "w") tar.add("ova.xml") tar.close() os.remove("ova.xml") # Import XS template uuid = subprocess.check_output(["xe", "vm-import", "filename=%s.tar" % template_name, "preserve=true"]) # Set default_template = true out = subprocess.check_output(["xe", "template-param-set", "other-config:default_template=true", "uuid=%s" % uuid.strip()])
<commit_before>#!/bin/env python import blank_template import json import os import subprocess import sys import tarfile if __name__ == '__main__': # Load template fname = sys.argv[1] template = blank_template.load_template(fname) # Generate ova.xml version = {'hostname': 'golm-2', 'date': '2016-04-29', 'product_version': '7.0.0', 'product_brand': 'XenServer', 'build_number': '125122c', 'xapi_major': '1', 'xapi_minor': '9', 'export_vsn': '2'} xml = template.toXML(version) ova_xml = open("ova.xml", "w") ova_xml.write(xml) ova_xml.close() # Generate tarball containing ova.xml template_name = os.path.splitext(fname)[0] tar = tarfile.open("%s.tar" % template_name, "w") tar.add("ova.xml") tar.close() os.remove("ova.xml") # Import XS template uuid = subprocess.check_output(["xe", "vm-import", "filename=%s.tar" % template_name, "preserve=true"]) # Set default_template = true out = subprocess.check_output(["xe", "template-param-set", "other-config:default_template=true", "uuid=%s" % uuid.strip()]) <commit_msg>Use more generic values for version<commit_after>
#!/bin/env python import blank_template import json import os import subprocess import sys import tarfile if __name__ == '__main__': # Load template fname = sys.argv[1] template = blank_template.load_template(fname) # Generate ova.xml version = {'hostname': 'localhost', 'date': '1970-01-01', 'product_version': '7.0.0', 'product_brand': 'XenServer', 'build_number': '0x', 'xapi_major': '1', 'xapi_minor': '9', 'export_vsn': '2'} xml = template.toXML(version) ova_xml = open("ova.xml", "w") ova_xml.write(xml) ova_xml.close() # Generate tarball containing ova.xml template_name = os.path.splitext(fname)[0] tar = tarfile.open("%s.tar" % template_name, "w") tar.add("ova.xml") tar.close() os.remove("ova.xml") # Import XS template uuid = subprocess.check_output(["xe", "vm-import", "filename=%s.tar" % template_name, "preserve=true"]) # Set default_template = true out = subprocess.check_output(["xe", "template-param-set", "other-config:default_template=true", "uuid=%s" % uuid.strip()])
#!/bin/env python import blank_template import json import os import subprocess import sys import tarfile if __name__ == '__main__': # Load template fname = sys.argv[1] template = blank_template.load_template(fname) # Generate ova.xml version = {'hostname': 'golm-2', 'date': '2016-04-29', 'product_version': '7.0.0', 'product_brand': 'XenServer', 'build_number': '125122c', 'xapi_major': '1', 'xapi_minor': '9', 'export_vsn': '2'} xml = template.toXML(version) ova_xml = open("ova.xml", "w") ova_xml.write(xml) ova_xml.close() # Generate tarball containing ova.xml template_name = os.path.splitext(fname)[0] tar = tarfile.open("%s.tar" % template_name, "w") tar.add("ova.xml") tar.close() os.remove("ova.xml") # Import XS template uuid = subprocess.check_output(["xe", "vm-import", "filename=%s.tar" % template_name, "preserve=true"]) # Set default_template = true out = subprocess.check_output(["xe", "template-param-set", "other-config:default_template=true", "uuid=%s" % uuid.strip()]) Use more generic values for version#!/bin/env python import blank_template import json import os import subprocess import sys import tarfile if __name__ == '__main__': # Load template fname = sys.argv[1] template = blank_template.load_template(fname) # Generate ova.xml version = {'hostname': 'localhost', 'date': '1970-01-01', 'product_version': '7.0.0', 'product_brand': 'XenServer', 'build_number': '0x', 'xapi_major': '1', 'xapi_minor': '9', 'export_vsn': '2'} xml = template.toXML(version) ova_xml = open("ova.xml", "w") ova_xml.write(xml) ova_xml.close() # Generate tarball containing ova.xml template_name = os.path.splitext(fname)[0] tar = tarfile.open("%s.tar" % template_name, "w") tar.add("ova.xml") tar.close() os.remove("ova.xml") # Import XS template uuid = subprocess.check_output(["xe", "vm-import", "filename=%s.tar" % template_name, "preserve=true"]) # Set default_template = true out = subprocess.check_output(["xe", "template-param-set", "other-config:default_template=true", "uuid=%s" % uuid.strip()])
<commit_before>#!/bin/env python import blank_template import json import os import subprocess import sys import tarfile if __name__ == '__main__': # Load template fname = sys.argv[1] template = blank_template.load_template(fname) # Generate ova.xml version = {'hostname': 'golm-2', 'date': '2016-04-29', 'product_version': '7.0.0', 'product_brand': 'XenServer', 'build_number': '125122c', 'xapi_major': '1', 'xapi_minor': '9', 'export_vsn': '2'} xml = template.toXML(version) ova_xml = open("ova.xml", "w") ova_xml.write(xml) ova_xml.close() # Generate tarball containing ova.xml template_name = os.path.splitext(fname)[0] tar = tarfile.open("%s.tar" % template_name, "w") tar.add("ova.xml") tar.close() os.remove("ova.xml") # Import XS template uuid = subprocess.check_output(["xe", "vm-import", "filename=%s.tar" % template_name, "preserve=true"]) # Set default_template = true out = subprocess.check_output(["xe", "template-param-set", "other-config:default_template=true", "uuid=%s" % uuid.strip()]) <commit_msg>Use more generic values for version<commit_after>#!/bin/env python import blank_template import json import os import subprocess import sys import tarfile if __name__ == '__main__': # Load template fname = sys.argv[1] template = blank_template.load_template(fname) # Generate ova.xml version = {'hostname': 'localhost', 'date': '1970-01-01', 'product_version': '7.0.0', 'product_brand': 'XenServer', 'build_number': '0x', 'xapi_major': '1', 'xapi_minor': '9', 'export_vsn': '2'} xml = template.toXML(version) ova_xml = open("ova.xml", "w") ova_xml.write(xml) ova_xml.close() # Generate tarball containing ova.xml template_name = os.path.splitext(fname)[0] tar = tarfile.open("%s.tar" % template_name, "w") tar.add("ova.xml") tar.close() os.remove("ova.xml") # Import XS template uuid = subprocess.check_output(["xe", "vm-import", "filename=%s.tar" % template_name, "preserve=true"]) # Set default_template = true out = subprocess.check_output(["xe", "template-param-set", "other-config:default_template=true", "uuid=%s" % uuid.strip()])
842b128dd4fb3b93492578008de0969e85e3039a
qcfractal/alembic/versions/1604623c481a_id_pirmary_key_for_torsion_init_mol.py
qcfractal/alembic/versions/1604623c481a_id_pirmary_key_for_torsion_init_mol.py
"""id(pirmary key) for torsion_init_mol Revision ID: 1604623c481a Revises: fb5bd88ae2f3 Create Date: 2020-07-02 18:42:17.267792 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = "1604623c481a" down_revision = "fb5bd88ae2f3" branch_labels = None depends_on = None def upgrade(): op.execute( "DELETE FROM torsion_init_mol_association a USING \ (SELECT MIN(ctid) as ctid, torsion_id, molecule_id \ FROM torsion_init_mol_association \ GROUP BY torsion_id, molecule_id HAVING COUNT(*) > 1 \ ) b \ WHERE a.torsion_id = b.torsion_id and a.molecule_id = b.molecule_id \ AND a.ctid <> b.ctid" ) op.execute("alter table add primary key (torsion_id, molecule_id)") def downgrade(): op.execute("alter table drop constraint torsion_init_mol_association_pkey")
"""id(pirmary key) for torsion_init_mol Revision ID: 1604623c481a Revises: fb5bd88ae2f3 Create Date: 2020-07-02 18:42:17.267792 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = "1604623c481a" down_revision = "fb5bd88ae2f3" branch_labels = None depends_on = None def upgrade(): # Removes (harmless) duplicate rows op.execute( "DELETE FROM torsion_init_mol_association a USING \ (SELECT MIN(ctid) as ctid, torsion_id, molecule_id \ FROM torsion_init_mol_association \ GROUP BY torsion_id, molecule_id HAVING COUNT(*) > 1 \ ) b \ WHERE a.torsion_id = b.torsion_id and a.molecule_id = b.molecule_id \ AND a.ctid <> b.ctid" ) op.execute("alter table torsion_init_mol_association add primary key (torsion_id, molecule_id)") def downgrade(): op.execute("alter table torsion_init_mol_association drop constraint torsion_init_mol_association_pkey")
Fix missing table name in alter_table
Fix missing table name in alter_table
Python
bsd-3-clause
psi4/mongo_qcdb,psi4/mongo_qcdb,psi4/DatenQM,psi4/DatenQM
"""id(pirmary key) for torsion_init_mol Revision ID: 1604623c481a Revises: fb5bd88ae2f3 Create Date: 2020-07-02 18:42:17.267792 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = "1604623c481a" down_revision = "fb5bd88ae2f3" branch_labels = None depends_on = None def upgrade(): op.execute( "DELETE FROM torsion_init_mol_association a USING \ (SELECT MIN(ctid) as ctid, torsion_id, molecule_id \ FROM torsion_init_mol_association \ GROUP BY torsion_id, molecule_id HAVING COUNT(*) > 1 \ ) b \ WHERE a.torsion_id = b.torsion_id and a.molecule_id = b.molecule_id \ AND a.ctid <> b.ctid" ) op.execute("alter table add primary key (torsion_id, molecule_id)") def downgrade(): op.execute("alter table drop constraint torsion_init_mol_association_pkey") Fix missing table name in alter_table
"""id(pirmary key) for torsion_init_mol Revision ID: 1604623c481a Revises: fb5bd88ae2f3 Create Date: 2020-07-02 18:42:17.267792 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = "1604623c481a" down_revision = "fb5bd88ae2f3" branch_labels = None depends_on = None def upgrade(): # Removes (harmless) duplicate rows op.execute( "DELETE FROM torsion_init_mol_association a USING \ (SELECT MIN(ctid) as ctid, torsion_id, molecule_id \ FROM torsion_init_mol_association \ GROUP BY torsion_id, molecule_id HAVING COUNT(*) > 1 \ ) b \ WHERE a.torsion_id = b.torsion_id and a.molecule_id = b.molecule_id \ AND a.ctid <> b.ctid" ) op.execute("alter table torsion_init_mol_association add primary key (torsion_id, molecule_id)") def downgrade(): op.execute("alter table torsion_init_mol_association drop constraint torsion_init_mol_association_pkey")
<commit_before>"""id(pirmary key) for torsion_init_mol Revision ID: 1604623c481a Revises: fb5bd88ae2f3 Create Date: 2020-07-02 18:42:17.267792 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = "1604623c481a" down_revision = "fb5bd88ae2f3" branch_labels = None depends_on = None def upgrade(): op.execute( "DELETE FROM torsion_init_mol_association a USING \ (SELECT MIN(ctid) as ctid, torsion_id, molecule_id \ FROM torsion_init_mol_association \ GROUP BY torsion_id, molecule_id HAVING COUNT(*) > 1 \ ) b \ WHERE a.torsion_id = b.torsion_id and a.molecule_id = b.molecule_id \ AND a.ctid <> b.ctid" ) op.execute("alter table add primary key (torsion_id, molecule_id)") def downgrade(): op.execute("alter table drop constraint torsion_init_mol_association_pkey") <commit_msg>Fix missing table name in alter_table<commit_after>
"""id(pirmary key) for torsion_init_mol Revision ID: 1604623c481a Revises: fb5bd88ae2f3 Create Date: 2020-07-02 18:42:17.267792 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = "1604623c481a" down_revision = "fb5bd88ae2f3" branch_labels = None depends_on = None def upgrade(): # Removes (harmless) duplicate rows op.execute( "DELETE FROM torsion_init_mol_association a USING \ (SELECT MIN(ctid) as ctid, torsion_id, molecule_id \ FROM torsion_init_mol_association \ GROUP BY torsion_id, molecule_id HAVING COUNT(*) > 1 \ ) b \ WHERE a.torsion_id = b.torsion_id and a.molecule_id = b.molecule_id \ AND a.ctid <> b.ctid" ) op.execute("alter table torsion_init_mol_association add primary key (torsion_id, molecule_id)") def downgrade(): op.execute("alter table torsion_init_mol_association drop constraint torsion_init_mol_association_pkey")
"""id(pirmary key) for torsion_init_mol Revision ID: 1604623c481a Revises: fb5bd88ae2f3 Create Date: 2020-07-02 18:42:17.267792 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = "1604623c481a" down_revision = "fb5bd88ae2f3" branch_labels = None depends_on = None def upgrade(): op.execute( "DELETE FROM torsion_init_mol_association a USING \ (SELECT MIN(ctid) as ctid, torsion_id, molecule_id \ FROM torsion_init_mol_association \ GROUP BY torsion_id, molecule_id HAVING COUNT(*) > 1 \ ) b \ WHERE a.torsion_id = b.torsion_id and a.molecule_id = b.molecule_id \ AND a.ctid <> b.ctid" ) op.execute("alter table add primary key (torsion_id, molecule_id)") def downgrade(): op.execute("alter table drop constraint torsion_init_mol_association_pkey") Fix missing table name in alter_table"""id(pirmary key) for torsion_init_mol Revision ID: 1604623c481a Revises: fb5bd88ae2f3 Create Date: 2020-07-02 18:42:17.267792 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = "1604623c481a" down_revision = "fb5bd88ae2f3" branch_labels = None depends_on = None def upgrade(): # Removes (harmless) duplicate rows op.execute( "DELETE FROM torsion_init_mol_association a USING \ (SELECT MIN(ctid) as ctid, torsion_id, molecule_id \ FROM torsion_init_mol_association \ GROUP BY torsion_id, molecule_id HAVING COUNT(*) > 1 \ ) b \ WHERE a.torsion_id = b.torsion_id and a.molecule_id = b.molecule_id \ AND a.ctid <> b.ctid" ) op.execute("alter table torsion_init_mol_association add primary key (torsion_id, molecule_id)") def downgrade(): op.execute("alter table torsion_init_mol_association drop constraint torsion_init_mol_association_pkey")
<commit_before>"""id(pirmary key) for torsion_init_mol Revision ID: 1604623c481a Revises: fb5bd88ae2f3 Create Date: 2020-07-02 18:42:17.267792 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = "1604623c481a" down_revision = "fb5bd88ae2f3" branch_labels = None depends_on = None def upgrade(): op.execute( "DELETE FROM torsion_init_mol_association a USING \ (SELECT MIN(ctid) as ctid, torsion_id, molecule_id \ FROM torsion_init_mol_association \ GROUP BY torsion_id, molecule_id HAVING COUNT(*) > 1 \ ) b \ WHERE a.torsion_id = b.torsion_id and a.molecule_id = b.molecule_id \ AND a.ctid <> b.ctid" ) op.execute("alter table add primary key (torsion_id, molecule_id)") def downgrade(): op.execute("alter table drop constraint torsion_init_mol_association_pkey") <commit_msg>Fix missing table name in alter_table<commit_after>"""id(pirmary key) for torsion_init_mol Revision ID: 1604623c481a Revises: fb5bd88ae2f3 Create Date: 2020-07-02 18:42:17.267792 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = "1604623c481a" down_revision = "fb5bd88ae2f3" branch_labels = None depends_on = None def upgrade(): # Removes (harmless) duplicate rows op.execute( "DELETE FROM torsion_init_mol_association a USING \ (SELECT MIN(ctid) as ctid, torsion_id, molecule_id \ FROM torsion_init_mol_association \ GROUP BY torsion_id, molecule_id HAVING COUNT(*) > 1 \ ) b \ WHERE a.torsion_id = b.torsion_id and a.molecule_id = b.molecule_id \ AND a.ctid <> b.ctid" ) op.execute("alter table torsion_init_mol_association add primary key (torsion_id, molecule_id)") def downgrade(): op.execute("alter table torsion_init_mol_association drop constraint torsion_init_mol_association_pkey")
6cdcb7f089c0440ad551ec44b3667a1a5cd380d1
kafka/tools/assigner/batcher.py
kafka/tools/assigner/batcher.py
from kafka.tools.assigner.exceptions import ProgrammingException def split_partitions_into_batches(partitions, batch_size=10, use_class=None): # Currently, this is a very simplistic implementation that just breaks the list of partitions down # into even sized chunks. While it could be implemented as a generator, it's not so that it can # split the list into more efficient batches. if use_class is None: raise ProgrammingException("split_partitions_into_batches called with no use_class") batches = [use_class(partitions[i:i + batch_size]) for i in xrange(0, len(partitions), batch_size)] return batches
from kafka.tools.assigner.exceptions import ProgrammingException def split_partitions_into_batches(partitions, batch_size=10, use_class=None): # Currently, this is a very simplistic implementation that just breaks the list of partitions down # into even sized chunks. While it could be implemented as a generator, it's not so that it can # split the list into more efficient batches. if use_class is None: raise ProgrammingException("split_partitions_into_batches called with no use_class") batches = [use_class(partitions[i:i + batch_size]) for i in range(0, len(partitions), batch_size)] return batches
Remove used of xrange to support python 3
Remove used of xrange to support python 3
Python
apache-2.0
toddpalino/kafka-tools
from kafka.tools.assigner.exceptions import ProgrammingException def split_partitions_into_batches(partitions, batch_size=10, use_class=None): # Currently, this is a very simplistic implementation that just breaks the list of partitions down # into even sized chunks. While it could be implemented as a generator, it's not so that it can # split the list into more efficient batches. if use_class is None: raise ProgrammingException("split_partitions_into_batches called with no use_class") batches = [use_class(partitions[i:i + batch_size]) for i in xrange(0, len(partitions), batch_size)] return batches Remove used of xrange to support python 3
from kafka.tools.assigner.exceptions import ProgrammingException def split_partitions_into_batches(partitions, batch_size=10, use_class=None): # Currently, this is a very simplistic implementation that just breaks the list of partitions down # into even sized chunks. While it could be implemented as a generator, it's not so that it can # split the list into more efficient batches. if use_class is None: raise ProgrammingException("split_partitions_into_batches called with no use_class") batches = [use_class(partitions[i:i + batch_size]) for i in range(0, len(partitions), batch_size)] return batches
<commit_before>from kafka.tools.assigner.exceptions import ProgrammingException def split_partitions_into_batches(partitions, batch_size=10, use_class=None): # Currently, this is a very simplistic implementation that just breaks the list of partitions down # into even sized chunks. While it could be implemented as a generator, it's not so that it can # split the list into more efficient batches. if use_class is None: raise ProgrammingException("split_partitions_into_batches called with no use_class") batches = [use_class(partitions[i:i + batch_size]) for i in xrange(0, len(partitions), batch_size)] return batches <commit_msg>Remove used of xrange to support python 3<commit_after>
from kafka.tools.assigner.exceptions import ProgrammingException def split_partitions_into_batches(partitions, batch_size=10, use_class=None): # Currently, this is a very simplistic implementation that just breaks the list of partitions down # into even sized chunks. While it could be implemented as a generator, it's not so that it can # split the list into more efficient batches. if use_class is None: raise ProgrammingException("split_partitions_into_batches called with no use_class") batches = [use_class(partitions[i:i + batch_size]) for i in range(0, len(partitions), batch_size)] return batches
from kafka.tools.assigner.exceptions import ProgrammingException def split_partitions_into_batches(partitions, batch_size=10, use_class=None): # Currently, this is a very simplistic implementation that just breaks the list of partitions down # into even sized chunks. While it could be implemented as a generator, it's not so that it can # split the list into more efficient batches. if use_class is None: raise ProgrammingException("split_partitions_into_batches called with no use_class") batches = [use_class(partitions[i:i + batch_size]) for i in xrange(0, len(partitions), batch_size)] return batches Remove used of xrange to support python 3from kafka.tools.assigner.exceptions import ProgrammingException def split_partitions_into_batches(partitions, batch_size=10, use_class=None): # Currently, this is a very simplistic implementation that just breaks the list of partitions down # into even sized chunks. While it could be implemented as a generator, it's not so that it can # split the list into more efficient batches. if use_class is None: raise ProgrammingException("split_partitions_into_batches called with no use_class") batches = [use_class(partitions[i:i + batch_size]) for i in range(0, len(partitions), batch_size)] return batches
<commit_before>from kafka.tools.assigner.exceptions import ProgrammingException def split_partitions_into_batches(partitions, batch_size=10, use_class=None): # Currently, this is a very simplistic implementation that just breaks the list of partitions down # into even sized chunks. While it could be implemented as a generator, it's not so that it can # split the list into more efficient batches. if use_class is None: raise ProgrammingException("split_partitions_into_batches called with no use_class") batches = [use_class(partitions[i:i + batch_size]) for i in xrange(0, len(partitions), batch_size)] return batches <commit_msg>Remove used of xrange to support python 3<commit_after>from kafka.tools.assigner.exceptions import ProgrammingException def split_partitions_into_batches(partitions, batch_size=10, use_class=None): # Currently, this is a very simplistic implementation that just breaks the list of partitions down # into even sized chunks. While it could be implemented as a generator, it's not so that it can # split the list into more efficient batches. if use_class is None: raise ProgrammingException("split_partitions_into_batches called with no use_class") batches = [use_class(partitions[i:i + batch_size]) for i in range(0, len(partitions), batch_size)] return batches
3048fa2883d79a706599ccc6828cb1512acea35d
kolibri/utils/tests/test_cli.py
kolibri/utils/tests/test_cli.py
""" Tests for `kolibri` module. """ from __future__ import absolute_import, print_function, unicode_literals import logging import os import shutil import tempfile from kolibri.utils.cli import main from .base import KolibriTestBase logger = logging.getLogger(__name__) class TestKolibriCLI(KolibriTestBase): @classmethod def setup_class(cls): os.environ["KOLIBRI_HOME"] = tempfile.mkdtemp() def test_cli(self): logger.debug("This is a unit test in the main Kolibri app space") # Test the -h with self.assertRaises(SystemExit): main("-h") @classmethod def teardown_class(cls): try: shutil.rmtree(os.environ["KOLIBRI_HOME"]) except WindowsError as e: logger.debug("Couldn't delete temporary file because\n\t" + str(e))
""" Tests for `kolibri` module. """ from __future__ import absolute_import, print_function, unicode_literals import logging from kolibri.utils.cli import main from .base import KolibriTestBase logger = logging.getLogger(__name__) class TestKolibriCLI(KolibriTestBase): def test_cli(self): logger.debug("This is a unit test in the main Kolibri app space") # Test the -h with self.assertRaises(SystemExit): main("-h")
Remove what's already done in KolibriTestBase
Remove what's already done in KolibriTestBase
Python
mit
lyw07/kolibri,rtibbles/kolibri,DXCanas/kolibri,mrpau/kolibri,christianmemije/kolibri,learningequality/kolibri,DXCanas/kolibri,christianmemije/kolibri,christianmemije/kolibri,DXCanas/kolibri,MingDai/kolibri,lyw07/kolibri,MingDai/kolibri,MingDai/kolibri,rtibbles/kolibri,benjaoming/kolibri,benjaoming/kolibri,mrpau/kolibri,indirectlylit/kolibri,mrpau/kolibri,jonboiser/kolibri,mrpau/kolibri,jonboiser/kolibri,rtibbles/kolibri,learningequality/kolibri,lyw07/kolibri,indirectlylit/kolibri,indirectlylit/kolibri,DXCanas/kolibri,indirectlylit/kolibri,MingDai/kolibri,learningequality/kolibri,learningequality/kolibri,benjaoming/kolibri,benjaoming/kolibri,lyw07/kolibri,rtibbles/kolibri,jonboiser/kolibri,christianmemije/kolibri,jonboiser/kolibri
""" Tests for `kolibri` module. """ from __future__ import absolute_import, print_function, unicode_literals import logging import os import shutil import tempfile from kolibri.utils.cli import main from .base import KolibriTestBase logger = logging.getLogger(__name__) class TestKolibriCLI(KolibriTestBase): @classmethod def setup_class(cls): os.environ["KOLIBRI_HOME"] = tempfile.mkdtemp() def test_cli(self): logger.debug("This is a unit test in the main Kolibri app space") # Test the -h with self.assertRaises(SystemExit): main("-h") @classmethod def teardown_class(cls): try: shutil.rmtree(os.environ["KOLIBRI_HOME"]) except WindowsError as e: logger.debug("Couldn't delete temporary file because\n\t" + str(e)) Remove what's already done in KolibriTestBase
""" Tests for `kolibri` module. """ from __future__ import absolute_import, print_function, unicode_literals import logging from kolibri.utils.cli import main from .base import KolibriTestBase logger = logging.getLogger(__name__) class TestKolibriCLI(KolibriTestBase): def test_cli(self): logger.debug("This is a unit test in the main Kolibri app space") # Test the -h with self.assertRaises(SystemExit): main("-h")
<commit_before>""" Tests for `kolibri` module. """ from __future__ import absolute_import, print_function, unicode_literals import logging import os import shutil import tempfile from kolibri.utils.cli import main from .base import KolibriTestBase logger = logging.getLogger(__name__) class TestKolibriCLI(KolibriTestBase): @classmethod def setup_class(cls): os.environ["KOLIBRI_HOME"] = tempfile.mkdtemp() def test_cli(self): logger.debug("This is a unit test in the main Kolibri app space") # Test the -h with self.assertRaises(SystemExit): main("-h") @classmethod def teardown_class(cls): try: shutil.rmtree(os.environ["KOLIBRI_HOME"]) except WindowsError as e: logger.debug("Couldn't delete temporary file because\n\t" + str(e)) <commit_msg>Remove what's already done in KolibriTestBase<commit_after>
""" Tests for `kolibri` module. """ from __future__ import absolute_import, print_function, unicode_literals import logging from kolibri.utils.cli import main from .base import KolibriTestBase logger = logging.getLogger(__name__) class TestKolibriCLI(KolibriTestBase): def test_cli(self): logger.debug("This is a unit test in the main Kolibri app space") # Test the -h with self.assertRaises(SystemExit): main("-h")
""" Tests for `kolibri` module. """ from __future__ import absolute_import, print_function, unicode_literals import logging import os import shutil import tempfile from kolibri.utils.cli import main from .base import KolibriTestBase logger = logging.getLogger(__name__) class TestKolibriCLI(KolibriTestBase): @classmethod def setup_class(cls): os.environ["KOLIBRI_HOME"] = tempfile.mkdtemp() def test_cli(self): logger.debug("This is a unit test in the main Kolibri app space") # Test the -h with self.assertRaises(SystemExit): main("-h") @classmethod def teardown_class(cls): try: shutil.rmtree(os.environ["KOLIBRI_HOME"]) except WindowsError as e: logger.debug("Couldn't delete temporary file because\n\t" + str(e)) Remove what's already done in KolibriTestBase""" Tests for `kolibri` module. """ from __future__ import absolute_import, print_function, unicode_literals import logging from kolibri.utils.cli import main from .base import KolibriTestBase logger = logging.getLogger(__name__) class TestKolibriCLI(KolibriTestBase): def test_cli(self): logger.debug("This is a unit test in the main Kolibri app space") # Test the -h with self.assertRaises(SystemExit): main("-h")
<commit_before>""" Tests for `kolibri` module. """ from __future__ import absolute_import, print_function, unicode_literals import logging import os import shutil import tempfile from kolibri.utils.cli import main from .base import KolibriTestBase logger = logging.getLogger(__name__) class TestKolibriCLI(KolibriTestBase): @classmethod def setup_class(cls): os.environ["KOLIBRI_HOME"] = tempfile.mkdtemp() def test_cli(self): logger.debug("This is a unit test in the main Kolibri app space") # Test the -h with self.assertRaises(SystemExit): main("-h") @classmethod def teardown_class(cls): try: shutil.rmtree(os.environ["KOLIBRI_HOME"]) except WindowsError as e: logger.debug("Couldn't delete temporary file because\n\t" + str(e)) <commit_msg>Remove what's already done in KolibriTestBase<commit_after>""" Tests for `kolibri` module. """ from __future__ import absolute_import, print_function, unicode_literals import logging from kolibri.utils.cli import main from .base import KolibriTestBase logger = logging.getLogger(__name__) class TestKolibriCLI(KolibriTestBase): def test_cli(self): logger.debug("This is a unit test in the main Kolibri app space") # Test the -h with self.assertRaises(SystemExit): main("-h")
ea0c9a977cdf7611138599c54e28ccc4848f2eb5
troposphere/ivs.py
troposphere/ivs.py
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org> # All rights reserved. # # See LICENSE file for full license. # # *** Do not modify - this file is autogenerated *** # Resource specification version: 25.0.0 from troposphere import Tags from . import AWSObject from .validators import boolean class Channel(AWSObject): resource_type = "AWS::IVS::Channel" props = { "Authorized": (boolean, False), "LatencyMode": (str, False), "Name": (str, False), "Tags": (Tags, False), "Type": (str, False), } class PlaybackKeyPair(AWSObject): resource_type = "AWS::IVS::PlaybackKeyPair" props = { "Name": (str, False), "PublicKeyMaterial": (str, True), "Tags": (Tags, False), } class StreamKey(AWSObject): resource_type = "AWS::IVS::StreamKey" props = { "ChannelArn": (str, True), "Tags": (Tags, False), }
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org> # All rights reserved. # # See LICENSE file for full license. # # *** Do not modify - this file is autogenerated *** # Resource specification version: 35.0.0 from troposphere import Tags from . import AWSObject, AWSProperty from .validators import boolean class Channel(AWSObject): resource_type = "AWS::IVS::Channel" props = { "Authorized": (boolean, False), "LatencyMode": (str, False), "Name": (str, False), "RecordingConfigurationArn": (str, False), "Tags": (Tags, False), "Type": (str, False), } class PlaybackKeyPair(AWSObject): resource_type = "AWS::IVS::PlaybackKeyPair" props = { "Name": (str, False), "PublicKeyMaterial": (str, True), "Tags": (Tags, False), } class S3DestinationConfiguration(AWSProperty): props = { "BucketName": (str, True), } class DestinationConfiguration(AWSProperty): props = { "S3": (S3DestinationConfiguration, True), } class RecordingConfiguration(AWSObject): resource_type = "AWS::IVS::RecordingConfiguration" props = { "DestinationConfiguration": (DestinationConfiguration, True), "Name": (str, False), "Tags": (Tags, False), } class StreamKey(AWSObject): resource_type = "AWS::IVS::StreamKey" props = { "ChannelArn": (str, True), "Tags": (Tags, False), }
Update IVS per 2021-04-15 changes
Update IVS per 2021-04-15 changes
Python
bsd-2-clause
cloudtools/troposphere,cloudtools/troposphere
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org> # All rights reserved. # # See LICENSE file for full license. # # *** Do not modify - this file is autogenerated *** # Resource specification version: 25.0.0 from troposphere import Tags from . import AWSObject from .validators import boolean class Channel(AWSObject): resource_type = "AWS::IVS::Channel" props = { "Authorized": (boolean, False), "LatencyMode": (str, False), "Name": (str, False), "Tags": (Tags, False), "Type": (str, False), } class PlaybackKeyPair(AWSObject): resource_type = "AWS::IVS::PlaybackKeyPair" props = { "Name": (str, False), "PublicKeyMaterial": (str, True), "Tags": (Tags, False), } class StreamKey(AWSObject): resource_type = "AWS::IVS::StreamKey" props = { "ChannelArn": (str, True), "Tags": (Tags, False), } Update IVS per 2021-04-15 changes
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org> # All rights reserved. # # See LICENSE file for full license. # # *** Do not modify - this file is autogenerated *** # Resource specification version: 35.0.0 from troposphere import Tags from . import AWSObject, AWSProperty from .validators import boolean class Channel(AWSObject): resource_type = "AWS::IVS::Channel" props = { "Authorized": (boolean, False), "LatencyMode": (str, False), "Name": (str, False), "RecordingConfigurationArn": (str, False), "Tags": (Tags, False), "Type": (str, False), } class PlaybackKeyPair(AWSObject): resource_type = "AWS::IVS::PlaybackKeyPair" props = { "Name": (str, False), "PublicKeyMaterial": (str, True), "Tags": (Tags, False), } class S3DestinationConfiguration(AWSProperty): props = { "BucketName": (str, True), } class DestinationConfiguration(AWSProperty): props = { "S3": (S3DestinationConfiguration, True), } class RecordingConfiguration(AWSObject): resource_type = "AWS::IVS::RecordingConfiguration" props = { "DestinationConfiguration": (DestinationConfiguration, True), "Name": (str, False), "Tags": (Tags, False), } class StreamKey(AWSObject): resource_type = "AWS::IVS::StreamKey" props = { "ChannelArn": (str, True), "Tags": (Tags, False), }
<commit_before># Copyright (c) 2012-2021, Mark Peek <mark@peek.org> # All rights reserved. # # See LICENSE file for full license. # # *** Do not modify - this file is autogenerated *** # Resource specification version: 25.0.0 from troposphere import Tags from . import AWSObject from .validators import boolean class Channel(AWSObject): resource_type = "AWS::IVS::Channel" props = { "Authorized": (boolean, False), "LatencyMode": (str, False), "Name": (str, False), "Tags": (Tags, False), "Type": (str, False), } class PlaybackKeyPair(AWSObject): resource_type = "AWS::IVS::PlaybackKeyPair" props = { "Name": (str, False), "PublicKeyMaterial": (str, True), "Tags": (Tags, False), } class StreamKey(AWSObject): resource_type = "AWS::IVS::StreamKey" props = { "ChannelArn": (str, True), "Tags": (Tags, False), } <commit_msg>Update IVS per 2021-04-15 changes<commit_after>
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org> # All rights reserved. # # See LICENSE file for full license. # # *** Do not modify - this file is autogenerated *** # Resource specification version: 35.0.0 from troposphere import Tags from . import AWSObject, AWSProperty from .validators import boolean class Channel(AWSObject): resource_type = "AWS::IVS::Channel" props = { "Authorized": (boolean, False), "LatencyMode": (str, False), "Name": (str, False), "RecordingConfigurationArn": (str, False), "Tags": (Tags, False), "Type": (str, False), } class PlaybackKeyPair(AWSObject): resource_type = "AWS::IVS::PlaybackKeyPair" props = { "Name": (str, False), "PublicKeyMaterial": (str, True), "Tags": (Tags, False), } class S3DestinationConfiguration(AWSProperty): props = { "BucketName": (str, True), } class DestinationConfiguration(AWSProperty): props = { "S3": (S3DestinationConfiguration, True), } class RecordingConfiguration(AWSObject): resource_type = "AWS::IVS::RecordingConfiguration" props = { "DestinationConfiguration": (DestinationConfiguration, True), "Name": (str, False), "Tags": (Tags, False), } class StreamKey(AWSObject): resource_type = "AWS::IVS::StreamKey" props = { "ChannelArn": (str, True), "Tags": (Tags, False), }
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org> # All rights reserved. # # See LICENSE file for full license. # # *** Do not modify - this file is autogenerated *** # Resource specification version: 25.0.0 from troposphere import Tags from . import AWSObject from .validators import boolean class Channel(AWSObject): resource_type = "AWS::IVS::Channel" props = { "Authorized": (boolean, False), "LatencyMode": (str, False), "Name": (str, False), "Tags": (Tags, False), "Type": (str, False), } class PlaybackKeyPair(AWSObject): resource_type = "AWS::IVS::PlaybackKeyPair" props = { "Name": (str, False), "PublicKeyMaterial": (str, True), "Tags": (Tags, False), } class StreamKey(AWSObject): resource_type = "AWS::IVS::StreamKey" props = { "ChannelArn": (str, True), "Tags": (Tags, False), } Update IVS per 2021-04-15 changes# Copyright (c) 2012-2021, Mark Peek <mark@peek.org> # All rights reserved. # # See LICENSE file for full license. # # *** Do not modify - this file is autogenerated *** # Resource specification version: 35.0.0 from troposphere import Tags from . import AWSObject, AWSProperty from .validators import boolean class Channel(AWSObject): resource_type = "AWS::IVS::Channel" props = { "Authorized": (boolean, False), "LatencyMode": (str, False), "Name": (str, False), "RecordingConfigurationArn": (str, False), "Tags": (Tags, False), "Type": (str, False), } class PlaybackKeyPair(AWSObject): resource_type = "AWS::IVS::PlaybackKeyPair" props = { "Name": (str, False), "PublicKeyMaterial": (str, True), "Tags": (Tags, False), } class S3DestinationConfiguration(AWSProperty): props = { "BucketName": (str, True), } class DestinationConfiguration(AWSProperty): props = { "S3": (S3DestinationConfiguration, True), } class RecordingConfiguration(AWSObject): resource_type = "AWS::IVS::RecordingConfiguration" props = { "DestinationConfiguration": (DestinationConfiguration, True), "Name": (str, False), "Tags": (Tags, False), } class StreamKey(AWSObject): resource_type = "AWS::IVS::StreamKey" props = { "ChannelArn": (str, True), "Tags": (Tags, False), }
<commit_before># Copyright (c) 2012-2021, Mark Peek <mark@peek.org> # All rights reserved. # # See LICENSE file for full license. # # *** Do not modify - this file is autogenerated *** # Resource specification version: 25.0.0 from troposphere import Tags from . import AWSObject from .validators import boolean class Channel(AWSObject): resource_type = "AWS::IVS::Channel" props = { "Authorized": (boolean, False), "LatencyMode": (str, False), "Name": (str, False), "Tags": (Tags, False), "Type": (str, False), } class PlaybackKeyPair(AWSObject): resource_type = "AWS::IVS::PlaybackKeyPair" props = { "Name": (str, False), "PublicKeyMaterial": (str, True), "Tags": (Tags, False), } class StreamKey(AWSObject): resource_type = "AWS::IVS::StreamKey" props = { "ChannelArn": (str, True), "Tags": (Tags, False), } <commit_msg>Update IVS per 2021-04-15 changes<commit_after># Copyright (c) 2012-2021, Mark Peek <mark@peek.org> # All rights reserved. # # See LICENSE file for full license. # # *** Do not modify - this file is autogenerated *** # Resource specification version: 35.0.0 from troposphere import Tags from . import AWSObject, AWSProperty from .validators import boolean class Channel(AWSObject): resource_type = "AWS::IVS::Channel" props = { "Authorized": (boolean, False), "LatencyMode": (str, False), "Name": (str, False), "RecordingConfigurationArn": (str, False), "Tags": (Tags, False), "Type": (str, False), } class PlaybackKeyPair(AWSObject): resource_type = "AWS::IVS::PlaybackKeyPair" props = { "Name": (str, False), "PublicKeyMaterial": (str, True), "Tags": (Tags, False), } class S3DestinationConfiguration(AWSProperty): props = { "BucketName": (str, True), } class DestinationConfiguration(AWSProperty): props = { "S3": (S3DestinationConfiguration, True), } class RecordingConfiguration(AWSObject): resource_type = "AWS::IVS::RecordingConfiguration" props = { "DestinationConfiguration": (DestinationConfiguration, True), "Name": (str, False), "Tags": (Tags, False), } class StreamKey(AWSObject): resource_type = "AWS::IVS::StreamKey" props = { "ChannelArn": (str, True), "Tags": (Tags, False), }
61b38528b60203003b9595f7ba2204c287dc6970
string/compress.py
string/compress.py
# Compress string using counts of repeated characters def compress_str(str): output = "" curr_char = "" char_count = "" for i in str: if curr_char != str[i]: output = output + curr_char + char_count # add new unique character and its count to our output curr_char = str[i] # move on to the next character in string char_count = 1 # reset count to 1
# Compress string using counts of repeated characters def compress_str(str): output = "" curr_char = "" char_count = "" for i in str: if curr_char != str[i]: output = output + curr_char + char_count # add new unique character and its count to our output curr_char = str[i] # move on to the next character in string char_count = 1 # reset count to 1 else: # add to repeated count if there is a match char_count += 1
Add to current count if there is a match
Add to current count if there is a match
Python
mit
derekmpham/interview-prep,derekmpham/interview-prep
# Compress string using counts of repeated characters def compress_str(str): output = "" curr_char = "" char_count = "" for i in str: if curr_char != str[i]: output = output + curr_char + char_count # add new unique character and its count to our output curr_char = str[i] # move on to the next character in string char_count = 1 # reset count to 1 Add to current count if there is a match
# Compress string using counts of repeated characters def compress_str(str): output = "" curr_char = "" char_count = "" for i in str: if curr_char != str[i]: output = output + curr_char + char_count # add new unique character and its count to our output curr_char = str[i] # move on to the next character in string char_count = 1 # reset count to 1 else: # add to repeated count if there is a match char_count += 1
<commit_before># Compress string using counts of repeated characters def compress_str(str): output = "" curr_char = "" char_count = "" for i in str: if curr_char != str[i]: output = output + curr_char + char_count # add new unique character and its count to our output curr_char = str[i] # move on to the next character in string char_count = 1 # reset count to 1 <commit_msg>Add to current count if there is a match<commit_after>
# Compress string using counts of repeated characters def compress_str(str): output = "" curr_char = "" char_count = "" for i in str: if curr_char != str[i]: output = output + curr_char + char_count # add new unique character and its count to our output curr_char = str[i] # move on to the next character in string char_count = 1 # reset count to 1 else: # add to repeated count if there is a match char_count += 1
# Compress string using counts of repeated characters def compress_str(str): output = "" curr_char = "" char_count = "" for i in str: if curr_char != str[i]: output = output + curr_char + char_count # add new unique character and its count to our output curr_char = str[i] # move on to the next character in string char_count = 1 # reset count to 1 Add to current count if there is a match# Compress string using counts of repeated characters def compress_str(str): output = "" curr_char = "" char_count = "" for i in str: if curr_char != str[i]: output = output + curr_char + char_count # add new unique character and its count to our output curr_char = str[i] # move on to the next character in string char_count = 1 # reset count to 1 else: # add to repeated count if there is a match char_count += 1
<commit_before># Compress string using counts of repeated characters def compress_str(str): output = "" curr_char = "" char_count = "" for i in str: if curr_char != str[i]: output = output + curr_char + char_count # add new unique character and its count to our output curr_char = str[i] # move on to the next character in string char_count = 1 # reset count to 1 <commit_msg>Add to current count if there is a match<commit_after># Compress string using counts of repeated characters def compress_str(str): output = "" curr_char = "" char_count = "" for i in str: if curr_char != str[i]: output = output + curr_char + char_count # add new unique character and its count to our output curr_char = str[i] # move on to the next character in string char_count = 1 # reset count to 1 else: # add to repeated count if there is a match char_count += 1
58f5d541da1e9e234258985b3362967a9c0d7b67
Discord/utilities/errors.py
Discord/utilities/errors.py
from discord.ext.commands.errors import CommandError class NotServerOwner(CommandError): '''Not Server Owner''' pass class VoiceNotConnected(CommandError): '''Voice Not Connected''' pass class PermittedVoiceNotConnected(VoiceNotConnected): '''Permitted, but Voice Not Connected''' pass class NotPermittedVoiceNotConnected(VoiceNotConnected): '''Voice Not Connected, and Not Permitted''' pass class MissingPermissions(CommandError): '''Missing Permissions''' pass class NotPermitted(CommandError): '''Not Permitted''' pass class AudioError(CommandError): '''Audio Error''' pass
from discord.ext.commands.errors import CommandError class NotServerOwner(CommandError): '''Not Server Owner''' pass class VoiceNotConnected(CommandError): '''Voice Not Connected''' pass class PermittedVoiceNotConnected(VoiceNotConnected): '''Permitted, but Voice Not Connected''' pass class NotPermittedVoiceNotConnected(VoiceNotConnected): '''Voice Not Connected, and Not Permitted''' pass class NotPermitted(CommandError): '''Not Permitted''' pass class AudioError(CommandError): '''Audio Error''' pass
Remove no longer used custom Missing Permissions error
[Discord] Remove no longer used custom Missing Permissions error
Python
mit
Harmon758/Harmonbot,Harmon758/Harmonbot
from discord.ext.commands.errors import CommandError class NotServerOwner(CommandError): '''Not Server Owner''' pass class VoiceNotConnected(CommandError): '''Voice Not Connected''' pass class PermittedVoiceNotConnected(VoiceNotConnected): '''Permitted, but Voice Not Connected''' pass class NotPermittedVoiceNotConnected(VoiceNotConnected): '''Voice Not Connected, and Not Permitted''' pass class MissingPermissions(CommandError): '''Missing Permissions''' pass class NotPermitted(CommandError): '''Not Permitted''' pass class AudioError(CommandError): '''Audio Error''' pass [Discord] Remove no longer used custom Missing Permissions error
from discord.ext.commands.errors import CommandError class NotServerOwner(CommandError): '''Not Server Owner''' pass class VoiceNotConnected(CommandError): '''Voice Not Connected''' pass class PermittedVoiceNotConnected(VoiceNotConnected): '''Permitted, but Voice Not Connected''' pass class NotPermittedVoiceNotConnected(VoiceNotConnected): '''Voice Not Connected, and Not Permitted''' pass class NotPermitted(CommandError): '''Not Permitted''' pass class AudioError(CommandError): '''Audio Error''' pass
<commit_before> from discord.ext.commands.errors import CommandError class NotServerOwner(CommandError): '''Not Server Owner''' pass class VoiceNotConnected(CommandError): '''Voice Not Connected''' pass class PermittedVoiceNotConnected(VoiceNotConnected): '''Permitted, but Voice Not Connected''' pass class NotPermittedVoiceNotConnected(VoiceNotConnected): '''Voice Not Connected, and Not Permitted''' pass class MissingPermissions(CommandError): '''Missing Permissions''' pass class NotPermitted(CommandError): '''Not Permitted''' pass class AudioError(CommandError): '''Audio Error''' pass <commit_msg>[Discord] Remove no longer used custom Missing Permissions error<commit_after>
from discord.ext.commands.errors import CommandError class NotServerOwner(CommandError): '''Not Server Owner''' pass class VoiceNotConnected(CommandError): '''Voice Not Connected''' pass class PermittedVoiceNotConnected(VoiceNotConnected): '''Permitted, but Voice Not Connected''' pass class NotPermittedVoiceNotConnected(VoiceNotConnected): '''Voice Not Connected, and Not Permitted''' pass class NotPermitted(CommandError): '''Not Permitted''' pass class AudioError(CommandError): '''Audio Error''' pass
from discord.ext.commands.errors import CommandError class NotServerOwner(CommandError): '''Not Server Owner''' pass class VoiceNotConnected(CommandError): '''Voice Not Connected''' pass class PermittedVoiceNotConnected(VoiceNotConnected): '''Permitted, but Voice Not Connected''' pass class NotPermittedVoiceNotConnected(VoiceNotConnected): '''Voice Not Connected, and Not Permitted''' pass class MissingPermissions(CommandError): '''Missing Permissions''' pass class NotPermitted(CommandError): '''Not Permitted''' pass class AudioError(CommandError): '''Audio Error''' pass [Discord] Remove no longer used custom Missing Permissions error from discord.ext.commands.errors import CommandError class NotServerOwner(CommandError): '''Not Server Owner''' pass class VoiceNotConnected(CommandError): '''Voice Not Connected''' pass class PermittedVoiceNotConnected(VoiceNotConnected): '''Permitted, but Voice Not Connected''' pass class NotPermittedVoiceNotConnected(VoiceNotConnected): '''Voice Not Connected, and Not Permitted''' pass class NotPermitted(CommandError): '''Not Permitted''' pass class AudioError(CommandError): '''Audio Error''' pass
<commit_before> from discord.ext.commands.errors import CommandError class NotServerOwner(CommandError): '''Not Server Owner''' pass class VoiceNotConnected(CommandError): '''Voice Not Connected''' pass class PermittedVoiceNotConnected(VoiceNotConnected): '''Permitted, but Voice Not Connected''' pass class NotPermittedVoiceNotConnected(VoiceNotConnected): '''Voice Not Connected, and Not Permitted''' pass class MissingPermissions(CommandError): '''Missing Permissions''' pass class NotPermitted(CommandError): '''Not Permitted''' pass class AudioError(CommandError): '''Audio Error''' pass <commit_msg>[Discord] Remove no longer used custom Missing Permissions error<commit_after> from discord.ext.commands.errors import CommandError class NotServerOwner(CommandError): '''Not Server Owner''' pass class VoiceNotConnected(CommandError): '''Voice Not Connected''' pass class PermittedVoiceNotConnected(VoiceNotConnected): '''Permitted, but Voice Not Connected''' pass class NotPermittedVoiceNotConnected(VoiceNotConnected): '''Voice Not Connected, and Not Permitted''' pass class NotPermitted(CommandError): '''Not Permitted''' pass class AudioError(CommandError): '''Audio Error''' pass
91ffbe22e56387491775a569e237c4e46495c6a9
nyuki/workflow/tasks/task_selector.py
nyuki/workflow/tasks/task_selector.py
import logging from tukio import Workflow from tukio.task import register from tukio.task.holder import TaskHolder from nyuki.utils.evaluate import ConditionBlock from nyuki.workflow.tasks.utils import generate_schema log = logging.getLogger(__name__) class TaskConditionBlock(ConditionBlock): """ Overrides work on ConditionBlock from the factory task to set next workflow tasks. """ def __init__(self, conditions, workflow): super().__init__(conditions) self._workflow = workflow def condition_validated(self, condition, data): """ Set next workflow tasks upon validating a condition. """ self._workflow.set_next_tasks(condition['tasks']) @register('task_selector', 'execute') class TaskSelector(TaskHolder): SCHEMA = generate_schema(tasks={ 'type': 'object', 'properties': { 'type': {'type': 'string', 'enum': ['task-selector']}, 'tasks': { 'type': 'array', 'items': { 'type': 'string', 'minLength': 1, 'uniqueItems': True } } } }) async def execute(self, event): data = event.data workflow = Workflow.current_workflow() for block in self.config['rules']: if block['type'] == 'task-selector': workflow.set_next_tasks(block['tasks']) elif block['type'] == 'condition-block': TaskConditionBlock(block['conditions'], workflow).apply(data) return data
import logging from tukio import Workflow from tukio.task import register from tukio.task.holder import TaskHolder from nyuki.utils.evaluate import ConditionBlock from nyuki.workflow.tasks.utils import generate_schema log = logging.getLogger(__name__) class TaskConditionBlock(ConditionBlock): """ Overrides work on ConditionBlock from the factory task to set next workflow tasks. """ def __init__(self, conditions, workflow): super().__init__(conditions) self._workflow = workflow def condition_validated(self, condition, data): """ Set next workflow tasks upon validating a condition. """ if condition['rules']: self._workflow.set_next_tasks(condition['rules'][0]['tasks']) @register('task_selector', 'execute') class TaskSelector(TaskHolder): SCHEMA = generate_schema(tasks={ 'type': 'object', 'properties': { 'type': {'type': 'string', 'enum': ['task-selector']}, 'tasks': { 'type': 'array', 'items': { 'type': 'string', 'minLength': 1, 'uniqueItems': True } } } }) async def execute(self, event): data = event.data workflow = Workflow.current_workflow() for block in self.config['rules']: if block['type'] == 'task-selector': workflow.set_next_tasks(block['tasks']) elif block['type'] == 'condition-block': TaskConditionBlock(block['conditions'], workflow).apply(data) return data
Fix an issue with the child-task selector.
Fix an issue with the child-task selector.
Python
apache-2.0
optiflows/nyuki,gdraynz/nyuki,optiflows/nyuki,gdraynz/nyuki
import logging from tukio import Workflow from tukio.task import register from tukio.task.holder import TaskHolder from nyuki.utils.evaluate import ConditionBlock from nyuki.workflow.tasks.utils import generate_schema log = logging.getLogger(__name__) class TaskConditionBlock(ConditionBlock): """ Overrides work on ConditionBlock from the factory task to set next workflow tasks. """ def __init__(self, conditions, workflow): super().__init__(conditions) self._workflow = workflow def condition_validated(self, condition, data): """ Set next workflow tasks upon validating a condition. """ self._workflow.set_next_tasks(condition['tasks']) @register('task_selector', 'execute') class TaskSelector(TaskHolder): SCHEMA = generate_schema(tasks={ 'type': 'object', 'properties': { 'type': {'type': 'string', 'enum': ['task-selector']}, 'tasks': { 'type': 'array', 'items': { 'type': 'string', 'minLength': 1, 'uniqueItems': True } } } }) async def execute(self, event): data = event.data workflow = Workflow.current_workflow() for block in self.config['rules']: if block['type'] == 'task-selector': workflow.set_next_tasks(block['tasks']) elif block['type'] == 'condition-block': TaskConditionBlock(block['conditions'], workflow).apply(data) return data Fix an issue with the child-task selector.
import logging from tukio import Workflow from tukio.task import register from tukio.task.holder import TaskHolder from nyuki.utils.evaluate import ConditionBlock from nyuki.workflow.tasks.utils import generate_schema log = logging.getLogger(__name__) class TaskConditionBlock(ConditionBlock): """ Overrides work on ConditionBlock from the factory task to set next workflow tasks. """ def __init__(self, conditions, workflow): super().__init__(conditions) self._workflow = workflow def condition_validated(self, condition, data): """ Set next workflow tasks upon validating a condition. """ if condition['rules']: self._workflow.set_next_tasks(condition['rules'][0]['tasks']) @register('task_selector', 'execute') class TaskSelector(TaskHolder): SCHEMA = generate_schema(tasks={ 'type': 'object', 'properties': { 'type': {'type': 'string', 'enum': ['task-selector']}, 'tasks': { 'type': 'array', 'items': { 'type': 'string', 'minLength': 1, 'uniqueItems': True } } } }) async def execute(self, event): data = event.data workflow = Workflow.current_workflow() for block in self.config['rules']: if block['type'] == 'task-selector': workflow.set_next_tasks(block['tasks']) elif block['type'] == 'condition-block': TaskConditionBlock(block['conditions'], workflow).apply(data) return data
<commit_before>import logging from tukio import Workflow from tukio.task import register from tukio.task.holder import TaskHolder from nyuki.utils.evaluate import ConditionBlock from nyuki.workflow.tasks.utils import generate_schema log = logging.getLogger(__name__) class TaskConditionBlock(ConditionBlock): """ Overrides work on ConditionBlock from the factory task to set next workflow tasks. """ def __init__(self, conditions, workflow): super().__init__(conditions) self._workflow = workflow def condition_validated(self, condition, data): """ Set next workflow tasks upon validating a condition. """ self._workflow.set_next_tasks(condition['tasks']) @register('task_selector', 'execute') class TaskSelector(TaskHolder): SCHEMA = generate_schema(tasks={ 'type': 'object', 'properties': { 'type': {'type': 'string', 'enum': ['task-selector']}, 'tasks': { 'type': 'array', 'items': { 'type': 'string', 'minLength': 1, 'uniqueItems': True } } } }) async def execute(self, event): data = event.data workflow = Workflow.current_workflow() for block in self.config['rules']: if block['type'] == 'task-selector': workflow.set_next_tasks(block['tasks']) elif block['type'] == 'condition-block': TaskConditionBlock(block['conditions'], workflow).apply(data) return data <commit_msg>Fix an issue with the child-task selector.<commit_after>
import logging from tukio import Workflow from tukio.task import register from tukio.task.holder import TaskHolder from nyuki.utils.evaluate import ConditionBlock from nyuki.workflow.tasks.utils import generate_schema log = logging.getLogger(__name__) class TaskConditionBlock(ConditionBlock): """ Overrides work on ConditionBlock from the factory task to set next workflow tasks. """ def __init__(self, conditions, workflow): super().__init__(conditions) self._workflow = workflow def condition_validated(self, condition, data): """ Set next workflow tasks upon validating a condition. """ if condition['rules']: self._workflow.set_next_tasks(condition['rules'][0]['tasks']) @register('task_selector', 'execute') class TaskSelector(TaskHolder): SCHEMA = generate_schema(tasks={ 'type': 'object', 'properties': { 'type': {'type': 'string', 'enum': ['task-selector']}, 'tasks': { 'type': 'array', 'items': { 'type': 'string', 'minLength': 1, 'uniqueItems': True } } } }) async def execute(self, event): data = event.data workflow = Workflow.current_workflow() for block in self.config['rules']: if block['type'] == 'task-selector': workflow.set_next_tasks(block['tasks']) elif block['type'] == 'condition-block': TaskConditionBlock(block['conditions'], workflow).apply(data) return data
import logging from tukio import Workflow from tukio.task import register from tukio.task.holder import TaskHolder from nyuki.utils.evaluate import ConditionBlock from nyuki.workflow.tasks.utils import generate_schema log = logging.getLogger(__name__) class TaskConditionBlock(ConditionBlock): """ Overrides work on ConditionBlock from the factory task to set next workflow tasks. """ def __init__(self, conditions, workflow): super().__init__(conditions) self._workflow = workflow def condition_validated(self, condition, data): """ Set next workflow tasks upon validating a condition. """ self._workflow.set_next_tasks(condition['tasks']) @register('task_selector', 'execute') class TaskSelector(TaskHolder): SCHEMA = generate_schema(tasks={ 'type': 'object', 'properties': { 'type': {'type': 'string', 'enum': ['task-selector']}, 'tasks': { 'type': 'array', 'items': { 'type': 'string', 'minLength': 1, 'uniqueItems': True } } } }) async def execute(self, event): data = event.data workflow = Workflow.current_workflow() for block in self.config['rules']: if block['type'] == 'task-selector': workflow.set_next_tasks(block['tasks']) elif block['type'] == 'condition-block': TaskConditionBlock(block['conditions'], workflow).apply(data) return data Fix an issue with the child-task selector.import logging from tukio import Workflow from tukio.task import register from tukio.task.holder import TaskHolder from nyuki.utils.evaluate import ConditionBlock from nyuki.workflow.tasks.utils import generate_schema log = logging.getLogger(__name__) class TaskConditionBlock(ConditionBlock): """ Overrides work on ConditionBlock from the factory task to set next workflow tasks. """ def __init__(self, conditions, workflow): super().__init__(conditions) self._workflow = workflow def condition_validated(self, condition, data): """ Set next workflow tasks upon validating a condition. """ if condition['rules']: self._workflow.set_next_tasks(condition['rules'][0]['tasks']) @register('task_selector', 'execute') class TaskSelector(TaskHolder): SCHEMA = generate_schema(tasks={ 'type': 'object', 'properties': { 'type': {'type': 'string', 'enum': ['task-selector']}, 'tasks': { 'type': 'array', 'items': { 'type': 'string', 'minLength': 1, 'uniqueItems': True } } } }) async def execute(self, event): data = event.data workflow = Workflow.current_workflow() for block in self.config['rules']: if block['type'] == 'task-selector': workflow.set_next_tasks(block['tasks']) elif block['type'] == 'condition-block': TaskConditionBlock(block['conditions'], workflow).apply(data) return data
<commit_before>import logging from tukio import Workflow from tukio.task import register from tukio.task.holder import TaskHolder from nyuki.utils.evaluate import ConditionBlock from nyuki.workflow.tasks.utils import generate_schema log = logging.getLogger(__name__) class TaskConditionBlock(ConditionBlock): """ Overrides work on ConditionBlock from the factory task to set next workflow tasks. """ def __init__(self, conditions, workflow): super().__init__(conditions) self._workflow = workflow def condition_validated(self, condition, data): """ Set next workflow tasks upon validating a condition. """ self._workflow.set_next_tasks(condition['tasks']) @register('task_selector', 'execute') class TaskSelector(TaskHolder): SCHEMA = generate_schema(tasks={ 'type': 'object', 'properties': { 'type': {'type': 'string', 'enum': ['task-selector']}, 'tasks': { 'type': 'array', 'items': { 'type': 'string', 'minLength': 1, 'uniqueItems': True } } } }) async def execute(self, event): data = event.data workflow = Workflow.current_workflow() for block in self.config['rules']: if block['type'] == 'task-selector': workflow.set_next_tasks(block['tasks']) elif block['type'] == 'condition-block': TaskConditionBlock(block['conditions'], workflow).apply(data) return data <commit_msg>Fix an issue with the child-task selector.<commit_after>import logging from tukio import Workflow from tukio.task import register from tukio.task.holder import TaskHolder from nyuki.utils.evaluate import ConditionBlock from nyuki.workflow.tasks.utils import generate_schema log = logging.getLogger(__name__) class TaskConditionBlock(ConditionBlock): """ Overrides work on ConditionBlock from the factory task to set next workflow tasks. """ def __init__(self, conditions, workflow): super().__init__(conditions) self._workflow = workflow def condition_validated(self, condition, data): """ Set next workflow tasks upon validating a condition. """ if condition['rules']: self._workflow.set_next_tasks(condition['rules'][0]['tasks']) @register('task_selector', 'execute') class TaskSelector(TaskHolder): SCHEMA = generate_schema(tasks={ 'type': 'object', 'properties': { 'type': {'type': 'string', 'enum': ['task-selector']}, 'tasks': { 'type': 'array', 'items': { 'type': 'string', 'minLength': 1, 'uniqueItems': True } } } }) async def execute(self, event): data = event.data workflow = Workflow.current_workflow() for block in self.config['rules']: if block['type'] == 'task-selector': workflow.set_next_tasks(block['tasks']) elif block['type'] == 'condition-block': TaskConditionBlock(block['conditions'], workflow).apply(data) return data
417ffca6a10edc87fc36b1c7c47e7dea36cecd2e
test/test_basic.py
test/test_basic.py
import random import markovify import sys, os HERE = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(HERE, "texts/sherlock.txt")) as f: sherlock = f.read() def test_text_too_small(): text = u"Example phrase. This is another example sentence." text_model = markovify.Text(text) assert(text_model.make_sentence() == None) def test_sherlock(): text_model = markovify.Text(sherlock) sent = text_model.make_sentence() assert(len(sent) != 0) def test_json(): text_model = markovify.Text(sherlock) json_model = text_model.chain.to_json() stored_chain = markovify.Chain.from_json(json_model) new_text_model = markovify.Text(sherlock, chain=stored_chain) sent = text_model.make_sentence() assert(len(sent) != 0)
import random import markovify import sys, os import operator HERE = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(HERE, "texts/sherlock.txt")) as f: sherlock = f.read() def test_text_too_small(): text = u"Example phrase. This is another example sentence." text_model = markovify.Text(text) assert(text_model.make_sentence() == None) def test_sherlock(): text_model = markovify.Text(sherlock) sent = text_model.make_sentence() assert(len(sent) != 0) def get_sorted(chain_json): return sorted(chain_json, key=operator.itemgetter(0)) def test_json(): text_model = markovify.Text(sherlock) chain_json = text_model.chain.to_json() stored_chain = markovify.Chain.from_json(chain_json) assert(get_sorted(stored_chain.to_json()) == get_sorted(chain_json)) new_text_model = markovify.Text(sherlock, chain=stored_chain) sent = text_model.make_sentence() assert(len(sent) != 0)
Add test for chain-JSON equality
Add test for chain-JSON equality
Python
mit
jsvine/markovify,orf/markovify
import random import markovify import sys, os HERE = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(HERE, "texts/sherlock.txt")) as f: sherlock = f.read() def test_text_too_small(): text = u"Example phrase. This is another example sentence." text_model = markovify.Text(text) assert(text_model.make_sentence() == None) def test_sherlock(): text_model = markovify.Text(sherlock) sent = text_model.make_sentence() assert(len(sent) != 0) def test_json(): text_model = markovify.Text(sherlock) json_model = text_model.chain.to_json() stored_chain = markovify.Chain.from_json(json_model) new_text_model = markovify.Text(sherlock, chain=stored_chain) sent = text_model.make_sentence() assert(len(sent) != 0) Add test for chain-JSON equality
import random import markovify import sys, os import operator HERE = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(HERE, "texts/sherlock.txt")) as f: sherlock = f.read() def test_text_too_small(): text = u"Example phrase. This is another example sentence." text_model = markovify.Text(text) assert(text_model.make_sentence() == None) def test_sherlock(): text_model = markovify.Text(sherlock) sent = text_model.make_sentence() assert(len(sent) != 0) def get_sorted(chain_json): return sorted(chain_json, key=operator.itemgetter(0)) def test_json(): text_model = markovify.Text(sherlock) chain_json = text_model.chain.to_json() stored_chain = markovify.Chain.from_json(chain_json) assert(get_sorted(stored_chain.to_json()) == get_sorted(chain_json)) new_text_model = markovify.Text(sherlock, chain=stored_chain) sent = text_model.make_sentence() assert(len(sent) != 0)
<commit_before>import random import markovify import sys, os HERE = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(HERE, "texts/sherlock.txt")) as f: sherlock = f.read() def test_text_too_small(): text = u"Example phrase. This is another example sentence." text_model = markovify.Text(text) assert(text_model.make_sentence() == None) def test_sherlock(): text_model = markovify.Text(sherlock) sent = text_model.make_sentence() assert(len(sent) != 0) def test_json(): text_model = markovify.Text(sherlock) json_model = text_model.chain.to_json() stored_chain = markovify.Chain.from_json(json_model) new_text_model = markovify.Text(sherlock, chain=stored_chain) sent = text_model.make_sentence() assert(len(sent) != 0) <commit_msg>Add test for chain-JSON equality<commit_after>
import random import markovify import sys, os import operator HERE = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(HERE, "texts/sherlock.txt")) as f: sherlock = f.read() def test_text_too_small(): text = u"Example phrase. This is another example sentence." text_model = markovify.Text(text) assert(text_model.make_sentence() == None) def test_sherlock(): text_model = markovify.Text(sherlock) sent = text_model.make_sentence() assert(len(sent) != 0) def get_sorted(chain_json): return sorted(chain_json, key=operator.itemgetter(0)) def test_json(): text_model = markovify.Text(sherlock) chain_json = text_model.chain.to_json() stored_chain = markovify.Chain.from_json(chain_json) assert(get_sorted(stored_chain.to_json()) == get_sorted(chain_json)) new_text_model = markovify.Text(sherlock, chain=stored_chain) sent = text_model.make_sentence() assert(len(sent) != 0)
import random import markovify import sys, os HERE = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(HERE, "texts/sherlock.txt")) as f: sherlock = f.read() def test_text_too_small(): text = u"Example phrase. This is another example sentence." text_model = markovify.Text(text) assert(text_model.make_sentence() == None) def test_sherlock(): text_model = markovify.Text(sherlock) sent = text_model.make_sentence() assert(len(sent) != 0) def test_json(): text_model = markovify.Text(sherlock) json_model = text_model.chain.to_json() stored_chain = markovify.Chain.from_json(json_model) new_text_model = markovify.Text(sherlock, chain=stored_chain) sent = text_model.make_sentence() assert(len(sent) != 0) Add test for chain-JSON equalityimport random import markovify import sys, os import operator HERE = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(HERE, "texts/sherlock.txt")) as f: sherlock = f.read() def test_text_too_small(): text = u"Example phrase. This is another example sentence." text_model = markovify.Text(text) assert(text_model.make_sentence() == None) def test_sherlock(): text_model = markovify.Text(sherlock) sent = text_model.make_sentence() assert(len(sent) != 0) def get_sorted(chain_json): return sorted(chain_json, key=operator.itemgetter(0)) def test_json(): text_model = markovify.Text(sherlock) chain_json = text_model.chain.to_json() stored_chain = markovify.Chain.from_json(chain_json) assert(get_sorted(stored_chain.to_json()) == get_sorted(chain_json)) new_text_model = markovify.Text(sherlock, chain=stored_chain) sent = text_model.make_sentence() assert(len(sent) != 0)
<commit_before>import random import markovify import sys, os HERE = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(HERE, "texts/sherlock.txt")) as f: sherlock = f.read() def test_text_too_small(): text = u"Example phrase. This is another example sentence." text_model = markovify.Text(text) assert(text_model.make_sentence() == None) def test_sherlock(): text_model = markovify.Text(sherlock) sent = text_model.make_sentence() assert(len(sent) != 0) def test_json(): text_model = markovify.Text(sherlock) json_model = text_model.chain.to_json() stored_chain = markovify.Chain.from_json(json_model) new_text_model = markovify.Text(sherlock, chain=stored_chain) sent = text_model.make_sentence() assert(len(sent) != 0) <commit_msg>Add test for chain-JSON equality<commit_after>import random import markovify import sys, os import operator HERE = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(HERE, "texts/sherlock.txt")) as f: sherlock = f.read() def test_text_too_small(): text = u"Example phrase. This is another example sentence." text_model = markovify.Text(text) assert(text_model.make_sentence() == None) def test_sherlock(): text_model = markovify.Text(sherlock) sent = text_model.make_sentence() assert(len(sent) != 0) def get_sorted(chain_json): return sorted(chain_json, key=operator.itemgetter(0)) def test_json(): text_model = markovify.Text(sherlock) chain_json = text_model.chain.to_json() stored_chain = markovify.Chain.from_json(chain_json) assert(get_sorted(stored_chain.to_json()) == get_sorted(chain_json)) new_text_model = markovify.Text(sherlock, chain=stored_chain) sent = text_model.make_sentence() assert(len(sent) != 0)
db13b52924a96bdfe8e38c20df07b62b6c455aa8
Instanssi/dblog/handlers.py
Instanssi/dblog/handlers.py
# -*- coding: utf-8 -*- from logging import Handler from datetime import datetime class DBLogHandler(Handler, object): def __init__(self): super(DBLogHandler, self).__init__() def emit(self, record): from models import DBLogEntry as _LogEntry entry = _LogEntry() entry.level = record.levelname entry.message = self.format(record) entry.module = record.name try: entry.event = record.event except: pass try: entry.user = record.user except: pass entry.save()
# -*- coding: utf-8 -*- from logging import Handler from datetime import datetime class DBLogHandler(Handler, object): def __init__(self): super(DBLogHandler, self).__init__() def emit(self, record): from models import DBLogEntry as _LogEntry entry = _LogEntry() entry.level = record.levelname entry.message = self.format(record) entry.module = record.name try: entry.event = record.event except: try: entry.event_id = record.event_id except: pass try: entry.user = record.user except: pass entry.save()
Allow event saving by id
dblog: Allow event saving by id
Python
mit
Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org
# -*- coding: utf-8 -*- from logging import Handler from datetime import datetime class DBLogHandler(Handler, object): def __init__(self): super(DBLogHandler, self).__init__() def emit(self, record): from models import DBLogEntry as _LogEntry entry = _LogEntry() entry.level = record.levelname entry.message = self.format(record) entry.module = record.name try: entry.event = record.event except: pass try: entry.user = record.user except: pass entry.save() dblog: Allow event saving by id
# -*- coding: utf-8 -*- from logging import Handler from datetime import datetime class DBLogHandler(Handler, object): def __init__(self): super(DBLogHandler, self).__init__() def emit(self, record): from models import DBLogEntry as _LogEntry entry = _LogEntry() entry.level = record.levelname entry.message = self.format(record) entry.module = record.name try: entry.event = record.event except: try: entry.event_id = record.event_id except: pass try: entry.user = record.user except: pass entry.save()
<commit_before># -*- coding: utf-8 -*- from logging import Handler from datetime import datetime class DBLogHandler(Handler, object): def __init__(self): super(DBLogHandler, self).__init__() def emit(self, record): from models import DBLogEntry as _LogEntry entry = _LogEntry() entry.level = record.levelname entry.message = self.format(record) entry.module = record.name try: entry.event = record.event except: pass try: entry.user = record.user except: pass entry.save() <commit_msg>dblog: Allow event saving by id<commit_after>
# -*- coding: utf-8 -*- from logging import Handler from datetime import datetime class DBLogHandler(Handler, object): def __init__(self): super(DBLogHandler, self).__init__() def emit(self, record): from models import DBLogEntry as _LogEntry entry = _LogEntry() entry.level = record.levelname entry.message = self.format(record) entry.module = record.name try: entry.event = record.event except: try: entry.event_id = record.event_id except: pass try: entry.user = record.user except: pass entry.save()
# -*- coding: utf-8 -*- from logging import Handler from datetime import datetime class DBLogHandler(Handler, object): def __init__(self): super(DBLogHandler, self).__init__() def emit(self, record): from models import DBLogEntry as _LogEntry entry = _LogEntry() entry.level = record.levelname entry.message = self.format(record) entry.module = record.name try: entry.event = record.event except: pass try: entry.user = record.user except: pass entry.save() dblog: Allow event saving by id# -*- coding: utf-8 -*- from logging import Handler from datetime import datetime class DBLogHandler(Handler, object): def __init__(self): super(DBLogHandler, self).__init__() def emit(self, record): from models import DBLogEntry as _LogEntry entry = _LogEntry() entry.level = record.levelname entry.message = self.format(record) entry.module = record.name try: entry.event = record.event except: try: entry.event_id = record.event_id except: pass try: entry.user = record.user except: pass entry.save()
<commit_before># -*- coding: utf-8 -*- from logging import Handler from datetime import datetime class DBLogHandler(Handler, object): def __init__(self): super(DBLogHandler, self).__init__() def emit(self, record): from models import DBLogEntry as _LogEntry entry = _LogEntry() entry.level = record.levelname entry.message = self.format(record) entry.module = record.name try: entry.event = record.event except: pass try: entry.user = record.user except: pass entry.save() <commit_msg>dblog: Allow event saving by id<commit_after># -*- coding: utf-8 -*- from logging import Handler from datetime import datetime class DBLogHandler(Handler, object): def __init__(self): super(DBLogHandler, self).__init__() def emit(self, record): from models import DBLogEntry as _LogEntry entry = _LogEntry() entry.level = record.levelname entry.message = self.format(record) entry.module = record.name try: entry.event = record.event except: try: entry.event_id = record.event_id except: pass try: entry.user = record.user except: pass entry.save()
91b3891078b889db98d3832f0c06e465a86e52ef
django_tenants/staticfiles/storage.py
django_tenants/staticfiles/storage.py
import os from django.contrib.staticfiles.storage import StaticFilesStorage from django_tenants.files.storages import TenantStorageMixin from django.conf import settings from django.core.exceptions import ImproperlyConfigured class TenantStaticFilesStorage(TenantStorageMixin, StaticFilesStorage): """ Implementation that extends core Django's StaticFilesStorage. """ def __init__(self, location=None, base_url=None, *args, **kwargs): super(TenantStaticFilesStorage, self).__init__(location, base_url, *args, **kwargs) if hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT"): self.location = os.path.join(self.location, settings.MULTITENANT_RELATIVE_STATIC_ROOT) def path(self, name): """ if not hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT") or \ not settings.MULTITENANT_RELATIVE_STATIC_ROOT: raise ImproperlyConfigured("You're using the TenantStaticFilesStorage " "without having set the MULTITENANT_RELATIVE_STATIC_ROOT " "setting to a filesystem path.") """ return super(TenantStaticFilesStorage, self).path(name)
import os from django.contrib.staticfiles.storage import StaticFilesStorage from django_tenants.files.storages import TenantStorageMixin from django.conf import settings from django.core.exceptions import ImproperlyConfigured class TenantStaticFilesStorage(TenantStorageMixin, StaticFilesStorage): """ Implementation that extends core Django's StaticFilesStorage. """ def __init__(self, location=None, base_url=None, *args, **kwargs): super(TenantStaticFilesStorage, self).__init__(location, base_url, *args, **kwargs) if hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT"): self.location = os.path.join(self.location, settings.MULTITENANT_RELATIVE_STATIC_ROOT) """ def path(self, name): if not hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT") or \ not settings.MULTITENANT_RELATIVE_STATIC_ROOT: raise ImproperlyConfigured("You're using the TenantStaticFilesStorage " "without having set the MULTITENANT_RELATIVE_STATIC_ROOT " "setting to a filesystem path.") return super(TenantStaticFilesStorage, self).path(name) """
Fix regression in path handling of TenantStaticFileStorage.
Fix regression in path handling of TenantStaticFileStorage. Fixes #197.
Python
mit
tomturner/django-tenants,tomturner/django-tenants,tomturner/django-tenants
import os from django.contrib.staticfiles.storage import StaticFilesStorage from django_tenants.files.storages import TenantStorageMixin from django.conf import settings from django.core.exceptions import ImproperlyConfigured class TenantStaticFilesStorage(TenantStorageMixin, StaticFilesStorage): """ Implementation that extends core Django's StaticFilesStorage. """ def __init__(self, location=None, base_url=None, *args, **kwargs): super(TenantStaticFilesStorage, self).__init__(location, base_url, *args, **kwargs) if hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT"): self.location = os.path.join(self.location, settings.MULTITENANT_RELATIVE_STATIC_ROOT) def path(self, name): """ if not hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT") or \ not settings.MULTITENANT_RELATIVE_STATIC_ROOT: raise ImproperlyConfigured("You're using the TenantStaticFilesStorage " "without having set the MULTITENANT_RELATIVE_STATIC_ROOT " "setting to a filesystem path.") """ return super(TenantStaticFilesStorage, self).path(name) Fix regression in path handling of TenantStaticFileStorage. Fixes #197.
import os from django.contrib.staticfiles.storage import StaticFilesStorage from django_tenants.files.storages import TenantStorageMixin from django.conf import settings from django.core.exceptions import ImproperlyConfigured class TenantStaticFilesStorage(TenantStorageMixin, StaticFilesStorage): """ Implementation that extends core Django's StaticFilesStorage. """ def __init__(self, location=None, base_url=None, *args, **kwargs): super(TenantStaticFilesStorage, self).__init__(location, base_url, *args, **kwargs) if hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT"): self.location = os.path.join(self.location, settings.MULTITENANT_RELATIVE_STATIC_ROOT) """ def path(self, name): if not hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT") or \ not settings.MULTITENANT_RELATIVE_STATIC_ROOT: raise ImproperlyConfigured("You're using the TenantStaticFilesStorage " "without having set the MULTITENANT_RELATIVE_STATIC_ROOT " "setting to a filesystem path.") return super(TenantStaticFilesStorage, self).path(name) """
<commit_before>import os from django.contrib.staticfiles.storage import StaticFilesStorage from django_tenants.files.storages import TenantStorageMixin from django.conf import settings from django.core.exceptions import ImproperlyConfigured class TenantStaticFilesStorage(TenantStorageMixin, StaticFilesStorage): """ Implementation that extends core Django's StaticFilesStorage. """ def __init__(self, location=None, base_url=None, *args, **kwargs): super(TenantStaticFilesStorage, self).__init__(location, base_url, *args, **kwargs) if hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT"): self.location = os.path.join(self.location, settings.MULTITENANT_RELATIVE_STATIC_ROOT) def path(self, name): """ if not hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT") or \ not settings.MULTITENANT_RELATIVE_STATIC_ROOT: raise ImproperlyConfigured("You're using the TenantStaticFilesStorage " "without having set the MULTITENANT_RELATIVE_STATIC_ROOT " "setting to a filesystem path.") """ return super(TenantStaticFilesStorage, self).path(name) <commit_msg>Fix regression in path handling of TenantStaticFileStorage. Fixes #197.<commit_after>
import os from django.contrib.staticfiles.storage import StaticFilesStorage from django_tenants.files.storages import TenantStorageMixin from django.conf import settings from django.core.exceptions import ImproperlyConfigured class TenantStaticFilesStorage(TenantStorageMixin, StaticFilesStorage): """ Implementation that extends core Django's StaticFilesStorage. """ def __init__(self, location=None, base_url=None, *args, **kwargs): super(TenantStaticFilesStorage, self).__init__(location, base_url, *args, **kwargs) if hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT"): self.location = os.path.join(self.location, settings.MULTITENANT_RELATIVE_STATIC_ROOT) """ def path(self, name): if not hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT") or \ not settings.MULTITENANT_RELATIVE_STATIC_ROOT: raise ImproperlyConfigured("You're using the TenantStaticFilesStorage " "without having set the MULTITENANT_RELATIVE_STATIC_ROOT " "setting to a filesystem path.") return super(TenantStaticFilesStorage, self).path(name) """
import os from django.contrib.staticfiles.storage import StaticFilesStorage from django_tenants.files.storages import TenantStorageMixin from django.conf import settings from django.core.exceptions import ImproperlyConfigured class TenantStaticFilesStorage(TenantStorageMixin, StaticFilesStorage): """ Implementation that extends core Django's StaticFilesStorage. """ def __init__(self, location=None, base_url=None, *args, **kwargs): super(TenantStaticFilesStorage, self).__init__(location, base_url, *args, **kwargs) if hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT"): self.location = os.path.join(self.location, settings.MULTITENANT_RELATIVE_STATIC_ROOT) def path(self, name): """ if not hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT") or \ not settings.MULTITENANT_RELATIVE_STATIC_ROOT: raise ImproperlyConfigured("You're using the TenantStaticFilesStorage " "without having set the MULTITENANT_RELATIVE_STATIC_ROOT " "setting to a filesystem path.") """ return super(TenantStaticFilesStorage, self).path(name) Fix regression in path handling of TenantStaticFileStorage. Fixes #197.import os from django.contrib.staticfiles.storage import StaticFilesStorage from django_tenants.files.storages import TenantStorageMixin from django.conf import settings from django.core.exceptions import ImproperlyConfigured class TenantStaticFilesStorage(TenantStorageMixin, StaticFilesStorage): """ Implementation that extends core Django's StaticFilesStorage. """ def __init__(self, location=None, base_url=None, *args, **kwargs): super(TenantStaticFilesStorage, self).__init__(location, base_url, *args, **kwargs) if hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT"): self.location = os.path.join(self.location, settings.MULTITENANT_RELATIVE_STATIC_ROOT) """ def path(self, name): if not hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT") or \ not settings.MULTITENANT_RELATIVE_STATIC_ROOT: raise ImproperlyConfigured("You're using the TenantStaticFilesStorage " "without having set the MULTITENANT_RELATIVE_STATIC_ROOT " "setting to a filesystem path.") return super(TenantStaticFilesStorage, self).path(name) """
<commit_before>import os from django.contrib.staticfiles.storage import StaticFilesStorage from django_tenants.files.storages import TenantStorageMixin from django.conf import settings from django.core.exceptions import ImproperlyConfigured class TenantStaticFilesStorage(TenantStorageMixin, StaticFilesStorage): """ Implementation that extends core Django's StaticFilesStorage. """ def __init__(self, location=None, base_url=None, *args, **kwargs): super(TenantStaticFilesStorage, self).__init__(location, base_url, *args, **kwargs) if hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT"): self.location = os.path.join(self.location, settings.MULTITENANT_RELATIVE_STATIC_ROOT) def path(self, name): """ if not hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT") or \ not settings.MULTITENANT_RELATIVE_STATIC_ROOT: raise ImproperlyConfigured("You're using the TenantStaticFilesStorage " "without having set the MULTITENANT_RELATIVE_STATIC_ROOT " "setting to a filesystem path.") """ return super(TenantStaticFilesStorage, self).path(name) <commit_msg>Fix regression in path handling of TenantStaticFileStorage. Fixes #197.<commit_after>import os from django.contrib.staticfiles.storage import StaticFilesStorage from django_tenants.files.storages import TenantStorageMixin from django.conf import settings from django.core.exceptions import ImproperlyConfigured class TenantStaticFilesStorage(TenantStorageMixin, StaticFilesStorage): """ Implementation that extends core Django's StaticFilesStorage. """ def __init__(self, location=None, base_url=None, *args, **kwargs): super(TenantStaticFilesStorage, self).__init__(location, base_url, *args, **kwargs) if hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT"): self.location = os.path.join(self.location, settings.MULTITENANT_RELATIVE_STATIC_ROOT) """ def path(self, name): if not hasattr(settings, "MULTITENANT_RELATIVE_STATIC_ROOT") or \ not settings.MULTITENANT_RELATIVE_STATIC_ROOT: raise ImproperlyConfigured("You're using the TenantStaticFilesStorage " "without having set the MULTITENANT_RELATIVE_STATIC_ROOT " "setting to a filesystem path.") return super(TenantStaticFilesStorage, self).path(name) """
fccc7b59e742bc887580c91c2c2dbeae2c85caee
wagtailannotatedimage/views.py
wagtailannotatedimage/views.py
from django.http import HttpResponse from wagtail.wagtailimages.models import Filter, Image def get_full_image_url(request, image_id): image = Image.objects.get(id=image_id) if image: filter, _ = Filter.objects.get_or_create(spec='original') orig_rendition = image.get_rendition(filter) return HttpResponse(orig_rendition.img_tag()) else: return HttpResponse('')
from django.http import HttpResponse from django.shortcuts import get_object_or_404 from wagtail.wagtailimages.models import Filter, get_iamge_model Image = get_iamge_model() def get_full_image_url(request, image_id): image = get_object_or_404(Image, id=image_id) if image: filter, _ = Filter.objects.get_or_create(spec='original') orig_rendition = image.get_rendition(filter) return HttpResponse(orig_rendition.img_tag()) else: return HttpResponse('')
Allow for custom image models, 404 on image not found intead of error
Allow for custom image models, 404 on image not found intead of error
Python
bsd-3-clause
takeflight/wagtailannotatedimage,takeflight/wagtailannotatedimage,takeflight/wagtailannotatedimage
from django.http import HttpResponse from wagtail.wagtailimages.models import Filter, Image def get_full_image_url(request, image_id): image = Image.objects.get(id=image_id) if image: filter, _ = Filter.objects.get_or_create(spec='original') orig_rendition = image.get_rendition(filter) return HttpResponse(orig_rendition.img_tag()) else: return HttpResponse('') Allow for custom image models, 404 on image not found intead of error
from django.http import HttpResponse from django.shortcuts import get_object_or_404 from wagtail.wagtailimages.models import Filter, get_iamge_model Image = get_iamge_model() def get_full_image_url(request, image_id): image = get_object_or_404(Image, id=image_id) if image: filter, _ = Filter.objects.get_or_create(spec='original') orig_rendition = image.get_rendition(filter) return HttpResponse(orig_rendition.img_tag()) else: return HttpResponse('')
<commit_before>from django.http import HttpResponse from wagtail.wagtailimages.models import Filter, Image def get_full_image_url(request, image_id): image = Image.objects.get(id=image_id) if image: filter, _ = Filter.objects.get_or_create(spec='original') orig_rendition = image.get_rendition(filter) return HttpResponse(orig_rendition.img_tag()) else: return HttpResponse('') <commit_msg>Allow for custom image models, 404 on image not found intead of error<commit_after>
from django.http import HttpResponse from django.shortcuts import get_object_or_404 from wagtail.wagtailimages.models import Filter, get_iamge_model Image = get_iamge_model() def get_full_image_url(request, image_id): image = get_object_or_404(Image, id=image_id) if image: filter, _ = Filter.objects.get_or_create(spec='original') orig_rendition = image.get_rendition(filter) return HttpResponse(orig_rendition.img_tag()) else: return HttpResponse('')
from django.http import HttpResponse from wagtail.wagtailimages.models import Filter, Image def get_full_image_url(request, image_id): image = Image.objects.get(id=image_id) if image: filter, _ = Filter.objects.get_or_create(spec='original') orig_rendition = image.get_rendition(filter) return HttpResponse(orig_rendition.img_tag()) else: return HttpResponse('') Allow for custom image models, 404 on image not found intead of errorfrom django.http import HttpResponse from django.shortcuts import get_object_or_404 from wagtail.wagtailimages.models import Filter, get_iamge_model Image = get_iamge_model() def get_full_image_url(request, image_id): image = get_object_or_404(Image, id=image_id) if image: filter, _ = Filter.objects.get_or_create(spec='original') orig_rendition = image.get_rendition(filter) return HttpResponse(orig_rendition.img_tag()) else: return HttpResponse('')
<commit_before>from django.http import HttpResponse from wagtail.wagtailimages.models import Filter, Image def get_full_image_url(request, image_id): image = Image.objects.get(id=image_id) if image: filter, _ = Filter.objects.get_or_create(spec='original') orig_rendition = image.get_rendition(filter) return HttpResponse(orig_rendition.img_tag()) else: return HttpResponse('') <commit_msg>Allow for custom image models, 404 on image not found intead of error<commit_after>from django.http import HttpResponse from django.shortcuts import get_object_or_404 from wagtail.wagtailimages.models import Filter, get_iamge_model Image = get_iamge_model() def get_full_image_url(request, image_id): image = get_object_or_404(Image, id=image_id) if image: filter, _ = Filter.objects.get_or_create(spec='original') orig_rendition = image.get_rendition(filter) return HttpResponse(orig_rendition.img_tag()) else: return HttpResponse('')
97b07af6c6bcdd1a3b6c751a1462d88667c9e529
tests/test_core.py
tests/test_core.py
import pytest from mock import Mock from saleor.core.utils import ( Country, get_country_by_ip, get_currency_for_country) @pytest.mark.parametrize('ip_data, expected_country', [ ({'country': {'iso_code': 'PL'}}, Country('PL')), ({'country': {'iso_code': 'UNKNOWN'}}, None), (None, None), ({}, None), ({'country': {}}, None)]) def test_get_country_by_ip(ip_data, expected_country, monkeypatch): monkeypatch.setattr( 'saleor.core.utils.geolite2.reader', Mock(return_value=Mock(get=Mock(return_value=ip_data)))) country = get_country_by_ip('127.0.0.1') assert country == expected_country @pytest.mark.parametrize('country, expected_currency', [ (Country('PL'), 'PLN'), (Country('USA'), 'USD'), (Country('GB'), 'GBP')]) def test_get_currency_for_country(country, expected_currency, monkeypatch): currency = get_currency_for_country(country) assert currency == expected_currency
import pytest from mock import Mock from saleor.core.utils import ( Country, get_country_by_ip, get_currency_for_country) @pytest.mark.parametrize('ip_data, expected_country', [ ({'country': {'iso_code': 'PL'}}, Country('PL')), ({'country': {'iso_code': 'UNKNOWN'}}, None), (None, None), ({}, None), ({'country': {}}, None)]) def test_get_country_by_ip(ip_data, expected_country, monkeypatch): monkeypatch.setattr( 'saleor.core.utils.geolite2.reader', Mock(return_value=Mock(get=Mock(return_value=ip_data)))) country = get_country_by_ip('127.0.0.1') assert country == expected_country @pytest.mark.parametrize('country, expected_currency', [ (Country('PL'), 'PLN'), (Country('US'), 'USD'), (Country('GB'), 'GBP')]) def test_get_currency_for_country(country, expected_currency, monkeypatch): currency = get_currency_for_country(country) assert currency == expected_currency
Fix country ISO code for US
Fix country ISO code for US
Python
bsd-3-clause
UITools/saleor,HyperManTT/ECommerceSaleor,jreigel/saleor,tfroehlich82/saleor,UITools/saleor,UITools/saleor,tfroehlich82/saleor,itbabu/saleor,jreigel/saleor,KenMutemi/saleor,car3oon/saleor,mociepka/saleor,jreigel/saleor,KenMutemi/saleor,HyperManTT/ECommerceSaleor,HyperManTT/ECommerceSaleor,tfroehlich82/saleor,itbabu/saleor,mociepka/saleor,UITools/saleor,rodrigozn/CW-Shop,car3oon/saleor,UITools/saleor,maferelo/saleor,itbabu/saleor,mociepka/saleor,KenMutemi/saleor,rodrigozn/CW-Shop,car3oon/saleor,rodrigozn/CW-Shop,maferelo/saleor,maferelo/saleor
import pytest from mock import Mock from saleor.core.utils import ( Country, get_country_by_ip, get_currency_for_country) @pytest.mark.parametrize('ip_data, expected_country', [ ({'country': {'iso_code': 'PL'}}, Country('PL')), ({'country': {'iso_code': 'UNKNOWN'}}, None), (None, None), ({}, None), ({'country': {}}, None)]) def test_get_country_by_ip(ip_data, expected_country, monkeypatch): monkeypatch.setattr( 'saleor.core.utils.geolite2.reader', Mock(return_value=Mock(get=Mock(return_value=ip_data)))) country = get_country_by_ip('127.0.0.1') assert country == expected_country @pytest.mark.parametrize('country, expected_currency', [ (Country('PL'), 'PLN'), (Country('USA'), 'USD'), (Country('GB'), 'GBP')]) def test_get_currency_for_country(country, expected_currency, monkeypatch): currency = get_currency_for_country(country) assert currency == expected_currency Fix country ISO code for US
import pytest from mock import Mock from saleor.core.utils import ( Country, get_country_by_ip, get_currency_for_country) @pytest.mark.parametrize('ip_data, expected_country', [ ({'country': {'iso_code': 'PL'}}, Country('PL')), ({'country': {'iso_code': 'UNKNOWN'}}, None), (None, None), ({}, None), ({'country': {}}, None)]) def test_get_country_by_ip(ip_data, expected_country, monkeypatch): monkeypatch.setattr( 'saleor.core.utils.geolite2.reader', Mock(return_value=Mock(get=Mock(return_value=ip_data)))) country = get_country_by_ip('127.0.0.1') assert country == expected_country @pytest.mark.parametrize('country, expected_currency', [ (Country('PL'), 'PLN'), (Country('US'), 'USD'), (Country('GB'), 'GBP')]) def test_get_currency_for_country(country, expected_currency, monkeypatch): currency = get_currency_for_country(country) assert currency == expected_currency
<commit_before>import pytest from mock import Mock from saleor.core.utils import ( Country, get_country_by_ip, get_currency_for_country) @pytest.mark.parametrize('ip_data, expected_country', [ ({'country': {'iso_code': 'PL'}}, Country('PL')), ({'country': {'iso_code': 'UNKNOWN'}}, None), (None, None), ({}, None), ({'country': {}}, None)]) def test_get_country_by_ip(ip_data, expected_country, monkeypatch): monkeypatch.setattr( 'saleor.core.utils.geolite2.reader', Mock(return_value=Mock(get=Mock(return_value=ip_data)))) country = get_country_by_ip('127.0.0.1') assert country == expected_country @pytest.mark.parametrize('country, expected_currency', [ (Country('PL'), 'PLN'), (Country('USA'), 'USD'), (Country('GB'), 'GBP')]) def test_get_currency_for_country(country, expected_currency, monkeypatch): currency = get_currency_for_country(country) assert currency == expected_currency <commit_msg>Fix country ISO code for US<commit_after>
import pytest from mock import Mock from saleor.core.utils import ( Country, get_country_by_ip, get_currency_for_country) @pytest.mark.parametrize('ip_data, expected_country', [ ({'country': {'iso_code': 'PL'}}, Country('PL')), ({'country': {'iso_code': 'UNKNOWN'}}, None), (None, None), ({}, None), ({'country': {}}, None)]) def test_get_country_by_ip(ip_data, expected_country, monkeypatch): monkeypatch.setattr( 'saleor.core.utils.geolite2.reader', Mock(return_value=Mock(get=Mock(return_value=ip_data)))) country = get_country_by_ip('127.0.0.1') assert country == expected_country @pytest.mark.parametrize('country, expected_currency', [ (Country('PL'), 'PLN'), (Country('US'), 'USD'), (Country('GB'), 'GBP')]) def test_get_currency_for_country(country, expected_currency, monkeypatch): currency = get_currency_for_country(country) assert currency == expected_currency
import pytest from mock import Mock from saleor.core.utils import ( Country, get_country_by_ip, get_currency_for_country) @pytest.mark.parametrize('ip_data, expected_country', [ ({'country': {'iso_code': 'PL'}}, Country('PL')), ({'country': {'iso_code': 'UNKNOWN'}}, None), (None, None), ({}, None), ({'country': {}}, None)]) def test_get_country_by_ip(ip_data, expected_country, monkeypatch): monkeypatch.setattr( 'saleor.core.utils.geolite2.reader', Mock(return_value=Mock(get=Mock(return_value=ip_data)))) country = get_country_by_ip('127.0.0.1') assert country == expected_country @pytest.mark.parametrize('country, expected_currency', [ (Country('PL'), 'PLN'), (Country('USA'), 'USD'), (Country('GB'), 'GBP')]) def test_get_currency_for_country(country, expected_currency, monkeypatch): currency = get_currency_for_country(country) assert currency == expected_currency Fix country ISO code for USimport pytest from mock import Mock from saleor.core.utils import ( Country, get_country_by_ip, get_currency_for_country) @pytest.mark.parametrize('ip_data, expected_country', [ ({'country': {'iso_code': 'PL'}}, Country('PL')), ({'country': {'iso_code': 'UNKNOWN'}}, None), (None, None), ({}, None), ({'country': {}}, None)]) def test_get_country_by_ip(ip_data, expected_country, monkeypatch): monkeypatch.setattr( 'saleor.core.utils.geolite2.reader', Mock(return_value=Mock(get=Mock(return_value=ip_data)))) country = get_country_by_ip('127.0.0.1') assert country == expected_country @pytest.mark.parametrize('country, expected_currency', [ (Country('PL'), 'PLN'), (Country('US'), 'USD'), (Country('GB'), 'GBP')]) def test_get_currency_for_country(country, expected_currency, monkeypatch): currency = get_currency_for_country(country) assert currency == expected_currency
<commit_before>import pytest from mock import Mock from saleor.core.utils import ( Country, get_country_by_ip, get_currency_for_country) @pytest.mark.parametrize('ip_data, expected_country', [ ({'country': {'iso_code': 'PL'}}, Country('PL')), ({'country': {'iso_code': 'UNKNOWN'}}, None), (None, None), ({}, None), ({'country': {}}, None)]) def test_get_country_by_ip(ip_data, expected_country, monkeypatch): monkeypatch.setattr( 'saleor.core.utils.geolite2.reader', Mock(return_value=Mock(get=Mock(return_value=ip_data)))) country = get_country_by_ip('127.0.0.1') assert country == expected_country @pytest.mark.parametrize('country, expected_currency', [ (Country('PL'), 'PLN'), (Country('USA'), 'USD'), (Country('GB'), 'GBP')]) def test_get_currency_for_country(country, expected_currency, monkeypatch): currency = get_currency_for_country(country) assert currency == expected_currency <commit_msg>Fix country ISO code for US<commit_after>import pytest from mock import Mock from saleor.core.utils import ( Country, get_country_by_ip, get_currency_for_country) @pytest.mark.parametrize('ip_data, expected_country', [ ({'country': {'iso_code': 'PL'}}, Country('PL')), ({'country': {'iso_code': 'UNKNOWN'}}, None), (None, None), ({}, None), ({'country': {}}, None)]) def test_get_country_by_ip(ip_data, expected_country, monkeypatch): monkeypatch.setattr( 'saleor.core.utils.geolite2.reader', Mock(return_value=Mock(get=Mock(return_value=ip_data)))) country = get_country_by_ip('127.0.0.1') assert country == expected_country @pytest.mark.parametrize('country, expected_currency', [ (Country('PL'), 'PLN'), (Country('US'), 'USD'), (Country('GB'), 'GBP')]) def test_get_currency_for_country(country, expected_currency, monkeypatch): currency = get_currency_for_country(country) assert currency == expected_currency
202fba50c287d3df99b22a4f30a96a3d8d9c8141
tests/test_pypi.py
tests/test_pypi.py
from unittest import TestCase from semantic_release.pypi import upload_to_pypi from . import mock class PypiTests(TestCase): @mock.patch('semantic_release.pypi.run') def test_upload_without_arguments(self, mock_run): upload_to_pypi(username='username', password='password') self.assertEqual( mock_run.call_args_list, [ mock.call('python setup.py sdist bdist_wheel'), mock.call('twine upload -u username -p password dist/*'), mock.call('rm -rf build dist') ] )
from unittest import TestCase from semantic_release.pypi import upload_to_pypi from . import mock class PypiTests(TestCase): @mock.patch('semantic_release.pypi.run') def test_upload_without_arguments(self, mock_run): upload_to_pypi(username='username', password='password') self.assertEqual( mock_run.call_args_list, [ mock.call('rm -rf build dist'), mock.call('python setup.py sdist bdist_wheel'), mock.call('twine upload -u username -p password dist/*'), mock.call('rm -rf build dist') ] )
Update test after adding cleaning of dist
test: Update test after adding cleaning of dist
Python
mit
relekang/python-semantic-release,relekang/python-semantic-release
from unittest import TestCase from semantic_release.pypi import upload_to_pypi from . import mock class PypiTests(TestCase): @mock.patch('semantic_release.pypi.run') def test_upload_without_arguments(self, mock_run): upload_to_pypi(username='username', password='password') self.assertEqual( mock_run.call_args_list, [ mock.call('python setup.py sdist bdist_wheel'), mock.call('twine upload -u username -p password dist/*'), mock.call('rm -rf build dist') ] ) test: Update test after adding cleaning of dist
from unittest import TestCase from semantic_release.pypi import upload_to_pypi from . import mock class PypiTests(TestCase): @mock.patch('semantic_release.pypi.run') def test_upload_without_arguments(self, mock_run): upload_to_pypi(username='username', password='password') self.assertEqual( mock_run.call_args_list, [ mock.call('rm -rf build dist'), mock.call('python setup.py sdist bdist_wheel'), mock.call('twine upload -u username -p password dist/*'), mock.call('rm -rf build dist') ] )
<commit_before>from unittest import TestCase from semantic_release.pypi import upload_to_pypi from . import mock class PypiTests(TestCase): @mock.patch('semantic_release.pypi.run') def test_upload_without_arguments(self, mock_run): upload_to_pypi(username='username', password='password') self.assertEqual( mock_run.call_args_list, [ mock.call('python setup.py sdist bdist_wheel'), mock.call('twine upload -u username -p password dist/*'), mock.call('rm -rf build dist') ] ) <commit_msg>test: Update test after adding cleaning of dist<commit_after>
from unittest import TestCase from semantic_release.pypi import upload_to_pypi from . import mock class PypiTests(TestCase): @mock.patch('semantic_release.pypi.run') def test_upload_without_arguments(self, mock_run): upload_to_pypi(username='username', password='password') self.assertEqual( mock_run.call_args_list, [ mock.call('rm -rf build dist'), mock.call('python setup.py sdist bdist_wheel'), mock.call('twine upload -u username -p password dist/*'), mock.call('rm -rf build dist') ] )
from unittest import TestCase from semantic_release.pypi import upload_to_pypi from . import mock class PypiTests(TestCase): @mock.patch('semantic_release.pypi.run') def test_upload_without_arguments(self, mock_run): upload_to_pypi(username='username', password='password') self.assertEqual( mock_run.call_args_list, [ mock.call('python setup.py sdist bdist_wheel'), mock.call('twine upload -u username -p password dist/*'), mock.call('rm -rf build dist') ] ) test: Update test after adding cleaning of distfrom unittest import TestCase from semantic_release.pypi import upload_to_pypi from . import mock class PypiTests(TestCase): @mock.patch('semantic_release.pypi.run') def test_upload_without_arguments(self, mock_run): upload_to_pypi(username='username', password='password') self.assertEqual( mock_run.call_args_list, [ mock.call('rm -rf build dist'), mock.call('python setup.py sdist bdist_wheel'), mock.call('twine upload -u username -p password dist/*'), mock.call('rm -rf build dist') ] )
<commit_before>from unittest import TestCase from semantic_release.pypi import upload_to_pypi from . import mock class PypiTests(TestCase): @mock.patch('semantic_release.pypi.run') def test_upload_without_arguments(self, mock_run): upload_to_pypi(username='username', password='password') self.assertEqual( mock_run.call_args_list, [ mock.call('python setup.py sdist bdist_wheel'), mock.call('twine upload -u username -p password dist/*'), mock.call('rm -rf build dist') ] ) <commit_msg>test: Update test after adding cleaning of dist<commit_after>from unittest import TestCase from semantic_release.pypi import upload_to_pypi from . import mock class PypiTests(TestCase): @mock.patch('semantic_release.pypi.run') def test_upload_without_arguments(self, mock_run): upload_to_pypi(username='username', password='password') self.assertEqual( mock_run.call_args_list, [ mock.call('rm -rf build dist'), mock.call('python setup.py sdist bdist_wheel'), mock.call('twine upload -u username -p password dist/*'), mock.call('rm -rf build dist') ] )
fd2bd48ca9da96e894031f7979798672e1cebdea
tests/test_util.py
tests/test_util.py
# Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from project_generator.util import * def test_unicode_detection(): try: print(u'\U0001F648') except UnicodeEncodeError: assert not unicode_available() else: assert unicode_available() def test_flatten(): l1 = [['aa', 'bb', ['cc', 'dd', 'ee'], ['ee', 'ff'], 'gg']] assert list(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ee', 'ff', 'gg'] assert uniqify(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg'] def test_uniqify(): l1 = ['a', 'b', 'b', 'c', 'b', 'd', 'c', 'e', 'f', 'a'] assert uniqify(l1) == ['a', 'b', 'c', 'd', 'e', 'f']
# Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from project_generator.util import * def test_flatten(): l1 = [['aa', 'bb', ['cc', 'dd', 'ee'], ['ee', 'ff'], 'gg']] assert list(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ee', 'ff', 'gg'] assert uniqify(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg'] def test_uniqify(): l1 = ['a', 'b', 'b', 'c', 'b', 'd', 'c', 'e', 'f', 'a'] assert uniqify(l1) == ['a', 'b', 'c', 'd', 'e', 'f']
Test util - unicode removal
Test util - unicode removal
Python
apache-2.0
ohagendorf/project_generator,project-generator/project_generator,sarahmarshy/project_generator,0xc0170/project_generator
# Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from project_generator.util import * def test_unicode_detection(): try: print(u'\U0001F648') except UnicodeEncodeError: assert not unicode_available() else: assert unicode_available() def test_flatten(): l1 = [['aa', 'bb', ['cc', 'dd', 'ee'], ['ee', 'ff'], 'gg']] assert list(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ee', 'ff', 'gg'] assert uniqify(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg'] def test_uniqify(): l1 = ['a', 'b', 'b', 'c', 'b', 'd', 'c', 'e', 'f', 'a'] assert uniqify(l1) == ['a', 'b', 'c', 'd', 'e', 'f'] Test util - unicode removal
# Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from project_generator.util import * def test_flatten(): l1 = [['aa', 'bb', ['cc', 'dd', 'ee'], ['ee', 'ff'], 'gg']] assert list(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ee', 'ff', 'gg'] assert uniqify(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg'] def test_uniqify(): l1 = ['a', 'b', 'b', 'c', 'b', 'd', 'c', 'e', 'f', 'a'] assert uniqify(l1) == ['a', 'b', 'c', 'd', 'e', 'f']
<commit_before># Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from project_generator.util import * def test_unicode_detection(): try: print(u'\U0001F648') except UnicodeEncodeError: assert not unicode_available() else: assert unicode_available() def test_flatten(): l1 = [['aa', 'bb', ['cc', 'dd', 'ee'], ['ee', 'ff'], 'gg']] assert list(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ee', 'ff', 'gg'] assert uniqify(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg'] def test_uniqify(): l1 = ['a', 'b', 'b', 'c', 'b', 'd', 'c', 'e', 'f', 'a'] assert uniqify(l1) == ['a', 'b', 'c', 'd', 'e', 'f'] <commit_msg>Test util - unicode removal<commit_after>
# Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from project_generator.util import * def test_flatten(): l1 = [['aa', 'bb', ['cc', 'dd', 'ee'], ['ee', 'ff'], 'gg']] assert list(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ee', 'ff', 'gg'] assert uniqify(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg'] def test_uniqify(): l1 = ['a', 'b', 'b', 'c', 'b', 'd', 'c', 'e', 'f', 'a'] assert uniqify(l1) == ['a', 'b', 'c', 'd', 'e', 'f']
# Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from project_generator.util import * def test_unicode_detection(): try: print(u'\U0001F648') except UnicodeEncodeError: assert not unicode_available() else: assert unicode_available() def test_flatten(): l1 = [['aa', 'bb', ['cc', 'dd', 'ee'], ['ee', 'ff'], 'gg']] assert list(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ee', 'ff', 'gg'] assert uniqify(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg'] def test_uniqify(): l1 = ['a', 'b', 'b', 'c', 'b', 'd', 'c', 'e', 'f', 'a'] assert uniqify(l1) == ['a', 'b', 'c', 'd', 'e', 'f'] Test util - unicode removal# Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from project_generator.util import * def test_flatten(): l1 = [['aa', 'bb', ['cc', 'dd', 'ee'], ['ee', 'ff'], 'gg']] assert list(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ee', 'ff', 'gg'] assert uniqify(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg'] def test_uniqify(): l1 = ['a', 'b', 'b', 'c', 'b', 'd', 'c', 'e', 'f', 'a'] assert uniqify(l1) == ['a', 'b', 'c', 'd', 'e', 'f']
<commit_before># Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from project_generator.util import * def test_unicode_detection(): try: print(u'\U0001F648') except UnicodeEncodeError: assert not unicode_available() else: assert unicode_available() def test_flatten(): l1 = [['aa', 'bb', ['cc', 'dd', 'ee'], ['ee', 'ff'], 'gg']] assert list(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ee', 'ff', 'gg'] assert uniqify(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg'] def test_uniqify(): l1 = ['a', 'b', 'b', 'c', 'b', 'd', 'c', 'e', 'f', 'a'] assert uniqify(l1) == ['a', 'b', 'c', 'd', 'e', 'f'] <commit_msg>Test util - unicode removal<commit_after># Copyright 2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from project_generator.util import * def test_flatten(): l1 = [['aa', 'bb', ['cc', 'dd', 'ee'], ['ee', 'ff'], 'gg']] assert list(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ee', 'ff', 'gg'] assert uniqify(flatten(l1)) == ['aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg'] def test_uniqify(): l1 = ['a', 'b', 'b', 'c', 'b', 'd', 'c', 'e', 'f', 'a'] assert uniqify(l1) == ['a', 'b', 'c', 'd', 'e', 'f']
2423958016d552a6f696b7124454c7b362c84a5f
pylearn2/scripts/dbm/dbm_metrics.py
pylearn2/scripts/dbm/dbm_metrics.py
#!/usr/bin/env python import argparse if __name__ == '__main__': # Argument parsing parser = argparse.ArgumentParser() parser.add_argument("metric", help="the desired metric", choices=["ais"]) parser.add_argument("model_path", help="path to the pickled DBM model") args = parser.parse_args() metric = args.metric model_path = args.model_path
#!/usr/bin/env python import argparse from pylearn2.utils import serial def compute_ais(model): pass if __name__ == '__main__': # Possible metrics metrics = {'ais': compute_ais} # Argument parsing parser = argparse.ArgumentParser() parser.add_argument("metric", help="the desired metric", choices=metrics.keys()) parser.add_argument("model_path", help="path to the pickled DBM model") args = parser.parse_args() metric = metrics[args.metric] model = serial.load(args.model_path) metric(model)
Make the script recuperate the correct method
Make the script recuperate the correct method
Python
bsd-3-clause
pkainz/pylearn2,KennethPierce/pylearnk,abergeron/pylearn2,alexjc/pylearn2,pombredanne/pylearn2,cosmoharrigan/pylearn2,bartvm/pylearn2,mclaughlin6464/pylearn2,ashhher3/pylearn2,se4u/pylearn2,abergeron/pylearn2,skearnes/pylearn2,lisa-lab/pylearn2,lamblin/pylearn2,fulmicoton/pylearn2,alexjc/pylearn2,matrogers/pylearn2,jamessergeant/pylearn2,fyffyt/pylearn2,hantek/pylearn2,kose-y/pylearn2,cosmoharrigan/pylearn2,matrogers/pylearn2,chrish42/pylearn,JesseLivezey/plankton,pkainz/pylearn2,mkraemer67/pylearn2,JesseLivezey/plankton,chrish42/pylearn,caidongyun/pylearn2,nouiz/pylearn2,w1kke/pylearn2,daemonmaker/pylearn2,lisa-lab/pylearn2,theoryno3/pylearn2,mkraemer67/pylearn2,ddboline/pylearn2,kastnerkyle/pylearn2,sandeepkbhat/pylearn2,lamblin/pylearn2,theoryno3/pylearn2,cosmoharrigan/pylearn2,pombredanne/pylearn2,jeremyfix/pylearn2,junbochen/pylearn2,fishcorn/pylearn2,pombredanne/pylearn2,ddboline/pylearn2,nouiz/pylearn2,fishcorn/pylearn2,KennethPierce/pylearnk,JesseLivezey/plankton,bartvm/pylearn2,pombredanne/pylearn2,lunyang/pylearn2,hantek/pylearn2,goodfeli/pylearn2,goodfeli/pylearn2,matrogers/pylearn2,pkainz/pylearn2,Refefer/pylearn2,lamblin/pylearn2,TNick/pylearn2,KennethPierce/pylearnk,TNick/pylearn2,hantek/pylearn2,hantek/pylearn2,kastnerkyle/pylearn2,lancezlin/pylearn2,alexjc/pylearn2,sandeepkbhat/pylearn2,lisa-lab/pylearn2,junbochen/pylearn2,JesseLivezey/pylearn2,w1kke/pylearn2,mclaughlin6464/pylearn2,lancezlin/pylearn2,hyqneuron/pylearn2-maxsom,fulmicoton/pylearn2,lamblin/pylearn2,pkainz/pylearn2,jamessergeant/pylearn2,woozzu/pylearn2,JesseLivezey/pylearn2,TNick/pylearn2,fishcorn/pylearn2,goodfeli/pylearn2,theoryno3/pylearn2,hyqneuron/pylearn2-maxsom,mclaughlin6464/pylearn2,lancezlin/pylearn2,lunyang/pylearn2,se4u/pylearn2,w1kke/pylearn2,chrish42/pylearn,TNick/pylearn2,daemonmaker/pylearn2,woozzu/pylearn2,kastnerkyle/pylearn2,lisa-lab/pylearn2,msingh172/pylearn2,hyqneuron/pylearn2-maxsom,lunyang/pylearn2,theoryno3/pylearn2,sandeepkbhat/pylearn2,chrish42/pylearn,w1kke/pylearn2,skearnes/pylearn2,ashhher3/pylearn2,abergeron/pylearn2,CIFASIS/pylearn2,KennethPierce/pylearnk,junbochen/pylearn2,fulmicoton/pylearn2,Refefer/pylearn2,skearnes/pylearn2,sandeepkbhat/pylearn2,goodfeli/pylearn2,kose-y/pylearn2,se4u/pylearn2,JesseLivezey/plankton,Refefer/pylearn2,shiquanwang/pylearn2,CIFASIS/pylearn2,JesseLivezey/pylearn2,mclaughlin6464/pylearn2,fulmicoton/pylearn2,se4u/pylearn2,kose-y/pylearn2,shiquanwang/pylearn2,caidongyun/pylearn2,mkraemer67/pylearn2,aalmah/pylearn2,JesseLivezey/pylearn2,hyqneuron/pylearn2-maxsom,aalmah/pylearn2,ddboline/pylearn2,jeremyfix/pylearn2,fyffyt/pylearn2,fishcorn/pylearn2,alexjc/pylearn2,junbochen/pylearn2,daemonmaker/pylearn2,daemonmaker/pylearn2,nouiz/pylearn2,CIFASIS/pylearn2,Refefer/pylearn2,abergeron/pylearn2,cosmoharrigan/pylearn2,woozzu/pylearn2,lancezlin/pylearn2,msingh172/pylearn2,skearnes/pylearn2,bartvm/pylearn2,ddboline/pylearn2,caidongyun/pylearn2,msingh172/pylearn2,woozzu/pylearn2,ashhher3/pylearn2,aalmah/pylearn2,jamessergeant/pylearn2,msingh172/pylearn2,shiquanwang/pylearn2,caidongyun/pylearn2,shiquanwang/pylearn2,jamessergeant/pylearn2,aalmah/pylearn2,jeremyfix/pylearn2,ashhher3/pylearn2,bartvm/pylearn2,CIFASIS/pylearn2,kastnerkyle/pylearn2,jeremyfix/pylearn2,nouiz/pylearn2,fyffyt/pylearn2,fyffyt/pylearn2,matrogers/pylearn2,lunyang/pylearn2,mkraemer67/pylearn2,kose-y/pylearn2
#!/usr/bin/env python import argparse if __name__ == '__main__': # Argument parsing parser = argparse.ArgumentParser() parser.add_argument("metric", help="the desired metric", choices=["ais"]) parser.add_argument("model_path", help="path to the pickled DBM model") args = parser.parse_args() metric = args.metric model_path = args.model_path Make the script recuperate the correct method
#!/usr/bin/env python import argparse from pylearn2.utils import serial def compute_ais(model): pass if __name__ == '__main__': # Possible metrics metrics = {'ais': compute_ais} # Argument parsing parser = argparse.ArgumentParser() parser.add_argument("metric", help="the desired metric", choices=metrics.keys()) parser.add_argument("model_path", help="path to the pickled DBM model") args = parser.parse_args() metric = metrics[args.metric] model = serial.load(args.model_path) metric(model)
<commit_before>#!/usr/bin/env python import argparse if __name__ == '__main__': # Argument parsing parser = argparse.ArgumentParser() parser.add_argument("metric", help="the desired metric", choices=["ais"]) parser.add_argument("model_path", help="path to the pickled DBM model") args = parser.parse_args() metric = args.metric model_path = args.model_path <commit_msg>Make the script recuperate the correct method<commit_after>
#!/usr/bin/env python import argparse from pylearn2.utils import serial def compute_ais(model): pass if __name__ == '__main__': # Possible metrics metrics = {'ais': compute_ais} # Argument parsing parser = argparse.ArgumentParser() parser.add_argument("metric", help="the desired metric", choices=metrics.keys()) parser.add_argument("model_path", help="path to the pickled DBM model") args = parser.parse_args() metric = metrics[args.metric] model = serial.load(args.model_path) metric(model)
#!/usr/bin/env python import argparse if __name__ == '__main__': # Argument parsing parser = argparse.ArgumentParser() parser.add_argument("metric", help="the desired metric", choices=["ais"]) parser.add_argument("model_path", help="path to the pickled DBM model") args = parser.parse_args() metric = args.metric model_path = args.model_path Make the script recuperate the correct method#!/usr/bin/env python import argparse from pylearn2.utils import serial def compute_ais(model): pass if __name__ == '__main__': # Possible metrics metrics = {'ais': compute_ais} # Argument parsing parser = argparse.ArgumentParser() parser.add_argument("metric", help="the desired metric", choices=metrics.keys()) parser.add_argument("model_path", help="path to the pickled DBM model") args = parser.parse_args() metric = metrics[args.metric] model = serial.load(args.model_path) metric(model)
<commit_before>#!/usr/bin/env python import argparse if __name__ == '__main__': # Argument parsing parser = argparse.ArgumentParser() parser.add_argument("metric", help="the desired metric", choices=["ais"]) parser.add_argument("model_path", help="path to the pickled DBM model") args = parser.parse_args() metric = args.metric model_path = args.model_path <commit_msg>Make the script recuperate the correct method<commit_after>#!/usr/bin/env python import argparse from pylearn2.utils import serial def compute_ais(model): pass if __name__ == '__main__': # Possible metrics metrics = {'ais': compute_ais} # Argument parsing parser = argparse.ArgumentParser() parser.add_argument("metric", help="the desired metric", choices=metrics.keys()) parser.add_argument("model_path", help="path to the pickled DBM model") args = parser.parse_args() metric = metrics[args.metric] model = serial.load(args.model_path) metric(model)
6c0212b004aef96c305406352810bd40f3d5500e
censusreporter/config/prod/settings.py
censusreporter/config/prod/settings.py
from config.base.settings import * DEBUG = False TEMPLATE_DEBUG = DEBUG ROOT_URLCONF = 'config.prod.urls' WSGI_APPLICATION = "config.prod.wsgi.application" ALLOWED_HOSTS = [ '174.129.183.221', '54.173.179.176', '.censusreporter.org', ] CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': 'localhost:11211', } }
from config.base.settings import * DEBUG = False TEMPLATE_DEBUG = DEBUG ROOT_URLCONF = 'config.prod.urls' WSGI_APPLICATION = "config.prod.wsgi.application" ALLOWED_HOSTS = [ '.censusreporter.org', '.compute-1.amazonaws.com', # allows viewing of instances directly ] # From https://dryan.com/articles/elb-django-allowed-hosts/ import requests EC2_PRIVATE_IP = None try: EC2_PRIVATE_IP = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4', timeout=0.01).text except requests.exceptions.RequestException: pass if EC2_PRIVATE_IP: ALLOWED_HOSTS.append(EC2_PRIVATE_IP) CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': 'localhost:11211', } }
Add support for ELB to ALLOWED_HOSTS
Add support for ELB to ALLOWED_HOSTS
Python
mit
censusreporter/censusreporter,censusreporter/censusreporter,censusreporter/censusreporter,censusreporter/censusreporter
from config.base.settings import * DEBUG = False TEMPLATE_DEBUG = DEBUG ROOT_URLCONF = 'config.prod.urls' WSGI_APPLICATION = "config.prod.wsgi.application" ALLOWED_HOSTS = [ '174.129.183.221', '54.173.179.176', '.censusreporter.org', ] CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': 'localhost:11211', } } Add support for ELB to ALLOWED_HOSTS
from config.base.settings import * DEBUG = False TEMPLATE_DEBUG = DEBUG ROOT_URLCONF = 'config.prod.urls' WSGI_APPLICATION = "config.prod.wsgi.application" ALLOWED_HOSTS = [ '.censusreporter.org', '.compute-1.amazonaws.com', # allows viewing of instances directly ] # From https://dryan.com/articles/elb-django-allowed-hosts/ import requests EC2_PRIVATE_IP = None try: EC2_PRIVATE_IP = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4', timeout=0.01).text except requests.exceptions.RequestException: pass if EC2_PRIVATE_IP: ALLOWED_HOSTS.append(EC2_PRIVATE_IP) CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': 'localhost:11211', } }
<commit_before>from config.base.settings import * DEBUG = False TEMPLATE_DEBUG = DEBUG ROOT_URLCONF = 'config.prod.urls' WSGI_APPLICATION = "config.prod.wsgi.application" ALLOWED_HOSTS = [ '174.129.183.221', '54.173.179.176', '.censusreporter.org', ] CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': 'localhost:11211', } } <commit_msg>Add support for ELB to ALLOWED_HOSTS<commit_after>
from config.base.settings import * DEBUG = False TEMPLATE_DEBUG = DEBUG ROOT_URLCONF = 'config.prod.urls' WSGI_APPLICATION = "config.prod.wsgi.application" ALLOWED_HOSTS = [ '.censusreporter.org', '.compute-1.amazonaws.com', # allows viewing of instances directly ] # From https://dryan.com/articles/elb-django-allowed-hosts/ import requests EC2_PRIVATE_IP = None try: EC2_PRIVATE_IP = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4', timeout=0.01).text except requests.exceptions.RequestException: pass if EC2_PRIVATE_IP: ALLOWED_HOSTS.append(EC2_PRIVATE_IP) CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': 'localhost:11211', } }
from config.base.settings import * DEBUG = False TEMPLATE_DEBUG = DEBUG ROOT_URLCONF = 'config.prod.urls' WSGI_APPLICATION = "config.prod.wsgi.application" ALLOWED_HOSTS = [ '174.129.183.221', '54.173.179.176', '.censusreporter.org', ] CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': 'localhost:11211', } } Add support for ELB to ALLOWED_HOSTSfrom config.base.settings import * DEBUG = False TEMPLATE_DEBUG = DEBUG ROOT_URLCONF = 'config.prod.urls' WSGI_APPLICATION = "config.prod.wsgi.application" ALLOWED_HOSTS = [ '.censusreporter.org', '.compute-1.amazonaws.com', # allows viewing of instances directly ] # From https://dryan.com/articles/elb-django-allowed-hosts/ import requests EC2_PRIVATE_IP = None try: EC2_PRIVATE_IP = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4', timeout=0.01).text except requests.exceptions.RequestException: pass if EC2_PRIVATE_IP: ALLOWED_HOSTS.append(EC2_PRIVATE_IP) CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': 'localhost:11211', } }
<commit_before>from config.base.settings import * DEBUG = False TEMPLATE_DEBUG = DEBUG ROOT_URLCONF = 'config.prod.urls' WSGI_APPLICATION = "config.prod.wsgi.application" ALLOWED_HOSTS = [ '174.129.183.221', '54.173.179.176', '.censusreporter.org', ] CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': 'localhost:11211', } } <commit_msg>Add support for ELB to ALLOWED_HOSTS<commit_after>from config.base.settings import * DEBUG = False TEMPLATE_DEBUG = DEBUG ROOT_URLCONF = 'config.prod.urls' WSGI_APPLICATION = "config.prod.wsgi.application" ALLOWED_HOSTS = [ '.censusreporter.org', '.compute-1.amazonaws.com', # allows viewing of instances directly ] # From https://dryan.com/articles/elb-django-allowed-hosts/ import requests EC2_PRIVATE_IP = None try: EC2_PRIVATE_IP = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4', timeout=0.01).text except requests.exceptions.RequestException: pass if EC2_PRIVATE_IP: ALLOWED_HOSTS.append(EC2_PRIVATE_IP) CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': 'localhost:11211', } }
9c381721f4b4febef64276a2eb83c5a9169f7b8c
meta-analyze.py
meta-analyze.py
#!/usr/bin/env python import argparse def parsed_command_line(): """Returns an object that results from parsing the command-line for this program argparse.ArgumentParser(...).parse_ags() """ parser = argparse.ArgumentParser( description='Run multiple network snp analysis algorithms'); parser.add_argument('--plink_in', type=argparse.FileType('r'), help='Path to a plink association file https://www.cog-genomics.org/plink2/formats#assoc') return parser.parse_args() def input_files(parsed_args): """Returns a list of input files that were passed on the command line parsed_args: the result of parsing the command-line arguments """ if parsed_args.plink_in: print "Plink input: "+str(parsed_args.plink_in.name); parsed = parsed_command_line() input_files(parsed)
#!/usr/bin/env python import argparse class InputFile: """Represents a data in a specified format""" def __init__(self, file_format, path): self.file_format = file_format self.path = path def __repr__(self): return "InputFile('{}','{}')".format(self.file_format, self.path) def parsed_command_line(): """Returns an object that results from parsing the command-line for this program argparse.ArgumentParser(...).parse_ags() """ parser = argparse.ArgumentParser( description='Run multiple network snp analysis algorithms'); parser.add_argument('--plink_assoc_in', type=argparse.FileType('r'), help='Path to a plink association file https://www.cog-genomics.org/plink2/formats#assoc') return parser.parse_args() def input_files(parsed_args): """Returns a list of input files that were passed on the command line parsed_args: the result of parsing the command-line arguments """ files=[] if parsed_args.plink_assoc_in: files.append(InputFile("plink_assoc", parsed_args.plink_assoc_in.name)) return files def plink_assoc_to_networkx(input_path, output_path): """Create a new networkx formatted file at output_path""" pass converters = {('plink_assoc','networkx'):plink_assoc_to_networkx} parsed = parsed_command_line() print ",".join([str(i) for i in input_files(parsed)])
Add input file and converter abstraction
Add input file and converter abstraction
Python
cc0-1.0
NCBI-Hackathons/Network_SNPs,NCBI-Hackathons/Network_SNPs,NCBI-Hackathons/Network_SNPs,NCBI-Hackathons/Network_SNPs,NCBI-Hackathons/Network_SNPs
#!/usr/bin/env python import argparse def parsed_command_line(): """Returns an object that results from parsing the command-line for this program argparse.ArgumentParser(...).parse_ags() """ parser = argparse.ArgumentParser( description='Run multiple network snp analysis algorithms'); parser.add_argument('--plink_in', type=argparse.FileType('r'), help='Path to a plink association file https://www.cog-genomics.org/plink2/formats#assoc') return parser.parse_args() def input_files(parsed_args): """Returns a list of input files that were passed on the command line parsed_args: the result of parsing the command-line arguments """ if parsed_args.plink_in: print "Plink input: "+str(parsed_args.plink_in.name); parsed = parsed_command_line() input_files(parsed) Add input file and converter abstraction
#!/usr/bin/env python import argparse class InputFile: """Represents a data in a specified format""" def __init__(self, file_format, path): self.file_format = file_format self.path = path def __repr__(self): return "InputFile('{}','{}')".format(self.file_format, self.path) def parsed_command_line(): """Returns an object that results from parsing the command-line for this program argparse.ArgumentParser(...).parse_ags() """ parser = argparse.ArgumentParser( description='Run multiple network snp analysis algorithms'); parser.add_argument('--plink_assoc_in', type=argparse.FileType('r'), help='Path to a plink association file https://www.cog-genomics.org/plink2/formats#assoc') return parser.parse_args() def input_files(parsed_args): """Returns a list of input files that were passed on the command line parsed_args: the result of parsing the command-line arguments """ files=[] if parsed_args.plink_assoc_in: files.append(InputFile("plink_assoc", parsed_args.plink_assoc_in.name)) return files def plink_assoc_to_networkx(input_path, output_path): """Create a new networkx formatted file at output_path""" pass converters = {('plink_assoc','networkx'):plink_assoc_to_networkx} parsed = parsed_command_line() print ",".join([str(i) for i in input_files(parsed)])
<commit_before>#!/usr/bin/env python import argparse def parsed_command_line(): """Returns an object that results from parsing the command-line for this program argparse.ArgumentParser(...).parse_ags() """ parser = argparse.ArgumentParser( description='Run multiple network snp analysis algorithms'); parser.add_argument('--plink_in', type=argparse.FileType('r'), help='Path to a plink association file https://www.cog-genomics.org/plink2/formats#assoc') return parser.parse_args() def input_files(parsed_args): """Returns a list of input files that were passed on the command line parsed_args: the result of parsing the command-line arguments """ if parsed_args.plink_in: print "Plink input: "+str(parsed_args.plink_in.name); parsed = parsed_command_line() input_files(parsed) <commit_msg>Add input file and converter abstraction<commit_after>
#!/usr/bin/env python import argparse class InputFile: """Represents a data in a specified format""" def __init__(self, file_format, path): self.file_format = file_format self.path = path def __repr__(self): return "InputFile('{}','{}')".format(self.file_format, self.path) def parsed_command_line(): """Returns an object that results from parsing the command-line for this program argparse.ArgumentParser(...).parse_ags() """ parser = argparse.ArgumentParser( description='Run multiple network snp analysis algorithms'); parser.add_argument('--plink_assoc_in', type=argparse.FileType('r'), help='Path to a plink association file https://www.cog-genomics.org/plink2/formats#assoc') return parser.parse_args() def input_files(parsed_args): """Returns a list of input files that were passed on the command line parsed_args: the result of parsing the command-line arguments """ files=[] if parsed_args.plink_assoc_in: files.append(InputFile("plink_assoc", parsed_args.plink_assoc_in.name)) return files def plink_assoc_to_networkx(input_path, output_path): """Create a new networkx formatted file at output_path""" pass converters = {('plink_assoc','networkx'):plink_assoc_to_networkx} parsed = parsed_command_line() print ",".join([str(i) for i in input_files(parsed)])
#!/usr/bin/env python import argparse def parsed_command_line(): """Returns an object that results from parsing the command-line for this program argparse.ArgumentParser(...).parse_ags() """ parser = argparse.ArgumentParser( description='Run multiple network snp analysis algorithms'); parser.add_argument('--plink_in', type=argparse.FileType('r'), help='Path to a plink association file https://www.cog-genomics.org/plink2/formats#assoc') return parser.parse_args() def input_files(parsed_args): """Returns a list of input files that were passed on the command line parsed_args: the result of parsing the command-line arguments """ if parsed_args.plink_in: print "Plink input: "+str(parsed_args.plink_in.name); parsed = parsed_command_line() input_files(parsed) Add input file and converter abstraction#!/usr/bin/env python import argparse class InputFile: """Represents a data in a specified format""" def __init__(self, file_format, path): self.file_format = file_format self.path = path def __repr__(self): return "InputFile('{}','{}')".format(self.file_format, self.path) def parsed_command_line(): """Returns an object that results from parsing the command-line for this program argparse.ArgumentParser(...).parse_ags() """ parser = argparse.ArgumentParser( description='Run multiple network snp analysis algorithms'); parser.add_argument('--plink_assoc_in', type=argparse.FileType('r'), help='Path to a plink association file https://www.cog-genomics.org/plink2/formats#assoc') return parser.parse_args() def input_files(parsed_args): """Returns a list of input files that were passed on the command line parsed_args: the result of parsing the command-line arguments """ files=[] if parsed_args.plink_assoc_in: files.append(InputFile("plink_assoc", parsed_args.plink_assoc_in.name)) return files def plink_assoc_to_networkx(input_path, output_path): """Create a new networkx formatted file at output_path""" pass converters = {('plink_assoc','networkx'):plink_assoc_to_networkx} parsed = parsed_command_line() print ",".join([str(i) for i in input_files(parsed)])
<commit_before>#!/usr/bin/env python import argparse def parsed_command_line(): """Returns an object that results from parsing the command-line for this program argparse.ArgumentParser(...).parse_ags() """ parser = argparse.ArgumentParser( description='Run multiple network snp analysis algorithms'); parser.add_argument('--plink_in', type=argparse.FileType('r'), help='Path to a plink association file https://www.cog-genomics.org/plink2/formats#assoc') return parser.parse_args() def input_files(parsed_args): """Returns a list of input files that were passed on the command line parsed_args: the result of parsing the command-line arguments """ if parsed_args.plink_in: print "Plink input: "+str(parsed_args.plink_in.name); parsed = parsed_command_line() input_files(parsed) <commit_msg>Add input file and converter abstraction<commit_after>#!/usr/bin/env python import argparse class InputFile: """Represents a data in a specified format""" def __init__(self, file_format, path): self.file_format = file_format self.path = path def __repr__(self): return "InputFile('{}','{}')".format(self.file_format, self.path) def parsed_command_line(): """Returns an object that results from parsing the command-line for this program argparse.ArgumentParser(...).parse_ags() """ parser = argparse.ArgumentParser( description='Run multiple network snp analysis algorithms'); parser.add_argument('--plink_assoc_in', type=argparse.FileType('r'), help='Path to a plink association file https://www.cog-genomics.org/plink2/formats#assoc') return parser.parse_args() def input_files(parsed_args): """Returns a list of input files that were passed on the command line parsed_args: the result of parsing the command-line arguments """ files=[] if parsed_args.plink_assoc_in: files.append(InputFile("plink_assoc", parsed_args.plink_assoc_in.name)) return files def plink_assoc_to_networkx(input_path, output_path): """Create a new networkx formatted file at output_path""" pass converters = {('plink_assoc','networkx'):plink_assoc_to_networkx} parsed = parsed_command_line() print ",".join([str(i) for i in input_files(parsed)])
9a988056944700d6188f6e7164e68dcd35c342d8
databench/analysis.py
databench/analysis.py
"""Analysis module for Databench.""" from flask import Blueprint, render_template import databench.signals LIST_ALL = [] class Analysis(object): """Databench's analysis class. An optional :class:`databench.Signals` instance and :class:`flask.Blueprint` can be dependency-injected, however that should not be necessary for standard use cases. Args: name (str): Name of this analysis. If ``signals`` is not specified, this also becomes the namespace for the Socket.IO connection and has to match the frontend's :js:class:`Databench` ``name``. import_name (str): Usually the file name ``__name__`` where this analysis is instantiated. signals (optional): Inject an instance of :class:`databench.Signals`. blueprint (optional): Inject an instance of a :class:`flask.Blueprint`. """ def __init__( self, name, import_name, signals=None, blueprint=None ): LIST_ALL.append(self) self.name = name self.import_name = import_name if not signals: self.signals = databench.signals.Signals(name) else: self.signals = signals if not blueprint: self.blueprint = Blueprint( name, import_name, template_folder='templates', static_folder='static', ) else: self.blueprint = blueprint self.show_in_index = True @self.blueprint.route('/') def render_index(): """Renders the main analysis frontend template.""" return render_template(self.name+'.html')
"""Analysis module for Databench.""" from flask import Blueprint, render_template import databench.signals LIST_ALL = [] class Analysis(object): """Databench's analysis class. An optional :class:`databench.Signals` instance and :class:`flask.Blueprint` can be dependency-injected, however that should not be necessary for standard use cases. Args: name (str): Name of this analysis. If ``signals`` is not specified, this also becomes the namespace for the Socket.IO connection and has to match the frontend's :js:class:`Databench` ``name``. import_name (str): Usually the file name ``__name__`` where this analysis is instantiated. signals (optional): Inject an instance of :class:`databench.Signals`. blueprint (optional): Inject an instance of a :class:`flask.Blueprint`. """ def __init__( self, name, import_name, signals=None, blueprint=None ): LIST_ALL.append(self) self.show_in_index = True self.name = name self.import_name = import_name if not signals: self.signals = databench.signals.Signals(name) else: self.signals = signals if not blueprint: self.blueprint = Blueprint( name, import_name, template_folder='templates', static_folder='static', ) else: self.blueprint = blueprint self.blueprint.add_url_rule('/', 'render_index', self.render_index) def render_index(self): """Renders the main analysis frontend template.""" return render_template(self.name+'.html')
Move the render_index() function out of the constructor and use add_url_rule() instead of the route() decorator to connect it to Flask.
Move the render_index() function out of the constructor and use add_url_rule() instead of the route() decorator to connect it to Flask.
Python
mit
svenkreiss/databench,svenkreiss/databench,svenkreiss/databench,svenkreiss/databench
"""Analysis module for Databench.""" from flask import Blueprint, render_template import databench.signals LIST_ALL = [] class Analysis(object): """Databench's analysis class. An optional :class:`databench.Signals` instance and :class:`flask.Blueprint` can be dependency-injected, however that should not be necessary for standard use cases. Args: name (str): Name of this analysis. If ``signals`` is not specified, this also becomes the namespace for the Socket.IO connection and has to match the frontend's :js:class:`Databench` ``name``. import_name (str): Usually the file name ``__name__`` where this analysis is instantiated. signals (optional): Inject an instance of :class:`databench.Signals`. blueprint (optional): Inject an instance of a :class:`flask.Blueprint`. """ def __init__( self, name, import_name, signals=None, blueprint=None ): LIST_ALL.append(self) self.name = name self.import_name = import_name if not signals: self.signals = databench.signals.Signals(name) else: self.signals = signals if not blueprint: self.blueprint = Blueprint( name, import_name, template_folder='templates', static_folder='static', ) else: self.blueprint = blueprint self.show_in_index = True @self.blueprint.route('/') def render_index(): """Renders the main analysis frontend template.""" return render_template(self.name+'.html') Move the render_index() function out of the constructor and use add_url_rule() instead of the route() decorator to connect it to Flask.
"""Analysis module for Databench.""" from flask import Blueprint, render_template import databench.signals LIST_ALL = [] class Analysis(object): """Databench's analysis class. An optional :class:`databench.Signals` instance and :class:`flask.Blueprint` can be dependency-injected, however that should not be necessary for standard use cases. Args: name (str): Name of this analysis. If ``signals`` is not specified, this also becomes the namespace for the Socket.IO connection and has to match the frontend's :js:class:`Databench` ``name``. import_name (str): Usually the file name ``__name__`` where this analysis is instantiated. signals (optional): Inject an instance of :class:`databench.Signals`. blueprint (optional): Inject an instance of a :class:`flask.Blueprint`. """ def __init__( self, name, import_name, signals=None, blueprint=None ): LIST_ALL.append(self) self.show_in_index = True self.name = name self.import_name = import_name if not signals: self.signals = databench.signals.Signals(name) else: self.signals = signals if not blueprint: self.blueprint = Blueprint( name, import_name, template_folder='templates', static_folder='static', ) else: self.blueprint = blueprint self.blueprint.add_url_rule('/', 'render_index', self.render_index) def render_index(self): """Renders the main analysis frontend template.""" return render_template(self.name+'.html')
<commit_before>"""Analysis module for Databench.""" from flask import Blueprint, render_template import databench.signals LIST_ALL = [] class Analysis(object): """Databench's analysis class. An optional :class:`databench.Signals` instance and :class:`flask.Blueprint` can be dependency-injected, however that should not be necessary for standard use cases. Args: name (str): Name of this analysis. If ``signals`` is not specified, this also becomes the namespace for the Socket.IO connection and has to match the frontend's :js:class:`Databench` ``name``. import_name (str): Usually the file name ``__name__`` where this analysis is instantiated. signals (optional): Inject an instance of :class:`databench.Signals`. blueprint (optional): Inject an instance of a :class:`flask.Blueprint`. """ def __init__( self, name, import_name, signals=None, blueprint=None ): LIST_ALL.append(self) self.name = name self.import_name = import_name if not signals: self.signals = databench.signals.Signals(name) else: self.signals = signals if not blueprint: self.blueprint = Blueprint( name, import_name, template_folder='templates', static_folder='static', ) else: self.blueprint = blueprint self.show_in_index = True @self.blueprint.route('/') def render_index(): """Renders the main analysis frontend template.""" return render_template(self.name+'.html') <commit_msg>Move the render_index() function out of the constructor and use add_url_rule() instead of the route() decorator to connect it to Flask.<commit_after>
"""Analysis module for Databench.""" from flask import Blueprint, render_template import databench.signals LIST_ALL = [] class Analysis(object): """Databench's analysis class. An optional :class:`databench.Signals` instance and :class:`flask.Blueprint` can be dependency-injected, however that should not be necessary for standard use cases. Args: name (str): Name of this analysis. If ``signals`` is not specified, this also becomes the namespace for the Socket.IO connection and has to match the frontend's :js:class:`Databench` ``name``. import_name (str): Usually the file name ``__name__`` where this analysis is instantiated. signals (optional): Inject an instance of :class:`databench.Signals`. blueprint (optional): Inject an instance of a :class:`flask.Blueprint`. """ def __init__( self, name, import_name, signals=None, blueprint=None ): LIST_ALL.append(self) self.show_in_index = True self.name = name self.import_name = import_name if not signals: self.signals = databench.signals.Signals(name) else: self.signals = signals if not blueprint: self.blueprint = Blueprint( name, import_name, template_folder='templates', static_folder='static', ) else: self.blueprint = blueprint self.blueprint.add_url_rule('/', 'render_index', self.render_index) def render_index(self): """Renders the main analysis frontend template.""" return render_template(self.name+'.html')
"""Analysis module for Databench.""" from flask import Blueprint, render_template import databench.signals LIST_ALL = [] class Analysis(object): """Databench's analysis class. An optional :class:`databench.Signals` instance and :class:`flask.Blueprint` can be dependency-injected, however that should not be necessary for standard use cases. Args: name (str): Name of this analysis. If ``signals`` is not specified, this also becomes the namespace for the Socket.IO connection and has to match the frontend's :js:class:`Databench` ``name``. import_name (str): Usually the file name ``__name__`` where this analysis is instantiated. signals (optional): Inject an instance of :class:`databench.Signals`. blueprint (optional): Inject an instance of a :class:`flask.Blueprint`. """ def __init__( self, name, import_name, signals=None, blueprint=None ): LIST_ALL.append(self) self.name = name self.import_name = import_name if not signals: self.signals = databench.signals.Signals(name) else: self.signals = signals if not blueprint: self.blueprint = Blueprint( name, import_name, template_folder='templates', static_folder='static', ) else: self.blueprint = blueprint self.show_in_index = True @self.blueprint.route('/') def render_index(): """Renders the main analysis frontend template.""" return render_template(self.name+'.html') Move the render_index() function out of the constructor and use add_url_rule() instead of the route() decorator to connect it to Flask."""Analysis module for Databench.""" from flask import Blueprint, render_template import databench.signals LIST_ALL = [] class Analysis(object): """Databench's analysis class. An optional :class:`databench.Signals` instance and :class:`flask.Blueprint` can be dependency-injected, however that should not be necessary for standard use cases. Args: name (str): Name of this analysis. If ``signals`` is not specified, this also becomes the namespace for the Socket.IO connection and has to match the frontend's :js:class:`Databench` ``name``. import_name (str): Usually the file name ``__name__`` where this analysis is instantiated. signals (optional): Inject an instance of :class:`databench.Signals`. blueprint (optional): Inject an instance of a :class:`flask.Blueprint`. """ def __init__( self, name, import_name, signals=None, blueprint=None ): LIST_ALL.append(self) self.show_in_index = True self.name = name self.import_name = import_name if not signals: self.signals = databench.signals.Signals(name) else: self.signals = signals if not blueprint: self.blueprint = Blueprint( name, import_name, template_folder='templates', static_folder='static', ) else: self.blueprint = blueprint self.blueprint.add_url_rule('/', 'render_index', self.render_index) def render_index(self): """Renders the main analysis frontend template.""" return render_template(self.name+'.html')
<commit_before>"""Analysis module for Databench.""" from flask import Blueprint, render_template import databench.signals LIST_ALL = [] class Analysis(object): """Databench's analysis class. An optional :class:`databench.Signals` instance and :class:`flask.Blueprint` can be dependency-injected, however that should not be necessary for standard use cases. Args: name (str): Name of this analysis. If ``signals`` is not specified, this also becomes the namespace for the Socket.IO connection and has to match the frontend's :js:class:`Databench` ``name``. import_name (str): Usually the file name ``__name__`` where this analysis is instantiated. signals (optional): Inject an instance of :class:`databench.Signals`. blueprint (optional): Inject an instance of a :class:`flask.Blueprint`. """ def __init__( self, name, import_name, signals=None, blueprint=None ): LIST_ALL.append(self) self.name = name self.import_name = import_name if not signals: self.signals = databench.signals.Signals(name) else: self.signals = signals if not blueprint: self.blueprint = Blueprint( name, import_name, template_folder='templates', static_folder='static', ) else: self.blueprint = blueprint self.show_in_index = True @self.blueprint.route('/') def render_index(): """Renders the main analysis frontend template.""" return render_template(self.name+'.html') <commit_msg>Move the render_index() function out of the constructor and use add_url_rule() instead of the route() decorator to connect it to Flask.<commit_after>"""Analysis module for Databench.""" from flask import Blueprint, render_template import databench.signals LIST_ALL = [] class Analysis(object): """Databench's analysis class. An optional :class:`databench.Signals` instance and :class:`flask.Blueprint` can be dependency-injected, however that should not be necessary for standard use cases. Args: name (str): Name of this analysis. If ``signals`` is not specified, this also becomes the namespace for the Socket.IO connection and has to match the frontend's :js:class:`Databench` ``name``. import_name (str): Usually the file name ``__name__`` where this analysis is instantiated. signals (optional): Inject an instance of :class:`databench.Signals`. blueprint (optional): Inject an instance of a :class:`flask.Blueprint`. """ def __init__( self, name, import_name, signals=None, blueprint=None ): LIST_ALL.append(self) self.show_in_index = True self.name = name self.import_name = import_name if not signals: self.signals = databench.signals.Signals(name) else: self.signals = signals if not blueprint: self.blueprint = Blueprint( name, import_name, template_folder='templates', static_folder='static', ) else: self.blueprint = blueprint self.blueprint.add_url_rule('/', 'render_index', self.render_index) def render_index(self): """Renders the main analysis frontend template.""" return render_template(self.name+'.html')
90375db8488bc50b57bce9b10a2274ec3dc81787
cyder/base/forms.py
cyder/base/forms.py
from django import forms class BugReportForm(forms.Form): bug = forms.CharField(label="Bug (required)", required=True) description = forms.CharField( label="Description (required)", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True) reproduce = forms.CharField( label="How to reproduce the error", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) expected = forms.CharField( label="The expected result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) actual = forms.CharField( label="The actual result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) session_data = forms.CharField(widget=forms.HiddenInput()) class EditUserForm(forms.Form): user = forms.CharField( widget=forms.TextInput(attrs={'id': 'user-searchbox'})) action = forms.ChoiceField( widget=forms.RadioSelect, choices=( ('Promote', 'Promote to Superuser'), ('Demote', 'Demote from Superuser'), ('Create', 'Create a user'), ('Delete', 'Permanently delete user'))) def strip_charfield(self, value): return charfield_clean(self, value.strip()) charfield_clean = forms.fields.CharField.clean forms.fields.CharField.clean = strip_charfield
from django import forms class BugReportForm(forms.Form): bug = forms.CharField(label="Bug (required)", required=True) description = forms.CharField( label="Description (required)", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True) reproduce = forms.CharField( label="How to reproduce the error", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) expected = forms.CharField( label="The expected result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) actual = forms.CharField( label="The actual result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) session_data = forms.CharField(widget=forms.HiddenInput()) class EditUserForm(forms.Form): user = forms.CharField( widget=forms.TextInput(attrs={'id': 'user-searchbox'})) action = forms.ChoiceField( widget=forms.RadioSelect, choices=( ('Promote', 'Promote to Superuser'), ('Demote', 'Demote from Superuser'), ('Create', 'Create a user'), ('Delete', 'Permanently delete user'))) charfield_clean = forms.fields.CharField.clean def strip_charfield(self, value): if hasattr(value, 'strip'): value = value.strip() return charfield_clean(self, value) forms.fields.CharField.clean = strip_charfield
Check for strip attr for none values
Check for strip attr for none values
Python
bsd-3-clause
murrown/cyder,akeym/cyder,murrown/cyder,OSU-Net/cyder,akeym/cyder,OSU-Net/cyder,akeym/cyder,drkitty/cyder,zeeman/cyder,drkitty/cyder,drkitty/cyder,murrown/cyder,drkitty/cyder,akeym/cyder,OSU-Net/cyder,OSU-Net/cyder,zeeman/cyder,zeeman/cyder,murrown/cyder,zeeman/cyder
from django import forms class BugReportForm(forms.Form): bug = forms.CharField(label="Bug (required)", required=True) description = forms.CharField( label="Description (required)", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True) reproduce = forms.CharField( label="How to reproduce the error", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) expected = forms.CharField( label="The expected result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) actual = forms.CharField( label="The actual result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) session_data = forms.CharField(widget=forms.HiddenInput()) class EditUserForm(forms.Form): user = forms.CharField( widget=forms.TextInput(attrs={'id': 'user-searchbox'})) action = forms.ChoiceField( widget=forms.RadioSelect, choices=( ('Promote', 'Promote to Superuser'), ('Demote', 'Demote from Superuser'), ('Create', 'Create a user'), ('Delete', 'Permanently delete user'))) def strip_charfield(self, value): return charfield_clean(self, value.strip()) charfield_clean = forms.fields.CharField.clean forms.fields.CharField.clean = strip_charfield Check for strip attr for none values
from django import forms class BugReportForm(forms.Form): bug = forms.CharField(label="Bug (required)", required=True) description = forms.CharField( label="Description (required)", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True) reproduce = forms.CharField( label="How to reproduce the error", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) expected = forms.CharField( label="The expected result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) actual = forms.CharField( label="The actual result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) session_data = forms.CharField(widget=forms.HiddenInput()) class EditUserForm(forms.Form): user = forms.CharField( widget=forms.TextInput(attrs={'id': 'user-searchbox'})) action = forms.ChoiceField( widget=forms.RadioSelect, choices=( ('Promote', 'Promote to Superuser'), ('Demote', 'Demote from Superuser'), ('Create', 'Create a user'), ('Delete', 'Permanently delete user'))) charfield_clean = forms.fields.CharField.clean def strip_charfield(self, value): if hasattr(value, 'strip'): value = value.strip() return charfield_clean(self, value) forms.fields.CharField.clean = strip_charfield
<commit_before>from django import forms class BugReportForm(forms.Form): bug = forms.CharField(label="Bug (required)", required=True) description = forms.CharField( label="Description (required)", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True) reproduce = forms.CharField( label="How to reproduce the error", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) expected = forms.CharField( label="The expected result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) actual = forms.CharField( label="The actual result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) session_data = forms.CharField(widget=forms.HiddenInput()) class EditUserForm(forms.Form): user = forms.CharField( widget=forms.TextInput(attrs={'id': 'user-searchbox'})) action = forms.ChoiceField( widget=forms.RadioSelect, choices=( ('Promote', 'Promote to Superuser'), ('Demote', 'Demote from Superuser'), ('Create', 'Create a user'), ('Delete', 'Permanently delete user'))) def strip_charfield(self, value): return charfield_clean(self, value.strip()) charfield_clean = forms.fields.CharField.clean forms.fields.CharField.clean = strip_charfield <commit_msg>Check for strip attr for none values<commit_after>
from django import forms class BugReportForm(forms.Form): bug = forms.CharField(label="Bug (required)", required=True) description = forms.CharField( label="Description (required)", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True) reproduce = forms.CharField( label="How to reproduce the error", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) expected = forms.CharField( label="The expected result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) actual = forms.CharField( label="The actual result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) session_data = forms.CharField(widget=forms.HiddenInput()) class EditUserForm(forms.Form): user = forms.CharField( widget=forms.TextInput(attrs={'id': 'user-searchbox'})) action = forms.ChoiceField( widget=forms.RadioSelect, choices=( ('Promote', 'Promote to Superuser'), ('Demote', 'Demote from Superuser'), ('Create', 'Create a user'), ('Delete', 'Permanently delete user'))) charfield_clean = forms.fields.CharField.clean def strip_charfield(self, value): if hasattr(value, 'strip'): value = value.strip() return charfield_clean(self, value) forms.fields.CharField.clean = strip_charfield
from django import forms class BugReportForm(forms.Form): bug = forms.CharField(label="Bug (required)", required=True) description = forms.CharField( label="Description (required)", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True) reproduce = forms.CharField( label="How to reproduce the error", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) expected = forms.CharField( label="The expected result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) actual = forms.CharField( label="The actual result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) session_data = forms.CharField(widget=forms.HiddenInput()) class EditUserForm(forms.Form): user = forms.CharField( widget=forms.TextInput(attrs={'id': 'user-searchbox'})) action = forms.ChoiceField( widget=forms.RadioSelect, choices=( ('Promote', 'Promote to Superuser'), ('Demote', 'Demote from Superuser'), ('Create', 'Create a user'), ('Delete', 'Permanently delete user'))) def strip_charfield(self, value): return charfield_clean(self, value.strip()) charfield_clean = forms.fields.CharField.clean forms.fields.CharField.clean = strip_charfield Check for strip attr for none valuesfrom django import forms class BugReportForm(forms.Form): bug = forms.CharField(label="Bug (required)", required=True) description = forms.CharField( label="Description (required)", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True) reproduce = forms.CharField( label="How to reproduce the error", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) expected = forms.CharField( label="The expected result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) actual = forms.CharField( label="The actual result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) session_data = forms.CharField(widget=forms.HiddenInput()) class EditUserForm(forms.Form): user = forms.CharField( widget=forms.TextInput(attrs={'id': 'user-searchbox'})) action = forms.ChoiceField( widget=forms.RadioSelect, choices=( ('Promote', 'Promote to Superuser'), ('Demote', 'Demote from Superuser'), ('Create', 'Create a user'), ('Delete', 'Permanently delete user'))) charfield_clean = forms.fields.CharField.clean def strip_charfield(self, value): if hasattr(value, 'strip'): value = value.strip() return charfield_clean(self, value) forms.fields.CharField.clean = strip_charfield
<commit_before>from django import forms class BugReportForm(forms.Form): bug = forms.CharField(label="Bug (required)", required=True) description = forms.CharField( label="Description (required)", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True) reproduce = forms.CharField( label="How to reproduce the error", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) expected = forms.CharField( label="The expected result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) actual = forms.CharField( label="The actual result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) session_data = forms.CharField(widget=forms.HiddenInput()) class EditUserForm(forms.Form): user = forms.CharField( widget=forms.TextInput(attrs={'id': 'user-searchbox'})) action = forms.ChoiceField( widget=forms.RadioSelect, choices=( ('Promote', 'Promote to Superuser'), ('Demote', 'Demote from Superuser'), ('Create', 'Create a user'), ('Delete', 'Permanently delete user'))) def strip_charfield(self, value): return charfield_clean(self, value.strip()) charfield_clean = forms.fields.CharField.clean forms.fields.CharField.clean = strip_charfield <commit_msg>Check for strip attr for none values<commit_after>from django import forms class BugReportForm(forms.Form): bug = forms.CharField(label="Bug (required)", required=True) description = forms.CharField( label="Description (required)", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True) reproduce = forms.CharField( label="How to reproduce the error", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) expected = forms.CharField( label="The expected result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) actual = forms.CharField( label="The actual result", widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False) session_data = forms.CharField(widget=forms.HiddenInput()) class EditUserForm(forms.Form): user = forms.CharField( widget=forms.TextInput(attrs={'id': 'user-searchbox'})) action = forms.ChoiceField( widget=forms.RadioSelect, choices=( ('Promote', 'Promote to Superuser'), ('Demote', 'Demote from Superuser'), ('Create', 'Create a user'), ('Delete', 'Permanently delete user'))) charfield_clean = forms.fields.CharField.clean def strip_charfield(self, value): if hasattr(value, 'strip'): value = value.strip() return charfield_clean(self, value) forms.fields.CharField.clean = strip_charfield
c23787680c40cc7f871f23e920486d07452d2cf3
traits/__init__.py
traits/__init__.py
from __future__ import absolute_import __version__ = '4.3.0'
from __future__ import absolute_import __version__ = '4.3.0' # Add a NullHandler so 'traits' loggers don't complain when they get used. import logging class NullHandler(logging.Handler): def handle(self, record): pass def emit(self, record): pass def createLock(self): self.lock = None logger = logging.getLogger(__name__) logger.addHandler(NullHandler()) del logging, logger, NullHandler
Use a NullHandler for all 'traits' loggers per best practice for logging.
FIX: Use a NullHandler for all 'traits' loggers per best practice for logging.
Python
bsd-3-clause
burnpanck/traits,burnpanck/traits
from __future__ import absolute_import __version__ = '4.3.0' FIX: Use a NullHandler for all 'traits' loggers per best practice for logging.
from __future__ import absolute_import __version__ = '4.3.0' # Add a NullHandler so 'traits' loggers don't complain when they get used. import logging class NullHandler(logging.Handler): def handle(self, record): pass def emit(self, record): pass def createLock(self): self.lock = None logger = logging.getLogger(__name__) logger.addHandler(NullHandler()) del logging, logger, NullHandler
<commit_before>from __future__ import absolute_import __version__ = '4.3.0' <commit_msg>FIX: Use a NullHandler for all 'traits' loggers per best practice for logging.<commit_after>
from __future__ import absolute_import __version__ = '4.3.0' # Add a NullHandler so 'traits' loggers don't complain when they get used. import logging class NullHandler(logging.Handler): def handle(self, record): pass def emit(self, record): pass def createLock(self): self.lock = None logger = logging.getLogger(__name__) logger.addHandler(NullHandler()) del logging, logger, NullHandler
from __future__ import absolute_import __version__ = '4.3.0' FIX: Use a NullHandler for all 'traits' loggers per best practice for logging.from __future__ import absolute_import __version__ = '4.3.0' # Add a NullHandler so 'traits' loggers don't complain when they get used. import logging class NullHandler(logging.Handler): def handle(self, record): pass def emit(self, record): pass def createLock(self): self.lock = None logger = logging.getLogger(__name__) logger.addHandler(NullHandler()) del logging, logger, NullHandler
<commit_before>from __future__ import absolute_import __version__ = '4.3.0' <commit_msg>FIX: Use a NullHandler for all 'traits' loggers per best practice for logging.<commit_after>from __future__ import absolute_import __version__ = '4.3.0' # Add a NullHandler so 'traits' loggers don't complain when they get used. import logging class NullHandler(logging.Handler): def handle(self, record): pass def emit(self, record): pass def createLock(self): self.lock = None logger = logging.getLogger(__name__) logger.addHandler(NullHandler()) del logging, logger, NullHandler
667294dcc3b8ab34618ad674c2b6ac8efeec0620
places/admin.py
places/admin.py
from django.contrib.gis import admin from models import Place admin.site.register(Place, admin.OSMGeoAdmin)
from django.contrib.gis import admin from models import Place try: _model_admin = admin.OSMGeoAdmin except AttributeError: _model_admin = admin.ModelAdmin admin.site.register(Place, _model_admin)
Make it possible to run dev server on my desktop.
Make it possible to run dev server on my desktop. While I'm accessing a suitable database remotely, I don't have enough stuff installed locally to have OSMGeoAdmin (no GDAL installed, for example).
Python
bsd-3-clause
MAPC/masshealth,MAPC/masshealth
from django.contrib.gis import admin from models import Place admin.site.register(Place, admin.OSMGeoAdmin) Make it possible to run dev server on my desktop. While I'm accessing a suitable database remotely, I don't have enough stuff installed locally to have OSMGeoAdmin (no GDAL installed, for example).
from django.contrib.gis import admin from models import Place try: _model_admin = admin.OSMGeoAdmin except AttributeError: _model_admin = admin.ModelAdmin admin.site.register(Place, _model_admin)
<commit_before>from django.contrib.gis import admin from models import Place admin.site.register(Place, admin.OSMGeoAdmin) <commit_msg>Make it possible to run dev server on my desktop. While I'm accessing a suitable database remotely, I don't have enough stuff installed locally to have OSMGeoAdmin (no GDAL installed, for example).<commit_after>
from django.contrib.gis import admin from models import Place try: _model_admin = admin.OSMGeoAdmin except AttributeError: _model_admin = admin.ModelAdmin admin.site.register(Place, _model_admin)
from django.contrib.gis import admin from models import Place admin.site.register(Place, admin.OSMGeoAdmin) Make it possible to run dev server on my desktop. While I'm accessing a suitable database remotely, I don't have enough stuff installed locally to have OSMGeoAdmin (no GDAL installed, for example).from django.contrib.gis import admin from models import Place try: _model_admin = admin.OSMGeoAdmin except AttributeError: _model_admin = admin.ModelAdmin admin.site.register(Place, _model_admin)
<commit_before>from django.contrib.gis import admin from models import Place admin.site.register(Place, admin.OSMGeoAdmin) <commit_msg>Make it possible to run dev server on my desktop. While I'm accessing a suitable database remotely, I don't have enough stuff installed locally to have OSMGeoAdmin (no GDAL installed, for example).<commit_after>from django.contrib.gis import admin from models import Place try: _model_admin = admin.OSMGeoAdmin except AttributeError: _model_admin = admin.ModelAdmin admin.site.register(Place, _model_admin)
e8b44733ff44162f4a01de76b66046af23a9c946
tcconfig/_error.py
tcconfig/_error.py
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import class NetworkInterfaceNotFoundError(Exception): """ Exception raised when network interface not found. """ class ModuleNotFoundError(Exception): """ Exception raised when mandatory kernel module not found. """ class TcCommandExecutionError(Exception): """ Exception raised when failed to execute a ``tc`` command. """ class TcAlreadyExist(TcCommandExecutionError): """ Exception raised when a traffic shaping rule already exist. """ class EmptyParameterError(ValueError): """ Exception raised when a parameter value is empty value. """ class InvalidParameterError(ValueError): """ Exception raised when an invalid parameter specified for a traffic shaping rule. """ class UnitNotFoundError(InvalidParameterError): """ """
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import class NetworkInterfaceNotFoundError(Exception): """ Exception raised when network interface not found. """ class ModuleNotFoundError(Exception): """ Exception raised when mandatory kernel module not found. """ class TcCommandExecutionError(Exception): """ Exception raised when failed to execute a ``tc`` command. """ class TcAlreadyExist(TcCommandExecutionError): """ Exception raised when a traffic shaping rule already exist. """ class EmptyParameterError(ValueError): """ Exception raised when a parameter value is empty value. """ class InvalidParameterError(ValueError): """ Exception raised when an invalid parameter specified for a traffic shaping rule. """ def __init__(self, *args, **kwargs): self.__value = kwargs.pop("value", None) self.__expected = kwargs.pop("expected", None) super(ValueError, self).__init__(*args) def __str__(self, *args, **kwargs): item_list = [ValueError.__str__(self, *args, **kwargs)] extra_item_list = [] if self.__expected: extra_item_list.append("expected={}".format(self.__expected)) if self.__value: extra_item_list.append("value={}".format(self.__value)) if extra_item_list: item_list.extend([":", ", ".join(extra_item_list)]) return " ".join(item_list) def __repr__(self, *args, **kwargs): return self.__str__(*args, **kwargs) class UnitNotFoundError(InvalidParameterError): """ """
Add custom arguments for InvalidParameterError
Add custom arguments for InvalidParameterError
Python
mit
thombashi/tcconfig,thombashi/tcconfig
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import class NetworkInterfaceNotFoundError(Exception): """ Exception raised when network interface not found. """ class ModuleNotFoundError(Exception): """ Exception raised when mandatory kernel module not found. """ class TcCommandExecutionError(Exception): """ Exception raised when failed to execute a ``tc`` command. """ class TcAlreadyExist(TcCommandExecutionError): """ Exception raised when a traffic shaping rule already exist. """ class EmptyParameterError(ValueError): """ Exception raised when a parameter value is empty value. """ class InvalidParameterError(ValueError): """ Exception raised when an invalid parameter specified for a traffic shaping rule. """ class UnitNotFoundError(InvalidParameterError): """ """ Add custom arguments for InvalidParameterError
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import class NetworkInterfaceNotFoundError(Exception): """ Exception raised when network interface not found. """ class ModuleNotFoundError(Exception): """ Exception raised when mandatory kernel module not found. """ class TcCommandExecutionError(Exception): """ Exception raised when failed to execute a ``tc`` command. """ class TcAlreadyExist(TcCommandExecutionError): """ Exception raised when a traffic shaping rule already exist. """ class EmptyParameterError(ValueError): """ Exception raised when a parameter value is empty value. """ class InvalidParameterError(ValueError): """ Exception raised when an invalid parameter specified for a traffic shaping rule. """ def __init__(self, *args, **kwargs): self.__value = kwargs.pop("value", None) self.__expected = kwargs.pop("expected", None) super(ValueError, self).__init__(*args) def __str__(self, *args, **kwargs): item_list = [ValueError.__str__(self, *args, **kwargs)] extra_item_list = [] if self.__expected: extra_item_list.append("expected={}".format(self.__expected)) if self.__value: extra_item_list.append("value={}".format(self.__value)) if extra_item_list: item_list.extend([":", ", ".join(extra_item_list)]) return " ".join(item_list) def __repr__(self, *args, **kwargs): return self.__str__(*args, **kwargs) class UnitNotFoundError(InvalidParameterError): """ """
<commit_before># encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import class NetworkInterfaceNotFoundError(Exception): """ Exception raised when network interface not found. """ class ModuleNotFoundError(Exception): """ Exception raised when mandatory kernel module not found. """ class TcCommandExecutionError(Exception): """ Exception raised when failed to execute a ``tc`` command. """ class TcAlreadyExist(TcCommandExecutionError): """ Exception raised when a traffic shaping rule already exist. """ class EmptyParameterError(ValueError): """ Exception raised when a parameter value is empty value. """ class InvalidParameterError(ValueError): """ Exception raised when an invalid parameter specified for a traffic shaping rule. """ class UnitNotFoundError(InvalidParameterError): """ """ <commit_msg>Add custom arguments for InvalidParameterError<commit_after>
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import class NetworkInterfaceNotFoundError(Exception): """ Exception raised when network interface not found. """ class ModuleNotFoundError(Exception): """ Exception raised when mandatory kernel module not found. """ class TcCommandExecutionError(Exception): """ Exception raised when failed to execute a ``tc`` command. """ class TcAlreadyExist(TcCommandExecutionError): """ Exception raised when a traffic shaping rule already exist. """ class EmptyParameterError(ValueError): """ Exception raised when a parameter value is empty value. """ class InvalidParameterError(ValueError): """ Exception raised when an invalid parameter specified for a traffic shaping rule. """ def __init__(self, *args, **kwargs): self.__value = kwargs.pop("value", None) self.__expected = kwargs.pop("expected", None) super(ValueError, self).__init__(*args) def __str__(self, *args, **kwargs): item_list = [ValueError.__str__(self, *args, **kwargs)] extra_item_list = [] if self.__expected: extra_item_list.append("expected={}".format(self.__expected)) if self.__value: extra_item_list.append("value={}".format(self.__value)) if extra_item_list: item_list.extend([":", ", ".join(extra_item_list)]) return " ".join(item_list) def __repr__(self, *args, **kwargs): return self.__str__(*args, **kwargs) class UnitNotFoundError(InvalidParameterError): """ """
# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import class NetworkInterfaceNotFoundError(Exception): """ Exception raised when network interface not found. """ class ModuleNotFoundError(Exception): """ Exception raised when mandatory kernel module not found. """ class TcCommandExecutionError(Exception): """ Exception raised when failed to execute a ``tc`` command. """ class TcAlreadyExist(TcCommandExecutionError): """ Exception raised when a traffic shaping rule already exist. """ class EmptyParameterError(ValueError): """ Exception raised when a parameter value is empty value. """ class InvalidParameterError(ValueError): """ Exception raised when an invalid parameter specified for a traffic shaping rule. """ class UnitNotFoundError(InvalidParameterError): """ """ Add custom arguments for InvalidParameterError# encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import class NetworkInterfaceNotFoundError(Exception): """ Exception raised when network interface not found. """ class ModuleNotFoundError(Exception): """ Exception raised when mandatory kernel module not found. """ class TcCommandExecutionError(Exception): """ Exception raised when failed to execute a ``tc`` command. """ class TcAlreadyExist(TcCommandExecutionError): """ Exception raised when a traffic shaping rule already exist. """ class EmptyParameterError(ValueError): """ Exception raised when a parameter value is empty value. """ class InvalidParameterError(ValueError): """ Exception raised when an invalid parameter specified for a traffic shaping rule. """ def __init__(self, *args, **kwargs): self.__value = kwargs.pop("value", None) self.__expected = kwargs.pop("expected", None) super(ValueError, self).__init__(*args) def __str__(self, *args, **kwargs): item_list = [ValueError.__str__(self, *args, **kwargs)] extra_item_list = [] if self.__expected: extra_item_list.append("expected={}".format(self.__expected)) if self.__value: extra_item_list.append("value={}".format(self.__value)) if extra_item_list: item_list.extend([":", ", ".join(extra_item_list)]) return " ".join(item_list) def __repr__(self, *args, **kwargs): return self.__str__(*args, **kwargs) class UnitNotFoundError(InvalidParameterError): """ """
<commit_before># encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import class NetworkInterfaceNotFoundError(Exception): """ Exception raised when network interface not found. """ class ModuleNotFoundError(Exception): """ Exception raised when mandatory kernel module not found. """ class TcCommandExecutionError(Exception): """ Exception raised when failed to execute a ``tc`` command. """ class TcAlreadyExist(TcCommandExecutionError): """ Exception raised when a traffic shaping rule already exist. """ class EmptyParameterError(ValueError): """ Exception raised when a parameter value is empty value. """ class InvalidParameterError(ValueError): """ Exception raised when an invalid parameter specified for a traffic shaping rule. """ class UnitNotFoundError(InvalidParameterError): """ """ <commit_msg>Add custom arguments for InvalidParameterError<commit_after># encoding: utf-8 """ .. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com> """ from __future__ import absolute_import class NetworkInterfaceNotFoundError(Exception): """ Exception raised when network interface not found. """ class ModuleNotFoundError(Exception): """ Exception raised when mandatory kernel module not found. """ class TcCommandExecutionError(Exception): """ Exception raised when failed to execute a ``tc`` command. """ class TcAlreadyExist(TcCommandExecutionError): """ Exception raised when a traffic shaping rule already exist. """ class EmptyParameterError(ValueError): """ Exception raised when a parameter value is empty value. """ class InvalidParameterError(ValueError): """ Exception raised when an invalid parameter specified for a traffic shaping rule. """ def __init__(self, *args, **kwargs): self.__value = kwargs.pop("value", None) self.__expected = kwargs.pop("expected", None) super(ValueError, self).__init__(*args) def __str__(self, *args, **kwargs): item_list = [ValueError.__str__(self, *args, **kwargs)] extra_item_list = [] if self.__expected: extra_item_list.append("expected={}".format(self.__expected)) if self.__value: extra_item_list.append("value={}".format(self.__value)) if extra_item_list: item_list.extend([":", ", ".join(extra_item_list)]) return " ".join(item_list) def __repr__(self, *args, **kwargs): return self.__str__(*args, **kwargs) class UnitNotFoundError(InvalidParameterError): """ """
1dd8e7ddfccd657fde2697fc1e39da7fb9c3548f
alg_insertion_sort.py
alg_insertion_sort.py
from __future__ import absolute_import from __future__ import print_function from __future__ import division def insertion_sort(a_list): """Insertion Sort algortihm.""" for index in range(1, len(a_list)): current_value = a_list[index] position = index while position > 0 and a_list[position - 1] > current_value: a_list[position] = a_list[position - 1] position -= 1 a_list[position] = current_value def main(): a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20] print('a_list: \n{}'.format(a_list)) print('By insertion sort: ') insertion_sort(a_list) print(a_list) if __name__ == '__main__': main()
from __future__ import absolute_import from __future__ import print_function from __future__ import division def insertion_sort(a_list): """Insertion Sort algortihm. Time complexity: O(n^2). """ for index in range(1, len(a_list)): current_value = a_list[index] position = index while position > 0 and a_list[position - 1] > current_value: a_list[position] = a_list[position - 1] position -= 1 a_list[position] = current_value def main(): a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20] print('a_list: \n{}'.format(a_list)) print('By insertion sort: ') insertion_sort(a_list) print(a_list) if __name__ == '__main__': main()
Add to doc string: time complexity
Add to doc string: time complexity
Python
bsd-2-clause
bowen0701/algorithms_data_structures
from __future__ import absolute_import from __future__ import print_function from __future__ import division def insertion_sort(a_list): """Insertion Sort algortihm.""" for index in range(1, len(a_list)): current_value = a_list[index] position = index while position > 0 and a_list[position - 1] > current_value: a_list[position] = a_list[position - 1] position -= 1 a_list[position] = current_value def main(): a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20] print('a_list: \n{}'.format(a_list)) print('By insertion sort: ') insertion_sort(a_list) print(a_list) if __name__ == '__main__': main() Add to doc string: time complexity
from __future__ import absolute_import from __future__ import print_function from __future__ import division def insertion_sort(a_list): """Insertion Sort algortihm. Time complexity: O(n^2). """ for index in range(1, len(a_list)): current_value = a_list[index] position = index while position > 0 and a_list[position - 1] > current_value: a_list[position] = a_list[position - 1] position -= 1 a_list[position] = current_value def main(): a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20] print('a_list: \n{}'.format(a_list)) print('By insertion sort: ') insertion_sort(a_list) print(a_list) if __name__ == '__main__': main()
<commit_before>from __future__ import absolute_import from __future__ import print_function from __future__ import division def insertion_sort(a_list): """Insertion Sort algortihm.""" for index in range(1, len(a_list)): current_value = a_list[index] position = index while position > 0 and a_list[position - 1] > current_value: a_list[position] = a_list[position - 1] position -= 1 a_list[position] = current_value def main(): a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20] print('a_list: \n{}'.format(a_list)) print('By insertion sort: ') insertion_sort(a_list) print(a_list) if __name__ == '__main__': main() <commit_msg>Add to doc string: time complexity<commit_after>
from __future__ import absolute_import from __future__ import print_function from __future__ import division def insertion_sort(a_list): """Insertion Sort algortihm. Time complexity: O(n^2). """ for index in range(1, len(a_list)): current_value = a_list[index] position = index while position > 0 and a_list[position - 1] > current_value: a_list[position] = a_list[position - 1] position -= 1 a_list[position] = current_value def main(): a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20] print('a_list: \n{}'.format(a_list)) print('By insertion sort: ') insertion_sort(a_list) print(a_list) if __name__ == '__main__': main()
from __future__ import absolute_import from __future__ import print_function from __future__ import division def insertion_sort(a_list): """Insertion Sort algortihm.""" for index in range(1, len(a_list)): current_value = a_list[index] position = index while position > 0 and a_list[position - 1] > current_value: a_list[position] = a_list[position - 1] position -= 1 a_list[position] = current_value def main(): a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20] print('a_list: \n{}'.format(a_list)) print('By insertion sort: ') insertion_sort(a_list) print(a_list) if __name__ == '__main__': main() Add to doc string: time complexityfrom __future__ import absolute_import from __future__ import print_function from __future__ import division def insertion_sort(a_list): """Insertion Sort algortihm. Time complexity: O(n^2). """ for index in range(1, len(a_list)): current_value = a_list[index] position = index while position > 0 and a_list[position - 1] > current_value: a_list[position] = a_list[position - 1] position -= 1 a_list[position] = current_value def main(): a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20] print('a_list: \n{}'.format(a_list)) print('By insertion sort: ') insertion_sort(a_list) print(a_list) if __name__ == '__main__': main()
<commit_before>from __future__ import absolute_import from __future__ import print_function from __future__ import division def insertion_sort(a_list): """Insertion Sort algortihm.""" for index in range(1, len(a_list)): current_value = a_list[index] position = index while position > 0 and a_list[position - 1] > current_value: a_list[position] = a_list[position - 1] position -= 1 a_list[position] = current_value def main(): a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20] print('a_list: \n{}'.format(a_list)) print('By insertion sort: ') insertion_sort(a_list) print(a_list) if __name__ == '__main__': main() <commit_msg>Add to doc string: time complexity<commit_after>from __future__ import absolute_import from __future__ import print_function from __future__ import division def insertion_sort(a_list): """Insertion Sort algortihm. Time complexity: O(n^2). """ for index in range(1, len(a_list)): current_value = a_list[index] position = index while position > 0 and a_list[position - 1] > current_value: a_list[position] = a_list[position - 1] position -= 1 a_list[position] = current_value def main(): a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20] print('a_list: \n{}'.format(a_list)) print('By insertion sort: ') insertion_sort(a_list) print(a_list) if __name__ == '__main__': main()
1cbab715a647689aeda4679d7dcf4e60ff9ab5b1
api/webview/models.py
api/webview/models.py
from django.db import models from django_pgjson.fields import JsonField class Document(models.Model): source = models.CharField(max_length=100) docID = models.CharField(max_length=100) providerUpdatedDateTime = models.DateTimeField(null=True) raw = JsonField() normalized = JsonField()
import json import six from requests.structures import CaseInsensitiveDict from django.db import models from django_pgjson.fields import JsonField class Document(models.Model): source = models.CharField(max_length=100) docID = models.CharField(max_length=100) providerUpdatedDateTime = models.DateTimeField(null=True) raw = JsonField() normalized = JsonField() class HarvesterResponse(models.Model): method = models.TextField(primary_key=True) url = models.TextField(primary_key=True, required=True) # Raw request data ok = models.BooleanField() content = models.BinaryField() encoding = models.TextField() headers_str = models.TextField() status_code = models.IntegerField() time_made = models.DateTimeField(auto_now=True) def json(self): return json.loads(self.content) @property def headers(self): return CaseInsensitiveDict(json.loads(self.headers_str)) @property def text(self): return six.u(self.content)
Add harvester response model in django ORM
Add harvester response model in django ORM
Python
apache-2.0
felliott/scrapi,fabianvf/scrapi,erinspace/scrapi,mehanig/scrapi,felliott/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,fabianvf/scrapi
from django.db import models from django_pgjson.fields import JsonField class Document(models.Model): source = models.CharField(max_length=100) docID = models.CharField(max_length=100) providerUpdatedDateTime = models.DateTimeField(null=True) raw = JsonField() normalized = JsonField() Add harvester response model in django ORM
import json import six from requests.structures import CaseInsensitiveDict from django.db import models from django_pgjson.fields import JsonField class Document(models.Model): source = models.CharField(max_length=100) docID = models.CharField(max_length=100) providerUpdatedDateTime = models.DateTimeField(null=True) raw = JsonField() normalized = JsonField() class HarvesterResponse(models.Model): method = models.TextField(primary_key=True) url = models.TextField(primary_key=True, required=True) # Raw request data ok = models.BooleanField() content = models.BinaryField() encoding = models.TextField() headers_str = models.TextField() status_code = models.IntegerField() time_made = models.DateTimeField(auto_now=True) def json(self): return json.loads(self.content) @property def headers(self): return CaseInsensitiveDict(json.loads(self.headers_str)) @property def text(self): return six.u(self.content)
<commit_before>from django.db import models from django_pgjson.fields import JsonField class Document(models.Model): source = models.CharField(max_length=100) docID = models.CharField(max_length=100) providerUpdatedDateTime = models.DateTimeField(null=True) raw = JsonField() normalized = JsonField() <commit_msg>Add harvester response model in django ORM<commit_after>
import json import six from requests.structures import CaseInsensitiveDict from django.db import models from django_pgjson.fields import JsonField class Document(models.Model): source = models.CharField(max_length=100) docID = models.CharField(max_length=100) providerUpdatedDateTime = models.DateTimeField(null=True) raw = JsonField() normalized = JsonField() class HarvesterResponse(models.Model): method = models.TextField(primary_key=True) url = models.TextField(primary_key=True, required=True) # Raw request data ok = models.BooleanField() content = models.BinaryField() encoding = models.TextField() headers_str = models.TextField() status_code = models.IntegerField() time_made = models.DateTimeField(auto_now=True) def json(self): return json.loads(self.content) @property def headers(self): return CaseInsensitiveDict(json.loads(self.headers_str)) @property def text(self): return six.u(self.content)
from django.db import models from django_pgjson.fields import JsonField class Document(models.Model): source = models.CharField(max_length=100) docID = models.CharField(max_length=100) providerUpdatedDateTime = models.DateTimeField(null=True) raw = JsonField() normalized = JsonField() Add harvester response model in django ORMimport json import six from requests.structures import CaseInsensitiveDict from django.db import models from django_pgjson.fields import JsonField class Document(models.Model): source = models.CharField(max_length=100) docID = models.CharField(max_length=100) providerUpdatedDateTime = models.DateTimeField(null=True) raw = JsonField() normalized = JsonField() class HarvesterResponse(models.Model): method = models.TextField(primary_key=True) url = models.TextField(primary_key=True, required=True) # Raw request data ok = models.BooleanField() content = models.BinaryField() encoding = models.TextField() headers_str = models.TextField() status_code = models.IntegerField() time_made = models.DateTimeField(auto_now=True) def json(self): return json.loads(self.content) @property def headers(self): return CaseInsensitiveDict(json.loads(self.headers_str)) @property def text(self): return six.u(self.content)
<commit_before>from django.db import models from django_pgjson.fields import JsonField class Document(models.Model): source = models.CharField(max_length=100) docID = models.CharField(max_length=100) providerUpdatedDateTime = models.DateTimeField(null=True) raw = JsonField() normalized = JsonField() <commit_msg>Add harvester response model in django ORM<commit_after>import json import six from requests.structures import CaseInsensitiveDict from django.db import models from django_pgjson.fields import JsonField class Document(models.Model): source = models.CharField(max_length=100) docID = models.CharField(max_length=100) providerUpdatedDateTime = models.DateTimeField(null=True) raw = JsonField() normalized = JsonField() class HarvesterResponse(models.Model): method = models.TextField(primary_key=True) url = models.TextField(primary_key=True, required=True) # Raw request data ok = models.BooleanField() content = models.BinaryField() encoding = models.TextField() headers_str = models.TextField() status_code = models.IntegerField() time_made = models.DateTimeField(auto_now=True) def json(self): return json.loads(self.content) @property def headers(self): return CaseInsensitiveDict(json.loads(self.headers_str)) @property def text(self): return six.u(self.content)
d1fd045791ad4d7c3544352faf68361637213f57
product_onepage/templatetags/onepage_tags.py
product_onepage/templatetags/onepage_tags.py
"""Gallery templatetags""" from django.template import Library from django.core.exceptions import ObjectDoesNotExist register = Library() @register.filter(name='divide') def divide(dividend, divisor): return dividend / divisor @register.filter(name='get_language') def get_language(list, language): try: return list.get(language=language) except ObjectDoesNotExist: try: return list.get(language='en') except ObjectDoesNotExist: return list.all()[0]
"""Gallery templatetags""" from django.template import Library from django.core.exceptions import ObjectDoesNotExist register = Library() @register.filter(name='divide') def divide(dividend, divisor): return dividend / divisor @register.filter(name='get_language') def get_language(queryset, language): try: return queryset.get(language=language) except ObjectDoesNotExist: try: return queryset.get(language='en') except ObjectDoesNotExist: return queryset.all()[0]
Fix variable name in get_language tag
Fix variable name in get_language tag
Python
mit
emencia/emencia-product-onepage,emencia/emencia-product-onepage
"""Gallery templatetags""" from django.template import Library from django.core.exceptions import ObjectDoesNotExist register = Library() @register.filter(name='divide') def divide(dividend, divisor): return dividend / divisor @register.filter(name='get_language') def get_language(list, language): try: return list.get(language=language) except ObjectDoesNotExist: try: return list.get(language='en') except ObjectDoesNotExist: return list.all()[0] Fix variable name in get_language tag
"""Gallery templatetags""" from django.template import Library from django.core.exceptions import ObjectDoesNotExist register = Library() @register.filter(name='divide') def divide(dividend, divisor): return dividend / divisor @register.filter(name='get_language') def get_language(queryset, language): try: return queryset.get(language=language) except ObjectDoesNotExist: try: return queryset.get(language='en') except ObjectDoesNotExist: return queryset.all()[0]
<commit_before>"""Gallery templatetags""" from django.template import Library from django.core.exceptions import ObjectDoesNotExist register = Library() @register.filter(name='divide') def divide(dividend, divisor): return dividend / divisor @register.filter(name='get_language') def get_language(list, language): try: return list.get(language=language) except ObjectDoesNotExist: try: return list.get(language='en') except ObjectDoesNotExist: return list.all()[0] <commit_msg>Fix variable name in get_language tag<commit_after>
"""Gallery templatetags""" from django.template import Library from django.core.exceptions import ObjectDoesNotExist register = Library() @register.filter(name='divide') def divide(dividend, divisor): return dividend / divisor @register.filter(name='get_language') def get_language(queryset, language): try: return queryset.get(language=language) except ObjectDoesNotExist: try: return queryset.get(language='en') except ObjectDoesNotExist: return queryset.all()[0]
"""Gallery templatetags""" from django.template import Library from django.core.exceptions import ObjectDoesNotExist register = Library() @register.filter(name='divide') def divide(dividend, divisor): return dividend / divisor @register.filter(name='get_language') def get_language(list, language): try: return list.get(language=language) except ObjectDoesNotExist: try: return list.get(language='en') except ObjectDoesNotExist: return list.all()[0] Fix variable name in get_language tag"""Gallery templatetags""" from django.template import Library from django.core.exceptions import ObjectDoesNotExist register = Library() @register.filter(name='divide') def divide(dividend, divisor): return dividend / divisor @register.filter(name='get_language') def get_language(queryset, language): try: return queryset.get(language=language) except ObjectDoesNotExist: try: return queryset.get(language='en') except ObjectDoesNotExist: return queryset.all()[0]
<commit_before>"""Gallery templatetags""" from django.template import Library from django.core.exceptions import ObjectDoesNotExist register = Library() @register.filter(name='divide') def divide(dividend, divisor): return dividend / divisor @register.filter(name='get_language') def get_language(list, language): try: return list.get(language=language) except ObjectDoesNotExist: try: return list.get(language='en') except ObjectDoesNotExist: return list.all()[0] <commit_msg>Fix variable name in get_language tag<commit_after>"""Gallery templatetags""" from django.template import Library from django.core.exceptions import ObjectDoesNotExist register = Library() @register.filter(name='divide') def divide(dividend, divisor): return dividend / divisor @register.filter(name='get_language') def get_language(queryset, language): try: return queryset.get(language=language) except ObjectDoesNotExist: try: return queryset.get(language='en') except ObjectDoesNotExist: return queryset.all()[0]
b5785cbd9586a767b37da2e0c71bcb1fcfed0604
tests/main_test.py
tests/main_test.py
#!/usr/bin/env python3 from libpals.util import ( xor_find_singlechar_key, hamming_distance, fixed_xor ) def test_xor_find_singlechar_key(): input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736' ciphertext = bytes.fromhex(input) result = xor_find_singlechar_key(ciphertext) assert result['key'] == 88 assert result['plaintext'] == b"Cooking MC's like a pound of bacon" def test_hamming_distance(): assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37 def test_fixed_xor(): input = bytes.fromhex("1c0111001f010100061a024b53535009181c") key = bytes.fromhex("686974207468652062756c6c277320657965") assert fixed_xor(input, key) == b"the kid don't play"
#!/usr/bin/env python3 from libpals.util import ( xor_find_singlechar_key, hamming_distance, fixed_xor, transpose ) def test_xor_find_singlechar_key(): input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736' ciphertext = bytes.fromhex(input) result = xor_find_singlechar_key(ciphertext) assert result['key'] == 88 assert result['plaintext'] == b"Cooking MC's like a pound of bacon" def test_hamming_distance(): assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37 def test_fixed_xor(): input = bytes.fromhex("1c0111001f010100061a024b53535009181c") key = bytes.fromhex("686974207468652062756c6c277320657965") assert fixed_xor(input, key) == b"the kid don't play" def test_transpose(): chunks = [b'adg', b'beh', b'cfi'] assert transpose(chunks) == b'abcdefghi'
Add test for the transpose function.
Add test for the transpose function.
Python
bsd-2-clause
cpach/cryptopals-python3
#!/usr/bin/env python3 from libpals.util import ( xor_find_singlechar_key, hamming_distance, fixed_xor ) def test_xor_find_singlechar_key(): input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736' ciphertext = bytes.fromhex(input) result = xor_find_singlechar_key(ciphertext) assert result['key'] == 88 assert result['plaintext'] == b"Cooking MC's like a pound of bacon" def test_hamming_distance(): assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37 def test_fixed_xor(): input = bytes.fromhex("1c0111001f010100061a024b53535009181c") key = bytes.fromhex("686974207468652062756c6c277320657965") assert fixed_xor(input, key) == b"the kid don't play" Add test for the transpose function.
#!/usr/bin/env python3 from libpals.util import ( xor_find_singlechar_key, hamming_distance, fixed_xor, transpose ) def test_xor_find_singlechar_key(): input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736' ciphertext = bytes.fromhex(input) result = xor_find_singlechar_key(ciphertext) assert result['key'] == 88 assert result['plaintext'] == b"Cooking MC's like a pound of bacon" def test_hamming_distance(): assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37 def test_fixed_xor(): input = bytes.fromhex("1c0111001f010100061a024b53535009181c") key = bytes.fromhex("686974207468652062756c6c277320657965") assert fixed_xor(input, key) == b"the kid don't play" def test_transpose(): chunks = [b'adg', b'beh', b'cfi'] assert transpose(chunks) == b'abcdefghi'
<commit_before>#!/usr/bin/env python3 from libpals.util import ( xor_find_singlechar_key, hamming_distance, fixed_xor ) def test_xor_find_singlechar_key(): input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736' ciphertext = bytes.fromhex(input) result = xor_find_singlechar_key(ciphertext) assert result['key'] == 88 assert result['plaintext'] == b"Cooking MC's like a pound of bacon" def test_hamming_distance(): assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37 def test_fixed_xor(): input = bytes.fromhex("1c0111001f010100061a024b53535009181c") key = bytes.fromhex("686974207468652062756c6c277320657965") assert fixed_xor(input, key) == b"the kid don't play" <commit_msg>Add test for the transpose function.<commit_after>
#!/usr/bin/env python3 from libpals.util import ( xor_find_singlechar_key, hamming_distance, fixed_xor, transpose ) def test_xor_find_singlechar_key(): input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736' ciphertext = bytes.fromhex(input) result = xor_find_singlechar_key(ciphertext) assert result['key'] == 88 assert result['plaintext'] == b"Cooking MC's like a pound of bacon" def test_hamming_distance(): assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37 def test_fixed_xor(): input = bytes.fromhex("1c0111001f010100061a024b53535009181c") key = bytes.fromhex("686974207468652062756c6c277320657965") assert fixed_xor(input, key) == b"the kid don't play" def test_transpose(): chunks = [b'adg', b'beh', b'cfi'] assert transpose(chunks) == b'abcdefghi'
#!/usr/bin/env python3 from libpals.util import ( xor_find_singlechar_key, hamming_distance, fixed_xor ) def test_xor_find_singlechar_key(): input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736' ciphertext = bytes.fromhex(input) result = xor_find_singlechar_key(ciphertext) assert result['key'] == 88 assert result['plaintext'] == b"Cooking MC's like a pound of bacon" def test_hamming_distance(): assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37 def test_fixed_xor(): input = bytes.fromhex("1c0111001f010100061a024b53535009181c") key = bytes.fromhex("686974207468652062756c6c277320657965") assert fixed_xor(input, key) == b"the kid don't play" Add test for the transpose function.#!/usr/bin/env python3 from libpals.util import ( xor_find_singlechar_key, hamming_distance, fixed_xor, transpose ) def test_xor_find_singlechar_key(): input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736' ciphertext = bytes.fromhex(input) result = xor_find_singlechar_key(ciphertext) assert result['key'] == 88 assert result['plaintext'] == b"Cooking MC's like a pound of bacon" def test_hamming_distance(): assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37 def test_fixed_xor(): input = bytes.fromhex("1c0111001f010100061a024b53535009181c") key = bytes.fromhex("686974207468652062756c6c277320657965") assert fixed_xor(input, key) == b"the kid don't play" def test_transpose(): chunks = [b'adg', b'beh', b'cfi'] assert transpose(chunks) == b'abcdefghi'
<commit_before>#!/usr/bin/env python3 from libpals.util import ( xor_find_singlechar_key, hamming_distance, fixed_xor ) def test_xor_find_singlechar_key(): input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736' ciphertext = bytes.fromhex(input) result = xor_find_singlechar_key(ciphertext) assert result['key'] == 88 assert result['plaintext'] == b"Cooking MC's like a pound of bacon" def test_hamming_distance(): assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37 def test_fixed_xor(): input = bytes.fromhex("1c0111001f010100061a024b53535009181c") key = bytes.fromhex("686974207468652062756c6c277320657965") assert fixed_xor(input, key) == b"the kid don't play" <commit_msg>Add test for the transpose function.<commit_after>#!/usr/bin/env python3 from libpals.util import ( xor_find_singlechar_key, hamming_distance, fixed_xor, transpose ) def test_xor_find_singlechar_key(): input = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736' ciphertext = bytes.fromhex(input) result = xor_find_singlechar_key(ciphertext) assert result['key'] == 88 assert result['plaintext'] == b"Cooking MC's like a pound of bacon" def test_hamming_distance(): assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37 def test_fixed_xor(): input = bytes.fromhex("1c0111001f010100061a024b53535009181c") key = bytes.fromhex("686974207468652062756c6c277320657965") assert fixed_xor(input, key) == b"the kid don't play" def test_transpose(): chunks = [b'adg', b'beh', b'cfi'] assert transpose(chunks) == b'abcdefghi'
2cc1f3dc699258fa7a571cde96a434b450bc0cf8
phonenumber_field/formfields.py
phonenumber_field/formfields.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.utils.translation import ugettext_lazy as _ from django.forms.fields import CharField from django.core.exceptions import ValidationError from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field.phonenumber import to_python class PhoneNumberField(CharField): default_error_messages = { 'invalid': _('Enter a valid phone number.'), } default_validators = [validate_international_phonenumber] def to_python(self, value): phone_number = to_python(value) if phone_number and not phone_number.is_valid(): raise ValidationError(self.error_messages['invalid']) return phone_number
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.utils.translation import ugettext_lazy as _ from django.forms.fields import CharField from django.core.exceptions import ValidationError from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field.phonenumber import to_python class PhoneNumberField(CharField): default_error_messages = { 'invalid': _('Enter a valid phone number.'), } default_validators = [validate_international_phonenumber] def __init__(self, *args, **kwargs): super(PhoneNumberField, self).__init__(*args, **kwargs) self.widget.input_type = 'tel' def to_python(self, value): phone_number = to_python(value) if phone_number and not phone_number.is_valid(): raise ValidationError(self.error_messages['invalid']) return phone_number
Support HTML5's input type 'tel'
Support HTML5's input type 'tel'
Python
mit
hovel/django-phonenumber-field,hovel/django-phonenumber-field,stefanfoulis/django-phonenumber-field
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.utils.translation import ugettext_lazy as _ from django.forms.fields import CharField from django.core.exceptions import ValidationError from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field.phonenumber import to_python class PhoneNumberField(CharField): default_error_messages = { 'invalid': _('Enter a valid phone number.'), } default_validators = [validate_international_phonenumber] def to_python(self, value): phone_number = to_python(value) if phone_number and not phone_number.is_valid(): raise ValidationError(self.error_messages['invalid']) return phone_number Support HTML5's input type 'tel'
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.utils.translation import ugettext_lazy as _ from django.forms.fields import CharField from django.core.exceptions import ValidationError from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field.phonenumber import to_python class PhoneNumberField(CharField): default_error_messages = { 'invalid': _('Enter a valid phone number.'), } default_validators = [validate_international_phonenumber] def __init__(self, *args, **kwargs): super(PhoneNumberField, self).__init__(*args, **kwargs) self.widget.input_type = 'tel' def to_python(self, value): phone_number = to_python(value) if phone_number and not phone_number.is_valid(): raise ValidationError(self.error_messages['invalid']) return phone_number
<commit_before># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.utils.translation import ugettext_lazy as _ from django.forms.fields import CharField from django.core.exceptions import ValidationError from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field.phonenumber import to_python class PhoneNumberField(CharField): default_error_messages = { 'invalid': _('Enter a valid phone number.'), } default_validators = [validate_international_phonenumber] def to_python(self, value): phone_number = to_python(value) if phone_number and not phone_number.is_valid(): raise ValidationError(self.error_messages['invalid']) return phone_number <commit_msg>Support HTML5's input type 'tel'<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.utils.translation import ugettext_lazy as _ from django.forms.fields import CharField from django.core.exceptions import ValidationError from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field.phonenumber import to_python class PhoneNumberField(CharField): default_error_messages = { 'invalid': _('Enter a valid phone number.'), } default_validators = [validate_international_phonenumber] def __init__(self, *args, **kwargs): super(PhoneNumberField, self).__init__(*args, **kwargs) self.widget.input_type = 'tel' def to_python(self, value): phone_number = to_python(value) if phone_number and not phone_number.is_valid(): raise ValidationError(self.error_messages['invalid']) return phone_number
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.utils.translation import ugettext_lazy as _ from django.forms.fields import CharField from django.core.exceptions import ValidationError from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field.phonenumber import to_python class PhoneNumberField(CharField): default_error_messages = { 'invalid': _('Enter a valid phone number.'), } default_validators = [validate_international_phonenumber] def to_python(self, value): phone_number = to_python(value) if phone_number and not phone_number.is_valid(): raise ValidationError(self.error_messages['invalid']) return phone_number Support HTML5's input type 'tel'# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.utils.translation import ugettext_lazy as _ from django.forms.fields import CharField from django.core.exceptions import ValidationError from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field.phonenumber import to_python class PhoneNumberField(CharField): default_error_messages = { 'invalid': _('Enter a valid phone number.'), } default_validators = [validate_international_phonenumber] def __init__(self, *args, **kwargs): super(PhoneNumberField, self).__init__(*args, **kwargs) self.widget.input_type = 'tel' def to_python(self, value): phone_number = to_python(value) if phone_number and not phone_number.is_valid(): raise ValidationError(self.error_messages['invalid']) return phone_number
<commit_before># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.utils.translation import ugettext_lazy as _ from django.forms.fields import CharField from django.core.exceptions import ValidationError from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field.phonenumber import to_python class PhoneNumberField(CharField): default_error_messages = { 'invalid': _('Enter a valid phone number.'), } default_validators = [validate_international_phonenumber] def to_python(self, value): phone_number = to_python(value) if phone_number and not phone_number.is_valid(): raise ValidationError(self.error_messages['invalid']) return phone_number <commit_msg>Support HTML5's input type 'tel'<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.utils.translation import ugettext_lazy as _ from django.forms.fields import CharField from django.core.exceptions import ValidationError from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field.phonenumber import to_python class PhoneNumberField(CharField): default_error_messages = { 'invalid': _('Enter a valid phone number.'), } default_validators = [validate_international_phonenumber] def __init__(self, *args, **kwargs): super(PhoneNumberField, self).__init__(*args, **kwargs) self.widget.input_type = 'tel' def to_python(self, value): phone_number = to_python(value) if phone_number and not phone_number.is_valid(): raise ValidationError(self.error_messages['invalid']) return phone_number
21319fc8d22469911c1cbcc41ec7320b1d6141e9
powerline/bindings/i3/powerline-i3.py
powerline/bindings/i3/powerline-i3.py
#!/usr/bin/env python # vim:fileencoding=utf-8:noet from powerline import Powerline from powerline.lib.monotonic import monotonic import sys import time import i3 from threading import Lock name = 'wm' if len( sys.argv ) > 1: name = sys.argv[1] powerline = Powerline(name, renderer_module='i3bgbar') powerline.update_renderer() interval = 0.5 print '{"version": 1, "custom_workspace": true}' print '[' print ' [[],[]]' lock = Lock() def render( event=None, data=None, sub=None ): global lock lock.acquire() s = '[\n' + powerline.render(side='right')[:-2] + '\n]\n' s += ',[\n' + powerline.render(side='left' )[:-2] + '\n]' print ',[\n' + s + '\n]' sys.stdout.flush() lock.release() sub = i3.Subscription( render, 'workspace' ) while True: start_time = monotonic() render() time.sleep(max(interval - (monotonic() - start_time), 0.1))
#!/usr/bin/env python # vim:fileencoding=utf-8:noet from powerline import Powerline from powerline.lib.monotonic import monotonic import sys import time import i3 from threading import Lock name = 'wm' if len( sys.argv ) > 1: name = sys.argv[1] powerline = Powerline(name, renderer_module='i3bgbar') powerline.update_renderer() interval = 0.5 print '{"version": 1, "custom_workspace": true}' print '[' print ' [[],[]]' lock = Lock() def render( event=None, data=None, sub=None ): global lock with lock: s = '[\n' + powerline.render(side='right')[:-2] + '\n]\n' s += ',[\n' + powerline.render(side='left' )[:-2] + '\n]' print ',[\n' + s + '\n]' sys.stdout.flush() sub = i3.Subscription( render, 'workspace' ) while True: start_time = monotonic() render() time.sleep(max(interval - (monotonic() - start_time), 0.1))
Use 'with' instead of lock.acquire/release()
Use 'with' instead of lock.acquire/release()
Python
mit
DoctorJellyface/powerline,bartvm/powerline,areteix/powerline,russellb/powerline,seanfisk/powerline,s0undt3ch/powerline,IvanAli/powerline,cyrixhero/powerline,blindFS/powerline,keelerm84/powerline,kenrachynski/powerline,IvanAli/powerline,darac/powerline,xfumihiro/powerline,Liangjianghao/powerline,darac/powerline,QuLogic/powerline,xxxhycl2010/powerline,EricSB/powerline,firebitsbr/powerline,cyrixhero/powerline,russellb/powerline,wfscheper/powerline,wfscheper/powerline,cyrixhero/powerline,seanfisk/powerline,firebitsbr/powerline,magus424/powerline,dragon788/powerline,junix/powerline,magus424/powerline,kenrachynski/powerline,blindFS/powerline,magus424/powerline,EricSB/powerline,areteix/powerline,lukw00/powerline,bartvm/powerline,prvnkumar/powerline,bezhermoso/powerline,blindFS/powerline,QuLogic/powerline,DoctorJellyface/powerline,dragon788/powerline,s0undt3ch/powerline,QuLogic/powerline,xxxhycl2010/powerline,IvanAli/powerline,firebitsbr/powerline,bezhermoso/powerline,bartvm/powerline,keelerm84/powerline,xfumihiro/powerline,s0undt3ch/powerline,S0lll0s/powerline,xxxhycl2010/powerline,russellb/powerline,Liangjianghao/powerline,kenrachynski/powerline,Luffin/powerline,wfscheper/powerline,prvnkumar/powerline,Luffin/powerline,lukw00/powerline,bezhermoso/powerline,junix/powerline,S0lll0s/powerline,Luffin/powerline,darac/powerline,S0lll0s/powerline,seanfisk/powerline,dragon788/powerline,EricSB/powerline,DoctorJellyface/powerline,lukw00/powerline,Liangjianghao/powerline,areteix/powerline,xfumihiro/powerline,junix/powerline,prvnkumar/powerline
#!/usr/bin/env python # vim:fileencoding=utf-8:noet from powerline import Powerline from powerline.lib.monotonic import monotonic import sys import time import i3 from threading import Lock name = 'wm' if len( sys.argv ) > 1: name = sys.argv[1] powerline = Powerline(name, renderer_module='i3bgbar') powerline.update_renderer() interval = 0.5 print '{"version": 1, "custom_workspace": true}' print '[' print ' [[],[]]' lock = Lock() def render( event=None, data=None, sub=None ): global lock lock.acquire() s = '[\n' + powerline.render(side='right')[:-2] + '\n]\n' s += ',[\n' + powerline.render(side='left' )[:-2] + '\n]' print ',[\n' + s + '\n]' sys.stdout.flush() lock.release() sub = i3.Subscription( render, 'workspace' ) while True: start_time = monotonic() render() time.sleep(max(interval - (monotonic() - start_time), 0.1)) Use 'with' instead of lock.acquire/release()
#!/usr/bin/env python # vim:fileencoding=utf-8:noet from powerline import Powerline from powerline.lib.monotonic import monotonic import sys import time import i3 from threading import Lock name = 'wm' if len( sys.argv ) > 1: name = sys.argv[1] powerline = Powerline(name, renderer_module='i3bgbar') powerline.update_renderer() interval = 0.5 print '{"version": 1, "custom_workspace": true}' print '[' print ' [[],[]]' lock = Lock() def render( event=None, data=None, sub=None ): global lock with lock: s = '[\n' + powerline.render(side='right')[:-2] + '\n]\n' s += ',[\n' + powerline.render(side='left' )[:-2] + '\n]' print ',[\n' + s + '\n]' sys.stdout.flush() sub = i3.Subscription( render, 'workspace' ) while True: start_time = monotonic() render() time.sleep(max(interval - (monotonic() - start_time), 0.1))
<commit_before>#!/usr/bin/env python # vim:fileencoding=utf-8:noet from powerline import Powerline from powerline.lib.monotonic import monotonic import sys import time import i3 from threading import Lock name = 'wm' if len( sys.argv ) > 1: name = sys.argv[1] powerline = Powerline(name, renderer_module='i3bgbar') powerline.update_renderer() interval = 0.5 print '{"version": 1, "custom_workspace": true}' print '[' print ' [[],[]]' lock = Lock() def render( event=None, data=None, sub=None ): global lock lock.acquire() s = '[\n' + powerline.render(side='right')[:-2] + '\n]\n' s += ',[\n' + powerline.render(side='left' )[:-2] + '\n]' print ',[\n' + s + '\n]' sys.stdout.flush() lock.release() sub = i3.Subscription( render, 'workspace' ) while True: start_time = monotonic() render() time.sleep(max(interval - (monotonic() - start_time), 0.1)) <commit_msg>Use 'with' instead of lock.acquire/release()<commit_after>
#!/usr/bin/env python # vim:fileencoding=utf-8:noet from powerline import Powerline from powerline.lib.monotonic import monotonic import sys import time import i3 from threading import Lock name = 'wm' if len( sys.argv ) > 1: name = sys.argv[1] powerline = Powerline(name, renderer_module='i3bgbar') powerline.update_renderer() interval = 0.5 print '{"version": 1, "custom_workspace": true}' print '[' print ' [[],[]]' lock = Lock() def render( event=None, data=None, sub=None ): global lock with lock: s = '[\n' + powerline.render(side='right')[:-2] + '\n]\n' s += ',[\n' + powerline.render(side='left' )[:-2] + '\n]' print ',[\n' + s + '\n]' sys.stdout.flush() sub = i3.Subscription( render, 'workspace' ) while True: start_time = monotonic() render() time.sleep(max(interval - (monotonic() - start_time), 0.1))
#!/usr/bin/env python # vim:fileencoding=utf-8:noet from powerline import Powerline from powerline.lib.monotonic import monotonic import sys import time import i3 from threading import Lock name = 'wm' if len( sys.argv ) > 1: name = sys.argv[1] powerline = Powerline(name, renderer_module='i3bgbar') powerline.update_renderer() interval = 0.5 print '{"version": 1, "custom_workspace": true}' print '[' print ' [[],[]]' lock = Lock() def render( event=None, data=None, sub=None ): global lock lock.acquire() s = '[\n' + powerline.render(side='right')[:-2] + '\n]\n' s += ',[\n' + powerline.render(side='left' )[:-2] + '\n]' print ',[\n' + s + '\n]' sys.stdout.flush() lock.release() sub = i3.Subscription( render, 'workspace' ) while True: start_time = monotonic() render() time.sleep(max(interval - (monotonic() - start_time), 0.1)) Use 'with' instead of lock.acquire/release()#!/usr/bin/env python # vim:fileencoding=utf-8:noet from powerline import Powerline from powerline.lib.monotonic import monotonic import sys import time import i3 from threading import Lock name = 'wm' if len( sys.argv ) > 1: name = sys.argv[1] powerline = Powerline(name, renderer_module='i3bgbar') powerline.update_renderer() interval = 0.5 print '{"version": 1, "custom_workspace": true}' print '[' print ' [[],[]]' lock = Lock() def render( event=None, data=None, sub=None ): global lock with lock: s = '[\n' + powerline.render(side='right')[:-2] + '\n]\n' s += ',[\n' + powerline.render(side='left' )[:-2] + '\n]' print ',[\n' + s + '\n]' sys.stdout.flush() sub = i3.Subscription( render, 'workspace' ) while True: start_time = monotonic() render() time.sleep(max(interval - (monotonic() - start_time), 0.1))
<commit_before>#!/usr/bin/env python # vim:fileencoding=utf-8:noet from powerline import Powerline from powerline.lib.monotonic import monotonic import sys import time import i3 from threading import Lock name = 'wm' if len( sys.argv ) > 1: name = sys.argv[1] powerline = Powerline(name, renderer_module='i3bgbar') powerline.update_renderer() interval = 0.5 print '{"version": 1, "custom_workspace": true}' print '[' print ' [[],[]]' lock = Lock() def render( event=None, data=None, sub=None ): global lock lock.acquire() s = '[\n' + powerline.render(side='right')[:-2] + '\n]\n' s += ',[\n' + powerline.render(side='left' )[:-2] + '\n]' print ',[\n' + s + '\n]' sys.stdout.flush() lock.release() sub = i3.Subscription( render, 'workspace' ) while True: start_time = monotonic() render() time.sleep(max(interval - (monotonic() - start_time), 0.1)) <commit_msg>Use 'with' instead of lock.acquire/release()<commit_after>#!/usr/bin/env python # vim:fileencoding=utf-8:noet from powerline import Powerline from powerline.lib.monotonic import monotonic import sys import time import i3 from threading import Lock name = 'wm' if len( sys.argv ) > 1: name = sys.argv[1] powerline = Powerline(name, renderer_module='i3bgbar') powerline.update_renderer() interval = 0.5 print '{"version": 1, "custom_workspace": true}' print '[' print ' [[],[]]' lock = Lock() def render( event=None, data=None, sub=None ): global lock with lock: s = '[\n' + powerline.render(side='right')[:-2] + '\n]\n' s += ',[\n' + powerline.render(side='left' )[:-2] + '\n]' print ',[\n' + s + '\n]' sys.stdout.flush() sub = i3.Subscription( render, 'workspace' ) while True: start_time = monotonic() render() time.sleep(max(interval - (monotonic() - start_time), 0.1))
5a39d00cf39e80a4e9f1ca8bbf0eac767d39f61d
nbgrader/tests/apps/test_nbgrader.py
nbgrader/tests/apps/test_nbgrader.py
import os import sys from .. import run_nbgrader, run_command from .base import BaseTestApp class TestNbGrader(BaseTestApp): def test_help(self): """Does the help display without error?""" run_nbgrader(["--help-all"]) def test_no_subapp(self): """Is the help displayed when no subapp is given?""" run_nbgrader([], retcode=1) def test_generate_config(self): """Is the config file properly generated?""" # it already exists, because we create it in conftest.py os.remove("nbgrader_config.py") # try recreating it run_nbgrader(["--generate-config"]) assert os.path.isfile("nbgrader_config.py") # does it fail if it already exists? run_nbgrader(["--generate-config"], retcode=1) def test_check_version(self, capfd): """Is the version the same regardless of how we run nbgrader?""" out1 = '\n'.join( run_command(["nbgrader", "--version"]).splitlines()[-3:] ).strip() out2 = '\n'.join( run_nbgrader(["--version"], stdout=True).splitlines()[-3:] ).strip() assert out1 == out2
import os import sys from .. import run_nbgrader, run_command from .base import BaseTestApp class TestNbGrader(BaseTestApp): def test_help(self): """Does the help display without error?""" run_nbgrader(["--help-all"]) def test_no_subapp(self): """Is the help displayed when no subapp is given?""" run_nbgrader([], retcode=1) def test_generate_config(self): """Is the config file properly generated?""" # it already exists, because we create it in conftest.py os.remove("nbgrader_config.py") # try recreating it run_nbgrader(["--generate-config"]) assert os.path.isfile("nbgrader_config.py") # does it fail if it already exists? run_nbgrader(["--generate-config"], retcode=1) def test_check_version(self, capfd): """Is the version the same regardless of how we run nbgrader?""" out1 = '\n'.join( run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:] ).strip() out2 = '\n'.join( run_nbgrader(["--version"], stdout=True).splitlines()[-3:] ).strip() assert out1 == out2
Use sys.executable when executing nbgrader
Use sys.executable when executing nbgrader
Python
bsd-3-clause
jupyter/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jhamrick/nbgrader,jhamrick/nbgrader,jupyter/nbgrader
import os import sys from .. import run_nbgrader, run_command from .base import BaseTestApp class TestNbGrader(BaseTestApp): def test_help(self): """Does the help display without error?""" run_nbgrader(["--help-all"]) def test_no_subapp(self): """Is the help displayed when no subapp is given?""" run_nbgrader([], retcode=1) def test_generate_config(self): """Is the config file properly generated?""" # it already exists, because we create it in conftest.py os.remove("nbgrader_config.py") # try recreating it run_nbgrader(["--generate-config"]) assert os.path.isfile("nbgrader_config.py") # does it fail if it already exists? run_nbgrader(["--generate-config"], retcode=1) def test_check_version(self, capfd): """Is the version the same regardless of how we run nbgrader?""" out1 = '\n'.join( run_command(["nbgrader", "--version"]).splitlines()[-3:] ).strip() out2 = '\n'.join( run_nbgrader(["--version"], stdout=True).splitlines()[-3:] ).strip() assert out1 == out2 Use sys.executable when executing nbgrader
import os import sys from .. import run_nbgrader, run_command from .base import BaseTestApp class TestNbGrader(BaseTestApp): def test_help(self): """Does the help display without error?""" run_nbgrader(["--help-all"]) def test_no_subapp(self): """Is the help displayed when no subapp is given?""" run_nbgrader([], retcode=1) def test_generate_config(self): """Is the config file properly generated?""" # it already exists, because we create it in conftest.py os.remove("nbgrader_config.py") # try recreating it run_nbgrader(["--generate-config"]) assert os.path.isfile("nbgrader_config.py") # does it fail if it already exists? run_nbgrader(["--generate-config"], retcode=1) def test_check_version(self, capfd): """Is the version the same regardless of how we run nbgrader?""" out1 = '\n'.join( run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:] ).strip() out2 = '\n'.join( run_nbgrader(["--version"], stdout=True).splitlines()[-3:] ).strip() assert out1 == out2
<commit_before>import os import sys from .. import run_nbgrader, run_command from .base import BaseTestApp class TestNbGrader(BaseTestApp): def test_help(self): """Does the help display without error?""" run_nbgrader(["--help-all"]) def test_no_subapp(self): """Is the help displayed when no subapp is given?""" run_nbgrader([], retcode=1) def test_generate_config(self): """Is the config file properly generated?""" # it already exists, because we create it in conftest.py os.remove("nbgrader_config.py") # try recreating it run_nbgrader(["--generate-config"]) assert os.path.isfile("nbgrader_config.py") # does it fail if it already exists? run_nbgrader(["--generate-config"], retcode=1) def test_check_version(self, capfd): """Is the version the same regardless of how we run nbgrader?""" out1 = '\n'.join( run_command(["nbgrader", "--version"]).splitlines()[-3:] ).strip() out2 = '\n'.join( run_nbgrader(["--version"], stdout=True).splitlines()[-3:] ).strip() assert out1 == out2 <commit_msg>Use sys.executable when executing nbgrader<commit_after>
import os import sys from .. import run_nbgrader, run_command from .base import BaseTestApp class TestNbGrader(BaseTestApp): def test_help(self): """Does the help display without error?""" run_nbgrader(["--help-all"]) def test_no_subapp(self): """Is the help displayed when no subapp is given?""" run_nbgrader([], retcode=1) def test_generate_config(self): """Is the config file properly generated?""" # it already exists, because we create it in conftest.py os.remove("nbgrader_config.py") # try recreating it run_nbgrader(["--generate-config"]) assert os.path.isfile("nbgrader_config.py") # does it fail if it already exists? run_nbgrader(["--generate-config"], retcode=1) def test_check_version(self, capfd): """Is the version the same regardless of how we run nbgrader?""" out1 = '\n'.join( run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:] ).strip() out2 = '\n'.join( run_nbgrader(["--version"], stdout=True).splitlines()[-3:] ).strip() assert out1 == out2
import os import sys from .. import run_nbgrader, run_command from .base import BaseTestApp class TestNbGrader(BaseTestApp): def test_help(self): """Does the help display without error?""" run_nbgrader(["--help-all"]) def test_no_subapp(self): """Is the help displayed when no subapp is given?""" run_nbgrader([], retcode=1) def test_generate_config(self): """Is the config file properly generated?""" # it already exists, because we create it in conftest.py os.remove("nbgrader_config.py") # try recreating it run_nbgrader(["--generate-config"]) assert os.path.isfile("nbgrader_config.py") # does it fail if it already exists? run_nbgrader(["--generate-config"], retcode=1) def test_check_version(self, capfd): """Is the version the same regardless of how we run nbgrader?""" out1 = '\n'.join( run_command(["nbgrader", "--version"]).splitlines()[-3:] ).strip() out2 = '\n'.join( run_nbgrader(["--version"], stdout=True).splitlines()[-3:] ).strip() assert out1 == out2 Use sys.executable when executing nbgraderimport os import sys from .. import run_nbgrader, run_command from .base import BaseTestApp class TestNbGrader(BaseTestApp): def test_help(self): """Does the help display without error?""" run_nbgrader(["--help-all"]) def test_no_subapp(self): """Is the help displayed when no subapp is given?""" run_nbgrader([], retcode=1) def test_generate_config(self): """Is the config file properly generated?""" # it already exists, because we create it in conftest.py os.remove("nbgrader_config.py") # try recreating it run_nbgrader(["--generate-config"]) assert os.path.isfile("nbgrader_config.py") # does it fail if it already exists? run_nbgrader(["--generate-config"], retcode=1) def test_check_version(self, capfd): """Is the version the same regardless of how we run nbgrader?""" out1 = '\n'.join( run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:] ).strip() out2 = '\n'.join( run_nbgrader(["--version"], stdout=True).splitlines()[-3:] ).strip() assert out1 == out2
<commit_before>import os import sys from .. import run_nbgrader, run_command from .base import BaseTestApp class TestNbGrader(BaseTestApp): def test_help(self): """Does the help display without error?""" run_nbgrader(["--help-all"]) def test_no_subapp(self): """Is the help displayed when no subapp is given?""" run_nbgrader([], retcode=1) def test_generate_config(self): """Is the config file properly generated?""" # it already exists, because we create it in conftest.py os.remove("nbgrader_config.py") # try recreating it run_nbgrader(["--generate-config"]) assert os.path.isfile("nbgrader_config.py") # does it fail if it already exists? run_nbgrader(["--generate-config"], retcode=1) def test_check_version(self, capfd): """Is the version the same regardless of how we run nbgrader?""" out1 = '\n'.join( run_command(["nbgrader", "--version"]).splitlines()[-3:] ).strip() out2 = '\n'.join( run_nbgrader(["--version"], stdout=True).splitlines()[-3:] ).strip() assert out1 == out2 <commit_msg>Use sys.executable when executing nbgrader<commit_after>import os import sys from .. import run_nbgrader, run_command from .base import BaseTestApp class TestNbGrader(BaseTestApp): def test_help(self): """Does the help display without error?""" run_nbgrader(["--help-all"]) def test_no_subapp(self): """Is the help displayed when no subapp is given?""" run_nbgrader([], retcode=1) def test_generate_config(self): """Is the config file properly generated?""" # it already exists, because we create it in conftest.py os.remove("nbgrader_config.py") # try recreating it run_nbgrader(["--generate-config"]) assert os.path.isfile("nbgrader_config.py") # does it fail if it already exists? run_nbgrader(["--generate-config"], retcode=1) def test_check_version(self, capfd): """Is the version the same regardless of how we run nbgrader?""" out1 = '\n'.join( run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:] ).strip() out2 = '\n'.join( run_nbgrader(["--version"], stdout=True).splitlines()[-3:] ).strip() assert out1 == out2
5212d6eabf199ed9ddd34bd6fd2b159f7b2e6a02
tviserrys/views.py
tviserrys/views.py
from django.http import HttpResponse, HttpResponseRedirect from django.views.generic import View from django.utils.decorators import method_decorator from django.template import RequestContext, loader from django.core.exceptions import PermissionDenied from django.contrib.auth.decorators import login_required from django.shortcuts import get_object_or_404, render_to_response, render from tviit.models import Tviit, TviitForm class IndexView(View): @method_decorator(login_required(login_url='/login/')) def get(self, request, *args, **kwargs): template = loader.get_template('tviit/index.html') context = { 'tviit_form': TviitForm, } return HttpResponse(template.render(context, request))
from django.http import HttpResponse, HttpResponseRedirect from django.views.generic import View from django.utils.decorators import method_decorator from django.template import RequestContext, loader from django.core.exceptions import PermissionDenied from django.contrib.auth.decorators import login_required from django.shortcuts import get_object_or_404, render_to_response, render from tviit.models import Tviit, TviitForm from django.contrib.auth.models import User from user_profile.models import UserProfile class IndexView(View): @method_decorator(login_required(login_url='/login/')) def get(self, request, *args, **kwargs): template = loader.get_template('tviit/index.html') profile = UserProfile.objects.get(user=request.user) tviits = get_latest_tviits(profile) print(tviits) context = { 'profile': profile, 'tviit_form': TviitForm, 'tviits': tviits, } return HttpResponse(template.render(context, request)) # Get all the tviits, which aren't replies def get_latest_tviits(profile): follows = User.objects.filter(pk__in=profile.follows.all()) tviits = Tviit.objects.filter(sender__in=follows) return tviits
Add functionality to get latest tviits
Add functionality to get latest tviits
Python
mit
DeWaster/Tviserrys,DeWaster/Tviserrys
from django.http import HttpResponse, HttpResponseRedirect from django.views.generic import View from django.utils.decorators import method_decorator from django.template import RequestContext, loader from django.core.exceptions import PermissionDenied from django.contrib.auth.decorators import login_required from django.shortcuts import get_object_or_404, render_to_response, render from tviit.models import Tviit, TviitForm class IndexView(View): @method_decorator(login_required(login_url='/login/')) def get(self, request, *args, **kwargs): template = loader.get_template('tviit/index.html') context = { 'tviit_form': TviitForm, } return HttpResponse(template.render(context, request)) Add functionality to get latest tviits
from django.http import HttpResponse, HttpResponseRedirect from django.views.generic import View from django.utils.decorators import method_decorator from django.template import RequestContext, loader from django.core.exceptions import PermissionDenied from django.contrib.auth.decorators import login_required from django.shortcuts import get_object_or_404, render_to_response, render from tviit.models import Tviit, TviitForm from django.contrib.auth.models import User from user_profile.models import UserProfile class IndexView(View): @method_decorator(login_required(login_url='/login/')) def get(self, request, *args, **kwargs): template = loader.get_template('tviit/index.html') profile = UserProfile.objects.get(user=request.user) tviits = get_latest_tviits(profile) print(tviits) context = { 'profile': profile, 'tviit_form': TviitForm, 'tviits': tviits, } return HttpResponse(template.render(context, request)) # Get all the tviits, which aren't replies def get_latest_tviits(profile): follows = User.objects.filter(pk__in=profile.follows.all()) tviits = Tviit.objects.filter(sender__in=follows) return tviits
<commit_before>from django.http import HttpResponse, HttpResponseRedirect from django.views.generic import View from django.utils.decorators import method_decorator from django.template import RequestContext, loader from django.core.exceptions import PermissionDenied from django.contrib.auth.decorators import login_required from django.shortcuts import get_object_or_404, render_to_response, render from tviit.models import Tviit, TviitForm class IndexView(View): @method_decorator(login_required(login_url='/login/')) def get(self, request, *args, **kwargs): template = loader.get_template('tviit/index.html') context = { 'tviit_form': TviitForm, } return HttpResponse(template.render(context, request)) <commit_msg>Add functionality to get latest tviits<commit_after>
from django.http import HttpResponse, HttpResponseRedirect from django.views.generic import View from django.utils.decorators import method_decorator from django.template import RequestContext, loader from django.core.exceptions import PermissionDenied from django.contrib.auth.decorators import login_required from django.shortcuts import get_object_or_404, render_to_response, render from tviit.models import Tviit, TviitForm from django.contrib.auth.models import User from user_profile.models import UserProfile class IndexView(View): @method_decorator(login_required(login_url='/login/')) def get(self, request, *args, **kwargs): template = loader.get_template('tviit/index.html') profile = UserProfile.objects.get(user=request.user) tviits = get_latest_tviits(profile) print(tviits) context = { 'profile': profile, 'tviit_form': TviitForm, 'tviits': tviits, } return HttpResponse(template.render(context, request)) # Get all the tviits, which aren't replies def get_latest_tviits(profile): follows = User.objects.filter(pk__in=profile.follows.all()) tviits = Tviit.objects.filter(sender__in=follows) return tviits
from django.http import HttpResponse, HttpResponseRedirect from django.views.generic import View from django.utils.decorators import method_decorator from django.template import RequestContext, loader from django.core.exceptions import PermissionDenied from django.contrib.auth.decorators import login_required from django.shortcuts import get_object_or_404, render_to_response, render from tviit.models import Tviit, TviitForm class IndexView(View): @method_decorator(login_required(login_url='/login/')) def get(self, request, *args, **kwargs): template = loader.get_template('tviit/index.html') context = { 'tviit_form': TviitForm, } return HttpResponse(template.render(context, request)) Add functionality to get latest tviitsfrom django.http import HttpResponse, HttpResponseRedirect from django.views.generic import View from django.utils.decorators import method_decorator from django.template import RequestContext, loader from django.core.exceptions import PermissionDenied from django.contrib.auth.decorators import login_required from django.shortcuts import get_object_or_404, render_to_response, render from tviit.models import Tviit, TviitForm from django.contrib.auth.models import User from user_profile.models import UserProfile class IndexView(View): @method_decorator(login_required(login_url='/login/')) def get(self, request, *args, **kwargs): template = loader.get_template('tviit/index.html') profile = UserProfile.objects.get(user=request.user) tviits = get_latest_tviits(profile) print(tviits) context = { 'profile': profile, 'tviit_form': TviitForm, 'tviits': tviits, } return HttpResponse(template.render(context, request)) # Get all the tviits, which aren't replies def get_latest_tviits(profile): follows = User.objects.filter(pk__in=profile.follows.all()) tviits = Tviit.objects.filter(sender__in=follows) return tviits
<commit_before>from django.http import HttpResponse, HttpResponseRedirect from django.views.generic import View from django.utils.decorators import method_decorator from django.template import RequestContext, loader from django.core.exceptions import PermissionDenied from django.contrib.auth.decorators import login_required from django.shortcuts import get_object_or_404, render_to_response, render from tviit.models import Tviit, TviitForm class IndexView(View): @method_decorator(login_required(login_url='/login/')) def get(self, request, *args, **kwargs): template = loader.get_template('tviit/index.html') context = { 'tviit_form': TviitForm, } return HttpResponse(template.render(context, request)) <commit_msg>Add functionality to get latest tviits<commit_after>from django.http import HttpResponse, HttpResponseRedirect from django.views.generic import View from django.utils.decorators import method_decorator from django.template import RequestContext, loader from django.core.exceptions import PermissionDenied from django.contrib.auth.decorators import login_required from django.shortcuts import get_object_or_404, render_to_response, render from tviit.models import Tviit, TviitForm from django.contrib.auth.models import User from user_profile.models import UserProfile class IndexView(View): @method_decorator(login_required(login_url='/login/')) def get(self, request, *args, **kwargs): template = loader.get_template('tviit/index.html') profile = UserProfile.objects.get(user=request.user) tviits = get_latest_tviits(profile) print(tviits) context = { 'profile': profile, 'tviit_form': TviitForm, 'tviits': tviits, } return HttpResponse(template.render(context, request)) # Get all the tviits, which aren't replies def get_latest_tviits(profile): follows = User.objects.filter(pk__in=profile.follows.all()) tviits = Tviit.objects.filter(sender__in=follows) return tviits
409c69dd967f18ef99658ed63d54dc9723f84250
anchorhub/builtin/github/collector.py
anchorhub/builtin/github/collector.py
""" File that initializes a Collector object designed for GitHub style markdown files. """ from anchorhub.collector import Collector from anchorhub.builtin.github.cstrategies import MarkdownATXCollectorStrategy import anchorhub.builtin.github.converter as converter import anchorhub.builtin.github.switches as ghswitches def make_github_markdown_collector(opts): """ Creates a Collector object used for parsing Markdown files with a GitHub style anchor transformation :param opts: Namespace object of options for the AnchorHub program. Usually created from command-line arguments. It must contain a 'wrapper_regex' attribute :return: a Collector object designed for collecting tag/anchor pairs from Markdown files using GitHub style anchors """ assert hasattr(opts, 'wrapper_regex') atx = MarkdownATXCollectorStrategy(opts) code_block_switch = ghswitches.code_block_switch strategies = [atx] switches = [code_block_switch] return Collector(converter.create_anchor_from_header, strategies, switches=switches)
""" File that initializes a Collector object designed for GitHub style markdown files. """ from anchorhub.collector import Collector from anchorhub.builtin.github.cstrategies import \ MarkdownATXCollectorStrategy, MarkdownSetextCollectorStrategy import anchorhub.builtin.github.converter as converter import anchorhub.builtin.github.switches as ghswitches def make_github_markdown_collector(opts): """ Creates a Collector object used for parsing Markdown files with a GitHub style anchor transformation :param opts: Namespace object of options for the AnchorHub program. Usually created from command-line arguments. It must contain a 'wrapper_regex' attribute :return: a Collector object designed for collecting tag/anchor pairs from Markdown files using GitHub style anchors """ assert hasattr(opts, 'wrapper_regex') atx = MarkdownATXCollectorStrategy(opts) setext = MarkdownSetextCollectorStrategy(opts) code_block_switch = ghswitches.code_block_switch strategies = [atx, setext] switches = [code_block_switch] return Collector(converter.create_anchor_from_header, strategies, switches=switches)
Use Setext strategy in GitHub built in Collector
Use Setext strategy in GitHub built in Collector
Python
apache-2.0
samjabrahams/anchorhub
""" File that initializes a Collector object designed for GitHub style markdown files. """ from anchorhub.collector import Collector from anchorhub.builtin.github.cstrategies import MarkdownATXCollectorStrategy import anchorhub.builtin.github.converter as converter import anchorhub.builtin.github.switches as ghswitches def make_github_markdown_collector(opts): """ Creates a Collector object used for parsing Markdown files with a GitHub style anchor transformation :param opts: Namespace object of options for the AnchorHub program. Usually created from command-line arguments. It must contain a 'wrapper_regex' attribute :return: a Collector object designed for collecting tag/anchor pairs from Markdown files using GitHub style anchors """ assert hasattr(opts, 'wrapper_regex') atx = MarkdownATXCollectorStrategy(opts) code_block_switch = ghswitches.code_block_switch strategies = [atx] switches = [code_block_switch] return Collector(converter.create_anchor_from_header, strategies, switches=switches) Use Setext strategy in GitHub built in Collector
""" File that initializes a Collector object designed for GitHub style markdown files. """ from anchorhub.collector import Collector from anchorhub.builtin.github.cstrategies import \ MarkdownATXCollectorStrategy, MarkdownSetextCollectorStrategy import anchorhub.builtin.github.converter as converter import anchorhub.builtin.github.switches as ghswitches def make_github_markdown_collector(opts): """ Creates a Collector object used for parsing Markdown files with a GitHub style anchor transformation :param opts: Namespace object of options for the AnchorHub program. Usually created from command-line arguments. It must contain a 'wrapper_regex' attribute :return: a Collector object designed for collecting tag/anchor pairs from Markdown files using GitHub style anchors """ assert hasattr(opts, 'wrapper_regex') atx = MarkdownATXCollectorStrategy(opts) setext = MarkdownSetextCollectorStrategy(opts) code_block_switch = ghswitches.code_block_switch strategies = [atx, setext] switches = [code_block_switch] return Collector(converter.create_anchor_from_header, strategies, switches=switches)
<commit_before>""" File that initializes a Collector object designed for GitHub style markdown files. """ from anchorhub.collector import Collector from anchorhub.builtin.github.cstrategies import MarkdownATXCollectorStrategy import anchorhub.builtin.github.converter as converter import anchorhub.builtin.github.switches as ghswitches def make_github_markdown_collector(opts): """ Creates a Collector object used for parsing Markdown files with a GitHub style anchor transformation :param opts: Namespace object of options for the AnchorHub program. Usually created from command-line arguments. It must contain a 'wrapper_regex' attribute :return: a Collector object designed for collecting tag/anchor pairs from Markdown files using GitHub style anchors """ assert hasattr(opts, 'wrapper_regex') atx = MarkdownATXCollectorStrategy(opts) code_block_switch = ghswitches.code_block_switch strategies = [atx] switches = [code_block_switch] return Collector(converter.create_anchor_from_header, strategies, switches=switches) <commit_msg>Use Setext strategy in GitHub built in Collector<commit_after>
""" File that initializes a Collector object designed for GitHub style markdown files. """ from anchorhub.collector import Collector from anchorhub.builtin.github.cstrategies import \ MarkdownATXCollectorStrategy, MarkdownSetextCollectorStrategy import anchorhub.builtin.github.converter as converter import anchorhub.builtin.github.switches as ghswitches def make_github_markdown_collector(opts): """ Creates a Collector object used for parsing Markdown files with a GitHub style anchor transformation :param opts: Namespace object of options for the AnchorHub program. Usually created from command-line arguments. It must contain a 'wrapper_regex' attribute :return: a Collector object designed for collecting tag/anchor pairs from Markdown files using GitHub style anchors """ assert hasattr(opts, 'wrapper_regex') atx = MarkdownATXCollectorStrategy(opts) setext = MarkdownSetextCollectorStrategy(opts) code_block_switch = ghswitches.code_block_switch strategies = [atx, setext] switches = [code_block_switch] return Collector(converter.create_anchor_from_header, strategies, switches=switches)
""" File that initializes a Collector object designed for GitHub style markdown files. """ from anchorhub.collector import Collector from anchorhub.builtin.github.cstrategies import MarkdownATXCollectorStrategy import anchorhub.builtin.github.converter as converter import anchorhub.builtin.github.switches as ghswitches def make_github_markdown_collector(opts): """ Creates a Collector object used for parsing Markdown files with a GitHub style anchor transformation :param opts: Namespace object of options for the AnchorHub program. Usually created from command-line arguments. It must contain a 'wrapper_regex' attribute :return: a Collector object designed for collecting tag/anchor pairs from Markdown files using GitHub style anchors """ assert hasattr(opts, 'wrapper_regex') atx = MarkdownATXCollectorStrategy(opts) code_block_switch = ghswitches.code_block_switch strategies = [atx] switches = [code_block_switch] return Collector(converter.create_anchor_from_header, strategies, switches=switches) Use Setext strategy in GitHub built in Collector""" File that initializes a Collector object designed for GitHub style markdown files. """ from anchorhub.collector import Collector from anchorhub.builtin.github.cstrategies import \ MarkdownATXCollectorStrategy, MarkdownSetextCollectorStrategy import anchorhub.builtin.github.converter as converter import anchorhub.builtin.github.switches as ghswitches def make_github_markdown_collector(opts): """ Creates a Collector object used for parsing Markdown files with a GitHub style anchor transformation :param opts: Namespace object of options for the AnchorHub program. Usually created from command-line arguments. It must contain a 'wrapper_regex' attribute :return: a Collector object designed for collecting tag/anchor pairs from Markdown files using GitHub style anchors """ assert hasattr(opts, 'wrapper_regex') atx = MarkdownATXCollectorStrategy(opts) setext = MarkdownSetextCollectorStrategy(opts) code_block_switch = ghswitches.code_block_switch strategies = [atx, setext] switches = [code_block_switch] return Collector(converter.create_anchor_from_header, strategies, switches=switches)
<commit_before>""" File that initializes a Collector object designed for GitHub style markdown files. """ from anchorhub.collector import Collector from anchorhub.builtin.github.cstrategies import MarkdownATXCollectorStrategy import anchorhub.builtin.github.converter as converter import anchorhub.builtin.github.switches as ghswitches def make_github_markdown_collector(opts): """ Creates a Collector object used for parsing Markdown files with a GitHub style anchor transformation :param opts: Namespace object of options for the AnchorHub program. Usually created from command-line arguments. It must contain a 'wrapper_regex' attribute :return: a Collector object designed for collecting tag/anchor pairs from Markdown files using GitHub style anchors """ assert hasattr(opts, 'wrapper_regex') atx = MarkdownATXCollectorStrategy(opts) code_block_switch = ghswitches.code_block_switch strategies = [atx] switches = [code_block_switch] return Collector(converter.create_anchor_from_header, strategies, switches=switches) <commit_msg>Use Setext strategy in GitHub built in Collector<commit_after>""" File that initializes a Collector object designed for GitHub style markdown files. """ from anchorhub.collector import Collector from anchorhub.builtin.github.cstrategies import \ MarkdownATXCollectorStrategy, MarkdownSetextCollectorStrategy import anchorhub.builtin.github.converter as converter import anchorhub.builtin.github.switches as ghswitches def make_github_markdown_collector(opts): """ Creates a Collector object used for parsing Markdown files with a GitHub style anchor transformation :param opts: Namespace object of options for the AnchorHub program. Usually created from command-line arguments. It must contain a 'wrapper_regex' attribute :return: a Collector object designed for collecting tag/anchor pairs from Markdown files using GitHub style anchors """ assert hasattr(opts, 'wrapper_regex') atx = MarkdownATXCollectorStrategy(opts) setext = MarkdownSetextCollectorStrategy(opts) code_block_switch = ghswitches.code_block_switch strategies = [atx, setext] switches = [code_block_switch] return Collector(converter.create_anchor_from_header, strategies, switches=switches)
e9f25dd0c9028613ef7317ad3a8287dc60b9a217
slave/skia_slave_scripts/chromeos_install.py
slave/skia_slave_scripts/chromeos_install.py
#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Install all executables, and any runtime resources that are needed by *both* Test and Bench builders. """ from build_step import BuildStep from chromeos_build_step import ChromeOSBuildStep from install import Install from utils import ssh_utils import os import sys class ChromeOSInstall(ChromeOSBuildStep, Install): def _PutSCP(self, executable): ssh_utils.PutSCP(local_path=os.path.join('out', 'config', 'chromeos-' + self._args['board'], self._configuration, executable), remote_path='/usr/local/bin/skia_%s' % executable, username=self._ssh_username, host=self._ssh_host, port=self._ssh_port) def _Run(self): super(ChromeOSInstall, self)._Run() self._PutSCP('tests') self._PutSCP('gm') self._PutSCP('render_pictures') self._PutSCP('render_pdfs') self._PutSCP('bench') self._PutSCP('bench_pictures') if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(ChromeOSInstall))
#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Install all executables, and any runtime resources that are needed by *both* Test and Bench builders. """ from build_step import BuildStep from chromeos_build_step import ChromeOSBuildStep from install import Install from utils import ssh_utils import os import sys class ChromeOSInstall(ChromeOSBuildStep, Install): def _PutSCP(self, executable): # First, make sure that the program isn't running. ssh_utils.RunSSH(self._ssh_username, self._ssh_host, self._ssh_port, ['killall', 'skia_%s' % executable]) ssh_utils.PutSCP(local_path=os.path.join('out', 'config', 'chromeos-' + self._args['board'], self._configuration, executable), remote_path='/usr/local/bin/skia_%s' % executable, username=self._ssh_username, host=self._ssh_host, port=self._ssh_port) def _Run(self): super(ChromeOSInstall, self)._Run() self._PutSCP('tests') self._PutSCP('gm') self._PutSCP('render_pictures') self._PutSCP('render_pdfs') self._PutSCP('bench') self._PutSCP('bench_pictures') if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(ChromeOSInstall))
Kill running Skia processes in ChromeOS Install step
Kill running Skia processes in ChromeOS Install step (RunBuilders:Test-ChromeOS-Alex-GMA3150-x86-Debug,Test-ChromeOS-Alex-GMA3150-x86-Release,Perf-ChromeOS-Alex-GMA3150-x86-Release) R=rmistry@google.com Review URL: https://codereview.chromium.org/17599009 git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9748 2bbb7eff-a529-9590-31e7-b0007b416f81
Python
bsd-3-clause
google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot
#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Install all executables, and any runtime resources that are needed by *both* Test and Bench builders. """ from build_step import BuildStep from chromeos_build_step import ChromeOSBuildStep from install import Install from utils import ssh_utils import os import sys class ChromeOSInstall(ChromeOSBuildStep, Install): def _PutSCP(self, executable): ssh_utils.PutSCP(local_path=os.path.join('out', 'config', 'chromeos-' + self._args['board'], self._configuration, executable), remote_path='/usr/local/bin/skia_%s' % executable, username=self._ssh_username, host=self._ssh_host, port=self._ssh_port) def _Run(self): super(ChromeOSInstall, self)._Run() self._PutSCP('tests') self._PutSCP('gm') self._PutSCP('render_pictures') self._PutSCP('render_pdfs') self._PutSCP('bench') self._PutSCP('bench_pictures') if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(ChromeOSInstall)) Kill running Skia processes in ChromeOS Install step (RunBuilders:Test-ChromeOS-Alex-GMA3150-x86-Debug,Test-ChromeOS-Alex-GMA3150-x86-Release,Perf-ChromeOS-Alex-GMA3150-x86-Release) R=rmistry@google.com Review URL: https://codereview.chromium.org/17599009 git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9748 2bbb7eff-a529-9590-31e7-b0007b416f81
#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Install all executables, and any runtime resources that are needed by *both* Test and Bench builders. """ from build_step import BuildStep from chromeos_build_step import ChromeOSBuildStep from install import Install from utils import ssh_utils import os import sys class ChromeOSInstall(ChromeOSBuildStep, Install): def _PutSCP(self, executable): # First, make sure that the program isn't running. ssh_utils.RunSSH(self._ssh_username, self._ssh_host, self._ssh_port, ['killall', 'skia_%s' % executable]) ssh_utils.PutSCP(local_path=os.path.join('out', 'config', 'chromeos-' + self._args['board'], self._configuration, executable), remote_path='/usr/local/bin/skia_%s' % executable, username=self._ssh_username, host=self._ssh_host, port=self._ssh_port) def _Run(self): super(ChromeOSInstall, self)._Run() self._PutSCP('tests') self._PutSCP('gm') self._PutSCP('render_pictures') self._PutSCP('render_pdfs') self._PutSCP('bench') self._PutSCP('bench_pictures') if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(ChromeOSInstall))
<commit_before>#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Install all executables, and any runtime resources that are needed by *both* Test and Bench builders. """ from build_step import BuildStep from chromeos_build_step import ChromeOSBuildStep from install import Install from utils import ssh_utils import os import sys class ChromeOSInstall(ChromeOSBuildStep, Install): def _PutSCP(self, executable): ssh_utils.PutSCP(local_path=os.path.join('out', 'config', 'chromeos-' + self._args['board'], self._configuration, executable), remote_path='/usr/local/bin/skia_%s' % executable, username=self._ssh_username, host=self._ssh_host, port=self._ssh_port) def _Run(self): super(ChromeOSInstall, self)._Run() self._PutSCP('tests') self._PutSCP('gm') self._PutSCP('render_pictures') self._PutSCP('render_pdfs') self._PutSCP('bench') self._PutSCP('bench_pictures') if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(ChromeOSInstall)) <commit_msg>Kill running Skia processes in ChromeOS Install step (RunBuilders:Test-ChromeOS-Alex-GMA3150-x86-Debug,Test-ChromeOS-Alex-GMA3150-x86-Release,Perf-ChromeOS-Alex-GMA3150-x86-Release) R=rmistry@google.com Review URL: https://codereview.chromium.org/17599009 git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9748 2bbb7eff-a529-9590-31e7-b0007b416f81<commit_after>
#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Install all executables, and any runtime resources that are needed by *both* Test and Bench builders. """ from build_step import BuildStep from chromeos_build_step import ChromeOSBuildStep from install import Install from utils import ssh_utils import os import sys class ChromeOSInstall(ChromeOSBuildStep, Install): def _PutSCP(self, executable): # First, make sure that the program isn't running. ssh_utils.RunSSH(self._ssh_username, self._ssh_host, self._ssh_port, ['killall', 'skia_%s' % executable]) ssh_utils.PutSCP(local_path=os.path.join('out', 'config', 'chromeos-' + self._args['board'], self._configuration, executable), remote_path='/usr/local/bin/skia_%s' % executable, username=self._ssh_username, host=self._ssh_host, port=self._ssh_port) def _Run(self): super(ChromeOSInstall, self)._Run() self._PutSCP('tests') self._PutSCP('gm') self._PutSCP('render_pictures') self._PutSCP('render_pdfs') self._PutSCP('bench') self._PutSCP('bench_pictures') if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(ChromeOSInstall))
#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Install all executables, and any runtime resources that are needed by *both* Test and Bench builders. """ from build_step import BuildStep from chromeos_build_step import ChromeOSBuildStep from install import Install from utils import ssh_utils import os import sys class ChromeOSInstall(ChromeOSBuildStep, Install): def _PutSCP(self, executable): ssh_utils.PutSCP(local_path=os.path.join('out', 'config', 'chromeos-' + self._args['board'], self._configuration, executable), remote_path='/usr/local/bin/skia_%s' % executable, username=self._ssh_username, host=self._ssh_host, port=self._ssh_port) def _Run(self): super(ChromeOSInstall, self)._Run() self._PutSCP('tests') self._PutSCP('gm') self._PutSCP('render_pictures') self._PutSCP('render_pdfs') self._PutSCP('bench') self._PutSCP('bench_pictures') if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(ChromeOSInstall)) Kill running Skia processes in ChromeOS Install step (RunBuilders:Test-ChromeOS-Alex-GMA3150-x86-Debug,Test-ChromeOS-Alex-GMA3150-x86-Release,Perf-ChromeOS-Alex-GMA3150-x86-Release) R=rmistry@google.com Review URL: https://codereview.chromium.org/17599009 git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9748 2bbb7eff-a529-9590-31e7-b0007b416f81#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Install all executables, and any runtime resources that are needed by *both* Test and Bench builders. """ from build_step import BuildStep from chromeos_build_step import ChromeOSBuildStep from install import Install from utils import ssh_utils import os import sys class ChromeOSInstall(ChromeOSBuildStep, Install): def _PutSCP(self, executable): # First, make sure that the program isn't running. ssh_utils.RunSSH(self._ssh_username, self._ssh_host, self._ssh_port, ['killall', 'skia_%s' % executable]) ssh_utils.PutSCP(local_path=os.path.join('out', 'config', 'chromeos-' + self._args['board'], self._configuration, executable), remote_path='/usr/local/bin/skia_%s' % executable, username=self._ssh_username, host=self._ssh_host, port=self._ssh_port) def _Run(self): super(ChromeOSInstall, self)._Run() self._PutSCP('tests') self._PutSCP('gm') self._PutSCP('render_pictures') self._PutSCP('render_pdfs') self._PutSCP('bench') self._PutSCP('bench_pictures') if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(ChromeOSInstall))
<commit_before>#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Install all executables, and any runtime resources that are needed by *both* Test and Bench builders. """ from build_step import BuildStep from chromeos_build_step import ChromeOSBuildStep from install import Install from utils import ssh_utils import os import sys class ChromeOSInstall(ChromeOSBuildStep, Install): def _PutSCP(self, executable): ssh_utils.PutSCP(local_path=os.path.join('out', 'config', 'chromeos-' + self._args['board'], self._configuration, executable), remote_path='/usr/local/bin/skia_%s' % executable, username=self._ssh_username, host=self._ssh_host, port=self._ssh_port) def _Run(self): super(ChromeOSInstall, self)._Run() self._PutSCP('tests') self._PutSCP('gm') self._PutSCP('render_pictures') self._PutSCP('render_pdfs') self._PutSCP('bench') self._PutSCP('bench_pictures') if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(ChromeOSInstall)) <commit_msg>Kill running Skia processes in ChromeOS Install step (RunBuilders:Test-ChromeOS-Alex-GMA3150-x86-Debug,Test-ChromeOS-Alex-GMA3150-x86-Release,Perf-ChromeOS-Alex-GMA3150-x86-Release) R=rmistry@google.com Review URL: https://codereview.chromium.org/17599009 git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9748 2bbb7eff-a529-9590-31e7-b0007b416f81<commit_after>#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Install all executables, and any runtime resources that are needed by *both* Test and Bench builders. """ from build_step import BuildStep from chromeos_build_step import ChromeOSBuildStep from install import Install from utils import ssh_utils import os import sys class ChromeOSInstall(ChromeOSBuildStep, Install): def _PutSCP(self, executable): # First, make sure that the program isn't running. ssh_utils.RunSSH(self._ssh_username, self._ssh_host, self._ssh_port, ['killall', 'skia_%s' % executable]) ssh_utils.PutSCP(local_path=os.path.join('out', 'config', 'chromeos-' + self._args['board'], self._configuration, executable), remote_path='/usr/local/bin/skia_%s' % executable, username=self._ssh_username, host=self._ssh_host, port=self._ssh_port) def _Run(self): super(ChromeOSInstall, self)._Run() self._PutSCP('tests') self._PutSCP('gm') self._PutSCP('render_pictures') self._PutSCP('render_pdfs') self._PutSCP('bench') self._PutSCP('bench_pictures') if '__main__' == __name__: sys.exit(BuildStep.RunBuildStep(ChromeOSInstall))
182f070c69e59907eeda3c261d833a492af46967
rojak-database/generate_media_data.py
rojak-database/generate_media_data.py
import MySQLdb as mysql from faker import Factory # Open database connection db = mysql.connect('localhost', 'root', 'rojak', 'rojak_database') # Create new db cursor cursor = db.cursor() sql = ''' INSERT INTO `media`(`name`, `website_url`, `logo_url`, `facebookpage_url`, `slogan`) VALUES ('{}', '{}', '{}', '{}', '{}'); ''' MAX_MEDIA=100 fake = Factory.create('it_IT') for i in xrange(MAX_MEDIA): # Generate random data for the media media_name = fake.name() + ' Media ' + str(i) website_name = media_name.lower().replace(' ', '') website_name = website_name.replace("'", '') website_url = 'https://{}.com'.format(website_name) cat_txt = website_name cat_img = 'http://lorempixel.com/500/500/cats/{}'.format(cat_txt) logo_url = cat_img facebookpage_url = 'https://facebook.com/{}'.format(website_name) slogan = ' '.join(fake.text().split()[:5]) # Parse the SQL command insert_sql = sql.format(media_name, website_url, logo_url, facebookpage_url, slogan) # insert to the database try: cursor.execute(insert_sql) db.commit() except mysql.Error as err: print("Something went wrong: {}".format(err)) db.rollback() # Close the DB connection db.close()
import MySQLdb as mysql from faker import Factory # Open database connection db = mysql.connect('localhost', 'root', 'rojak', 'rojak_database') # Create new db cursor cursor = db.cursor() sql = ''' INSERT INTO `media`(`name`, `website_url`, `logo_url`, `facebookpage_url`, `slogan`) VALUES ('{}', '{}', '{}', '{}', '{}'); ''' MAX_MEDIA=100 fake = Factory.create() for i in xrange(MAX_MEDIA): # Generate random data for the media media_name = fake.name() + ' Media ' + str(i) website_name = media_name.lower().replace(' ', '') website_url = 'https://{}.com'.format(website_name) cat_txt = website_name cat_img = 'http://lorempixel.com/500/500/cats/{}'.format(cat_txt) logo_url = cat_img facebookpage_url = 'https://facebook.com/{}'.format(website_name) slogan = ' '.join(fake.text().split()[:5]) # Parse the SQL command insert_sql = sql.format(media_name, website_url, logo_url, facebookpage_url, slogan) # insert to the database try: cursor.execute(insert_sql) db.commit() except mysql.Error as err: print("Something went wrong: {}".format(err)) db.rollback() # Close the DB connection db.close()
Update the default language for the media generator
Update the default language for the media generator
Python
bsd-3-clause
CodeRiderz/rojak,bobbypriambodo/rojak,reinarduswindy/rojak,CodeRiderz/rojak,bobbypriambodo/rojak,reinarduswindy/rojak,bobbypriambodo/rojak,reinarduswindy/rojak,CodeRiderz/rojak,pyk/rojak,pyk/rojak,bobbypriambodo/rojak,pyk/rojak,bobbypriambodo/rojak,rawgni/rojak,bobbypriambodo/rojak,CodeRiderz/rojak,pyk/rojak,pyk/rojak,CodeRiderz/rojak,rawgni/rojak,pyk/rojak,rawgni/rojak,rawgni/rojak,rawgni/rojak,pyk/rojak,reinarduswindy/rojak,reinarduswindy/rojak,CodeRiderz/rojak,CodeRiderz/rojak,rawgni/rojak,rawgni/rojak,reinarduswindy/rojak,reinarduswindy/rojak,bobbypriambodo/rojak
import MySQLdb as mysql from faker import Factory # Open database connection db = mysql.connect('localhost', 'root', 'rojak', 'rojak_database') # Create new db cursor cursor = db.cursor() sql = ''' INSERT INTO `media`(`name`, `website_url`, `logo_url`, `facebookpage_url`, `slogan`) VALUES ('{}', '{}', '{}', '{}', '{}'); ''' MAX_MEDIA=100 fake = Factory.create('it_IT') for i in xrange(MAX_MEDIA): # Generate random data for the media media_name = fake.name() + ' Media ' + str(i) website_name = media_name.lower().replace(' ', '') website_name = website_name.replace("'", '') website_url = 'https://{}.com'.format(website_name) cat_txt = website_name cat_img = 'http://lorempixel.com/500/500/cats/{}'.format(cat_txt) logo_url = cat_img facebookpage_url = 'https://facebook.com/{}'.format(website_name) slogan = ' '.join(fake.text().split()[:5]) # Parse the SQL command insert_sql = sql.format(media_name, website_url, logo_url, facebookpage_url, slogan) # insert to the database try: cursor.execute(insert_sql) db.commit() except mysql.Error as err: print("Something went wrong: {}".format(err)) db.rollback() # Close the DB connection db.close() Update the default language for the media generator
import MySQLdb as mysql from faker import Factory # Open database connection db = mysql.connect('localhost', 'root', 'rojak', 'rojak_database') # Create new db cursor cursor = db.cursor() sql = ''' INSERT INTO `media`(`name`, `website_url`, `logo_url`, `facebookpage_url`, `slogan`) VALUES ('{}', '{}', '{}', '{}', '{}'); ''' MAX_MEDIA=100 fake = Factory.create() for i in xrange(MAX_MEDIA): # Generate random data for the media media_name = fake.name() + ' Media ' + str(i) website_name = media_name.lower().replace(' ', '') website_url = 'https://{}.com'.format(website_name) cat_txt = website_name cat_img = 'http://lorempixel.com/500/500/cats/{}'.format(cat_txt) logo_url = cat_img facebookpage_url = 'https://facebook.com/{}'.format(website_name) slogan = ' '.join(fake.text().split()[:5]) # Parse the SQL command insert_sql = sql.format(media_name, website_url, logo_url, facebookpage_url, slogan) # insert to the database try: cursor.execute(insert_sql) db.commit() except mysql.Error as err: print("Something went wrong: {}".format(err)) db.rollback() # Close the DB connection db.close()
<commit_before>import MySQLdb as mysql from faker import Factory # Open database connection db = mysql.connect('localhost', 'root', 'rojak', 'rojak_database') # Create new db cursor cursor = db.cursor() sql = ''' INSERT INTO `media`(`name`, `website_url`, `logo_url`, `facebookpage_url`, `slogan`) VALUES ('{}', '{}', '{}', '{}', '{}'); ''' MAX_MEDIA=100 fake = Factory.create('it_IT') for i in xrange(MAX_MEDIA): # Generate random data for the media media_name = fake.name() + ' Media ' + str(i) website_name = media_name.lower().replace(' ', '') website_name = website_name.replace("'", '') website_url = 'https://{}.com'.format(website_name) cat_txt = website_name cat_img = 'http://lorempixel.com/500/500/cats/{}'.format(cat_txt) logo_url = cat_img facebookpage_url = 'https://facebook.com/{}'.format(website_name) slogan = ' '.join(fake.text().split()[:5]) # Parse the SQL command insert_sql = sql.format(media_name, website_url, logo_url, facebookpage_url, slogan) # insert to the database try: cursor.execute(insert_sql) db.commit() except mysql.Error as err: print("Something went wrong: {}".format(err)) db.rollback() # Close the DB connection db.close() <commit_msg>Update the default language for the media generator<commit_after>
import MySQLdb as mysql from faker import Factory # Open database connection db = mysql.connect('localhost', 'root', 'rojak', 'rojak_database') # Create new db cursor cursor = db.cursor() sql = ''' INSERT INTO `media`(`name`, `website_url`, `logo_url`, `facebookpage_url`, `slogan`) VALUES ('{}', '{}', '{}', '{}', '{}'); ''' MAX_MEDIA=100 fake = Factory.create() for i in xrange(MAX_MEDIA): # Generate random data for the media media_name = fake.name() + ' Media ' + str(i) website_name = media_name.lower().replace(' ', '') website_url = 'https://{}.com'.format(website_name) cat_txt = website_name cat_img = 'http://lorempixel.com/500/500/cats/{}'.format(cat_txt) logo_url = cat_img facebookpage_url = 'https://facebook.com/{}'.format(website_name) slogan = ' '.join(fake.text().split()[:5]) # Parse the SQL command insert_sql = sql.format(media_name, website_url, logo_url, facebookpage_url, slogan) # insert to the database try: cursor.execute(insert_sql) db.commit() except mysql.Error as err: print("Something went wrong: {}".format(err)) db.rollback() # Close the DB connection db.close()
import MySQLdb as mysql from faker import Factory # Open database connection db = mysql.connect('localhost', 'root', 'rojak', 'rojak_database') # Create new db cursor cursor = db.cursor() sql = ''' INSERT INTO `media`(`name`, `website_url`, `logo_url`, `facebookpage_url`, `slogan`) VALUES ('{}', '{}', '{}', '{}', '{}'); ''' MAX_MEDIA=100 fake = Factory.create('it_IT') for i in xrange(MAX_MEDIA): # Generate random data for the media media_name = fake.name() + ' Media ' + str(i) website_name = media_name.lower().replace(' ', '') website_name = website_name.replace("'", '') website_url = 'https://{}.com'.format(website_name) cat_txt = website_name cat_img = 'http://lorempixel.com/500/500/cats/{}'.format(cat_txt) logo_url = cat_img facebookpage_url = 'https://facebook.com/{}'.format(website_name) slogan = ' '.join(fake.text().split()[:5]) # Parse the SQL command insert_sql = sql.format(media_name, website_url, logo_url, facebookpage_url, slogan) # insert to the database try: cursor.execute(insert_sql) db.commit() except mysql.Error as err: print("Something went wrong: {}".format(err)) db.rollback() # Close the DB connection db.close() Update the default language for the media generatorimport MySQLdb as mysql from faker import Factory # Open database connection db = mysql.connect('localhost', 'root', 'rojak', 'rojak_database') # Create new db cursor cursor = db.cursor() sql = ''' INSERT INTO `media`(`name`, `website_url`, `logo_url`, `facebookpage_url`, `slogan`) VALUES ('{}', '{}', '{}', '{}', '{}'); ''' MAX_MEDIA=100 fake = Factory.create() for i in xrange(MAX_MEDIA): # Generate random data for the media media_name = fake.name() + ' Media ' + str(i) website_name = media_name.lower().replace(' ', '') website_url = 'https://{}.com'.format(website_name) cat_txt = website_name cat_img = 'http://lorempixel.com/500/500/cats/{}'.format(cat_txt) logo_url = cat_img facebookpage_url = 'https://facebook.com/{}'.format(website_name) slogan = ' '.join(fake.text().split()[:5]) # Parse the SQL command insert_sql = sql.format(media_name, website_url, logo_url, facebookpage_url, slogan) # insert to the database try: cursor.execute(insert_sql) db.commit() except mysql.Error as err: print("Something went wrong: {}".format(err)) db.rollback() # Close the DB connection db.close()
<commit_before>import MySQLdb as mysql from faker import Factory # Open database connection db = mysql.connect('localhost', 'root', 'rojak', 'rojak_database') # Create new db cursor cursor = db.cursor() sql = ''' INSERT INTO `media`(`name`, `website_url`, `logo_url`, `facebookpage_url`, `slogan`) VALUES ('{}', '{}', '{}', '{}', '{}'); ''' MAX_MEDIA=100 fake = Factory.create('it_IT') for i in xrange(MAX_MEDIA): # Generate random data for the media media_name = fake.name() + ' Media ' + str(i) website_name = media_name.lower().replace(' ', '') website_name = website_name.replace("'", '') website_url = 'https://{}.com'.format(website_name) cat_txt = website_name cat_img = 'http://lorempixel.com/500/500/cats/{}'.format(cat_txt) logo_url = cat_img facebookpage_url = 'https://facebook.com/{}'.format(website_name) slogan = ' '.join(fake.text().split()[:5]) # Parse the SQL command insert_sql = sql.format(media_name, website_url, logo_url, facebookpage_url, slogan) # insert to the database try: cursor.execute(insert_sql) db.commit() except mysql.Error as err: print("Something went wrong: {}".format(err)) db.rollback() # Close the DB connection db.close() <commit_msg>Update the default language for the media generator<commit_after>import MySQLdb as mysql from faker import Factory # Open database connection db = mysql.connect('localhost', 'root', 'rojak', 'rojak_database') # Create new db cursor cursor = db.cursor() sql = ''' INSERT INTO `media`(`name`, `website_url`, `logo_url`, `facebookpage_url`, `slogan`) VALUES ('{}', '{}', '{}', '{}', '{}'); ''' MAX_MEDIA=100 fake = Factory.create() for i in xrange(MAX_MEDIA): # Generate random data for the media media_name = fake.name() + ' Media ' + str(i) website_name = media_name.lower().replace(' ', '') website_url = 'https://{}.com'.format(website_name) cat_txt = website_name cat_img = 'http://lorempixel.com/500/500/cats/{}'.format(cat_txt) logo_url = cat_img facebookpage_url = 'https://facebook.com/{}'.format(website_name) slogan = ' '.join(fake.text().split()[:5]) # Parse the SQL command insert_sql = sql.format(media_name, website_url, logo_url, facebookpage_url, slogan) # insert to the database try: cursor.execute(insert_sql) db.commit() except mysql.Error as err: print("Something went wrong: {}".format(err)) db.rollback() # Close the DB connection db.close()
5bc0226fe1ad03495e97dc2933fa17d18cd38bb9
meetup_facebook_bot/models/speaker.py
meetup_facebook_bot/models/speaker.py
from sqlalchemy import Column, BIGINT, String, Integer from meetup_facebook_bot.models.base import Base class Speaker(Base): __tablename__ = 'speakers' id = Column(Integer, primary_key=True, autoincrement=True) page_scoped_id = Column(BIGINT, unique=True) name = Column(String(128), nullable=False) token = Column(String(128), unique=True, nullable=False) def __repr__(self): return '<Speaker %r>' % self.id
from sqlalchemy import Column, BIGINT, String, Integer from meetup_facebook_bot.models.base import Base class Speaker(Base): __tablename__ = 'speakers' id = Column(Integer, primary_key=True, autoincrement=True) page_scoped_id = Column(BIGINT) name = Column(String(128), nullable=False) token = Column(String(128), unique=True, nullable=False) def __repr__(self): return '<Speaker %r>' % self.id
Remove uniqueness constraint from page_scoped_id
Remove uniqueness constraint from page_scoped_id
Python
mit
Stark-Mountain/meetup-facebook-bot,Stark-Mountain/meetup-facebook-bot
from sqlalchemy import Column, BIGINT, String, Integer from meetup_facebook_bot.models.base import Base class Speaker(Base): __tablename__ = 'speakers' id = Column(Integer, primary_key=True, autoincrement=True) page_scoped_id = Column(BIGINT, unique=True) name = Column(String(128), nullable=False) token = Column(String(128), unique=True, nullable=False) def __repr__(self): return '<Speaker %r>' % self.id Remove uniqueness constraint from page_scoped_id
from sqlalchemy import Column, BIGINT, String, Integer from meetup_facebook_bot.models.base import Base class Speaker(Base): __tablename__ = 'speakers' id = Column(Integer, primary_key=True, autoincrement=True) page_scoped_id = Column(BIGINT) name = Column(String(128), nullable=False) token = Column(String(128), unique=True, nullable=False) def __repr__(self): return '<Speaker %r>' % self.id
<commit_before>from sqlalchemy import Column, BIGINT, String, Integer from meetup_facebook_bot.models.base import Base class Speaker(Base): __tablename__ = 'speakers' id = Column(Integer, primary_key=True, autoincrement=True) page_scoped_id = Column(BIGINT, unique=True) name = Column(String(128), nullable=False) token = Column(String(128), unique=True, nullable=False) def __repr__(self): return '<Speaker %r>' % self.id <commit_msg>Remove uniqueness constraint from page_scoped_id<commit_after>
from sqlalchemy import Column, BIGINT, String, Integer from meetup_facebook_bot.models.base import Base class Speaker(Base): __tablename__ = 'speakers' id = Column(Integer, primary_key=True, autoincrement=True) page_scoped_id = Column(BIGINT) name = Column(String(128), nullable=False) token = Column(String(128), unique=True, nullable=False) def __repr__(self): return '<Speaker %r>' % self.id
from sqlalchemy import Column, BIGINT, String, Integer from meetup_facebook_bot.models.base import Base class Speaker(Base): __tablename__ = 'speakers' id = Column(Integer, primary_key=True, autoincrement=True) page_scoped_id = Column(BIGINT, unique=True) name = Column(String(128), nullable=False) token = Column(String(128), unique=True, nullable=False) def __repr__(self): return '<Speaker %r>' % self.id Remove uniqueness constraint from page_scoped_idfrom sqlalchemy import Column, BIGINT, String, Integer from meetup_facebook_bot.models.base import Base class Speaker(Base): __tablename__ = 'speakers' id = Column(Integer, primary_key=True, autoincrement=True) page_scoped_id = Column(BIGINT) name = Column(String(128), nullable=False) token = Column(String(128), unique=True, nullable=False) def __repr__(self): return '<Speaker %r>' % self.id
<commit_before>from sqlalchemy import Column, BIGINT, String, Integer from meetup_facebook_bot.models.base import Base class Speaker(Base): __tablename__ = 'speakers' id = Column(Integer, primary_key=True, autoincrement=True) page_scoped_id = Column(BIGINT, unique=True) name = Column(String(128), nullable=False) token = Column(String(128), unique=True, nullable=False) def __repr__(self): return '<Speaker %r>' % self.id <commit_msg>Remove uniqueness constraint from page_scoped_id<commit_after>from sqlalchemy import Column, BIGINT, String, Integer from meetup_facebook_bot.models.base import Base class Speaker(Base): __tablename__ = 'speakers' id = Column(Integer, primary_key=True, autoincrement=True) page_scoped_id = Column(BIGINT) name = Column(String(128), nullable=False) token = Column(String(128), unique=True, nullable=False) def __repr__(self): return '<Speaker %r>' % self.id
a87010c4c7ba6c9f1f295c8da688946d149c7fbd
metal/mmtl/glue/make_glue_datasets.py
metal/mmtl/glue/make_glue_datasets.py
import argparse import os import dill from metal.mmtl.glue.glue_datasets import get_glue_dataset def make_datasets(task, bert_version): datasets = {} for split in ["train", "dev", "test"]: datasets[split] = get_glue_dataset(task, split, bert_version, run_spacy=True) return datasets def pickle_datasets(datasets, task, bert_version): bert_str = bert_version.replace("-", "_") filename = f"{task}_{bert_str}_spacy_datasets" filepath = f"{os.environ['GLUEDATA']}/datasets/{filename}.dill" with open(filepath, "wb") as f: dill.dump(datasets, f) return True if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--task", type=str) parser.add_argument("--bert_version", type=str, default="bert-base-uncased") args = parser.parse_args() assert args.task.isupper() datasets = make_datasets(args.task, args.bert_version) if pickle_datasets(datasets, args.task, args.bert_version): print(f"FINISHED: {args.task}") else: print(f"FAILED: {args.task}")
import argparse import os import dill from metal.mmtl.glue.glue_datasets import get_glue_dataset def make_datasets(task, bert_version): datasets = {} for split in ["train", "dev", "test"]: datasets[split] = get_glue_dataset( task, split, bert_version, max_len=200, run_spacy=True ) return datasets def pickle_datasets(datasets, task, bert_version): bert_str = bert_version.replace("-", "_") filename = f"{task}_{bert_str}_spacy_datasets" filepath = f"{os.environ['GLUEDATA']}/datasets/{filename}.dill" with open(filepath, "wb") as f: dill.dump(datasets, f) return True if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--task", type=str) parser.add_argument("--bert_version", type=str, default="bert-base-uncased") args = parser.parse_args() assert args.task.isupper() datasets = make_datasets(args.task, args.bert_version) if pickle_datasets(datasets, args.task, args.bert_version): print(f"FINISHED: {args.task}") else: print(f"FAILED: {args.task}")
Set max_len default to 200 when making glue datasets
Set max_len default to 200 when making glue datasets
Python
apache-2.0
HazyResearch/metal,HazyResearch/metal
import argparse import os import dill from metal.mmtl.glue.glue_datasets import get_glue_dataset def make_datasets(task, bert_version): datasets = {} for split in ["train", "dev", "test"]: datasets[split] = get_glue_dataset(task, split, bert_version, run_spacy=True) return datasets def pickle_datasets(datasets, task, bert_version): bert_str = bert_version.replace("-", "_") filename = f"{task}_{bert_str}_spacy_datasets" filepath = f"{os.environ['GLUEDATA']}/datasets/{filename}.dill" with open(filepath, "wb") as f: dill.dump(datasets, f) return True if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--task", type=str) parser.add_argument("--bert_version", type=str, default="bert-base-uncased") args = parser.parse_args() assert args.task.isupper() datasets = make_datasets(args.task, args.bert_version) if pickle_datasets(datasets, args.task, args.bert_version): print(f"FINISHED: {args.task}") else: print(f"FAILED: {args.task}") Set max_len default to 200 when making glue datasets
import argparse import os import dill from metal.mmtl.glue.glue_datasets import get_glue_dataset def make_datasets(task, bert_version): datasets = {} for split in ["train", "dev", "test"]: datasets[split] = get_glue_dataset( task, split, bert_version, max_len=200, run_spacy=True ) return datasets def pickle_datasets(datasets, task, bert_version): bert_str = bert_version.replace("-", "_") filename = f"{task}_{bert_str}_spacy_datasets" filepath = f"{os.environ['GLUEDATA']}/datasets/{filename}.dill" with open(filepath, "wb") as f: dill.dump(datasets, f) return True if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--task", type=str) parser.add_argument("--bert_version", type=str, default="bert-base-uncased") args = parser.parse_args() assert args.task.isupper() datasets = make_datasets(args.task, args.bert_version) if pickle_datasets(datasets, args.task, args.bert_version): print(f"FINISHED: {args.task}") else: print(f"FAILED: {args.task}")
<commit_before>import argparse import os import dill from metal.mmtl.glue.glue_datasets import get_glue_dataset def make_datasets(task, bert_version): datasets = {} for split in ["train", "dev", "test"]: datasets[split] = get_glue_dataset(task, split, bert_version, run_spacy=True) return datasets def pickle_datasets(datasets, task, bert_version): bert_str = bert_version.replace("-", "_") filename = f"{task}_{bert_str}_spacy_datasets" filepath = f"{os.environ['GLUEDATA']}/datasets/{filename}.dill" with open(filepath, "wb") as f: dill.dump(datasets, f) return True if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--task", type=str) parser.add_argument("--bert_version", type=str, default="bert-base-uncased") args = parser.parse_args() assert args.task.isupper() datasets = make_datasets(args.task, args.bert_version) if pickle_datasets(datasets, args.task, args.bert_version): print(f"FINISHED: {args.task}") else: print(f"FAILED: {args.task}") <commit_msg>Set max_len default to 200 when making glue datasets<commit_after>
import argparse import os import dill from metal.mmtl.glue.glue_datasets import get_glue_dataset def make_datasets(task, bert_version): datasets = {} for split in ["train", "dev", "test"]: datasets[split] = get_glue_dataset( task, split, bert_version, max_len=200, run_spacy=True ) return datasets def pickle_datasets(datasets, task, bert_version): bert_str = bert_version.replace("-", "_") filename = f"{task}_{bert_str}_spacy_datasets" filepath = f"{os.environ['GLUEDATA']}/datasets/{filename}.dill" with open(filepath, "wb") as f: dill.dump(datasets, f) return True if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--task", type=str) parser.add_argument("--bert_version", type=str, default="bert-base-uncased") args = parser.parse_args() assert args.task.isupper() datasets = make_datasets(args.task, args.bert_version) if pickle_datasets(datasets, args.task, args.bert_version): print(f"FINISHED: {args.task}") else: print(f"FAILED: {args.task}")
import argparse import os import dill from metal.mmtl.glue.glue_datasets import get_glue_dataset def make_datasets(task, bert_version): datasets = {} for split in ["train", "dev", "test"]: datasets[split] = get_glue_dataset(task, split, bert_version, run_spacy=True) return datasets def pickle_datasets(datasets, task, bert_version): bert_str = bert_version.replace("-", "_") filename = f"{task}_{bert_str}_spacy_datasets" filepath = f"{os.environ['GLUEDATA']}/datasets/{filename}.dill" with open(filepath, "wb") as f: dill.dump(datasets, f) return True if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--task", type=str) parser.add_argument("--bert_version", type=str, default="bert-base-uncased") args = parser.parse_args() assert args.task.isupper() datasets = make_datasets(args.task, args.bert_version) if pickle_datasets(datasets, args.task, args.bert_version): print(f"FINISHED: {args.task}") else: print(f"FAILED: {args.task}") Set max_len default to 200 when making glue datasetsimport argparse import os import dill from metal.mmtl.glue.glue_datasets import get_glue_dataset def make_datasets(task, bert_version): datasets = {} for split in ["train", "dev", "test"]: datasets[split] = get_glue_dataset( task, split, bert_version, max_len=200, run_spacy=True ) return datasets def pickle_datasets(datasets, task, bert_version): bert_str = bert_version.replace("-", "_") filename = f"{task}_{bert_str}_spacy_datasets" filepath = f"{os.environ['GLUEDATA']}/datasets/{filename}.dill" with open(filepath, "wb") as f: dill.dump(datasets, f) return True if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--task", type=str) parser.add_argument("--bert_version", type=str, default="bert-base-uncased") args = parser.parse_args() assert args.task.isupper() datasets = make_datasets(args.task, args.bert_version) if pickle_datasets(datasets, args.task, args.bert_version): print(f"FINISHED: {args.task}") else: print(f"FAILED: {args.task}")
<commit_before>import argparse import os import dill from metal.mmtl.glue.glue_datasets import get_glue_dataset def make_datasets(task, bert_version): datasets = {} for split in ["train", "dev", "test"]: datasets[split] = get_glue_dataset(task, split, bert_version, run_spacy=True) return datasets def pickle_datasets(datasets, task, bert_version): bert_str = bert_version.replace("-", "_") filename = f"{task}_{bert_str}_spacy_datasets" filepath = f"{os.environ['GLUEDATA']}/datasets/{filename}.dill" with open(filepath, "wb") as f: dill.dump(datasets, f) return True if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--task", type=str) parser.add_argument("--bert_version", type=str, default="bert-base-uncased") args = parser.parse_args() assert args.task.isupper() datasets = make_datasets(args.task, args.bert_version) if pickle_datasets(datasets, args.task, args.bert_version): print(f"FINISHED: {args.task}") else: print(f"FAILED: {args.task}") <commit_msg>Set max_len default to 200 when making glue datasets<commit_after>import argparse import os import dill from metal.mmtl.glue.glue_datasets import get_glue_dataset def make_datasets(task, bert_version): datasets = {} for split in ["train", "dev", "test"]: datasets[split] = get_glue_dataset( task, split, bert_version, max_len=200, run_spacy=True ) return datasets def pickle_datasets(datasets, task, bert_version): bert_str = bert_version.replace("-", "_") filename = f"{task}_{bert_str}_spacy_datasets" filepath = f"{os.environ['GLUEDATA']}/datasets/{filename}.dill" with open(filepath, "wb") as f: dill.dump(datasets, f) return True if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--task", type=str) parser.add_argument("--bert_version", type=str, default="bert-base-uncased") args = parser.parse_args() assert args.task.isupper() datasets = make_datasets(args.task, args.bert_version) if pickle_datasets(datasets, args.task, args.bert_version): print(f"FINISHED: {args.task}") else: print(f"FAILED: {args.task}")
e5bda294e291a2d96b4f703a89128de9ee53a495
src/geelweb/django/editos/models.py
src/geelweb/django/editos/models.py
from django.db import models from geelweb.django.editos import settings class Edito(models.Model): title = models.CharField(max_length=100) link = models.URLField() button_label = models.CharField(max_length=20, default="Go !", Required=False) image = models.FileField(upload_to="editos") text_content = models.CharField(max_length=400) display_from = models.DateField() display_until = models.DateField() active = models.BooleanField(default=True) text_theme = models.CharField(max_length=10, choices=settings.EDITOS_THEMES, default=settings.EDITOS_DEFAULT_THEME) def __unicode__(self): return self.title
from django.db import models from geelweb.django.editos import settings class Edito(models.Model): title = models.CharField(max_length=100) link = models.URLField() button_label = models.CharField(max_length=20, default="Go !", Required=False) image = models.FileField(upload_to="editos") text_content = models.CharField(max_length=400) display_from = models.DateField() display_until = models.DateField() active = models.BooleanField(default=True) text_theme = models.CharField(max_length=10, choices=settings.EDITOS_THEMES, default=settings.EDITOS_DEFAULT_THEME) date_created = models.DateTimeField(auto_now_add=True) date_updated = models.DateTimeField(auto_now=True) def __unicode__(self): return self.title
Add date_created and date_updated to editos.Edito model
Add date_created and date_updated to editos.Edito model
Python
mit
geelweb/django-editos,geelweb/django-editos
from django.db import models from geelweb.django.editos import settings class Edito(models.Model): title = models.CharField(max_length=100) link = models.URLField() button_label = models.CharField(max_length=20, default="Go !", Required=False) image = models.FileField(upload_to="editos") text_content = models.CharField(max_length=400) display_from = models.DateField() display_until = models.DateField() active = models.BooleanField(default=True) text_theme = models.CharField(max_length=10, choices=settings.EDITOS_THEMES, default=settings.EDITOS_DEFAULT_THEME) def __unicode__(self): return self.title Add date_created and date_updated to editos.Edito model
from django.db import models from geelweb.django.editos import settings class Edito(models.Model): title = models.CharField(max_length=100) link = models.URLField() button_label = models.CharField(max_length=20, default="Go !", Required=False) image = models.FileField(upload_to="editos") text_content = models.CharField(max_length=400) display_from = models.DateField() display_until = models.DateField() active = models.BooleanField(default=True) text_theme = models.CharField(max_length=10, choices=settings.EDITOS_THEMES, default=settings.EDITOS_DEFAULT_THEME) date_created = models.DateTimeField(auto_now_add=True) date_updated = models.DateTimeField(auto_now=True) def __unicode__(self): return self.title
<commit_before>from django.db import models from geelweb.django.editos import settings class Edito(models.Model): title = models.CharField(max_length=100) link = models.URLField() button_label = models.CharField(max_length=20, default="Go !", Required=False) image = models.FileField(upload_to="editos") text_content = models.CharField(max_length=400) display_from = models.DateField() display_until = models.DateField() active = models.BooleanField(default=True) text_theme = models.CharField(max_length=10, choices=settings.EDITOS_THEMES, default=settings.EDITOS_DEFAULT_THEME) def __unicode__(self): return self.title <commit_msg>Add date_created and date_updated to editos.Edito model<commit_after>
from django.db import models from geelweb.django.editos import settings class Edito(models.Model): title = models.CharField(max_length=100) link = models.URLField() button_label = models.CharField(max_length=20, default="Go !", Required=False) image = models.FileField(upload_to="editos") text_content = models.CharField(max_length=400) display_from = models.DateField() display_until = models.DateField() active = models.BooleanField(default=True) text_theme = models.CharField(max_length=10, choices=settings.EDITOS_THEMES, default=settings.EDITOS_DEFAULT_THEME) date_created = models.DateTimeField(auto_now_add=True) date_updated = models.DateTimeField(auto_now=True) def __unicode__(self): return self.title
from django.db import models from geelweb.django.editos import settings class Edito(models.Model): title = models.CharField(max_length=100) link = models.URLField() button_label = models.CharField(max_length=20, default="Go !", Required=False) image = models.FileField(upload_to="editos") text_content = models.CharField(max_length=400) display_from = models.DateField() display_until = models.DateField() active = models.BooleanField(default=True) text_theme = models.CharField(max_length=10, choices=settings.EDITOS_THEMES, default=settings.EDITOS_DEFAULT_THEME) def __unicode__(self): return self.title Add date_created and date_updated to editos.Edito modelfrom django.db import models from geelweb.django.editos import settings class Edito(models.Model): title = models.CharField(max_length=100) link = models.URLField() button_label = models.CharField(max_length=20, default="Go !", Required=False) image = models.FileField(upload_to="editos") text_content = models.CharField(max_length=400) display_from = models.DateField() display_until = models.DateField() active = models.BooleanField(default=True) text_theme = models.CharField(max_length=10, choices=settings.EDITOS_THEMES, default=settings.EDITOS_DEFAULT_THEME) date_created = models.DateTimeField(auto_now_add=True) date_updated = models.DateTimeField(auto_now=True) def __unicode__(self): return self.title
<commit_before>from django.db import models from geelweb.django.editos import settings class Edito(models.Model): title = models.CharField(max_length=100) link = models.URLField() button_label = models.CharField(max_length=20, default="Go !", Required=False) image = models.FileField(upload_to="editos") text_content = models.CharField(max_length=400) display_from = models.DateField() display_until = models.DateField() active = models.BooleanField(default=True) text_theme = models.CharField(max_length=10, choices=settings.EDITOS_THEMES, default=settings.EDITOS_DEFAULT_THEME) def __unicode__(self): return self.title <commit_msg>Add date_created and date_updated to editos.Edito model<commit_after>from django.db import models from geelweb.django.editos import settings class Edito(models.Model): title = models.CharField(max_length=100) link = models.URLField() button_label = models.CharField(max_length=20, default="Go !", Required=False) image = models.FileField(upload_to="editos") text_content = models.CharField(max_length=400) display_from = models.DateField() display_until = models.DateField() active = models.BooleanField(default=True) text_theme = models.CharField(max_length=10, choices=settings.EDITOS_THEMES, default=settings.EDITOS_DEFAULT_THEME) date_created = models.DateTimeField(auto_now_add=True) date_updated = models.DateTimeField(auto_now=True) def __unicode__(self): return self.title
a292c87137386bfdc7bc09b1f16269fe1c382858
bedrock/mozorg/templatetags/qrcode.py
bedrock/mozorg/templatetags/qrcode.py
from hashlib import sha1 from django.conf import settings import qrcode as qr from django_jinja import library from jinja2 import Markup from qrcode.image.svg import SvgPathImage QR_CACHE_PATH = settings.DATA_PATH.joinpath('qrcode_cache') QR_CACHE_PATH.mkdir(exist_ok=True) @library.global_function def qrcode(data, box_size=20): name = sha1(f'{data}-{box_size}'.encode('utf-8')).hexdigest() filename = f'{name}.svg' filepath = QR_CACHE_PATH.joinpath(filename) if not filepath.exists(): img = qr.make(data, image_factory=SvgPathImage, box_size=box_size) img.save(str(filepath)) with filepath.open() as fp: return Markup(fp.read())
from hashlib import sha1 from pathlib import Path import qrcode as qr from django_jinja import library from jinja2 import Markup from qrcode.image.svg import SvgPathImage QR_CACHE_PATH = Path('/tmp/qrcode_cache') QR_CACHE_PATH.mkdir(exist_ok=True) @library.global_function def qrcode(data, box_size=20): name = sha1(f'{data}-{box_size}'.encode('utf-8')).hexdigest() filename = f'{name}.svg' filepath = QR_CACHE_PATH.joinpath(filename) if not filepath.exists(): img = qr.make(data, image_factory=SvgPathImage, box_size=box_size) img.save(str(filepath)) with filepath.open() as fp: return Markup(fp.read())
Move QR Code cache to /tmp
Move QR Code cache to /tmp
Python
mpl-2.0
sylvestre/bedrock,craigcook/bedrock,alexgibson/bedrock,flodolo/bedrock,mozilla/bedrock,mozilla/bedrock,craigcook/bedrock,MichaelKohler/bedrock,MichaelKohler/bedrock,craigcook/bedrock,alexgibson/bedrock,pascalchevrel/bedrock,flodolo/bedrock,pascalchevrel/bedrock,MichaelKohler/bedrock,pascalchevrel/bedrock,alexgibson/bedrock,sylvestre/bedrock,mozilla/bedrock,flodolo/bedrock,flodolo/bedrock,sylvestre/bedrock,MichaelKohler/bedrock,pascalchevrel/bedrock,mozilla/bedrock,alexgibson/bedrock,craigcook/bedrock,sylvestre/bedrock
from hashlib import sha1 from django.conf import settings import qrcode as qr from django_jinja import library from jinja2 import Markup from qrcode.image.svg import SvgPathImage QR_CACHE_PATH = settings.DATA_PATH.joinpath('qrcode_cache') QR_CACHE_PATH.mkdir(exist_ok=True) @library.global_function def qrcode(data, box_size=20): name = sha1(f'{data}-{box_size}'.encode('utf-8')).hexdigest() filename = f'{name}.svg' filepath = QR_CACHE_PATH.joinpath(filename) if not filepath.exists(): img = qr.make(data, image_factory=SvgPathImage, box_size=box_size) img.save(str(filepath)) with filepath.open() as fp: return Markup(fp.read()) Move QR Code cache to /tmp
from hashlib import sha1 from pathlib import Path import qrcode as qr from django_jinja import library from jinja2 import Markup from qrcode.image.svg import SvgPathImage QR_CACHE_PATH = Path('/tmp/qrcode_cache') QR_CACHE_PATH.mkdir(exist_ok=True) @library.global_function def qrcode(data, box_size=20): name = sha1(f'{data}-{box_size}'.encode('utf-8')).hexdigest() filename = f'{name}.svg' filepath = QR_CACHE_PATH.joinpath(filename) if not filepath.exists(): img = qr.make(data, image_factory=SvgPathImage, box_size=box_size) img.save(str(filepath)) with filepath.open() as fp: return Markup(fp.read())
<commit_before>from hashlib import sha1 from django.conf import settings import qrcode as qr from django_jinja import library from jinja2 import Markup from qrcode.image.svg import SvgPathImage QR_CACHE_PATH = settings.DATA_PATH.joinpath('qrcode_cache') QR_CACHE_PATH.mkdir(exist_ok=True) @library.global_function def qrcode(data, box_size=20): name = sha1(f'{data}-{box_size}'.encode('utf-8')).hexdigest() filename = f'{name}.svg' filepath = QR_CACHE_PATH.joinpath(filename) if not filepath.exists(): img = qr.make(data, image_factory=SvgPathImage, box_size=box_size) img.save(str(filepath)) with filepath.open() as fp: return Markup(fp.read()) <commit_msg>Move QR Code cache to /tmp<commit_after>
from hashlib import sha1 from pathlib import Path import qrcode as qr from django_jinja import library from jinja2 import Markup from qrcode.image.svg import SvgPathImage QR_CACHE_PATH = Path('/tmp/qrcode_cache') QR_CACHE_PATH.mkdir(exist_ok=True) @library.global_function def qrcode(data, box_size=20): name = sha1(f'{data}-{box_size}'.encode('utf-8')).hexdigest() filename = f'{name}.svg' filepath = QR_CACHE_PATH.joinpath(filename) if not filepath.exists(): img = qr.make(data, image_factory=SvgPathImage, box_size=box_size) img.save(str(filepath)) with filepath.open() as fp: return Markup(fp.read())
from hashlib import sha1 from django.conf import settings import qrcode as qr from django_jinja import library from jinja2 import Markup from qrcode.image.svg import SvgPathImage QR_CACHE_PATH = settings.DATA_PATH.joinpath('qrcode_cache') QR_CACHE_PATH.mkdir(exist_ok=True) @library.global_function def qrcode(data, box_size=20): name = sha1(f'{data}-{box_size}'.encode('utf-8')).hexdigest() filename = f'{name}.svg' filepath = QR_CACHE_PATH.joinpath(filename) if not filepath.exists(): img = qr.make(data, image_factory=SvgPathImage, box_size=box_size) img.save(str(filepath)) with filepath.open() as fp: return Markup(fp.read()) Move QR Code cache to /tmpfrom hashlib import sha1 from pathlib import Path import qrcode as qr from django_jinja import library from jinja2 import Markup from qrcode.image.svg import SvgPathImage QR_CACHE_PATH = Path('/tmp/qrcode_cache') QR_CACHE_PATH.mkdir(exist_ok=True) @library.global_function def qrcode(data, box_size=20): name = sha1(f'{data}-{box_size}'.encode('utf-8')).hexdigest() filename = f'{name}.svg' filepath = QR_CACHE_PATH.joinpath(filename) if not filepath.exists(): img = qr.make(data, image_factory=SvgPathImage, box_size=box_size) img.save(str(filepath)) with filepath.open() as fp: return Markup(fp.read())
<commit_before>from hashlib import sha1 from django.conf import settings import qrcode as qr from django_jinja import library from jinja2 import Markup from qrcode.image.svg import SvgPathImage QR_CACHE_PATH = settings.DATA_PATH.joinpath('qrcode_cache') QR_CACHE_PATH.mkdir(exist_ok=True) @library.global_function def qrcode(data, box_size=20): name = sha1(f'{data}-{box_size}'.encode('utf-8')).hexdigest() filename = f'{name}.svg' filepath = QR_CACHE_PATH.joinpath(filename) if not filepath.exists(): img = qr.make(data, image_factory=SvgPathImage, box_size=box_size) img.save(str(filepath)) with filepath.open() as fp: return Markup(fp.read()) <commit_msg>Move QR Code cache to /tmp<commit_after>from hashlib import sha1 from pathlib import Path import qrcode as qr from django_jinja import library from jinja2 import Markup from qrcode.image.svg import SvgPathImage QR_CACHE_PATH = Path('/tmp/qrcode_cache') QR_CACHE_PATH.mkdir(exist_ok=True) @library.global_function def qrcode(data, box_size=20): name = sha1(f'{data}-{box_size}'.encode('utf-8')).hexdigest() filename = f'{name}.svg' filepath = QR_CACHE_PATH.joinpath(filename) if not filepath.exists(): img = qr.make(data, image_factory=SvgPathImage, box_size=box_size) img.save(str(filepath)) with filepath.open() as fp: return Markup(fp.read())
b89115165c55e51e76a533ba4eb9637897319e0a
oidc_provider/management/commands/creatersakey.py
oidc_provider/management/commands/creatersakey.py
from Crypto.PublicKey import RSA from django.conf import settings from django.core.management.base import BaseCommand class Command(BaseCommand): help = 'Randomly generate a new RSA key for the OpenID server' def handle(self, *args, **options): try: key = RSA.generate(1024) file_path = settings.BASE_DIR + '/OIDC_RSA_KEY.pem' with open(file_path, 'w') as f: f.write(key.exportKey('PEM')) self.stdout.write('RSA key successfully created at: ' + file_path) except Exception as e: self.stdout.write('Something goes wrong: ' + e.message)
from Crypto.PublicKey import RSA from django.conf import settings from django.core.management.base import BaseCommand class Command(BaseCommand): help = 'Randomly generate a new RSA key for the OpenID server' def handle(self, *args, **options): try: key = RSA.generate(1024) file_path = settings.BASE_DIR + '/OIDC_RSA_KEY.pem' with open(file_path, 'w') as f: f.write(key.exportKey('PEM')) self.stdout.write('RSA key successfully created at: ' + file_path) except Exception as e: self.stdout.write('Something goes wrong: {0}'.format(e))
Fix use of deprecated Exception.message in Python 3
Fix use of deprecated Exception.message in Python 3
Python
mit
torreco/django-oidc-provider,ByteInternet/django-oidc-provider,wojtek-fliposports/django-oidc-provider,wayward710/django-oidc-provider,bunnyinc/django-oidc-provider,juanifioren/django-oidc-provider,wojtek-fliposports/django-oidc-provider,ByteInternet/django-oidc-provider,torreco/django-oidc-provider,bunnyinc/django-oidc-provider,juanifioren/django-oidc-provider,wayward710/django-oidc-provider
from Crypto.PublicKey import RSA from django.conf import settings from django.core.management.base import BaseCommand class Command(BaseCommand): help = 'Randomly generate a new RSA key for the OpenID server' def handle(self, *args, **options): try: key = RSA.generate(1024) file_path = settings.BASE_DIR + '/OIDC_RSA_KEY.pem' with open(file_path, 'w') as f: f.write(key.exportKey('PEM')) self.stdout.write('RSA key successfully created at: ' + file_path) except Exception as e: self.stdout.write('Something goes wrong: ' + e.message) Fix use of deprecated Exception.message in Python 3
from Crypto.PublicKey import RSA from django.conf import settings from django.core.management.base import BaseCommand class Command(BaseCommand): help = 'Randomly generate a new RSA key for the OpenID server' def handle(self, *args, **options): try: key = RSA.generate(1024) file_path = settings.BASE_DIR + '/OIDC_RSA_KEY.pem' with open(file_path, 'w') as f: f.write(key.exportKey('PEM')) self.stdout.write('RSA key successfully created at: ' + file_path) except Exception as e: self.stdout.write('Something goes wrong: {0}'.format(e))
<commit_before>from Crypto.PublicKey import RSA from django.conf import settings from django.core.management.base import BaseCommand class Command(BaseCommand): help = 'Randomly generate a new RSA key for the OpenID server' def handle(self, *args, **options): try: key = RSA.generate(1024) file_path = settings.BASE_DIR + '/OIDC_RSA_KEY.pem' with open(file_path, 'w') as f: f.write(key.exportKey('PEM')) self.stdout.write('RSA key successfully created at: ' + file_path) except Exception as e: self.stdout.write('Something goes wrong: ' + e.message) <commit_msg>Fix use of deprecated Exception.message in Python 3<commit_after>
from Crypto.PublicKey import RSA from django.conf import settings from django.core.management.base import BaseCommand class Command(BaseCommand): help = 'Randomly generate a new RSA key for the OpenID server' def handle(self, *args, **options): try: key = RSA.generate(1024) file_path = settings.BASE_DIR + '/OIDC_RSA_KEY.pem' with open(file_path, 'w') as f: f.write(key.exportKey('PEM')) self.stdout.write('RSA key successfully created at: ' + file_path) except Exception as e: self.stdout.write('Something goes wrong: {0}'.format(e))
from Crypto.PublicKey import RSA from django.conf import settings from django.core.management.base import BaseCommand class Command(BaseCommand): help = 'Randomly generate a new RSA key for the OpenID server' def handle(self, *args, **options): try: key = RSA.generate(1024) file_path = settings.BASE_DIR + '/OIDC_RSA_KEY.pem' with open(file_path, 'w') as f: f.write(key.exportKey('PEM')) self.stdout.write('RSA key successfully created at: ' + file_path) except Exception as e: self.stdout.write('Something goes wrong: ' + e.message) Fix use of deprecated Exception.message in Python 3from Crypto.PublicKey import RSA from django.conf import settings from django.core.management.base import BaseCommand class Command(BaseCommand): help = 'Randomly generate a new RSA key for the OpenID server' def handle(self, *args, **options): try: key = RSA.generate(1024) file_path = settings.BASE_DIR + '/OIDC_RSA_KEY.pem' with open(file_path, 'w') as f: f.write(key.exportKey('PEM')) self.stdout.write('RSA key successfully created at: ' + file_path) except Exception as e: self.stdout.write('Something goes wrong: {0}'.format(e))
<commit_before>from Crypto.PublicKey import RSA from django.conf import settings from django.core.management.base import BaseCommand class Command(BaseCommand): help = 'Randomly generate a new RSA key for the OpenID server' def handle(self, *args, **options): try: key = RSA.generate(1024) file_path = settings.BASE_DIR + '/OIDC_RSA_KEY.pem' with open(file_path, 'w') as f: f.write(key.exportKey('PEM')) self.stdout.write('RSA key successfully created at: ' + file_path) except Exception as e: self.stdout.write('Something goes wrong: ' + e.message) <commit_msg>Fix use of deprecated Exception.message in Python 3<commit_after>from Crypto.PublicKey import RSA from django.conf import settings from django.core.management.base import BaseCommand class Command(BaseCommand): help = 'Randomly generate a new RSA key for the OpenID server' def handle(self, *args, **options): try: key = RSA.generate(1024) file_path = settings.BASE_DIR + '/OIDC_RSA_KEY.pem' with open(file_path, 'w') as f: f.write(key.exportKey('PEM')) self.stdout.write('RSA key successfully created at: ' + file_path) except Exception as e: self.stdout.write('Something goes wrong: {0}'.format(e))
4de72b4bd349ebf16c0046c4ed9034914c03ffb5
cea/interfaces/dashboard/api/utils.py
cea/interfaces/dashboard/api/utils.py
from flask import current_app import cea.config import cea.inputlocator def deconstruct_parameters(p): params = {'name': p.name, 'type': p.typename, 'value': p.get(), 'help': p.help} if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': config = current_app.cea_config locator = cea.inputlocator.InputLocator(config.scenario) params['choices'] = {wn: locator.get_weather( wn) for wn in locator.get_weather_names()} elif p.typename == 'DatabasePathParameter': params['choices'] = p._choices return params
from flask import current_app import cea.config import cea.inputlocator def deconstruct_parameters(p: cea.config.Parameter): params = {'name': p.name, 'type': p.typename, 'help': p.help} try: params["value"] = p.get() except cea.ConfigError as e: print(e) params["value"] = "" if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': config = current_app.cea_config locator = cea.inputlocator.InputLocator(config.scenario) params['choices'] = {wn: locator.get_weather( wn) for wn in locator.get_weather_names()} elif p.typename == 'DatabasePathParameter': params['choices'] = p._choices return params
Fix `weather_helper` bug when creating new scenario
Fix `weather_helper` bug when creating new scenario
Python
mit
architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst
from flask import current_app import cea.config import cea.inputlocator def deconstruct_parameters(p): params = {'name': p.name, 'type': p.typename, 'value': p.get(), 'help': p.help} if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': config = current_app.cea_config locator = cea.inputlocator.InputLocator(config.scenario) params['choices'] = {wn: locator.get_weather( wn) for wn in locator.get_weather_names()} elif p.typename == 'DatabasePathParameter': params['choices'] = p._choices return paramsFix `weather_helper` bug when creating new scenario
from flask import current_app import cea.config import cea.inputlocator def deconstruct_parameters(p: cea.config.Parameter): params = {'name': p.name, 'type': p.typename, 'help': p.help} try: params["value"] = p.get() except cea.ConfigError as e: print(e) params["value"] = "" if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': config = current_app.cea_config locator = cea.inputlocator.InputLocator(config.scenario) params['choices'] = {wn: locator.get_weather( wn) for wn in locator.get_weather_names()} elif p.typename == 'DatabasePathParameter': params['choices'] = p._choices return params
<commit_before> from flask import current_app import cea.config import cea.inputlocator def deconstruct_parameters(p): params = {'name': p.name, 'type': p.typename, 'value': p.get(), 'help': p.help} if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': config = current_app.cea_config locator = cea.inputlocator.InputLocator(config.scenario) params['choices'] = {wn: locator.get_weather( wn) for wn in locator.get_weather_names()} elif p.typename == 'DatabasePathParameter': params['choices'] = p._choices return params<commit_msg>Fix `weather_helper` bug when creating new scenario<commit_after>
from flask import current_app import cea.config import cea.inputlocator def deconstruct_parameters(p: cea.config.Parameter): params = {'name': p.name, 'type': p.typename, 'help': p.help} try: params["value"] = p.get() except cea.ConfigError as e: print(e) params["value"] = "" if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': config = current_app.cea_config locator = cea.inputlocator.InputLocator(config.scenario) params['choices'] = {wn: locator.get_weather( wn) for wn in locator.get_weather_names()} elif p.typename == 'DatabasePathParameter': params['choices'] = p._choices return params
from flask import current_app import cea.config import cea.inputlocator def deconstruct_parameters(p): params = {'name': p.name, 'type': p.typename, 'value': p.get(), 'help': p.help} if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': config = current_app.cea_config locator = cea.inputlocator.InputLocator(config.scenario) params['choices'] = {wn: locator.get_weather( wn) for wn in locator.get_weather_names()} elif p.typename == 'DatabasePathParameter': params['choices'] = p._choices return paramsFix `weather_helper` bug when creating new scenario from flask import current_app import cea.config import cea.inputlocator def deconstruct_parameters(p: cea.config.Parameter): params = {'name': p.name, 'type': p.typename, 'help': p.help} try: params["value"] = p.get() except cea.ConfigError as e: print(e) params["value"] = "" if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': config = current_app.cea_config locator = cea.inputlocator.InputLocator(config.scenario) params['choices'] = {wn: locator.get_weather( wn) for wn in locator.get_weather_names()} elif p.typename == 'DatabasePathParameter': params['choices'] = p._choices return params
<commit_before> from flask import current_app import cea.config import cea.inputlocator def deconstruct_parameters(p): params = {'name': p.name, 'type': p.typename, 'value': p.get(), 'help': p.help} if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': config = current_app.cea_config locator = cea.inputlocator.InputLocator(config.scenario) params['choices'] = {wn: locator.get_weather( wn) for wn in locator.get_weather_names()} elif p.typename == 'DatabasePathParameter': params['choices'] = p._choices return params<commit_msg>Fix `weather_helper` bug when creating new scenario<commit_after> from flask import current_app import cea.config import cea.inputlocator def deconstruct_parameters(p: cea.config.Parameter): params = {'name': p.name, 'type': p.typename, 'help': p.help} try: params["value"] = p.get() except cea.ConfigError as e: print(e) params["value"] = "" if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': config = current_app.cea_config locator = cea.inputlocator.InputLocator(config.scenario) params['choices'] = {wn: locator.get_weather( wn) for wn in locator.get_weather_names()} elif p.typename == 'DatabasePathParameter': params['choices'] = p._choices return params
a390f3b711df89b2552bf059c89b1fd4f7ab1fa7
towel/templatetags/modelview_list.py
towel/templatetags/modelview_list.py
from django import template from django.db import models from django.utils.safestring import mark_safe register = template.Library() @register.filter def model_row(instance, fields): for name in fields.split(','): f = instance._meta.get_field(name) if isinstance(f, models.ForeignKey): fk = getattr(instance, f.name) if hasattr(fk, 'get_absolute_url'): value = mark_safe(u'<a href="%s">%s</a>' % ( fk.get_absolute_url(), fk)) else: value = unicode(fk) elif f.choices: value = getattr(instance, 'get_%s_display' % f.name)() else: value = unicode(getattr(instance, f.name)) yield (f.verbose_name, value)
from django import template from django.db import models from django.utils.safestring import mark_safe register = template.Library() @register.filter def model_row(instance, fields): for name in fields.split(','): try: f = instance._meta.get_field(name) except models.FieldDoesNotExist: attr = getattr(instance, name) if hasattr(attr, '__call__'): yield (name, attr()) yield (name, attr) continue if isinstance(f, models.ForeignKey): fk = getattr(instance, f.name) if hasattr(fk, 'get_absolute_url'): value = mark_safe(u'<a href="%s">%s</a>' % ( fk.get_absolute_url(), fk)) else: value = unicode(fk) elif f.choices: value = getattr(instance, 'get_%s_display' % f.name)() else: value = unicode(getattr(instance, f.name)) yield (f.verbose_name, value)
Handle methods and non-field attributes
model_row: Handle methods and non-field attributes
Python
bsd-3-clause
matthiask/towel,matthiask/towel,matthiask/towel,matthiask/towel
from django import template from django.db import models from django.utils.safestring import mark_safe register = template.Library() @register.filter def model_row(instance, fields): for name in fields.split(','): f = instance._meta.get_field(name) if isinstance(f, models.ForeignKey): fk = getattr(instance, f.name) if hasattr(fk, 'get_absolute_url'): value = mark_safe(u'<a href="%s">%s</a>' % ( fk.get_absolute_url(), fk)) else: value = unicode(fk) elif f.choices: value = getattr(instance, 'get_%s_display' % f.name)() else: value = unicode(getattr(instance, f.name)) yield (f.verbose_name, value) model_row: Handle methods and non-field attributes
from django import template from django.db import models from django.utils.safestring import mark_safe register = template.Library() @register.filter def model_row(instance, fields): for name in fields.split(','): try: f = instance._meta.get_field(name) except models.FieldDoesNotExist: attr = getattr(instance, name) if hasattr(attr, '__call__'): yield (name, attr()) yield (name, attr) continue if isinstance(f, models.ForeignKey): fk = getattr(instance, f.name) if hasattr(fk, 'get_absolute_url'): value = mark_safe(u'<a href="%s">%s</a>' % ( fk.get_absolute_url(), fk)) else: value = unicode(fk) elif f.choices: value = getattr(instance, 'get_%s_display' % f.name)() else: value = unicode(getattr(instance, f.name)) yield (f.verbose_name, value)
<commit_before>from django import template from django.db import models from django.utils.safestring import mark_safe register = template.Library() @register.filter def model_row(instance, fields): for name in fields.split(','): f = instance._meta.get_field(name) if isinstance(f, models.ForeignKey): fk = getattr(instance, f.name) if hasattr(fk, 'get_absolute_url'): value = mark_safe(u'<a href="%s">%s</a>' % ( fk.get_absolute_url(), fk)) else: value = unicode(fk) elif f.choices: value = getattr(instance, 'get_%s_display' % f.name)() else: value = unicode(getattr(instance, f.name)) yield (f.verbose_name, value) <commit_msg>model_row: Handle methods and non-field attributes<commit_after>
from django import template from django.db import models from django.utils.safestring import mark_safe register = template.Library() @register.filter def model_row(instance, fields): for name in fields.split(','): try: f = instance._meta.get_field(name) except models.FieldDoesNotExist: attr = getattr(instance, name) if hasattr(attr, '__call__'): yield (name, attr()) yield (name, attr) continue if isinstance(f, models.ForeignKey): fk = getattr(instance, f.name) if hasattr(fk, 'get_absolute_url'): value = mark_safe(u'<a href="%s">%s</a>' % ( fk.get_absolute_url(), fk)) else: value = unicode(fk) elif f.choices: value = getattr(instance, 'get_%s_display' % f.name)() else: value = unicode(getattr(instance, f.name)) yield (f.verbose_name, value)
from django import template from django.db import models from django.utils.safestring import mark_safe register = template.Library() @register.filter def model_row(instance, fields): for name in fields.split(','): f = instance._meta.get_field(name) if isinstance(f, models.ForeignKey): fk = getattr(instance, f.name) if hasattr(fk, 'get_absolute_url'): value = mark_safe(u'<a href="%s">%s</a>' % ( fk.get_absolute_url(), fk)) else: value = unicode(fk) elif f.choices: value = getattr(instance, 'get_%s_display' % f.name)() else: value = unicode(getattr(instance, f.name)) yield (f.verbose_name, value) model_row: Handle methods and non-field attributesfrom django import template from django.db import models from django.utils.safestring import mark_safe register = template.Library() @register.filter def model_row(instance, fields): for name in fields.split(','): try: f = instance._meta.get_field(name) except models.FieldDoesNotExist: attr = getattr(instance, name) if hasattr(attr, '__call__'): yield (name, attr()) yield (name, attr) continue if isinstance(f, models.ForeignKey): fk = getattr(instance, f.name) if hasattr(fk, 'get_absolute_url'): value = mark_safe(u'<a href="%s">%s</a>' % ( fk.get_absolute_url(), fk)) else: value = unicode(fk) elif f.choices: value = getattr(instance, 'get_%s_display' % f.name)() else: value = unicode(getattr(instance, f.name)) yield (f.verbose_name, value)
<commit_before>from django import template from django.db import models from django.utils.safestring import mark_safe register = template.Library() @register.filter def model_row(instance, fields): for name in fields.split(','): f = instance._meta.get_field(name) if isinstance(f, models.ForeignKey): fk = getattr(instance, f.name) if hasattr(fk, 'get_absolute_url'): value = mark_safe(u'<a href="%s">%s</a>' % ( fk.get_absolute_url(), fk)) else: value = unicode(fk) elif f.choices: value = getattr(instance, 'get_%s_display' % f.name)() else: value = unicode(getattr(instance, f.name)) yield (f.verbose_name, value) <commit_msg>model_row: Handle methods and non-field attributes<commit_after>from django import template from django.db import models from django.utils.safestring import mark_safe register = template.Library() @register.filter def model_row(instance, fields): for name in fields.split(','): try: f = instance._meta.get_field(name) except models.FieldDoesNotExist: attr = getattr(instance, name) if hasattr(attr, '__call__'): yield (name, attr()) yield (name, attr) continue if isinstance(f, models.ForeignKey): fk = getattr(instance, f.name) if hasattr(fk, 'get_absolute_url'): value = mark_safe(u'<a href="%s">%s</a>' % ( fk.get_absolute_url(), fk)) else: value = unicode(fk) elif f.choices: value = getattr(instance, 'get_%s_display' % f.name)() else: value = unicode(getattr(instance, f.name)) yield (f.verbose_name, value)
266105d371193ccf0f02a3975ebdca04980b675b
eche/special_forms.py
eche/special_forms.py
from funcy.seqs import partition from eche.eche_types import Symbol, List def def_exclamation_mark(ast): from eche.eval import eval_ast _, key, val = ast l = List() l.append(key) l.append(val) l.env = ast.env _, val = eval_ast(l, ast.env) ast.env[key] = val # if not isinstance(ast, Node): # ast = Node(data=ast) return ast def let_star(ast, env=None): from eche.env import get_default_env from eche.eval import eval_ast inner_env = get_default_env() inner_env.outer = env _, new_bindings, commands_in_new_env = ast new_bindings = partition(2, list(new_bindings.data)) for binding in new_bindings: key, val = binding inner_env[key] = val commands_in_new_env = eval_ast(commands_in_new_env, inner_env) new_ast = eval_ast(commands_in_new_env, inner_env) return new_ast special_forms = { Symbol('def!'): def_exclamation_mark, Symbol('let*'): let_star }
from funcy.seqs import partition from eche.eche_types import Symbol, List def def_exclamation_mark(ast, env=None): from eche.eval import eval_ast _, key, val = ast l = List() l.append(key) l.append(val) l.env = ast.env _, val = eval_ast(l, ast.env) ast.env[key] = val # if not isinstance(ast, Node): # ast = Node(data=ast) return ast def let_star(ast, env=None): from eche.env import get_default_env from eche.eval import eval_ast inner_env = get_default_env() inner_env.outer = env _, new_bindings, commands_in_new_env = ast new_bindings = partition(2, list(new_bindings.data)) for binding in new_bindings: key, val = binding inner_env[key] = val commands_in_new_env = eval_ast(commands_in_new_env, inner_env) new_ast = eval_ast(commands_in_new_env, inner_env) return new_ast special_forms = { Symbol('def!'): def_exclamation_mark, Symbol('let*'): let_star }
Add missing env keyword arg.
Add missing env keyword arg.
Python
mit
skk/eche
from funcy.seqs import partition from eche.eche_types import Symbol, List def def_exclamation_mark(ast): from eche.eval import eval_ast _, key, val = ast l = List() l.append(key) l.append(val) l.env = ast.env _, val = eval_ast(l, ast.env) ast.env[key] = val # if not isinstance(ast, Node): # ast = Node(data=ast) return ast def let_star(ast, env=None): from eche.env import get_default_env from eche.eval import eval_ast inner_env = get_default_env() inner_env.outer = env _, new_bindings, commands_in_new_env = ast new_bindings = partition(2, list(new_bindings.data)) for binding in new_bindings: key, val = binding inner_env[key] = val commands_in_new_env = eval_ast(commands_in_new_env, inner_env) new_ast = eval_ast(commands_in_new_env, inner_env) return new_ast special_forms = { Symbol('def!'): def_exclamation_mark, Symbol('let*'): let_star } Add missing env keyword arg.
from funcy.seqs import partition from eche.eche_types import Symbol, List def def_exclamation_mark(ast, env=None): from eche.eval import eval_ast _, key, val = ast l = List() l.append(key) l.append(val) l.env = ast.env _, val = eval_ast(l, ast.env) ast.env[key] = val # if not isinstance(ast, Node): # ast = Node(data=ast) return ast def let_star(ast, env=None): from eche.env import get_default_env from eche.eval import eval_ast inner_env = get_default_env() inner_env.outer = env _, new_bindings, commands_in_new_env = ast new_bindings = partition(2, list(new_bindings.data)) for binding in new_bindings: key, val = binding inner_env[key] = val commands_in_new_env = eval_ast(commands_in_new_env, inner_env) new_ast = eval_ast(commands_in_new_env, inner_env) return new_ast special_forms = { Symbol('def!'): def_exclamation_mark, Symbol('let*'): let_star }
<commit_before>from funcy.seqs import partition from eche.eche_types import Symbol, List def def_exclamation_mark(ast): from eche.eval import eval_ast _, key, val = ast l = List() l.append(key) l.append(val) l.env = ast.env _, val = eval_ast(l, ast.env) ast.env[key] = val # if not isinstance(ast, Node): # ast = Node(data=ast) return ast def let_star(ast, env=None): from eche.env import get_default_env from eche.eval import eval_ast inner_env = get_default_env() inner_env.outer = env _, new_bindings, commands_in_new_env = ast new_bindings = partition(2, list(new_bindings.data)) for binding in new_bindings: key, val = binding inner_env[key] = val commands_in_new_env = eval_ast(commands_in_new_env, inner_env) new_ast = eval_ast(commands_in_new_env, inner_env) return new_ast special_forms = { Symbol('def!'): def_exclamation_mark, Symbol('let*'): let_star } <commit_msg>Add missing env keyword arg.<commit_after>
from funcy.seqs import partition from eche.eche_types import Symbol, List def def_exclamation_mark(ast, env=None): from eche.eval import eval_ast _, key, val = ast l = List() l.append(key) l.append(val) l.env = ast.env _, val = eval_ast(l, ast.env) ast.env[key] = val # if not isinstance(ast, Node): # ast = Node(data=ast) return ast def let_star(ast, env=None): from eche.env import get_default_env from eche.eval import eval_ast inner_env = get_default_env() inner_env.outer = env _, new_bindings, commands_in_new_env = ast new_bindings = partition(2, list(new_bindings.data)) for binding in new_bindings: key, val = binding inner_env[key] = val commands_in_new_env = eval_ast(commands_in_new_env, inner_env) new_ast = eval_ast(commands_in_new_env, inner_env) return new_ast special_forms = { Symbol('def!'): def_exclamation_mark, Symbol('let*'): let_star }
from funcy.seqs import partition from eche.eche_types import Symbol, List def def_exclamation_mark(ast): from eche.eval import eval_ast _, key, val = ast l = List() l.append(key) l.append(val) l.env = ast.env _, val = eval_ast(l, ast.env) ast.env[key] = val # if not isinstance(ast, Node): # ast = Node(data=ast) return ast def let_star(ast, env=None): from eche.env import get_default_env from eche.eval import eval_ast inner_env = get_default_env() inner_env.outer = env _, new_bindings, commands_in_new_env = ast new_bindings = partition(2, list(new_bindings.data)) for binding in new_bindings: key, val = binding inner_env[key] = val commands_in_new_env = eval_ast(commands_in_new_env, inner_env) new_ast = eval_ast(commands_in_new_env, inner_env) return new_ast special_forms = { Symbol('def!'): def_exclamation_mark, Symbol('let*'): let_star } Add missing env keyword arg.from funcy.seqs import partition from eche.eche_types import Symbol, List def def_exclamation_mark(ast, env=None): from eche.eval import eval_ast _, key, val = ast l = List() l.append(key) l.append(val) l.env = ast.env _, val = eval_ast(l, ast.env) ast.env[key] = val # if not isinstance(ast, Node): # ast = Node(data=ast) return ast def let_star(ast, env=None): from eche.env import get_default_env from eche.eval import eval_ast inner_env = get_default_env() inner_env.outer = env _, new_bindings, commands_in_new_env = ast new_bindings = partition(2, list(new_bindings.data)) for binding in new_bindings: key, val = binding inner_env[key] = val commands_in_new_env = eval_ast(commands_in_new_env, inner_env) new_ast = eval_ast(commands_in_new_env, inner_env) return new_ast special_forms = { Symbol('def!'): def_exclamation_mark, Symbol('let*'): let_star }
<commit_before>from funcy.seqs import partition from eche.eche_types import Symbol, List def def_exclamation_mark(ast): from eche.eval import eval_ast _, key, val = ast l = List() l.append(key) l.append(val) l.env = ast.env _, val = eval_ast(l, ast.env) ast.env[key] = val # if not isinstance(ast, Node): # ast = Node(data=ast) return ast def let_star(ast, env=None): from eche.env import get_default_env from eche.eval import eval_ast inner_env = get_default_env() inner_env.outer = env _, new_bindings, commands_in_new_env = ast new_bindings = partition(2, list(new_bindings.data)) for binding in new_bindings: key, val = binding inner_env[key] = val commands_in_new_env = eval_ast(commands_in_new_env, inner_env) new_ast = eval_ast(commands_in_new_env, inner_env) return new_ast special_forms = { Symbol('def!'): def_exclamation_mark, Symbol('let*'): let_star } <commit_msg>Add missing env keyword arg.<commit_after>from funcy.seqs import partition from eche.eche_types import Symbol, List def def_exclamation_mark(ast, env=None): from eche.eval import eval_ast _, key, val = ast l = List() l.append(key) l.append(val) l.env = ast.env _, val = eval_ast(l, ast.env) ast.env[key] = val # if not isinstance(ast, Node): # ast = Node(data=ast) return ast def let_star(ast, env=None): from eche.env import get_default_env from eche.eval import eval_ast inner_env = get_default_env() inner_env.outer = env _, new_bindings, commands_in_new_env = ast new_bindings = partition(2, list(new_bindings.data)) for binding in new_bindings: key, val = binding inner_env[key] = val commands_in_new_env = eval_ast(commands_in_new_env, inner_env) new_ast = eval_ast(commands_in_new_env, inner_env) return new_ast special_forms = { Symbol('def!'): def_exclamation_mark, Symbol('let*'): let_star }
e2e1ea416d38565a419fff75f6ad4b776b74bc8e
blog/models.py
blog/models.py
from django.db import models from organizer.models import Startup, Tag # Model Field Reference # https://docs.djangoproject.com/en/1.8/ref/models/fields/ class Post(models.Model): title = models.CharField(max_length=63) slug = models.SlugField( max_length=63, help_text='A label for URL config', unique_for_month='pub_date') text = models.TextField() pub_date = models.DateField( 'date published', auto_now_add=True) tags = models.ManyToManyField( Tag, related_name='blog_posts') startups = models.ManyToManyField( Startup, related_name='blog_posts') def __str__(self): return "{} on {}".format( self.title, self.pub_date.strftime('%Y-%m-%d'))
from django.db import models from organizer.models import Startup, Tag # Model Field Reference # https://docs.djangoproject.com/en/1.8/ref/models/fields/ class Post(models.Model): title = models.CharField(max_length=63) slug = models.SlugField( max_length=63, help_text='A label for URL config', unique_for_month='pub_date') text = models.TextField() pub_date = models.DateField( 'date published', auto_now_add=True) tags = models.ManyToManyField( Tag, related_name='blog_posts') startups = models.ManyToManyField( Startup, related_name='blog_posts') class Meta: verbose_name = 'blog post' ordering = ['-pub_date', 'title'] get_latest_by = 'pub_date' def __str__(self): return "{} on {}".format( self.title, self.pub_date.strftime('%Y-%m-%d'))
Declare Meta class in Post model.
Ch03: Declare Meta class in Post model. [skip ci]
Python
bsd-2-clause
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
from django.db import models from organizer.models import Startup, Tag # Model Field Reference # https://docs.djangoproject.com/en/1.8/ref/models/fields/ class Post(models.Model): title = models.CharField(max_length=63) slug = models.SlugField( max_length=63, help_text='A label for URL config', unique_for_month='pub_date') text = models.TextField() pub_date = models.DateField( 'date published', auto_now_add=True) tags = models.ManyToManyField( Tag, related_name='blog_posts') startups = models.ManyToManyField( Startup, related_name='blog_posts') def __str__(self): return "{} on {}".format( self.title, self.pub_date.strftime('%Y-%m-%d')) Ch03: Declare Meta class in Post model. [skip ci]
from django.db import models from organizer.models import Startup, Tag # Model Field Reference # https://docs.djangoproject.com/en/1.8/ref/models/fields/ class Post(models.Model): title = models.CharField(max_length=63) slug = models.SlugField( max_length=63, help_text='A label for URL config', unique_for_month='pub_date') text = models.TextField() pub_date = models.DateField( 'date published', auto_now_add=True) tags = models.ManyToManyField( Tag, related_name='blog_posts') startups = models.ManyToManyField( Startup, related_name='blog_posts') class Meta: verbose_name = 'blog post' ordering = ['-pub_date', 'title'] get_latest_by = 'pub_date' def __str__(self): return "{} on {}".format( self.title, self.pub_date.strftime('%Y-%m-%d'))
<commit_before>from django.db import models from organizer.models import Startup, Tag # Model Field Reference # https://docs.djangoproject.com/en/1.8/ref/models/fields/ class Post(models.Model): title = models.CharField(max_length=63) slug = models.SlugField( max_length=63, help_text='A label for URL config', unique_for_month='pub_date') text = models.TextField() pub_date = models.DateField( 'date published', auto_now_add=True) tags = models.ManyToManyField( Tag, related_name='blog_posts') startups = models.ManyToManyField( Startup, related_name='blog_posts') def __str__(self): return "{} on {}".format( self.title, self.pub_date.strftime('%Y-%m-%d')) <commit_msg>Ch03: Declare Meta class in Post model. [skip ci]<commit_after>
from django.db import models from organizer.models import Startup, Tag # Model Field Reference # https://docs.djangoproject.com/en/1.8/ref/models/fields/ class Post(models.Model): title = models.CharField(max_length=63) slug = models.SlugField( max_length=63, help_text='A label for URL config', unique_for_month='pub_date') text = models.TextField() pub_date = models.DateField( 'date published', auto_now_add=True) tags = models.ManyToManyField( Tag, related_name='blog_posts') startups = models.ManyToManyField( Startup, related_name='blog_posts') class Meta: verbose_name = 'blog post' ordering = ['-pub_date', 'title'] get_latest_by = 'pub_date' def __str__(self): return "{} on {}".format( self.title, self.pub_date.strftime('%Y-%m-%d'))
from django.db import models from organizer.models import Startup, Tag # Model Field Reference # https://docs.djangoproject.com/en/1.8/ref/models/fields/ class Post(models.Model): title = models.CharField(max_length=63) slug = models.SlugField( max_length=63, help_text='A label for URL config', unique_for_month='pub_date') text = models.TextField() pub_date = models.DateField( 'date published', auto_now_add=True) tags = models.ManyToManyField( Tag, related_name='blog_posts') startups = models.ManyToManyField( Startup, related_name='blog_posts') def __str__(self): return "{} on {}".format( self.title, self.pub_date.strftime('%Y-%m-%d')) Ch03: Declare Meta class in Post model. [skip ci]from django.db import models from organizer.models import Startup, Tag # Model Field Reference # https://docs.djangoproject.com/en/1.8/ref/models/fields/ class Post(models.Model): title = models.CharField(max_length=63) slug = models.SlugField( max_length=63, help_text='A label for URL config', unique_for_month='pub_date') text = models.TextField() pub_date = models.DateField( 'date published', auto_now_add=True) tags = models.ManyToManyField( Tag, related_name='blog_posts') startups = models.ManyToManyField( Startup, related_name='blog_posts') class Meta: verbose_name = 'blog post' ordering = ['-pub_date', 'title'] get_latest_by = 'pub_date' def __str__(self): return "{} on {}".format( self.title, self.pub_date.strftime('%Y-%m-%d'))
<commit_before>from django.db import models from organizer.models import Startup, Tag # Model Field Reference # https://docs.djangoproject.com/en/1.8/ref/models/fields/ class Post(models.Model): title = models.CharField(max_length=63) slug = models.SlugField( max_length=63, help_text='A label for URL config', unique_for_month='pub_date') text = models.TextField() pub_date = models.DateField( 'date published', auto_now_add=True) tags = models.ManyToManyField( Tag, related_name='blog_posts') startups = models.ManyToManyField( Startup, related_name='blog_posts') def __str__(self): return "{} on {}".format( self.title, self.pub_date.strftime('%Y-%m-%d')) <commit_msg>Ch03: Declare Meta class in Post model. [skip ci]<commit_after>from django.db import models from organizer.models import Startup, Tag # Model Field Reference # https://docs.djangoproject.com/en/1.8/ref/models/fields/ class Post(models.Model): title = models.CharField(max_length=63) slug = models.SlugField( max_length=63, help_text='A label for URL config', unique_for_month='pub_date') text = models.TextField() pub_date = models.DateField( 'date published', auto_now_add=True) tags = models.ManyToManyField( Tag, related_name='blog_posts') startups = models.ManyToManyField( Startup, related_name='blog_posts') class Meta: verbose_name = 'blog post' ordering = ['-pub_date', 'title'] get_latest_by = 'pub_date' def __str__(self): return "{} on {}".format( self.title, self.pub_date.strftime('%Y-%m-%d'))
47c74590587e1e2ff5c79fd33c0019724ca96818
tasty_rest_framework/renderers.py
tasty_rest_framework/renderers.py
from django.utils import timezone from rest_framework.renderers import JSONRenderer from rest_framework.utils import encoders import datetime import warnings class TastyPieJSONEncoder(encoders.JSONEncoder): def default(self, o): if isinstance(o, datetime.datetime): # TastyPie doesn't modify timestamps, so we won't either. # Remove TZ -- this has got to be a bug in TastyPie! warning_message = "TastyPieJSONEncoder strips timezone information from datetime objects and converts them to the timezone in settings.TIME_ZONE. It is recommended that you don't use the TastyPieJSONRenderer unless you require *strict* TastyPie compatibility for datetime JSON serialization." warnings.warn(warning_message) r = timezone.make_naive(o, timezone.get_current_timezone()) return r.isoformat() return super(TastyPieJSONEncoder, self).default(o) class TastyPieJSONRenderer(JSONRenderer): encoder_class = TastyPieJSONEncoder
from django.utils import timezone from rest_framework.renderers import JSONRenderer from rest_framework.utils import encoders import datetime import warnings class TastyPieJSONEncoder(encoders.JSONEncoder): def default(self, o): if isinstance(o, datetime.datetime): # TastyPie doesn't modify timestamps, so we won't either. # Remove TZ -- this has got to be a bug in TastyPie! warning_message = "TastyPieJSONEncoder strips timezone information from datetime objects and converts them to the timezone in settings.TIME_ZONE. It is recommended that you don't use the TastyPieJSONRenderer unless you require *strict* TastyPie compatibility for datetime JSON serialization." warnings.warn(warning_message) try: r = timezone.make_naive(o, timezone.get_current_timezone()) except ValueError: # must already be naive r = o return r.isoformat() return super(TastyPieJSONEncoder, self).default(o) class TastyPieJSONRenderer(JSONRenderer): encoder_class = TastyPieJSONEncoder
Handle exception generated by timezone naive datetime objects.
Handle exception generated by timezone naive datetime objects.
Python
mit
erikcw/tasty_rest_framework
from django.utils import timezone from rest_framework.renderers import JSONRenderer from rest_framework.utils import encoders import datetime import warnings class TastyPieJSONEncoder(encoders.JSONEncoder): def default(self, o): if isinstance(o, datetime.datetime): # TastyPie doesn't modify timestamps, so we won't either. # Remove TZ -- this has got to be a bug in TastyPie! warning_message = "TastyPieJSONEncoder strips timezone information from datetime objects and converts them to the timezone in settings.TIME_ZONE. It is recommended that you don't use the TastyPieJSONRenderer unless you require *strict* TastyPie compatibility for datetime JSON serialization." warnings.warn(warning_message) r = timezone.make_naive(o, timezone.get_current_timezone()) return r.isoformat() return super(TastyPieJSONEncoder, self).default(o) class TastyPieJSONRenderer(JSONRenderer): encoder_class = TastyPieJSONEncoder Handle exception generated by timezone naive datetime objects.
from django.utils import timezone from rest_framework.renderers import JSONRenderer from rest_framework.utils import encoders import datetime import warnings class TastyPieJSONEncoder(encoders.JSONEncoder): def default(self, o): if isinstance(o, datetime.datetime): # TastyPie doesn't modify timestamps, so we won't either. # Remove TZ -- this has got to be a bug in TastyPie! warning_message = "TastyPieJSONEncoder strips timezone information from datetime objects and converts them to the timezone in settings.TIME_ZONE. It is recommended that you don't use the TastyPieJSONRenderer unless you require *strict* TastyPie compatibility for datetime JSON serialization." warnings.warn(warning_message) try: r = timezone.make_naive(o, timezone.get_current_timezone()) except ValueError: # must already be naive r = o return r.isoformat() return super(TastyPieJSONEncoder, self).default(o) class TastyPieJSONRenderer(JSONRenderer): encoder_class = TastyPieJSONEncoder
<commit_before>from django.utils import timezone from rest_framework.renderers import JSONRenderer from rest_framework.utils import encoders import datetime import warnings class TastyPieJSONEncoder(encoders.JSONEncoder): def default(self, o): if isinstance(o, datetime.datetime): # TastyPie doesn't modify timestamps, so we won't either. # Remove TZ -- this has got to be a bug in TastyPie! warning_message = "TastyPieJSONEncoder strips timezone information from datetime objects and converts them to the timezone in settings.TIME_ZONE. It is recommended that you don't use the TastyPieJSONRenderer unless you require *strict* TastyPie compatibility for datetime JSON serialization." warnings.warn(warning_message) r = timezone.make_naive(o, timezone.get_current_timezone()) return r.isoformat() return super(TastyPieJSONEncoder, self).default(o) class TastyPieJSONRenderer(JSONRenderer): encoder_class = TastyPieJSONEncoder <commit_msg>Handle exception generated by timezone naive datetime objects.<commit_after>
from django.utils import timezone from rest_framework.renderers import JSONRenderer from rest_framework.utils import encoders import datetime import warnings class TastyPieJSONEncoder(encoders.JSONEncoder): def default(self, o): if isinstance(o, datetime.datetime): # TastyPie doesn't modify timestamps, so we won't either. # Remove TZ -- this has got to be a bug in TastyPie! warning_message = "TastyPieJSONEncoder strips timezone information from datetime objects and converts them to the timezone in settings.TIME_ZONE. It is recommended that you don't use the TastyPieJSONRenderer unless you require *strict* TastyPie compatibility for datetime JSON serialization." warnings.warn(warning_message) try: r = timezone.make_naive(o, timezone.get_current_timezone()) except ValueError: # must already be naive r = o return r.isoformat() return super(TastyPieJSONEncoder, self).default(o) class TastyPieJSONRenderer(JSONRenderer): encoder_class = TastyPieJSONEncoder
from django.utils import timezone from rest_framework.renderers import JSONRenderer from rest_framework.utils import encoders import datetime import warnings class TastyPieJSONEncoder(encoders.JSONEncoder): def default(self, o): if isinstance(o, datetime.datetime): # TastyPie doesn't modify timestamps, so we won't either. # Remove TZ -- this has got to be a bug in TastyPie! warning_message = "TastyPieJSONEncoder strips timezone information from datetime objects and converts them to the timezone in settings.TIME_ZONE. It is recommended that you don't use the TastyPieJSONRenderer unless you require *strict* TastyPie compatibility for datetime JSON serialization." warnings.warn(warning_message) r = timezone.make_naive(o, timezone.get_current_timezone()) return r.isoformat() return super(TastyPieJSONEncoder, self).default(o) class TastyPieJSONRenderer(JSONRenderer): encoder_class = TastyPieJSONEncoder Handle exception generated by timezone naive datetime objects.from django.utils import timezone from rest_framework.renderers import JSONRenderer from rest_framework.utils import encoders import datetime import warnings class TastyPieJSONEncoder(encoders.JSONEncoder): def default(self, o): if isinstance(o, datetime.datetime): # TastyPie doesn't modify timestamps, so we won't either. # Remove TZ -- this has got to be a bug in TastyPie! warning_message = "TastyPieJSONEncoder strips timezone information from datetime objects and converts them to the timezone in settings.TIME_ZONE. It is recommended that you don't use the TastyPieJSONRenderer unless you require *strict* TastyPie compatibility for datetime JSON serialization." warnings.warn(warning_message) try: r = timezone.make_naive(o, timezone.get_current_timezone()) except ValueError: # must already be naive r = o return r.isoformat() return super(TastyPieJSONEncoder, self).default(o) class TastyPieJSONRenderer(JSONRenderer): encoder_class = TastyPieJSONEncoder
<commit_before>from django.utils import timezone from rest_framework.renderers import JSONRenderer from rest_framework.utils import encoders import datetime import warnings class TastyPieJSONEncoder(encoders.JSONEncoder): def default(self, o): if isinstance(o, datetime.datetime): # TastyPie doesn't modify timestamps, so we won't either. # Remove TZ -- this has got to be a bug in TastyPie! warning_message = "TastyPieJSONEncoder strips timezone information from datetime objects and converts them to the timezone in settings.TIME_ZONE. It is recommended that you don't use the TastyPieJSONRenderer unless you require *strict* TastyPie compatibility for datetime JSON serialization." warnings.warn(warning_message) r = timezone.make_naive(o, timezone.get_current_timezone()) return r.isoformat() return super(TastyPieJSONEncoder, self).default(o) class TastyPieJSONRenderer(JSONRenderer): encoder_class = TastyPieJSONEncoder <commit_msg>Handle exception generated by timezone naive datetime objects.<commit_after>from django.utils import timezone from rest_framework.renderers import JSONRenderer from rest_framework.utils import encoders import datetime import warnings class TastyPieJSONEncoder(encoders.JSONEncoder): def default(self, o): if isinstance(o, datetime.datetime): # TastyPie doesn't modify timestamps, so we won't either. # Remove TZ -- this has got to be a bug in TastyPie! warning_message = "TastyPieJSONEncoder strips timezone information from datetime objects and converts them to the timezone in settings.TIME_ZONE. It is recommended that you don't use the TastyPieJSONRenderer unless you require *strict* TastyPie compatibility for datetime JSON serialization." warnings.warn(warning_message) try: r = timezone.make_naive(o, timezone.get_current_timezone()) except ValueError: # must already be naive r = o return r.isoformat() return super(TastyPieJSONEncoder, self).default(o) class TastyPieJSONRenderer(JSONRenderer): encoder_class = TastyPieJSONEncoder
8aa6b13ca491d65a0519e429727073f082993aac
tests/framework/test_bmi_ugrid.py
tests/framework/test_bmi_ugrid.py
"""Unit tests for the pymt.framwork.bmi_ugrid module.""" import xarray as xr from pymt.framework.bmi_ugrid import Scalar, Vector from pymt.framework.bmi_bridge import _BmiCap grid_id = 0 class TestScalar: def get_grid_rank(self, grid_id): return 0 class ScalarBmi(_BmiCap): _cls = TestScalar def test_scalar_grid(): """Testing creating a scalar grid.""" bmi = ScalarBmi() grid = Scalar(bmi, grid_id) assert grid.ndim == 0 assert grid.metadata["type"] == "scalar" assert isinstance(grid, xr.Dataset) class TestVector: def get_grid_rank(self, grid_id): return 1 class VectorBmi(_BmiCap): _cls = TestVector def test_vector_grid(): """Testing creating a vector grid.""" bmi = VectorBmi() grid = Vector(bmi, grid_id) assert grid.ndim == 1 assert grid.metadata["type"] == "vector" assert isinstance(grid, xr.Dataset)
"""Unit tests for the pymt.framwork.bmi_ugrid module.""" import xarray as xr from pymt.framework.bmi_ugrid import Scalar, Vector from pymt.framework.bmi_bridge import _BmiCap grid_id = 0 class TestScalar: def get_grid_rank(self, grid_id): return 0 class ScalarBmi(_BmiCap): _cls = TestScalar def test_scalar_grid(): """Testing creating a scalar grid.""" bmi = ScalarBmi() grid = Scalar(bmi, grid_id) assert grid.ndim == 0 assert grid.metadata["type"] == "scalar" assert grid.data_vars["mesh"].attrs["type"] == "scalar" assert isinstance(grid, xr.Dataset) class TestVector: def get_grid_rank(self, grid_id): return 1 class VectorBmi(_BmiCap): _cls = TestVector def test_vector_grid(): """Testing creating a vector grid.""" bmi = VectorBmi() grid = Vector(bmi, grid_id) assert grid.ndim == 1 assert grid.metadata["type"] == "vector" assert grid.data_vars["mesh"].attrs["type"] == "vector" assert isinstance(grid, xr.Dataset)
Test that attrs are passed to 'mesh' DataArray
Test that attrs are passed to 'mesh' DataArray
Python
mit
csdms/pymt
"""Unit tests for the pymt.framwork.bmi_ugrid module.""" import xarray as xr from pymt.framework.bmi_ugrid import Scalar, Vector from pymt.framework.bmi_bridge import _BmiCap grid_id = 0 class TestScalar: def get_grid_rank(self, grid_id): return 0 class ScalarBmi(_BmiCap): _cls = TestScalar def test_scalar_grid(): """Testing creating a scalar grid.""" bmi = ScalarBmi() grid = Scalar(bmi, grid_id) assert grid.ndim == 0 assert grid.metadata["type"] == "scalar" assert isinstance(grid, xr.Dataset) class TestVector: def get_grid_rank(self, grid_id): return 1 class VectorBmi(_BmiCap): _cls = TestVector def test_vector_grid(): """Testing creating a vector grid.""" bmi = VectorBmi() grid = Vector(bmi, grid_id) assert grid.ndim == 1 assert grid.metadata["type"] == "vector" assert isinstance(grid, xr.Dataset) Test that attrs are passed to 'mesh' DataArray
"""Unit tests for the pymt.framwork.bmi_ugrid module.""" import xarray as xr from pymt.framework.bmi_ugrid import Scalar, Vector from pymt.framework.bmi_bridge import _BmiCap grid_id = 0 class TestScalar: def get_grid_rank(self, grid_id): return 0 class ScalarBmi(_BmiCap): _cls = TestScalar def test_scalar_grid(): """Testing creating a scalar grid.""" bmi = ScalarBmi() grid = Scalar(bmi, grid_id) assert grid.ndim == 0 assert grid.metadata["type"] == "scalar" assert grid.data_vars["mesh"].attrs["type"] == "scalar" assert isinstance(grid, xr.Dataset) class TestVector: def get_grid_rank(self, grid_id): return 1 class VectorBmi(_BmiCap): _cls = TestVector def test_vector_grid(): """Testing creating a vector grid.""" bmi = VectorBmi() grid = Vector(bmi, grid_id) assert grid.ndim == 1 assert grid.metadata["type"] == "vector" assert grid.data_vars["mesh"].attrs["type"] == "vector" assert isinstance(grid, xr.Dataset)
<commit_before>"""Unit tests for the pymt.framwork.bmi_ugrid module.""" import xarray as xr from pymt.framework.bmi_ugrid import Scalar, Vector from pymt.framework.bmi_bridge import _BmiCap grid_id = 0 class TestScalar: def get_grid_rank(self, grid_id): return 0 class ScalarBmi(_BmiCap): _cls = TestScalar def test_scalar_grid(): """Testing creating a scalar grid.""" bmi = ScalarBmi() grid = Scalar(bmi, grid_id) assert grid.ndim == 0 assert grid.metadata["type"] == "scalar" assert isinstance(grid, xr.Dataset) class TestVector: def get_grid_rank(self, grid_id): return 1 class VectorBmi(_BmiCap): _cls = TestVector def test_vector_grid(): """Testing creating a vector grid.""" bmi = VectorBmi() grid = Vector(bmi, grid_id) assert grid.ndim == 1 assert grid.metadata["type"] == "vector" assert isinstance(grid, xr.Dataset) <commit_msg>Test that attrs are passed to 'mesh' DataArray<commit_after>
"""Unit tests for the pymt.framwork.bmi_ugrid module.""" import xarray as xr from pymt.framework.bmi_ugrid import Scalar, Vector from pymt.framework.bmi_bridge import _BmiCap grid_id = 0 class TestScalar: def get_grid_rank(self, grid_id): return 0 class ScalarBmi(_BmiCap): _cls = TestScalar def test_scalar_grid(): """Testing creating a scalar grid.""" bmi = ScalarBmi() grid = Scalar(bmi, grid_id) assert grid.ndim == 0 assert grid.metadata["type"] == "scalar" assert grid.data_vars["mesh"].attrs["type"] == "scalar" assert isinstance(grid, xr.Dataset) class TestVector: def get_grid_rank(self, grid_id): return 1 class VectorBmi(_BmiCap): _cls = TestVector def test_vector_grid(): """Testing creating a vector grid.""" bmi = VectorBmi() grid = Vector(bmi, grid_id) assert grid.ndim == 1 assert grid.metadata["type"] == "vector" assert grid.data_vars["mesh"].attrs["type"] == "vector" assert isinstance(grid, xr.Dataset)
"""Unit tests for the pymt.framwork.bmi_ugrid module.""" import xarray as xr from pymt.framework.bmi_ugrid import Scalar, Vector from pymt.framework.bmi_bridge import _BmiCap grid_id = 0 class TestScalar: def get_grid_rank(self, grid_id): return 0 class ScalarBmi(_BmiCap): _cls = TestScalar def test_scalar_grid(): """Testing creating a scalar grid.""" bmi = ScalarBmi() grid = Scalar(bmi, grid_id) assert grid.ndim == 0 assert grid.metadata["type"] == "scalar" assert isinstance(grid, xr.Dataset) class TestVector: def get_grid_rank(self, grid_id): return 1 class VectorBmi(_BmiCap): _cls = TestVector def test_vector_grid(): """Testing creating a vector grid.""" bmi = VectorBmi() grid = Vector(bmi, grid_id) assert grid.ndim == 1 assert grid.metadata["type"] == "vector" assert isinstance(grid, xr.Dataset) Test that attrs are passed to 'mesh' DataArray"""Unit tests for the pymt.framwork.bmi_ugrid module.""" import xarray as xr from pymt.framework.bmi_ugrid import Scalar, Vector from pymt.framework.bmi_bridge import _BmiCap grid_id = 0 class TestScalar: def get_grid_rank(self, grid_id): return 0 class ScalarBmi(_BmiCap): _cls = TestScalar def test_scalar_grid(): """Testing creating a scalar grid.""" bmi = ScalarBmi() grid = Scalar(bmi, grid_id) assert grid.ndim == 0 assert grid.metadata["type"] == "scalar" assert grid.data_vars["mesh"].attrs["type"] == "scalar" assert isinstance(grid, xr.Dataset) class TestVector: def get_grid_rank(self, grid_id): return 1 class VectorBmi(_BmiCap): _cls = TestVector def test_vector_grid(): """Testing creating a vector grid.""" bmi = VectorBmi() grid = Vector(bmi, grid_id) assert grid.ndim == 1 assert grid.metadata["type"] == "vector" assert grid.data_vars["mesh"].attrs["type"] == "vector" assert isinstance(grid, xr.Dataset)
<commit_before>"""Unit tests for the pymt.framwork.bmi_ugrid module.""" import xarray as xr from pymt.framework.bmi_ugrid import Scalar, Vector from pymt.framework.bmi_bridge import _BmiCap grid_id = 0 class TestScalar: def get_grid_rank(self, grid_id): return 0 class ScalarBmi(_BmiCap): _cls = TestScalar def test_scalar_grid(): """Testing creating a scalar grid.""" bmi = ScalarBmi() grid = Scalar(bmi, grid_id) assert grid.ndim == 0 assert grid.metadata["type"] == "scalar" assert isinstance(grid, xr.Dataset) class TestVector: def get_grid_rank(self, grid_id): return 1 class VectorBmi(_BmiCap): _cls = TestVector def test_vector_grid(): """Testing creating a vector grid.""" bmi = VectorBmi() grid = Vector(bmi, grid_id) assert grid.ndim == 1 assert grid.metadata["type"] == "vector" assert isinstance(grid, xr.Dataset) <commit_msg>Test that attrs are passed to 'mesh' DataArray<commit_after>"""Unit tests for the pymt.framwork.bmi_ugrid module.""" import xarray as xr from pymt.framework.bmi_ugrid import Scalar, Vector from pymt.framework.bmi_bridge import _BmiCap grid_id = 0 class TestScalar: def get_grid_rank(self, grid_id): return 0 class ScalarBmi(_BmiCap): _cls = TestScalar def test_scalar_grid(): """Testing creating a scalar grid.""" bmi = ScalarBmi() grid = Scalar(bmi, grid_id) assert grid.ndim == 0 assert grid.metadata["type"] == "scalar" assert grid.data_vars["mesh"].attrs["type"] == "scalar" assert isinstance(grid, xr.Dataset) class TestVector: def get_grid_rank(self, grid_id): return 1 class VectorBmi(_BmiCap): _cls = TestVector def test_vector_grid(): """Testing creating a vector grid.""" bmi = VectorBmi() grid = Vector(bmi, grid_id) assert grid.ndim == 1 assert grid.metadata["type"] == "vector" assert grid.data_vars["mesh"].attrs["type"] == "vector" assert isinstance(grid, xr.Dataset)
18a0be3abb34c84c16c7cbd86ac25c984c1ab15e
thatforum/settings/home_server.py
thatforum/settings/home_server.py
from .base import * DEBUG = True TEMPLATE_DEBUG = DEBUG DATABASES = { 'default': { #'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'ENGINE': 'django.contrib.sqlite3', 'NAME': 'db_thatforum.sqlite3', # Or path to database file if using sqlite3. # The following settings are not used with sqlite3: } }
from .base import * DEBUG = True TEMPLATE_DEBUG = DEBUG DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'db_thatforum.sqlite3', # Or path to database file if using sqlite3. } }
Update database settings in home server
Update database settings in home server
Python
mit
hellsgate1001/thatforum_django,hellsgate1001/thatforum_django,hellsgate1001/thatforum_django
from .base import * DEBUG = True TEMPLATE_DEBUG = DEBUG DATABASES = { 'default': { #'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'ENGINE': 'django.contrib.sqlite3', 'NAME': 'db_thatforum.sqlite3', # Or path to database file if using sqlite3. # The following settings are not used with sqlite3: } } Update database settings in home server
from .base import * DEBUG = True TEMPLATE_DEBUG = DEBUG DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'db_thatforum.sqlite3', # Or path to database file if using sqlite3. } }
<commit_before>from .base import * DEBUG = True TEMPLATE_DEBUG = DEBUG DATABASES = { 'default': { #'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'ENGINE': 'django.contrib.sqlite3', 'NAME': 'db_thatforum.sqlite3', # Or path to database file if using sqlite3. # The following settings are not used with sqlite3: } } <commit_msg>Update database settings in home server<commit_after>
from .base import * DEBUG = True TEMPLATE_DEBUG = DEBUG DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'db_thatforum.sqlite3', # Or path to database file if using sqlite3. } }
from .base import * DEBUG = True TEMPLATE_DEBUG = DEBUG DATABASES = { 'default': { #'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'ENGINE': 'django.contrib.sqlite3', 'NAME': 'db_thatforum.sqlite3', # Or path to database file if using sqlite3. # The following settings are not used with sqlite3: } } Update database settings in home serverfrom .base import * DEBUG = True TEMPLATE_DEBUG = DEBUG DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'db_thatforum.sqlite3', # Or path to database file if using sqlite3. } }
<commit_before>from .base import * DEBUG = True TEMPLATE_DEBUG = DEBUG DATABASES = { 'default': { #'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'ENGINE': 'django.contrib.sqlite3', 'NAME': 'db_thatforum.sqlite3', # Or path to database file if using sqlite3. # The following settings are not used with sqlite3: } } <commit_msg>Update database settings in home server<commit_after>from .base import * DEBUG = True TEMPLATE_DEBUG = DEBUG DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'db_thatforum.sqlite3', # Or path to database file if using sqlite3. } }
b913963f58f2e3a6842518b1cf0344ca262ecdde
src/shelltoprocess/__init__.py
src/shelltoprocess/__init__.py
""" This package implements a wxPython shell, based on PyShell, which controls a seperate Python process, creating with the `multiprocessing` package. Here is the canonical way to use it: 1. Subclass multiprocessing.Process: import multiprocessing class CustomProcess(multiprocessing.Process): def __init__(self,*args,**kwargs): multiprocessing.Process.__init__(self,*args,**kwargs) self.queue_pack=shelltoprocess.make_queue_pack() # Put whatever code you want here def run(self): # Put whatever code you want here self.console = shelltoprocess.Console(queue_pack=self.queue_pack) self.console.interact() custom_process = CustomProcess() custom_process.start() 2. Set up the shell in the appropriate part of your code: self.shell = shelltoprocess.Shell(parent_window, queue_pack=custom_process.queue_pack) """ import multiprocessing def make_queue_pack(): """ Creates a "queue pack". This is the one object that connects between the Shell and the Console. The same queue_pack must be fed into both. See package documentation for more info. """ return [multiprocessing.Queue() for _ in range(4)] __all__ = ["Shell", "Console", "make_queue_pack"]
""" This package implements a wxPython shell, based on PyShell, which controls a seperate Python process, creating with the `multiprocessing` package. Here is the canonical way to use it: 1. Subclass multiprocessing.Process: import multiprocessing class CustomProcess(multiprocessing.Process): def __init__(self,*args,**kwargs): multiprocessing.Process.__init__(self,*args,**kwargs) self.queue_pack=shelltoprocess.make_queue_pack() # Put whatever code you want here def run(self): # Put whatever code you want here self.console = shelltoprocess.Console(queue_pack=self.queue_pack) self.console.interact() custom_process = CustomProcess() custom_process.start() 2. Set up the shell in the appropriate part of your code: self.shell = shelltoprocess.Shell(parent_window, queue_pack=custom_process.queue_pack) """ import multiprocessing from shell import Shell from console import Console def make_queue_pack(): """ Creates a "queue pack". This is the one object that connects between the Shell and the Console. The same queue_pack must be fed into both. See package documentation for more info. """ return [multiprocessing.Queue() for _ in range(4)] __all__ = ["Shell", "Console", "make_queue_pack"]
Add supposedly unused imports back again
Add supposedly unused imports back again
Python
mit
bittner/PythonTurtle,cool-RR/PythonTurtle
""" This package implements a wxPython shell, based on PyShell, which controls a seperate Python process, creating with the `multiprocessing` package. Here is the canonical way to use it: 1. Subclass multiprocessing.Process: import multiprocessing class CustomProcess(multiprocessing.Process): def __init__(self,*args,**kwargs): multiprocessing.Process.__init__(self,*args,**kwargs) self.queue_pack=shelltoprocess.make_queue_pack() # Put whatever code you want here def run(self): # Put whatever code you want here self.console = shelltoprocess.Console(queue_pack=self.queue_pack) self.console.interact() custom_process = CustomProcess() custom_process.start() 2. Set up the shell in the appropriate part of your code: self.shell = shelltoprocess.Shell(parent_window, queue_pack=custom_process.queue_pack) """ import multiprocessing def make_queue_pack(): """ Creates a "queue pack". This is the one object that connects between the Shell and the Console. The same queue_pack must be fed into both. See package documentation for more info. """ return [multiprocessing.Queue() for _ in range(4)] __all__ = ["Shell", "Console", "make_queue_pack"] Add supposedly unused imports back again
""" This package implements a wxPython shell, based on PyShell, which controls a seperate Python process, creating with the `multiprocessing` package. Here is the canonical way to use it: 1. Subclass multiprocessing.Process: import multiprocessing class CustomProcess(multiprocessing.Process): def __init__(self,*args,**kwargs): multiprocessing.Process.__init__(self,*args,**kwargs) self.queue_pack=shelltoprocess.make_queue_pack() # Put whatever code you want here def run(self): # Put whatever code you want here self.console = shelltoprocess.Console(queue_pack=self.queue_pack) self.console.interact() custom_process = CustomProcess() custom_process.start() 2. Set up the shell in the appropriate part of your code: self.shell = shelltoprocess.Shell(parent_window, queue_pack=custom_process.queue_pack) """ import multiprocessing from shell import Shell from console import Console def make_queue_pack(): """ Creates a "queue pack". This is the one object that connects between the Shell and the Console. The same queue_pack must be fed into both. See package documentation for more info. """ return [multiprocessing.Queue() for _ in range(4)] __all__ = ["Shell", "Console", "make_queue_pack"]
<commit_before>""" This package implements a wxPython shell, based on PyShell, which controls a seperate Python process, creating with the `multiprocessing` package. Here is the canonical way to use it: 1. Subclass multiprocessing.Process: import multiprocessing class CustomProcess(multiprocessing.Process): def __init__(self,*args,**kwargs): multiprocessing.Process.__init__(self,*args,**kwargs) self.queue_pack=shelltoprocess.make_queue_pack() # Put whatever code you want here def run(self): # Put whatever code you want here self.console = shelltoprocess.Console(queue_pack=self.queue_pack) self.console.interact() custom_process = CustomProcess() custom_process.start() 2. Set up the shell in the appropriate part of your code: self.shell = shelltoprocess.Shell(parent_window, queue_pack=custom_process.queue_pack) """ import multiprocessing def make_queue_pack(): """ Creates a "queue pack". This is the one object that connects between the Shell and the Console. The same queue_pack must be fed into both. See package documentation for more info. """ return [multiprocessing.Queue() for _ in range(4)] __all__ = ["Shell", "Console", "make_queue_pack"] <commit_msg>Add supposedly unused imports back again<commit_after>
""" This package implements a wxPython shell, based on PyShell, which controls a seperate Python process, creating with the `multiprocessing` package. Here is the canonical way to use it: 1. Subclass multiprocessing.Process: import multiprocessing class CustomProcess(multiprocessing.Process): def __init__(self,*args,**kwargs): multiprocessing.Process.__init__(self,*args,**kwargs) self.queue_pack=shelltoprocess.make_queue_pack() # Put whatever code you want here def run(self): # Put whatever code you want here self.console = shelltoprocess.Console(queue_pack=self.queue_pack) self.console.interact() custom_process = CustomProcess() custom_process.start() 2. Set up the shell in the appropriate part of your code: self.shell = shelltoprocess.Shell(parent_window, queue_pack=custom_process.queue_pack) """ import multiprocessing from shell import Shell from console import Console def make_queue_pack(): """ Creates a "queue pack". This is the one object that connects between the Shell and the Console. The same queue_pack must be fed into both. See package documentation for more info. """ return [multiprocessing.Queue() for _ in range(4)] __all__ = ["Shell", "Console", "make_queue_pack"]
""" This package implements a wxPython shell, based on PyShell, which controls a seperate Python process, creating with the `multiprocessing` package. Here is the canonical way to use it: 1. Subclass multiprocessing.Process: import multiprocessing class CustomProcess(multiprocessing.Process): def __init__(self,*args,**kwargs): multiprocessing.Process.__init__(self,*args,**kwargs) self.queue_pack=shelltoprocess.make_queue_pack() # Put whatever code you want here def run(self): # Put whatever code you want here self.console = shelltoprocess.Console(queue_pack=self.queue_pack) self.console.interact() custom_process = CustomProcess() custom_process.start() 2. Set up the shell in the appropriate part of your code: self.shell = shelltoprocess.Shell(parent_window, queue_pack=custom_process.queue_pack) """ import multiprocessing def make_queue_pack(): """ Creates a "queue pack". This is the one object that connects between the Shell and the Console. The same queue_pack must be fed into both. See package documentation for more info. """ return [multiprocessing.Queue() for _ in range(4)] __all__ = ["Shell", "Console", "make_queue_pack"] Add supposedly unused imports back again""" This package implements a wxPython shell, based on PyShell, which controls a seperate Python process, creating with the `multiprocessing` package. Here is the canonical way to use it: 1. Subclass multiprocessing.Process: import multiprocessing class CustomProcess(multiprocessing.Process): def __init__(self,*args,**kwargs): multiprocessing.Process.__init__(self,*args,**kwargs) self.queue_pack=shelltoprocess.make_queue_pack() # Put whatever code you want here def run(self): # Put whatever code you want here self.console = shelltoprocess.Console(queue_pack=self.queue_pack) self.console.interact() custom_process = CustomProcess() custom_process.start() 2. Set up the shell in the appropriate part of your code: self.shell = shelltoprocess.Shell(parent_window, queue_pack=custom_process.queue_pack) """ import multiprocessing from shell import Shell from console import Console def make_queue_pack(): """ Creates a "queue pack". This is the one object that connects between the Shell and the Console. The same queue_pack must be fed into both. See package documentation for more info. """ return [multiprocessing.Queue() for _ in range(4)] __all__ = ["Shell", "Console", "make_queue_pack"]
<commit_before>""" This package implements a wxPython shell, based on PyShell, which controls a seperate Python process, creating with the `multiprocessing` package. Here is the canonical way to use it: 1. Subclass multiprocessing.Process: import multiprocessing class CustomProcess(multiprocessing.Process): def __init__(self,*args,**kwargs): multiprocessing.Process.__init__(self,*args,**kwargs) self.queue_pack=shelltoprocess.make_queue_pack() # Put whatever code you want here def run(self): # Put whatever code you want here self.console = shelltoprocess.Console(queue_pack=self.queue_pack) self.console.interact() custom_process = CustomProcess() custom_process.start() 2. Set up the shell in the appropriate part of your code: self.shell = shelltoprocess.Shell(parent_window, queue_pack=custom_process.queue_pack) """ import multiprocessing def make_queue_pack(): """ Creates a "queue pack". This is the one object that connects between the Shell and the Console. The same queue_pack must be fed into both. See package documentation for more info. """ return [multiprocessing.Queue() for _ in range(4)] __all__ = ["Shell", "Console", "make_queue_pack"] <commit_msg>Add supposedly unused imports back again<commit_after>""" This package implements a wxPython shell, based on PyShell, which controls a seperate Python process, creating with the `multiprocessing` package. Here is the canonical way to use it: 1. Subclass multiprocessing.Process: import multiprocessing class CustomProcess(multiprocessing.Process): def __init__(self,*args,**kwargs): multiprocessing.Process.__init__(self,*args,**kwargs) self.queue_pack=shelltoprocess.make_queue_pack() # Put whatever code you want here def run(self): # Put whatever code you want here self.console = shelltoprocess.Console(queue_pack=self.queue_pack) self.console.interact() custom_process = CustomProcess() custom_process.start() 2. Set up the shell in the appropriate part of your code: self.shell = shelltoprocess.Shell(parent_window, queue_pack=custom_process.queue_pack) """ import multiprocessing from shell import Shell from console import Console def make_queue_pack(): """ Creates a "queue pack". This is the one object that connects between the Shell and the Console. The same queue_pack must be fed into both. See package documentation for more info. """ return [multiprocessing.Queue() for _ in range(4)] __all__ = ["Shell", "Console", "make_queue_pack"]
c668bf1179e91be66f12857fc7b31ef66d287a42
downstream_node/lib/node.py
downstream_node/lib/node.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from downstream_node.config import config from downstream_node.models import Challenges, Files from heartbeat import Heartbeat from downstream_node.startup import db __all__ = ['create_token', 'delete_token', 'add_file', 'remove_file', 'gen_challenges', 'update_challenges'] def create_token(*args, **kwargs): raise NotImplementedError def delete_token(*args, **kwargs): raise NotImplementedError def add_file(*args, **kwargs): raise NotImplementedError def remove_file(*args, **kwargs): raise NotImplementedError def gen_challenges(filepath, root_seed): secret = getattr(config, 'HEARTBEAT_SECRET') hb = Heartbeat(filepath, secret=secret) hb.generate_challenges(1000, root_seed) files = Files(name=filepath) db.session.add(files) for challenge in hb.challenges: chal = Challenges( filename=filepath, rootseed=root_seed, block=challenge.block, seed=challenge.seed, response=challenge.response, ) db.session.add(chal) db.session.commit() def update_challenges(*args, **kwargs): raise NotImplementedError
#!/usr/bin/env python # -*- coding: utf-8 -*- import os from downstream_node.config import config from downstream_node.models import Challenges, Files from heartbeat import Heartbeat from downstream_node.startup import db __all__ = ['create_token', 'delete_token', 'add_file', 'remove_file', 'gen_challenges', 'update_challenges'] def create_token(*args, **kwargs): raise NotImplementedError def delete_token(*args, **kwargs): raise NotImplementedError def add_file(*args, **kwargs): raise NotImplementedError def remove_file(*args, **kwargs): raise NotImplementedError def gen_challenges(filepath, root_seed): secret = getattr(config, 'HEARTBEAT_SECRET') hb = Heartbeat(filepath, secret=secret) hb.generate_challenges(1000, root_seed) files = Files(name=os.path.split(filepath)[1]) db.session.add(files) for challenge in hb.challenges: chal = Challenges( filename=filepath, rootseed=root_seed, block=challenge.block, seed=challenge.seed, response=challenge.response, ) db.session.add(chal) db.session.commit() def update_challenges(*args, **kwargs): raise NotImplementedError
Insert filename only, not path
Insert filename only, not path
Python
mit
Storj/downstream-node,Storj/downstream-node
#!/usr/bin/env python # -*- coding: utf-8 -*- from downstream_node.config import config from downstream_node.models import Challenges, Files from heartbeat import Heartbeat from downstream_node.startup import db __all__ = ['create_token', 'delete_token', 'add_file', 'remove_file', 'gen_challenges', 'update_challenges'] def create_token(*args, **kwargs): raise NotImplementedError def delete_token(*args, **kwargs): raise NotImplementedError def add_file(*args, **kwargs): raise NotImplementedError def remove_file(*args, **kwargs): raise NotImplementedError def gen_challenges(filepath, root_seed): secret = getattr(config, 'HEARTBEAT_SECRET') hb = Heartbeat(filepath, secret=secret) hb.generate_challenges(1000, root_seed) files = Files(name=filepath) db.session.add(files) for challenge in hb.challenges: chal = Challenges( filename=filepath, rootseed=root_seed, block=challenge.block, seed=challenge.seed, response=challenge.response, ) db.session.add(chal) db.session.commit() def update_challenges(*args, **kwargs): raise NotImplementedError Insert filename only, not path
#!/usr/bin/env python # -*- coding: utf-8 -*- import os from downstream_node.config import config from downstream_node.models import Challenges, Files from heartbeat import Heartbeat from downstream_node.startup import db __all__ = ['create_token', 'delete_token', 'add_file', 'remove_file', 'gen_challenges', 'update_challenges'] def create_token(*args, **kwargs): raise NotImplementedError def delete_token(*args, **kwargs): raise NotImplementedError def add_file(*args, **kwargs): raise NotImplementedError def remove_file(*args, **kwargs): raise NotImplementedError def gen_challenges(filepath, root_seed): secret = getattr(config, 'HEARTBEAT_SECRET') hb = Heartbeat(filepath, secret=secret) hb.generate_challenges(1000, root_seed) files = Files(name=os.path.split(filepath)[1]) db.session.add(files) for challenge in hb.challenges: chal = Challenges( filename=filepath, rootseed=root_seed, block=challenge.block, seed=challenge.seed, response=challenge.response, ) db.session.add(chal) db.session.commit() def update_challenges(*args, **kwargs): raise NotImplementedError
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from downstream_node.config import config from downstream_node.models import Challenges, Files from heartbeat import Heartbeat from downstream_node.startup import db __all__ = ['create_token', 'delete_token', 'add_file', 'remove_file', 'gen_challenges', 'update_challenges'] def create_token(*args, **kwargs): raise NotImplementedError def delete_token(*args, **kwargs): raise NotImplementedError def add_file(*args, **kwargs): raise NotImplementedError def remove_file(*args, **kwargs): raise NotImplementedError def gen_challenges(filepath, root_seed): secret = getattr(config, 'HEARTBEAT_SECRET') hb = Heartbeat(filepath, secret=secret) hb.generate_challenges(1000, root_seed) files = Files(name=filepath) db.session.add(files) for challenge in hb.challenges: chal = Challenges( filename=filepath, rootseed=root_seed, block=challenge.block, seed=challenge.seed, response=challenge.response, ) db.session.add(chal) db.session.commit() def update_challenges(*args, **kwargs): raise NotImplementedError <commit_msg>Insert filename only, not path<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- import os from downstream_node.config import config from downstream_node.models import Challenges, Files from heartbeat import Heartbeat from downstream_node.startup import db __all__ = ['create_token', 'delete_token', 'add_file', 'remove_file', 'gen_challenges', 'update_challenges'] def create_token(*args, **kwargs): raise NotImplementedError def delete_token(*args, **kwargs): raise NotImplementedError def add_file(*args, **kwargs): raise NotImplementedError def remove_file(*args, **kwargs): raise NotImplementedError def gen_challenges(filepath, root_seed): secret = getattr(config, 'HEARTBEAT_SECRET') hb = Heartbeat(filepath, secret=secret) hb.generate_challenges(1000, root_seed) files = Files(name=os.path.split(filepath)[1]) db.session.add(files) for challenge in hb.challenges: chal = Challenges( filename=filepath, rootseed=root_seed, block=challenge.block, seed=challenge.seed, response=challenge.response, ) db.session.add(chal) db.session.commit() def update_challenges(*args, **kwargs): raise NotImplementedError
#!/usr/bin/env python # -*- coding: utf-8 -*- from downstream_node.config import config from downstream_node.models import Challenges, Files from heartbeat import Heartbeat from downstream_node.startup import db __all__ = ['create_token', 'delete_token', 'add_file', 'remove_file', 'gen_challenges', 'update_challenges'] def create_token(*args, **kwargs): raise NotImplementedError def delete_token(*args, **kwargs): raise NotImplementedError def add_file(*args, **kwargs): raise NotImplementedError def remove_file(*args, **kwargs): raise NotImplementedError def gen_challenges(filepath, root_seed): secret = getattr(config, 'HEARTBEAT_SECRET') hb = Heartbeat(filepath, secret=secret) hb.generate_challenges(1000, root_seed) files = Files(name=filepath) db.session.add(files) for challenge in hb.challenges: chal = Challenges( filename=filepath, rootseed=root_seed, block=challenge.block, seed=challenge.seed, response=challenge.response, ) db.session.add(chal) db.session.commit() def update_challenges(*args, **kwargs): raise NotImplementedError Insert filename only, not path#!/usr/bin/env python # -*- coding: utf-8 -*- import os from downstream_node.config import config from downstream_node.models import Challenges, Files from heartbeat import Heartbeat from downstream_node.startup import db __all__ = ['create_token', 'delete_token', 'add_file', 'remove_file', 'gen_challenges', 'update_challenges'] def create_token(*args, **kwargs): raise NotImplementedError def delete_token(*args, **kwargs): raise NotImplementedError def add_file(*args, **kwargs): raise NotImplementedError def remove_file(*args, **kwargs): raise NotImplementedError def gen_challenges(filepath, root_seed): secret = getattr(config, 'HEARTBEAT_SECRET') hb = Heartbeat(filepath, secret=secret) hb.generate_challenges(1000, root_seed) files = Files(name=os.path.split(filepath)[1]) db.session.add(files) for challenge in hb.challenges: chal = Challenges( filename=filepath, rootseed=root_seed, block=challenge.block, seed=challenge.seed, response=challenge.response, ) db.session.add(chal) db.session.commit() def update_challenges(*args, **kwargs): raise NotImplementedError
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from downstream_node.config import config from downstream_node.models import Challenges, Files from heartbeat import Heartbeat from downstream_node.startup import db __all__ = ['create_token', 'delete_token', 'add_file', 'remove_file', 'gen_challenges', 'update_challenges'] def create_token(*args, **kwargs): raise NotImplementedError def delete_token(*args, **kwargs): raise NotImplementedError def add_file(*args, **kwargs): raise NotImplementedError def remove_file(*args, **kwargs): raise NotImplementedError def gen_challenges(filepath, root_seed): secret = getattr(config, 'HEARTBEAT_SECRET') hb = Heartbeat(filepath, secret=secret) hb.generate_challenges(1000, root_seed) files = Files(name=filepath) db.session.add(files) for challenge in hb.challenges: chal = Challenges( filename=filepath, rootseed=root_seed, block=challenge.block, seed=challenge.seed, response=challenge.response, ) db.session.add(chal) db.session.commit() def update_challenges(*args, **kwargs): raise NotImplementedError <commit_msg>Insert filename only, not path<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- import os from downstream_node.config import config from downstream_node.models import Challenges, Files from heartbeat import Heartbeat from downstream_node.startup import db __all__ = ['create_token', 'delete_token', 'add_file', 'remove_file', 'gen_challenges', 'update_challenges'] def create_token(*args, **kwargs): raise NotImplementedError def delete_token(*args, **kwargs): raise NotImplementedError def add_file(*args, **kwargs): raise NotImplementedError def remove_file(*args, **kwargs): raise NotImplementedError def gen_challenges(filepath, root_seed): secret = getattr(config, 'HEARTBEAT_SECRET') hb = Heartbeat(filepath, secret=secret) hb.generate_challenges(1000, root_seed) files = Files(name=os.path.split(filepath)[1]) db.session.add(files) for challenge in hb.challenges: chal = Challenges( filename=filepath, rootseed=root_seed, block=challenge.block, seed=challenge.seed, response=challenge.response, ) db.session.add(chal) db.session.commit() def update_challenges(*args, **kwargs): raise NotImplementedError
d754e44c725ff2b390d2f4ea52d29475b6e11f82
src/akllt/management/commands/akllt_importnews.py
src/akllt/management/commands/akllt_importnews.py
from django.core.management.base import BaseCommand from wagtail.wagtailcore.models import Page from akllt.dataimport.news import import_news from akllt.models import NewsStory class Command(BaseCommand): args = '<directory name>' help = 'Imports data from old akl.lt website' def handle(self, news_folder, *args, **options): news_count = 0 root = Page.get_first_root_node() if root is None: root = Page.add_root(title='Root page') news = import_news(news_folder) for news_story in news: root.add_child(instance=NewsStory( title=news_story['title'], date=news_story['date'], blurb=news_story['blurb'], body=news_story['body'], )) self.stdout.write('Successfully imported %d news items\n' % news_count)
from django.core.management.base import BaseCommand from wagtail.wagtailcore.models import Page from akllt.dataimport.news import import_news from akllt.models import NewsStory class Command(BaseCommand): args = '<directory name>' help = 'Imports data from old akl.lt website' def handle(self, news_folder, *args, **options): news_count = 0 root = Page.get_first_root_node() if root is None: root = Page.add_root(title='Root page') news = import_news(news_folder) for news_story in news: root.add_child(instance=NewsStory( title=news_story['title'], date=news_story['date'], blurb=news_story['blurb'], body=news_story['body'], )) news_count += 1 self.stdout.write('Successfully imported %d news items\n' % news_count)
Increment item count in news import mgmt command
Increment item count in news import mgmt command
Python
agpl-3.0
python-dirbtuves/akl.lt,python-dirbtuves/akl.lt,python-dirbtuves/akl.lt,python-dirbtuves/akl.lt,python-dirbtuves/akl.lt
from django.core.management.base import BaseCommand from wagtail.wagtailcore.models import Page from akllt.dataimport.news import import_news from akllt.models import NewsStory class Command(BaseCommand): args = '<directory name>' help = 'Imports data from old akl.lt website' def handle(self, news_folder, *args, **options): news_count = 0 root = Page.get_first_root_node() if root is None: root = Page.add_root(title='Root page') news = import_news(news_folder) for news_story in news: root.add_child(instance=NewsStory( title=news_story['title'], date=news_story['date'], blurb=news_story['blurb'], body=news_story['body'], )) self.stdout.write('Successfully imported %d news items\n' % news_count) Increment item count in news import mgmt command
from django.core.management.base import BaseCommand from wagtail.wagtailcore.models import Page from akllt.dataimport.news import import_news from akllt.models import NewsStory class Command(BaseCommand): args = '<directory name>' help = 'Imports data from old akl.lt website' def handle(self, news_folder, *args, **options): news_count = 0 root = Page.get_first_root_node() if root is None: root = Page.add_root(title='Root page') news = import_news(news_folder) for news_story in news: root.add_child(instance=NewsStory( title=news_story['title'], date=news_story['date'], blurb=news_story['blurb'], body=news_story['body'], )) news_count += 1 self.stdout.write('Successfully imported %d news items\n' % news_count)
<commit_before>from django.core.management.base import BaseCommand from wagtail.wagtailcore.models import Page from akllt.dataimport.news import import_news from akllt.models import NewsStory class Command(BaseCommand): args = '<directory name>' help = 'Imports data from old akl.lt website' def handle(self, news_folder, *args, **options): news_count = 0 root = Page.get_first_root_node() if root is None: root = Page.add_root(title='Root page') news = import_news(news_folder) for news_story in news: root.add_child(instance=NewsStory( title=news_story['title'], date=news_story['date'], blurb=news_story['blurb'], body=news_story['body'], )) self.stdout.write('Successfully imported %d news items\n' % news_count) <commit_msg>Increment item count in news import mgmt command<commit_after>
from django.core.management.base import BaseCommand from wagtail.wagtailcore.models import Page from akllt.dataimport.news import import_news from akllt.models import NewsStory class Command(BaseCommand): args = '<directory name>' help = 'Imports data from old akl.lt website' def handle(self, news_folder, *args, **options): news_count = 0 root = Page.get_first_root_node() if root is None: root = Page.add_root(title='Root page') news = import_news(news_folder) for news_story in news: root.add_child(instance=NewsStory( title=news_story['title'], date=news_story['date'], blurb=news_story['blurb'], body=news_story['body'], )) news_count += 1 self.stdout.write('Successfully imported %d news items\n' % news_count)
from django.core.management.base import BaseCommand from wagtail.wagtailcore.models import Page from akllt.dataimport.news import import_news from akllt.models import NewsStory class Command(BaseCommand): args = '<directory name>' help = 'Imports data from old akl.lt website' def handle(self, news_folder, *args, **options): news_count = 0 root = Page.get_first_root_node() if root is None: root = Page.add_root(title='Root page') news = import_news(news_folder) for news_story in news: root.add_child(instance=NewsStory( title=news_story['title'], date=news_story['date'], blurb=news_story['blurb'], body=news_story['body'], )) self.stdout.write('Successfully imported %d news items\n' % news_count) Increment item count in news import mgmt commandfrom django.core.management.base import BaseCommand from wagtail.wagtailcore.models import Page from akllt.dataimport.news import import_news from akllt.models import NewsStory class Command(BaseCommand): args = '<directory name>' help = 'Imports data from old akl.lt website' def handle(self, news_folder, *args, **options): news_count = 0 root = Page.get_first_root_node() if root is None: root = Page.add_root(title='Root page') news = import_news(news_folder) for news_story in news: root.add_child(instance=NewsStory( title=news_story['title'], date=news_story['date'], blurb=news_story['blurb'], body=news_story['body'], )) news_count += 1 self.stdout.write('Successfully imported %d news items\n' % news_count)
<commit_before>from django.core.management.base import BaseCommand from wagtail.wagtailcore.models import Page from akllt.dataimport.news import import_news from akllt.models import NewsStory class Command(BaseCommand): args = '<directory name>' help = 'Imports data from old akl.lt website' def handle(self, news_folder, *args, **options): news_count = 0 root = Page.get_first_root_node() if root is None: root = Page.add_root(title='Root page') news = import_news(news_folder) for news_story in news: root.add_child(instance=NewsStory( title=news_story['title'], date=news_story['date'], blurb=news_story['blurb'], body=news_story['body'], )) self.stdout.write('Successfully imported %d news items\n' % news_count) <commit_msg>Increment item count in news import mgmt command<commit_after>from django.core.management.base import BaseCommand from wagtail.wagtailcore.models import Page from akllt.dataimport.news import import_news from akllt.models import NewsStory class Command(BaseCommand): args = '<directory name>' help = 'Imports data from old akl.lt website' def handle(self, news_folder, *args, **options): news_count = 0 root = Page.get_first_root_node() if root is None: root = Page.add_root(title='Root page') news = import_news(news_folder) for news_story in news: root.add_child(instance=NewsStory( title=news_story['title'], date=news_story['date'], blurb=news_story['blurb'], body=news_story['body'], )) news_count += 1 self.stdout.write('Successfully imported %d news items\n' % news_count)
810961f65c37d27c5e2d99cf102064d0b4e300f3
project/apiv2/views.py
project/apiv2/views.py
from django.db.models import Q from django.shortcuts import render from rest_framework.filters import OrderingFilter, SearchFilter from rest_framework.generics import ListAPIView from rest_framework_json_api.renderers import JSONRenderer from rest_framework.generics import RetrieveUpdateDestroyAPIView from bookmarks.models import Bookmark from bookmarks.serializers import BookmarkSerializer class BookmarkListCreateAPIView(ListAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer resource_name = 'bookmark' action = 'list' renderer_classes = (JSONRenderer,) filter_backends = (SearchFilter, OrderingFilter) search_fields = ('url', 'title') ordering_fields = ('id', 'url', 'title', 'bookmarked_at') class BookmarkRetrieveUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer lookup_field = 'bookmark_id'
from django.db.models import Q from django.shortcuts import render from rest_framework.filters import OrderingFilter, SearchFilter from rest_framework_json_api.renderers import JSONRenderer from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView from bookmarks.models import Bookmark from bookmarks.serializers import BookmarkSerializer class BookmarkListCreateAPIView(ListCreateAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer resource_name = 'bookmark' action = 'list' renderer_classes = (JSONRenderer,) filter_backends = (SearchFilter, OrderingFilter) search_fields = ('url', 'title') ordering_fields = ('id', 'url', 'title', 'bookmarked_at') class BookmarkRetrieveUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer lookup_field = 'bookmark_id'
Use ListCreateAPIView as base class to support bookmark creation
Use ListCreateAPIView as base class to support bookmark creation
Python
mit
hnakamur/django-bootstrap-table-example,hnakamur/django-bootstrap-table-example,hnakamur/django-bootstrap-table-example
from django.db.models import Q from django.shortcuts import render from rest_framework.filters import OrderingFilter, SearchFilter from rest_framework.generics import ListAPIView from rest_framework_json_api.renderers import JSONRenderer from rest_framework.generics import RetrieveUpdateDestroyAPIView from bookmarks.models import Bookmark from bookmarks.serializers import BookmarkSerializer class BookmarkListCreateAPIView(ListAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer resource_name = 'bookmark' action = 'list' renderer_classes = (JSONRenderer,) filter_backends = (SearchFilter, OrderingFilter) search_fields = ('url', 'title') ordering_fields = ('id', 'url', 'title', 'bookmarked_at') class BookmarkRetrieveUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer lookup_field = 'bookmark_id' Use ListCreateAPIView as base class to support bookmark creation
from django.db.models import Q from django.shortcuts import render from rest_framework.filters import OrderingFilter, SearchFilter from rest_framework_json_api.renderers import JSONRenderer from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView from bookmarks.models import Bookmark from bookmarks.serializers import BookmarkSerializer class BookmarkListCreateAPIView(ListCreateAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer resource_name = 'bookmark' action = 'list' renderer_classes = (JSONRenderer,) filter_backends = (SearchFilter, OrderingFilter) search_fields = ('url', 'title') ordering_fields = ('id', 'url', 'title', 'bookmarked_at') class BookmarkRetrieveUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer lookup_field = 'bookmark_id'
<commit_before>from django.db.models import Q from django.shortcuts import render from rest_framework.filters import OrderingFilter, SearchFilter from rest_framework.generics import ListAPIView from rest_framework_json_api.renderers import JSONRenderer from rest_framework.generics import RetrieveUpdateDestroyAPIView from bookmarks.models import Bookmark from bookmarks.serializers import BookmarkSerializer class BookmarkListCreateAPIView(ListAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer resource_name = 'bookmark' action = 'list' renderer_classes = (JSONRenderer,) filter_backends = (SearchFilter, OrderingFilter) search_fields = ('url', 'title') ordering_fields = ('id', 'url', 'title', 'bookmarked_at') class BookmarkRetrieveUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer lookup_field = 'bookmark_id' <commit_msg>Use ListCreateAPIView as base class to support bookmark creation<commit_after>
from django.db.models import Q from django.shortcuts import render from rest_framework.filters import OrderingFilter, SearchFilter from rest_framework_json_api.renderers import JSONRenderer from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView from bookmarks.models import Bookmark from bookmarks.serializers import BookmarkSerializer class BookmarkListCreateAPIView(ListCreateAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer resource_name = 'bookmark' action = 'list' renderer_classes = (JSONRenderer,) filter_backends = (SearchFilter, OrderingFilter) search_fields = ('url', 'title') ordering_fields = ('id', 'url', 'title', 'bookmarked_at') class BookmarkRetrieveUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer lookup_field = 'bookmark_id'
from django.db.models import Q from django.shortcuts import render from rest_framework.filters import OrderingFilter, SearchFilter from rest_framework.generics import ListAPIView from rest_framework_json_api.renderers import JSONRenderer from rest_framework.generics import RetrieveUpdateDestroyAPIView from bookmarks.models import Bookmark from bookmarks.serializers import BookmarkSerializer class BookmarkListCreateAPIView(ListAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer resource_name = 'bookmark' action = 'list' renderer_classes = (JSONRenderer,) filter_backends = (SearchFilter, OrderingFilter) search_fields = ('url', 'title') ordering_fields = ('id', 'url', 'title', 'bookmarked_at') class BookmarkRetrieveUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer lookup_field = 'bookmark_id' Use ListCreateAPIView as base class to support bookmark creationfrom django.db.models import Q from django.shortcuts import render from rest_framework.filters import OrderingFilter, SearchFilter from rest_framework_json_api.renderers import JSONRenderer from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView from bookmarks.models import Bookmark from bookmarks.serializers import BookmarkSerializer class BookmarkListCreateAPIView(ListCreateAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer resource_name = 'bookmark' action = 'list' renderer_classes = (JSONRenderer,) filter_backends = (SearchFilter, OrderingFilter) search_fields = ('url', 'title') ordering_fields = ('id', 'url', 'title', 'bookmarked_at') class BookmarkRetrieveUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer lookup_field = 'bookmark_id'
<commit_before>from django.db.models import Q from django.shortcuts import render from rest_framework.filters import OrderingFilter, SearchFilter from rest_framework.generics import ListAPIView from rest_framework_json_api.renderers import JSONRenderer from rest_framework.generics import RetrieveUpdateDestroyAPIView from bookmarks.models import Bookmark from bookmarks.serializers import BookmarkSerializer class BookmarkListCreateAPIView(ListAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer resource_name = 'bookmark' action = 'list' renderer_classes = (JSONRenderer,) filter_backends = (SearchFilter, OrderingFilter) search_fields = ('url', 'title') ordering_fields = ('id', 'url', 'title', 'bookmarked_at') class BookmarkRetrieveUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer lookup_field = 'bookmark_id' <commit_msg>Use ListCreateAPIView as base class to support bookmark creation<commit_after>from django.db.models import Q from django.shortcuts import render from rest_framework.filters import OrderingFilter, SearchFilter from rest_framework_json_api.renderers import JSONRenderer from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView from bookmarks.models import Bookmark from bookmarks.serializers import BookmarkSerializer class BookmarkListCreateAPIView(ListCreateAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer resource_name = 'bookmark' action = 'list' renderer_classes = (JSONRenderer,) filter_backends = (SearchFilter, OrderingFilter) search_fields = ('url', 'title') ordering_fields = ('id', 'url', 'title', 'bookmarked_at') class BookmarkRetrieveUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView): queryset = Bookmark.objects.all() serializer_class = BookmarkSerializer lookup_field = 'bookmark_id'
65dd9b35b75dd4ccc6dbc34d53071716a377d532
thecut/authorship/factories.py
thecut/authorship/factories.py
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals try: from faker import Factory as FakerFactory except ImportError, error: message = '{0}. Try running `pip install fake-factory`.'.format(error) raise ImportError(message) try: import factory except ImportError, error: message = '{0}. Try running `pip install factory_boy`.'.format(error) raise ImportError(message) faker = FakerFactory.create() class AuthorshipFactory(factory.django.DjangoModelFactory): class Meta(object): abstract = True created_by = factory.SubFactory('thecut.authorship.factories.UserFactory') updated_by = factory.SelfAttribute('created_by') class UserFactory(factory.django.DjangoModelFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] username = factory.Sequence(lambda n: 'user-{0}'.format(n)) class UserFakerFactory(UserFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] first_name = factory.LazyAttribute(lambda o: faker.first_name()) last_name = factory.LazyAttribute(lambda o: faker.last_name()) username = factory.LazyAttribute( lambda o: '{0}.{1}'.format(o.first_name.lower(), o.last_name.lower())) email = factory.LazyAttribute( lambda o: '{0}@example.com'.format(o.username))
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals try: from faker import Factory as FakerFactory except ImportError as error: message = '{0}. Try running `pip install fake-factory`.'.format(error) raise ImportError(message) try: import factory except ImportError as error: message = '{0}. Try running `pip install factory_boy`.'.format(error) raise ImportError(message) faker = FakerFactory.create() class AuthorshipFactory(factory.django.DjangoModelFactory): class Meta(object): abstract = True created_by = factory.SubFactory('thecut.authorship.factories.UserFactory') updated_by = factory.SelfAttribute('created_by') class UserFactory(factory.django.DjangoModelFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] username = factory.Sequence(lambda n: 'user-{0}'.format(n)) class UserFakerFactory(UserFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] first_name = factory.LazyAttribute(lambda o: faker.first_name()) last_name = factory.LazyAttribute(lambda o: faker.last_name()) username = factory.LazyAttribute( lambda o: '{0}.{1}'.format(o.first_name.lower(), o.last_name.lower())) email = factory.LazyAttribute( lambda o: '{0}@example.com'.format(o.username))
Update exception handling syntax for python 3 compatibility.
Update exception handling syntax for python 3 compatibility.
Python
apache-2.0
thecut/thecut-authorship
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals try: from faker import Factory as FakerFactory except ImportError, error: message = '{0}. Try running `pip install fake-factory`.'.format(error) raise ImportError(message) try: import factory except ImportError, error: message = '{0}. Try running `pip install factory_boy`.'.format(error) raise ImportError(message) faker = FakerFactory.create() class AuthorshipFactory(factory.django.DjangoModelFactory): class Meta(object): abstract = True created_by = factory.SubFactory('thecut.authorship.factories.UserFactory') updated_by = factory.SelfAttribute('created_by') class UserFactory(factory.django.DjangoModelFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] username = factory.Sequence(lambda n: 'user-{0}'.format(n)) class UserFakerFactory(UserFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] first_name = factory.LazyAttribute(lambda o: faker.first_name()) last_name = factory.LazyAttribute(lambda o: faker.last_name()) username = factory.LazyAttribute( lambda o: '{0}.{1}'.format(o.first_name.lower(), o.last_name.lower())) email = factory.LazyAttribute( lambda o: '{0}@example.com'.format(o.username)) Update exception handling syntax for python 3 compatibility.
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals try: from faker import Factory as FakerFactory except ImportError as error: message = '{0}. Try running `pip install fake-factory`.'.format(error) raise ImportError(message) try: import factory except ImportError as error: message = '{0}. Try running `pip install factory_boy`.'.format(error) raise ImportError(message) faker = FakerFactory.create() class AuthorshipFactory(factory.django.DjangoModelFactory): class Meta(object): abstract = True created_by = factory.SubFactory('thecut.authorship.factories.UserFactory') updated_by = factory.SelfAttribute('created_by') class UserFactory(factory.django.DjangoModelFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] username = factory.Sequence(lambda n: 'user-{0}'.format(n)) class UserFakerFactory(UserFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] first_name = factory.LazyAttribute(lambda o: faker.first_name()) last_name = factory.LazyAttribute(lambda o: faker.last_name()) username = factory.LazyAttribute( lambda o: '{0}.{1}'.format(o.first_name.lower(), o.last_name.lower())) email = factory.LazyAttribute( lambda o: '{0}@example.com'.format(o.username))
<commit_before># -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals try: from faker import Factory as FakerFactory except ImportError, error: message = '{0}. Try running `pip install fake-factory`.'.format(error) raise ImportError(message) try: import factory except ImportError, error: message = '{0}. Try running `pip install factory_boy`.'.format(error) raise ImportError(message) faker = FakerFactory.create() class AuthorshipFactory(factory.django.DjangoModelFactory): class Meta(object): abstract = True created_by = factory.SubFactory('thecut.authorship.factories.UserFactory') updated_by = factory.SelfAttribute('created_by') class UserFactory(factory.django.DjangoModelFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] username = factory.Sequence(lambda n: 'user-{0}'.format(n)) class UserFakerFactory(UserFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] first_name = factory.LazyAttribute(lambda o: faker.first_name()) last_name = factory.LazyAttribute(lambda o: faker.last_name()) username = factory.LazyAttribute( lambda o: '{0}.{1}'.format(o.first_name.lower(), o.last_name.lower())) email = factory.LazyAttribute( lambda o: '{0}@example.com'.format(o.username)) <commit_msg>Update exception handling syntax for python 3 compatibility.<commit_after>
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals try: from faker import Factory as FakerFactory except ImportError as error: message = '{0}. Try running `pip install fake-factory`.'.format(error) raise ImportError(message) try: import factory except ImportError as error: message = '{0}. Try running `pip install factory_boy`.'.format(error) raise ImportError(message) faker = FakerFactory.create() class AuthorshipFactory(factory.django.DjangoModelFactory): class Meta(object): abstract = True created_by = factory.SubFactory('thecut.authorship.factories.UserFactory') updated_by = factory.SelfAttribute('created_by') class UserFactory(factory.django.DjangoModelFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] username = factory.Sequence(lambda n: 'user-{0}'.format(n)) class UserFakerFactory(UserFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] first_name = factory.LazyAttribute(lambda o: faker.first_name()) last_name = factory.LazyAttribute(lambda o: faker.last_name()) username = factory.LazyAttribute( lambda o: '{0}.{1}'.format(o.first_name.lower(), o.last_name.lower())) email = factory.LazyAttribute( lambda o: '{0}@example.com'.format(o.username))
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals try: from faker import Factory as FakerFactory except ImportError, error: message = '{0}. Try running `pip install fake-factory`.'.format(error) raise ImportError(message) try: import factory except ImportError, error: message = '{0}. Try running `pip install factory_boy`.'.format(error) raise ImportError(message) faker = FakerFactory.create() class AuthorshipFactory(factory.django.DjangoModelFactory): class Meta(object): abstract = True created_by = factory.SubFactory('thecut.authorship.factories.UserFactory') updated_by = factory.SelfAttribute('created_by') class UserFactory(factory.django.DjangoModelFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] username = factory.Sequence(lambda n: 'user-{0}'.format(n)) class UserFakerFactory(UserFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] first_name = factory.LazyAttribute(lambda o: faker.first_name()) last_name = factory.LazyAttribute(lambda o: faker.last_name()) username = factory.LazyAttribute( lambda o: '{0}.{1}'.format(o.first_name.lower(), o.last_name.lower())) email = factory.LazyAttribute( lambda o: '{0}@example.com'.format(o.username)) Update exception handling syntax for python 3 compatibility.# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals try: from faker import Factory as FakerFactory except ImportError as error: message = '{0}. Try running `pip install fake-factory`.'.format(error) raise ImportError(message) try: import factory except ImportError as error: message = '{0}. Try running `pip install factory_boy`.'.format(error) raise ImportError(message) faker = FakerFactory.create() class AuthorshipFactory(factory.django.DjangoModelFactory): class Meta(object): abstract = True created_by = factory.SubFactory('thecut.authorship.factories.UserFactory') updated_by = factory.SelfAttribute('created_by') class UserFactory(factory.django.DjangoModelFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] username = factory.Sequence(lambda n: 'user-{0}'.format(n)) class UserFakerFactory(UserFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] first_name = factory.LazyAttribute(lambda o: faker.first_name()) last_name = factory.LazyAttribute(lambda o: faker.last_name()) username = factory.LazyAttribute( lambda o: '{0}.{1}'.format(o.first_name.lower(), o.last_name.lower())) email = factory.LazyAttribute( lambda o: '{0}@example.com'.format(o.username))
<commit_before># -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals try: from faker import Factory as FakerFactory except ImportError, error: message = '{0}. Try running `pip install fake-factory`.'.format(error) raise ImportError(message) try: import factory except ImportError, error: message = '{0}. Try running `pip install factory_boy`.'.format(error) raise ImportError(message) faker = FakerFactory.create() class AuthorshipFactory(factory.django.DjangoModelFactory): class Meta(object): abstract = True created_by = factory.SubFactory('thecut.authorship.factories.UserFactory') updated_by = factory.SelfAttribute('created_by') class UserFactory(factory.django.DjangoModelFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] username = factory.Sequence(lambda n: 'user-{0}'.format(n)) class UserFakerFactory(UserFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] first_name = factory.LazyAttribute(lambda o: faker.first_name()) last_name = factory.LazyAttribute(lambda o: faker.last_name()) username = factory.LazyAttribute( lambda o: '{0}.{1}'.format(o.first_name.lower(), o.last_name.lower())) email = factory.LazyAttribute( lambda o: '{0}@example.com'.format(o.username)) <commit_msg>Update exception handling syntax for python 3 compatibility.<commit_after># -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals try: from faker import Factory as FakerFactory except ImportError as error: message = '{0}. Try running `pip install fake-factory`.'.format(error) raise ImportError(message) try: import factory except ImportError as error: message = '{0}. Try running `pip install factory_boy`.'.format(error) raise ImportError(message) faker = FakerFactory.create() class AuthorshipFactory(factory.django.DjangoModelFactory): class Meta(object): abstract = True created_by = factory.SubFactory('thecut.authorship.factories.UserFactory') updated_by = factory.SelfAttribute('created_by') class UserFactory(factory.django.DjangoModelFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] username = factory.Sequence(lambda n: 'user-{0}'.format(n)) class UserFakerFactory(UserFactory): class Meta(object): model = 'auth.User' django_get_or_create = ['username'] first_name = factory.LazyAttribute(lambda o: faker.first_name()) last_name = factory.LazyAttribute(lambda o: faker.last_name()) username = factory.LazyAttribute( lambda o: '{0}.{1}'.format(o.first_name.lower(), o.last_name.lower())) email = factory.LazyAttribute( lambda o: '{0}@example.com'.format(o.username))
3dfe299893a5c4259b2f6abd3f9e2e458a32ef44
src/sentry/web/frontend/react_page.py
src/sentry/web/frontend/react_page.py
from __future__ import absolute_import from django.http import HttpResponse from django.template import loader, Context from sentry.web.frontend.base import BaseView, OrganizationView class ReactMixin(object): def handle_react(self, request): context = Context({'request': request}) template = loader.render_to_string('sentry/bases/react.html', context) response = HttpResponse(template) response['Content-Type'] = 'text/html' return response # TODO(dcramer): once we implement basic auth hooks in React we can make this # generic class ReactPageView(OrganizationView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request) class GenericReactPageView(BaseView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request)
from __future__ import absolute_import from django.core.context_processors import csrf from django.http import HttpResponse from django.template import loader, Context from sentry.web.frontend.base import BaseView, OrganizationView class ReactMixin(object): def handle_react(self, request): context = Context({'request': request}) context.update(csrf(request)) template = loader.render_to_string('sentry/bases/react.html', context) response = HttpResponse(template) response['Content-Type'] = 'text/html' return response # TODO(dcramer): once we implement basic auth hooks in React we can make this # generic class ReactPageView(OrganizationView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request) class GenericReactPageView(BaseView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request)
Make sure all React pages have a CSRF token in session
Make sure all React pages have a CSRF token in session Fixes GH-2419
Python
bsd-3-clause
mitsuhiko/sentry,zenefits/sentry,gencer/sentry,daevaorn/sentry,daevaorn/sentry,alexm92/sentry,fotinakis/sentry,looker/sentry,JamesMura/sentry,gencer/sentry,mvaled/sentry,JackDanger/sentry,ifduyue/sentry,nicholasserra/sentry,jean/sentry,mvaled/sentry,mvaled/sentry,BuildingLink/sentry,JackDanger/sentry,JamesMura/sentry,ifduyue/sentry,fotinakis/sentry,zenefits/sentry,daevaorn/sentry,ifduyue/sentry,zenefits/sentry,looker/sentry,jean/sentry,BuildingLink/sentry,zenefits/sentry,gencer/sentry,BuildingLink/sentry,looker/sentry,jean/sentry,jean/sentry,ifduyue/sentry,ifduyue/sentry,JamesMura/sentry,jean/sentry,BuildingLink/sentry,looker/sentry,nicholasserra/sentry,fotinakis/sentry,fotinakis/sentry,JamesMura/sentry,alexm92/sentry,mvaled/sentry,JackDanger/sentry,beeftornado/sentry,JamesMura/sentry,mvaled/sentry,beeftornado/sentry,alexm92/sentry,daevaorn/sentry,mvaled/sentry,mitsuhiko/sentry,nicholasserra/sentry,beeftornado/sentry,BuildingLink/sentry,gencer/sentry,gencer/sentry,looker/sentry,zenefits/sentry
from __future__ import absolute_import from django.http import HttpResponse from django.template import loader, Context from sentry.web.frontend.base import BaseView, OrganizationView class ReactMixin(object): def handle_react(self, request): context = Context({'request': request}) template = loader.render_to_string('sentry/bases/react.html', context) response = HttpResponse(template) response['Content-Type'] = 'text/html' return response # TODO(dcramer): once we implement basic auth hooks in React we can make this # generic class ReactPageView(OrganizationView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request) class GenericReactPageView(BaseView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request) Make sure all React pages have a CSRF token in session Fixes GH-2419
from __future__ import absolute_import from django.core.context_processors import csrf from django.http import HttpResponse from django.template import loader, Context from sentry.web.frontend.base import BaseView, OrganizationView class ReactMixin(object): def handle_react(self, request): context = Context({'request': request}) context.update(csrf(request)) template = loader.render_to_string('sentry/bases/react.html', context) response = HttpResponse(template) response['Content-Type'] = 'text/html' return response # TODO(dcramer): once we implement basic auth hooks in React we can make this # generic class ReactPageView(OrganizationView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request) class GenericReactPageView(BaseView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request)
<commit_before>from __future__ import absolute_import from django.http import HttpResponse from django.template import loader, Context from sentry.web.frontend.base import BaseView, OrganizationView class ReactMixin(object): def handle_react(self, request): context = Context({'request': request}) template = loader.render_to_string('sentry/bases/react.html', context) response = HttpResponse(template) response['Content-Type'] = 'text/html' return response # TODO(dcramer): once we implement basic auth hooks in React we can make this # generic class ReactPageView(OrganizationView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request) class GenericReactPageView(BaseView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request) <commit_msg>Make sure all React pages have a CSRF token in session Fixes GH-2419<commit_after>
from __future__ import absolute_import from django.core.context_processors import csrf from django.http import HttpResponse from django.template import loader, Context from sentry.web.frontend.base import BaseView, OrganizationView class ReactMixin(object): def handle_react(self, request): context = Context({'request': request}) context.update(csrf(request)) template = loader.render_to_string('sentry/bases/react.html', context) response = HttpResponse(template) response['Content-Type'] = 'text/html' return response # TODO(dcramer): once we implement basic auth hooks in React we can make this # generic class ReactPageView(OrganizationView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request) class GenericReactPageView(BaseView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request)
from __future__ import absolute_import from django.http import HttpResponse from django.template import loader, Context from sentry.web.frontend.base import BaseView, OrganizationView class ReactMixin(object): def handle_react(self, request): context = Context({'request': request}) template = loader.render_to_string('sentry/bases/react.html', context) response = HttpResponse(template) response['Content-Type'] = 'text/html' return response # TODO(dcramer): once we implement basic auth hooks in React we can make this # generic class ReactPageView(OrganizationView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request) class GenericReactPageView(BaseView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request) Make sure all React pages have a CSRF token in session Fixes GH-2419from __future__ import absolute_import from django.core.context_processors import csrf from django.http import HttpResponse from django.template import loader, Context from sentry.web.frontend.base import BaseView, OrganizationView class ReactMixin(object): def handle_react(self, request): context = Context({'request': request}) context.update(csrf(request)) template = loader.render_to_string('sentry/bases/react.html', context) response = HttpResponse(template) response['Content-Type'] = 'text/html' return response # TODO(dcramer): once we implement basic auth hooks in React we can make this # generic class ReactPageView(OrganizationView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request) class GenericReactPageView(BaseView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request)
<commit_before>from __future__ import absolute_import from django.http import HttpResponse from django.template import loader, Context from sentry.web.frontend.base import BaseView, OrganizationView class ReactMixin(object): def handle_react(self, request): context = Context({'request': request}) template = loader.render_to_string('sentry/bases/react.html', context) response = HttpResponse(template) response['Content-Type'] = 'text/html' return response # TODO(dcramer): once we implement basic auth hooks in React we can make this # generic class ReactPageView(OrganizationView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request) class GenericReactPageView(BaseView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request) <commit_msg>Make sure all React pages have a CSRF token in session Fixes GH-2419<commit_after>from __future__ import absolute_import from django.core.context_processors import csrf from django.http import HttpResponse from django.template import loader, Context from sentry.web.frontend.base import BaseView, OrganizationView class ReactMixin(object): def handle_react(self, request): context = Context({'request': request}) context.update(csrf(request)) template = loader.render_to_string('sentry/bases/react.html', context) response = HttpResponse(template) response['Content-Type'] = 'text/html' return response # TODO(dcramer): once we implement basic auth hooks in React we can make this # generic class ReactPageView(OrganizationView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request) class GenericReactPageView(BaseView, ReactMixin): def handle(self, request, **kwargs): return self.handle_react(request)
b5bb360a78eb3493a52a4f085bb7ae2ef1355cdd
scavenger/net_utils.py
scavenger/net_utils.py
import subprocess import requests def logged_in(): """Check whether the device has logged in. Return a dictionary containing: username byte duration (in seconds) Return False if no logged in """ r = requests.post('http://net.tsinghua.edu.cn/cgi-bin/do_login', data={'action': 'check_online'}) if r: # status: OK infos = r.text.split(',') if len(infos) == 5: # Decode successfully return dict(username=infos[1], byte=infos[2], duration=infos[4]) # Failed to get infos return False def arp_scan(): """Generate (IP, MAC) pairs using arp-scan""" proc = subprocess.Popen(['sudo', 'arp-scan', '-lq'], stdout=subprocess.PIPE) out = proc.stdout # Skip the first two lines. next(out) next(out) # Parse IPs & MACs for line in out: infos = line.split() if not infos: # Empty line at the end of the output return if len(infos) < 2: raise RuntimeError('Invalid output of arp-scan: "%s"' % line) yield (infos[0], infos[1]) # Generate (IP, MAC)
import subprocess import requests def check_online(): """Check whether the device has logged in. Return a dictionary containing: username byte duration (in seconds) Return False if no logged in """ r = requests.post('http://net.tsinghua.edu.cn/cgi-bin/do_login', data={'action': 'check_online'}) if r: # status: OK infos = r.text.split(',') if len(infos) == 5: # Decode successfully return dict(username=infos[1], byte=infos[2], duration=infos[4]) # Failed to get infos return False def arp_scan(): """Generate (IP, MAC) pairs using arp-scan""" proc = subprocess.Popen(['sudo', 'arp-scan', '-lq'], stdout=subprocess.PIPE) out = proc.stdout # Skip the first two lines. next(out) next(out) # Parse IPs & MACs for line in out: infos = line.split() if not infos: # Empty line at the end of the output return if len(infos) < 2: raise RuntimeError('Invalid output of arp-scan: "%s"' % line) yield (infos[0], infos[1]) # Generate (IP, MAC)
Change name: logged_in => check_online
Change name: logged_in => check_online
Python
mit
ThomasLee969/scavenger
import subprocess import requests def logged_in(): """Check whether the device has logged in. Return a dictionary containing: username byte duration (in seconds) Return False if no logged in """ r = requests.post('http://net.tsinghua.edu.cn/cgi-bin/do_login', data={'action': 'check_online'}) if r: # status: OK infos = r.text.split(',') if len(infos) == 5: # Decode successfully return dict(username=infos[1], byte=infos[2], duration=infos[4]) # Failed to get infos return False def arp_scan(): """Generate (IP, MAC) pairs using arp-scan""" proc = subprocess.Popen(['sudo', 'arp-scan', '-lq'], stdout=subprocess.PIPE) out = proc.stdout # Skip the first two lines. next(out) next(out) # Parse IPs & MACs for line in out: infos = line.split() if not infos: # Empty line at the end of the output return if len(infos) < 2: raise RuntimeError('Invalid output of arp-scan: "%s"' % line) yield (infos[0], infos[1]) # Generate (IP, MAC) Change name: logged_in => check_online
import subprocess import requests def check_online(): """Check whether the device has logged in. Return a dictionary containing: username byte duration (in seconds) Return False if no logged in """ r = requests.post('http://net.tsinghua.edu.cn/cgi-bin/do_login', data={'action': 'check_online'}) if r: # status: OK infos = r.text.split(',') if len(infos) == 5: # Decode successfully return dict(username=infos[1], byte=infos[2], duration=infos[4]) # Failed to get infos return False def arp_scan(): """Generate (IP, MAC) pairs using arp-scan""" proc = subprocess.Popen(['sudo', 'arp-scan', '-lq'], stdout=subprocess.PIPE) out = proc.stdout # Skip the first two lines. next(out) next(out) # Parse IPs & MACs for line in out: infos = line.split() if not infos: # Empty line at the end of the output return if len(infos) < 2: raise RuntimeError('Invalid output of arp-scan: "%s"' % line) yield (infos[0], infos[1]) # Generate (IP, MAC)
<commit_before>import subprocess import requests def logged_in(): """Check whether the device has logged in. Return a dictionary containing: username byte duration (in seconds) Return False if no logged in """ r = requests.post('http://net.tsinghua.edu.cn/cgi-bin/do_login', data={'action': 'check_online'}) if r: # status: OK infos = r.text.split(',') if len(infos) == 5: # Decode successfully return dict(username=infos[1], byte=infos[2], duration=infos[4]) # Failed to get infos return False def arp_scan(): """Generate (IP, MAC) pairs using arp-scan""" proc = subprocess.Popen(['sudo', 'arp-scan', '-lq'], stdout=subprocess.PIPE) out = proc.stdout # Skip the first two lines. next(out) next(out) # Parse IPs & MACs for line in out: infos = line.split() if not infos: # Empty line at the end of the output return if len(infos) < 2: raise RuntimeError('Invalid output of arp-scan: "%s"' % line) yield (infos[0], infos[1]) # Generate (IP, MAC) <commit_msg>Change name: logged_in => check_online<commit_after>
import subprocess import requests def check_online(): """Check whether the device has logged in. Return a dictionary containing: username byte duration (in seconds) Return False if no logged in """ r = requests.post('http://net.tsinghua.edu.cn/cgi-bin/do_login', data={'action': 'check_online'}) if r: # status: OK infos = r.text.split(',') if len(infos) == 5: # Decode successfully return dict(username=infos[1], byte=infos[2], duration=infos[4]) # Failed to get infos return False def arp_scan(): """Generate (IP, MAC) pairs using arp-scan""" proc = subprocess.Popen(['sudo', 'arp-scan', '-lq'], stdout=subprocess.PIPE) out = proc.stdout # Skip the first two lines. next(out) next(out) # Parse IPs & MACs for line in out: infos = line.split() if not infos: # Empty line at the end of the output return if len(infos) < 2: raise RuntimeError('Invalid output of arp-scan: "%s"' % line) yield (infos[0], infos[1]) # Generate (IP, MAC)
import subprocess import requests def logged_in(): """Check whether the device has logged in. Return a dictionary containing: username byte duration (in seconds) Return False if no logged in """ r = requests.post('http://net.tsinghua.edu.cn/cgi-bin/do_login', data={'action': 'check_online'}) if r: # status: OK infos = r.text.split(',') if len(infos) == 5: # Decode successfully return dict(username=infos[1], byte=infos[2], duration=infos[4]) # Failed to get infos return False def arp_scan(): """Generate (IP, MAC) pairs using arp-scan""" proc = subprocess.Popen(['sudo', 'arp-scan', '-lq'], stdout=subprocess.PIPE) out = proc.stdout # Skip the first two lines. next(out) next(out) # Parse IPs & MACs for line in out: infos = line.split() if not infos: # Empty line at the end of the output return if len(infos) < 2: raise RuntimeError('Invalid output of arp-scan: "%s"' % line) yield (infos[0], infos[1]) # Generate (IP, MAC) Change name: logged_in => check_onlineimport subprocess import requests def check_online(): """Check whether the device has logged in. Return a dictionary containing: username byte duration (in seconds) Return False if no logged in """ r = requests.post('http://net.tsinghua.edu.cn/cgi-bin/do_login', data={'action': 'check_online'}) if r: # status: OK infos = r.text.split(',') if len(infos) == 5: # Decode successfully return dict(username=infos[1], byte=infos[2], duration=infos[4]) # Failed to get infos return False def arp_scan(): """Generate (IP, MAC) pairs using arp-scan""" proc = subprocess.Popen(['sudo', 'arp-scan', '-lq'], stdout=subprocess.PIPE) out = proc.stdout # Skip the first two lines. next(out) next(out) # Parse IPs & MACs for line in out: infos = line.split() if not infos: # Empty line at the end of the output return if len(infos) < 2: raise RuntimeError('Invalid output of arp-scan: "%s"' % line) yield (infos[0], infos[1]) # Generate (IP, MAC)
<commit_before>import subprocess import requests def logged_in(): """Check whether the device has logged in. Return a dictionary containing: username byte duration (in seconds) Return False if no logged in """ r = requests.post('http://net.tsinghua.edu.cn/cgi-bin/do_login', data={'action': 'check_online'}) if r: # status: OK infos = r.text.split(',') if len(infos) == 5: # Decode successfully return dict(username=infos[1], byte=infos[2], duration=infos[4]) # Failed to get infos return False def arp_scan(): """Generate (IP, MAC) pairs using arp-scan""" proc = subprocess.Popen(['sudo', 'arp-scan', '-lq'], stdout=subprocess.PIPE) out = proc.stdout # Skip the first two lines. next(out) next(out) # Parse IPs & MACs for line in out: infos = line.split() if not infos: # Empty line at the end of the output return if len(infos) < 2: raise RuntimeError('Invalid output of arp-scan: "%s"' % line) yield (infos[0], infos[1]) # Generate (IP, MAC) <commit_msg>Change name: logged_in => check_online<commit_after>import subprocess import requests def check_online(): """Check whether the device has logged in. Return a dictionary containing: username byte duration (in seconds) Return False if no logged in """ r = requests.post('http://net.tsinghua.edu.cn/cgi-bin/do_login', data={'action': 'check_online'}) if r: # status: OK infos = r.text.split(',') if len(infos) == 5: # Decode successfully return dict(username=infos[1], byte=infos[2], duration=infos[4]) # Failed to get infos return False def arp_scan(): """Generate (IP, MAC) pairs using arp-scan""" proc = subprocess.Popen(['sudo', 'arp-scan', '-lq'], stdout=subprocess.PIPE) out = proc.stdout # Skip the first two lines. next(out) next(out) # Parse IPs & MACs for line in out: infos = line.split() if not infos: # Empty line at the end of the output return if len(infos) < 2: raise RuntimeError('Invalid output of arp-scan: "%s"' % line) yield (infos[0], infos[1]) # Generate (IP, MAC)
ad87d1d9860ea394af261c3298403016d78dc1e1
bitcoin/__init__.py
bitcoin/__init__.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright © 2012-2013 by its contributors. See AUTHORS for details. # # Distributed under the MIT/X11 software license, see the accompanying # file LICENSE or http://www.opensource.org/licenses/mit-license.php. # VERSION = (0,0,1, 'alpha', 0) def get_version(): version = '%s.%s' % (VERSION[0], VERSION[1]) if VERSION[2]: version = '%s.%s' % (version, VERSION[2]) if VERSION[3:] == ('alpha', 0): version = '%s pre-alpha' % version else: if VERSION[3] != 'final': version = "%s %s" % (version, VERSION[3]) if VERSION[4] != 0: version = '%s%s' % (version, VERSION[4]) return version # # End of File #
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright © 2012-2013 by its contributors. See AUTHORS for details. # # Distributed under the MIT/X11 software license, see the accompanying # file LICENSE or http://www.opensource.org/licenses/mit-license.php. # VERSION = (0,0,1, 'alpha', 0) def get_version(): version = '%s.%s' % (VERSION[0], VERSION[1]) if VERSION[2]: version = '%s.%s' % (version, VERSION[2]) if VERSION[3:] == ('alpha', 0): version = '%spre-alpha' % version else: if VERSION[3] != 'final': version = "%s%s" % (version, VERSION[3]) if VERSION[4] != 0: version = '%s%s' % (version, VERSION[4]) return version # # End of File #
Remove spaces from version string.
Remove spaces from version string.
Python
mit
monetizeio/python-bitcoin
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright © 2012-2013 by its contributors. See AUTHORS for details. # # Distributed under the MIT/X11 software license, see the accompanying # file LICENSE or http://www.opensource.org/licenses/mit-license.php. # VERSION = (0,0,1, 'alpha', 0) def get_version(): version = '%s.%s' % (VERSION[0], VERSION[1]) if VERSION[2]: version = '%s.%s' % (version, VERSION[2]) if VERSION[3:] == ('alpha', 0): version = '%s pre-alpha' % version else: if VERSION[3] != 'final': version = "%s %s" % (version, VERSION[3]) if VERSION[4] != 0: version = '%s%s' % (version, VERSION[4]) return version # # End of File # Remove spaces from version string.
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright © 2012-2013 by its contributors. See AUTHORS for details. # # Distributed under the MIT/X11 software license, see the accompanying # file LICENSE or http://www.opensource.org/licenses/mit-license.php. # VERSION = (0,0,1, 'alpha', 0) def get_version(): version = '%s.%s' % (VERSION[0], VERSION[1]) if VERSION[2]: version = '%s.%s' % (version, VERSION[2]) if VERSION[3:] == ('alpha', 0): version = '%spre-alpha' % version else: if VERSION[3] != 'final': version = "%s%s" % (version, VERSION[3]) if VERSION[4] != 0: version = '%s%s' % (version, VERSION[4]) return version # # End of File #
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright © 2012-2013 by its contributors. See AUTHORS for details. # # Distributed under the MIT/X11 software license, see the accompanying # file LICENSE or http://www.opensource.org/licenses/mit-license.php. # VERSION = (0,0,1, 'alpha', 0) def get_version(): version = '%s.%s' % (VERSION[0], VERSION[1]) if VERSION[2]: version = '%s.%s' % (version, VERSION[2]) if VERSION[3:] == ('alpha', 0): version = '%s pre-alpha' % version else: if VERSION[3] != 'final': version = "%s %s" % (version, VERSION[3]) if VERSION[4] != 0: version = '%s%s' % (version, VERSION[4]) return version # # End of File # <commit_msg>Remove spaces from version string.<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright © 2012-2013 by its contributors. See AUTHORS for details. # # Distributed under the MIT/X11 software license, see the accompanying # file LICENSE or http://www.opensource.org/licenses/mit-license.php. # VERSION = (0,0,1, 'alpha', 0) def get_version(): version = '%s.%s' % (VERSION[0], VERSION[1]) if VERSION[2]: version = '%s.%s' % (version, VERSION[2]) if VERSION[3:] == ('alpha', 0): version = '%spre-alpha' % version else: if VERSION[3] != 'final': version = "%s%s" % (version, VERSION[3]) if VERSION[4] != 0: version = '%s%s' % (version, VERSION[4]) return version # # End of File #
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright © 2012-2013 by its contributors. See AUTHORS for details. # # Distributed under the MIT/X11 software license, see the accompanying # file LICENSE or http://www.opensource.org/licenses/mit-license.php. # VERSION = (0,0,1, 'alpha', 0) def get_version(): version = '%s.%s' % (VERSION[0], VERSION[1]) if VERSION[2]: version = '%s.%s' % (version, VERSION[2]) if VERSION[3:] == ('alpha', 0): version = '%s pre-alpha' % version else: if VERSION[3] != 'final': version = "%s %s" % (version, VERSION[3]) if VERSION[4] != 0: version = '%s%s' % (version, VERSION[4]) return version # # End of File # Remove spaces from version string.#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright © 2012-2013 by its contributors. See AUTHORS for details. # # Distributed under the MIT/X11 software license, see the accompanying # file LICENSE or http://www.opensource.org/licenses/mit-license.php. # VERSION = (0,0,1, 'alpha', 0) def get_version(): version = '%s.%s' % (VERSION[0], VERSION[1]) if VERSION[2]: version = '%s.%s' % (version, VERSION[2]) if VERSION[3:] == ('alpha', 0): version = '%spre-alpha' % version else: if VERSION[3] != 'final': version = "%s%s" % (version, VERSION[3]) if VERSION[4] != 0: version = '%s%s' % (version, VERSION[4]) return version # # End of File #
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright © 2012-2013 by its contributors. See AUTHORS for details. # # Distributed under the MIT/X11 software license, see the accompanying # file LICENSE or http://www.opensource.org/licenses/mit-license.php. # VERSION = (0,0,1, 'alpha', 0) def get_version(): version = '%s.%s' % (VERSION[0], VERSION[1]) if VERSION[2]: version = '%s.%s' % (version, VERSION[2]) if VERSION[3:] == ('alpha', 0): version = '%s pre-alpha' % version else: if VERSION[3] != 'final': version = "%s %s" % (version, VERSION[3]) if VERSION[4] != 0: version = '%s%s' % (version, VERSION[4]) return version # # End of File # <commit_msg>Remove spaces from version string.<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright © 2012-2013 by its contributors. See AUTHORS for details. # # Distributed under the MIT/X11 software license, see the accompanying # file LICENSE or http://www.opensource.org/licenses/mit-license.php. # VERSION = (0,0,1, 'alpha', 0) def get_version(): version = '%s.%s' % (VERSION[0], VERSION[1]) if VERSION[2]: version = '%s.%s' % (version, VERSION[2]) if VERSION[3:] == ('alpha', 0): version = '%spre-alpha' % version else: if VERSION[3] != 'final': version = "%s%s" % (version, VERSION[3]) if VERSION[4] != 0: version = '%s%s' % (version, VERSION[4]) return version # # End of File #
643634e96554b00214ca4f0d45343e61b0df8e5a
foxybot/bot_help.py
foxybot/bot_help.py
"""Provide a class to load and parse the help file and Provide a simple interface for retrieving help entries""" import json import os class HelpManager(object): _help_dict = {} _last_modified = 0 @staticmethod def get_help(lang, key): """ Retrieve a given commands help text with given language. :param lang: ISO 639-1 language code specifying language to try to retrieve :param key: name of the command :return: description in `lang` for `key` """ if os.path.getmtime('help.json') > HelpManager._last_modified: HelpManager.load_help() lang = lang.lower() key = key.lower() if lang not in HelpManager._help_dict: print(f"[ERROR] tried to access `_help_dict[{lang}]`") lang = 'en' if key not in HelpManager._help_dict[lang]: print(f"[ERROR] tried to access `_help_dict[{lang}][{key}]`") return None return HelpManager._help_dict[lang][key] @staticmethod def load_help(): try: with open('help.json', 'r', encoding='utf-8') as infile: HelpManager._help_dict = json.load(infile) HelpManager._last_modified = os.path.getmtime('help.json') except OSError as ex: print("[ERROR] Cannot find `help.json`") print(ex) print(HelpManager._help_dict)
"""Provide a class to load and parse the help file and Provide a simple interface for retrieving help entries""" import json import os class HelpManager(object): _help_dict = {} _last_modified = 0 @staticmethod def get_help(lang, key): """ Retrieve a given commands help text with given language. :param lang: ISO 639-1 language code specifying language to try to retrieve :param key: name of the command :return: description in `lang` for `key` """ if os.path.getmtime('help.json') > HelpManager._last_modified: HelpManager.load_help() lang = lang.lower() key = key.lower() if lang not in HelpManager._help_dict: print(f"[ERROR] tried to access `_help_dict[{lang}]`") lang = 'en' if key not in HelpManager._help_dict[lang]: print(f"[ERROR] tried to access `_help_dict[{lang}][{key}]`") return None return HelpManager._help_dict[lang][key] @staticmethod def load_help(): try: with open('help.json', 'r', encoding='utf-8') as infile: HelpManager._help_dict = json.load(infile) HelpManager._last_modified = os.path.getmtime('help.json') except OSError as ex: print("[ERROR] Cannot find `help.json`") print(ex)
Remove unneeded debug cod e
Remove unneeded debug cod e
Python
bsd-2-clause
6180/foxybot
"""Provide a class to load and parse the help file and Provide a simple interface for retrieving help entries""" import json import os class HelpManager(object): _help_dict = {} _last_modified = 0 @staticmethod def get_help(lang, key): """ Retrieve a given commands help text with given language. :param lang: ISO 639-1 language code specifying language to try to retrieve :param key: name of the command :return: description in `lang` for `key` """ if os.path.getmtime('help.json') > HelpManager._last_modified: HelpManager.load_help() lang = lang.lower() key = key.lower() if lang not in HelpManager._help_dict: print(f"[ERROR] tried to access `_help_dict[{lang}]`") lang = 'en' if key not in HelpManager._help_dict[lang]: print(f"[ERROR] tried to access `_help_dict[{lang}][{key}]`") return None return HelpManager._help_dict[lang][key] @staticmethod def load_help(): try: with open('help.json', 'r', encoding='utf-8') as infile: HelpManager._help_dict = json.load(infile) HelpManager._last_modified = os.path.getmtime('help.json') except OSError as ex: print("[ERROR] Cannot find `help.json`") print(ex) print(HelpManager._help_dict) Remove unneeded debug cod e
"""Provide a class to load and parse the help file and Provide a simple interface for retrieving help entries""" import json import os class HelpManager(object): _help_dict = {} _last_modified = 0 @staticmethod def get_help(lang, key): """ Retrieve a given commands help text with given language. :param lang: ISO 639-1 language code specifying language to try to retrieve :param key: name of the command :return: description in `lang` for `key` """ if os.path.getmtime('help.json') > HelpManager._last_modified: HelpManager.load_help() lang = lang.lower() key = key.lower() if lang not in HelpManager._help_dict: print(f"[ERROR] tried to access `_help_dict[{lang}]`") lang = 'en' if key not in HelpManager._help_dict[lang]: print(f"[ERROR] tried to access `_help_dict[{lang}][{key}]`") return None return HelpManager._help_dict[lang][key] @staticmethod def load_help(): try: with open('help.json', 'r', encoding='utf-8') as infile: HelpManager._help_dict = json.load(infile) HelpManager._last_modified = os.path.getmtime('help.json') except OSError as ex: print("[ERROR] Cannot find `help.json`") print(ex)
<commit_before>"""Provide a class to load and parse the help file and Provide a simple interface for retrieving help entries""" import json import os class HelpManager(object): _help_dict = {} _last_modified = 0 @staticmethod def get_help(lang, key): """ Retrieve a given commands help text with given language. :param lang: ISO 639-1 language code specifying language to try to retrieve :param key: name of the command :return: description in `lang` for `key` """ if os.path.getmtime('help.json') > HelpManager._last_modified: HelpManager.load_help() lang = lang.lower() key = key.lower() if lang not in HelpManager._help_dict: print(f"[ERROR] tried to access `_help_dict[{lang}]`") lang = 'en' if key not in HelpManager._help_dict[lang]: print(f"[ERROR] tried to access `_help_dict[{lang}][{key}]`") return None return HelpManager._help_dict[lang][key] @staticmethod def load_help(): try: with open('help.json', 'r', encoding='utf-8') as infile: HelpManager._help_dict = json.load(infile) HelpManager._last_modified = os.path.getmtime('help.json') except OSError as ex: print("[ERROR] Cannot find `help.json`") print(ex) print(HelpManager._help_dict) <commit_msg>Remove unneeded debug cod e<commit_after>
"""Provide a class to load and parse the help file and Provide a simple interface for retrieving help entries""" import json import os class HelpManager(object): _help_dict = {} _last_modified = 0 @staticmethod def get_help(lang, key): """ Retrieve a given commands help text with given language. :param lang: ISO 639-1 language code specifying language to try to retrieve :param key: name of the command :return: description in `lang` for `key` """ if os.path.getmtime('help.json') > HelpManager._last_modified: HelpManager.load_help() lang = lang.lower() key = key.lower() if lang not in HelpManager._help_dict: print(f"[ERROR] tried to access `_help_dict[{lang}]`") lang = 'en' if key not in HelpManager._help_dict[lang]: print(f"[ERROR] tried to access `_help_dict[{lang}][{key}]`") return None return HelpManager._help_dict[lang][key] @staticmethod def load_help(): try: with open('help.json', 'r', encoding='utf-8') as infile: HelpManager._help_dict = json.load(infile) HelpManager._last_modified = os.path.getmtime('help.json') except OSError as ex: print("[ERROR] Cannot find `help.json`") print(ex)
"""Provide a class to load and parse the help file and Provide a simple interface for retrieving help entries""" import json import os class HelpManager(object): _help_dict = {} _last_modified = 0 @staticmethod def get_help(lang, key): """ Retrieve a given commands help text with given language. :param lang: ISO 639-1 language code specifying language to try to retrieve :param key: name of the command :return: description in `lang` for `key` """ if os.path.getmtime('help.json') > HelpManager._last_modified: HelpManager.load_help() lang = lang.lower() key = key.lower() if lang not in HelpManager._help_dict: print(f"[ERROR] tried to access `_help_dict[{lang}]`") lang = 'en' if key not in HelpManager._help_dict[lang]: print(f"[ERROR] tried to access `_help_dict[{lang}][{key}]`") return None return HelpManager._help_dict[lang][key] @staticmethod def load_help(): try: with open('help.json', 'r', encoding='utf-8') as infile: HelpManager._help_dict = json.load(infile) HelpManager._last_modified = os.path.getmtime('help.json') except OSError as ex: print("[ERROR] Cannot find `help.json`") print(ex) print(HelpManager._help_dict) Remove unneeded debug cod e"""Provide a class to load and parse the help file and Provide a simple interface for retrieving help entries""" import json import os class HelpManager(object): _help_dict = {} _last_modified = 0 @staticmethod def get_help(lang, key): """ Retrieve a given commands help text with given language. :param lang: ISO 639-1 language code specifying language to try to retrieve :param key: name of the command :return: description in `lang` for `key` """ if os.path.getmtime('help.json') > HelpManager._last_modified: HelpManager.load_help() lang = lang.lower() key = key.lower() if lang not in HelpManager._help_dict: print(f"[ERROR] tried to access `_help_dict[{lang}]`") lang = 'en' if key not in HelpManager._help_dict[lang]: print(f"[ERROR] tried to access `_help_dict[{lang}][{key}]`") return None return HelpManager._help_dict[lang][key] @staticmethod def load_help(): try: with open('help.json', 'r', encoding='utf-8') as infile: HelpManager._help_dict = json.load(infile) HelpManager._last_modified = os.path.getmtime('help.json') except OSError as ex: print("[ERROR] Cannot find `help.json`") print(ex)
<commit_before>"""Provide a class to load and parse the help file and Provide a simple interface for retrieving help entries""" import json import os class HelpManager(object): _help_dict = {} _last_modified = 0 @staticmethod def get_help(lang, key): """ Retrieve a given commands help text with given language. :param lang: ISO 639-1 language code specifying language to try to retrieve :param key: name of the command :return: description in `lang` for `key` """ if os.path.getmtime('help.json') > HelpManager._last_modified: HelpManager.load_help() lang = lang.lower() key = key.lower() if lang not in HelpManager._help_dict: print(f"[ERROR] tried to access `_help_dict[{lang}]`") lang = 'en' if key not in HelpManager._help_dict[lang]: print(f"[ERROR] tried to access `_help_dict[{lang}][{key}]`") return None return HelpManager._help_dict[lang][key] @staticmethod def load_help(): try: with open('help.json', 'r', encoding='utf-8') as infile: HelpManager._help_dict = json.load(infile) HelpManager._last_modified = os.path.getmtime('help.json') except OSError as ex: print("[ERROR] Cannot find `help.json`") print(ex) print(HelpManager._help_dict) <commit_msg>Remove unneeded debug cod e<commit_after>"""Provide a class to load and parse the help file and Provide a simple interface for retrieving help entries""" import json import os class HelpManager(object): _help_dict = {} _last_modified = 0 @staticmethod def get_help(lang, key): """ Retrieve a given commands help text with given language. :param lang: ISO 639-1 language code specifying language to try to retrieve :param key: name of the command :return: description in `lang` for `key` """ if os.path.getmtime('help.json') > HelpManager._last_modified: HelpManager.load_help() lang = lang.lower() key = key.lower() if lang not in HelpManager._help_dict: print(f"[ERROR] tried to access `_help_dict[{lang}]`") lang = 'en' if key not in HelpManager._help_dict[lang]: print(f"[ERROR] tried to access `_help_dict[{lang}][{key}]`") return None return HelpManager._help_dict[lang][key] @staticmethod def load_help(): try: with open('help.json', 'r', encoding='utf-8') as infile: HelpManager._help_dict = json.load(infile) HelpManager._last_modified = os.path.getmtime('help.json') except OSError as ex: print("[ERROR] Cannot find `help.json`") print(ex)
c434cf202de60d052f61f8608e48b5d7645be1c0
dear_astrid/test/test_rtm_importer.py
dear_astrid/test/test_rtm_importer.py
# pylint: disable=wildcard-import,unused-wildcard-import,missing-docstring from __future__ import absolute_import from unittest import TestCase from nose.tools import * from mock import * from dear_astrid.rtm.importer import Importer as rtmimp class TestRTMImport(TestCase): def setUp(self): self.patches = dict( time = patch('time.sleep'), rtm = patch('rtm.createRTM'), ) self.mocks = dict() for (k, v) in self.patches.items(): self.mocks[k] = v.start() def test_sleep_before_rtm(self): imp = rtmimp(['task']) imp._rtm = Mock() assert not self.mocks['time'].called # assert that it is our mock object assert_equal(imp.rtm, imp._rtm) self.mocks['time'].assert_called_once_with(1) # test calling other methods imp.rtm.foo.bar self.mocks['time'].assert_has_calls([ call(1), call(1) ]) # not used this time assert not self.mocks['rtm'].called def test_deobfuscator(self): imp = rtmimp(['task']) imp.key = 'a92' assert imp.key == '21a' imp.secret = 'deadbeef' assert imp.secret == '56253667'
# pylint: disable=wildcard-import,unused-wildcard-import,missing-docstring from __future__ import absolute_import from unittest import TestCase from nose.tools import * from mock import * from dear_astrid.rtm.importer import * class TestRTMImport(TestCase): def setUp(self): self.patches = dict( time = patch('time.sleep'), rtm = patch('rtm.createRTM'), ) self.mocks = dict() for (k, v) in self.patches.items(): self.mocks[k] = v.start() def test_sleep_before_rtm(self): imp = Importer(['task']) imp._rtm = Mock() assert not self.mocks['time'].called # Assert that it is in fact our mock object. assert_equal(imp.rtm, imp._rtm) self.mocks['time'].assert_called_once_with(1) # Test chaining method calls. imp.rtm.foo.bar self.mocks['time'].assert_has_calls([ call(1), call(1) ]) # Not used this time. assert not self.mocks['rtm'].called def test_deobfuscator(self): imp = Importer(['task']) imp.key = 'a92' assert imp.key == '21a' imp.secret = 'deadbeef' assert imp.secret == '56253667'
Clean up names and comments for consistency
Clean up names and comments for consistency
Python
mit
rwstauner/dear_astrid,rwstauner/dear_astrid
# pylint: disable=wildcard-import,unused-wildcard-import,missing-docstring from __future__ import absolute_import from unittest import TestCase from nose.tools import * from mock import * from dear_astrid.rtm.importer import Importer as rtmimp class TestRTMImport(TestCase): def setUp(self): self.patches = dict( time = patch('time.sleep'), rtm = patch('rtm.createRTM'), ) self.mocks = dict() for (k, v) in self.patches.items(): self.mocks[k] = v.start() def test_sleep_before_rtm(self): imp = rtmimp(['task']) imp._rtm = Mock() assert not self.mocks['time'].called # assert that it is our mock object assert_equal(imp.rtm, imp._rtm) self.mocks['time'].assert_called_once_with(1) # test calling other methods imp.rtm.foo.bar self.mocks['time'].assert_has_calls([ call(1), call(1) ]) # not used this time assert not self.mocks['rtm'].called def test_deobfuscator(self): imp = rtmimp(['task']) imp.key = 'a92' assert imp.key == '21a' imp.secret = 'deadbeef' assert imp.secret == '56253667' Clean up names and comments for consistency
# pylint: disable=wildcard-import,unused-wildcard-import,missing-docstring from __future__ import absolute_import from unittest import TestCase from nose.tools import * from mock import * from dear_astrid.rtm.importer import * class TestRTMImport(TestCase): def setUp(self): self.patches = dict( time = patch('time.sleep'), rtm = patch('rtm.createRTM'), ) self.mocks = dict() for (k, v) in self.patches.items(): self.mocks[k] = v.start() def test_sleep_before_rtm(self): imp = Importer(['task']) imp._rtm = Mock() assert not self.mocks['time'].called # Assert that it is in fact our mock object. assert_equal(imp.rtm, imp._rtm) self.mocks['time'].assert_called_once_with(1) # Test chaining method calls. imp.rtm.foo.bar self.mocks['time'].assert_has_calls([ call(1), call(1) ]) # Not used this time. assert not self.mocks['rtm'].called def test_deobfuscator(self): imp = Importer(['task']) imp.key = 'a92' assert imp.key == '21a' imp.secret = 'deadbeef' assert imp.secret == '56253667'
<commit_before># pylint: disable=wildcard-import,unused-wildcard-import,missing-docstring from __future__ import absolute_import from unittest import TestCase from nose.tools import * from mock import * from dear_astrid.rtm.importer import Importer as rtmimp class TestRTMImport(TestCase): def setUp(self): self.patches = dict( time = patch('time.sleep'), rtm = patch('rtm.createRTM'), ) self.mocks = dict() for (k, v) in self.patches.items(): self.mocks[k] = v.start() def test_sleep_before_rtm(self): imp = rtmimp(['task']) imp._rtm = Mock() assert not self.mocks['time'].called # assert that it is our mock object assert_equal(imp.rtm, imp._rtm) self.mocks['time'].assert_called_once_with(1) # test calling other methods imp.rtm.foo.bar self.mocks['time'].assert_has_calls([ call(1), call(1) ]) # not used this time assert not self.mocks['rtm'].called def test_deobfuscator(self): imp = rtmimp(['task']) imp.key = 'a92' assert imp.key == '21a' imp.secret = 'deadbeef' assert imp.secret == '56253667' <commit_msg>Clean up names and comments for consistency<commit_after>
# pylint: disable=wildcard-import,unused-wildcard-import,missing-docstring from __future__ import absolute_import from unittest import TestCase from nose.tools import * from mock import * from dear_astrid.rtm.importer import * class TestRTMImport(TestCase): def setUp(self): self.patches = dict( time = patch('time.sleep'), rtm = patch('rtm.createRTM'), ) self.mocks = dict() for (k, v) in self.patches.items(): self.mocks[k] = v.start() def test_sleep_before_rtm(self): imp = Importer(['task']) imp._rtm = Mock() assert not self.mocks['time'].called # Assert that it is in fact our mock object. assert_equal(imp.rtm, imp._rtm) self.mocks['time'].assert_called_once_with(1) # Test chaining method calls. imp.rtm.foo.bar self.mocks['time'].assert_has_calls([ call(1), call(1) ]) # Not used this time. assert not self.mocks['rtm'].called def test_deobfuscator(self): imp = Importer(['task']) imp.key = 'a92' assert imp.key == '21a' imp.secret = 'deadbeef' assert imp.secret == '56253667'
# pylint: disable=wildcard-import,unused-wildcard-import,missing-docstring from __future__ import absolute_import from unittest import TestCase from nose.tools import * from mock import * from dear_astrid.rtm.importer import Importer as rtmimp class TestRTMImport(TestCase): def setUp(self): self.patches = dict( time = patch('time.sleep'), rtm = patch('rtm.createRTM'), ) self.mocks = dict() for (k, v) in self.patches.items(): self.mocks[k] = v.start() def test_sleep_before_rtm(self): imp = rtmimp(['task']) imp._rtm = Mock() assert not self.mocks['time'].called # assert that it is our mock object assert_equal(imp.rtm, imp._rtm) self.mocks['time'].assert_called_once_with(1) # test calling other methods imp.rtm.foo.bar self.mocks['time'].assert_has_calls([ call(1), call(1) ]) # not used this time assert not self.mocks['rtm'].called def test_deobfuscator(self): imp = rtmimp(['task']) imp.key = 'a92' assert imp.key == '21a' imp.secret = 'deadbeef' assert imp.secret == '56253667' Clean up names and comments for consistency# pylint: disable=wildcard-import,unused-wildcard-import,missing-docstring from __future__ import absolute_import from unittest import TestCase from nose.tools import * from mock import * from dear_astrid.rtm.importer import * class TestRTMImport(TestCase): def setUp(self): self.patches = dict( time = patch('time.sleep'), rtm = patch('rtm.createRTM'), ) self.mocks = dict() for (k, v) in self.patches.items(): self.mocks[k] = v.start() def test_sleep_before_rtm(self): imp = Importer(['task']) imp._rtm = Mock() assert not self.mocks['time'].called # Assert that it is in fact our mock object. assert_equal(imp.rtm, imp._rtm) self.mocks['time'].assert_called_once_with(1) # Test chaining method calls. imp.rtm.foo.bar self.mocks['time'].assert_has_calls([ call(1), call(1) ]) # Not used this time. assert not self.mocks['rtm'].called def test_deobfuscator(self): imp = Importer(['task']) imp.key = 'a92' assert imp.key == '21a' imp.secret = 'deadbeef' assert imp.secret == '56253667'
<commit_before># pylint: disable=wildcard-import,unused-wildcard-import,missing-docstring from __future__ import absolute_import from unittest import TestCase from nose.tools import * from mock import * from dear_astrid.rtm.importer import Importer as rtmimp class TestRTMImport(TestCase): def setUp(self): self.patches = dict( time = patch('time.sleep'), rtm = patch('rtm.createRTM'), ) self.mocks = dict() for (k, v) in self.patches.items(): self.mocks[k] = v.start() def test_sleep_before_rtm(self): imp = rtmimp(['task']) imp._rtm = Mock() assert not self.mocks['time'].called # assert that it is our mock object assert_equal(imp.rtm, imp._rtm) self.mocks['time'].assert_called_once_with(1) # test calling other methods imp.rtm.foo.bar self.mocks['time'].assert_has_calls([ call(1), call(1) ]) # not used this time assert not self.mocks['rtm'].called def test_deobfuscator(self): imp = rtmimp(['task']) imp.key = 'a92' assert imp.key == '21a' imp.secret = 'deadbeef' assert imp.secret == '56253667' <commit_msg>Clean up names and comments for consistency<commit_after># pylint: disable=wildcard-import,unused-wildcard-import,missing-docstring from __future__ import absolute_import from unittest import TestCase from nose.tools import * from mock import * from dear_astrid.rtm.importer import * class TestRTMImport(TestCase): def setUp(self): self.patches = dict( time = patch('time.sleep'), rtm = patch('rtm.createRTM'), ) self.mocks = dict() for (k, v) in self.patches.items(): self.mocks[k] = v.start() def test_sleep_before_rtm(self): imp = Importer(['task']) imp._rtm = Mock() assert not self.mocks['time'].called # Assert that it is in fact our mock object. assert_equal(imp.rtm, imp._rtm) self.mocks['time'].assert_called_once_with(1) # Test chaining method calls. imp.rtm.foo.bar self.mocks['time'].assert_has_calls([ call(1), call(1) ]) # Not used this time. assert not self.mocks['rtm'].called def test_deobfuscator(self): imp = Importer(['task']) imp.key = 'a92' assert imp.key == '21a' imp.secret = 'deadbeef' assert imp.secret == '56253667'
3a72b9164fc31e4e7f29715729160a48a7ce2f84
source/tyr/migrations/versions/266658781c00_instances_nullable_in_equipments_provider.py
source/tyr/migrations/versions/266658781c00_instances_nullable_in_equipments_provider.py
""" column 'instances' will be deleted later. Has to be nullable for transition Revision ID: 266658781c00 Revises: 204aae05372a Create Date: 2019-04-15 16:27:22.362244 """ # revision identifiers, used by Alembic. revision = '266658781c00' down_revision = '204aae05372a' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): op.alter_column('equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=True) def downgrade(): op.alter_column( 'equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=False )
""" column 'instances' will be deleted later. Has to be nullable for transition Revision ID: 266658781c00 Revises: 204aae05372a Create Date: 2019-04-15 16:27:22.362244 """ # revision identifiers, used by Alembic. revision = '266658781c00' down_revision = '204aae05372a' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): op.alter_column('equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=True) def downgrade(): op.execute("UPDATE equipments_provider SET instances = '{null_instance}';") op.alter_column( 'equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=False )
Add required default value before downgrade migration
Add required default value before downgrade migration
Python
agpl-3.0
xlqian/navitia,kinnou02/navitia,xlqian/navitia,Tisseo/navitia,kinnou02/navitia,xlqian/navitia,xlqian/navitia,Tisseo/navitia,Tisseo/navitia,CanalTP/navitia,kinnou02/navitia,Tisseo/navitia,CanalTP/navitia,CanalTP/navitia,xlqian/navitia,CanalTP/navitia,CanalTP/navitia,Tisseo/navitia,kinnou02/navitia
""" column 'instances' will be deleted later. Has to be nullable for transition Revision ID: 266658781c00 Revises: 204aae05372a Create Date: 2019-04-15 16:27:22.362244 """ # revision identifiers, used by Alembic. revision = '266658781c00' down_revision = '204aae05372a' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): op.alter_column('equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=True) def downgrade(): op.alter_column( 'equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=False ) Add required default value before downgrade migration
""" column 'instances' will be deleted later. Has to be nullable for transition Revision ID: 266658781c00 Revises: 204aae05372a Create Date: 2019-04-15 16:27:22.362244 """ # revision identifiers, used by Alembic. revision = '266658781c00' down_revision = '204aae05372a' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): op.alter_column('equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=True) def downgrade(): op.execute("UPDATE equipments_provider SET instances = '{null_instance}';") op.alter_column( 'equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=False )
<commit_before>""" column 'instances' will be deleted later. Has to be nullable for transition Revision ID: 266658781c00 Revises: 204aae05372a Create Date: 2019-04-15 16:27:22.362244 """ # revision identifiers, used by Alembic. revision = '266658781c00' down_revision = '204aae05372a' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): op.alter_column('equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=True) def downgrade(): op.alter_column( 'equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=False ) <commit_msg>Add required default value before downgrade migration<commit_after>
""" column 'instances' will be deleted later. Has to be nullable for transition Revision ID: 266658781c00 Revises: 204aae05372a Create Date: 2019-04-15 16:27:22.362244 """ # revision identifiers, used by Alembic. revision = '266658781c00' down_revision = '204aae05372a' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): op.alter_column('equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=True) def downgrade(): op.execute("UPDATE equipments_provider SET instances = '{null_instance}';") op.alter_column( 'equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=False )
""" column 'instances' will be deleted later. Has to be nullable for transition Revision ID: 266658781c00 Revises: 204aae05372a Create Date: 2019-04-15 16:27:22.362244 """ # revision identifiers, used by Alembic. revision = '266658781c00' down_revision = '204aae05372a' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): op.alter_column('equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=True) def downgrade(): op.alter_column( 'equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=False ) Add required default value before downgrade migration""" column 'instances' will be deleted later. Has to be nullable for transition Revision ID: 266658781c00 Revises: 204aae05372a Create Date: 2019-04-15 16:27:22.362244 """ # revision identifiers, used by Alembic. revision = '266658781c00' down_revision = '204aae05372a' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): op.alter_column('equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=True) def downgrade(): op.execute("UPDATE equipments_provider SET instances = '{null_instance}';") op.alter_column( 'equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=False )
<commit_before>""" column 'instances' will be deleted later. Has to be nullable for transition Revision ID: 266658781c00 Revises: 204aae05372a Create Date: 2019-04-15 16:27:22.362244 """ # revision identifiers, used by Alembic. revision = '266658781c00' down_revision = '204aae05372a' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): op.alter_column('equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=True) def downgrade(): op.alter_column( 'equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=False ) <commit_msg>Add required default value before downgrade migration<commit_after>""" column 'instances' will be deleted later. Has to be nullable for transition Revision ID: 266658781c00 Revises: 204aae05372a Create Date: 2019-04-15 16:27:22.362244 """ # revision identifiers, used by Alembic. revision = '266658781c00' down_revision = '204aae05372a' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): op.alter_column('equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=True) def downgrade(): op.execute("UPDATE equipments_provider SET instances = '{null_instance}';") op.alter_column( 'equipments_provider', 'instances', existing_type=postgresql.ARRAY(sa.TEXT()), nullable=False )